code stringlengths 51 2.38k | docstring stringlengths 4 15.2k |
|---|---|
def make_id(self):
    """Assign `url_id`, unique among siblings under the same parent, if unset."""
    if self.url_id is None:
        # NOTE(review): scalar subquery selects coalesce(max(url_id + 1), 1),
        # i.e. current max + 1 or 1 for the first row; presumably evaluated by
        # the database at flush time — confirm race behavior under concurrency.
        self.url_id = select([func.coalesce(func.max(self.__class__.url_id + 1), 1)],
            self.__class__.parent == self.parent) | Create a new URL id that is unique to the parent container |
def hideValue(self, value):
    """Remove *value* from this (list-based) domain and record it in
    `_hidden` so a later state pop can restore it."""
    # list.remove raises ValueError if value is not present in the domain.
    list.remove(self, value)
    self._hidden.append(value) | Hide the given value from the domain
After that call the given value won't be seen as a possible value
on that domain anymore. The hidden value will be restored when the
previous saved state is popped.
@param value: Object currently available in the domain |
def unicoder(p):
    """Coerce *p* to a unicode string (Python 2 semantics)."""
    if isinstance(p, unicode):
        return p
    if isinstance(p, str):
        # byte string: decode via the module-level `decoder`
        return decoder(p)
    else:
        # non-string object: presumably decoder() stringifies it — TODO confirm
        return unicode(decoder(p)) | Make sure a Unicode string is returned |
def _prepare_bam_file(bam_file, tmp_dir, config):
    """Ensure the BAM file is queryname-sorted, re-sorting when it is not.

    :param bam_file: path to the input BAM file.
    :param tmp_dir: unused here — presumably kept for interface parity; confirm.
    :param config: pipeline config passed through to the sort helpers.
    :returns: path to a queryname-sorted BAM (the original if already sorted).
    """
    sort_mode = _get_sort_order(bam_file, config)
    if sort_mode != "queryname":
        bam_file = sort(bam_file, config, "queryname")
    return bam_file | Pipe sort by name cmd in case sort by coordinates |
def multipleOrderComparison(cls, orders):
    """Return a cmp-style comparator combining `orders` in priority order.

    :param orders: iterable of order objects exposing `keyfn` and
        `isAscending()`.
    :returns: function of (a, b) -> negative/0/positive, honoring the
        direction of each order; earlier orders take precedence.
    """
    comparers = [(o.keyfn, 1 if o.isAscending() else -1) for o in orders]
    def cmpfn(a, b):
        for keyfn, ascOrDesc in comparers:
            ka, kb = keyfn(a), keyfn(b)
            # (ka > kb) - (ka < kb) replaces Python-2-only cmp(); `!= 0`
            # fixes `is not 0`, an identity test that is implementation-
            # defined for integers (only works via CPython small-int caching).
            comparison = ((ka > kb) - (ka < kb)) * ascOrDesc
            if comparison != 0:
                return comparison
        return 0
    return cmpfn  # | Returns a function that will compare two items according to `orders` |
def list_tags(self):
from highton.models.tag import Tag
return fields.ListField(
name=self.ENDPOINT,
init_class=Tag
).decode(
self.element_from_string(
self._get_request(
endpoint=self.ENDPOINT + '/' + str(self.id) + '/' + Tag.ENDPOINT,
).text
)
) | Get the tags of current object
:return: the tags
:rtype: list |
def gotoHome(self):
    """Move the cursor to the logical start of the current line, skipping an
    interpreter prompt ('>>> ' or '... ') when present.  Holding Shift
    extends the selection instead of moving the anchor.
    """
    mode = QTextCursor.MoveAnchor
    if QApplication.instance().keyboardModifiers() == Qt.ShiftModifier:
        mode = QTextCursor.KeepAnchor
    cursor = self.textCursor()
    block = projex.text.nativestring(cursor.block().text())
    cursor.movePosition(QTextCursor.StartOfBlock, mode)
    if block.startswith('>>> '):
        cursor.movePosition(QTextCursor.Right, mode, 4)
    elif block.startswith('... '):
        # Raw string with escaped dots: the old pattern '...\s*' matched ANY
        # three characters and used a non-raw '\s' escape (a SyntaxWarning on
        # modern Python).  Behavior is unchanged under the startswith() guard.
        match = re.match(r'\.\.\.\s*', block)
        cursor.movePosition(QTextCursor.Right, mode, match.end())
    self.setTextCursor(cursor)  # | Navigates to the home position for the edit. |
def is_compliant(self, path):
    """Check *path* — and, when `self.recursive`, its leaf subdirectories —
    for permission compliance.

    :param path: the directory path to check.
    :returns: True if compliant, otherwise False.
    :raises ValueError: if *path* is not a directory.
    """
    if not os.path.isdir(path):
        log('Path specified %s is not a directory.' % path, level=ERROR)
        raise ValueError("%s is not a directory." % path)
    if not self.recursive:
        # Non-recursive: delegate the single-directory check to the base class.
        return super(DirectoryPermissionAudit, self).is_compliant(path)
    compliant = True
    for root, dirs, _ in os.walk(path):
        # Only leaf directories (those with no subdirectories) are audited
        # in recursive mode.
        if len(dirs) > 0:
            continue
        if not super(DirectoryPermissionAudit, self).is_compliant(root):
            compliant = False
            continue
    return compliant | Checks if the directory is compliant.
Used to determine if the path specified and all of its children
directories are in compliance with the check itself.
:param path: the directory path to check
:returns: True if the directory tree is compliant, otherwise False. |
def odt_to_ri(f, res, nm):
    """Convert the ODT object function to the complex refractive index.

    Computes n(r) = nm * sqrt(f(r) / km**2 + 1) with km = 2*pi*nm / res,
    always taking the positive (real) root.

    :param f: n-dimensional ndarray, reconstructed object function f(r).
    :param res: vacuum wavelength in pixels.
    :param nm: refractive index of the medium surrounding the object.
    :returns: n-dimensional ndarray, the refractive index n(r).
    """
    # Removed a stray bare `r` statement (residue of the original r""" docstring
    # prefix) that raised NameError whenever the function executed.
    km = (2 * np.pi * nm) / res
    ri = nm * np.sqrt(f / km**2 + 1)
    # Enforce the positive real root of the complex square root.
    negrootcoord = np.where(ri.real < 0)
    ri[negrootcoord] *= -1
    return ri  # | r"""Convert the ODT object function to refractive index
In :abbr:`ODT (Optical Diffraction Tomography)`, the object function
is defined by the Helmholtz equation
.. math::
f(\mathbf{r}) = k_\mathrm{m}^2 \left[
\left( \frac{n(\mathbf{r})}{n_\mathrm{m}} \right)^2 - 1
\right]
with :math:`k_\mathrm{m} = \frac{2\pi n_\mathrm{m}}{\lambda}`.
By inverting this equation, we obtain the refractive index
:math:`n(\mathbf{r})`.
.. math::
n(\mathbf{r}) = n_\mathrm{m}
\sqrt{\frac{f(\mathbf{r})}{k_\mathrm{m}^2} + 1 }
Parameters
----------
f: n-dimensional ndarray
The reconstructed object function :math:`f(\mathbf{r})`.
res: float
The size of the vacuum wave length :math:`\lambda` in pixels.
nm: float
The refractive index of the medium :math:`n_\mathrm{m}` that
surrounds the object in :math:`f(\mathbf{r})`.
Returns
-------
ri: n-dimensional ndarray
The complex refractive index :math:`n(\mathbf{r})`.
Notes
-----
Because this function computes the root of a complex number, there
are several solutions to the refractive index. Always the positive
(real) root of the refractive index is used. |
def __ensure_suffix_stem(t, suffix):
    """Ensure target *t* ends with *suffix*; return (target, stem).

    NOTE(review): the two branches return different types for the first
    element — the suffixed path *string* when the suffix was missing, but
    the original target *object* `t` when it was already present.  Callers
    appear to depend on this asymmetry; confirm before normalizing.
    """
    tpath = str(t)
    if not tpath.endswith(suffix):
        stem = tpath
        tpath += suffix
        return tpath, stem
    else:
        stem, ext = os.path.splitext(tpath)
        return t, stem | Ensure that the target t has the given suffix, and return the file's stem. |
async def query(cls, query: str,
variables: Optional[Mapping[str, Any]] = None,
) -> Any:
gql_query = {
'query': query,
'variables': variables if variables else {},
}
rqst = Request(cls.session, 'POST', '/admin/graphql')
rqst.set_json(gql_query)
async with rqst.fetch() as resp:
return await resp.json() | Sends the GraphQL query and returns the response.
:param query: The GraphQL query string.
:param variables: An optional key-value dictionary
to fill the interpolated template variables
in the query.
:returns: The object parsed from the response JSON string. |
def pprint(self, ind):
    """Pretty-print `self.tree` using the given indentation width.

    :param ind: indent size passed to pprint.PrettyPrinter.
    """
    # Method name shadows the stdlib `pprint` module only at class scope,
    # so the global module reference below still resolves correctly.
    pp = pprint.PrettyPrinter(indent=ind)
    pp.pprint(self.tree) | pretty prints the tree with indentation |
def song_play(self, song):
    """Record a realtime play event for *song*, incrementing its play count.

    Parameters:
        song (dict): A song dict; the id is taken from 'storeId',
            'trackId', or 'id' — the first key present, in that order.

    Returns:
        bool: ``True`` if the service reported the event as 'OK',
        ``False`` otherwise.
    """
    if 'storeId' in song:
        song_id = song['storeId']
    elif 'trackId' in song:
        song_id = song['trackId']
    else:
        song_id = song['id']
    song_duration = song['durationMillis']
    event = mc_calls.ActivityRecordRealtime.play(song_id, song_duration)
    response = self._call(
        mc_calls.ActivityRecordRealtime,
        event
    )
    # Return the comparison directly; `True if X else False` was redundant.
    return response.body['eventResults'][0]['code'] == 'OK'  # | Add play to song play count.
Parameters:
song (dict): A song dict.
Returns:
bool: ``True`` if successful, ``False`` if not. |
def get_item_name(self, item, parent):
names = self.get_name_elements(item)
if not names:
raise MissingNameElementError
name = names[0].text
prefix = self.item_name_prefix(parent)
if prefix:
name = prefix + name
return name | Returns the value of the first name element found inside of element |
def sunrise(self, date=None, local=True, use_elevation=True):
if local and self.timezone is None:
raise ValueError("Local time requested but Location has no timezone set.")
if self.astral is None:
self.astral = Astral()
if date is None:
date = datetime.date.today()
elevation = self.elevation if use_elevation else 0
sunrise = self.astral.sunrise_utc(date, self.latitude, self.longitude, elevation)
if local:
return sunrise.astimezone(self.tz)
else:
return sunrise | Return sunrise time.
Calculates the time in the morning when the sun is a 0.833 degrees
below the horizon. This is to account for refraction.
:param date: The date for which to calculate the sunrise time.
If no date is specified then the current date will be used.
:type date: :class:`~datetime.date`
:param local: True = Time to be returned in location's time zone;
False = Time to be returned in UTC.
If not specified then the time will be returned in local time
:type local: bool
:param use_elevation: True = Return times that allow for the location's elevation;
False = Return times that don't use elevation.
If not specified then times will take elevation into account.
:type use_elevation: bool
:returns: The date and time at which sunrise occurs.
:rtype: :class:`~datetime.datetime` |
def disconnect_child(self, sprite, *handlers):
handlers = handlers or self._child_handlers.get(sprite, [])
for handler in list(handlers):
if sprite.handler_is_connected(handler):
sprite.disconnect(handler)
if handler in self._child_handlers.get(sprite, []):
self._child_handlers[sprite].remove(handler)
if not self._child_handlers[sprite]:
del self._child_handlers[sprite] | disconnects from child event. if handler is not specified, will
disconnect from all the child sprite events |
def _put_bucket_logging(self):
logging_config = {}
if self.s3props['logging']['enabled']:
logging_config = {
'LoggingEnabled': {
'TargetBucket': self.s3props['logging']['logging_bucket'],
'TargetGrants': self.s3props['logging']['logging_grants'],
'TargetPrefix': self.s3props['logging']['logging_bucket_prefix']
}
}
_response = self.s3client.put_bucket_logging(Bucket=self.bucket, BucketLoggingStatus=logging_config)
LOG.debug('Response setting up S3 logging: %s', _response)
LOG.info('S3 logging configuration updated') | Adds bucket logging policy to bucket for s3 access requests |
def bel_process_belrdf():
if request.method == 'OPTIONS':
return {}
response = request.body.read().decode('utf-8')
body = json.loads(response)
belrdf = body.get('belrdf')
bp = bel.process_belrdf(belrdf)
return _stmts_from_proc(bp) | Process BEL RDF and return INDRA Statements. |
def resample_nn_1d(a, centers):
    """Return indexes of the nearest-neighbor values of `centers` within `a`.

    Duplicate nearest indexes are dropped while preserving first-seen order.

    Parameters
    ----------
    a : array-like
        1-dimensional array of numeric values to index into.
    centers : array-like
        1-dimensional array of target values to approximate.

    Returns
    -------
    list of unique indexes into `a`, in order of first occurrence.
    """
    ix = []
    seen = set()  # O(1) membership test instead of rescanning the result list
    for center in centers:
        index = (np.abs(a - center)).argmin()
        if index not in seen:
            seen.add(index)
            ix.append(index)
    return ix  # | Return one-dimensional nearest-neighbor indexes based on user-specified centers.
Parameters
----------
a : array-like
1-dimensional array of numeric values from which to
extract indexes of nearest-neighbors
centers : array-like
1-dimensional array of numeric values representing a subset of values to approximate
Returns
-------
An array of indexes representing values closest to given array values |
def get_enabled():
ret = set()
for name in _iter_service_names():
if _service_is_upstart(name):
if _upstart_is_enabled(name):
ret.add(name)
else:
if _service_is_sysv(name):
if _sysv_is_enabled(name):
ret.add(name)
return sorted(ret) | Return the enabled services
CLI Example:
.. code-block:: bash
salt '*' service.get_enabled |
def image_list(list_aliases=False, remote_addr=None,
cert=None, key=None, verify_cert=True):
client = pylxd_client_get(remote_addr, cert, key, verify_cert)
images = client.images.all()
if list_aliases:
return {i.fingerprint: [a['name'] for a in i.aliases] for i in images}
return map(_pylxd_model_to_dict, images) | Lists all images from the LXD.
list_aliases :
Return a dict with the fingerprint as key and
a list of aliases as value instead.
remote_addr :
An URL to a remote Server, you also have to give cert and key if
you provide remote_addr and its a TCP Address!
Examples:
https://myserver.lan:8443
/var/lib/mysocket.sock
cert :
PEM Formatted SSL Certificate.
Examples:
~/.config/lxc/client.crt
key :
PEM Formatted SSL Key.
Examples:
~/.config/lxc/client.key
verify_cert : True
Wherever to verify the cert, this is by default True
but in the most cases you want to set it off as LXD
normaly uses self-signed certificates.
CLI Examples:
.. code-block:: bash
$ salt '*' lxd.image_list true --out=json
$ salt '*' lxd.image_list --out=json |
def numberOfConnectedDistalSynapses(self, cells=None):
if cells is None:
cells = xrange(self.numberOfCells())
n = _countWhereGreaterEqualInRows(self.internalDistalPermanences, cells,
self.connectedPermanenceDistal)
for permanences in self.distalPermanences:
n += _countWhereGreaterEqualInRows(permanences, cells,
self.connectedPermanenceDistal)
return n | Returns the number of connected distal synapses on these cells.
Parameters:
----------------------------
@param cells (iterable)
Indices of the cells. If None return count for all cells. |
def rollback(self, rb_id=1):
rpc_command = '<Unlock/><Rollback><Previous>{rb_id}</Previous></Rollback><Lock/>'.format(rb_id=rb_id)
self._execute_rpc(rpc_command) | Rollback the last committed configuration.
:param rb_id: Rollback a specific number of steps. Default: 1 |
def _update_explicit_bucket_count(a_float, dist):
buckets = dist.explicitBuckets
if buckets is None:
raise ValueError(_BAD_UNSET_BUCKETS % (u'explicit buckets'))
bucket_counts = dist.bucketCounts
bounds = buckets.bounds
if len(bucket_counts) < len(bounds) + 1:
raise ValueError(_BAD_LOW_BUCKET_COUNT)
bucket_counts[bisect.bisect(bounds, a_float)] += 1 | Adds `a_float` to `dist`, updating its explicit buckets.
Args:
a_float (float): a new value
dist (:class:`endpoints_management.gen.servicecontrol_v1_messages.Distribution`):
the Distribution being updated
Raises:
ValueError: if `dist` does not already have explict buckets defined
ValueError: if there are not enough bucket count fields in `dist` |
def is_sw_writable(self):
    """True if the field's 'sw' access property allows software writes
    (rw, rw1, w, or w1)."""
    sw = self.get_property('sw')
    return sw in (rdltypes.AccessType.rw, rdltypes.AccessType.rw1,
        rdltypes.AccessType.w, rdltypes.AccessType.w1) | Field is writable by software |
def save(self):
out = Outgest(self.output, self.selection_array.astype('uint8'), self.headers, self.config_path)
out.save()
out.upload() | Save as a FITS file and attempt an upload if designated in the configuration file |
def _daterange(self, recarr, date_range):
idx = self._datetime64_index(recarr)
if idx and len(recarr):
dts = recarr[idx]
mask = Series(np.zeros(len(dts)), index=dts)
start, end = _start_end(date_range, dts)
mask[start:end] = 1.0
return recarr[mask.values.astype(bool)]
return recarr | Given a recarr, slice out the given artic.date.DateRange if a
datetime64 index exists |
def apply(self, func, args=(), **kwargs):
kwargs.pop('shortcut', None)
applied = (func(ds, *args, **kwargs) for ds in self._iter_grouped())
combined = self._combine(applied)
return combined.rename({self._resample_dim: self._dim}) | Apply a function over each Dataset in the groups generated for
resampling and concatenate them together into a new Dataset.
`func` is called like `func(ds, *args, **kwargs)` for each dataset `ds`
in this group.
Apply uses heuristics (like `pandas.GroupBy.apply`) to figure out how
to stack together the datasets. The rule is:
1. If the dimension along which the group coordinate is defined is
still in the first grouped item after applying `func`, then stack
over this dimension.
2. Otherwise, stack over the new dimension given by name of this
grouping (the argument to the `groupby` function).
Parameters
----------
func : function
Callable to apply to each sub-dataset.
args : tuple, optional
Positional arguments passed on to `func`.
**kwargs
Used to call `func(ds, **kwargs)` for each sub-dataset `ar`.
Returns
-------
applied : Dataset or DataArray
The result of splitting, applying and combining this dataset. |
def get_bin_version_str(bin_path, version_flag='-v', kw={}):
try:
prog = _get_exec_binary(bin_path, kw)
version_str = version_expr.search(
check_output([prog, version_flag], **kw).decode(locale)
).groups()[0]
except OSError:
logger.warning("failed to execute '%s'", bin_path)
return None
except Exception:
logger.exception(
"encountered unexpected error while trying to find version of "
"'%s':", bin_path
)
return None
logger.info("'%s' is version '%s'", bin_path, version_str)
return version_str | Get the version string through the binary. |
def _send_command(self, command, expected_bytes):
    """Send an XID command to the device and return its raw response.

    :param command: the XID command to send.
    :param expected_bytes: number of response bytes to read back.
    """
    response = self.con.send_xid_command(command, expected_bytes)
    return response | Send an XID command to the device |
def shift_up_right(self, times=1):
    """Return the Location shifted up-right by *times* squares.

    :param times: number of diagonal steps to shift (default 1).
    :rtype: Location
    :raises IndexError: if the shifted location is out of range.
    """
    # Let Location's own IndexError propagate: the old
    # `except IndexError as e: raise IndexError(e)` re-wrapped the exception
    # and discarded the original traceback without adding information.
    return Location(self._rank + times, self._file + times)  # | Finds Location shifted up right by 1
:rtype: Location |
def read(self, entity=None, attrs=None, ignore=None, params=None):
if attrs is None:
attrs = self.read_json()
attrs['search_'] = attrs.pop('search')
attr = 'max_count'
if ignore is None:
ignore = set()
if attr not in ignore:
attrs[attr] = DiscoveryRule(
self._server_config,
id=attrs['id'],
).update_json([])[attr]
return super(DiscoveryRule, self).read(entity, attrs, ignore, params) | Work around a bug. Rename ``search`` to ``search_``.
For more information on the bug, see `Bugzilla #1257255
<https://bugzilla.redhat.com/show_bug.cgi?id=1257255>`_. |
def installation(self):
self.locations = []
url = ("https://tccna.honeywell.com/WebAPI/emea/api/v1/location"
"/installationInfo?userId=%s"
"&includeTemperatureControlSystems=True"
% self.account_info['userId'])
response = requests.get(url, headers=self._headers())
response.raise_for_status()
self.installation_info = response.json()
self.system_id = (self.installation_info[0]['gateways'][0]
['temperatureControlSystems'][0]['systemId'])
for loc_data in self.installation_info:
self.locations.append(Location(self, loc_data))
return self.installation_info | Return the details of the installation. |
def show_pageitems(_, token):
if len(token.contents.split()) != 1:
msg = '%r tag takes no arguments' % token.contents.split()[0]
raise template.TemplateSyntaxError(msg)
return ShowPageItemsNode() | Show page items.
Usage:
.. code-block:: html+django
{% show_pageitems per_page %} |
def is_dataset(ds):
import tensorflow as tf
from tensorflow_datasets.core.utils import py_utils
dataset_types = [tf.data.Dataset]
v1_ds = py_utils.rgetattr(tf, "compat.v1.data.Dataset", None)
v2_ds = py_utils.rgetattr(tf, "compat.v2.data.Dataset", None)
if v1_ds is not None:
dataset_types.append(v1_ds)
if v2_ds is not None:
dataset_types.append(v2_ds)
return isinstance(ds, tuple(dataset_types)) | Whether ds is a Dataset. Compatible across TF versions. |
def _construct_location_stack_entry(location, num_traverses):
if not isinstance(num_traverses, int) or num_traverses < 0:
raise AssertionError(u'Attempted to create a LocationStackEntry namedtuple with an invalid '
u'value for "num_traverses" {}. This is not allowed.'
.format(num_traverses))
if not isinstance(location, Location):
raise AssertionError(u'Attempted to create a LocationStackEntry namedtuple with an invalid '
u'value for "location" {}. This is not allowed.'
.format(location))
return LocationStackEntry(location=location, num_traverses=num_traverses) | Return a LocationStackEntry namedtuple with the specified parameters. |
def rrtmg_lw_gen_source(ext, build_dir):
thispath = config.local_path
module_src = []
for item in modules:
fullname = join(thispath,'rrtmg_lw_v4.85','gcm_model','modules',item)
module_src.append(fullname)
for item in src:
if item in mod_src:
fullname = join(thispath,'sourcemods',item)
else:
fullname = join(thispath,'rrtmg_lw_v4.85','gcm_model','src',item)
module_src.append(fullname)
sourcelist = [join(thispath, '_rrtmg_lw.pyf'),
join(thispath, 'Driver.f90')]
try:
config.have_f90c()
return module_src + sourcelist
except:
print('No Fortran 90 compiler found, not building RRTMG_LW extension!')
return None | Add RRTMG_LW fortran source if Fortran 90 compiler available,
if no compiler is found do not try to build the extension. |
def open_url(url, retries=0, sleep=0.5):
    """Open a MySQL connection from a URL of the form
    "mysql://username:password@host.domain/database".

    NOTE: punctuation characters in the password may break parse_url.
    """
    return open_conn(retries=retries, sleep=sleep, **parse_url(url)) | Open a mysql connection to a url. Note that if your password has
punctuation characters, it might break the parsing of url.
url: A string in the form "mysql://username:password@host.domain/database" |
def client_secrets(cls, client_id):
secrets = yield cls.view.get(key=client_id, include_docs=True)
raise Return([cls(**secret['doc']) for secret in secrets['rows']]) | Get the client's secrets using the client_id
:param client_id: the client ID, e.g. a service ID
:returns: list OAuthSecret instances |
def add_key(self, attributes, store_key):
undefined = False
try:
value = self.get_value(attributes)
except (KeyError, IndexError):
undefined = True
self.remove_key(store_key)
if not undefined:
if isinstance(value, (list,tuple)):
values = value
hash_value = self.get_hash_for(value)
self.add_hashed_value(hash_value, store_key)
else:
values = [value]
for value in values:
hash_value = self.get_hash_for(value)
self.add_hashed_value(hash_value, store_key)
else:
self.add_undefined(store_key) | Add key to the index.
:param attributes: Attributes to be added to the index
:type attributes: dict(str)
:param store_key: The key for the document in the store
:type store_key: str |
def versions(runas=None):
    """Return the installed ruby versions (``rbenv versions --bare``).

    :param runas: user to run the rbenv command as.
    :returns: list of version strings; empty list when the command fails.
    """
    output = _rbenv_exec(['versions', '--bare'], runas=runas)
    if output is False:
        return []
    return output.splitlines()  # | List the installed versions of ruby
CLI Example:
.. code-block:: bash
salt '*' rbenv.versions |
def overlay_class_names(self, image, predictions):
scores = predictions.get_field("scores").tolist()
labels = predictions.get_field("labels").tolist()
labels = [self.CATEGORIES[i] for i in labels]
boxes = predictions.bbox
template = "{}: {:.2f}"
for box, score, label in zip(boxes, scores, labels):
x, y = box[:2]
s = template.format(label, score)
cv2.putText(
image, s, (x, y), cv2.FONT_HERSHEY_SIMPLEX, .5, (255, 255, 255), 1
)
return image | Adds detected class names and scores in the positions defined by the
top-left corner of the predicted bounding box
Arguments:
image (np.ndarray): an image as returned by OpenCV
predictions (BoxList): the result of the computation by the model.
It should contain the field `scores` and `labels`. |
async def delete(self):
if self.refresh_handle:
self.refresh_handle.cancel()
self.refresh_handle = None
request = stun.Message(message_method=stun.Method.REFRESH,
message_class=stun.Class.REQUEST)
request.attributes['LIFETIME'] = 0
await self.request(request)
logger.info('TURN allocation deleted %s', self.relayed_address)
if self.receiver:
self.receiver.connection_lost(None) | Delete the TURN allocation. |
def update(self, table_name, set_query, where=None):
self.validate_access_permission(["w", "a"])
self.verify_table_existence(table_name)
query = SqlQuery.make_update(table_name, set_query, where)
return self.execute_query(query, logging.getLogger().findCaller()) | Execute an UPDATE query.
Args:
table_name (|str|):
Table name of executing the query.
set_query (|str|):
``SET`` clause for the update query.
where (|arg_where_type| , optional):
``WHERE`` clause for the update query.
Defaults to |None|.
Raises:
IOError:
|raises_write_permission|
simplesqlite.NullDatabaseConnectionError:
|raises_check_connection|
simplesqlite.TableNotFoundError:
|raises_verify_table_existence|
simplesqlite.OperationalError:
|raises_operational_error| |
def query(self, string, repeat_n_times=None):
if not repeat_n_times:
repeat_n_times = self.__determine_how_many_times_to_repeat_query(string)
lines = self.__get_command_lines(string)
return_list = []
for line in lines:
lst = self.__query_n_times(line, repeat_n_times)
if lst and lst[0]:
return_list = lst
return return_list | This method performs the operations onto self.g
:param string: The list of operations to perform. The sequences of commands should be separated by a semicolon
An example might be
CREATE {'tag': 'PERSON', 'text': 'joseph'}(v1), {'relation': 'LIVES_AT'}(v1,v2),
{'tag': 'PLACE', 'text': 'London'}(v2)
MATCH {}(_a), {'relation': 'LIVES_AT'}(_a,_b), {}(_b)
WHERE (= (get _a "text") "joseph")
RETURN _a,_b;
:param repeat_n_times: The maximum number of times the graph is queried. It sets the maximum length of
the return list. If None then the value is set by the function
self.__determine_how_many_times_to_repeat_query(string)
:return: If the RETURN command is called with a list of variables names, a list of JSON with
the corresponding properties is returned. If the RETURN command is used alone, a list with the entire
graph is returned. Otherwise it returns an empty list |
def set_user_perm(obj, perm, sid):
info = (
win32security.OWNER_SECURITY_INFORMATION |
win32security.GROUP_SECURITY_INFORMATION |
win32security.DACL_SECURITY_INFORMATION
)
sd = win32security.GetUserObjectSecurity(obj, info)
dacl = sd.GetSecurityDescriptorDacl()
ace_cnt = dacl.GetAceCount()
found = False
for idx in range(0, ace_cnt):
(aceType, aceFlags), ace_mask, ace_sid = dacl.GetAce(idx)
ace_exists = (
aceType == ntsecuritycon.ACCESS_ALLOWED_ACE_TYPE and
ace_mask == perm and
ace_sid == sid
)
if ace_exists:
break
else:
dacl.AddAccessAllowedAce(dacl.GetAclRevision(), perm, sid)
sd.SetSecurityDescriptorDacl(1, dacl, 0)
win32security.SetUserObjectSecurity(obj, info, sd) | Set an object permission for the given user sid |
def strip_head(sequence, values):
    """Return `sequence` as a list with leading members of `values` removed.

    Only the head run is stripped; later occurrences are preserved.
    """
    skip = set(values)
    iterator = iter(sequence)
    result = []
    for item in iterator:
        if item not in skip:
            result.append(item)
            break
    result.extend(iterator)
    return result  # | Strips elements of `values` from the beginning of `sequence`. |
def ReceiveSOAP(self, readerclass=None, **kw):
if self.ps: return self.ps
if not self.IsSOAP():
raise TypeError(
'Response is "%s", not "text/xml"' % self.reply_headers.type)
if len(self.data) == 0:
raise TypeError('Received empty response')
self.ps = ParsedSoap(self.data,
readerclass=readerclass or self.readerclass,
encodingStyle=kw.get('encodingStyle'))
if self.sig_handler is not None:
self.sig_handler.verify(self.ps)
return self.ps | Get back a SOAP message. |
def select(self, sql):
cursor = self.connection.cursor()
try:
cursor.execute(sql)
results = [list(i) for i in cursor.fetchall()]
finally:
cursor.close()
return results | Execute arbitrary SQL select query against the database
and return the results.
:param sql: SQL select query to execute
:type sql: string
:returns: SQL select query result
:rtype: list of lists
:raises: MySQLdb.Error |
def get_domain(self, service_id, version_number, name):
    """Fetch the named domain for a particular service and version.

    :param service_id: Fastly service id.
    :param version_number: service version number.
    :param name: domain name to fetch.
    :returns: a FastlyDomain wrapping the API response.
    """
    content = self._fetch("/service/%s/version/%d/domain/%s" % (service_id, version_number, name))
    return FastlyDomain(self, content) | Get the domain for a particular service and version. |
def _check_accept_keywords(approved, flag):
    """Return True when *flag* is not already covered by *approved* —
    neither directly, nor via its '~' (testing) keyword variant."""
    covered = (
        flag in approved
        or (flag.startswith('~') and flag[1:] in approved)
        or ('~' + flag) in approved
    )
    return not covered  # | check compatibility of accept_keywords |
def _snapshot_to_data(snapshot):
data = {}
data['id'] = snapshot[0]
data['type'] = ['single', 'pre', 'post'][snapshot[1]]
if data['type'] == 'post':
data['pre'] = snapshot[2]
if snapshot[3] != -1:
data['timestamp'] = snapshot[3]
else:
data['timestamp'] = int(time.time())
data['user'] = getpwuid(snapshot[4])[0]
data['description'] = snapshot[5]
data['cleanup'] = snapshot[6]
data['userdata'] = {}
for key, value in snapshot[7].items():
data['userdata'][key] = value
return data | Returns snapshot data from a D-Bus response.
A snapshot D-Bus response is a dbus.Struct containing the
information related to a snapshot:
[id, type, pre_snapshot, timestamp, user, description,
cleanup_algorithm, userdata]
id: dbus.UInt32
type: dbus.UInt16
pre_snapshot: dbus.UInt32
timestamp: dbus.Int64
user: dbus.UInt32
description: dbus.String
cleaup_algorithm: dbus.String
userdata: dbus.Dictionary |
def default_storable(python_type, exposes=None, version=None, storable_type=None, peek=default_peek):
if not exposes:
for extension in expose_extensions:
try:
exposes = extension(python_type)
except (SystemExit, KeyboardInterrupt):
raise
except:
pass
else:
if exposes:
break
if not exposes:
raise AttributeError('`exposes` required for type: {!r}'.format(python_type))
return Storable(python_type, key=storable_type, \
handlers=StorableHandler(version=version, exposes=exposes, \
poke=poke(exposes), peek=peek(python_type, exposes))) | Default mechanics for building the storable instance for a type.
Arguments:
python_type (type): type.
exposes (iterable): attributes exposed by the type.
version (tuple): version number.
storable_type (str): universal string identifier for the type.
peek (callable): peeking routine.
Returns:
Storable: storable instance. |
def apply_to_structure(self, structure):
def_struct = structure.copy()
old_latt = def_struct.lattice.matrix
new_latt = np.transpose(np.dot(self, np.transpose(old_latt)))
def_struct.lattice = Lattice(new_latt)
return def_struct | Apply the deformation gradient to a structure.
Args:
structure (Structure object): the structure object to
be modified by the deformation |
def reroot_graph(G: nx.DiGraph, node: str) -> nx.DiGraph:
G = G.copy()
for n, successors in list(nx.bfs_successors(G, source=node)):
for s in successors:
G.add_edge(s, n, **G.edges[n, s])
G.remove_edge(n, s)
return G | Return a copy of the graph rooted at the given node |
def get_or_generate_vocabulary(data_dir,
tmp_dir,
data_prefix,
max_page_size_exp,
approx_vocab_size=32768,
strip=True):
num_pages_for_vocab_generation = approx_vocab_size // 3
vocab_file = vocab_filename(approx_vocab_size, strip)
def my_generator(data_prefix):
count = 0
for page in corpus_page_generator(
all_corpus_files(data_prefix)[::-1], tmp_dir, max_page_size_exp):
revisions = page["revisions"]
if revisions:
text = get_text(revisions[-1], strip=strip)
yield text
count += 1
if count % 100 == 0:
tf.logging.info("reading pages for vocab %d" % count)
if count > num_pages_for_vocab_generation:
break
return generator_utils.get_or_generate_vocab_inner(data_dir, vocab_file,
approx_vocab_size,
my_generator(data_prefix)) | Get or generate the vocabulary.
Args:
data_dir: a string
tmp_dir: a string
data_prefix: a string
max_page_size_exp: an integer
approx_vocab_size: an integer
strip: a boolean
Returns:
a TextEncoder |
def get_config_dir(path, pattern="*.config", configspec=None, allow_errors=False):
logger = logging.getLogger(__name__)
logger.debug("Loading all files matching {0} in {1}".format(pattern, path))
files = Globber(path, include=[pattern], recursive=False).glob()
files = sorted(files)
config = ConfigObj()
for filename in files:
logger.debug("- Loading config for {0}".format(filename))
try:
conf = ConfigObj(filename, configspec=configspec)
except ConfigObjError, coe:
logger.error("An error occurred while parsing {0}: {1}".format(filename, str(coe)))
continue
if configspec:
conf.validate(Validator())
config.merge(conf)
return config | Load an entire directory of configuration files, merging them into one.
This function will load multiple configuration files matching the given pattern,
in the given path, and merge them. The found files are first sorted alphabetically,
and then loaded and merged. A good practice is to use ConfigObj sections, for easy
loading of information like per-host configuration.
Parameters
----------
path: string
Absolute path to a directory of ConfigObj files
pattern: string
Globbing pattern used to find files. Defaults to *.config.
configspec: ConfigObj
Used to sanitize the values in the resulting ConfigObj. Validation errors are currently
not exposed to the caller.
allow_errors: boolean
If False, errors raised by ConfigObj are not caught.
If True, errors raise by ConfigObj are caught, and an error is logged using logger. |
def process_added_port(self, device_details):
device = device_details['device']
port_id = device_details['port_id']
reprocess = True
try:
self._process_added_port(device_details)
LOG.debug("Updating cached port %s status as UP.", port_id)
self._update_port_status_cache(device, device_bound=True)
LOG.info("Port %s processed.", port_id)
except os_win_exc.HyperVvNicNotFound:
LOG.debug('vNIC %s not found. This can happen if the VM was '
'destroyed.', port_id)
reprocess = False
except os_win_exc.HyperVPortNotFoundException:
LOG.debug('vSwitch port %s not found. This can happen if the VM '
'was destroyed.', port_id)
except Exception as ex:
LOG.exception("Exception encountered while processing "
"port %(port_id)s. Exception: %(ex)s",
dict(port_id=port_id, ex=ex))
else:
reprocess = False
if reprocess:
self._added_ports.add(device)
self._refresh_cache = True
return False
return True | Process the new ports.
Wraps _process_added_port, and treats the sucessful and exception
cases. |
async def load_kube_config(config_file=None, context=None,
client_configuration=None,
persist_config=True):
if config_file is None:
config_file = KUBE_CONFIG_DEFAULT_LOCATION
loader = _get_kube_config_loader_for_yaml_file(
config_file, active_context=context,
persist_config=persist_config)
if client_configuration is None:
config = type.__call__(Configuration)
await loader.load_and_set(config)
Configuration.set_default(config)
else:
await loader.load_and_set(client_configuration)
return loader | Loads authentication and cluster information from kube-config file
and stores them in kubernetes.client.configuration.
:param config_file: Name of the kube-config file.
:param context: set the active context. If is set to None, current_context
from config file will be used.
:param client_configuration: The kubernetes.client.Configuration to
set configs to.
:param persist_config: If True, config file will be updated when changed
(e.g GCP token refresh). |
def scgi_request(url, methodname, *params, **kw):
xmlreq = xmlrpclib.dumps(params, methodname)
xmlresp = SCGIRequest(url).send(xmlreq)
if kw.get("deserialize", True):
xmlresp = xmlresp.replace("<i8>", "<i4>").replace("</i8>", "</i4>")
return xmlrpclib.loads(xmlresp)[0][0]
else:
return xmlresp | Send a XMLRPC request over SCGI to the given URL.
@param url: Endpoint URL.
@param methodname: XMLRPC method name.
@param params: Tuple of simple python objects.
@keyword deserialize: Parse XML result? (default is True)
@return: XMLRPC response, or the equivalent Python data. |
def _get_states(self, result):
if 'devices' not in result.keys():
return
for device_states in result['devices']:
device = self.__devices[device_states['deviceURL']]
try:
device.set_active_states(device_states['states'])
except KeyError:
pass | Get states of devices. |
def token_network_connect(
        self,
        registry_address: PaymentNetworkID,
        token_address: TokenAddress,
        funds: TokenAmount,
        initial_channel_target: int = 3,
        joinable_funds_target: float = 0.4,
) -> None:
    """Automatically maintain open channels for the given token network.

    Args:
        registry_address: payment network registry (binary address).
        token_address: the ERC20 token network to connect to.
        funds: amount of funds usable by the connection manager.
        initial_channel_target: number of channels to open proactively.
        joinable_funds_target: fraction of the funds used to join
            channels opened by other participants.

    Raises:
        InvalidAddress: if either address is not a binary address.
        InsufficientGasReserve: if the account balance cannot cover the
            estimated gas needed to finish all active channel lifecycles.
    """
    if not is_binary_address(registry_address):
        raise InvalidAddress('registry_address must be a valid address in binary')
    if not is_binary_address(token_address):
        raise InvalidAddress('token_address must be a valid address in binary')
    token_network_identifier = views.get_token_network_identifier_by_token_address(
        chain_state=views.state_from_raiden(self.raiden),
        payment_network_id=registry_address,
        token_address=token_address,
    )
    connection_manager = self.raiden.connection_manager_for_token_network(
        token_network_identifier,
    )
    # Refuse to open channels the account could not afford to settle.
    has_enough_reserve, estimated_required_reserve = has_enough_gas_reserve(
        raiden=self.raiden,
        channels_to_open=initial_channel_target,
    )
    if not has_enough_reserve:
        raise InsufficientGasReserve((
            'The account balance is below the estimated amount necessary to '
            'finish the lifecycles of all active channels. A balance of at '
            f'least {estimated_required_reserve} wei is required.'
        ))
    connection_manager.connect(
        funds=funds,
        initial_channel_target=initial_channel_target,
        joinable_funds_target=joinable_funds_target,
    )
Args:
token_address: the ERC20 token network to connect to.
funds: the amount of funds that can be used by the ConnectionManager.
initial_channel_target: number of channels to open proactively.
joinable_funds_target: fraction of the funds that will be used to join
channels opened by other participants. |
def handleRequest(self, req):
    """Handle a JSON-RPC request by calling the appropriate method the
    service exposes, sending a response with the result or an error.

    Errors sent: the MethodNameNotAllowed instance itself, MethodNotFound
    for unresolvable names, InvalidMethodParameters on argument mismatch,
    and the traceback string for any other failure during the call.
    """
    name = req["method"]
    params = req["params"]
    req_id = req["id"]  # renamed from 'id' to avoid shadowing the builtin
    obj = None
    try:
        obj = getMethodByName(self.service, name)
    # BUG FIX: `except MethodNameNotAllowed,e:` is Python-2-only syntax;
    # `as` works on 2.6+ and 3.x. Bare excepts narrowed to Exception.
    except MethodNameNotAllowed as e:
        self.sendResponse(req_id, None, e)
    except Exception:
        self.sendResponse(req_id, None, MethodNotFound())
    if obj:
        try:
            rslt = obj(*params)
            self.sendResponse(req_id, rslt, None)
        except TypeError:
            # Request params do not match the method signature.
            self.sendResponse(req_id, None, InvalidMethodParameters())
        except Exception:
            self.sendResponse(req_id, None, getTracebackStr())
def rotateInZMat(theta_deg):
    """Return the 3x3 matrix rotating vectors theta_deg about the z-axis.

    The rotation carries the x-axis towards the y-axis (yaw left).
    Premultiply a column vector to rotate it; postmultiply to rotate the
    coordinate system underneath the vector instead.
    """
    angle = np.radians(theta_deg)
    cos_t = np.cos(angle)
    sin_t = np.sin(angle)
    return np.array([
        [cos_t, -sin_t, 0],
        [sin_t, cos_t, 0],
        [0, 0, 1],
    ])
Equivalent to yaw left
Rotates the vector in the sense that the x-axis is rotated
towards the y-axis. If looking along the z-axis (which is
not the way you usually look at it), the vector rotates
clockwise.
If sitting on the vector [1,0,0], the rotation is towards the left
Input:
theta_deg (float) Angle through which vectors should be
rotated in degrees
Returns:
A matrix
To rotate a vector, premultiply by this matrix.
To rotate the coord sys underneath the vector, post multiply |
def is_recording(self) -> Optional[bool]:
    """Indicate if this Monitor is currently recording.

    Returns None when the status request fails (after logging a
    warning), False when the monitor reports an empty status, otherwise
    whether the numeric status equals the alarm/recording state.
    """
    status_response = self._client.get_state(
        'api/monitors/alarm/id:{}/command:status.json'.format(
            self._monitor_id
        )
    )
    if not status_response:
        _LOGGER.warning('Could not get status for monitor {}'.format(
            self._monitor_id
        ))
        return None
    status = status_response.get('status')
    # An empty status string means "not recording".
    if status == '':
        return False
    return int(status) == STATE_ALARM
def get_queryset(self):
    """Return a queryset of the state Divisions holding an election on
    the date captured in the URL kwargs.

    District-level elections contribute their parent (state) division.
    Raises APIException when no ElectionDay exists for the date.
    """
    try:
        date = ElectionDay.objects.get(date=self.kwargs["date"])
    except Exception:
        raise APIException(
            "No elections on {}.".format(self.kwargs["date"])
        )
    division_ids = []
    normal_elections = date.elections.filter()
    if len(normal_elections) > 0:
        for election in date.elections.all():
            if election.division.level.name == DivisionLevel.STATE:
                division_ids.append(election.division.uid)
            elif election.division.level.name == DivisionLevel.DISTRICT:
                # Roll district elections up to their parent state.
                division_ids.append(election.division.parent.uid)
    return Division.objects.filter(uid__in=division_ids)
a date. |
def filename_addstring(filename, text):
    """Insert `text` into `filename` just before its extension.

    Useful e.g. for appending a timestamp: ('a/b.txt', '_v2') gives
    'a/b_v2.txt'.
    """
    stem, extension = os.path.splitext(filename)
    return ''.join((stem, text, extension))
For example when adding a timestamp to the filename |
def FromFile(cls, filename, overwrite=False, auto_transfer=True, **kwds):
    """Create a new download object writing to `filename`.

    Raises InvalidUserInputError when the file already exists and
    `overwrite` is not set.
    NOTE(review): the exists-check/open sequence is check-then-act and
    the opened handle would leak if cls() raised -- confirm acceptable.
    """
    path = os.path.expanduser(filename)
    if os.path.exists(path) and not overwrite:
        raise exceptions.InvalidUserInputError(
            'File %s exists and overwrite not specified' % path)
    # close_stream=True hands ownership of the file handle to the object.
    return cls(open(path, 'wb'), close_stream=True,
               auto_transfer=auto_transfer, **kwds)
def createMonitor(self, callback=None, errback=None, **kwargs):
    """Create a monitor through the NS1 monitoring API."""
    import ns1.monitoring
    monitor_client = ns1.monitoring.Monitor(self.config)
    return monitor_client.create(callback=callback, errback=errback, **kwargs)
def get_all_messages_in_thread(self, participant_id, thread_id, check_who_read=True):
    """Return all messages in a thread, newest first, passed through
    self.check_who_read(); an empty queryset on lookup failure.

    NOTE(review): `participant_id` and the `check_who_read` flag are not
    used by this body -- confirm against callers.
    """
    try:
        messages = Message.objects.filter(thread__id=thread_id).\
            order_by('-id').\
            select_related('thread').\
            prefetch_related('thread__participation_set', 'thread__participation_set__participant')
    except Exception:
        return Message.objects.none()
    messages = self.check_who_read(messages)
    return messages
def read_piezo_tensor(self):
    """Parse the piezoelectric tensor table from the output and store it
    under self.data["piezo_tensor"]."""
    header_pattern = r"PIEZOELECTRIC TENSOR for field in x, y, " \
                     r"z\s+\(C/m\^2\)\s+([X-Z][X-Z]\s+)+\-+"
    # Each row: an axis label then six signed floats.
    row_pattern = r"[x-z]\s+" + r"\s+".join([r"(\-*[\.\d]+)"] * 6)
    footer_pattern = r"BORN EFFECTIVE"
    pt_table = self.read_table_pattern(header_pattern, row_pattern,
                                       footer_pattern, postprocess=float)
    self.data["piezo_tensor"] = pt_table
def set_album(self, album):
    """Set the song's album (TALB ID3 frame, UTF-8 encoding).

    :param album: album name; assumes a UTF-8 byte string (Python-2
        style) -- TODO confirm callers never pass an already-decoded str.
    """
    self._set_attr(TALB(encoding=3, text=album.decode('utf-8')))
:param album: album |
def put(self, request, **resources):
    """Default PUT handler using self form; allows bulk update.

    :return: the changed instance (or list of instances when several
        were updated), or None when the resource declares no form.
    :raises HttpError: 404 when the target resource is missing.
    :raises FormError: when the form does not validate.
    """
    if not self._meta.form:
        return None
    if not self._meta.name in resources or not resources[self._meta.name]:
        raise HttpError(
            "Resource not found.", status=status.HTTP_404_NOT_FOUND)
    resource = resources.pop(self._meta.name)
    updated = UpdatedList()
    for o in as_tuple(resource):
        form = self._meta.form(data=request.data, instance=o, **resources)
        if not form.is_valid():
            raise FormError(form)
        updated.append(form.save())
    # Unwrap single-element results for caller convenience.
    return updated if len(updated) > 1 else updated[-1]
:return object: changed instance or raise form's error |
def get_world_size():
    """Return the total number of distributed workers, or 1 when
    torch.distributed is unavailable or not initialized."""
    if not (torch.distributed.is_available()
            and torch.distributed.is_initialized()):
        return 1
    return torch.distributed.get_world_size()
not initialized. |
def almost_equal_norm(self, other, tol, relative=True):
    """Norm-wise almost-equality between two array objects.

    With relative=True the test is norm(self-other) <= tol*norm(self);
    otherwise `tol` is an absolute bound. Type, dtype and length must
    match exactly; `tol` must be non-negative.
    """
    if (tol < 0):
        raise ValueError("Tolerance cannot be negative")
    # Metadata must agree exactly before comparing contents.
    if type(other) != type(self):
        return False
    if self.dtype != other.dtype:
        return False
    if len(self) != len(other):
        return False
    diff = self.numpy() - other.numpy()
    dnorm = norm(diff)
    if relative:
        return (dnorm <= tol * norm(self))
    else:
        return (dnorm <= tol)
If the 'relative' parameter is 'True' (the default) then the
'tol' parameter (which must be positive) is interpreted as a
relative tolerance, and the comparison returns 'True' only if
abs(norm(self-other)) <= tol*abs(norm(self)).
If 'relative' is 'False', then 'tol' is an absolute tolerance,
and the comparison is true only if
abs(norm(self-other)) <= tol
Other meta-data (type, dtype, and length) must be exactly equal.
If either object's memory lives on the GPU it will be copied to
the CPU for the comparison, which may be slow. But the original
object itself will not have its memory relocated nor scheme
changed.
Parameters
----------
other
another Python object, that should be tested for
almost-equality with 'self', based on their norms.
tol
a non-negative number, the tolerance, which is interpreted
as either a relative tolerance (the default) or an absolute
tolerance.
relative
A boolean, indicating whether 'tol' should be interpreted
as a relative tolerance (if True, the default if this argument
is omitted) or as an absolute tolerance (if tol is False).
Returns
-------
boolean
'True' if the data agree within the tolerance, as
interpreted by the 'relative' keyword, and if the types,
lengths, and dtypes are exactly the same. |
def _add_transcripts(self, variant_obj, gemini_variant):
    """Attach every transcript found for `gemini_variant` to
    `variant_obj`, reading the gemini variant_impacts table.

    Args:
        variant_obj: the variant model to extend.
        gemini_variant: a gemini query row carrying 'variant_id'.
    """
    query = "SELECT * from variant_impacts WHERE variant_id = {0}".format(
        gemini_variant['variant_id']
    )
    gq = GeminiQuery(self.db)
    gq.run(query)
    for gemini_transcript in gq:
        transcript = Transcript(
            hgnc_symbol=gemini_transcript['gene'],
            transcript_id=gemini_transcript['transcript'],
            consequence=gemini_transcript['impact_so'],
            biotype=gemini_transcript['biotype'],
            polyphen=gemini_transcript['polyphen_pred'],
            sift=gemini_transcript['sift_pred'],
            HGVSc=gemini_transcript['codon_change'],
            # Empty strings stand in for NULL aa_change/aa_length values.
            HGVSp=', '.join([gemini_transcript['aa_change'] or '', gemini_transcript['aa_length'] or ''])
        )
        variant_obj.add_transcript(transcript)
Go through all transcripts found for the variant
Args:
gemini_variant (GeminiQueryRow): The gemini variant
Yields:
transcript (puzzle.models.Transcript) |
def get_product_metadata_path(product_name):
    """Return the metadata paths for a single product.

    The product name is expected to end in a _YYYYMMDDTHHMMSS timestamp,
    which determines the products/<year>/<month>/<day>/ prefix.
    """
    string_date = product_name.split('_')[-1]
    date = datetime.datetime.strptime(string_date, '%Y%m%dT%H%M%S')
    # NOTE(review): month/day are not zero-padded here -- confirm the
    # bucket layout really uses e.g. products/2018/3/7/.
    path = 'products/{0}/{1}/{2}/{3}'.format(date.year, date.month, date.day, product_name)
    return {
        product_name: {
            'metadata': '{0}/{1}'.format(path, 'metadata.xml'),
            'tiles': get_tile_metadata_path('{0}/{1}'.format(path, 'productInfo.json'))
        }
    }
def hmget(key, *fields, **options):
    """Return the values of all the given hash fields.

    CLI Example:

        salt '*' redis.hmget foo_hash bar_field1 bar_field2
    """
    connection = _connect(
        options.get('host', None),
        options.get('port', None),
        options.get('db', None),
        options.get('password', None),
    )
    return connection.hmget(key, *fields)
.. versionadded:: 2017.7.0
CLI Example:
.. code-block:: bash
salt '*' redis.hmget foo_hash bar_field1 bar_field2 |
def segment(self, eps, min_time):
    """In-place spatio-temporal segmentation of every segment.

    The number of segments may grow after this step.

    Returns:
        This track.
    """
    self.segments = [
        Segment(piece)
        for old_segment in self.segments
        for piece in old_segment.segment(eps, min_time)
    ]
    return self
Spatio-temporal segmentation of each segment
The number of segments may increase after this step
Returns:
This track |
def set_suffix(self):
    """Read the output suffix from the widget and store it, logging the
    new value."""
    self.suffix = self.w.suffix.get_text()
    self.logger.debug('Output suffix set to {0}'.format(self.suffix))
def jdnDate(jdn):
    """Convert a Julian Day Number to a Gregorian [year, month, day]."""
    # Integer-arithmetic calendar conversion over the 146097-day
    # (400-year) and 1461-day (4-year) Gregorian cycles, with the year
    # counted from March so leap days fall at the end of the cycle.
    shifted = jdn + 32044
    centuries4 = (4 * shifted + 3) // 146097
    rem_days = shifted - (146097 * centuries4) // 4
    years4 = (4 * rem_days + 3) // 1461
    day_of_year = rem_days - (1461 * years4) // 4
    month_index = (5 * day_of_year + 2) // 153
    day = day_of_year + 1 - (153 * month_index + 2) // 5
    month = month_index + 3 - 12 * (month_index // 10)
    year = 100 * centuries4 + years4 - 4800 + month_index // 10
    return [year, month, day]
def sortable_title(portal, title):
    """Convert `title` to a 30-character sortable key (Python 2 code).

    Lowercases and strips the title, zero-fills embedded numbers so
    e.g. "2" sorts before "10", then re-encodes using the site charset
    with latin-1/utf-8 fallbacks. Returns '' for an empty title.
    """
    if not title:
        return ''
    def_charset = portal.plone_utils.getSiteEncoding()
    sortabletitle = str(title.lower().strip())
    sortabletitle = num_sort_regex.sub(zero_fill, sortabletitle)
    for charset in [def_charset, 'latin-1', 'utf-8']:
        try:
            sortabletitle = safe_unicode(sortabletitle, charset)[:30]
            sortabletitle = sortabletitle.encode(def_charset or 'utf-8')
            break
        except UnicodeError:
            # Wrong charset guess; try the next fallback.
            pass
        except TypeError:
            # Already unicode; just truncate and stop.
            sortabletitle = sortabletitle[:30]
            break
    return sortabletitle
def public_key_sec(self):
    """Return the public key as SEC bytes, or None for a coinbase input
    or an unrecognized script layout."""
    if self.is_coinbase():
        return None
    opcodes = ScriptTools.opcode_list(self.script)
    # Expect the canonical [signature] [public key] unlock script: a
    # DER signature push (opcode text starting "[30") then the SEC key.
    if len(opcodes) == 2 and opcodes[0].startswith("[30"):
        sec = h2b(opcodes[1][1:-1])
        return sec
    return None
def list_scans(self, source_id=None):
    """Filterable list of Scans, ordered newest to oldest by default.

    :param source_id: when given, restrict the scans to that Source.
    :return: a Query over Scan objects.
    """
    if source_id:
        target_url = self.client.get_url('SCAN', 'GET', 'multi', {'source_id': source_id})
    else:
        # BUG FIX: was self.client.get_ulr(...) -- a typo that raised
        # AttributeError whenever source_id was omitted.
        target_url = self.client.get_url('SCAN', 'GET', 'all')
    return base.Query(self.client.get_manager(Scan), target_url)
Ordered newest to oldest by default |
def process_delayed_asserts(self, print_only=False):
    """Raise (or, with print_only=True, just print) all delayed assert
    failures collected so far, then clear the collection.

    Delayed asserts are non-terminating verifications; this call is the
    point where their accumulated failures surface together.
    """
    if not self.__delayed_assert_failures:
        return
    pending = self.__delayed_assert_failures
    self.__delayed_assert_failures = []
    pieces = ["\n*** DELAYED ASSERTION FAILURES FOR: ", "%s\n" % self.id()]
    pieces.extend("%s\n" % tb for tb in pending)
    exception_output = ''.join(pieces)
    if print_only:
        print(exception_output)
    else:
        raise Exception(exception_output)
non-terminating verifications that only raise exceptions
after this method is called.
This is useful for pages with multiple elements to be checked when
you want to find as many bugs as possible in a single test run
before having all the exceptions get raised simultaneously.
Might be more useful if this method is called after processing all
the delayed asserts on a single html page so that the failure
screenshot matches the location of the delayed asserts.
If "print_only" is set to True, the exception won't get raised. |
def changed(self, code_changed=False, value_changed=False):
    """Notify all registered dependents that this shader object changed."""
    for dependent in self._dependents:
        dependent._dep_changed(
            self, code_changed=code_changed, value_changed=value_changed)
def multi_lpop(self, queue, number, transaction=False):
    """Pop multiple elements from the list `queue`.

    :param queue: name of the redis list.
    :param number: how many elements to pop.
    :param transaction: kept for interface compatibility -- TODO confirm
        whether the pipeline helper should receive it.
    :return: whatever the pipeline helper produces.
    """
    # BUG FIX: the helper was invoked as self._multi_lpop_pipeline(self,
    # queue, number), passing the instance twice through the bound call;
    # the try/except that only re-raised added nothing and the result
    # was silently dropped.
    return self._multi_lpop_pipeline(queue, number)
def visit_DictComp(self, node: ast.DictComp) -> None:
    """Represent a dict comprehension: when a recomputed value exists
    for the node, store it in self.reprs keyed by the node's source
    text, then continue the generic visit."""
    if node in self._recomputed_values:
        value = self._recomputed_values[node]
        text = self._atok.get_text(node)
        self.reprs[text] = value
    self.generic_visit(node=node)
def validate(self, value):
    """Validate each entry of `value` as an allowed OAuth scope.

    Raises OAuthValidationError when the field is required but empty,
    or when any entry fails self.valid_value().
    NOTE(review): despite the historical description, no list/tuple
    type check is performed -- `value` is only iterated.
    """
    if self.required and not value:
        raise OAuthValidationError({'error': 'invalid_request'})
    for val in value:
        if not self.valid_value(val):
            raise OAuthValidationError({
                'error': 'invalid_request',
                'error_description': _("'%s' is not a valid scope.") %
                    val})
def _from_rest_on_create(model, props):
fields = model.get_fields_with_prop('on_create')
for field in fields:
props[field[0]] = field[1] | Assign the default values when creating a model
This is done on fields with `on_create=<value>`. |
def load_args(self, args, clargs):
    """Parse command-line arguments into `args`.

    Returns the parsed namespace, or None (after printing help) when no
    subcommand was supplied.
    """
    parsed = self.parser.parse_args(args=clargs, namespace=args)
    if parsed.subcommand is not None:
        return parsed
    self.parser.print_help()
    return None
def hosts_args(self):
    """Return the host addresses from self.hosts, in order, deduplicated.

    Entries may be plain address strings or dicts carrying a "host" key
    (the dict form allows extra per-host config such as color or name
    alongside the address).
    """
    host_args = [
        row["host"] if isinstance(row, dict) else row
        for row in self.hosts
    ]
    # dict.fromkeys preserves first-seen order while deduplicating in
    # O(n), replacing the previous O(n^2) list-membership scan.
    return list(dict.fromkeys(host_args))
return dedupe | hosts list can contain strings specifying a host directly
or dicts containing a "host" key to specify the host
this way we can allow passing further config details (color, name etc.)
with each host as well as simply dropping in addresses for quick
setup depending on the user's needs |
def run(self):
    """Loop forever, pushing stats to Graphite every self.period seconds.

    An exception from push() is logged and then re-raised, which ends
    the loop.
    """
    self.graphite.start()
    while True:
        log.debug('Graphite pusher is sleeping for %d seconds', self.period)
        time.sleep(self.period)
        log.debug('Pushing stats to Graphite')
        try:
            self.push()
            log.debug('Done pushing stats to Graphite')
        except:
            log.exception('Exception while pushing stats to Graphite')
            raise
def itermovieshash(self):
    """Yield every movie hash key stored in the database."""
    key = self._db.firstkey()
    while key is not None:
        yield key
        key = self._db.nextkey(key)
def aes_encrypt(key: bytes, plain_text: bytes) -> bytes:
    """AES-GCM encryption.

    Parameters
    ----------
    key: bytes
        AES session key, which derived from two secp256k1 keys
    plain_text: bytes
        Plain text to encrypt

    Returns
    -------
    bytes
        nonce(16 bytes) + tag(16 bytes) + encrypted data
    """
    cipher = AES.new(key, AES_CIPHER_MODE)
    encrypted, tag = cipher.encrypt_and_digest(plain_text)
    return cipher.nonce + tag + encrypted
Parameters
----------
key: bytes
AES session key, which derived from two secp256k1 keys
plain_text: bytes
Plain text to encrypt
Returns
-------
bytes
nonce(16 bytes) + tag(16 bytes) + encrypted data |
def scroll_to(self, selector, by=By.CSS_SELECTOR,
              timeout=settings.SMALL_TIMEOUT):
    """Fast scroll to the element matched by `selector`.

    In demo mode delegates to the slow (animated) scroll. Retries once
    after a stale-element error.
    """
    if self.demo_mode:
        self.slow_scroll_to(selector, by=by, timeout=timeout)
        return
    if self.timeout_multiplier and timeout == settings.SMALL_TIMEOUT:
        timeout = self.__get_new_timeout(timeout)
    element = self.wait_for_element_visible(
        selector, by=by, timeout=timeout)
    try:
        self.__scroll_to_element(element)
    except (StaleElementReferenceException, ENI_Exception):
        # The DOM changed under us; wait for readiness, re-find, retry.
        self.wait_for_ready_state_complete()
        time.sleep(0.05)
        element = self.wait_for_element_visible(
            selector, by=by, timeout=timeout)
        self.__scroll_to_element(element)
def get_blocks(self, block_structure=None):
    """Return the subblocks a reducible circuit consists of.

    Args:
        block_structure (tuple): block structure to split along
            (default None, meaning the circuit's own block structure);
            it must be a coarse-graining of the actual structure.

    Returns:
        A tuple of subblocks.

    Raises:
        IncompatibleBlockStructures: when the requested structure is
            not compatible with the circuit's actual one.
    """
    if block_structure is None:
        block_structure = self.block_structure
    # The previous `except IncompatibleBlockStructures as e: raise e`
    # wrapper only rewrote the traceback; let the exception propagate.
    return self._get_blocks(block_structure)
raise e | For a reducible circuit, get a sequence of subblocks that when
concatenated again yield the original circuit. The block structure
given has to be compatible with the circuits actual block structure,
i.e. it can only be more coarse-grained.
Args:
block_structure (tuple): The block structure according to which the
subblocks are generated (default = ``None``, corresponds to the
circuit's own block structure)
Returns:
A tuple of subblocks that the circuit consists of.
Raises:
.IncompatibleBlockStructures |
def blend_rect(self, x0, y0, x1, y1, dx, dy, destination, alpha=0xff):
    """Blend the rectangle (x0,y0)-(x1,y1) of this image onto
    `destination` at (dx,dy), forcing the given per-pixel alpha."""
    x0, y0, x1, y1 = self.rect_helper(x0, y0, x1, y1)
    for x in range(x0, x1 + 1):
        for y in range(y0, y1 + 1):
            o = self._offset(x, y)
            # Assumes canvas slicing yields a copy, so overriding the
            # alpha channel here does not touch the source -- confirm.
            rgba = self.canvas[o:o + 4]
            rgba[3] = alpha
            destination.point(dx + x - x0, dy + y - y0, rgba)
def asset_class(self) -> str:
    """Return the colon-separated asset-class path for this stock,
    from the root ancestor down to the immediate parent.

    BUG FIX: the old code seeded the result with parent.name and then
    walked the chain starting at the parent again, so the immediate
    parent appeared twice (e.g. "Equity:Intl:Intl" instead of
    "Equity:Intl").
    """
    names = []
    cursor = self.parent
    while cursor:
        names.append(cursor.name)
        cursor = cursor.parent
    return ":".join(reversed(names))
def get_plugin_font(self, rich_text=False):
    """Return this plugin's font option.

    All plugins share a global font; `rich_text` selects the rich-font
    option with its own size delta relative to the default size.
    """
    if rich_text:
        return get_font(option='rich_font',
                        font_size_delta=self.RICH_FONT_SIZE_DELTA)
    return get_font(option='font', font_size_delta=self.FONT_SIZE_DELTA)
All plugins in Spyder use a global font. This is a convenience method
in case some plugins will have a delta size based on the default size. |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.