| code (string, lengths 51 to 2.38k) | docstring (string, lengths 4 to 15.2k) |
|---|---|
def create_window(self):
self.undocked_window = window = PluginWindow(self)
window.setAttribute(Qt.WA_DeleteOnClose)
icon = self.get_plugin_icon()
if is_text_string(icon):
icon = self.get_icon(icon)
window.setWindowIcon(icon)
window.setWindowTitle(self.get_plu... | Create a QMainWindow instance containing this plugin. |
def clear_conditions(self, *conkeys, **noclear):
offenders = set(conkeys) - set(self.conconf.conditions.keys())
if offenders:
raise KeyError(', '.join([off for off in offenders]))
offenders = set(noclear) - set({'noclear'})
if offenders:
raise KeyError(', '.join([... | Clear conditions.
Clear only the conditions conkeys if specified. Clear only the
conditions not specified by conkeys if noclear is True (False
default).
.. note::
Updates the mask if not no_auto. |
def _calculate_day_cost(self, plan, period):
plan_pricings = plan.planpricing_set.order_by('-pricing__period').select_related('pricing')
selected_pricing = None
for plan_pricing in plan_pricings:
selected_pricing = plan_pricing
if plan_pricing.pricing.period <= period:
            ... | Finds the best-fitting plan pricing for a given period and calculates the day cost |
def encode_offset_commit_request(cls, group, payloads):
return kafka.protocol.commit.OffsetCommitRequest[0](
consumer_group=group,
topics=[(
topic,
[(
partition,
payload.offset,
payload.metadata)
... | Encode an OffsetCommitRequest struct
Arguments:
group: string, the consumer group you are committing offsets for
payloads: list of OffsetCommitRequestPayload |
def face_adjacency(self):
adjacency, edges = graph.face_adjacency(mesh=self,
return_edges=True)
self._cache['face_adjacency_edges'] = edges
return adjacency | Find faces that share an edge, which we call here 'adjacent'.
Returns
----------
adjacency : (n,2) int
Pairs of faces which share an edge
Examples
---------
In [1]: mesh = trimesh.load('models/featuretype.STL')
In [2]: mesh.face_adjacency
Out... |
def aggregate_repeated_calls(frame, options):
if frame is None:
return None
children_by_identifier = {}
for child in frame.children:
if child.identifier in children_by_identifier:
aggregate_frame = children_by_identifier[child.identifier]
aggregate_frame.self_time += ... | Converts a timeline into a time-aggregate summary.
Adds together calls along the same call stack, so that repeated calls appear as the same
frame. Removes time-linearity - frames are sorted according to total time spent.
Useful for outputs that display a summary of execution (e.g. text and html outputs) |
def connected_objects(self, from_obj):
return self.to_content_type.get_all_objects_for_this_type(pk__in=self.connected_object_ids(from_obj)) | Returns a query set matching all connected objects with the given
object as a source. |
def get_fullsize(self, kwargs):
fullsize_args = {}
if 'absolute' in kwargs:
fullsize_args['absolute'] = kwargs['absolute']
for key in ('width', 'height', 'quality', 'format', 'background', 'crop'):
fsk = 'fullsize_' + key
if fsk in kwargs:
full... | Get the fullsize rendition URL |
def get_instance(self, payload):
return InviteInstance(
self._version,
payload,
service_sid=self._solution['service_sid'],
channel_sid=self._solution['channel_sid'],
) | Build an instance of InviteInstance
:param dict payload: Payload response from the API
:returns: twilio.rest.chat.v2.service.channel.invite.InviteInstance
:rtype: twilio.rest.chat.v2.service.channel.invite.InviteInstance |
def subscribe_list(self, list_id):
return List(tweepy_list_to_json(self._client.subscribe_list(list_id=list_id))) | Subscribe to a list
:param list_id: list ID number
:return: :class:`~responsebot.models.List` object |
def abbreviate_tab_names_changed(self, settings, key, user_data):
abbreviate_tab_names = settings.get_boolean('abbreviate-tab-names')
self.guake.abbreviate = abbreviate_tab_names
        self.guake.recompute_tabs_titles() | If the gconf var abbreviate_tab_names is changed, this method will
    be called and will update the tab names. |
def get_sdb_keys(self, path):
list_resp = get_with_retry(
self.cerberus_url + '/v1/secret/' + path + '/?list=true',
headers=self.HEADERS
)
throw_if_bad_response(list_resp)
    return list_resp.json()['data']['keys'] | Return the keys for an SDB, which are needed for the full secure data path |
def set_features(self):
allpsms_str = readers.generate_psms_multiple_fractions_strings(
self.mergefiles, self.ns)
allpeps = preparation.merge_peptides(self.mergefiles, self.ns)
self.features = {'psm': allpsms_str, 'peptide': allpeps} | Merge all psms and peptides |
def update_rejection_permissions(portal):
updated = update_rejection_permissions_for(portal, "bika_ar_workflow",
"Reject Analysis Request")
if updated:
brains = api.search(dict(portal_type="AnalysisRequest"),
                            CATALOG_ANALYSIS_REQ... | Adds the permission 'Reject Analysis Request' and updates the permission
mappings accordingly |
def _annotate_query(query, generate_dict):
annotate_key_list = []
for field_name, annotate_dict in generate_dict.items():
for annotate_name, annotate_func in annotate_dict["annotate_dict"].items():
query = annotate_func(query)
annotate_key_list.append(annotate_name)
    return qu... | Add annotations to the query to retrieve values required by field value generation
functions. |
def removeSingleCachedFile(self, fileStoreID):
with self._CacheState.open(self) as cacheInfo:
cachedFile = self.encodedFileID(fileStoreID)
cachedFileStats = os.stat(cachedFile)
assert cachedFileStats.st_nlink <= self.nlinkThreshold, \
'Attempting to delete ... | Removes a single file described by the fileStoreID from the cache forcibly. |
def expand(self, msgpos):
MT = self._tree[msgpos]
MT.expand(MT.root) | expand message at given position |
def _build_url(self, shorten=True):
self.url = URL_FORMAT.format(*self._get_url_params(shorten=shorten)) | Build the url for a cable ratings page |
def status(self):
hw_type, name, major, minor, patch, status = self.rpc(0x00, 0x04, result_format="H6sBBBB")
status = {
'hw_type': hw_type,
'name': name.decode('utf-8'),
'version': (major, minor, patch),
'status': status
}
return status | Query the status of an IOTile including its name and version |
async def find_backwards(self, stream_name, predicate, predicate_label='predicate'):
logger = self._logger.getChild(predicate_label)
logger.info('Fetching first matching event')
uri = self._head_uri
try:
page = await self._fetcher.fetch(uri)
except HttpNotFoundError a... | Return first event matching predicate, or None if none exists.
Note: 'backwards', both here and in Event Store, means 'towards the
event emitted furthest in the past'. |
def disconnect(self, callback):
try:
self._callbacks.remove(callback)
except ValueError:
self._callbacks.remove(ref(callback)) | Disconnects a callback from this signal.
:param callback: The callback to disconnect.
    :param weak: A flag that must have the same value as the one
specified during the call to `connect`.
.. warning::
If the callback is not connected at the time of call, a
... |
def financial_float(s, scale_factor=1, typ=float,
ignore=FINANCIAL_WHITESPACE,
percent_str=PERCENT_SYMBOLS,
replace=FINANCIAL_MAPPING,
normalize_case=str.lower):
percent_scale_factor = 1
if isinstance(s, basestring):
s = nor... | Strip dollar signs and commas from financial numerical string
Also, convert percentages to fractions/factors (generally between 0 and 1.0)
>>> [financial_float(x) for x in ("12k Flat", "12,000 flat", "20%", "$10,000 Flat", "15K flat", "null", "None", "", None)]
[12000.0, 12000.0, 0.2, 10000.0, 15000.0, 'n... |
def traverse_until_fixpoint(predicate, tree):
old_tree = None
tree = simplify(tree)
while tree and old_tree != tree:
old_tree = tree
tree = tree.traverse(predicate)
if not tree:
return None
tree = simplify(tree)
return tree | Traverses the tree again and again until it is not modified. |
def fill_transaction_defaults(web3, transaction):
defaults = {}
for key, default_getter in TRANSACTION_DEFAULTS.items():
if key not in transaction:
if callable(default_getter):
if web3 is not None:
default_val = default_getter(web3, transaction)
... | if web3 is None, fill as much as possible while offline |
def upoint2bddpoint(upoint):
point = dict()
for uniqid in upoint[0]:
point[_VARS[uniqid]] = 0
for uniqid in upoint[1]:
point[_VARS[uniqid]] = 1
return point | Convert an untyped point into a BDD point.
.. seealso::
For definitions of points and untyped points,
see the :mod:`pyeda.boolalg.boolfunc` module. |
def _six_fail_hook(modname):
attribute_of = modname != "six.moves" and modname.startswith("six.moves")
if modname != "six.moves" and not attribute_of:
raise AstroidBuildingError(modname=modname)
module = AstroidBuilder(MANAGER).string_build(_IMPORTS)
module.name = "six.moves"
if attribute_of... | Fix six.moves imports due to the dynamic nature of this
class.
Construct a pseudo-module which contains all the necessary imports
for six
:param modname: Name of failed module
:type modname: str
:return: An astroid module
:rtype: nodes.Module |
def get_orgs(self):
orgs = []
for resource in self._get_orgs()['resources']:
orgs.append(resource['entity']['name'])
return orgs | Returns a flat list of the names for the organizations
the user belongs to. |
def validate_pro():
cmd = ['python3', 'validate.py', FLAGS.pro_dataset,
'--use_tpu',
'--tpu_name={}'.format(TPU_NAME),
'--work_dir={}'.format(fsdb.working_dir()),
'--flagfile=rl_loop/distributed_flags',
'--validate_name=pro']
mask_flags.run(cmd) | Validate on professional data. |
def _check_for_duplicates(durations, events):
df = pd.DataFrame({"t": durations, "e": events})
dup_times = df.loc[df["e"] != 0, "t"].duplicated(keep=False)
dup_events = df.loc[df["e"] != 0, ["t", "e"]].duplicated(keep=False)
return (dup_times & (~dup_events)).any() | Checks for duplicated event times in the data set. This is narrowed to detecting duplicated event times
where the events are of different types |
def __copy_extracted(self, path, destination):
unpacked_dir = self.filename + '.unpacked'
if not os.path.isdir(unpacked_dir):
LOGGER.warn(
'Failed to copy extracted file %s, no extracted dir',
path
)
return
source_path = os.path... | Copies a file that was already extracted to the destination directory.
Args:
path (str):
Relative (to the root of the archive) of the file to copy.
destination (str):
Directory to extract the archive to. |
def best_assemblyfile(self):
for sample in self.metadata:
try:
filtered_outputfile = os.path.join(self.path, 'raw_assemblies', '{}.fasta'.format(sample.name))
if os.path.isfile(sample.general.assemblyfile):
size = os.path.getsize(sample.general.ass... | Determine whether the contigs.fasta output file from the assembler is present. If not, set the .bestassembly
attribute to 'NA' |
def qgis_version_detailed():
version = str(Qgis.QGIS_VERSION_INT)
return [int(version[0]), int(version[1:3]), int(version[3:])] | Get the detailed version of QGIS.
:returns: List containing major, minor and patch.
:rtype: list |
def stopThread(self):
if self._thread is not None:
self.performSelector_onThread_withObject_waitUntilDone_('stopPowerNotificationsThread', self._thread, None, objc.YES)
self._thread = None | Stops spawned NSThread. |
def inspect_streamer(self, index):
if index >= len(self.graph.streamers):
return [_pack_sgerror(SensorGraphError.STREAMER_NOT_ALLOCATED), b'\0'*14]
return [Error.NO_ERROR, streamer_descriptor.create_binary_descriptor(self.graph.streamers[index])] | Inspect the streamer at the given index. |
def augment_excmessage(prefix=None, suffix=None) -> NoReturn:
exc_old = sys.exc_info()[1]
message = str(exc_old)
if prefix is not None:
message = f'{prefix}, the following error occurred: {message}'
if suffix is not None:
message = f'{message} {suffix}'
try:
exc_new = type(ex... | Augment an exception message with additional information while keeping
the original traceback.
You can prefix and/or suffix text. If you prefix something (which happens
much more often in the HydPy framework), the sub-clause ', the following
error occurred:' is automatically included:
>>> from hy... |
def _sha1_for_file(filename):
with open(filename, "rb") as fileobj:
contents = fileobj.read()
return hashlib.sha1(contents).hexdigest() | Return sha1 for contents of filename. |
def single_device_data_message(self,
registration_id=None,
condition=None,
collapse_key=None,
delay_while_idle=False,
time_to_live=None,
... | Send push message to a single device
Args:
registration_id (list, optional): FCM device registration ID
        condition (str, optional): Topic condition to deliver messages to
collapse_key (str, optional): Identifier for a group of messages
that can be collapsed so ... |
def submit(self, command="", blocksize=1, job_name="parsl.auto"):
instance, name = self.create_instance(command=command)
self.provisioned_blocks += 1
self.resources[name] = {"job_id": name, "status": translate_table[instance['status']]}
return name | The submit method takes the command string to be executed upon
    instantiation of a resource, most often to start a pilot.
Args :
- command (str) : The bash command string to be executed.
- blocksize (int) : Blocksize to be requested
KWargs:
- job_name (str)... |
def fillna(series_or_arr, missing_value=0.0):
if pandas.notnull(missing_value):
if isinstance(series_or_arr, (numpy.ndarray)):
series_or_arr[numpy.isnan(series_or_arr)] = missing_value
else:
series_or_arr.fillna(missing_value, inplace=True)
return series_or_arr | Fill missing values in pandas objects and numpy arrays.
Arguments
---------
series_or_arr : pandas.Series, numpy.ndarray
The numpy array or pandas series for which the missing values
need to be replaced.
missing_value : float, int, str
The value to replace the missing value with... |
def _effective_view_filter(self):
if self._effective_view == EFFECTIVE:
now = datetime.datetime.utcnow()
        return {'startDate': {'$lte': now}, 'endDate': {'$gte': now}}
return {} | Returns the mongodb relationship filter for effective views |
def save(self):
if not self.is_valid():
return self._errors
_new = self.is_new()
if _new:
self._initialize_id()
with Mutex(self):
self._write(_new)
return True | Saves the instance to the datastore. |
def ustr(obj):
if sys.version_info[0] == 2:
if type(obj) in [str, basestring]:
return unicode(obj, DEFAULT_ENCODING)
else:
return unicode(obj)
else:
if type(obj) in [bytes]:
return obj.decode(DEFAULT_ENCODING)
else:
return str(obj) | Python 2 and 3 utility method that converts an obj to unicode in python 2 and to a str object in python 3 |
def delete_resourcegroupitems(scenario_id, item_ids, **kwargs):
user_id = int(kwargs.get('user_id'))
_get_scenario(scenario_id, user_id)
for item_id in item_ids:
rgi = db.DBSession.query(ResourceGroupItem).\
filter(ResourceGroupItem.id==item_id).one()
db.DBSession.delete(rgi)... | Delete specified items in a group, in a scenario. |
def write_tree_newick(self, filename, hide_rooted_prefix=False):
if not isinstance(filename, str):
raise TypeError("filename must be a str")
treestr = self.newick()
if hide_rooted_prefix:
if treestr.startswith('[&R]'):
treestr = treestr[4:].strip()
... | Write this ``Tree`` to a Newick file
Args:
``filename`` (``str``): Path to desired output file (plain-text or gzipped) |
def get_schema_dir(db_version=1):
v = str(db_version)
return os.path.join(_top_dir, '..', 'schemata', 'versions', v) | Get path to directory with schemata.
:param db_version: Version of the database
:type db_version: int
:return: Path
:rtype: str |
def copy_graph(subject, existing_graph):
new_graph = rdflib.Graph()
for predicate, object_ in existing_graph.predicate_objects():
new_graph.add((subject, predicate, object_))
return new_graph | Function takes a subject and an existing graph, returns a new graph with
all predicates and objects of the existing graph copied to the new_graph with
subject as the new subject
Args:
subject(rdflib.URIRef): A URIRef subject
existing_graph(rdflib.Graph): A rdflib.Graph
Returns:
... |
def temporal_segmentation(segments, min_time):
final_segments = []
for segment in segments:
final_segments.append([])
for point in segment:
if point.dt > min_time:
final_segments.append([])
final_segments[-1].append(point)
    return final_segments | Segments based on time-distant points
Args:
segments (:obj:`list` of :obj:`list` of :obj:`Point`): segment points
min_time (int): minimum required time for segmentation |
def xml_entity_escape(data):
data = data.replace("&", "&")
data = data.replace(">", ">")
data = data.replace("<", "<")
return data | replace special characters with their XML entity versions |
def _apply_mapping(self, mapping):
self._POST["P0100LDR__"] = mapping[0]
self._POST["P0200FMT__"] = mapping[1]
self._POST["P0300BAS__a"] = mapping[2]
self._POST["P07022001_b"] = mapping[3]
self._POST["P1501IST1_a"] = mapping[4] | Map some case specific data to the fields in internal dictionary. |
def mount(dev, mountpoint, flags='', log=None):
ensureDirectory(mountpoint)
systemCall('mount %s %s %s' % (flags, dev, mountpoint),
log=log) | Mount the given dev to the given mountpoint by using the given flags |
def unpause(self):
self._pause_level -= 1
if not self._pause_level:
self._offset = self._paused_time - self._clock() | Unpause the animation. |
async def read(self) -> bytes:
if self._read_bytes is None:
body = bytearray()
while True:
chunk = await self._payload.readany()
body.extend(chunk)
if self._client_max_size:
body_size = len(body)
if b... | Read request body if present.
Returns bytes object with full request content. |
def to_pandas(self):
if not self.is_raw():
raise ValueError('Cannot convert to pandas Index if not evaluated.')
from pandas import Index as PandasIndex
return PandasIndex(self.values,
self.dtype,
name=self.name) | Convert to pandas Index.
Returns
-------
pandas.base.Index |
def is_builtin(text):
from spyder.py3compat import builtins
return text in [str(name) for name in dir(builtins)
if not name.startswith('_')] | Test if passed string is the name of a Python builtin object |
def read_creds_from_environment_variables():
creds = init_creds()
if 'AWS_ACCESS_KEY_ID' in os.environ and 'AWS_SECRET_ACCESS_KEY' in os.environ:
creds['AccessKeyId'] = os.environ['AWS_ACCESS_KEY_ID']
creds['SecretAccessKey'] = os.environ['AWS_SECRET_ACCESS_KEY']
if 'AWS_SESSION_TOKEN' i... | Read credentials from environment variables
:return: |
def set_transform_interface_params(spec, input_features, output_features, are_optional = False):
input_features = _fm.process_or_validate_features(input_features)
output_features = _fm.process_or_validate_features(output_features)
for (fname, ftype) in input_features:
input_ = spec.description.input... | Common utilities to set transform interface params. |
def get_specific_nodes(self, node, names):
nodes = [(x.tagName, x) for x in node.childNodes
if x.nodeType == x.ELEMENT_NODE and
x.tagName in names]
return dict(nodes) | Given a node and a sequence of strings in `names`, return a
dictionary containing the names as keys and child
    `ELEMENT_NODEs` that have a `tagName` equal to the name. |
def generate_manifest(self, progressbar=None):
items = dict()
if progressbar:
progressbar.label = "Generating manifest"
for handle in self._storage_broker.iter_item_handles():
key = dtoolcore.utils.generate_identifier(handle)
value = self._storage_broker.item_... | Return manifest generated from knowledge about contents. |
def _get_params(self):
params = {'accountNumber': self._service.accountNumber}
for key, val in self.__dict__.iteritems():
if key in self.field_order:
if isinstance(val, str,):
val = val.decode('utf8')
params[key] = val
for key in se... | Generate SOAP parameters. |
def _verify_cert(self, sock: ssl.SSLSocket):
verify_mode = self._ssl_context.verify_mode
assert verify_mode in (ssl.CERT_NONE, ssl.CERT_REQUIRED,
ssl.CERT_OPTIONAL), \
'Unknown verify mode {}'.format(verify_mode)
if verify_mode == ssl.CERT_NONE:
... | Check if certificate matches hostname. |
def fetch(self):
self.retrieveVals()
for parent_name in self._graphNames:
graph = self._graphDict[parent_name]
if self.isMultigraph:
print "multigraph %s" % self._getMultigraphID(parent_name)
print self._formatVals(graph.getVals())
print
... | Implements Munin Plugin Fetch Option.
Prints out measured values. |
def combine_first(self, other):
new_index = self.index.union(other.index)
this = self.reindex(new_index, copy=False)
other = other.reindex(new_index, copy=False)
if is_datetimelike(this) and not is_datetimelike(other):
other = to_datetime(other)
return this.where(notn... | Combine Series values, choosing the calling Series's values first.
Parameters
----------
other : Series
The value(s) to be combined with the `Series`.
Returns
-------
Series
The result of combining the Series with the other object.
See A... |
def batch(self, timelimit=None):
from .launcher import BatchLauncher
prev_dir = os.path.join(*self.workdir.split(os.path.sep)[:-1])
prev_dir = os.path.join(os.path.sep, prev_dir)
workdir = os.path.join(prev_dir, os.path.basename(self.workdir) + "_batch")
return BatchLauncher(work... | Run the flow in batch mode, return exit status of the job script.
Requires a manager.yml file and a batch_adapter adapter.
Args:
timelimit: Time limit (int with seconds or string with time given with the slurm convention:
"days-hours:minutes:seconds"). If timelimit is None, the ... |
def get_monitor_pos(monitor):
xpos_value = ctypes.c_int(0)
xpos = ctypes.pointer(xpos_value)
ypos_value = ctypes.c_int(0)
ypos = ctypes.pointer(ypos_value)
_glfw.glfwGetMonitorPos(monitor, xpos, ypos)
return xpos_value.value, ypos_value.value | Returns the position of the monitor's viewport on the virtual screen.
Wrapper for:
void glfwGetMonitorPos(GLFWmonitor* monitor, int* xpos, int* ypos); |
def _interface_to_service(iface):
for _service in _get_services():
service_info = pyconnman.ConnService(os.path.join(SERVICE_PATH, _service))
if service_info.get_property('Ethernet')['Interface'] == iface:
return _service
    return None | Returns the corresponding service for the given interface if it exists, otherwise returns None |
def send_mails(cls):
if settings.CAS_NEW_VERSION_EMAIL_WARNING and settings.ADMINS:
try:
obj = cls.objects.get()
except cls.DoesNotExist:
obj = NewVersionWarning.objects.create(version=VERSION)
LAST_VERSION = utils.last_version()
if... | For each new django-cas-server version, if the current instance is not up to date
send one mail to ``settings.ADMINS``. |
def get_logs(self, login=None, **kwargs):
_login = kwargs.get(
'login',
login
)
log_events_url = GSA_LOGS_URL.format(login=_login)
return self._request_api(url=log_events_url).json() | Get a user's logs.
:param str login: User's login (Default: self._login)
:return: JSON |
def inet_ntop(af, addr):
addr = bytes_encode(addr)
try:
return socket.inet_ntop(af, addr)
except AttributeError:
try:
return _INET_NTOP[af](addr)
except KeyError:
raise ValueError("unknown address family %d" % af) | Convert an IP address from binary form into text representation. |
def tsv_import(self, xsv_source, encoding="UTF-8", transforms=None, row_class=DataObject, **kwargs):
return self._xsv_import(xsv_source, encoding, transforms=transforms, delimiter="\t", row_class=row_class, **kwargs) | Imports the contents of a tab-separated data file into this table.
@param xsv_source: tab-separated data file - if a string is given, the file with that name will be
opened, read, and closed; if a file object is given, then that object
will be read as-is, and left for the caller... |
def show(self):
with_matplotlib = True
try:
import matplotlib.pyplot as plt
except RuntimeError:
import skimage.io as io
with_matplotlib = False
if with_matplotlib:
equalised_img = self.equalise()
_, ax = plt.subplots()
... | Display the image |
def find_outer_region(im, r=0):
r
if r == 0:
dt = spim.distance_transform_edt(input=im)
r = int(sp.amax(dt)) * 2
im_padded = sp.pad(array=im, pad_width=r, mode='constant',
constant_values=True)
dt = spim.distance_transform_edt(input=im_padded)
seeds = (dt >= r)... | r"""
Finds regions of the image that are outside of the solid matrix.
This function uses the rolling ball method to define where the outer region
ends and the void space begins.
This function is particularly useful for samples that do not fill the
entire rectangular image, such as cylindrical core... |
def to_valid_state_vector(state_rep: Union[int, np.ndarray],
num_qubits: int,
dtype: Type[np.number] = np.complex64) -> np.ndarray:
if isinstance(state_rep, np.ndarray):
if len(state_rep) != 2 ** num_qubits:
raise ValueError(
'i... | Verifies the state_rep is valid and converts it to ndarray form.
This method is used to support passing in an integer representing a
computational basis state or a full wave function as a representation of
a state.
Args:
state_rep: If an int, the state returned is the state corresponding to
... |
def Update(self, attribute=None):
currently_running = self.Get(self.Schema.CONTENT_LOCK)
if currently_running:
flow_obj = aff4.FACTORY.Open(currently_running, token=self.token)
if flow_obj and flow_obj.GetRunner().IsRunning():
return
client_id = self.urn.Path().split("/", 2)[1]
paths... | Update an attribute from the client. |
def alter_object(self, obj):
for attname, field, replacer in self.replacers:
currentval = getattr(obj, attname)
replacement = replacer(self, obj, field, currentval)
setattr(obj, attname, replacement) | Alters all the attributes in an individual object.
If it returns False, the object will not be saved |
def get_page_square_dpi(pageinfo, options):
"Get the DPI when we require xres == yres, scaled to physical units"
xres = pageinfo.xres or 0
yres = pageinfo.yres or 0
userunit = pageinfo.userunit or 1
return float(
max(
(xres * userunit) or VECTOR_PAGE_DPI,
(yres * user... | Get the DPI when we require xres == yres, scaled to physical units |
def render(file):
with file.open() as fp:
encoding = detect_encoding(fp, default='utf-8')
result = mistune.markdown(fp.read().decode(encoding))
return result | Render HTML from Markdown file content. |
def one_hot_encoding(labels, num_classes, scope=None):
with tf.name_scope(scope, 'OneHotEncoding', [labels]):
batch_size = labels.get_shape()[0]
indices = tf.expand_dims(tf.range(0, batch_size), 1)
labels = tf.cast(tf.expand_dims(labels, 1), indices.dtype)
concated = tf.concat(axis=1, values=[indices,... | Transform numeric labels into onehot_labels.
Args:
labels: [batch_size] target labels.
num_classes: total number of classes.
scope: Optional scope for name_scope.
Returns:
one hot encoding of the labels. |
def _unpad(self, a, axis, out):
assert a.shape[axis] == self.N
Npad = self.N - self.Nin
if out:
_Npad, Npad_ = Npad - Npad//2, Npad//2
else:
_Npad, Npad_ = Npad//2, Npad - Npad//2
return np.take(a, range(_Npad, self.N - Npad_), axis=axis) | Undo padding in an array.
Parameters
----------
a : (..., N, ...) ndarray
array to be trimmed to size `Nin`
axis : int
axis along which to unpad
out : bool
trim the output if True, otherwise the input; the two cases have
their left... |
def invalidate(self, assoc_handle, dumb):
if dumb:
key = self._dumb_key
else:
key = self._normal_key
self.store.removeAssociation(key, assoc_handle) | Invalidates the association with the given handle.
@type assoc_handle: str
@param dumb: Is this association used with dumb mode?
@type dumb: bool |
def getOldestRequestTime(self):
bldrid = yield self.getBuilderId()
unclaimed = yield self.master.data.get(
('builders', bldrid, 'buildrequests'),
[resultspec.Filter('claimed', 'eq', [False])],
order=['submitted_at'], limit=1)
if unclaimed:
return u... | Returns the submitted_at of the oldest unclaimed build request for
this builder, or None if there are no build requests.
@returns: datetime instance or None, via Deferred |
def parse_json_feed_file(filename: str) -> JSONFeed:
with open(filename) as f:
try:
root = json.load(f)
except json.decoder.JSONDecodeError:
raise FeedJSONError('Not a valid JSON document')
return parse_json_feed(root) | Parse a JSON feed from a local json file. |
def autodiscover():
url_conf = getattr(settings, 'ROOT_URLCONF', ())
resolver = urlresolvers.get_resolver(url_conf)
urlpatterns = resolver.url_patterns
permissions = generate_permissions(urlpatterns)
refresh_permissions(permissions) | Autodiscover for urls.py |
def evaluate_emb(emb, labels):
d_mat = get_distance_matrix(emb)
d_mat = d_mat.asnumpy()
labels = labels.asnumpy()
names = []
accs = []
for k in [1, 2, 4, 8, 16]:
names.append('Recall@%d' % k)
correct, cnt = 0.0, 0.0
for i in range(emb.shape[0]):
d_mat[i, i] = ... | Evaluate embeddings based on Recall@k. |
def set_project_dir(self, directory):
if directory is not None:
self.treewidget.set_root_path(osp.dirname(directory))
self.treewidget.set_folder_names([osp.basename(directory)])
self.treewidget.setup_project_view()
try:
self.treewidget.setExpanded(self.t... | Set the project directory |
def configure_environ(dsn_env_name='PROM_DSN', connection_class=DsnConnection):
inters = []
cs = dsnparse.parse_environs(dsn_env_name, parse_class=connection_class)
for c in cs:
inter = c.interface
set_interface(inter, c.name)
inters.append(inter)
return inters | configure interfaces based on environment variables
by default, when prom is imported, it will look for PROM_DSN, and PROM_DSN_N (where
N is 1 through infinity) in the environment, if it finds them, it will assume they
are dsn urls that prom understands and will configure db connections with them. If you
... |
def _get_full_path(self, path, environ):
if path.startswith('/'):
path = environ.get('SCRIPT_NAME', '') + path
return path | Return the full path to ``path`` by prepending the SCRIPT_NAME.
If ``path`` is a URL, do nothing. |
def GetFeeds(client):
feed_service = client.GetService('FeedService', 'v201809')
feeds = []
more_pages = True
selector = {
'fields': ['Id', 'Name', 'Attributes'],
'predicates': [
{
'field': 'Origin',
'operator': 'EQUALS',
'values': ['USER']
... | Returns a list of all enabled Feeds.
Args:
client: an AdWordsClient instance.
Returns:
A list containing all enabled Feeds. |
def update_volume(self, data):
self._client['config']['volume'] = data['volume']
_LOGGER.info('updated volume on %s', self.friendly_name)
self._server.group(self.group.identifier).callback()
self.callback() | Update volume. |
def getResponse(self, http_request, request):
response = remoting.Envelope(request.amfVersion)
for name, message in request:
http_request.amf_request = message
processor = self.getProcessor(message)
response[name] = processor(message, http_request=http_request)
... | Processes the AMF request, returning an AMF response.
@param http_request: The underlying HTTP Request.
@type http_request: U{HTTPRequest<http://docs.djangoproject.com
/en/dev/ref/request-response/#httprequest-objects>}
@param request: The AMF Request.
@type request: L{Envel... |
def sayHello(self, name="Not given", message="nothing"):
print(
"Python.sayHello called by: {0} "
"with message: '{1}'".format(name, message)
)
return (
"PythonSync says: Howdy {0} "
"that's a nice runtime you got there".format(name)
) | Synchronous implementation of IHello.sayHello synchronous method.
The remote calling thread will be blocked until this is executed and
responds. |
def _consolidate(blocks):
gkey = lambda x: x._consolidate_key
grouper = itertools.groupby(sorted(blocks, key=gkey), gkey)
new_blocks = []
for (_can_consolidate, dtype), group_blocks in grouper:
merged_blocks = _merge_blocks(list(group_blocks), dtype=dtype,
_... | Merge blocks having same dtype, exclude non-consolidating blocks |
def add_state_machine(widget, event=None):
logger.debug("Creating new state-machine...")
root_state = HierarchyState("new root state")
state_machine = StateMachine(root_state)
rafcon.core.singleton.state_machine_manager.add_state_machine(state_machine) | Create a new state-machine when the user clicks on the '+' next to the tabs |
def _get_ipv6addrs(self):
addrs = self._get_addrs()
ipv6addrs = addrs.get(netifaces.AF_INET6)
if not ipv6addrs:
return {}
return ipv6addrs[0] | Returns the IPv6 addresses associated with this NIC. If no IPv6
addresses are used, an empty dict is returned. |
def peek_pointers_in_registers(self, peekSize = 16, context = None):
peekable_registers = (
'Eax', 'Ebx', 'Ecx', 'Edx', 'Esi', 'Edi', 'Ebp'
)
if not context:
context = self.get_context(win32.CONTEXT_CONTROL | \
win32.CONTEXT_INTEGER)... | Tries to guess which values in the registers are valid pointers,
and reads some data from them.
@type peekSize: int
@param peekSize: Number of bytes to read from each pointer found.
@type context: dict( str S{->} int )
@param context: (Optional)
Dictionary mapping... |
def get_form_kwargs(self, **kwargs):
kwargs = super(ClassRegistrationView, self).get_form_kwargs(**kwargs)
kwargs['user'] = self.request.user if hasattr(self.request,'user') else None
listing = self.get_listing()
kwargs.update({
'openEvents': listing['openEvents'],
... | Tell the form which fields to render |
def add_node(self, id, label=None, type='CLASS', meta=None):
g = self.get_graph()
if meta is None:
meta={}
g.add_node(id, label=label, type=type, meta=meta) | Add a new node to the ontology |
def _serialize(self, value, attr, obj):
if isinstance(value, arrow.arrow.Arrow):
value = value.datetime
return super(ArrowField, self)._serialize(value, attr, obj) | Convert the Arrow object into a string. |
def GET_account_record(self, path_info, account_addr, token_type):
if not check_account_address(account_addr):
return self._reply_json({'error': 'Invalid address'}, status_code=400)
if not check_token_type(token_type):
return self._reply_json({'error': 'Invalid token type'}, stat... | Get the state of a particular token account
Returns the account |
def run(files, temp_folder):
"Check flake8 errors in the code base."
try:
import flake8
except ImportError:
return NO_FLAKE_MSG
try:
from flake8.engine import get_style_guide
except ImportError:
from flake8.api.legacy import get_style_guide
py_files = filter_pytho... | Check flake8 errors in the code base. |
def write_bit(self, registeraddress, value, functioncode=5):
_checkFunctioncode(functioncode, [5, 15])
_checkInt(value, minvalue=0, maxvalue=1, description='input value')
self._genericCommand(functioncode, registeraddress, value) | Write one bit to the slave.
Args:
* registeraddress (int): The slave register address (use decimal numbers, not hex).
* value (int): 0 or 1
* functioncode (int): Modbus function code. Can be 5 or 15.
Returns:
None
Raises:
ValueError,... |