code stringlengths 51 2.34k | docstring stringlengths 11 171 |
|---|---|
def flush(self):
    """Flushes the queued up telemetry to the service."""
    # Each component is optional; flush whichever ones are configured.
    components = (self._requests_middleware,
                  self._trace_log_handler,
                  self._exception_telemetry_client)
    for component in components:
        if component:
            component.flush()
def JSON_NumpyArrayEncoder(obj):
    """Specialized JSON encoder hook for numpy types.

    Intended as the ``default=`` callable of ``json.dumps``: an
    ``np.ndarray`` becomes ``{'numpyArray': <nested lists>, 'dtype': <str>}``
    and a numpy scalar becomes the equivalent Python scalar.

    Raises:
        TypeError: for any object that is not a numpy array or scalar,
            as the json module requires of a default hook.
    """
    if isinstance(obj, np.ndarray):
        return {'numpyArray': obj.tolist(),
                'dtype': str(obj.dtype)}
    elif isinstance(obj, np.generic):
        # np.asscalar() was deprecated and removed (NumPy >= 1.23);
        # ndarray/generic .item() is the documented replacement.
        return obj.item()
    else:
        # The original Python 2 `print type(obj)` statement is a syntax
        # error on Python 3; the TypeError message already carries repr(obj).
        raise TypeError(repr(obj) + " is not JSON serializable")
def count_rows(self, table_name):
    """Return the number of entries in a table by counting them."""
    self.table_must_exist(table_name)
    statement = "SELECT COUNT (*) FROM `%s`" % table_name.lower()
    self.own_cursor.execute(statement)
    first_row = self.own_cursor.fetchone()
    return int(first_row[0])
def norm(x, encoding="latin1"):
    """Convert accented characters (ISO 8859-1 or another encoding) to plain ASCII.

    Original docstring (Spanish): "Convertir acentos codificados en
    ISO 8859-1 u otro, a ASCII regular".  Returns ASCII ``bytes``.

    The original used Python-2-only names (``basestring``, ``unicode``,
    ``str.decode``) and fails with NameError on Python 3.
    """
    if isinstance(x, bytes):
        # Byte strings must be decoded first so normalization sees text.
        x = x.decode(encoding, 'ignore')
    elif not isinstance(x, str):
        x = str(x)
    # NFKD splits accented characters into base char + combining mark;
    # encoding to ASCII with 'ignore' then drops the combining marks.
    return unicodedata.normalize('NFKD', x).encode('ASCII', 'ignore')
def patch_text(actions, tree):
tree = etree.fromstring(tree)
actions = patch.DiffParser().parse(actions)
tree = patch_tree(actions, tree)
return etree.tounicode(tree) | Takes a string with XML and a string with actions |
def sh(self, *command, **kwargs):
    """Run a shell command with the given arguments.

    The parts of *command* are joined with spaces and executed through the
    shell; stdio is inherited from the current process.  Returns 0 on
    success or raises ``subprocess.CalledProcessError``.
    """
    self.log.debug('shell: %s', ' '.join(command))
    # NOTE(review): shell=True on a joined string exposes every argument to
    # shell interpretation -- callers must never pass untrusted input here.
    return subprocess.check_call(' '.join(command),
                                 stdout=sys.stdout,
                                 stderr=sys.stderr,
                                 stdin=sys.stdin,
                                 shell=True, **kwargs) | Run a shell command with the given arguments. |
def repeat_call(func, retries, *args, **kwargs):
    """Tries a total of 'retries' + 1 times to execute ``func`` before failing.

    The first ``retries`` attempts swallow any ``Exception`` (but always let
    ``KeyboardInterrupt`` through); the final attempt is made outside the
    ``try`` so its exception propagates to the caller.
    """
    retries = max(0, int(retries))
    for _ in range(retries):
        try:
            return func(*args, **kwargs)
        except KeyboardInterrupt:
            # Never swallow a user interrupt (on Python 3 it is not an
            # Exception subclass); bare `raise` preserves the traceback,
            # unlike the original `raise e`.
            raise
        except Exception:
            pass
    # Last attempt: any exception propagates unchanged.
    return func(*args, **kwargs)
def get(self, path_tuple):
    """Returns metadata dict for path in `path_tuple`.

    Falls back to a synthesized dict (filepath/title joined from the
    tuple) when the path is not cached, logging a warning.
    """
    try:
        return self.contentcache[path_tuple]
    except KeyError:
        LOGGER.warning('No metadata found for path_tuple ' + str(path_tuple))
        joined = os.path.sep.join(path_tuple)
        return dict(filepath=joined, title=joined)
def _get_free_words(self, blockAllowed, isRead):
    """Return the number of words free in the transmit packet.

    NOTE(review): the constants below (header sizes 3/4/5 and the 1-, 4-
    and 5-word per-command costs) encode the wire protocol's overheads --
    confirm against the protocol specification.
    """
    if blockAllowed:
        # assumes each queued write costs 4 words and each pending read
        # reply costs 4 words -- TODO confirm
        send = self._size - 5 - 4 * self._write_count
        recv = self._size - 4 - 4 * self._read_count
        if isRead:
            return recv // 4
        else:
            return send // 4
    else:
        send = self._size - 3 - 1 * self._read_count - 5 * self._write_count
        recv = self._size - 3 - 4 * self._read_count
        if isRead:
            # a read must fit in both the request (send) and reply (recv) budgets
            return min(send, recv // 4)
        else:
            return send // 5 | Return the number of words free in the transmit packet |
def _read(self, size):
    """Return up to ``size`` decompressed bytes from the stream.

    For plain tar input the raw stream is read directly; otherwise raw
    chunks are read and fed through ``self.cmp`` until the internal buffer
    ``self.dbuf`` holds at least ``size`` bytes or the stream ends.
    """
    if self.comptype == "tar":
        # No compression layer: delegate straight to the raw reader.
        return self.__read(size)
    c = len(self.dbuf)
    while c < size:
        buf = self.__read(self.bufsize)
        if not buf:
            break  # end of stream: return whatever is buffered
        try:
            buf = self.cmp.decompress(buf)
        except IOError:
            raise ReadError("invalid compressed data")
        self.dbuf += buf
        c += len(buf)
    # Hand out the first `size` bytes, keep the remainder buffered.
    buf = self.dbuf[:size]
    self.dbuf = self.dbuf[size:]
    return buf | Return size bytes from the stream. |
def _parse_qualimap_globals_inregion(table):
out = {}
for row in table.find_all("tr"):
col, val = [x.text for x in row.find_all("td")]
if col == "Mapped reads":
out.update(_parse_num_pct("%s (in regions)" % col, val))
return out | Retrieve metrics from the global targeted region table. |
def security_warnings(request, PROXY_ALLOWED_HOSTS=()):
warnings = []
PROXY_ALLOWED_HOSTS = PROXY_ALLOWED_HOSTS or getattr(settings, 'PROXY_ALLOWED_HOSTS', ())
if PROXY_ALLOWED_HOSTS and '*' in PROXY_ALLOWED_HOSTS:
warnings.append(dict(title=_('Insecure setting detected.'),
description=_('A wildcard is included in the PROXY_ALLOWED_HOSTS setting.')))
return dict(warnings=warnings) | Detects insecure settings and reports them to the client-side context. |
def _matches(o, pattern):
if not len(o) == len(pattern):
return False
comps = zip(o,pattern)
return all(isinstance(obj,kind) for obj,kind in comps) | Match a pattern of types in a sequence. |
def coneSearch(self, center, radius=3*u.arcmin, magnitudelimit=25):
self.magnitudelimit = magnitudelimit
self.speak('querying GALEX, centered on {} with radius {}'.format(center, radius, magnitudelimit))
coordinatetosearch = '{0.ra.deg} {0.dec.deg}'.format(center)
table = astroquery.mast.Catalogs.query_region(coordinates=center, radius=radius, catalog='GALEX')
epoch = 2005
self.coordinates = coord.SkyCoord( ra=table['ra'].data*u.deg,
dec=table['dec'].data*u.deg,
obstime=Time(epoch, format='decimalyear'))
self.magnitudes = dict(NUV=table['nuv_mag'].data, FUV=table['fuv_mag'].data)
self.magnitude = self.magnitudes['NUV'] | Run a cone search of the GALEX archive |
def remove_role(role):
    """Return a processor that removes an action from the given role.

    The returned callable deletes every matching ActionRoles row for
    (action, argument) restricted to ``role``.
    """
    def processor(action, argument):
        # synchronize_session=False: bulk delete without reconciling the
        # in-memory session state.
        ActionRoles.query_by_action(action, argument=argument).filter(
            ActionRoles.role_id == role.id
        ).delete(synchronize_session=False)
    return processor | Remove a action for a role. |
def extract_bad_ami(e):
msg = e.response['Error']['Message']
error = e.response['Error']['Code']
e_ami_ids = None
if error == 'InvalidAMIID.NotFound':
e_ami_ids = [
e_ami_id.strip() for e_ami_id
in msg[msg.find("'[") + 2:msg.rfind("]'")].split(',')]
log.warning("Image not found %s" % e_ami_ids)
elif error == 'InvalidAMIID.Malformed':
e_ami_ids = [msg[msg.find('"') + 1:msg.rfind('"')]]
log.warning("Image id malformed %s" % e_ami_ids)
return e_ami_ids | Handle various client side errors when describing images |
def _set_trainer(self, trainer):
if self._stype != 'default' and self._trainer and trainer and self._trainer is not trainer:
raise RuntimeError(
"Failed to set the trainer for Parameter '%s' because it was already set. " \
"More than one trainers for a %s Parameter is not supported." \
%(self.name, self._stype))
self._trainer = trainer | Set the trainer this parameter is associated with. |
def conf_sets(self):
    """The dictionary of configuration sets in this component, if any.

    Parsed lazily on first access; the component mutex guards both the
    check and the parse so concurrent callers see a consistent result.
    """
    with self._mutex:
        if not self._conf_sets:
            self._parse_configuration()
        return self._conf_sets | The dictionary of configuration sets in this component, if any. |
def _parse(self, stream, context, path):
    """Parse subconstructs repeatedly until ``self.find`` appears next in the stream.

    Before each element the next ``len(self.find)`` bytes are peeked (the
    stream position is restored); the terminator itself is left unconsumed.
    """
    objs = []
    while True:
        start = stream.tell()
        test = stream.read(len(self.find))
        stream.seek(start)  # restore position: the read above was only a lookahead
        if test == self.find:
            break
        else:
            subobj = self.subcon._parse(stream, context, path)
            objs.append(subobj)
    return objs | Parse until a given byte string is found. |
def load_library_handle(libname, path):
    """Load the library, return the ctypes handle.

    Returns None when no path is given (including the literal strings
    'None'/'none') or when loading fails, in which case a UserWarning
    is issued.
    """
    if path is None or path in ('None', 'none'):
        return None
    loader = ctypes.windll.LoadLibrary if os.name == "nt" else ctypes.CDLL
    try:
        return loader(path)
    except (TypeError, OSError):
        warnings.warn(
            'The {libname} library at {path} could not be loaded.'.format(
                path=path, libname=libname),
            UserWarning)
        return None
def _cursor_down(self, value):
self._cursor.clearSelection()
if self._cursor.atEnd():
self._cursor.insertText('\n')
else:
self._cursor.movePosition(self._cursor.Down, self._cursor.MoveAnchor, value)
self._last_cursor_pos = self._cursor.position() | Moves the cursor down by ``value``. |
def _unhash(hashed, alphabet):
number = 0
len_alphabet = len(alphabet)
for character in hashed:
position = alphabet.index(character)
number *= len_alphabet
number += position
return number | Restores a number tuple from hashed using the given `alphabet` index. |
def run(self, command, options, pipe=False, get_stdout=False, memscale=None):
cl = self.cl_picard(command, options, memscale=memscale)
if pipe:
subprocess.Popen(cl)
elif get_stdout:
p = subprocess.Popen(cl, stdout=subprocess.PIPE)
stdout = p.stdout.read()
p.wait()
p.stdout.close()
return stdout
else:
do.run(cl, "Picard {0}".format(command), None) | Run a Picard command with the provided option pairs. |
def _collect_monitor_metrics(self, conn, tags):
for entry in conn.entries:
dn = entry.entry_dn.lower()
if dn.endswith(self.CONNECTIONS_METRICS_DN):
self._handle_connections_entry(entry, tags)
elif dn.endswith(self.OPERATIONS_METRICS_DN):
self._handle_operations_entry(entry, tags)
elif dn.endswith(self.STATISTICS_METRICS_DN):
self._handle_statistics_entry(entry, tags)
elif dn.endswith(self.THREADS_METRICS_DN):
self._handle_threads_entry(entry, tags)
elif dn.endswith(self.TIME_METRICS_DN):
self._handle_time_entry(entry, tags)
elif dn.endswith(self.WAITERS_METRICS_DN):
self._handle_waiters_entry(entry, tags) | Collect metrics from the monitor backend |
def _reset_timeout(self):
    """Reset the keep-alive timeout, rescheduling the transport close."""
    if self._timeout:
        self._timeout.cancel()  # drop the previously scheduled close
    # Close the transport if nothing resets the timer within client.timeout.
    self._timeout = self.loop.call_later(self.client.timeout,
                                         self.transport.close) | Reset timeout for date keep alive. |
def level_to_action(level):
    """Map action level to action name.

    Raises an InvalidRequest for levels missing from LEVEL_ACTION_MAP.
    """
    try:
        return LEVEL_ACTION_MAP[level]
    except LookupError:
        # LookupError covers both KeyError (mapping) and IndexError (sequence).
        raise d1_common.types.exceptions.InvalidRequest(
            0, 'Unknown action level. level="{}"'.format(level)
        ) | Map action level to action name. |
def _reset (self):
self.entries = []
self.default_entry = None
self.disallow_all = False
self.allow_all = False
self.last_checked = 0
self.sitemap_urls = [] | Reset internal flags and entry lists. |
def add_to_inventory(self):
host = self.db_attrs.pop(A.database.HOST)
self.stack.add_host(
host,
self.groups,
self.db_attrs
) | Adds db host to stack inventory |
def OnChar(self, event):
    """Eats event if key not in digits.

    Control keys (codes below SPACE, DELETE, or above 255) and digit
    characters are passed through via event.Skip(); by not calling Skip()
    for any other printable key, the event is swallowed.
    """
    key = event.GetKeyCode()
    if key < wx.WXK_SPACE or key == wx.WXK_DELETE or key > 255 or \
            chr(key) in string.digits:
        event.Skip() | Eats event if key not in digits |
def mark_seen(self):
data = self.get_selected_item()
if data['is_new']:
with self.term.loader('Marking as read'):
data['object'].mark_as_read()
if not self.term.loader.exception:
data['is_new'] = False
else:
with self.term.loader('Marking as unread'):
data['object'].mark_as_unread()
if not self.term.loader.exception:
data['is_new'] = True | Mark the selected message or comment as seen. |
def split_diff(old, new):
    """Return a lazy iterator over the side-by-side diff lines of `old` and `new`.

    Each line of icdiff's rendered table is right-stripped; on Python 3
    ``map`` yields lazily, so nothing is rendered until iterated.
    """
    return map(lambda l: l.rstrip(),
               icdiff.ConsoleDiff(cols=COLUMNS).make_table(old.splitlines(), new.splitlines())) | Returns a generator yielding the side-by-side diff of `old` and `new`). |
def _update_dictionary(self):
self._total_words = sum(self._dictionary.values())
self._unique_words = len(self._dictionary.keys())
self._letters = set()
for key in self._dictionary:
self._letters.update(key) | Update the word frequency object |
def _is_field_serializable(self, field_name):
return (
self._meta.get_field(field_name).get_internal_type()
in self.SIMPLE_UPDATE_FIELD_TYPES
) | Return True if the field can be serialized into a JSON doc. |
def by_month(self, chamber, year=None, month=None):
check_chamber(chamber)
now = datetime.datetime.now()
year = year or now.year
month = month or now.month
path = "{chamber}/votes/{year}/{month}.json".format(
chamber=chamber, year=year, month=month)
return self.fetch(path, parse=lambda r: r['results']) | Return votes for a single month, defaulting to the current month. |
def restriction(self, lam, mean_field):
self.update_H(mean_field, lam)
restric = np.array([self.expected(op) - n for op, n in zip(self.oper['Sz+1/2'], self.param['populations'])])
return restric | Lagrange multiplier in lattice slave spin |
def _set_default_resource_names(self):
self.ip_config_name = ''.join([
self.running_instance_id, '-ip-config'
])
self.nic_name = ''.join([self.running_instance_id, '-nic'])
self.public_ip_name = ''.join([self.running_instance_id, '-public-ip']) | Generate names for resources based on the running_instance_id. |
def _ConvertToCanonicalSqlDict(self, schema, raw_dict, prefix=""):
flattened_dict = {}
for k, v in iteritems(raw_dict):
if isinstance(v, dict):
flattened_dict.update(
self._ConvertToCanonicalSqlDict(
schema, v, prefix="%s%s." % (prefix, k)))
else:
field_name = prefix + k
flattened_dict[field_name] = schema[field_name].convert_fn(v)
return flattened_dict | Converts a dict of RDF values into a SQL-ready form. |
def remove_badge(self, kind):
self.update(__raw__={
'$pull': {
'badges': {'kind': kind}
}
})
self.reload()
on_badge_removed.send(self, kind=kind)
post_save.send(self.__class__, document=self) | Perform an atomic removal for a given badge |
def ensure_compliance(self):
if not self.modules:
return
try:
loaded_modules = self._get_loaded_modules()
non_compliant_modules = []
for module in self.modules:
if module in loaded_modules:
log("Module '%s' is enabled but should not be." %
(module), level=INFO)
non_compliant_modules.append(module)
if len(non_compliant_modules) == 0:
return
for module in non_compliant_modules:
self._disable_module(module)
self._restart_apache()
except subprocess.CalledProcessError as e:
log('Error occurred auditing apache module compliance. '
'This may have been already reported. '
'Output is: %s' % e.output, level=ERROR) | Ensures that the modules are not loaded. |
def delete(self, monitor_id):
if not self._state:
raise InvalidState("State was not properly obtained from the app")
monitors = self.list()
bit = None
for monitor in monitors:
if monitor_id != monitor['monitor_id']:
continue
bit = monitor['monitor_id']
if not bit:
raise MonitorNotFound("No monitor was found with that term.")
url = self.ALERTS_DELETE_URL.format(requestX=self._state[3])
self._log.debug("Deleting alert using: %s" % url)
payload = [None, monitor_id]
params = json.dumps(payload, separators=(',', ':'))
data = {'params': params}
response = self._session.post(url, data=data, headers=self.HEADERS)
if response.status_code != 200:
raise ActionError("Failed to delete by ID: %s"
% response.content)
return True | Delete a monitor by ID. |
def to_python(self, value, resource):
if isinstance(value, dict):
d = {
self.aliases.get(k, k): self.to_python(v, resource) if isinstance(v, (dict, list)) else v
for k, v in six.iteritems(value)
}
return type(self.class_name, (), d)
elif isinstance(value, list):
return [self.to_python(x, resource) if isinstance(x, (dict, list)) else x for x in value]
else:
return value | Dictionary to Python object |
def table_convert_geometry(metadata, table_name):
    """Reflect a table from the database and retype its 'geometry' column.

    Returns the reflected Table with any column named 'geometry' remapped
    to the ORM Geometry type.
    """
    from sqlalchemy import Table
    from ..orm import Geometry
    # autoload=True reflects the column definitions from the live database.
    table = Table(table_name, metadata, autoload=True)
    for c in table.columns:
        if c.name == 'geometry':
            c.type = Geometry
    return table | Get table metadata from the database. |
def image2surface(img):
if not CAIRO_AVAILABLE:
raise Exception("Cairo not available(). image2surface() cannot work.")
global g_lock
with g_lock:
img_io = io.BytesIO()
img.save(img_io, format="PNG")
img_io.seek(0)
return cairo.ImageSurface.create_from_png(img_io) | Convert a PIL image into a Cairo surface |
def fetch_url(src, dst):
if sys.version_info[0] > 2:
import urllib.request
class URLopener(urllib.request.FancyURLopener):
def http_error_default(self, url, fp, errcode, errmsg, headers):
sys.stderr.write("ERROR: could not fetch {0}\n".format(url))
sys.exit(-1)
else:
import urllib
class URLopener(urllib.FancyURLopener):
def http_error_default(self, url, fp, errcode, errmsg, headers):
sys.stderr.write("ERROR: could not fetch {0}\n".format(url))
sys.exit(-1)
dirname = os.path.dirname(dst)
if dirname != '':
if not os.path.isdir(dirname):
os.makedirs(dirname)
opener = URLopener()
opener.retrieve(src, dst) | Fetch file from URL src and save it to dst. |
def upload_files(self, abspaths, relpaths, remote_objects):
    """Determines files to be uploaded and call ``upload_file`` on each.

    Skips files whose remote timestamp is newer than the local mtime,
    incrementing ``skip_count`` / ``update_count`` / ``create_count``
    accordingly.
    """
    # Index absolute paths by their path relative to file_root ONCE,
    # instead of re-scanning abspaths for every relpath (was O(n^2)).
    abspath_by_rel = {p[len(self.file_root):]: p for p in abspaths}
    for relpath in relpaths:
        abspath = abspath_by_rel[relpath]
        cloud_datetime = remote_objects.get(relpath)
        local_datetime = datetime.datetime.utcfromtimestamp(
            os.stat(abspath).st_mtime)
        if cloud_datetime and local_datetime < cloud_datetime:
            # Remote copy is at least as new; nothing to do.
            self.skip_count += 1
            if not self.quiet:
                print("Skipped {0}: not modified.".format(relpath))
            continue
        if relpath in remote_objects:
            self.update_count += 1
        else:
            self.create_count += 1
        self.upload_file(abspath, relpath)
def run(state, command, args):
from ..core import do_run
do_run(
command=command, args=args, three=state.three, python=state.python, pypi_mirror=state.pypi_mirror
) | Spawns a command installed into the virtualenv. |
def write(self, stream):
topology = self.createTopology()
def write_it(stream):
transportOut = TMemoryBuffer()
protocolOut = TBinaryProtocol.TBinaryProtocol(transportOut)
topology.write(protocolOut)
bytes = transportOut.getvalue()
stream.write(bytes)
if isinstance(stream, six.string_types):
with open(stream, 'wb') as f:
write_it(f)
else:
write_it(stream)
return topology | Writes the topology to a stream or file. |
def remove_address(self, fqdn, address):
    """Remove an address of a domain.

    Deletes only the first record whose address matches.
    """
    matching = (r for r in self.list_address(fqdn) if r.address == address)
    record = next(matching, None)
    if record is not None:
        record.delete()
def _gql(cls, query_string, *args, **kwds):
from .query import gql
return gql('SELECT * FROM %s %s' % (cls._class_name(), query_string),
*args, **kwds) | Run a GQL query. |
def from_val(val_schema):
definition = getattr(val_schema, "definition", val_schema) if isinstance(
val_schema, BaseSchema) else val_schema
if isinstance(definition, dict):
return _dict_to_teleport(definition)
if isinstance(definition, list):
if len(definition) == 1:
return {"Array": from_val(definition[0])}
if definition in VAL_PRIMITIVES:
return VAL_PRIMITIVES[definition]
raise SerializationError(
"Serializing %r not (yet) supported." % definition) | Serialize a val schema to teleport. |
def compute_frontier_difficulty(parent_header: BlockHeader, timestamp: int) -> int:
    """Computes the difficulty for a frontier block based on the parent block."""
    validate_gt(timestamp, parent_header.timestamp, title="Header timestamp")
    offset = parent_header.difficulty // DIFFICULTY_ADJUSTMENT_DENOMINATOR
    # Floor for the adjustment step: the parent difficulty or the global
    # minimum, whichever is smaller.
    difficulty_minimum = min(parent_header.difficulty, DIFFICULTY_MINIMUM)
    if timestamp - parent_header.timestamp < FRONTIER_DIFFICULTY_ADJUSTMENT_CUTOFF:
        # Block arrived fast: raise difficulty by one offset step.
        base_difficulty = max(
            parent_header.difficulty + offset,
            difficulty_minimum,
        )
    else:
        # Block arrived slow: lower difficulty by one offset step.
        base_difficulty = max(
            parent_header.difficulty - offset,
            difficulty_minimum,
        )
    # Difficulty bomb: once past the bomb-free periods, add an
    # exponentially growing term per elapsed period.
    num_bomb_periods = (
        (parent_header.block_number + 1) // BOMB_EXPONENTIAL_PERIOD
    ) - BOMB_EXPONENTIAL_FREE_PERIODS
    if num_bomb_periods >= 0:
        difficulty = max(
            base_difficulty + 2**num_bomb_periods,
            DIFFICULTY_MINIMUM,
        )
    else:
        difficulty = base_difficulty
    return difficulty | Computes the difficulty for a frontier block based on the parent block. |
def detach(self, ids=None, touch=True):
if isinstance(ids, orator.orm.model.Model):
ids = ids.get_key()
if ids is None:
ids = []
query = self._new_pivot_query()
if not isinstance(ids, list):
ids = [ids]
if len(ids) > 0:
query.where_in(self._other_key, ids)
if touch:
self.touch_if_touching()
results = query.delete()
return results | Detach models from the relationship. |
def export_dashboards(session):
    """Returns all dashboards metadata as a json dump."""
    logging.info('Starting export')
    ids = [dashboard.id for dashboard in session.query(Dashboard)]
    return Dashboard.export_dashboards(ids)
def cont_r(self, percent=0.9, N=None):
if not hasattr(self, 'F'):
self.fs_r(N=self.rank)
return apply_along_axis(lambda _: _/self.L[:N], 1,
apply_along_axis(lambda _: _*self.r, 0, self.F[:, :N]**2)) | Return the contribution of each row. |
def defined_annotation_keywords(self) -> Set[str]:
    """Get the set of all keywords defined as annotations in this graph."""
    keywords = set(self.annotation_pattern)
    keywords.update(self.annotation_url)
    keywords.update(self.annotation_list)
    return keywords
def ls_files(client, names, authors, include, exclude, format):
records = _filter(
client, names=names, authors=authors, include=include, exclude=exclude
)
DATASET_FILES_FORMATS[format](client, records) | List files in dataset. |
def OnInsertTabs(self, event):
with undo.group(_("Insert table")):
self.grid.actions.insert_tabs(self.grid.current_table - 1, 1)
self.grid.GetTable().ResetView()
self.grid.actions.zoom()
event.Skip() | Insert one table into grid |
def _consolidate_inplace(self):
    """Consolidate data in place and return None."""
    def f():
        # rebinds _data to the consolidated block manager
        self._data = self._data.consolidate()
    # _protect_consolidate wraps the mutation with the object's
    # consolidation guard logic.
    self._protect_consolidate(f) | Consolidate data in place and return None |
def FingerprintFile(self, pathspec, max_filesize=None, request_data=None):
request = rdf_client_action.FingerprintRequest(pathspec=pathspec)
if max_filesize is not None:
request.max_filesize = max_filesize
request.AddRequest(
fp_type=rdf_client_action.FingerprintTuple.Type.FPT_GENERIC,
hashers=[
rdf_client_action.FingerprintTuple.HashType.MD5,
rdf_client_action.FingerprintTuple.HashType.SHA1,
rdf_client_action.FingerprintTuple.HashType.SHA256
])
request.AddRequest(
fp_type=rdf_client_action.FingerprintTuple.Type.FPT_PE_COFF,
hashers=[
rdf_client_action.FingerprintTuple.HashType.MD5,
rdf_client_action.FingerprintTuple.HashType.SHA1,
rdf_client_action.FingerprintTuple.HashType.SHA256
])
self.CallClient(
self.fingerprint_file_mixin_client_action,
request,
next_state="ProcessFingerprint",
request_data=request_data) | Launch a fingerprint client action. |
def show_analyzer_status():
ecode = 0
try:
image=contexts['anchore_allimages'][imagelist[0]]
analyzer_status = contexts['anchore_db'].load_analyzer_manifest(image.meta['imageId'])
result = {image.meta['imageId']:{'result':{'header':['Analyzer', 'Status', '*Type', 'LastExec', 'Exitcode', 'Checksum'], 'rows':[]}}}
for script in analyzer_status.keys():
adata = analyzer_status[script]
nicetime = datetime.datetime.fromtimestamp(adata['timestamp']).strftime('%Y-%m-%d %H:%M:%S')
try:
row = [script.split('/')[-1], adata['status'], adata['atype'], nicetime, str(adata['returncode']), adata['csum']]
result[image.meta['imageId']]['result']['rows'].append(row)
except:
pass
if result:
anchore_utils.print_result(config, result)
except:
anchore_print_err("operation failed")
ecode = 1
contexts['anchore_allimages'].clear()
sys.exit(ecode) | Show analyzer status for specified image |
def _set_interactivity(self, interactivity):
event_default = 'hover'
if interactivity is None:
return
if isinstance(interactivity, (tuple, list)):
self.interactivity = event_default
interactive_cols = '\n'.join(
'@{0}: ${0}'.format(col) for col in interactivity
)
elif isinstance(interactivity, str):
self.interactivity = event_default
interactive_cols = '@{0}: ${0}'.format(interactivity)
elif isinstance(interactivity, dict):
self.interactivity = interactivity.get('event', event_default)
self.header = interactivity.get('header')
interactive_cols = '\n'.join(
'@{0}: ${0}'.format(col) for col in interactivity['cols']
)
else:
raise ValueError('`interactivity` must be a str, a list of str, '
'or a dict with a `cols` key')
self.styling = '\n'.join([interactive_cols, self.styling]) | Adds interactivity syntax to the styling |
def clean(file_, imports):
    """Remove modules that aren't imported in project from ``file_``.

    Rewrites ``file_`` in place, keeping only the requirement lines whose
    module is actually imported.
    """
    modules_not_imported = compare_modules(file_, imports)
    if not modules_not_imported:
        # "|".join() over an empty collection compiles to a pattern that
        # matches EVERY line and would wipe the whole file. Nothing to do.
        logging.info("Nothing to clean in " + file_)
        return
    # Escape the names: requirement names may contain regex metacharacters
    # (e.g. "backports.csv").
    re_remove = re.compile(
        "|".join(re.escape(module) for module in modules_not_imported))
    try:
        f = open_func(file_, "r+")
    except OSError:
        logging.error("Failed on file: {}".format(file_))
        raise
    # `finally: f.close()` around the open itself would hit an unbound `f`
    # when the open fails, so the handle is only guarded once it exists.
    try:
        to_write = [line for line in f.readlines()
                    if re_remove.match(line) is None]
        f.seek(0)
        f.truncate()
        f.writelines(to_write)
    finally:
        f.close()
    logging.info("Successfully cleaned up requirements in " + file_)
def _element_to_bson(key, value, check_keys, opts):
if not isinstance(key, string_type):
raise InvalidDocument("documents must have only string keys, "
"key was %r" % (key,))
if check_keys:
if key.startswith("$"):
raise InvalidDocument("key %r must not start with '$'" % (key,))
if "." in key:
raise InvalidDocument("key %r must not contain '.'" % (key,))
name = _make_name(key)
return _name_value_to_bson(name, value, check_keys, opts) | Encode a single key, value pair. |
def state_cpfs(self) -> List[CPF]:
_, cpfs = self.cpfs
state_cpfs = []
for cpf in cpfs:
name = utils.rename_next_state_fluent(cpf.name)
if name in self.state_fluents:
state_cpfs.append(cpf)
state_cpfs = sorted(state_cpfs, key=lambda cpf: cpf.name)
return state_cpfs | Returns list of state-fluent CPFs. |
def complete_all_trajectories(self):
for index in range(self.batch_size):
trajectory = self._trajectories[index]
assert trajectory.is_active
self._complete_trajectory(trajectory, index) | Essentially same as reset, but we don't have observations. |
def com_google_fonts_check_glyf_unused_data(ttFont):
try:
expected_glyphs = len(ttFont.getGlyphOrder())
actual_glyphs = len(ttFont['glyf'].glyphs)
diff = actual_glyphs - expected_glyphs
if diff < 0:
yield FAIL, Message("unreachable-data",
("Glyf table has unreachable data at the end of "
" the table. Expected glyf table length {}"
" (from loca table), got length"
" {} (difference: {})").format(
expected_glyphs, actual_glyphs, diff))
elif not diff:
yield PASS, "There is no unused data at the end of the glyf table."
else:
raise Exception("Bug: fontTools did not raise an expected exception.")
except fontTools.ttLib.TTLibError as error:
if "not enough 'glyf' table data" in format(error):
yield FAIL, Message("missing-data",
("Loca table references data beyond"
" the end of the glyf table."
" Expected glyf table length {}"
" (from loca table).").format(expected_glyphs))
else:
raise Exception("Bug: Unexpected fontTools exception.") | Is there any unused data at the end of the glyf table? |
def initialize_delete_state_map(self):
self.fabric_state_del_map = {
fw_const.INIT_STATE_STR: fw_const.OS_IN_NETWORK_STATE,
fw_const.OS_IN_NETWORK_DEL_FAIL:
fw_const.OS_IN_NETWORK_STATE,
fw_const.OS_IN_NETWORK_DEL_SUCCESS:
fw_const.INIT_STATE,
fw_const.OS_OUT_NETWORK_DEL_FAIL:
fw_const.OS_OUT_NETWORK_STATE,
fw_const.OS_OUT_NETWORK_DEL_SUCCESS:
fw_const.OS_IN_NETWORK_STATE,
fw_const.OS_DUMMY_RTR_DEL_FAIL:
fw_const.OS_DUMMY_RTR_STATE,
fw_const.OS_DUMMY_RTR_DEL_SUCCESS:
fw_const.OS_OUT_NETWORK_STATE,
fw_const.DCNM_IN_NETWORK_DEL_FAIL:
fw_const.DCNM_IN_NETWORK_STATE,
fw_const.DCNM_IN_NETWORK_DEL_SUCCESS:
fw_const.OS_DUMMY_RTR_STATE,
fw_const.DCNM_IN_PART_UPDDEL_FAIL:
fw_const.DCNM_IN_PART_UPDATE_STATE,
fw_const.DCNM_IN_PART_UPDDEL_SUCCESS:
fw_const.DCNM_IN_NETWORK_STATE,
fw_const.DCNM_OUT_PART_DEL_FAIL:
fw_const.DCNM_OUT_PART_STATE,
fw_const.DCNM_OUT_PART_DEL_SUCCESS:
fw_const.DCNM_IN_PART_UPDATE_STATE,
fw_const.DCNM_OUT_NETWORK_DEL_FAIL:
fw_const.DCNM_OUT_NETWORK_STATE,
fw_const.DCNM_OUT_NETWORK_DEL_SUCCESS:
fw_const.DCNM_OUT_PART_STATE,
fw_const.DCNM_OUT_PART_UPDDEL_FAIL:
fw_const.DCNM_OUT_PART_UPDATE_STATE,
fw_const.DCNM_OUT_PART_UPDDEL_SUCCESS:
fw_const.DCNM_OUT_NETWORK_STATE} | This is a mapping of delete result message string to state. |
def del_fields(self, *names):
    """Delete data fields from this struct instance.

    Removes both the cached value in ``self._field_values`` and the
    class-level ``DataField`` descriptor for each given name.
    """
    cls = type(self)
    # NOTE(review): this reassignment is a no-op (cls is already the
    # instance's class); if a per-instance subclass was intended, the
    # delattr below instead mutates the class shared by ALL instances --
    # verify against callers.
    self.__class__ = cls
    for n in names:
        if isinstance(getattr(cls, n, None), DataField):
            if n in self._field_values:
                del self._field_values[n]
            delattr(cls, n) | Delete data fields from this struct instance |
def headers(self):
self._headers.update(**{'Accept-Language': self.language})
if self.__token:
self._headers.update(
**{'Authorization': 'Bearer %s' % self.__token})
return self._headers | Provide access to updated headers. |
def rec_dict_to_numpy_dict(obj_dict):
    """Same as dict_to_numpy_dict, but recursive.

    Dicts are converted key-by-key, ``None`` is preserved, and any other
    leaf value is converted with ``np.asarray``.
    """
    if obj_dict is None:
        return None
    # isinstance (not `type(x) == dict`) so dict subclasses such as
    # OrderedDict are recursed into as well; the recursion's None base
    # case makes the original inline `if value is not None` redundant.
    if isinstance(obj_dict, dict):
        return {key: rec_dict_to_numpy_dict(value)
                for key, value in obj_dict.items()}
    return np.asarray(obj_dict)
def gid_exists(gid):
    """Check if a gid exists (i.e. a group with that id is defined)."""
    try:
        grp.getgrgid(gid)
    except KeyError:
        # getgrgid raises KeyError for unknown group ids.
        return False
    return True
def query_field_count(limit_num, kind='1'):
return TabTag.select().where(
TabTag.kind == kind
).order_by(
TabTag.count.desc()
).limit(limit_num) | Query the posts count of certain category. |
def _images(self, sys_output):
import re
gap_pattern = re.compile('\t|\s{2,}')
image_list = []
output_lines = sys_output.split('\n')
column_headers = gap_pattern.split(output_lines[0])
for i in range(1,len(output_lines)):
columns = gap_pattern.split(output_lines[i])
if len(columns) == len(column_headers):
image_details = {}
for j in range(len(columns)):
image_details[column_headers[j]] = columns[j]
image_list.append(image_details)
return image_list | a helper method for parsing docker image output |
def yesno(prompt):
prompt += " [y/n]"
a = ""
while a not in ["y", "n"]:
a = input(prompt).lower()
return a == "y" | Returns True if user answers 'y' |
def div(x, y, context=None):
return _apply_function_in_current_context(
BigFloat,
mpfr.mpfr_div,
(
BigFloat._implicit_convert(x),
BigFloat._implicit_convert(y),
),
context,
) | Return ``x`` divided by ``y``. |
def portal(self, portalID=None):
if portalID is None:
portalID = self.portalSelf.id
url = "%s/%s" % (self.root, portalID)
return Portal(url=url,
securityHandler=self._securityHandler,
proxy_url=self._proxy_url,
proxy_port=self._proxy_port,
initalize=True) | returns a specific reference to a portal |
def if_body_action(self, text, loc, arg):
exshared.setpos(loc, text)
if DEBUG > 0:
print("IF_BODY:",arg)
if DEBUG == 2: self.symtab.display()
if DEBUG > 2: return
label = self.codegen.label("false{0}".format(self.false_label_number), True, False)
self.codegen.jump(self.relexp_code, True, label)
self.codegen.newline_label("true{0}".format(self.label_number), True, True)
self.label_stack.append(self.false_label_number)
self.label_stack.append(self.label_number) | Code executed after recognising if statement's body |
async def repeat(ctx, times: int, content='repeating...'):
    """Repeats a message multiple times."""
    for _ in range(times):
        await ctx.send(content)
def getThirdPartyLibCompilerFlags(self, libs):
fmt = PrintingFormat.singleLine()
if libs[0] == '--multiline':
fmt = PrintingFormat.multiLine()
libs = libs[1:]
platformDefaults = True
if libs[0] == '--nodefaults':
platformDefaults = False
libs = libs[1:]
details = self.getThirdpartyLibs(libs, includePlatformDefaults=platformDefaults)
return details.getCompilerFlags(self.getEngineRoot(), fmt) | Retrieves the compiler flags for building against the Unreal-bundled versions of the specified third-party libraries |
def list_services():
for importer, modname, ispkg in pkgutil.iter_modules(services.__path__):
if ispkg is False:
importer.find_module(modname).load_module(modname)
services_list = list()
for s in services.serviceBase.__subclasses__():
services_list.append(s.__name__.lower())
return services_list | returns list of available services |
def add_slave(self, slave, container_name="widget"):
cont = getattr(self, container_name, None)
if cont is None:
raise AttributeError(
'Container name must be a member of the delegate')
cont.add(slave.widget)
self.slaves.append(slave)
return slave | Add a slave delegate |
def line(self, text, style=None, verbosity=None):
    """Write a string as information output.

    When a style is given, the text is wrapped in <style>...</> tags
    before being handed to the io layer.
    """
    decorated = "<%s>%s</>" % (style, text) if style else text
    self._io.write_line(decorated, verbosity)
def register_incoming_conn(self, conn):
assert conn, "conn is required"
conn.set_outbound_pending_change_callback(self._on_conn_change)
self.connections.appendleft(conn)
self._set_on_close_cb(conn)
self._on_conn_change() | Add incoming connection into the heap. |
def clean_out_dir(directory):
if not isinstance(directory, path):
directory = path(directory)
for file_path in directory.files():
file_path.remove()
for dir_path in directory.dirs():
dir_path.rmtree() | Delete all the files and subdirectories in a directory. |
def _almost_equal(a, b):
threshold = 1e-9
diff = np.abs(a - b)
return (diff < threshold) | Check if the two numbers are almost equal |
def source(source_id=None, **kwargs):
if source_id is not None:
kwargs['source_id'] = source_id
elif 'id' in kwargs:
source_id = kwargs.pop('id')
kwargs['source_id'] = source_id
if 'releases' in kwargs:
kwargs.pop('releases')
path = 'releases'
else:
path = None
return Fred().source(path, **kwargs) | Get a source of economic data. |
def clear(self, actors=()):
if not utils.isSequence(actors):
actors = [actors]
if len(actors):
for a in actors:
self.removeActor(a)
else:
for a in settings.collectable_actors:
self.removeActor(a)
settings.collectable_actors = []
self.actors = []
for a in self.getActors():
self.renderer.RemoveActor(a)
for a in self.getVolumes():
self.renderer.RemoveVolume(a)
for s in self.sliders:
s.EnabledOff()
for b in self.buttons:
self.renderer.RemoveActor(b)
for w in self.widgets:
w.EnabledOff()
for c in self.scalarbars:
self.renderer.RemoveActor(c) | Delete specified list of actors, by default delete all. |
def _process_req_txt(req):
if req.status_code == 404:
return ''
if req.status_code != 200:
raise DapiCommError('Response of the server was {code}'.format(code=req.status_code))
return req.text | Returns a processed request or raises an exception |
def initialize_minimum_needs_post_processors():
    """Generate definitions for minimum needs post processors.

    One post-processor definition is produced per entry in
    ``minimum_needs_fields``; each multiplies the displaced population
    by the per-person amount from the needs profile.
    """
    def _build(field):
        # Build a single post-processor definition for one needs field.
        return {
            'key': 'post_processor_{key}'.format(key=field['key']),
            'name': '{field_name} Post Processor'.format(
                field_name=field['name']),
            'description': field['description'],
            'input': {
                'population': {
                    'value': displaced_field,
                    'type': field_input_type,
                },
                'amount': {
                    'type': needs_profile_input_type,
                    'value': field['need_parameter'].name,
                }
            },
            'output': {
                'needs': {
                    'value': field,
                    'type': function_process,
                    'function': multiply
                }
            }
        }

    return [_build(field) for field in minimum_needs_fields]
def cli(env, identifier, wait):
    """Check if a server is ready.

    :param env: CLI environment providing the API client and output.
    :param identifier: hardware id or name to resolve.
    :param wait: seconds to wait for the server to become ready.
    :raises exceptions.CLIAbort: if the server is not ready in time.
    """
    manager = SoftLayer.HardwareManager(env.client)
    hardware_id = helpers.resolve_id(manager.resolve_ids, identifier,
                                     'hardware')
    if manager.wait_for_ready(hardware_id, wait):
        env.fout("READY")
    else:
        raise exceptions.CLIAbort("Server %s not ready" % hardware_id)
def spawn_missing_master(self):
    """Notifies the standalone slave agency that the master agency is missing.

    :returns: a Deferred that fires after the optional
        ``on_master_missing_cb`` hook (if callable) has been invoked.
    """
    d = defer.succeed(None)
    # Only chain the hook when one was actually configured.
    if callable(self.on_master_missing_cb):
        # drop_param discards the previous callback result before
        # invoking the hook.
        d.addCallback(defer.drop_param, self.on_master_missing_cb)
    return d
def _piped_bamprep_region(data, region, out_file, tmp_dir):
    """Do work of preparing BAM input file on the selected region.

    :raises ValueError: when the sample configuration requests no
        realignment preparation.
    """
    # Guard clause: nothing to do unless realignment prep is configured.
    if not _need_prep(data):
        raise ValueError("No realignment specified")
    prep_params = _get_prep_params(data)
    _piped_bamprep_region_gatk(data, region, prep_params, out_file, tmp_dir)
def clean(all=False, docs=False, dist=False, extra=None):
    """Clean up build files.

    :param all: remove everything (docs and dist included).
        NOTE: shadows the builtin ``all``; kept for caller compatibility.
    :param docs: also remove built documentation.
    :param dist: also remove distribution artifacts.
    :param extra: optional extra glob pattern to remove.
    """
    # Drop compiled bytecode and cache directories first.
    run('find . -type f -name "*.py[co]" -delete')
    run('find . -type d -name "__pycache__" -delete')
    targets = ['build', '*.egg-info/']
    if all or docs:
        targets.append('doc/build/*')
    if all or dist:
        targets.append('dist')
    if extra:
        targets.append(extra)
    for target in targets:
        run('rm -rf {}'.format(target))
def from_file(self, filename):
    """Update running digest with content of named file.

    Reads the file in fixed-size binary chunks so arbitrarily large
    files never need to fit in memory.

    :param filename: path of the file to digest.

    Fix: the original opened the file without a ``with`` block, leaking
    the handle if ``read()`` or ``self.update()`` raised; the context
    manager guarantees it is closed on every path.
    """
    with open(filename, 'rb') as f:
        while True:
            data = f.read(10480)  # chunk size preserved from original
            if not data:
                break
            self.update(data)
def _setsizes(self, cursor=None):
if cursor is None:
cursor = self._cursor
if self._inputsizes:
cursor.setinputsizes(self._inputsizes)
for column, size in self._outputsizes.items():
if column is None:
cursor.setoutputsize(size)
else:
cursor.setoutputsize(size, column) | Set stored input and output sizes for cursor execution. |
def _format_sync_list(self, records):
results = {}
for attributes in records:
if not isinstance(attributes, dict):
id, attributes = attributes, {}
else:
id = list(attributes.keys())[0]
attributes = attributes[id]
results[id] = attributes
return results | Format the sync list so that it is keyed by ID. |
def setup_db(self, couch, dbname):
    """Setup and configure DB.

    Creates the database if it does not exist yet and installs the
    ``_design/auth`` document; otherwise returns the existing database.

    :param couch: couch server object supporting ``in``, ``create`` and
        item access by database name.
    :param dbname: name of the database to set up.
    :returns: the database object.
    :raises RuntimeError: if creation fails (race with another creator).

    Fixes: the original had a syntactically broken line
    (``auth_doc['validate_doc_update'] =`` with no value) and two bare
    ``except:`` clauses that also swallowed SystemExit/KeyboardInterrupt.
    """
    self.log.debug('Setting up DB: %s' % dbname)
    if dbname in couch:
        return couch[dbname]
    self.log.info("DB doesn't exist so creating DB: %s", dbname)
    try:
        my_db = couch.create(dbname)
    except Exception:
        self.log.critical("Race condition caught")
        raise RuntimeError("Race condition caught when creating DB")
    try:
        auth_doc = {
            '_id': '_design/auth',
            'language': 'javascript',
            # NOTE(review): the original validate_doc_update JS source was
            # lost in this revision of the file; restore the project's real
            # validation function here. An empty string validates nothing.
            'validate_doc_update': '',
        }
        my_db.save(auth_doc)
    except Exception:
        # Best-effort: a failure to install the auth doc is logged but
        # does not prevent returning the freshly created database.
        self.log.error('Could not set permissions of %s' % dbname)
    return my_db
def _getAllEvents(self, request):
    """Return all the events in this site.

    Uses the site root page of the incoming request as the home page.
    """
    return getAllEvents(request, home=request.site.root_page)
def _copy_calibration(self, calibration):
for key, item in calibration.__dict__.items():
self.__dict__[key] = item | Copy another ``StereoCalibration`` object's values. |
def make_headers(worksheet):
    """Make headers from worksheet.

    Scans row 0 and returns a dict mapping column index to a slugified
    header for every text cell (cell_type == 1) whose slug does not
    start with an underscore.
    """
    headers = {}
    for col in range(worksheet.ncols):
        # Only text cells (xlrd type 1) can be headers.
        if worksheet.cell_type(0, col) != 1:
            continue
        label = slughifi(worksheet.cell_value(0, col))
        # Columns whose slug starts with "_" are treated as private.
        if not label.startswith("_"):
            headers[col] = label
    return headers
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.