code stringlengths 51 2.38k | docstring stringlengths 4 15.2k |
|---|---|
def send_command_ack(self, device_id, action):
yield from self._ready_to_send.acquire()
acknowledgement = None
try:
self._command_ack.clear()
self.send_command(device_id, action)
log.debug('waiting for acknowledgement')
try:
yield f... | Send command, wait for gateway to repond with acknowledgment. |
def _get_sample(self, mode, encoding):
    """Get a sample from the next current input file.

    :param str mode: The mode for opening the file.
    :param str|None encoding: The encoding of the file. None opens the
        file in binary mode.
    """
    self._open_file(mode, encoding)
    try:
        # Read only a fixed-size prefix; enough for dialect sniffing.
        self._sample = self._file.read(UniversalCsvReader.sample_size)
    finally:
        # Close even when the read raises, so the handle never leaks
        # (the original closed only on the success path).
        self._file.close()
def _get_obj_ct(self, obj):
if not hasattr(obj, '_wfct'):
if hasattr(obj, 'polymorphic_ctype'):
obj._wfct = obj.polymorphic_ctype
else:
obj._wfct = ContentType.objects.get_for_model(obj)
return obj._wfct | Look up and return object's content type and cache for reuse |
def getLogger(cls, name=None):
    """Retrieve the Python native logger in the class namespace.

    :param name: Name of the child logger (``<BASENAME>.<name>``);
        ``None`` returns the ``BASENAME`` root logger.
    :return: The :class:`logging.Logger` instance.
    """
    if name:
        logger_name = "{0}.{1}".format(cls.BASENAME, name)
    else:
        logger_name = cls.BASENAME
    return logging.getLogger(logger_name)
def from_outcars_and_structures(cls, outcars, structures,
calc_ionic_from_zval=False):
p_elecs = []
p_ions = []
for i, o in enumerate(outcars):
p_elecs.append(o.p_elec)
if calc_ionic_from_zval:
p_ions.append(
... | Create Polarization object from list of Outcars and Structures in order
of nonpolar to polar.
Note, we recommend calculating the ionic dipole moment using calc_ionic
than using the values in Outcar (see module comments). To do this set
calc_ionic_from_zval = True |
def exists_alias(self, alias_name, index_name=None):
    """Check whether or not the given alias exists.

    :param alias_name: name of the alias to look up.
    :param index_name: optional index to scope the lookup to.
    :return: True if the alias already exists.
    """
    indices = self._es_conn.indices
    return indices.exists_alias(index=index_name, name=alias_name)
def Print(self):
    """Print each hypothesis and its probability, sorted by hypothesis."""
    for hypothesis, probability in sorted(self.Items()):
        print(hypothesis, probability)
def get_image_path(image_lists, label_name, index, image_dir, category):
if label_name not in image_lists:
tf.logging.fatal('Label does not exist %s.', label_name)
label_lists = image_lists[label_name]
if category not in label_lists:
tf.logging.fatal('Category does not exist %s.', category)
category_lis... | Returns a path to an image for a label at the given index.
Args:
image_lists: OrderedDict of training images for each label.
label_name: Label string we want to get an image for.
index: Int offset of the image we want. This will be moduloed by the
available number of images for the label, so it can b... |
def report_onlysize(bytes_so_far, total_size, speed, eta):
percent = int(bytes_so_far * 100 / total_size)
current = approximate_size(bytes_so_far).center(10)
total = approximate_size(total_size).center(10)
sys.stdout.write('D: {0}% -{1}/{2}'.format(percent, current, total) + "eta {0}".format(eta))
s... | This callback for the download function is used when console width
is not enough to print the bar.
It prints only the sizes |
def log(self, timer_name, node):
    """Log an event in the named timer.

    Appends a ``{"node": ..., "time": ...}`` entry stamped with the
    current time, creating the timer list on first use.
    """
    entry = {"node": node, "time": time.time()}
    if hasattr(self, timer_name):
        getattr(self, timer_name).append(entry)
    else:
        # First event for this timer: create the backing list.
        setattr(self, timer_name, [entry])
def __getFileObj(self, f):
if not f:
raise ShapefileException("No file-like object available.")
elif hasattr(f, "write"):
return f
else:
pth = os.path.split(f)[0]
if pth and not os.path.exists(pth):
os.makedirs(pth)
... | Safety handler to verify file-like objects |
def custom_template_name(self):
    """Return the path for the custom special coverage template."""
    base_path = getattr(settings, "CUSTOM_SPECIAL_COVERAGE_PATH", "special_coverage/custom")
    if base_path is None:
        # An explicitly-None setting means "no prefix".
        base_path = ""
    slug_part = self.slug.replace("-", "_")
    template_path = "{0}/{1}_custom.html".format(base_path, slug_part)
    return template_path.lstrip("/")
def _update_show_toolbars_action(self):
if self.toolbars_visible:
text = _("Hide toolbars")
tip = _("Hide toolbars")
else:
text = _("Show toolbars")
tip = _("Show toolbars")
self.show_toolbars_action.setText(text)
self.show_toolbars... | Update the text displayed in the menu entry. |
async def pulse(self):
    """Send a get-state request to the API to keep the connection alive.

    Raises:
        PyVLXException: if the gateway does not acknowledge the request.
    """
    state_request = GetState(pyvlx=self.pyvlx)
    await state_request.do_api_call()
    if not state_request.success:
        raise PyVLXException("Unable to send get state.")
def wrap(self, row: Union[Mapping[str, Any], Sequence[Any]]):
return (
self.dataclass(
**{
ident: row[column_name]
for ident, column_name in self.ids_and_column_names.items()
}
)
if isinstance(row, Mappin... | Return row tuple for row. |
def get_version():
    """Get the python-manta version without having to import the manta
    package, which requires deps to already be installed.
    """
    version_path = TOP + "/manta/version.py"
    _globals = {}
    _locals = {}
    # Use a context manager so the file handle is closed promptly
    # (the original relied on garbage collection to close it).
    with open(version_path) as f:
        source = f.read()
    exec(compile(source, version_path, 'exec'), _globals, _locals)
    return _locals["__version__"]
def encode_hook(self, hook, msg):
    """Encode a commit hook dict into the protobuf message. Used in
    bucket properties.

    :param hook: the hook to encode
    :type hook: dict
    :param msg: the protobuf message to fill
    :type msg: riak.pb.riak_pb2.RpbCommitHook
    :rtype riak.pb.riak_pb2.RpbCommitHook
    """
    if 'name' not in hook:
        # No named hook: encode as a module/function style hook.
        self.encode_modfun(hook, msg.modfun)
    else:
        msg.name = str_to_bytes(hook['name'])
    return msg
def _format_type_in_doc(self, namespace, data_type):
if is_void_type(data_type):
return 'None'
elif is_user_defined_type(data_type):
return ':class:`{}.{}.{}`'.format(
self.args.types_package, namespace.name, fmt_type(data_type))
else:
return f... | Returns a string that can be recognized by Sphinx as a type reference
in a docstring. |
def enum(cls, options, values):
    """Create an ArgumentType factory where you choose one of a set of
    known values.

    ``options`` is a sequence of (name, real_value) pairs; only the real
    values are used by the produced factory.
    """
    real = tuple(real_value for _, real_value in options)

    def factory(i, name):
        # The lookup maps a single stored index onto its real value.
        return cls(i, name, (len(real),), lambda a: real[a[0]], values)
    return factory
def inspect_filter_calculation(self):
try:
node = self.ctx.cif_filter
self.ctx.cif = node.outputs.cif
except exceptions.NotExistent:
self.report('aborting: CifFilterCalculation<{}> did not return the required cif output'.format(node.uuid))
return self.exit... | Inspect the result of the CifFilterCalculation, verifying that it produced a CifData output node. |
def main():
logging.basicConfig()
logger.info("mmi-runner")
warnings.warn(
"You are using the mmi-runner script, please switch to `mmi runner`",
DeprecationWarning
)
arguments = docopt.docopt(__doc__)
kwargs = parse_args(arguments)
runner = mmi.runner.Runner(
**kwargs... | run mmi runner |
def getVisibility(self):
try:
if self.map[GET_VISIBILITY_PROPERTY] == 'VISIBLE':
return VISIBLE
elif self.map[GET_VISIBILITY_PROPERTY] == 'INVISIBLE':
return INVISIBLE
elif self.map[GET_VISIBILITY_PROPERTY] == 'GONE':
return GON... | Gets the View visibility |
def unbind(self, handler, argspec):
    """Stop calling *handler* when args match *argspec*.

    :param argspec: instance of ArgSpec - args to be matched
    """
    key = argspec.key
    bucket = self.handlers[key]
    bucket.remove((handler, argspec))
    if not bucket:
        # Drop empty buckets so the key set mirrors live subscriptions.
        del self.handlers[key]
def listrecursive(path, ext=None):
filenames = set()
for root, dirs, files in os.walk(path):
if ext:
if ext == 'tif' or ext == 'tiff':
tmp = fnmatch.filter(files, '*.' + 'tiff')
files = tmp + fnmatch.filter(files, '*.' + 'tif')
else:
... | List files recurisvely |
def get_fetcher_assets(self, dt):
if self._extra_source_df is None:
return []
day = normalize_date(dt)
if day in self._extra_source_df.index:
assets = self._extra_source_df.loc[day]['sid']
else:
return []
if isinstance(assets, pd.Series):
... | Returns a list of assets for the current date, as defined by the
fetcher data.
Returns
-------
list: a list of Asset objects. |
def is_job_done(job_id, conn=None):
    """Check whether the Brain.Jobs status for *job_id* is 'Done'.

    :param job_id: <str> id for the job
    :param conn: (optional)<connection> to run on
    :return: the matching job document if done, otherwise False
    """
    done_jobs = RBJ.get_all(DONE, index=STATUS_FIELD)
    result = False
    # The cursor yields matching documents; keep the last one seen.
    for match in done_jobs.filter({ID_FIELD: job_id}).run(conn):
        result = match
    return result
def mnemonic(self, value):
    """Set the instruction mnemonic.

    Raises:
        ValueError: if *value* is not a known REIL mnemonic.
    """
    if value not in REIL_MNEMONICS:
        # ValueError is more precise than a bare Exception and is still
        # caught by any existing ``except Exception`` handlers.
        raise ValueError("Invalid instruction mnemonic : %s" % str(value))
    self._mnemonic = value
def pause(self):
    """Pause pulse capture.

    Sends a "p" control message (blocking, message type 1) to the
    capture process and records the paused state locally.
    """
    self._mq.send("p", True, type=1)
    self._paused = True
def apply_and_save(self):
patches = self.patches
content = None
with open(self.IN_PATH) as f_in:
content = f_in.read()
for key in self.replaced_word_dict:
content = content.replace(key, self.replaced_word_dict[key])
out_patches = []
for patch in pa... | Apply replaced words and patches, and save setup.py file. |
def sas_interconnect_types(self):
    """Gets the SasInterconnectTypes API client.

    Returns:
        SasInterconnectTypes: the client, created lazily on first access
        and cached for reuse.
    """
    if not self.__sas_interconnect_types:
        self.__sas_interconnect_types = SasInterconnectTypes(self.__connection)
    return self.__sas_interconnect_types
def pull_all_external(collector, **kwargs):
deps = set()
images = collector.configuration["images"]
for layer in Builder().layered(images):
for image_name, image in layer:
for dep in image.commands.external_dependencies:
deps.add(dep)
for dep in sorted(deps):
... | Pull all the external dependencies of all the images |
def shutdown(message=None, timeout=5, force_close=True, reboot=False,
in_seconds=False, only_on_pending_reboot=False):
if six.PY2:
message = _to_unicode(message)
timeout = _convert_minutes_seconds(timeout, in_seconds)
if only_on_pending_reboot and not get_pending_reboot():
retur... | Shutdown a running system.
Args:
message (str):
The message to display to the user before shutting down.
timeout (int):
The length of time (in seconds) that the shutdown dialog box should
be displayed. While this dialog box is displayed, the shutdown can
... |
def filter(self, local_name=None, name=None, ns_uri=None, node_type=None,
filter_fn=None, first_only=False):
if filter_fn is None:
def filter_fn(n):
if node_type is not None:
if isinstance(node_type, int):
if not n.is_type(node_... | Apply filters to the set of nodes in this list.
:param local_name: a local name used to filter the nodes.
:type local_name: string or None
:param name: a name used to filter the nodes.
:type name: string or None
:param ns_uri: a namespace URI used to filter the nodes.
... |
def _modifyItemTag(self, item_id, action, tag):
    """Wrapper around the actual HTTP POST string for modifying tags.

    :param item_id: identifier of the item to edit.
    :param action: POST field name for the edit (used as a dict key).
    :param tag: the tag value submitted under *action*.
    """
    return self.httpPost(ReaderUrl.EDIT_TAG_URL,
                         {'i': item_id, action: tag, 'ac': 'edit-tags'})
def process(self, metric):
if not boto:
return
collector = str(metric.getCollectorPath())
metricname = str(metric.getMetricPath())
for rule in self.rules:
self.log.debug(
"Comparing Collector: [%s] with (%s) "
"and Metric: [%s] with... | Process a metric and send it to CloudWatch |
def get_arrays_from_file(params_file, params=None):
try:
f = h5py.File(params_file, 'r')
except:
raise ValueError('File not found.')
if params is not None:
if not isinstance(params, list):
params = [params]
for p in params:
... | Reads the values of one or more parameters from an hdf file and
returns as a dictionary.
Parameters
----------
params_file : str
The hdf file that contains the values of the parameters.
params : {None, list}
If provided, will just retrieve the given param... |
def get_file_listing_sha(listing_paths: Iterable) -> str:
    """Return the sha256 hex digest for a group of FTP listings.

    Order-insensitive: the paths are sorted before hashing.
    """
    joined = ''.join(sorted(listing_paths))
    return sha256(joined.encode('utf-8')).hexdigest()
def _shutdown_unlocked(self, context, lru=None, new_context=None):
LOG.info('%r._shutdown_unlocked(): shutting down %r', self, context)
context.shutdown()
via = self._via_by_context.get(context)
if via:
lru = self._lru_by_via.get(via)
if lru:
if co... | Arrange for `context` to be shut down, and optionally add `new_context`
to the LRU list while holding the lock. |
def clear(self):
    """Remove all existing done markers and the file used to store them."""
    # EAFP removal avoids the check-then-remove race where the file
    # disappears between os.path.exists() and os.remove().
    try:
        os.remove(self.path)
    except FileNotFoundError:
        # Nothing stored yet; nothing to clear.
        pass
def create_appointment_group(self, appointment_group, **kwargs):
from canvasapi.appointment_group import AppointmentGroup
if (
isinstance(appointment_group, dict) and
'context_codes' in appointment_group and
'title' in appointment_group
):
... | Create a new Appointment Group.
:calls: `POST /api/v1/appointment_groups \
<https://canvas.instructure.com/doc/api/appointment_groups.html#method.appointment_groups.create>`_
:param appointment_group: The attributes of the appointment group.
:type appointment_group: `dict`
:par... |
def claim_exp(self, data):
    """Required expiration time.

    Computed as ``self.now`` plus ``OAUTH_ID_TOKEN_EXPIRATION`` seconds
    (default 30), returned as a UNIX timestamp.
    """
    lifetime_seconds = getattr(settings, 'OAUTH_ID_TOKEN_EXPIRATION', 30)
    expires_at = self.now + timedelta(seconds=lifetime_seconds)
    return timegm(expires_at.utctimetuple())
def request(self, method, params):
identifier = random.randint(1, 1000)
self._transport.write(jsonrpc_request(method, identifier, params))
self._buffer[identifier] = {'flag': asyncio.Event()}
yield from self._buffer[identifier]['flag'].wait()
result = self._buffer[identifier]['da... | Send a JSONRPC request. |
def update_ngram(self, ngram, count):
    """Update a given ngram in the database.

    The ngram has to be in the database already, otherwise the UPDATE
    matches nothing.

    Parameters
    ----------
    ngram : iterable of str
        A list, set or tuple of strings.
    count : int
        The count for the given n-gram.
    """
    statement = "".join([
        "UPDATE _{0}_gram SET count = {1}".format(len(ngram), count),
        self._build_where_clause(ngram),
        ";",
    ])
    self.execute_sql(statement)
def _prepare_load_balancers(self):
    """Prepare load balancer variables.

    Attaches the stack's name/version to every configured balancer.
    """
    stack_info = {
        A.NAME: self[A.NAME],
        A.VERSION: self[A.VERSION],
    }
    for balancer in self.get(R.LOAD_BALANCERS, []):
        # Each balancer gets its own vars dict referencing the stack info.
        balancer[A.loadbalancer.VARS] = {A.STACK: stack_info}
def build_message(self, stat, value):
    """Build a metric line in Graphite plaintext format:
    ``<prefix><stat> <value> <timestamp>``.
    """
    timestamp = round(time())
    return ' '.join((self.prefix + str(stat), str(value), str(timestamp)))
def get_multiplicon_seeds(self, redundant=False):
for node in self._multiplicon_graph.nodes():
if not len(self._multiplicon_graph.in_edges(node)):
if not self.is_redundant_multiplicon(node):
yield node
elif redundant:
yield node... | Return a generator of the IDs of multiplicons that are initial
seeding 'pairs' in level 2 multiplicons.
Arguments:
o redundant - if true, report redundant multiplicons |
def _cls_fqn(self, cls):
ns = self._namespace_stack[-1]
if ns in ['__base__', None]:
return cls.__name__
else:
return ns + '.' + cls.__name__ | Returns fully qualified name for the class based on current namespace
and the class name. |
def eval_constraints(self, constraints):
    """Return whether the constraints are satisfied trivially by the
    last model; a division-by-zero during evaluation counts as unsat."""
    try:
        for constraint in constraints:
            if not self.eval_ast(constraint):
                return False
        return True
    except errors.ClaripyZeroDivisionError:
        return False
def random_subset_ids_by_count(self, count_per_class=1):
class_sizes = self.class_sizes
subsets = list()
if count_per_class < 1:
warnings.warn('Atleast one sample must be selected from each class')
return list()
elif count_per_class >= self.num_samples:
... | Returns a random subset of sample ids of specified size by count,
within each class.
Parameters
----------
count_per_class : int
Exact number of samples per each class.
Returns
-------
subset : list
Combined list of sample ids from al... |
def score(self, env=None, score_out=None):
messages = {}
self.assignment.set_args(
score=True,
score_out=score_out,
)
if env is None:
import __main__
env = __main__.__dict__
self.run('scoring', messages, env=env)
return mess... | Run the scoring protocol.
score_out -- str; a file name to write the point breakdown
into.
Returns: dict; maps score tag (str) -> points (float) |
def interconnect_all(self):
    """Propagate dependencies for provided instances."""
    for dependency in topologically_sorted(self._provides):
        declares_injections = hasattr(dependency, '__injections__')
        already_injected = hasattr(dependency, '__injections_source__')
        if declares_injections and not already_injected:
            self.inject(dependency)
def _update_task(self, task):
self.task = task
self.task.data.update(self.task_data)
self.task_type = task.task_spec.__class__.__name__
self.spec = task.task_spec
self.task_name = task.get_name()
self.activity = getattr(self.spec, 'service_class', '')
self._set_la... | Assigns current task step to self.task
then updates the task's data with self.task_data
Args:
task: Task object. |
def add_how(voevent, descriptions=None, references=None):
if not voevent.xpath('How'):
etree.SubElement(voevent, 'How')
if descriptions is not None:
for desc in _listify(descriptions):
etree.SubElement(voevent.How, 'Description')
voevent.How.Description[-1] = desc
if ... | Add descriptions or references to the How section.
Args:
voevent(:class:`Voevent`): Root node of a VOEvent etree.
descriptions(str): Description string, or list of description
strings.
references(:py:class:`voeventparse.misc.Reference`): A reference element
(or list ... |
def move_into(self, destination_folder):
headers = self.headers
endpoint = 'https://outlook.office.com/api/v2.0/me/MailFolders/' + self.id + '/move'
payload = '{ "DestinationId": "' + destination_folder.id + '"}'
r = requests.post(endpoint, headers=headers, data=payload)
if check... | Move the Folder into a different folder.
This makes the Folder provided a child folder of the destination_folder.
Raises:
AuthError: Raised if Outlook returns a 401, generally caused by an invalid or expired access token.
Args:
destination_folder: A :class:`Folder <pyO... |
def init_app(self, app):
host = app.config.get('STATS_HOSTNAME', 'localhost')
port = app.config.get('STATS_PORT', 8125)
base_key = app.config.get('STATS_BASE_KEY', app.name)
client = _StatsClient(
host=host,
port=port,
prefix=base_key,
)
... | Inititialise the extension with the app object.
:param app: Your application object |
def defaultSystem():
    """Return the canonicalized system name."""
    system_name = platform.system()
    # Map the raw platform name to its canonical alias when known.
    if system_name in os_canon:
        system_name = os_canon[system_name][0]
    return system_name
def __parse(self) -> object:
char = self.data[self.idx: self.idx + 1]
if char in [b'1', b'2', b'3', b'4', b'5', b'6', b'7', b'8', b'9', b'0']:
str_len = int(self.__read_to(b':'))
return self.__read(str_len)
elif char == b'i':
self.idx += 1
return i... | Selects the appropriate method to decode next bencode element and returns the result. |
def restore_row(self, row, schema):
    """Restore a row fetched from SQL by casting each value to its
    schema field type (PostgreSQL arrays/objects are left as-is)."""
    restored = list(row)
    for index, field in enumerate(schema.fields):
        # Postgres already returns native arrays/objects; skip the cast.
        if self.__dialect == 'postgresql' and field.type in ['array', 'object']:
            continue
        restored[index] = field.cast_value(restored[index])
    return restored
def get_transactions(self, include_investment=False):
assert_pd()
s = StringIO(self.get_transactions_csv(
include_investment=include_investment))
s.seek(0)
df = pd.read_csv(s, parse_dates=['Date'])
df.columns = [c.lower().replace(' ', '_') for c in df.columns]
... | Returns the transaction data as a Pandas DataFrame. |
def change_nick(self, nick):
    """Update this user's nick in all joined channels."""
    previous_nick = self.nick
    self.nick = IRCstr(nick)
    for channel in self.channels:
        # Replace the old entry so membership sets stay consistent.
        channel.users.remove(previous_nick)
        channel.users.add(self.nick)
def setup_ssh_tunnel(job_id, local_port, remote_port):
    """Set up an ssh tunnel to the given job-id.

    Establishes the forward from local_port to remote_port and then
    exits (-f -N), keeping the tunnel in place until the job is
    terminated.
    """
    command = [
        'dx', 'ssh', '--suppress-running-check', job_id,
        '-o', 'StrictHostKeyChecking no',
        '-f', '-L', '{0}:localhost:{1}'.format(local_port, remote_port),
        '-N',
    ]
    subprocess.check_call(command)
def save(self):
    """Save xml to file, stamping the rater's modification time first."""
    if self.rater is not None:
        self.rater.set('modified', datetime.now().isoformat())
    document = parseString(tostring(self.root))
    with open(self.xml_file, 'w') as handle:
        handle.write(document.toxml())
def delete(self, monitor_id):
if not self._state:
raise InvalidState("State was not properly obtained from the app")
monitors = self.list()
bit = None
for monitor in monitors:
if monitor_id != monitor['monitor_id']:
continue
bit = monit... | Delete a monitor by ID. |
def undo(self):
_, _, undo_state = self._undo_stack.back()
spike_clusters_new = self._spike_clusters_base.copy()
for spike_ids, cluster_ids, _ in self._undo_stack:
if spike_ids is not None:
spike_clusters_new[spike_ids] = cluster_ids
changed = np.nonzero(self.... | Undo the last cluster assignment operation.
Returns
-------
up : UpdateInfo instance of the changes done by this operation. |
def get_all_chats(self):
    """Fetches all chats

    :return: List of chats
    :rtype: list[Chat]
    """
    raw_chats = self.wapi_functions.getAllChats()
    if not raw_chats:
        return []
    return [factory_chat(raw, self) for raw in raw_chats]
def get_all_longest_col_lengths(self):
    """Iterate over all columns and get their longest values.

    :return: dict, {"column_name": 132}
    """
    return {col: self._longest_val_in_column(col) for col in self.col_list}
def profile(fun, *args, **kwargs):
timer_name = kwargs.pop("prof_name", None)
if not timer_name:
module = inspect.getmodule(fun)
c = [module.__name__]
parentclass = labtypes.get_class_that_defined_method(fun)
if parentclass:
c.append(parentclass.__name__)
c.ap... | Profile a function. |
def list_of(validate_item):
def validate(value, should_raise=True):
validate_type = is_type(list)
if not validate_type(value, should_raise=should_raise):
return False
for item in value:
try:
validate_item(item)
except TypeError as e:
... | Returns a validator function that succeeds only if the input is a list, and each item in the list passes as input
to the provided validator validate_item.
:param callable validate_item: the validator function for items in the list
:returns: a function which returns True its input is an list of valid items,... |
def set_of(*generators):
class SetOfGenerators(ArbitraryInterface):
@classmethod
def arbitrary(cls):
arbitrary_set = set()
for generator in generators:
arbitrary_set |= {
arbitrary(generator)
for _ in range(arbitrary(int... | Generates a set consisting solely of the specified generators.
This is a class factory, it makes a class which is a closure around the
specified generators. |
def deepSetAttr(obj, path, val):
    """Set a deep attribute on an object by resolving a dot-delimited
    path. If path does not exist an `AttributeError` will be raised."""
    parent_path, _, leaf = path.rpartition('.')
    target = deepGetAttr(obj, parent_path) if parent_path else obj
    return setattr(target, leaf, val)
def periodogram_auto(self, oversampling=5, nyquist_factor=3,
return_periods=True):
N = len(self.t)
T = np.max(self.t) - np.min(self.t)
df = 1. / T / oversampling
f0 = df
Nf = int(0.5 * oversampling * nyquist_factor * N)
freq = f0 + df * np.arange(... | Compute the periodogram on an automatically-determined grid
This function uses heuristic arguments to choose a suitable frequency
grid for the data. Note that depending on the data window function,
the model may be sensitive to periodicity at higher frequencies than
this function return... |
def is_discrete(self):
    """Check whether this interval contains exactly one number.

    :rtype: bool
    """
    lower, upper = self.bounds
    return upper == lower and self.included == (True, True)
def to_df(self, method: str = 'MEMORY', **kwargs) -> 'pd.DataFrame':
    """Export this SAS Data Set to a Pandas Data Frame.

    :param method: defaults to MEMORY; the original method. CSV is the
        other choice which uses an intermediary csv file; faster for
        large data.
    :param kwargs: passed through to ``sasdata2dataframe``.
    :return: Pandas data frame, or None if the data set is invalid.
    """
    problem = self._is_valid()
    if problem:
        print(problem['LOG'])
        return None
    return self.sas.sasdata2dataframe(
        self.table, self.libref, self.dsopts, method, **kwargs)
def start_all_linking(self, link_type, group_id):
    """Begin all-linking for the given link type and group."""
    self.logger.info("start_all_linking for type %s group %s",
                     link_type, group_id)
    command = '0264' + link_type + group_id
    self.direct_command_hub(command)
def entity_list(args):
    """List entities in a workspace as "type<TAB>name" rows."""
    response = fapi.get_entities_with_type(args.project, args.workspace)
    fapi._check_response_code(response, 200)
    return ['{0}\t{1}'.format(entity['entityType'], entity['name'])
            for entity in response.json()]
def all_equal(keys, axis=semantics.axis_default):
    """Return True if all keys are equal.

    True exactly when the grouped index contains a single group.
    """
    index = as_index(keys, axis)
    return index.groups == 1
def draw(self):
    """Draw MV grid's graph using the geo data of nodes.

    Notes
    -----
    This method uses the coordinates stored in the nodes' geoms which
    are usually conformal, not equidistant. Therefore, the plot might
    be distorted and does not (fully) reflect the real positions or
    distances between nodes.
    """
    positions = {node: (node.geom.x, node.geom.y)
                 for node in self.graph.nodes()}
    plt.figure()
    nx.draw_networkx(self.graph, positions, node_size=16, font_size=8)
    plt.show()
def convert_string(string):
    """Convert a string to int, float, bool or None; otherwise return
    the string unchanged."""
    if is_int(string):
        return int(string)
    if is_float(string):
        return float(string)
    # Call convert_bool once and reuse its (matched, value) result
    # instead of invoking it twice as the original did.
    matched, bool_value = convert_bool(string)
    if matched:
        return bool_value
    if string == 'None':
        return None
    return string
def toml(uncertainty):
    """Converts an uncertainty node into a TOML string.

    The node text becomes an inline TOML array (bracketed if not
    already) and each XML attribute becomes a ``key = value`` line.
    """
    text = uncertainty.text.strip()
    if not text.startswith('['):
        text = '[%s]' % text
    for key, raw in uncertainty.attrib.items():
        try:
            value = ast.literal_eval(raw)
        except (ValueError, SyntaxError):
            # literal_eval raises SyntaxError for unparsable strings
            # (e.g. "foo bar"); the original only caught ValueError.
            # Fall back to a quoted string either way.
            value = repr(raw)
        text += '\n%s = %s' % (key, value)
    return text
def start(self, children):
composites = []
for composite_dict in children:
if False and self.include_position:
key_token = composite_dict[1]
key_name = key_token.value.lower()
composites_position = self.get_position_dict(composite_dict)
... | Parses a MapServer Mapfile
Parsing of partial Mapfiles or lists of composites is also possible |
def from_wif(cls, wif, network=BitcoinMainNet):
wif = ensure_str(wif)
try:
extended_key_bytes = base58.b58decode_check(wif)
except ValueError as e:
raise ChecksumException(e)
network_bytes = extended_key_bytes[0]
if not isinstance(network_bytes, six.intege... | Import a key in WIF format.
WIF is Wallet Import Format. It is a base58 encoded checksummed key.
See https://en.bitcoin.it/wiki/Wallet_import_format for a full
description.
This supports compressed WIFs - see this for an explanation:
http://bitcoin.stackexchange.com/questions/7... |
def rate_limits(self):
    """Returns a list of rate limit details.

    Parsed lazily from the last response and cached for reuse.
    """
    if not self._rate_limits:
        self._rate_limits = utilities.get_rate_limits(self.response)
    return self._rate_limits
def _merge_any_two_boxes(self, box_list):
n = len(box_list)
for i in range(n):
for j in range(i + 1, n):
if self._are_nearby_parallel_boxes(box_list[i], box_list[j]):
a, b = box_list[i], box_list[j]
merged_points = np.vstack([a.points, ... | Given a list of boxes, finds two nearby parallel ones and merges them. Returns false if none found. |
def write_text(filename, data, add=False):
    """Write image data to text file

    :param filename: name of text file to write data to
    :type filename: str
    :param data: image data to write to text file
    :type data: numpy array
    :param add: whether to append to existing file or not.
        Default is ``False``
    :type add: bool
    """
    mode = 'a' if add else 'w'
    with open(filename, mode) as handle:
        print(data, end='', file=handle)
def flat_list_to_polymer(atom_list, atom_group_s=4):
atom_labels = ['N', 'CA', 'C', 'O', 'CB']
atom_elements = ['N', 'C', 'C', 'O', 'C']
atoms_coords = [atom_list[x:x + atom_group_s]
for x in range(0, len(atom_list), atom_group_s)]
atoms = [[Atom(x[0], x[1]) for x in zip(y, atom_elem... | Takes a flat list of atomic coordinates and converts it to a `Polymer`.
Parameters
----------
atom_list : [Atom]
Flat list of coordinates.
atom_group_s : int, optional
Size of atom groups.
Returns
-------
polymer : Polypeptide
`Polymer` object containing atom coords... |
def install(self, io_handler, module_name):
    """Installs the bundle with the given module name and returns its ID."""
    bundle = self._context.install_bundle(module_name)
    # Fetch the ID once instead of calling get_bundle_id() twice.
    bundle_id = bundle.get_bundle_id()
    io_handler.write_line("Bundle ID: {0}", bundle_id)
    return bundle_id
def get_task_progress(self, task_name):
    """Get task's current progress

    :param task_name: task_name
    :return: the task's progress
    :rtype: :class:`odps.models.Instance.Task.TaskProgress`
    """
    query = {'instanceprogress': task_name, 'taskname': task_name}
    response = self._client.get(self.resource(), params=query)
    return Instance.Task.TaskProgress.parse(self._client, response)
def export(self, out_filename):
with zipfile.ZipFile(out_filename, 'w', zipfile.ZIP_DEFLATED) as arc:
id_list = list(self.get_thread_info())
for num, my_info in enumerate(id_list):
logging.info('Working on item %i : %s', num, my_info['number'])
my_thread =... | Export desired threads as a zipfile to out_filename. |
def Read(f):
try:
yaml_data = yaml.load(f)
except yaml.YAMLError as e:
raise ParseError('%s' % e)
except IOError as e:
raise YAMLLoadError('%s' % e)
_CheckData(yaml_data)
try:
return Config(
yaml_data.get('blacklist', ()),
yaml_data.get('whitelist', ('*')))
except UnicodeDeco... | Reads and returns Config data from a yaml file.
Args:
f: Yaml file to parse.
Returns:
Config object as defined in this file.
Raises:
Error (some subclass): If there is a problem loading or parsing the file. |
def combine_with(self, rgbd_im):
new_data = self.data.copy()
depth_data = self.depth.data
other_depth_data = rgbd_im.depth.data
depth_zero_px = self.depth.zero_pixels()
depth_replace_px = np.where(
(other_depth_data != 0) & (
other_depth_data < depth_d... | Replaces all zeros in the source rgbd image with the values of a different rgbd image
Parameters
----------
rgbd_im : :obj:`RgbdImage`
rgbd image to combine with
Returns
-------
:obj:`RgbdImage`
the combined rgbd image |
def flush(self):
    """Ensure all logging output has been flushed.

    No-op once shutdown has begun; otherwise force-flush the buffers
    and block until the queue has drained past a flush marker.
    """
    if self.shutdown:
        return
    self.flush_buffers(force=True)
    # Enqueue a marker and wait for the worker to consume past it.
    self.queue.put(FLUSH_MARKER)
    self.queue.join()
def _mpl_to_vispy(fig):
    """Convert a given matplotlib figure to vispy.

    This function is experimental and subject to change!
    Requires matplotlib and mplexporter.

    Parameters
    ----------
    fig : instance of matplotlib Figure
        The populated figure to display.

    Returns
    -------
    canvas : instance of Canvas
        The resulting canvas.
    """
    renderer = VispyRenderer()
    # The export can emit spurious warnings; record them instead of
    # surfacing them to the caller.
    with warnings.catch_warnings(record=True):
        Exporter(renderer).run(fig)
    renderer._vispy_done()
    return renderer.canvas
def _guess_record(self, rtype, name=None, content=None):
records = self._list_records_internal(
identifier=None, rtype=rtype, name=name, content=content)
if len(records) == 1:
return records[0]
if len(records) > 1:
raise Exception(
'Identifier ... | Tries to find existing unique record by type, name and content |
def create_for_block(
cls, i=None, name=None, cname=None, version=None, **kwargs):
if cname is None:
cname = name or 'values_block_{idx}'.format(idx=i)
if name is None:
name = cname
try:
if version[0] == 0 and version[1] <= 10 and version[2] == 0:
... | return a new datacol with the block i |
def get_crash_signature(error_line):
    """Try to get a crash signature from the given error_line string.

    Returns the matched term when it is a helpful search term,
    otherwise None.
    """
    match = CRASH_RE.match(error_line)
    if match and is_helpful_search_term(match.group(1)):
        return match.group(1)
    return None
def field_value(self, value):
    """Validate against NodeType.

    Array fields coerce each element of list/tuple/set inputs; scalars
    (and non-sequence values on array fields) are coerced directly.
    """
    if self.is_array and isinstance(value, (list, tuple, set)):
        return [self.field_type(item) for item in value]
    return self.field_type(value)
def _get_kernel_data(self, nmr_samples, thinning, return_output):
kernel_data = {
'data': self._data,
'method_data': self._get_mcmc_method_kernel_data(),
'nmr_iterations': Scalar(nmr_samples * thinning, ctype='ulong'),
'iteration_offset': Scalar(self._sampling_ind... | Get the kernel data we will input to the MCMC sampler.
This sets the items:
* data: the pointer to the user provided data
* method_data: the data specific to the MCMC method
* nmr_iterations: the number of iterations to sample
* iteration_offset: the current sample index, that ... |
def on_attribute(self, node):
ctx = node.ctx.__class__
if ctx == ast.Store:
msg = "attribute for storage: shouldn't be here!"
self.raise_exception(node, exc=RuntimeError, msg=msg)
sym = self.run(node.value)
if ctx == ast.Del:
return delattr(sym, node.a... | Extract attribute. |
def _auto_commit(self, by_count=False):
if (self._stopping or self._shuttingdown or (not self._start_d) or
(self._last_processed_offset is None) or
(not self.consumer_group) or
(by_count and not self.auto_commit_every_n)):
return
if (not by_cou... | Check if we should start a new commit operation and commit |
def config(config_dict: typing.Mapping) -> Config:
    """Configures the konch shell. This function should be called in a
    .konchrc file.

    :param dict config_dict: Dict that may contain 'context', 'banner',
        and/or 'shell' (default shell class to use).
    """
    logger.debug(f"Updating with {config_dict}")
    _cfg.update(config_dict)
    return _cfg
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.