code stringlengths 51 2.38k | docstring stringlengths 4 15.2k |
|---|---|
def voip(self):
if self._voip is None:
self._voip = VoipList(
self._version,
account_sid=self._solution['account_sid'],
country_code=self._solution['country_code'],
)
return self._voip | Access the voip
:returns: twilio.rest.api.v2010.account.available_phone_number.voip.VoipList
:rtype: twilio.rest.api.v2010.account.available_phone_number.voip.VoipList |
def _to_pywintypes(row):
def _pywintype(x):
if isinstance(x, dt.date):
return dt.datetime(x.year, x.month, x.day, tzinfo=dt.timezone.utc)
elif isinstance(x, (dt.datetime, pa.Timestamp)):
if x.tzinfo is None:
return x.replace(tzinfo=dt.timezone.utc)
elif isinstance(x, str):
if re.match("^\d{4}-\d{2}-\d{2}$", x):
return "'" + x
return x
elif isinstance(x, np.integer):
return int(x)
elif isinstance(x, np.floating):
return float(x)
elif x is not None and not isinstance(x, (str, int, float, bool)):
return str(x)
return x
return [_pywintype(x) for x in row] | convert values in a row to types accepted by excel |
def disconnect_all(self):
rhs = 'b:' + self.definition['node_class'].__label__
rel = _rel_helper(lhs='a', rhs=rhs, ident='r', **self.definition)
q = 'MATCH (a) WHERE id(a)={self} MATCH ' + rel + ' DELETE r'
self.source.cypher(q) | Disconnect all nodes
:return: |
def start(self):
with self._lock_send_remaining_time:
if self._send_remaining_time <= 0.0:
local_send_interval = self._send_interval
if self._send_interval < 0.1:
local_send_interval = 0.1
self._send_remaining_time = self._send_time
if self._send_remaining_time < local_send_interval:
self._send_remaining_time = local_send_interval
thread = Thread(target=self._run)
thread.daemon = True
thread.start() | Starts a new sender thread if none is not already there |
def run(self, i, o):
self.input = i
self.output = PoetryStyle(i, o)
for logger in self._loggers:
self.register_logger(logging.getLogger(logger))
return super(BaseCommand, self).run(i, o) | Initialize command. |
def _get_branch_opts(branch, local_branch, all_local_branches,
desired_upstream, git_ver=None):
if branch is not None and branch not in all_local_branches:
return None
if git_ver is None:
git_ver = _LooseVersion(__salt__['git.version'](versioninfo=False))
ret = []
if git_ver >= _LooseVersion('1.8.0'):
ret.extend(['--set-upstream-to', desired_upstream])
else:
ret.append('--set-upstream')
ret.append(local_branch if branch is None else branch)
ret.append(desired_upstream)
return ret | DRY helper to build list of opts for git.branch, for the purposes of
setting upstream tracking branch |
def _get_format_timedelta64(values, nat_rep='NaT', box=False):
values_int = values.astype(np.int64)
consider_values = values_int != iNaT
one_day_nanos = (86400 * 1e9)
even_days = np.logical_and(consider_values,
values_int % one_day_nanos != 0).sum() == 0
all_sub_day = np.logical_and(
consider_values, np.abs(values_int) >= one_day_nanos).sum() == 0
if even_days:
format = None
elif all_sub_day:
format = 'sub_day'
else:
format = 'long'
def _formatter(x):
if x is None or (is_scalar(x) and isna(x)):
return nat_rep
if not isinstance(x, Timedelta):
x = Timedelta(x)
result = x._repr_base(format=format)
if box:
result = "'{res}'".format(res=result)
return result
return _formatter | Return a formatter function for a range of timedeltas.
These will all have the same format argument
If box, then show the return in quotes |
def send(self, **kwargs):
payload = self.api_payload()
payload.update(**kwargs)
return self.api_method()(**payload) | Combines api_payload and api_method to submit the current object to the API |
def _netsh_file(content):
with tempfile.NamedTemporaryFile(mode='w',
prefix='salt-',
suffix='.netsh',
delete=False) as fp:
fp.write(content)
try:
log.debug('%s:\n%s', fp.name, content)
return salt.modules.cmdmod.run('netsh -f {0}'.format(fp.name), python_shell=True)
finally:
os.remove(fp.name) | helper function to get the results of ``netsh -f content.txt``
Running ``netsh`` will drop you into a ``netsh`` prompt where you can issue
``netsh`` commands. You can put a series of commands in an external file and
run them as if from a ``netsh`` prompt using the ``-f`` switch. That's what
this function does.
Args:
content (str):
The contents of the file that will be run by the ``netsh -f``
command
Returns:
str: The text returned by the netsh command |
def get_locale ():
try:
loc, encoding = locale.getdefaultlocale()
except ValueError:
loc, encoding = None, None
if loc is None:
loc = "C"
else:
loc = norm_locale(loc)
if encoding is None:
encoding = "ascii"
return (loc, encoding) | Search the default platform locale and norm it.
@returns (locale, encoding)
@rtype (string, string) |
def StoreStat(self, responses):
index = responses.request_data["index"]
if not responses.success:
self.Log("Failed to stat file: %s", responses.status)
self._FileFetchFailed(index, responses.request_data["request_name"])
return
tracker = self.state.pending_hashes[index]
tracker["stat_entry"] = responses.First() | Stores stat entry in the flow's state. |
def getInstalled():
installed = _find_installed()
return [os.path.basename(p.pathname)[3:] for p in installed] | Returns a list of strings representing the installed wxPython
versions that are found on the system. |
def _get_short_description(self):
if self.description is None:
return None
lines = [x for x in self.description.split('\n')]
if len(lines) == 1:
return lines[0]
elif len(lines) >= 3 and lines[1] == '':
return lines[0]
return None | Return the first line of a multiline description
Returns:
string: The short description, otherwise None |
def _to_ctfile_property_block(self):
ctab_properties_data = defaultdict(list)
for atom in self.atoms:
for ctab_property_key, ctab_property_value in atom._ctab_property_data.items():
ctab_properties_data[ctab_property_key].append(OrderedDict(
zip(self.ctab_conf[self.version][ctab_property_key]['values'],
[atom.atom_number, ctab_property_value])))
ctab_property_lines = []
for ctab_property_key, ctab_property_value in ctab_properties_data.items():
for entry in ctab_property_value:
ctab_property_line = '{} {}{}'.format(self.ctab_conf[self.version][ctab_property_key]['fmt'],
1, ''.join([str(value).rjust(4) for value in entry.values()]))
ctab_property_lines.append(ctab_property_line)
if ctab_property_lines:
return '{}\n'.format('\n'.join(ctab_property_lines))
return '' | Create ctab properties block in `CTfile` format from atom-specific properties.
:return: Ctab property block.
:rtype: :py:class:`str` |
def dumpindented(self, pn, indent=0):
page = self.readpage(pn)
print(" " * indent, page)
if page.isindex():
print(" " * indent, end="")
self.dumpindented(page.preceeding, indent + 1)
for p in range(len(page.index)):
print(" " * indent, end="")
self.dumpindented(page.getpage(p), indent + 1) | Dump all nodes of the current page with keys indented, showing how the `indent`
feature works |
def _get_or_add_definition(self):
if self._has_definition:
return self._definition
prior_headerfooter = self._prior_headerfooter
if prior_headerfooter:
return prior_headerfooter._get_or_add_definition()
return self._add_definition() | Return HeaderPart or FooterPart object for this section.
If this header/footer inherits its content, the part for the prior header/footer
is returned; this process continue recursively until a definition is found. If
the definition cannot be inherited (because the header/footer belongs to the
first section), a new definition is added for that first section and then
returned. |
def set_position(x, y, stream=STD_OUTPUT_HANDLE):
stream = kernel32.GetStdHandle(stream)
value = x + (y << 16)
kernel32.SetConsoleCursorPosition(stream, c_long(value)) | Sets current position of the cursor. |
def _get_reflectance(self, projectables, optional_datasets):
_nir, _tb11 = projectables
LOG.info('Getting reflective part of %s', _nir.attrs['name'])
sun_zenith = None
tb13_4 = None
for dataset in optional_datasets:
wavelengths = dataset.attrs.get('wavelength', [100., 0, 0])
if (dataset.attrs.get('units') == 'K' and
wavelengths[0] <= 13.4 <= wavelengths[2]):
tb13_4 = dataset
elif ("standard_name" in dataset.attrs and
dataset.attrs["standard_name"] == "solar_zenith_angle"):
sun_zenith = dataset
if sun_zenith is None:
if sun_zenith_angle is None:
raise ImportError("No module named pyorbital.astronomy")
lons, lats = _nir.attrs["area"].get_lonlats_dask(CHUNK_SIZE)
sun_zenith = sun_zenith_angle(_nir.attrs['start_time'], lons, lats)
return self._refl3x.reflectance_from_tbs(sun_zenith, _nir, _tb11, tb_ir_co2=tb13_4) | Calculate 3.x reflectance with pyspectral. |
def place_order(self, package_keyname, location, item_keynames, complex_type=None,
hourly=True, preset_keyname=None, extras=None, quantity=1):
order = self.generate_order(package_keyname, location, item_keynames,
complex_type=complex_type, hourly=hourly,
preset_keyname=preset_keyname,
extras=extras, quantity=quantity)
return self.order_svc.placeOrder(order) | Places an order with the given package and prices.
This function takes in parameters needed for an order and places the order.
:param str package_keyname: The keyname for the package being ordered
:param str location: The datacenter location string for ordering (Ex: DALLAS13)
:param list item_keynames: The list of item keyname strings to order. To see list of
possible keynames for a package, use list_items()
(or `slcli order item-list`)
:param str complex_type: The complex type to send with the order. Typically begins
with `SoftLayer_Container_Product_Order_`.
:param bool hourly: If true, uses hourly billing, otherwise uses monthly billing
:param string preset_keyname: If needed, specifies a preset to use for that package.
To see a list of possible keynames for a package, use
list_preset() (or `slcli order preset-list`)
:param dict extras: The extra data for the order in dictionary format.
Example: A VSI order requires hostname and domain to be set, so
extras will look like the following:
{'virtualGuests': [{'hostname': 'test', domain': 'softlayer.com'}]}
:param int quantity: The number of resources to order |
def _double_as_bytes(dval):
"Use struct.unpack to decode a double precision float into eight bytes"
tmp = list(struct.unpack('8B',struct.pack('d', dval)))
if not _big_endian:
tmp.reverse()
return tmp | Use struct.unpack to decode a double precision float into eight bytes |
def get_value(self, subsystem, option):
assert subsystem in self, 'Subsystem {} is missing'.format(subsystem)
return util.read_file(self.per_subsystem[subsystem], subsystem + '.' + option) | Read the given value from the given subsystem.
Do not include the subsystem name in the option name.
Only call this method if the given subsystem is available. |
def each(self, callback):
items = self.items
for item in items:
if callback(item) is False:
break
return self | Execute a callback over each item.
.. code::
collection = Collection([1, 2, 3])
collection.each(lambda x: x + 3)
.. warning::
It only applies the callback but does not modify the collection's items.
Use the `transform() <#backpack.Collection.transform>`_ method to
modify the collection.
:param callback: The callback to execute
:type callback: callable
:rtype: Collection |
def dump_ckan(m):
doc = MetapackDoc(cache=m.cache)
doc.new_section('Groups', 'Title Description Id Image_url'.split())
doc.new_section('Organizations', 'Title Description Id Image_url'.split())
c = RemoteCKAN(m.ckan_url, apikey=m.api_key)
for g in c.action.group_list(all_fields=True):
print(g.keys())
for o in c.action.organization_list(all_fields=True):
print(g.keys()) | Create a groups and organization file |
def update(self):
from ambry.orm.exc import NotFoundError
from requests.exceptions import ConnectionError, HTTPError
from boto.exception import S3ResponseError
d = {}
try:
for k, v in self.list(full=True):
if not v:
continue
d[v['vid']] = {
'vid': v['vid'],
'vname': v.get('vname'),
'id': v.get('id'),
'name': v.get('name')
}
self.data['list'] = d
except (NotFoundError, ConnectionError, S3ResponseError, HTTPError) as e:
raise RemoteAccessError("Failed to update {}: {}".format(self.short_name, e)) | Cache the list into the data section of the record |
def strengths_und_sign(W):
W = W.copy()
n = len(W)
np.fill_diagonal(W, 0)
Spos = np.sum(W * (W > 0), axis=0)
Sneg = np.sum(W * (W < 0), axis=0)
vpos = np.sum(W[W > 0])
vneg = np.sum(W[W < 0])
return Spos, Sneg, vpos, vneg | Node strength is the sum of weights of links connected to the node.
Parameters
----------
W : NxN np.ndarray
undirected connection matrix with positive and negative weights
Returns
-------
Spos : Nx1 np.ndarray
nodal strength of positive weights
Sneg : Nx1 np.ndarray
nodal strength of positive weights
vpos : float
total positive weight
vneg : float
total negative weight |
def inline_inputs(self):
self.text = texutils.inline(self.text,
os.path.dirname(self._filepath))
self._children = {} | Inline all input latex files references by this document. The
inlining is accomplished recursively. The document is modified
in place. |
def del_option(self, section, option):
if self.config.has_section(section):
if self.config.has_option(section, option):
self.config.remove_option(section, option)
return (True, self.config.options(section))
return (False, 'Option: ' + option + ' does not exist')
return (False, 'Section: ' + section + ' does not exist') | Deletes an option if the section and option exist |
def train_df(self, df):
aesthetics = sorted(set(self.aesthetics) & set(df.columns))
for ae in aesthetics:
self.train(df[ae]) | Train scale from a dataframe |
def _get_frame_result_type(result, objs):
if (result.blocks and (
any(isinstance(obj, ABCSparseDataFrame) for obj in objs))):
from pandas.core.sparse.api import SparseDataFrame
return SparseDataFrame
else:
return next(obj for obj in objs if not isinstance(obj,
ABCSparseDataFrame)) | return appropriate class of DataFrame-like concat
if all blocks are sparse, return SparseDataFrame
otherwise, return 1st obj |
def get(self, key):
uri = 'updates/job/{}'.format(key)
return self.make_request(method='GET', uri=uri) | Return the status from a job.
:param key: id of job
:type document: dict or list
:return: message with location of job
:rtype: dict
:raises Unauthorized: if API returns status 401
:raises Forbidden: if API returns status 403
:raises NotFound: if API returns status 404
:raises ApiError: if API returns other status |
def progress_updater(size, total):
current_task.update_state(
state=state('PROGRESS'),
meta=dict(size=size, total=total)
) | Progress reporter for checksum verification. |
def list_plugins(self):
vals = self.plugins.items()
return {x: y for x, y in vals} | List all of the plugins that have been registerd for the iotile program on this computer |
def calc_paired_insert_stats_save(in_bam, stat_file, nsample=1000000):
if utils.file_exists(stat_file):
with open(stat_file) as in_handle:
return yaml.safe_load(in_handle)
else:
stats = calc_paired_insert_stats(in_bam, nsample)
with open(stat_file, "w") as out_handle:
yaml.safe_dump(stats, out_handle, default_flow_style=False, allow_unicode=False)
return stats | Calculate paired stats, saving to a file for re-runs. |
def indices_within_segments(times, segment_files, ifo=None, segment_name=None):
veto_segs = segmentlist([])
indices = numpy.array([], dtype=numpy.uint32)
for veto_file in segment_files:
veto_segs += select_segments_by_definer(veto_file, segment_name, ifo)
veto_segs.coalesce()
start, end = segments_to_start_end(veto_segs)
if len(start) > 0:
idx = indices_within_times(times, start, end)
indices = numpy.union1d(indices, idx)
return indices, veto_segs.coalesce() | Return the list of indices that should be vetoed by the segments in the
list of veto_files.
Parameters
----------
times: numpy.ndarray of integer type
Array of gps start times
segment_files: string or list of strings
A string or list of strings that contain the path to xml files that
contain a segment table
ifo: string, optional
The ifo to retrieve segments for from the segment files
segment_name: str, optional
name of segment
Returns
-------
indices: numpy.ndarray
The array of index values within the segments
segmentlist:
The segment list corresponding to the selected time. |
def get_form(self, request, obj=None, **kwargs):
defaults = {}
if obj is None:
defaults['form'] = self.add_form
defaults.update(kwargs)
return super(SettingsAdmin, self).get_form(request, obj, **defaults) | Use special form during user creation |
def matrix_is_equivalent(X, Y):
return X is Y or (isinstance(X, Y.__class__) and X.shape == Y.shape and
np.sum((X != Y).sum()) == 0) | Checks matrix equivalence with numpy, scipy and pandas |
def update_firmware(self, firmware_information, force=False):
firmware_uri = "{}/firmware".format(self.data["uri"])
result = self._helper.update(firmware_information, firmware_uri, force=force)
self.refresh()
return result | Installs firmware to the member interconnects of a SAS Logical Interconnect.
Args:
firmware_information: Options to install firmware to a SAS Logical Interconnect.
force: If sets to true, the operation completes despite any problems with the network connectivy
or the erros on the resource itself.
Returns:
dict: SAS Logical Interconnect Firmware. |
def casperjs_command_kwargs():
kwargs = {
'stdout': subprocess.PIPE,
'stderr': subprocess.PIPE,
'universal_newlines': True
}
phantom_js_cmd = app_settings['PHANTOMJS_CMD']
if phantom_js_cmd:
path = '{0}:{1}'.format(
os.getenv('PATH', ''), os.path.dirname(phantom_js_cmd)
)
kwargs.update({'env': {'PATH': path}})
return kwargs | will construct kwargs for cmd |
def remap_hotkey(src, dst, suppress=True, trigger_on_release=False):
def handler():
active_modifiers = sorted(modifier for modifier, state in _listener.modifier_states.items() if state == 'allowed')
for modifier in active_modifiers:
release(modifier)
send(dst)
for modifier in reversed(active_modifiers):
press(modifier)
return False
return add_hotkey(src, handler, suppress=suppress, trigger_on_release=trigger_on_release) | Whenever the hotkey `src` is pressed, suppress it and send
`dst` instead.
Example:
remap('alt+w', 'ctrl+up') |
def call(self, name, *args, **kwargs):
if name in ('getTaskInfo', 'getTaskDescendants'):
kwargs['request'] = True
if kwargs:
kwargs['__starstar'] = True
args = args + (kwargs,)
payload = {'methodName': name, 'params': args}
self.calls.append(payload) | Add a new call to the list that we will submit to the server.
Similar to txkoji.Connection.call(), but this will store the call
for later instead of sending it now. |
def to_representation(self, value):
content_type = ContentType.objects.get_for_id(value)
return "_".join(content_type.natural_key()) | Convert to natural key. |
def prettify_xml(xml_root):
xml_string = etree.tostring(xml_root, encoding="utf-8", xml_declaration=True, pretty_print=True)
return get_unicode_str(xml_string) | Returns pretty-printed string representation of element tree. |
def get_holdings(self, account: SEPAAccount):
with self._get_dialog() as dialog:
hkwpd = self._find_highest_supported_command(HKWPD5, HKWPD6)
responses = self._fetch_with_touchdowns(
dialog,
lambda touchdown: hkwpd(
account=hkwpd._fields['account'].type.from_sepa_account(account),
touchdown_point=touchdown,
),
'HIWPD'
)
holdings = []
for resp in responses:
if type(resp.holdings) == bytes:
holding_str = resp.holdings.decode()
else:
holding_str = resp.holdings
mt535_lines = str.splitlines(holding_str)
del mt535_lines[0]
mt535 = MT535_Miniparser()
holdings.extend(mt535.parse(mt535_lines))
if not holdings:
logger.debug('No HIWPD response segment found - maybe account has no holdings?')
return holdings | Retrieve holdings of an account.
:param account: SEPAAccount to retrieve holdings for.
:return: List of Holding objects |
def nextValidComment(self, text, start=0):
m = min([self.lineComment(text, start),
self.blockComment(text, start)],
key=lambda m: m.start(0) if m else len(text))
return m | Return the next actual comment. |
def double_sha256(ba):
d1 = hashlib.sha256(ba)
d2 = hashlib.sha256()
d1.hexdigest()
d2.update(d1.digest())
return d2.hexdigest() | Perform two SHA256 operations on the input.
Args:
ba (bytes): data to hash.
Returns:
str: hash as a double digit hex string. |
def current_user_was_last_verifier(analysis):
verifiers = analysis.getVerificators()
return verifiers and verifiers[:-1] == api.get_current_user().getId() | Returns whether the current user was the last verifier or not |
def item_hist(list_):
dict_hist = {}
for item in list_:
if item not in dict_hist:
dict_hist[item] = 0
dict_hist[item] += 1
return dict_hist | counts the number of times each item appears in the dictionary |
def create_header(self):
try:
self.check_valid()
_header_list = []
for k,v in self.inputs.items():
if v is None:
return {self.__class__.__name__.replace('_','-'):None}
elif k == 'value':
_header_list.insert(0,str(v))
elif isinstance(v,bool):
if v is True:
_header_list.append(k)
else:
_header_list.append('%s=%s' % (k,str(v)))
return {self.__class__.__name__.replace('_','-'):'; '.join(_header_list)}
except Exception, e:
raise | return header dict |
def accept(self):
socket = Socket(self._llc, None)
socket._tco = self.llc.accept(self._tco)
return socket | Accept a connection. The socket must be bound to an address
and listening for connections. The return value is a new
socket object usable to send and receive data on the
connection. |
def get_all(self, page=None, per_page=None, include_totals=False):
params = {
'page': page,
'per_page': per_page,
'include_totals': str(include_totals).lower()
}
return self.client.get(self._url(), params=params) | Retrieves all resource servers
Args:
page (int, optional): The result's page number (zero based).
per_page (int, optional): The amount of entries per page.
include_totals (bool, optional): True if the query summary is
to be included in the result, False otherwise.
See: https://auth0.com/docs/api/management/v2#!/Resource_Servers/get_resource_servers |
def get_locale_hints():
lang, encoding = locale.getdefaultlocale()
if lang and '_' in lang:
lang3, _, lang2 = lang.partition('_')
else:
lang3 = None
lang2 = None
ll_s = [encoding, lang, lang2, lang3]
ll_s_unique = []
for ll in ll_s:
if ll:
ll = ll.lower()
if ll not in ll_s_unique:
ll_s_unique.append(ll)
return ll_s_unique | Get a list of locale hints,
guessed according to Python's default locale info. |
def login(self):
response = self.session.get(self.base_url + '/login_sid.lua', timeout=10)
xml = ET.fromstring(response.text)
if xml.find('SID').text == "0000000000000000":
challenge = xml.find('Challenge').text
url = self.base_url + "/login_sid.lua"
response = self.session.get(url, params={
"username": self.username,
"response": self.calculate_response(challenge, self.password),
}, timeout=10)
xml = ET.fromstring(response.text)
sid = xml.find('SID').text
if xml.find('SID').text == "0000000000000000":
blocktime = int(xml.find('BlockTime').text)
exc = Exception("Login failed, please wait {} seconds".format(
blocktime
))
exc.blocktime = blocktime
raise exc
self.sid = sid
return sid | Try to login and set the internal session id.
Please note:
- Any failed login resets all existing session ids, even of
other users.
- SIDs expire after some time |
def draw_medium(r, R, L, n=1, ax=None):
if ax is None:
ax = plt.gca()
for ru in _unwrap_to_layer(r, L, n):
c = plt.Circle(ru, radius=R, alpha=0.2)
ax.add_artist(c) | Draw circles representing circles in a two-dimensional periodic system.
Circles may be tiled up to a number of periods.
Parameters
----------
r: float array, shape (:, 2).
Set of points.
R: float
Circle radius.
L: float array, shape (2,)
System lengths.
n: integer.
Period to unwrap up to.
ax: matplotlib axes instance or None
Axes to draw circles onto. If `None`, use default axes.
Returns
-------
None |
def get_go_ntsets(self, go_fins):
nts = []
ntobj = namedtuple('NtGOFiles', 'hdr go_set, go_fin')
go_sets = self._init_go_sets(go_fins)
hdrs = [os.path.splitext(os.path.basename(f))[0] for f in go_fins]
assert len(go_fins) == len(go_sets)
assert len(go_fins) == len(hdrs)
for hdr, go_set, go_fin in zip(hdrs, go_sets, go_fins):
nts.append(ntobj(hdr=hdr, go_set=go_set, go_fin=go_fin))
return nts | For each file containing GOs, extract GO IDs, store filename and header. |
def update_attribute_toolbar(self, key=None):
if key is None:
key = self.actions.cursor
post_command_event(self, self.ToolbarUpdateMsg, key=key,
attr=self.code_array.cell_attributes[key]) | Updates the attribute toolbar
Parameters
----------
key: 3-tuple of Integer, defaults to current cell
\tCell to which attributes the attributes toolbar is updated |
def getTaskDescendents(self, task_id, **kwargs):
kwargs['request'] = True
data = yield self.call('getTaskDescendents', task_id, **kwargs)
tasks = []
for tdata in data[str(task_id)]:
task = Task.fromDict(tdata)
task.connection = self
tasks.append(task)
defer.returnValue(tasks) | Load all information about a task's descendents into Task classes.
Calls "getTaskDescendents" XML-RPC (with request=True to get the full
information.)
:param task_id: ``int``, for example 12345, parent task ID
:returns: deferred that when fired returns a list of Task (Munch,
dict-like) objects representing Koji tasks. |
def merge_cameras(self):
combined = CaseInsensitiveDict({})
for sync in self.sync:
combined = merge_dicts(combined, self.sync[sync].cameras)
return combined | Merge all sync camera dicts into one. |
def ynticks(self, nticks, index=1):
self.layout['yaxis' + str(index)]['nticks'] = nticks
return self | Set the number of ticks. |
def generate_subsets(self, sz, overlap=0.8, subsets=2):
overlap_sz = int(math.floor(overlap * sz))
unique_sz = sz - overlap_sz
total_unique_sz = unique_sz * subsets
total_sz = overlap_sz + total_unique_sz
if total_sz > len(self.names):
msg = 'insufficient names for requested size and overlap'
raise ValueError(msg)
sset = random.sample(self.names, total_sz)
sset_overlap, sset_unique = sset[:overlap_sz], sset[overlap_sz:]
assert len(sset_unique) == subsets * unique_sz
uniques = (sset_unique[p * unique_sz: (p + 1) * unique_sz]
for p in range(subsets))
return tuple(sset_overlap + u for u in uniques) | Return random subsets with nonempty intersection.
The random subsets are of specified size. If an element is
common to two subsets, then it is common to all subsets.
This overlap is controlled by a parameter.
:param sz: size of subsets to generate
:param overlap: size of the intersection, as fraction of the
subset length
:param subsets: number of subsets to generate
:raises ValueError: if there aren't sufficiently many names
in the list to satisfy the request; more precisely,
raises if (1 - subsets) * floor(overlap * sz)
+ subsets * sz > len(self.names).
:return: tuple of subsets |
def _find_model(self, constructor, table_name, constraints=None, *, columns=None, order_by=None):
data = self.find(table_name, constraints, columns=columns, order_by=order_by)
return constructor(data) if data else None | Calls DataAccess.find and passes the results to the given constructor. |
def add(self, *number):
return self._format_result(sum(
[int(n) for n in number])) | Adds all parameters interpreted as integers |
def apply(key, value):
path = __opts__['conf_file']
if os.path.isdir(path):
path = os.path.join(path, 'master')
data = values()
data[key] = value
with salt.utils.files.fopen(path, 'w+') as fp_:
salt.utils.yaml.safe_dump(data, default_flow_style=False) | Set a single key
.. note::
This will strip comments from your config file |
def dynamodb_autoscaling_policy(tables):
return Policy(
Statement=[
Statement(
Effect=Allow,
Resource=dynamodb_arns(tables),
Action=[
dynamodb.DescribeTable,
dynamodb.UpdateTable,
]
),
Statement(
Effect=Allow,
Resource=['*'],
Action=[
cloudwatch.PutMetricAlarm,
cloudwatch.DescribeAlarms,
cloudwatch.GetMetricStatistics,
cloudwatch.SetAlarmState,
cloudwatch.DeleteAlarms,
]
),
]
) | Policy to allow AutoScaling a list of DynamoDB tables. |
def list_entrypoints(entry_point):
found_entry_points = {}
for dist in working_set:
entry_map = dist.get_entry_map()
for group_name, entry_points in entry_map.items():
if entry_point is None and \
not group_name.startswith('invenio'):
continue
if entry_point is not None and \
entry_point != group_name:
continue
if group_name not in found_entry_points:
found_entry_points[group_name] = []
for ep in entry_points.values():
found_entry_points[group_name].append(str(ep))
for ep_group in sorted(found_entry_points.keys()):
click.secho('{0}'.format(ep_group), fg='green')
for ep in sorted(found_entry_points[ep_group]):
click.echo(' {0}'.format(ep)) | List defined entry points. |
def check(cls):
attribs = [
'app_version',
'app_name',
'config_file_path',
'config_sep_str',
]
for attrib in attribs:
if getattr(cls, attrib) == 'not_set':
raise IncompleteSetupError(f'elib_config setup is incomplete; missing: {attrib}') | Verifies that all necessary values for the package to be used have been provided
:raises: `elib_config._exc.IncompleteSetupError` |
def set_cache_token(self, token_data):
if self.conn is None:
raise CacheException('Redis is not connected')
token = token_data['auth_token']
token_expires = token_data['expires_at']
roles = token_data['roles']
try:
datetime_object = datetime.strptime(
token_expires, '%Y-%m-%dT%H:%M:%S.%fZ')
except ValueError:
datetime_object = datetime.strptime(
token_expires, '%Y-%m-%dT%H:%M:%SZ')
ttl = (datetime.utcnow().now() - datetime_object)
token_data = json.dumps({
'expires_at': token_expires,
'roles': roles,
'user': token_data['user']
})
self.conn.set(token, token_data, ex=ttl.seconds) | Set Token with data in Redis |
def path(self):
if len(self.heads) == 1:
return _fmt_mfs_path(self.heads.keys()[0], self.heads.values()[0])
else:
return "(" + "|".join(
_fmt_mfs_path(k, v) for (k, v) in self.heads.items()
) + ")" | The path attribute returns a stringified, concise representation of
the MultiFieldSelector. It can be reversed by the ``from_path``
constructor. |
def create_generic_instances(self, instances):
generic_instances = []
for instance in instances:
transformed_instance = self._create_kube_dns_instance(instance)
generic_instances.append(transformed_instance)
return generic_instances | Transform each Kube DNS instance into a OpenMetricsBaseCheck instance |
def get_revocation_time(self):
if self.revoked is False:
return
if timezone.is_aware(self.revoked_date):
return timezone.make_naive(self.revoked_date, pytz.utc)
return self.revoked_date | Get the revocation time as naive datetime.
Note that this method is only used by cryptography>=2.4. |
def complex_validates(validate_rule):
ref_dict = {
}
for column_names, predicate_refs in validate_rule.items():
for column_name in _to_tuple(column_names):
ref_dict[column_name] = \
ref_dict.get(column_name, tuple()) + _normalize_predicate_refs(predicate_refs)
return validates(*ref_dict.keys())(
lambda self, name, value: _validate_handler(name, value, ref_dict[name])) | Quickly setup attributes validation by one-time, based on `sqlalchemy.orm.validates`.
Don't like `sqlalchemy.orm.validates`, you don't need create many model method,
as long as pass formatted validate rule.
(Cause of SQLAlchemy's validate mechanism, you need assignment this funciton's return value
to a model property.)
For simplicity, complex_validates don't support `include_removes` and `include_backrefs` parameters
that in `sqlalchemy.orm.validates`.
And we don't recommend you use this function multiple times in one model.
Because this will bring many problems, like:
1. Multiple complex_validates's execute order was decide by it's model property name, and by reversed order.
eg. predicates in `validator1 = complex_validates(...)`
will be executed **AFTER** predicates in `validator2 = complex_validates(...)`
2. If you try to validate the same attribute in two (or more) complex_validates, only one of complex_validates
will be execute. (May be this is a bug of SQLAlchemy?)
`complex_validates` was currently based on `sqlalchemy.orm.validates`, so it is difficult to solve these problems.
May be we can try to use `AttributeEvents` directly in further, to provide more reliable function.
Rule Format
-----------
{
column_name: predicate # basic format
(column_name2, column_name3): predicate # you can specify multiple column_names to given predicates
column_name4: (predicate, predicate2) # you can specify multiple predicates to given column_names
column_name5: [(predicate, arg1, ... argN)] # and you can specify what arguments should pass to predicate
# when it doing validate
(column_name6, column_name7): [(predicate, arg1, ... argN), predicate2] # another example
}
Notice: If you want pass arguments to predicate, you must wrap whole command by another list or tuple.
Otherwise, we will determine the argument as another predicate.
So, this is wrong: { column_name: (predicate, arg) }
this is right: { column_name: [(predicate, arg)] }
Predicate
---------
There's some `predefined_predicates`, you can just reference its name in validate rule.
{column_name: ['trans_upper']}
Or you can pass your own predicate function to the rule, like this:
def custom_predicate(value):
return value_is_legal # return True or False for valid or invalid value
{column_name: [custom_predicate]}
If you want change the value when doing validate, return an `dict(value=new_value)` instead of boolean
{column_name: lambda value: dict(value = value * 2)} # And you see, we can use lambda as a predicate.
And the predicate can receive extra arguments, that passes in rule:
def multiple(value, target_multiple):
return dict(value= value * target_multiple)
{column_name: (multiple, 10)}
Complete Example
----------------
class People(db.Model):
name = Column(String(100))
age = Column(Integer)
IQ = Column(Integer)
has_lover = Column(Boolean)
validator = complex_validates({
'name': [('min_length', 1), ('max_length', 100)],
('age', 'IQ'): [('min', 0)],
'has_lover': lambda value: return !value # hate you!
}) |
def subclass(cls, t):
t.doc = None
t.terms = []
t.__class__ = SectionTerm
return t | Change a term into a Section Term |
def calc_one_vert_gauss(one_vert, xyz=None, std=None):
trans = empty(xyz.shape[0])
for i, one_xyz in enumerate(xyz):
trans[i] = gauss(norm(one_vert - one_xyz), std)
return trans | Calculate how many electrodes influence one vertex, using a Gaussian
function.
Parameters
----------
one_vert : ndarray
vector of xyz position of a vertex
xyz : ndarray
nChan X 3 with the position of all the channels
std : float
distance in mm of the Gaussian kernel
Returns
-------
ndarray
one vector with values for one vertex |
def data(self, data: numpy.ndarray) -> None:
self.__data_item.set_data(numpy.copy(data)) | Set the data.
:param data: A numpy ndarray.
.. versionadded:: 1.0
Scriptable: Yes |
def node_scale_root_height(self, treeheight=1):
ctree = self._ttree.copy()
_height = ctree.treenode.height
for node in ctree.treenode.traverse():
node.dist = (node.dist / _height) * treeheight
ctree._coords.update()
return ctree | Returns a toytree copy with all nodes scaled so that the root
height equals the value entered for treeheight. |
def random_id(size=8, chars=string.ascii_letters + string.digits):
return ''.join(random.choice(chars) for _ in range(size)) | Generates a random string of given size from the given chars.
@param size: The size of the random string.
@param chars: Constituent pool of characters to draw random characters from.
@type size: number
@type chars: string
@rtype: string
@return: The string of random characters. |
def arguments(function, extra_arguments=0):
if not hasattr(function, '__code__'):
return ()
return function.__code__.co_varnames[:function.__code__.co_argcount + extra_arguments] | Returns the name of all arguments a function takes |
def _build_verb_statement_mapping():
path_this = os.path.dirname(os.path.abspath(__file__))
map_path = os.path.join(path_this, 'isi_verb_to_indra_statement_type.tsv')
with open(map_path, 'r') as f:
first_line = True
verb_to_statement_type = {}
for line in f:
if not first_line:
line = line[:-1]
tokens = line.split('\t')
if len(tokens) == 2 and len(tokens[1]) > 0:
verb = tokens[0]
s_type = tokens[1]
try:
statement_class = getattr(ist, s_type)
verb_to_statement_type[verb] = statement_class
except Exception:
pass
else:
first_line = False
return verb_to_statement_type | Build the mapping between ISI verb strings and INDRA statement classes.
Looks up the INDRA statement class name, if any, in a resource file,
and resolves this class name to a class.
Returns
-------
verb_to_statement_type : dict
Dictionary mapping verb name to an INDRA statment class |
def decimal_entry(self, prompt, message=None, min=None, max=None, rofi_args=None, **kwargs):
if (min is not None) and (max is not None) and not (max > min):
raise ValueError("Maximum limit has to be more than the minimum limit.")
def decimal_validator(text):
error = None
try:
value = Decimal(text)
except InvalidOperation:
return None, "Please enter a decimal value."
if (min is not None) and (value < min):
return None, "The minimum allowable value is {0}.".format(min)
if (max is not None) and (value > max):
return None, "The maximum allowable value is {0}.".format(max)
return value, None
return self.generic_entry(prompt, decimal_validator, message, rofi_args, **kwargs) | Prompt the user to enter a decimal number.
Parameters
----------
prompt: string
Prompt to display to the user.
message: string, optional
Message to display under the entry line.
min, max: Decimal, optional
Minimum and maximum values to allow. If None, no limit is imposed.
Returns
-------
Decimal, or None if the dialog is cancelled. |
def validate(options):
try:
if options.backends.index('modelinstance') > options.backends.index('model'):
raise Exception("Metadata backend 'modelinstance' must come before 'model' backend")
except ValueError:
raise Exception("Metadata backend 'modelinstance' must be installed in order to use 'model' backend") | Validates the application of this backend to a given metadata |
def to_phalf_from_pfull(arr, val_toa=0, val_sfc=0):
phalf = np.zeros((arr.shape[0] + 1, arr.shape[1], arr.shape[2]))
phalf[0] = val_toa
phalf[-1] = val_sfc
phalf[1:-1] = 0.5*(arr[:-1] + arr[1:])
return phalf | Compute data at half pressure levels from values at full levels.
Could be the pressure array itself, but it could also be any other data
defined at pressure levels. Requires specification of values at surface
and top of atmosphere. |
def alterar(self, id_model, id_brand, name):
if not is_valid_int_param(id_model):
raise InvalidParameterError(
u'The identifier of Model is invalid or was not informed.')
model_map = dict()
model_map['name'] = name
model_map['id_brand'] = id_brand
url = 'model/' + str(id_model) + '/'
code, xml = self.submit({'model': model_map}, 'PUT', url)
return self.response(code, xml) | Change Model from by the identifier.
:param id_model: Identifier of the Model. Integer value and greater than zero.
:param id_brand: Identifier of the Brand. Integer value and greater than zero.
:param name: Model name. String with a minimum 3 and maximum of 100 characters
:return: None
:raise InvalidParameterError: The identifier of Model, Brand or name is null and invalid.
:raise MarcaNaoExisteError: Brand not registered.
:raise ModeloEquipamentoNaoExisteError: Model not registered.
:raise NomeMarcaModeloDuplicadoError: There is already a registered Model with the value of name and brand.
:raise DataBaseError: Networkapi failed to access the database.
:raise XMLError: Networkapi failed to generate the XML response |
def get_residue_mapping(self):
if len(self.sequence_ids) == 2:
if not self.alignment_output:
self.align()
assert(self.alignment_output)
return self._create_residue_map(self._get_alignment_lines(), self.sequence_ids[1], self.sequence_ids[2])
else:
return None | Returns a mapping between the sequences ONLY IF there are exactly two. This restriction makes the code much simpler. |
def relpath(self):
here = os.path.abspath(os.path.curdir)
relpath = os.path.relpath(self.fpath, here)
return relpath | Determine the relative path to this repository
Returns:
str: relative path to this repository |
def configure_root():
root_logger = logging.getLogger()
for hdlr in root_logger.handlers:
if isinstance(hdlr, logging.StreamHandler):
root_logger.removeHandler(hdlr)
root_logger.setLevel(ROOT_LOG_LEVEL)
hdlr = logging.StreamHandler(ROOT_LOG_STREAM)
formatter = colorlog.ColoredFormatter(
'%(purple)s%(name)-10s %(log_color)s%(levelname)-8s%(reset)s %(white)s%(message)s',
reset=True,
log_colors={
'DEBUG': 'cyan',
'INFO': 'green',
'WARNING': 'yellow',
'ERROR': 'red',
'CRITICAL': 'red,bg_white',
}
)
hdlr.setFormatter(formatter)
root_logger.addHandler(hdlr) | Configure the root logger. |
def represent_float_as_str(value):
if not isinstance(value, float):
raise GraphQLInvalidArgumentError(u'Attempting to represent a non-float as a float: '
u'{}'.format(value))
with decimal.localcontext() as ctx:
ctx.prec = 20
return u'{:f}'.format(decimal.Decimal(value)) | Represent a float as a string without losing precision. |
def to_serializable_repr(x):
t = type(x)
if isinstance(x, list):
return list_to_serializable_repr(x)
elif t in (set, tuple):
return {
"__class__": class_to_serializable_representation(t),
"__value__": list_to_serializable_repr(x)
}
elif isinstance(x, dict):
return dict_to_serializable_repr(x)
elif isinstance(x, (FunctionType, BuiltinFunctionType)):
return function_to_serializable_representation(x)
elif type(x) is type:
return class_to_serializable_representation(x)
else:
state_dictionary = to_serializable_repr(to_dict(x))
state_dictionary["__class__"] = class_to_serializable_representation(
x.__class__)
return state_dictionary | Convert an instance of Serializable or a primitive collection containing
such instances into serializable types. |
def handle_error(self, error, response):
query_params = {"error": error.error}
query = urlencode(query_params)
location = "%s?%s" % (self.client.redirect_uri, query)
response.status_code = 302
response.body = ""
response.add_header("Location", location)
return response | Redirects the client in case an error in the auth process occurred. |
def filter_callbacks(cls, client, event_data):
for event in cls.filter_events(client, event_data):
for cb in event.callbacks:
yield cb | Filter registered events and yield all of their callbacks. |
def readCache(self, filename):
with open(filename, 'rb') as f:
self.modules = pickle.load(f) | Load the graph from a cache file. |
def upload(self, url, filename, data=None, formname=None,
otherfields=()):
if data is None:
data = open(filename, 'rb')
self._upbuffer = StringIO.StringIO(get_upload_form(filename, data,
formname,
otherfields))
ulheaders = self.stdheaders.copy()
ulheaders['Content-Type'] = 'multipart/form-data; boundary=' + BND
ulheaders['Content-Length'] = len(uploadforms[filename])
self.ulsize = len(uploadforms[filename])
webclient = self.dupe()
webclient.request('POST', url, self._upbuffer, ulheaders)
rsp = webclient.getresponse()
try:
del uploadforms[filename]
except KeyError:
pass
self.rspstatus = rsp.status
if rsp.status != 200:
raise Exception('Unexpected response in file upload: ' +
rsp.read())
return rsp.read() | Upload a file to the url
:param url:
:param filename: The name of the file
:param data: A file object or data to use rather than reading from
the file.
:return: |
def _humanize_bytes(num_bytes, precision=1):
if num_bytes == 0:
return 'no bytes'
if num_bytes == 1:
return '1 byte'
factored_bytes = 0
factor_suffix = 'bytes'
for factor, suffix in ABBREVS:
if num_bytes >= factor:
factored_bytes = num_bytes / factor
factor_suffix = suffix
break
if factored_bytes == 1:
precision = 0
return '{:.{prec}f} {}'.format(factored_bytes, factor_suffix,
prec=precision) | Return a humanized string representation of a number of num_bytes.
from:
http://code.activestate.com/recipes/
577081-humanized-representation-of-a-number-of-num_bytes/
Assumes `from __future__ import division`.
>>> humanize_bytes(1)
'1 byte'
>>> humanize_bytes(1024)
'1.0 kB'
>>> humanize_bytes(1024*123)
'123.0 kB'
>>> humanize_bytes(1024*12342)
'12.1 MB'
>>> humanize_bytes(1024*12342,2)
'12.05 MB'
>>> humanize_bytes(1024*1234,2)
'1.21 MB'
>>> humanize_bytes(1024*1234*1111,2)
'1.31 GB'
>>> humanize_bytes(1024*1234*1111,1)
'1.3 GB' |
def path_helper(self, operations, view, app=None, **kwargs):
rule = self._rule_for_view(view, app=app)
operations.update(yaml_utils.load_operations_from_docstring(view.__doc__))
if hasattr(view, 'view_class') and issubclass(view.view_class, MethodView):
for method in view.methods:
if method in rule.methods:
method_name = method.lower()
method = getattr(view.view_class, method_name)
operations[method_name] = yaml_utils.load_yaml_from_docstring(method.__doc__)
return self.flaskpath2openapi(rule.rule) | Path helper that allows passing a Flask view function. |
def calc_normal_std_he_forward(inmaps, outmaps, kernel=(1, 1)):
r
return np.sqrt(2. / (np.prod(kernel) * inmaps)) | r"""Calculates the standard deviation proposed by He et al.
.. math::
\sigma = \sqrt{\frac{2}{NK}}
Args:
inmaps (int): Map size of an input Variable, :math:`N`.
outmaps (int): Map size of an output Variable, :math:`M`.
kernel (:obj:`tuple` of :obj:`int`): Convolution kernel spatial shape.
In above definition, :math:`K` is the product of shape dimensions.
In Affine, the default value should be used.
Example:
.. code-block:: python
import nnabla as nn
import nnabla.parametric_functions as PF
import nnabla.initializer as I
x = nn.Variable([60,1,28,28])
s = I.calc_normal_std_he_forward(x.shape[1],64)
w = I.NormalInitializer(s)
b = I.ConstantInitializer(0)
h = PF.convolution(x, 64, [3, 3], w_init=w, b_init=b, pad=[1, 1], name='conv')
References:
* `He, et al. Delving Deep into Rectifiers: Surpassing Human-Level
Performance on ImageNet Classification.
<https://arxiv.org/abs/1502.01852>`_ |
def get_path(self, i):
index = (i - 1) // 2
reverse = (i - 1) % 2
path = self.paths[index]
if reverse:
return path.reversed()
else:
return path | Returns the path corresponding to the node i. |
def from_sample(sample):
upload_config = sample.get("upload")
if upload_config:
approach = _approaches[upload_config.get("method", "filesystem")]
for finfo in _get_files(sample):
approach.update_file(finfo, sample, upload_config)
return [[sample]] | Upload results of processing from an analysis pipeline sample. |
def _do_config_proposal_vote(args):
signer = _read_signer(args.key)
rest_client = RestClient(args.url)
proposals = _get_proposals(rest_client)
proposal = None
for candidate in proposals.candidates:
if candidate.proposal_id == args.proposal_id:
proposal = candidate
break
if proposal is None:
raise CliException('No proposal exists with the given id')
for vote_record in proposal.votes:
if vote_record.public_key == signer.get_public_key().as_hex():
raise CliException(
'A vote has already been recorded with this signing key')
txn = _create_vote_txn(
signer,
args.proposal_id,
proposal.proposal.setting,
args.vote_value)
batch = _create_batch(signer, [txn])
batch_list = BatchList(batches=[batch])
rest_client.send_batches(batch_list) | Executes the 'proposal vote' subcommand. Given a key file, a proposal
id and a vote value, it generates a batch of sawtooth_settings transactions
in a BatchList instance. The BatchList is file or submitted to a
validator. |
def update(accountable, options):
issue = accountable.issue_update(options)
headers = issue.keys()
rows = [headers, [v for k, v in issue.items()]]
print_table(SingleTable(rows)) | Update an existing issue. |
def is_default(name=None, index=None):
if not is_configured():
raise JutException('No configurations available, please run `jut config add`')
count = 1
for configuration in _CONFIG.sections():
if index != None:
if _CONFIG.has_option(configuration, 'default') and count == index:
return True
if name != None:
if _CONFIG.has_option(configuration, 'default') and configuration == name:
return True
count += 1
return False | returns True if the specified configuration is the default one |
def __store_other(self, o, method_name, member):
self.__store__[ method_name ] = eval( "o." + method_name )
self.__store__[ method_name[0].lower() + method_name[1:] ] = eval( "o." + method_name ) | Stores a reference to an attribute on o
:param mixed o: Some object
:param str method_name: The name of the attribute
:param mixed member: The attribute |
def files_write(self, path, file, offset=0, create=False, truncate=False,
count=None, **kwargs):
opts = {"offset": offset, "create": create, "truncate": truncate}
if count is not None:
opts["count"] = count
kwargs.setdefault("opts", opts)
args = (path,)
body, headers = multipart.stream_files(file, self.chunk_size)
return self._client.request('/files/write', args,
data=body, headers=headers, **kwargs) | Writes to a mutable file in the MFS.
.. code-block:: python
>>> c.files_write("/test/file", io.BytesIO(b"hi"), create=True)
b''
Parameters
----------
path : str
Filepath within the MFS
file : io.RawIOBase
IO stream object with data that should be written
offset : int
Byte offset at which to begin writing at
create : bool
Create the file if it does not exist
truncate : bool
Truncate the file to size zero before writing
count : int
Maximum number of bytes to read from the source ``file`` |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.