code stringlengths 51 2.38k | docstring stringlengths 4 15.2k |
|---|---|
async def parse_get_revoc_reg_def_response(get_revoc_ref_def_response: str) -> (str, str):
logger = logging.getLogger(__name__)
logger.debug("parse_get_revoc_reg_def_response: >>> get_revoc_ref_def_response: %r", get_revoc_ref_def_response)
if not hasattr(parse_get_revoc_reg_def_response, "cb"):
log... | Parse a GET_REVOC_REG_DEF response to get Revocation Registry Definition in the format compatible with Anoncreds API.
:param get_revoc_ref_def_response: response of GET_REVOC_REG_DEF request.
:return: Revocation Registry Definition Id and Revocation Registry Definition json.
{
"id": string - ID... |
def update(self):
    """Destroy and recreate the container with the new settings.

    Saves the console/aux handles across the rebuild and restarts the
    container if it was running before.
    """
    saved_console = self.console
    saved_aux = self.aux
    previous_state = yield from self._get_container_state()
    yield from self.reset()
    yield from self.create()
    # Restore handles that reset()/create() clobbered.
    self.console = saved_console
    self.aux = saved_aux
    if previous_state == "running":
        yield from self.start()
def new_method_return(self) :
"creates a new DBUS.MESSAGE_TYPE_METHOD_RETURN that is a reply to this Message."
result = dbus.dbus_message_new_method_return(self._dbobj)
if result == None :
raise CallFailed("dbus_message_new_method_return")
return \
type(self)(resu... | creates a new DBUS.MESSAGE_TYPE_METHOD_RETURN that is a reply to this Message. |
def hull_moving_average(data, period):
    """Hull Moving Average.

    Formula:
    HMA = WMA(2*WMA(n/2) - WMA(n)), sqrt(n)
    """
    catch_errors.check_for_period_error(data, period)
    half_period = int(period / 2)
    smoothing_period = int(np.sqrt(period))
    return wma(2 * wma(data, half_period) - wma(data, period), smoothing_period)
def get_similar_items(self, items=None, k=10, verbose=False):
if items is None:
get_all_items = True
items = _SArray()
else:
get_all_items = False
if isinstance(items, list):
items = _SArray(items)
def check_type(arg, arg_name, required_typ... | Get the k most similar items for each item in items.
Each type of recommender has its own model for the similarity
between items. For example, the item_similarity_recommender will
return the most similar items according to the user-chosen
similarity; the factorization_recommender will r... |
def create_disk(name, size):
ret = False
cmd = 'vmctl create {0} -s {1}'.format(name, size)
result = __salt__['cmd.run_all'](cmd,
output_loglevel='trace',
python_shell=False)
if result['retcode'] == 0:
ret = True
else:... | Create a VMM disk with the specified `name` and `size`.
size:
Size in megabytes, or use a specifier such as M, G, T.
CLI Example:
.. code-block:: bash
salt '*' vmctl.create_disk /path/to/disk.img size=10G |
def _sethex(self, hexstring):
hexstring = tidy_input_string(hexstring)
hexstring = hexstring.replace('0x', '')
length = len(hexstring)
if length % 2:
hexstring += '0'
try:
try:
data = bytearray.fromhex(hexstring)
except TypeErro... | Reset the bitstring to have the value given in hexstring. |
def get_encodings():
encodings = [__salt_system_encoding__]
try:
sys_enc = sys.getdefaultencoding()
except ValueError:
sys_enc = None
if sys_enc and sys_enc not in encodings:
encodings.append(sys_enc)
for enc in ['utf-8', 'latin-1']:
if enc not in encodings:
... | return a list of string encodings to try |
def decrement(name, tags=None):
    """Function decorator for decrementing a statsd stat whenever
    a function is invoked.

    >>> from statsdecor.decorators import decrement
    >>> @decrement('my.metric')
    >>> def my_func():
    >>>     pass
    """
    def wrap(f):
        @wraps(f)
        def decorator(*args, **kwargs):
            # Create the client before invoking f, matching the original
            # side-effect ordering.
            stats_client = client()
            result = f(*args, **kwargs)
            stats_client.decr(name, tags=tags)
            return result
        return decorator
    return wrap
def get_credit_notes_per_page(self, per_page=1000, page=1, params=None):
    """Get credit notes per page

    :param per_page: How many objects per page. Default: 1000
    :param page: Which page. Default: 1
    :param params: Search parameters. Default: {}
    :return: list
    """
    return self._get_resource_per_page(
        resource=CREDIT_NOTES,
        per_page=per_page,
        page=page,
        params=params,
    )
def list_team_codes():
cleanlist = sorted(TEAM_DATA, key=lambda k: (k["league"]["name"], k["code"]))
leaguenames = sorted(list(set([team["league"]["name"] for team in cleanlist])))
for league in leaguenames:
teams = [team for team in cleanlist if team["league"]["name"] == league]
click.secho... | List team names in alphabetical order of team ID, per league. |
def on_purchase_completed(self, mapping={'payload': 'payload','name':'name','status':'status','token':'token'}, convert={}, default={}):
def decorator(f):
self._intent_view_funcs['Connections.Response'] = f
self._intent_mappings['Connections.Response'] = mapping
self._intent_... | Decorator routes an Connections.Response to the wrapped function.
Request is sent when Alexa completes the purchase flow.
See https://developer.amazon.com/docs/in-skill-purchase/add-isps-to-a-skill.html#handle-results
The wrapped view function may accept parameters from the Request.
... |
def related_to(self):
params = []
constraints = self.in_constraints
if self.is_constraint is not None:
constraints.append(self.is_constraint)
for constraint in constraints:
for var in constraint._vars:
param = var.get_parameter()
if... | returns a list of all parameters that are either constrained by or constrain this parameter |
def has_port_by_ref(self, port_ref):
    """Check if this component has a port by the given reference to a CORBA
    PortService object.

    :param port_ref: Reference to a CORBA PortService object.
    :return: True if a matching port exists, False otherwise.
    """
    with self._mutex:
        # Bug fix: the original called self.get_port_by_ref(self, port_ref),
        # passing self both implicitly and explicitly, which raises
        # TypeError on every invocation.
        if self.get_port_by_ref(port_ref):
            return True
    return False
def assure_image(fnc):
    """Converts a image ID passed as the 'image' parameter to a image object."""
    @wraps(fnc)
    def _wrapped(self, img, *args, **kwargs):
        # Resolve an ID to an Image object; pass Image instances through.
        resolved = img if isinstance(img, Image) else self._manager.get(img)
        return fnc(self, resolved, *args, **kwargs)
    return _wrapped
def is_valid(self, name=None, debug=False):
valid_tags = self.action_tree
invalid = False
for item in self.current_tree:
try:
if item in valid_tags or self.ALL_TAGS in valid_tags:
valid_tags = valid_tags[item if item in valid_tags else self.ALL_TAG... | Check to see if the current xml path is to be processed. |
def update_status(self):
    """Gets the current status of this task and returns a
    new task object.

    :raises TaskRunFailed: fail to update task status
    """
    response = self.make_request(TaskRunFailed, href=self.href)
    return Task(response)
def nmb_weights_hidden(self) -> int:
    """Number of hidden weights.

    >>> from hydpy import ANN
    >>> ann = ANN(None)
    >>> ann(nmb_inputs=2, nmb_neurons=(4, 3, 2), nmb_outputs=3)
    >>> ann.nmb_weights_hidden
    18
    """
    return sum(
        self.nmb_neurons[idx] * self.nmb_neurons[idx + 1]
        for idx in range(self.nmb_layers - 1)
    )
def get_sampleS(self, res, DS=None, resMode='abs',
ind=None, offsetIn=0., Out='(X,Y,Z)', Ind=None):
if Ind is not None:
assert self.dgeom['Multi']
kwdargs = dict(DS=DS, dSMode=resMode, ind=ind, DIn=offsetIn,
VIn=self.dgeom['VIn'], VType=self.Id.Type... | Sample, with resolution res, the surface defined by DS or ind
An optionnal offset perpendicular to the surface can be used
(offsetIn>0 => inwards)
Parameters
----------
res : float / list of 2 floats
Desired resolution of the surfacic sample
fl... |
def rolling_window(a, axis, window, center, fill_value):
pads = [(0, 0) for s in a.shape]
if center:
start = int(window / 2)
end = window - 1 - start
pads[axis] = (start, end)
else:
pads[axis] = (window - 1, 0)
a = np.pad(a, pads, mode='constant', constant_values=fill_val... | rolling window with padding. |
def check_dimensions(self, dataset):
results = []
required_ctx = TestCtx(BaseCheck.HIGH, 'All geophysical variables are timeseries-profile-orthogonal feature types')
message = '{} must be a valid profile-orthogonal feature type. It must have dimensions of (station, time, z).'
message += ... | Checks that the feature types of this dataset are consistent with a timeseries-profile-orthogonal dataset.
:param netCDF4.Dataset dataset: An open netCDF dataset |
def tabLayout(self):
    """For all tabs, specify the number of buttons in a row."""
    window = self.childWindow
    window.column += 1
    # Wrap to the next row once the row is full.
    if window.column > Layout.BUTTONS_NUMBER:
        window.column = 0
        window.row += 1
def add_mongo_config_simple(app, connection_string, collection_name):
split_string = connection_string.split(":")
config = {"host": "localhost", "port": 27017, "db": "sacred"}
if len(split_string) > 0 and len(split_string[-1]) > 0:
config["db"] = split_string[-1]
if len(split_string) > 1:
... | Configure the app to use MongoDB.
:param app: Flask Application
:type app: Flask
:param connection_string: in format host:port:database or database
(default: sacred)
:type connection_string: str
:param collection_name: Name of the collection
:type collection_name: str |
def draw(self):
    """Render and draw the world and robots."""
    from calysto.display import display, clear_output
    rendered = self.render()
    clear_output(wait=True)
    display(rendered)
def unnest_collection(collection, df_list):
for item in collection['link']['item']:
if item['class'] == 'dataset':
df_list.append(Dataset.read(item['href']).write('dataframe'))
elif item['class'] == 'collection':
nested_collection = request(item['href'])
unnest_co... | Unnest collection structure extracting all its datasets and converting \
them to Pandas Dataframes.
Args:
collection (OrderedDict): data in JSON-stat format, previously \
deserialized to a python object by \
json.l... |
def handle_existing_user(self, provider, user, access, info):
    """Login user and redirect."""
    login(self.request, user)
    target = self.get_login_redirect(provider, user, access)
    return redirect(target)
def calcChebyshev(coeffs, validDomain, freqs):
logger = logging.getLogger(__name__)
domain = (validDomain[1] - validDomain[0])[0]
bins = -1 + 2* n.array([ (freqs[i]-validDomain[0,i])/domain for i in range(len(freqs))])
ncoeffs = len(coeffs[0])/2
rr = n.array([n.polynomial.chebyshev.chebval(bins[i], ... | Given a set of coefficients,
this method evaluates a Chebyshev approximation.
Used for CASA bandpass reading.
input coeffs and freqs are numpy arrays |
def install(*pkgs, **kwargs):
attributes = kwargs.get('attributes', False)
if not pkgs:
return "Plese specify a package or packages to upgrade"
cmd = _quietnix()
cmd.append('--install')
if kwargs.get('attributes', False):
cmd.extend(_zip_flatten('--attr', pkgs))
else:
cmd... | Installs a single or multiple packages via nix
:type pkgs: list(str)
:param pkgs:
packages to update
:param bool attributes:
Pass the list of packages or single package as attribues, not package names.
default: False
:return: Installed packages. Example element: ``gcc-3.3.2``
... |
def toFilename(url):
    """gets url and returns filename"""
    path = urlparse(url).path
    if not path:
        # No path component: synthesize a timestamped name.
        path = "file_{}".format(int(time.time()))
    cleaned = re.sub(r'[^\w\s\.\-]', '-', path).strip().lower()
    # Collapse dashes/whitespace, trim dashes, keep at most 200 chars.
    return re.sub(r'[-\s]+', '-', cleaned).strip("-")[-200:]
def compute_positive_association(self, visible,
hidden_probs, hidden_states):
if self.visible_unit_type == 'bin':
positive = tf.matmul(tf.transpose(visible), hidden_states)
elif self.visible_unit_type == 'gauss':
positive = tf.matmul(tf.transp... | Compute positive associations between visible and hidden units.
:param visible: visible units
:param hidden_probs: hidden units probabilities
:param hidden_states: hidden units states
:return: positive association = dot(visible.T, hidden) |
def FromArchive(cls, path, actions_dict, resources_dict, temp_dir=None):
if not path.endswith(".ship"):
raise ArgumentError("Attempted to unpack a recipe archive from a file that did not end in .ship", path=path)
name = os.path.basename(path)[:-5]
if temp_dir is None:
tem... | Create a RecipeObject from a .ship archive.
This archive should have been generated from a previous call to
iotile-ship -a <path to yaml file>
or via iotile-build using autobuild_shiparchive().
Args:
path (str): The path to the recipe file that we wish to load
... |
def absent(name, driver=None):
ret = {'name': name,
'changes': {},
'result': False,
'comment': ''}
volume = _find_volume(name)
if not volume:
ret['result'] = True
ret['comment'] = 'Volume \'{0}\' already absent'.format(name)
return ret
try:
... | Ensure that a volume is absent.
.. versionadded:: 2015.8.4
.. versionchanged:: 2017.7.0
This state was renamed from **docker.volume_absent** to **docker_volume.absent**
name
Name of the volume
Usage Examples:
.. code-block:: yaml
volume_foo:
docker_volume.absen... |
def hl_canvas2table_box(self, canvas, tag):
self.treeview.clear_selection()
cobj = canvas.get_object_by_tag(tag)
if cobj.kind != 'rectangle':
return
canvas.delete_object_by_tag(tag, redraw=False)
if self.maskhltag:
try:
canvas.delete_object... | Highlight all masks inside user drawn box on table. |
def get_keyword_query(self, **kw):
query = dict()
indexes = self.catalog.get_indexes()
for k, v in kw.iteritems():
if k.lower() == "uid":
k = "UID"
if k.lower() == "portal_type":
if v:
v = _.to_list(v)
if k n... | Generates a query from the given keywords.
Only known indexes make it into the generated query.
:returns: Catalog query
:rtype: dict |
def copy_data_ext(self, model, field, dest=None, idx=None, astype=None):
if not dest:
dest = field
assert dest not in self._states + self._algebs
self.__dict__[dest] = self.read_data_ext(
model, field, idx, astype=astype)
if idx is not None:
if len(idx... | Retrieve the field of another model and store it as a field.
:param model: name of the source model being a model name or a group name
:param field: name of the field to retrieve
:param dest: name of the destination field in ``self``
:param idx: idx of elements to access
:param ... |
def to_bed(call, sample, work_dir, calls, data):
out_file = os.path.join(work_dir, "%s-%s-flat.bed" % (sample, call["variantcaller"]))
if call.get("vrn_file") and not utils.file_uptodate(out_file, call["vrn_file"]):
with file_transaction(data, out_file) as tx_out_file:
convert_fn = CALLER_TO... | Create a simplified BED file from caller specific input. |
def vlog(self, msg, *args):
    """Logs a message to stderr only if verbose is enabled."""
    if not self.verbose:
        return
    self.log(msg, *args)
def calc_sasa(dssp_df):
    """Calculation of SASA utilizing the DSSP program.

    DSSP must be installed for biopython to properly call it.
    Install using apt-get on Ubuntu or from: http://swift.cmbi.ru.nl/gv/dssp/

    Input: DSSP results dataframe (with exposure_asa/exposure_rsa columns)
    Output: dict with total SASA, mean relative exposure, and residue count
    """
    return {
        'ssb_sasa': dssp_df.exposure_asa.sum(),
        'ssb_mean_rel_exposed': dssp_df.exposure_rsa.mean(),
        'ssb_size': len(dssp_df),
    }
def update(self, sensor, reading):
    """Update callback used by sensors to notify observers of changes.

    Parameters
    ----------
    sensor : :class:`katcp.Sensor` object
        The sensor whose value has changed.
    reading : (timestamp, status, value) tuple
        Sensor reading as would be returned by sensor.read()
    """
    # Snapshot the parents so recalculation cannot mutate the iterable
    # while we walk it.
    for parent in list(self._child_to_parents[sensor]):
        self.recalculate(parent, (sensor,))
def random_id(length):
    """Generates a random ID of given length."""
    alphabet = string.ascii_letters + string.digits
    return "".join(random.choice(alphabet) for _ in range(length))
def free_cache(ctx, *elts):
for elt in elts:
if isinstance(elt, Hashable):
cache = __STATIC_ELEMENTS_CACHE__
else:
cache = __UNHASHABLE_ELTS_CACHE__
elt = id(elt)
if elt in cache:
del cache[elt]
if not elts:
__STATIC_ELEMENTS_CACHE_... | Free properties bound to input cached elts. If empty, free the whole
cache. |
def maybe_inspect_zip(models):
    r'''
    Detect if models is a list of protocolbuffer files or a ZIP file.
    If the latter, then unzip it and return the list of protocolbuffer files
    that were inside.
    '''
    if not is_zip_file(models):
        return models
    if len(models) > 1:
        return models
    if len(models) < 1:
        raise AssertionError('No models at all')
    return zipfile.ZipFile(models[0]).namelist()
def after_model_change(self, form, User, is_created):
    """Send password instructions if desired.

    Only newly created users with the notification flag set get the
    reset-password email.
    """
    if is_created and form.notification.data is True:
        send_reset_password_instructions(User)
def create_relationship(manager, handle_id, other_handle_id, rel_type):
meta_type = get_node_meta_type(manager, handle_id)
if meta_type == 'Location':
return create_location_relationship(manager, handle_id, other_handle_id, rel_type)
elif meta_type == 'Logical':
return create_logical_relatio... | Makes a relationship from node to other_node depending on which
meta_type the nodes are. Returns the relationship or raises
NoRelationshipPossible exception. |
def highlightBlock(self, string):
prev_data = self.currentBlock().previous().userData()
if prev_data is not None:
self._lexer._saved_state_stack = prev_data.syntax_stack
elif hasattr(self._lexer, '_saved_state_stack'):
del self._lexer._saved_state_stack
index = 0
... | Highlight a block of text. |
def get_alt_lengths(self):
    """Returns the longest length of the variant. For deletions, return is negative,
    SNPs return 0, and insertions are +. None return corresponds to no variant in interval
    for specified individual
    """
    lengths = []
    for individual in six.moves.range(len(self.genotype)):
        valid_alt = self.get_alt_length(individual=individual)
        if valid_alt:
            lengths.append(max(valid_alt) - len(self.ref))
        else:
            lengths.append(None)
    return lengths
def beautify_file(self, path):
error = False
if(path == '-'):
data = sys.stdin.read()
result, error = self.beautify_string(data, '(stdin)')
sys.stdout.write(result)
else:
data = self.read_file(path)
result, error = self.beautify_string(... | Beautify bash script file. |
def __cloudflare_list_zones(self, *, account, **kwargs):
done = False
zones = []
page = 1
while not done:
kwargs['page'] = page
response = self.__cloudflare_request(account=account, path='/zones', args=kwargs)
info = response['result_info']
... | Helper function to list all zones registered in the CloudFlare system. Returns a `list` of the zones
Args:
account (:obj:`CloudFlareAccount`): A CloudFlare Account object
**kwargs (`dict`): Extra arguments to pass to the API endpoint
Returns:
`list` of `dict` |
def _get_vm_by_name(name, allDetails=False):
    """Since Proxmox works based op id's rather than names as identifiers this
    requires some filtering to retrieve the required information.

    Returns the VM entry, or False when no VM with that name exists.
    """
    vms = get_resources_vms(includeConfig=allDetails)
    try:
        return vms[name]
    except KeyError:
        log.info('VM with name "%s" could not be found.', name)
        return False
def network_lopf(network, snapshots=None, solver_name="glpk", solver_io=None,
skip_pre=False, extra_functionality=None, solver_logfile=None, solver_options={},
keep_files=False, formulation="angles", ptdf_tolerance=0.,
free_memory={},extra_postprocessing=None):
sna... | Linear optimal power flow for a group of snapshots.
Parameters
----------
snapshots : list or index slice
A list of snapshots to optimise, must be a subset of
network.snapshots, defaults to network.snapshots
solver_name : string
Must be a solver name that pyomo recognises and th... |
def get_fitness(self, chromosome):
    """Get the fitness score for a chromosome, using the cached value if available."""
    key = chromosome.dna
    cached = self.fitness_cache.get(key)
    if cached is not None:
        return cached
    score = self.eval_fitness(chromosome)
    self.fitness_cache[key] = score
    return score
def run_shell(args: dict) -> int:
    """Run the shell sub command"""
    if args.get('project_directory'):
        # A project directory implies batch mode, not an interactive shell.
        return run_batch(args)
    shell = CauldronShell()
    if in_project_directory():
        current = os.path.realpath(os.curdir)
        shell.cmdqueue.append('open "{}"'.format(current))
    shell.cmdloop()
    return 0
def timeout(limit, handler):
def wrapper(f):
def wrapped_f(*args, **kwargs):
old_handler = signal.getsignal(signal.SIGALRM)
signal.signal(signal.SIGALRM, timeout_handler)
signal.alarm(limit)
try:
res = f(*args, **kwargs)
except Time... | A decorator ensuring that the decorated function tun time does not
exceeds the argument limit.
:args limit: the time limit
:type limit: int
:args handler: the handler function called when the decorated
function times out.
:type handler: callable
Example:
>>>def timeout_handler(limit, ... |
def find_parent_id_for_component(self, component_id):
cursor = self.db.cursor()
sql = "SELECT parentResourceComponentId FROM ResourcesComponents WHERE resourceComponentId=%s"
count = cursor.execute(sql, (component_id,))
if count > 0:
return (ArchivistsToolkitClient.RESOURCE_C... | Given the ID of a component, returns the parent component's ID.
:param string component_id: The ID of the component.
:return: A tuple containing:
* The type of the parent record; valid values are ArchivesSpaceClient.RESOURCE and ArchivesSpaceClient.RESOURCE_COMPONENT.
* The ID o... |
def unify_mp(b, partition_name):
    """Unify all of the segment partitions for a parent partition, then run stats on the MPR file"""
    message = "MP coalesce {}".format(partition_name)
    with b.progress.start('coalesce_mp', 0, message=message) as ps:
        return b.unify_partition(partition_name, None, ps)
def combine_ctrlpts_weights(ctrlpts, weights=None):
    """Multiplies control points by the weights to generate weighted control points.

    This function is dimension agnostic, i.e. control points can be in any dimension
    but weights should be 1D. The ``weights`` parameter can be set to None to let
    the function generate a weights vector of 1.0 values.
    """
    if weights is None:
        weights = [1.0] * len(ctrlpts)
    return [
        [float(coord * w) for coord in pt] + [float(w)]
        for pt, w in zip(ctrlpts, weights)
    ]
def dump_stats(self, pattern):
    """Dumps VM statistics.

    in pattern of type str
        The selection pattern. A bit similar to filename globbing.
    """
    # NOTE: ``basestring`` indicates this module targets Python 2.
    if not isinstance(pattern, basestring):
        raise TypeError("pattern can only be an instance of type basestring")
    self._call("dumpStats", in_p=[pattern])
def up_alpha_beta(returns, factor_returns, **kwargs):
    """Computes alpha and beta for periods when the benchmark return is positive.

    Parameters
    ----------
    see documentation for `alpha_beta`.

    Returns
    -------
    float
        Alpha.
    float
        Beta.
    """
    return up(returns, factor_returns,
              function=alpha_beta_aligned, **kwargs)
def put(self, item, block=True, timeout=None):
if self.full():
if not block:
raise Full()
current = compat.getcurrent()
waketime = None if timeout is None else time.time() + timeout
if timeout is not None:
scheduler.schedule_at(wake... | put an item into the queue
.. note::
if the queue was created with a `maxsize` and it is currently
:meth:`full`, this method will block the calling coroutine until
another coroutine :meth:`get`\ s an item.
:param item: the object to put into the queue, can be any t... |
def _execfile(filename, globals, locals=None):
mode = 'rb'
if sys.version_info < (2, 7):
mode += 'U'
with open(filename, mode) as stream:
script = stream.read()
if locals is None:
locals = globals
code = compile(script, filename, 'exec')
exec(code, globals, locals) | Python 3 implementation of execfile. |
def get_ip_reports(self, ips):
api_name = 'virustotal-ip-address-reports'
(all_responses, ips) = self._bulk_cache_lookup(api_name, ips)
responses = self._request_reports("ip", ips, 'ip-address/report')
for ip, response in zip(ips, responses):
if self._cache:
s... | Retrieves the most recent VT info for a set of ips.
Args:
ips: list of IPs.
Returns:
A dict with the IP as key and the VT report as value. |
def parse_mtl(mtl):
if hasattr(mtl, 'decode'):
mtl = mtl.decode('utf-8')
mtllib = None
mtllibs = []
for line in str.splitlines(str(mtl).strip()):
line_split = line.strip().split()
if len(line_split) <= 1:
continue
key = line_split[0]
if key == 'newmtl'... | Parse a loaded MTL file.
Parameters
-------------
mtl : str or bytes
Data from an MTL file
Returns
------------
mtllibs : list of dict
Each dict has keys: newmtl, map_Kd, Kd |
def spam(self, tag=None, fromdate=None, todate=None):
    """Gets a total count of recipients who have marked your email as spam."""
    return self.call(
        "GET", "/stats/outbound/spam",
        tag=tag, fromdate=fromdate, todate=todate)
def in_download_archive(track):
global arguments
if not arguments['--download-archive']:
return
archive_filename = arguments.get('--download-archive')
try:
with open(archive_filename, 'a+', encoding='utf-8') as file:
logger.debug('Contents of {0}:'.format(archive_filename))
... | Returns True if a track_id exists in the download archive |
def select_time(da, **indexer):
    """Select entries according to a time period.

    Parameters
    ----------
    da : xarray.DataArray
        Input data.
    **indexer : {dim: indexer, }, optional
        Time attribute and values over which to subset the array, e.g.
        season='DJF' for winter values or month=1 for January.
        With no indexer, the input is returned unchanged.
    """
    if not indexer:
        return da
    key, values = indexer.popitem()
    time_attr = getattr(da.time.dt, key)
    return da.sel(time=time_attr.isin(values)).dropna(dim='time')
def get_image_code(self, id_code, access_token=None, user_id=None):
if access_token:
self.req.credential.set_token(access_token)
if user_id:
self.req.credential.set_user_id(user_id)
if not self.check_credentials():
raise CredentialsError('credentials invalid')... | Get the image of a code, by its id |
def data_url(content, mimetype=None):
if isinstance(content, pathlib.Path):
if not mimetype:
mimetype = guess_type(content.name)[0]
with content.open('rb') as fp:
content = fp.read()
else:
if isinstance(content, text_type):
content = content.encode('ut... | Returns content encoded as base64 Data URI.
:param content: bytes or str or Path
:param mimetype: mimetype for
:return: str object (consisting only of ASCII, though)
.. seealso:: https://en.wikipedia.org/wiki/Data_URI_scheme |
def get_composition_mdata():
return {
'children': {
'element_label': {
'text': 'children',
'languageTypeId': str(DEFAULT_LANGUAGE_TYPE),
'scriptTypeId': str(DEFAULT_SCRIPT_TYPE),
'formatTypeId': str(DEFAULT_FORMAT_TYPE),
... | Return default mdata map for Composition |
def run(plugin_name, *args, **kwargs):
plugindir = nago.settings.get_option('plugin_dir')
plugin = plugindir + "/" + plugin_name
if not os.path.isfile(plugin):
raise ValueError("Plugin %s not found" % plugin)
command = [plugin] + list(args)
p = subprocess.Popen(command, stdout=subprocess.PIP... | Run a specific plugin |
def stop(self):
    """Stop listening."""
    self._running = False
    pending = self._sleep_task
    if pending:
        pending.cancel()
        self._sleep_task = None
def GameTypeEnum(ctx):
    """Game Type Enumeration."""
    values = dict(
        RM=0,
        Regicide=1,
        DM=2,
        Scenario=3,
        Campaign=4,
        KingOfTheHill=5,
        WonderRace=6,
        DefendTheWonder=7,
        TurboRandom=8,
    )
    return Enum(ctx, **values)
def align(self,inputwords, outputwords):
alignment = []
cursor = 0
for inputword in inputwords:
if len(outputwords) > cursor and outputwords[cursor] == inputword:
alignment.append(cursor)
cursor += 1
elif len(outputwords) > cursor+1 and out... | For each inputword, provides the index of the outputword |
def inception_v3_parameters(weight_decay=0.00004, stddev=0.1,
batch_norm_decay=0.9997, batch_norm_epsilon=0.001):
with scopes.arg_scope([ops.conv2d, ops.fc],
weight_decay=weight_decay):
with scopes.arg_scope([ops.conv2d],
stddev=stddev,... | Yields the scope with the default parameters for inception_v3.
Args:
weight_decay: the weight decay for weights variables.
stddev: standard deviation of the truncated guassian weight distribution.
batch_norm_decay: decay for the moving average of batch_norm momentums.
batch_norm_epsilon: small float ... |
def get_registered(option_hooks=None, event_hooks=None,
command_hooks=None, root_access=None,
task_active=True):
plugins = []
for _, item in _registered:
plugin, type_info = item
if task_active:
if type_info.get('disabled'):
conti... | Returns a generator of registered plugins matching filters.
`option_hooks`
Boolean to include or exclude plugins using option hooks.
`event_hooks`
Boolean to include or exclude task event plugins.
`command_hooks`
Boolean to include or exclude command plugins.... |
def shortDescription(self):
    """Overrides property from Event base class."""
    cd = getattr(self, 'classDescription', None)
    if not cd:
        return ''
    short = getattr(cd, 'shortDescription', '')
    # Fall back to the full description when no short one is set.
    return short or getattr(cd, 'description', '')
def handler_for(obj):
    """return the handler for the object type"""
    # First match by instance type.
    for handler_type, handler in handlers.items():
        if isinstance(obj, handler_type):
            return handler
    # Then try class-based lookup; obj may not be a class at all.
    try:
        for handler_type, handler in handlers.items():
            if issubclass(obj, handler_type):
                return handler
    except TypeError:
        pass
def discard(self, element):
    """Remove element from the RangeSet if it is a member.

    If the element is not a member, do nothing.
    """
    try:
        value = int(element)
    except ValueError:
        # Non-numeric elements can never be members; ignore them.
        return
    set.discard(self, value)
def is_russian(self):
    """Checks if file path is russian

    :return: True iff document has a russian name
    """
    matches = sum(1 for ch in RUSSIAN_CHARS if ch in self.name)
    return matches > len(RUSSIAN_CHARS) / 2.0
def sync_db():
with cd('/'.join([deployment_root(),'env',env.project_fullname,'project',env.project_package_name,'sitesettings'])):
venv = '/'.join([deployment_root(),'env',env.project_fullname,'bin','activate'])
sites = _get_django_sites()
site_ids = sites.keys()
site_ids.sort()
... | Runs the django syncdb command |
def _set_repo_option(repo, option):
if not option:
return
opt = option.split('=')
if len(opt) != 2:
return
if opt[0] == 'trusted':
repo['trusted'] = opt[1] == 'yes'
else:
repo[opt[0]] = opt[1] | Set the option to repo |
def verify_axis_labels(self, expected, actual, source_name):
if not getattr(self, '_checked_axis_labels', False):
self._checked_axis_labels = defaultdict(bool)
if not self._checked_axis_labels[source_name]:
if actual is None:
log.warning("%s instance could not ver... | Verify that axis labels for a given source are as expected.
Parameters
----------
expected : tuple
A tuple of strings representing the expected axis labels.
actual : tuple or None
A tuple of strings representing the actual axis labels, or
`None` if th... |
def _handle_iorder(self, state):
if self.opts['state_auto_order']:
for name in state:
for s_dec in state[name]:
if not isinstance(s_dec, six.string_types):
continue
if not isinstance(state[name], dict):
... | Take a state and apply the iorder system |
def get_statements_noprior(self):
    """Return a list of all non-prior Statements in a single list.

    Returns
    -------
    stmts : list[indra.statements.Statement]
        A list of all the INDRA Statements in the model (excluding
        the prior).
    """
    stmts = []
    for key, stmt_list in self.stmts.items():
        if key != 'prior':
            stmts.extend(stmt_list)
    return stmts
def create_fw_db(self, fw_id, fw_name, tenant_id):
    """Create FW dict."""
    self.update_fw_dict({
        'fw_id': fw_id,
        'name': fw_name,
        'tenant_id': tenant_id,
    })
def build_dictionary(self):
    """Return a dictionary of characterIds to their defining tags."""
    definitions = {}
    for tag in self.all_tags_of_type(DefinitionTag, recurse_into_sprites=False):
        cid = tag.characterId
        if cid in definitions:
            raise ValueError('illegal redefinition of character')
        definitions[cid] = tag
    return definitions
def virtualchain_set_opfields(op, **fields):
    """Pass along virtualchain-reserved fields to a virtualchain operation.

    This layer of indirection is meant to help with future compatibility,
    so virtualchain implementations do not try to set operation fields
    directly.
    """
    # Warn about unsupported fields first, then apply the supported ones,
    # preserving the original two-pass behavior.
    for name in fields.keys():
        if name not in indexer.RESERVED_KEYS:
            log.warning("Unsupported virtualchain field '%s'" % name)
    for name in fields.keys():
        if name in indexer.RESERVED_KEYS:
            op[name] = fields[name]
    return op
def add(self, entity):
    """Adds the given entity to this cache.

    :param entity: Entity to add.
    :type entity: Object implementing :class:`everest.interfaces.IEntity`.
    :raises ValueError: If the ID of the entity to add is ``None``
        (unless the `allow_none_id` constructor argument was set).
    """
    if self.__check_new(entity):
        self.__entities.append(entity)
def access_token(self):
access_token = generate_token(length=self.access_token_length[1])
token_secret = generate_token(self.secret_length)
client_key = request.oauth.client_key
self.save_access_token(client_key, access_token,
request.oauth.resource_owner_key, secret=token_se... | Create an OAuth access token for an authorized client.
Defaults to /access_token. Invoked by client applications. |
def _extract_centerdistance(image, mask = slice(None), voxelspacing = None):
image = numpy.array(image, copy=False)
if None == voxelspacing:
voxelspacing = [1.] * image.ndim
centers = [(x - 1) / 2. for x in image.shape]
indices = numpy.indices(image.shape, dtype=numpy.float)
for dim_indices,... | Internal, single-image version of `centerdistance`. |
def reconfigure_log_level(self):
if Global.LOGGER:
Global.LOGGER.debug('reconfiguring logger level')
stream_handlers = filter(lambda x: type(x) is logging.StreamHandler,
self._logger_instance.handlers)
for x in stream_handlers:
x.level = G... | Returns a new standard logger instance |
def getParameter(self, name):
    """Get the parameter with the corresponding name.

    Args:
        name: Name of the parameter to be found.

    Raises:
        TypeError: if the specified parameter does not exist.
    """
    def _fetch():
        # Wrap the low-level handle in the high-level Parameter type.
        return Parameter(self._impl.getParameter(name))
    return lock_and_call(_fetch, self._lock)
def mtr_tr_dense(sz):
n = 2 ** sz
hparams = mtf_bitransformer_base()
hparams.d_model = 1024
hparams.max_length = 256
hparams.batch_size = 128
hparams.d_ff = int(4096 * n)
hparams.d_kv = 128
hparams.encoder_num_heads = int(8 * n)
hparams.decoder_num_heads = int(8 * n)
hparams.learning_rate_decay_step... | Series of machine translation models.
All models are trained on sequences of 256 tokens.
You can use the dataset translate_enfr_wmt32k_packed.
154000 steps = 3 epochs.
Args:
sz: an integer
Returns:
a hparams |
def get_conversations(self):
    """Returns list of Conversation objects built from this payload's
    ``data`` entries."""
    return [Conversation(raw) for raw in self.data["data"]]
def add_url_rule(self, host, rule_string, endpoint, **options):
    """Add a url rule to the app instance.

    The url rule is the same with Flask apps and other Werkzeug apps.

    :param host: the matched hostname. e.g. "www.python.org"
    :param rule_string: the matched path pattern. e.g. "/news/<int:id>"
    :param endpoint: the endpoint name used as a dispatching key
    :param options: extra keyword arguments forwarded to the Rule.
    """
    new_rule = Rule(rule_string, host=host, endpoint=endpoint, **options)
    self.url_map.add(new_rule)
def fit(self, X, y=None, **kwargs):
    """Fit the underlying PCA transformer on X and record its components.

    Parameters
    ----------
    X : ndarray or DataFrame of shape n x m
        A matrix of n instances with m features.
    y : ndarray or Series of length n, optional
        Passed through to the parent fit; not used by the PCA step itself.

    Returns
    -------
    self
    """
    super(PCADecomposition, self).fit(X=X, y=y, **kwargs)
    self.pca_transformer.fit(X)
    # Expose the fitted components from the 'pca' pipeline step.
    pca_step = self.pca_transformer.named_steps['pca']
    self.pca_components_ = pca_step.components_
    return self
def make_fitness(function, greater_is_better):
if not isinstance(greater_is_better, bool):
raise ValueError('greater_is_better must be bool, got %s'
% type(greater_is_better))
if function.__code__.co_argcount != 3:
raise ValueError('function requires 3 arguments (y, y_pr... | Make a fitness measure, a metric scoring the quality of a program's fit.
This factory function creates a fitness measure object which measures the
quality of a program's fit and thus its likelihood to undergo genetic
operations into the next generation. The resulting object is able to be
called with Nu... |
def eval(self):
    """Returns a filename to be used for script output.

    Preference order: explicit magic name, then the stored path when a
    filename is set, otherwise a name derived from alias and extension.
    """
    if self.magic:
        return self.magic
    if self.filename:
        return self.path
    return file_pattern.format(self.alias, self.ext)
def _receive(self):
preamble = self._read(1)
if not preamble:
return None
elif ord(preamble) != SBP_PREAMBLE:
if self._verbose:
print("Host Side Unhandled byte: 0x%02x" % ord(preamble))
return None
hdr = self._readall(5)
msg_crc... | Read and build SBP message. |
def list_nodes():
ret = {}
nodes = list_nodes_full()
for node in nodes:
ret[node] = {
'id': nodes[node]['UUID'],
'image': nodes[node]['Guest OS'],
'name': nodes[node]['Name'],
'state': None,
'private_ips': [],
'public_ips': [],
... | Return a list of registered VMs
CLI Example:
.. code-block:: bash
salt '*' vboxmanage.list_nodes |
# Annotation is quoted so the symbol need not be resolvable at definition time.
async def connect_to_endpoints(self, *endpoints: 'ConnectionConfig') -> None:
    """Connect to the given endpoints and await until all connections are
    established.

    :param endpoints: configurations of the endpoints to connect to.
    :raises: whatever ``_throw_if_already_connected`` raises for an
        endpoint that is already connected.
    """
    self._throw_if_already_connected(*endpoints)
    # The explicit ``loop=`` argument was dropped: inside a coroutine,
    # asyncio.gather() always uses the running event loop, and the kwarg
    # was deprecated in Python 3.8 and removed in Python 3.10.
    await asyncio.gather(
        *(self._await_connect_to_endpoint(endpoint) for endpoint in endpoints)
    )
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.