code stringlengths 51 2.38k | docstring stringlengths 4 15.2k |
|---|---|
def get_summary(session):
profile = get_profile(session)
return {
'user': {
'email': profile['userProfile']['eMail'],
'name': '{} {}'.format(profile['userProfile']['firstName'],
profile['userProfile']['lastName'])
},
'vehicles': ... | Get vehicle summary. |
def init(directory):
username = click.prompt("Input your username")
password = click.prompt("Input your password", hide_input=True,
confirmation_prompt=True)
log_directory = click.prompt("Input your log directory")
if not path.exists(log_directory):
sys.exit("Invalid ... | Init the config fle. |
def select(table, cols="*", where=(), group="", order=(), limit=(), **kwargs):
where = dict(where, **kwargs).items()
sql, args = makeSQL("SELECT", table, cols, where, group, order, limit)
return execute(sql, args) | Convenience wrapper for database SELECT. |
def set_token(self, token):
self.token = token
self.set_header(
'Authorization',
"Bearer {}".format(token)
) | Set the token for the v20 context
Args:
token: The token used to access the v20 REST api |
def sg_input(shape=None, dtype=sg_floatx, name=None):
r
if shape is None:
return tf.placeholder(dtype, shape=None, name=name)
else:
if not isinstance(shape, (list, tuple)):
shape = [shape]
return tf.placeholder(dtype, shape=[None] + list(shape), name=name) | r"""Creates a placeholder.
Args:
shape: A tuple/list of integers. If an integers is given, it will turn to a list.
dtype: A data type. Default is float32.
name: A name for the placeholder.
Returns:
A wrapped placeholder `Tensor`. |
def add_column(self, func, name=None, show=True):
assert func
name = name or func.__name__
if name == '<lambda>':
raise ValueError("Please provide a valid name for " + name)
d = {'func': func,
'show': show,
}
self._columns[name] = d
d... | Add a column function which takes an id as argument and
returns a value. |
def makeCys(segID, N, CA, C, O, geo):
CA_CB_length=geo.CA_CB_length
C_CA_CB_angle=geo.C_CA_CB_angle
N_C_CA_CB_diangle=geo.N_C_CA_CB_diangle
CB_SG_length= geo.CB_SG_length
CA_CB_SG_angle= geo.CA_CB_SG_angle
N_CA_CB_SG_diangle= geo.N_CA_CB_SG_diangle
carbon_b= calculateCoordinates(N, C, CA, CA... | Creates a Cysteine residue |
def get_brizo_url(config):
brizo_url = 'http://localhost:8030'
if config.has_option('resources', 'brizo.url'):
brizo_url = config.get('resources', 'brizo.url') or brizo_url
brizo_path = '/api/v1/brizo'
return f'{brizo_url}{brizo_path}' | Return the Brizo component url.
:param config: Config
:return: Url, str |
def destroy(self):
if self.client:
self.client.setWebView(self.widget, None)
del self.client
super(AndroidWebView, self).destroy() | Destroy the client |
def transform_aglistener_output(result):
from collections import OrderedDict
from msrestazure.tools import parse_resource_id
try:
resource_group = getattr(result, 'resource_group', None) or parse_resource_id(result.id)['resource_group']
output = OrderedDict([('id', result.id),
... | Transforms the result of Availability Group Listener to eliminate unnecessary parameters. |
def dist(x1, x2=None, metric='sqeuclidean', to_numpy=True):
if x2 is None:
x2 = x1
if metric == "sqeuclidean":
return euclidean_distances(x1, x2, squared=True, to_numpy=to_numpy)
elif metric == "euclidean":
return euclidean_distances(x1, x2, squared=False, to_numpy=to_numpy)
else... | Compute distance between samples in x1 and x2 on gpu
Parameters
----------
x1 : np.array (n1,d)
matrix with n1 samples of size d
x2 : np.array (n2,d), optional
matrix with n2 samples of size d (if None then x2=x1)
metric : str
Metric from 'sqeuclidean', 'euclidean',
R... |
def get_network(self):
ref_key = self.ref_key
if ref_key == 'NETWORK':
return self.network
elif ref_key == 'NODE':
return self.node.network
elif ref_key == 'LINK':
return self.link.network
elif ref_key == 'GROUP':
return self.group.... | Get the network that this resource attribute is in. |
def setup_address(self, name, address=default, transact={}):
owner = self.setup_owner(name, transact=transact)
self._assert_control(owner, name)
if is_none_or_zero_address(address):
address = None
elif address is default:
address = owner
elif is_binary_add... | Set up the name to point to the supplied address.
The sender of the transaction must own the name, or
its parent name.
Example: If the caller owns ``parentname.eth`` with no subdomains
and calls this method with ``sub.parentname.eth``,
then ``sub`` will be created as part of thi... |
def plot_point(self, x, y, visible=True, color='black', size=5):
xp = (self.px_x * (x - self.x_min)) / self.x_tick
yp = (self.px_y * (self.y_max - y)) / self.y_tick
coord = 50 + xp, 50 + yp
if visible:
size = int(size/2) if int(size/2) > 1 else 1
x, y = coord
... | Places a single point on the grid
:param x: the x coordinate
:param y: the y coordinate
:param visible: True if the individual point should be visible
:param color: the color of the point
:param size: the point size in pixels
:return: The absolute coordinates as a tuple |
def load_fileobj(fileobj, gz = None, xmldoc = None, contenthandler = None):
fileobj = MD5File(fileobj)
md5obj = fileobj.md5obj
if gz or gz is None:
fileobj = RewindableInputFile(fileobj)
magic = fileobj.read(2)
fileobj.seek(0, os.SEEK_SET)
if gz or magic == '\037\213':
fileobj = gzip.GzipFile(mode = "rb",... | Parse the contents of the file object fileobj, and return the
contents as a LIGO Light Weight document tree. The file object
does not need to be seekable.
If the gz parameter is None (the default) then gzip compressed data
will be automatically detected and decompressed, otherwise
decompression can be forced on ... |
def _get_client_fqdn(self, client_info_contents):
yamldict = yaml.safe_load(client_info_contents)
fqdn = yamldict['system_info']['fqdn']
client_id = yamldict['client_id'].split('/')[1]
return client_id, fqdn | Extracts a GRR client's FQDN from its client_info.yaml file.
Args:
client_info_contents: The contents of the client_info.yaml file.
Returns:
A (str, str) tuple representing client ID and client FQDN. |
def set(self, instance, value, **kw):
ref = []
if api.is_uid(value):
ref.append(value)
if u.is_dict(value):
ref = ref.append(value.get("uid"))
if api.is_at_content(value):
ref.append(value)
if u.is_list(value):
for item in value:
... | Set the value of the uid reference field |
def scale(cls, *scaling):
if len(scaling) == 1:
sx = sy = float(scaling[0])
else:
sx, sy = scaling
return tuple.__new__(cls, (sx, 0.0, 0.0, 0.0, sy, 0.0, 0.0, 0.0, 1.0)) | Create a scaling transform from a scalar or vector.
:param scaling: The scaling factor. A scalar value will
scale in both dimensions equally. A vector scaling
value scales the dimensions independently.
:type scaling: float or sequence
:rtype: Affine |
def certs(self):
certstack = libcrypto.CMS_get1_certs(self.ptr)
if certstack is None:
raise CMSError("getting certs")
return StackOfX509(ptr=certstack, disposable=True) | List of the certificates contained in the structure |
def find_multiplex_by_name(self, multiplex_name: str) -> Multiplex:
for multiplex in self.multiplexes:
if multiplex.name == multiplex_name:
return multiplex
raise AttributeError(f'multiplex "{multiplex_name}" does not exist') | Find and return a multiplex in the influence graph with the given name.
Raise an AttributeError if there is no multiplex in the graph with the given name. |
def check_data(self):
assert os.path.exists(self.data_fp)
if gis:
with fiona.drivers():
with fiona.open(self.faces_fp) as src:
assert src.meta
gpkg_hash = json.load(open(self.data_fp))['metadata']['sha256']
assert gpkg_hash == sha256(self.f... | Check that definitions file is present, and that faces file is readable. |
def read_raw_parser_conf(data: str) -> dict:
config = configparser.ConfigParser(allow_no_value=True)
config.read_string(data)
try:
_data: dict = dict(config["commitizen"])
if "files" in _data:
files = _data["files"]
_f = json.loads(files)
_data.update({"fi... | We expect to have a section like this
```
[commitizen]
name = cz_jira
files = [
"commitizen/__version__.py",
"pyproject.toml"
] # this tab at the end is important
``` |
def to_path_globs(self, relpath, conjunction):
return PathGlobs(
include=tuple(os.path.join(relpath, glob) for glob in self._file_globs),
exclude=tuple(os.path.join(relpath, exclude) for exclude in self._excluded_file_globs),
conjunction=conjunction) | Return a PathGlobs representing the included and excluded Files for these patterns. |
def __create_image(self, inpt, hashfun):
if hashfun not in generator.HASHES.keys():
print ("Unknown or unsupported hash function. Using default: %s"
% self.DEFAULT_HASHFUN)
algo = self.DEFAULT_HASHFUN
else:
algo = hashfun
return generator.ge... | Creates the avatar based on the input and
the chosen hash function. |
def abort(self, count=2, timeout=60):
for counter in xrange(0, count):
self.putc(CAN, timeout) | Send an abort sequence using CAN bytes. |
def read_requirements():
reqs_path = os.path.join('.', 'requirements.txt')
install_reqs = parse_requirements(reqs_path, session=PipSession())
reqs = [str(ir.req) for ir in install_reqs]
return reqs | parses requirements from requirements.txt |
def main():
if not sys.platform.startswith("win"):
if "--daemon" in sys.argv:
daemonize()
from gns3server.run import run
run() | Entry point for GNS3 server |
async def prover_get_credentials(wallet_handle: int,
filter_json: str) -> str:
logger = logging.getLogger(__name__)
logger.debug("prover_get_credentials: >>> wallet_handle: %r, filter_json: %r",
wallet_handle,
filter_json)
if not hasattr(pro... | Gets human readable credentials according to the filter.
If filter is NULL, then all credentials are returned.
Credentials can be filtered by tags created during saving of credential.
NOTE: This method is deprecated because immediately returns all fetched credentials.
Use <prover_search_credentials> to... |
def looks_like_xml(text):
if xml_decl_re.match(text):
return True
key = hash(text)
try:
return _looks_like_xml_cache[key]
except KeyError:
m = doctype_lookup_re.match(text)
if m is not None:
return True
rv = tag_re.search(text[:1000]) is not None
... | Check if a doctype exists or if we have some tags. |
def _update_message_request(self, message):
for each in self.row_keys:
message.rows.row_keys.append(_to_bytes(each))
for each in self.row_ranges:
r_kwrags = each.get_range_kwargs()
message.rows.row_ranges.add(**r_kwrags) | Add row keys and row range to given request message
:type message: class:`data_messages_v2_pb2.ReadRowsRequest`
:param message: The ``ReadRowsRequest`` protobuf |
def to_internal(self, attribute_profile, external_dict):
internal_dict = {}
for internal_attribute_name, mapping in self.from_internal_attributes.items():
if attribute_profile not in mapping:
logger.debug("no attribute mapping found for internal attribute '%s' the attribute p... | Converts the external data from "type" to internal
:type attribute_profile: str
:type external_dict: dict[str, str]
:rtype: dict[str, str]
:param attribute_profile: From which external type to convert (ex: oidc, saml, ...)
:param external_dict: Attributes in the external format... |
def disconnect(self):
self.connState = Client.DISCONNECTED
if self.conn is not None:
self._logger.info('Disconnecting')
self.conn.disconnect()
self.wrapper.connectionClosed()
self.reset() | Disconnect from IB connection. |
def get_client_settings_env(**_):
return {
'proxy': os.environ.get('https_proxy'),
'username': os.environ.get('SL_USERNAME'),
'api_key': os.environ.get('SL_API_KEY'),
} | Retrieve client settings from environment settings.
:param \\*\\*kwargs: Arguments that are passed into the client instance |
def to_regular_array(self, A):
return A.view((int, len(A.dtype.names))).reshape(A.shape + (-1,)) | Converts from an array of type `self.dtype` to an array
of type `int` with an additional index labeling the
tuple indeces.
:param np.ndarray A: An `np.array` of type `self.dtype`.
:rtype: `np.ndarray` |
def save_grade_system(self, grade_system_form, *args, **kwargs):
if grade_system_form.is_for_update():
return self.update_grade_system(grade_system_form, *args, **kwargs)
else:
return self.create_grade_system(grade_system_form, *args, **kwargs) | Pass through to provider GradeSystemAdminSession.update_grade_system |
def encipher_shift(plaintext, plain_vocab, shift):
ciphertext = []
cipher = ShiftEncryptionLayer(plain_vocab, shift)
for _, sentence in enumerate(plaintext):
cipher_sentence = []
for _, character in enumerate(sentence):
encrypted_char = cipher.encrypt_character(character)
cipher_sentence.appen... | Encrypt plain text with a single shift layer.
Args:
plaintext (list of list of Strings): a list of plain text to encrypt.
plain_vocab (list of Integer): unique vocabularies being used.
shift (Integer): number of shift, shift to the right if shift is positive.
Returns:
ciphertext (list of Strings): ... |
def escapePlaceholders(self,inputString):
escaped = inputString.replace(MapConstants.placeholder,'\\'+MapConstants.placeholder)
escaped = escaped.replace(MapConstants.placeholderFileName,'\\'+MapConstants.placeholderFileName)
escaped = escaped.replace(MapConstants.placeholderPath,'\\'+MapConstan... | This is an internal method that escapes all the placeholders
defined in MapConstants.py. |
def serve_forever(self, poll_interval=0.5):
self.__is_shut_down.clear()
try:
while not self.__shutdown_request:
r, w, e = _eintr_retry(select.select, [self], [], [], poll_interval)
if self in r:
self._handle_request_noblock()
finall... | Handle one request at a time until shutdown.
Polls for shutdown every poll_interval seconds. Ignores
self.timeout. If you need to do periodic tasks, do them in
another thread. |
def txinfo(self, txid: str) -> dict:
return cast(dict, self.ext_fetch('txinfo/' + txid)) | Returns information about given transaction. |
def adjust_for_triggers(self):
triggers = self.template['spec'].get('triggers', [])
remove_plugins = [
("prebuild_plugins", "check_and_set_rebuild"),
("prebuild_plugins", "stop_autorebuild_if_disabled"),
]
should_remove = False
if triggers and (self.is_cus... | Remove trigger-related plugins when needed
If there are no triggers defined, it's assumed the
feature is disabled and all trigger-related plugins
are removed.
If there are triggers defined, and this is a custom
base image, some trigger-related plugins do not apply.
Add... |
def execute_cmd(self, userid, cmdStr):
LOG.debug("executing cmd: %s", cmdStr)
return self._smtclient.execute_cmd(userid, cmdStr) | Execute commands on the guest vm. |
def config_mode(self, config_command="configure", pattern=r"[edit]"):
return super(VyOSSSH, self).config_mode(
config_command=config_command, pattern=pattern
) | Enter configuration mode. |
def _alpha2rho0(self, theta_Rs, Rs):
rho0 = theta_Rs / (4. * Rs ** 2 * (1. + np.log(1. / 2.)))
return rho0 | convert angle at Rs into rho0 |
def remove_line_breaks(text):
return unicode(text, 'utf-8').replace('\f', '').replace('\n', '') \
.replace('\r', '').replace(u'\xe2\x80\xa8', '') \
.replace(u'\xe2\x80\xa9', '').replace(u'\xc2\x85', '') \
.encode('utf-8') | Remove line breaks from input.
Including unicode 'line separator', 'paragraph separator',
and 'next line' characters. |
def stop(self):
self.state = False
with display_manager(self.display) as d:
d.record_disable_context(self.ctx)
d.ungrab_keyboard(X.CurrentTime)
with display_manager(self.display2):
d.record_disable_context(self.ctx)
d.ungrab_keyboard(X.CurrentTime) | Stop listening for keyboard input events. |
def add_section(self, section):
if not issubclass(section.__class__, _AbstractSection):
raise TypeError("argument should be a subclass of Section")
self.sections[section.get_key_name()] = section
return section | Add a new Section object to the config. Should be a subclass of
_AbstractSection. |
def real(self):
def re(val):
if hasattr(val, 'real'):
return val.real
elif hasattr(val, 'as_real_imag'):
return val.as_real_imag()[0]
elif hasattr(val, 'conjugate'):
return (val.conjugate() + val) / 2
else:
... | Element-wise real part
Raises:
NoConjugateMatrix: if entries have no `conjugate` method and no
other way to determine the real part
Note:
A mathematically equivalent way to obtain a real matrix from a
complex matrix ``M`` is::
(M.con... |
def assignParameters(self,solution_next,DiscFac,LivPrb,CRRA,Rfree,PermGroFac):
self.solution_next = solution_next
self.DiscFac = DiscFac
self.LivPrb = LivPrb
self.CRRA = CRRA
self.Rfree = Rfree
self.PermGroFac = PermGroFac | Saves necessary parameters as attributes of self for use by other methods.
Parameters
----------
solution_next : ConsumerSolution
The solution to next period's one period problem.
DiscFac : float
Intertemporal discount factor for future utility.
LivPrb : ... |
def re_run_file(self):
if self.get_option('save_all_before_run'):
self.save_all()
if self.__last_ec_exec is None:
return
(fname, wdir, args, interact, debug,
python, python_args, current, systerm,
post_mortem, clear_namespace) = self.__last_ec_exe... | Re-run last script |
def merge_entity(self, table_name, entity, if_match='*', timeout=None):
_validate_not_none('table_name', table_name)
request = _merge_entity(entity, if_match, self.require_encryption,
self.key_encryption_key)
request.host_locations = self._get_host_locations()
... | Updates an existing entity by merging the entity's properties. Throws
if the entity does not exist.
This operation does not replace the existing entity as the update_entity
operation does. A property cannot be removed with merge_entity.
Any properties with null values... |
def _generate_rpc_method(self, method):
def _(**kwargs):
msg_id = self.get_unique_msg_id()
params = encode_data(kwargs)
payload = {
'method': method,
'params': params,
'jsonrpc': '2.0',
'id': msg_id
}... | Generate a function that performs rpc call
:param method: method name
:return: rpc function |
def _archive_single_dir(archive):
common_root = None
for info in _list_archive_members(archive):
fn = _info_name(info)
if fn in set(['.', '/']):
continue
sep = None
if '/' in fn:
sep = '/'
elif '\\' in fn:
sep = '\\'
if sep is N... | Check if all members of the archive are in a single top-level directory
:param archive:
An archive from _open_archive()
:return:
None if not a single top level directory in archive, otherwise a
unicode string of the top level directory name |
def add_handler(cls, level, fmt, colorful, **kwargs):
global g_logger
if isinstance(level, str):
level = getattr(logging, level.upper(), logging.DEBUG)
handler = cls(**kwargs)
handler.setLevel(level)
if colorful:
formatter = ColoredFormatter(fmt, datefmt='%Y-%m-%d %H:%M:%S')
else... | Add a configured handler to the global logger. |
def _get_load_ramping_construct(self):
bus_no = integer.setResultsName("bus_no")
s_rating = real.setResultsName("s_rating")
up_rate = real.setResultsName("up_rate")
down_rate = real.setResultsName("down_rate")
min_up_time = real.setResultsName("min_up_time")
min_down_time... | Returns a construct for an array of load ramping data. |
def _find_files(root, includes, excludes, follow_symlinks):
root = os.path.abspath(root)
file_set = formic.FileSet(
directory=root, include=includes,
exclude=excludes, symlinks=follow_symlinks,
)
for filename in file_set.qualified_files(absolute=False):
yield filename | List files inside a directory based on include and exclude rules.
This is a more advanced version of `glob.glob`, that accepts multiple
complex patterns.
Args:
root (str): base directory to list files from.
includes (list[str]): inclusion patterns. Only files matching those
pat... |
def _apply_xheaders(self, headers: httputil.HTTPHeaders) -> None:
ip = headers.get("X-Forwarded-For", self.remote_ip)
for ip in (cand.strip() for cand in reversed(ip.split(","))):
if ip not in self.trusted_downstream:
break
ip = headers.get("X-Real-Ip", ip)
if... | Rewrite the ``remote_ip`` and ``protocol`` fields. |
def get_alt_description(self):
if 'altDescription' in self.attributes and bool(self.attributes['altDescription'].strip()):
return self.attributes['altDescription']
else:
return None | Returns the alternate description of a parameter.
Only pipeline prompt-when-run parameters
can have alternate names and alternate descriptions |
def rename_to_tmp_name(self):
self.client.rename(
self.id,
'%s_%s' % (self.short_id, self.name)
) | Rename the container to a hopefully unique temporary container name
by prepending the short id. |
def wait_for_element_by_selector(self, selector, seconds):
def assert_element_present():
if not find_elements_by_jquery(world.browser, selector):
raise AssertionError("Expected a matching element.")
wait_for(assert_element_present)(timeout=int(seconds)) | Assert an element exists matching the given selector within the given time
period. |
def parse_services(rule):
parser = argparse.ArgumentParser()
rules = shlex.split(rule)
rules.pop(0)
parser.add_argument('--disabled', dest='disabled', action='store')
parser.add_argument('--enabled', dest='enabled', action='store')
args = clean_args(vars(parser.parse_args(rules)))
parser = N... | Parse the services line |
def replace_variables(sentence: List[str],
sentence_variables: Dict[str, str]) -> Tuple[List[str], List[str]]:
tokens = []
tags = []
for token in sentence:
if token not in sentence_variables:
tokens.append(token)
tags.append("O")
else:
... | Replaces abstract variables in text with their concrete counterparts. |
def get_local_environnement(self):
local_env = os.environ.copy()
for local_var in self.env:
local_env[local_var] = self.env[local_var]
return local_env | Mix the environment and the environment variables into a new local
environment dictionary
Note: We cannot just update the global os.environ because this
would effect all other checks.
:return: local environment variables
:rtype: dict |
def add_behave_arguments(parser):
conflicts = [
'--no-color',
'--version',
'-c',
'-k',
'-v',
'-S',
'--simple',
]
parser.add_argument(
'paths',
action='store',
nargs='*',
help="Feature directory, file or file location (FI... | Additional command line arguments extracted directly from behave |
def sget_steptime(self, cycle, step, dataset_number=None):
dataset_number = self._validate_dataset_number(dataset_number)
if dataset_number is None:
self._report_empty_dataset()
return
cycle_index_header = self.headers_normal.cycle_index_txt
step_time_header = sel... | Returns step time for cycle, step.
Convinience function; same as issuing
dfdata[(dfdata[cycle_index_header] == cycle) &
(dfdata[step_index_header] == step)][step_time_header]
Args:
cycle: cycle number
step: step number
dataset_number: the... |
def _get_jwt_for_audience(self, audience):
token, expiry = self._cache.get(audience, (None, None))
if token is None or expiry < _helpers.utcnow():
token, expiry = self._make_jwt_for_audience(audience)
self._cache[audience] = token, expiry
return token | Get a JWT For a given audience.
If there is already an existing, non-expired token in the cache for
the audience, that token is used. Otherwise, a new token will be
created.
Args:
audience (str): The intended audience.
Returns:
bytes: The encoded JWT. |
def subvolume_find_new(name, last_gen):
cmd = ['btrfs', 'subvolume', 'find-new', name, last_gen]
res = __salt__['cmd.run_all'](cmd)
salt.utils.fsutils._verify_run(res)
lines = res['stdout'].splitlines()
files = [l.split()[-1] for l in lines if l.startswith('inode')]
transid = lines[-1].split()[-... | List the recently modified files in a subvolume
name
Name of the subvolume
last_gen
Last transid marker from where to compare
CLI Example:
.. code-block:: bash
salt '*' btrfs.subvolume_find_new /var/volumes/tmp 1024 |
def _apply_over_vars_with_dim(func, self, dim=None, **kwargs):
ds = type(self)(coords=self.coords, attrs=self.attrs)
for name, var in self.data_vars.items():
if dim in var.dims:
ds[name] = func(var, dim=dim, **kwargs)
else:
ds[name] = var
return ds | wrapper for datasets |
def substring_search(query, list_of_strings, limit_results=DEFAULT_LIMIT):
matching = []
query_words = query.split(' ')
query_words.sort(key=len, reverse=True)
counter = 0
for s in list_of_strings:
target_words = s.split(' ')
if(anyword_substring_search(target_words, query_words)):
... | main function to call for searching |
def use_comparative_sequence_rule_enabler_view(self):
self._object_views['sequence_rule_enabler'] = COMPARATIVE
for session in self._get_provider_sessions():
try:
session.use_comparative_sequence_rule_enabler_view()
except AttributeError:
pass | Pass through to provider SequenceRuleEnablerLookupSession.use_comparative_sequence_rule_enabler_view |
def _get_event_cls(view_obj, events_map):
request = view_obj.request
view_method = getattr(view_obj, request.action)
event_action = (
getattr(view_method, '_event_action', None) or
request.action)
return events_map[event_action] | Helper function to get event class.
:param view_obj: Instance of View that processes the request.
:param events_map: Map of events from which event class should be
picked.
:returns: Found event class. |
def is_cleanly_mergable(*dicts: Dict[Any, Any]) -> bool:
if len(dicts) <= 1:
return True
elif len(dicts) == 2:
if not all(isinstance(d, Mapping) for d in dicts):
return False
else:
shared_keys = set(dicts[0].keys()) & set(dicts[1].keys())
return all(is... | Check that nothing will be overwritten when dictionaries are merged using `deep_merge`.
Examples:
>>> is_cleanly_mergable({"a": 1}, {"b": 2}, {"c": 3})
True
>>> is_cleanly_mergable({"a": 1}, {"b": 2}, {"a": 0, c": 3})
False
>>> is_cleanly_mergable({"a": 1, "b": {"ba": 2}}, ... |
def _dedent(text):
lines = text.split('\n')
if len(lines) == 1:
indent = 0
elif lines[0].strip():
raise ValueError('when multiple lines, first line must be blank')
elif lines[-1].strip():
raise ValueError('last line must only contain indent whitespace'... | Remove common indentation from each line in a text block.
When text block is a single line, return text block. Otherwise
determine common indentation from last line, strip common
indentation from each line, and return text block consisting of
inner lines (don't include first and last li... |
def revnet(inputs, hparams, reuse=None):
training = hparams.mode == tf.estimator.ModeKeys.TRAIN
with tf.variable_scope('RevNet', reuse=reuse):
x1, x2 = init(inputs,
num_channels=hparams.num_channels_init_block,
dim=hparams.dim,
kernel_size=hparams.init_kerne... | Uses Tensor2Tensor memory optimized RevNet block to build a RevNet.
Args:
inputs: [NxHxWx3] tensor of input images to the model.
hparams: HParams object that contains the following parameters,
in addition to the parameters contained in the basic_params1() object in
the common_hparams module:
... |
def _json_safe(cls, value):
if type(value) == date:
return str(value)
elif type(value) == datetime:
return value.strftime('%Y-%m-%d %H:%M:%S')
elif isinstance(value, ObjectId):
return str(value)
elif isinstance(value, _BaseFrame):
return va... | Return a JSON safe value |
def _package_conf_file_to_dir(file_name):
if file_name in SUPPORTED_CONFS:
path = BASE_PATH.format(file_name)
if os.path.exists(path):
if os.path.isdir(path):
return False
else:
os.rename(path, path + '.tmpbak')
os.mkdir(path, 0... | Convert a config file to a config directory. |
def from_representation(self, data):
if data in self._TRUE_VALUES:
return True
elif data in self._FALSE_VALUES:
return False
else:
raise ValueError(
"{type} type value must be one of {values}".format(
type=self.type,
... | Convert representation value to ``bool`` if it has expected form. |
def add_term(self, t):
if t not in self.terms:
if t.parent_term_lc == 'root':
self.terms.append(t)
self.doc.add_term(t, add_section=False)
t.set_ownership()
else:
raise GenerateError("Can only add or move root-level terms. T... | Add a term to this section and set it's ownership. Should only be used on root level terms |
def isom(self,coolingFactor=None,EdgeAttribute=None,initialAdaptation=None,\
maxEpoch=None,minAdaptation=None,minRadius=None,network=None,NodeAttribute=None,\
nodeList=None,radius=None,radiusConstantTime=None,singlePartition=None,\
sizeFactor=None,verbose=None):
network=check_network(self,network,verbose=verbos... | Execute the Inverted Self-Organizing Map Layout on a network.
:param coolingFactor (string, optional): Cooling factor, in numeric value
:param EdgeAttribute (string, optional): The name of the edge column contai
ning numeric values that will be used as weights in the layout algor
ithm. Only columns containin... |
def _should_set(self, key, mode):
if mode is None or mode not in ["nx", "xx"]:
return True
if mode == "nx":
if key in self.redis:
return False
elif key not in self.redis:
return False
return True | Determine if it is okay to set a key.
If the mode is None, returns True, otherwise, returns True of false based on
the value of ``key`` and the ``mode`` (nx | xx). |
def decorate_event_js(js_code):
def add_annotation(method):
setattr(method, "__is_event", True )
setattr(method, "_js_code", js_code )
return method
return add_annotation | setup a method as an event, adding also javascript code to generate
Args:
js_code (str): javascript code to generate the event client-side.
js_code is added to the widget html as
widget.attributes['onclick'] = js_code%{'emitter_identifier':widget.identifier, 'event_name':'onclick'} |
def get_position_searchable(self):
ids = gkr.list_item_ids_sync(self.keyring)
position_searchable = {}
for i in ids:
item_attrs = gkr.item_get_attributes_sync(self.keyring, i)
position_searchable[i] = item_attrs['searchable']
return position_searchable | Return dict of the position and corrasponding searchable str |
def _clear(self, pipe=None):
redis = self.redis if pipe is None else pipe
redis.delete(self.key) | Helper for clear operations.
:param pipe: Redis pipe in case update is performed as a part
of transaction.
:type pipe: :class:`redis.client.StrictPipeline` or
:class:`redis.client.StrictRedis` |
def discharge_coefficient_to_K(D, Do, C):
r
beta = Do/D
beta2 = beta*beta
beta4 = beta2*beta2
return ((1.0 - beta4*(1.0 - C*C))**0.5/(C*beta2) - 1.0)**2 | r'''Converts a discharge coefficient to a standard loss coefficient,
for use in computation of the actual pressure drop of an orifice or other
device.
.. math::
K = \left[\frac{\sqrt{1-\beta^4(1-C^2)}}{C\beta^2} - 1\right]^2
Parameters
----------
D : float
Upstream inte... |
def get_table_info(self, tablename):
conn = self.__get_conn()
ret = a99.get_table_info(conn, tablename)
if len(ret) == 0:
raise RuntimeError("Cannot get info for table '{}'".format(tablename))
more = self.gui_info.get(tablename)
for row in ret.values():
ca... | Returns information about fields of a specific table
Returns: OrderedDict(("fieldname", MyDBRow), ...))
**Note** Fields "caption" and "tooltip" are added to rows using information in moldb.gui_info |
def train(self, conversation):
previous_statement_text = None
previous_statement_search_text = ''
statements_to_create = []
for conversation_count, text in enumerate(conversation):
if self.show_training_progress:
utils.print_progress_bar(
'... | Train the chat bot based on the provided list of
statements that represents a single conversation. |
def cancel_expired_invitations(invitations=None):
expiration_date = timezone.now() - settings.WALDUR_CORE['INVITATION_LIFETIME']
if not invitations:
invitations = models.Invitation.objects.filter(state=models.Invitation.State.PENDING)
invitations = invitations.filter(created__lte=expiration_date)
... | Invitation lifetime must be specified in Waldur Core settings with parameter
"INVITATION_LIFETIME". If invitation creation time is less than expiration time, the invitation will set as expired. |
def copyNode(node, children=False, parent=False):
if parent is not False:
element = SubElement(
parent,
node.tag,
attrib=node.attrib,
nsmap={None: "http://www.tei-c.org/ns/1.0"}
)
else:
element = Element(
node.tag,
a... | Copy an XML Node
:param node: Etree Node
:param children: Copy children nodes is set to True
:param parent: Append copied node to parent if given
:return: New Element |
def week_to_datetime(iso_year, iso_week):
    """Return a datetime for the start of the given ISO year and week."""
    start_date = iso_to_gregorian(iso_year, iso_week, 0)
    midnight = datetime.time(hour=0)
    return datetime.datetime.combine(start_date, midnight)
def LDAP_search(pattern_search, attribute):
    """Do a LDAP search and return the raw connection response.

    :param pattern_search: LDAP filter expression to search with.
    :param attribute: single attribute name to request for each entry.
    :return: the ``response`` attribute of the underlying connection.
    """
    conn, base_dn = _get_LDAP_connection()
    conn.search(search_base=base_dn,
                search_filter=pattern_search,
                attributes=[attribute])
    return conn.response
def _clean_streams(repo, mapped_streams):
for stream_name in ('stdout', 'stderr'):
stream = mapped_streams.get(stream_name)
if not stream:
continue
path = os.path.relpath(stream, start=repo.working_dir)
if (path, 0) not in repo.index.entries:
os.remove(stream)... | Clean mapped standard streams. |
def put_logging(Bucket,
TargetBucket=None, TargetPrefix=None, TargetGrants=None,
region=None, key=None, keyid=None, profile=None):
try:
conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
logstate = {}
targets = {'TargetBucket': TargetBucket,
... | Given a valid config, update the logging parameters for a bucket.
Returns {'updated': True} if parameters were updated and returns
{'updated': False} if parameters were not updated.
CLI Example:
.. code-block:: bash
salt myminion boto_s3_bucket.put_logging my_bucket log_bucket '[{...}]' prefix |
def removeLayer(self, layer):
    """Remove ``layer`` from this glyph.

        >>> glyph.removeLayer("background")

    ``layer`` can be a :ref:`type-glyph-layer` or a :ref:`type-string`
    representing a layer name.
    """
    # Accept a glyph-layer object by reducing it to its layer name.
    if isinstance(layer, BaseGlyph):
        layer = layer.layer.name
    name = normalizers.normalizeLayerName(layer)
    # Only remove when the resolved layer's actual name matches the
    # normalized request.
    if self._getLayer(name).layer.name == name:
        self._removeLayer(name)
def getStartTag(self):
attributeStrings = []
for name, val in self._attributes.items():
if val:
val = tostr(val)
if val or name not in TAG_ITEM_BINARY_ATTRIBUTES:
val = escapeQuotes(val)
attributeStrings.append('%s="%s"' %(name, val... | getStartTag - Returns the start tag represented as HTML
@return - String of start tag with attributes |
def _try_dump_cnt(self):
    """Dump counters, but at most once every 60 seconds."""
    current = time.time()
    # Guard clause: nothing to do until a full minute has elapsed
    # since the last dump.
    if current - self._last_dump_cnt <= 60:
        return
    self._last_dump_cnt = current
    self._dump_cnt()
    self._print_counter_log()
def put(self, device_id: int) -> Device:
    """Update the Device resource identified by ``device_id``.

    :param device_id: primary key of the device to update.
    :return: the updated ``Device`` instance.
    """
    found = self._get_or_abort(device_id)
    self.update(found)
    # NOTE(review): commit happens before add — presumably the instance
    # is already attached to the session via _get_or_abort; verify.
    session.commit()
    session.add(found)
    return found
def make(self):
logger.debug("preparing to add all git files")
num_added = self.local_repo.add_all_files()
if num_added:
self.local_repo.commit("Initial import from Project Gutenberg")
file_handler = NewFilesHandler(self)
file_handler.add_new_files()
num_added... | turn fetched files into a local repo, make auxiliary files |
def get_s2_pixel_cloud_detector(threshold=0.4, average_over=4, dilation_size=2, all_bands=True):
return S2PixelCloudDetector(threshold=threshold,
average_over=average_over,
dilation_size=dilation_size,
all_bands=all_band... | Wrapper function for pixel-based S2 cloud detector `S2PixelCloudDetector` |
def chdir(path):
    """Change the working directory to `path` for the duration of this
    context manager.

    :param str path: The path to change to
    """
    original_cwd = os.getcwd()
    os.chdir(path)
    try:
        yield
    finally:
        # Always restore the previous directory, even if the body raised.
        os.chdir(original_cwd)
def inside_softimage():
    """Return True when the code is executing inside Autodesk Softimage.

    If the ``maya`` package imports successfully we are running inside
    Maya, not Softimage, so return False immediately.  Otherwise try to
    reach the running XSI application over COM; success means we are
    inside Softimage.

    :return: ``True`` inside Softimage, ``False`` otherwise.
    """
    try:
        import maya  # noqa: F401 -- only probing for availability
        return False
    except ImportError:
        pass
    try:
        from win32com.client import Dispatch
        Dispatch('XSI.Application')
        return True
    except Exception:
        # Narrowed from a bare ``except:`` which would also swallow
        # KeyboardInterrupt and SystemExit.
        return False
def constructor(
self,
name=None,
function=None,
return_type=None,
arg_types=None,
header_dir=None,
header_file=None,
recursive=None):
return (
self._find_single(
self._impl_matchers[scope... | returns reference to constructor declaration, that is matched
defined criteria |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.