Unnamed: 0 int64 0 389k | code stringlengths 26 79.6k | docstring stringlengths 1 46.9k |
|---|---|---|
4,100 | def function(fname):
def _f(func):
class WrapFunction(Function):
name = fname
def __call__(self, *args, **kwargs):
return func(*args, **kwargs)
return WrapFunction
return _f | Make a function to Function class |
4,101 | def _requirement_element(self, parent_element, req_data):
req_data = self._transform_result(req_data)
if not req_data:
return
title = req_data.get("title")
if not title:
logger.warning("Skipping requirement, title is missing")
return
... | Adds requirement XML element. |
4,102 | def sort_direction(self):
if self.table._meta.order_by == self.name:
return "asc"
elif self.table._meta.order_by == ("-" + self.name):
return "desc"
else:
return None | Return the direction in which the linked table is is sorted by
this column ("asc" or "desc"), or None this column is unsorted. |
4,103 | def safe_compare_digest(val1, val2):
if len(val1) != len(val2):
return False
result = 0
if PY3 and isinstance(val1, bytes) and isinstance(val2, bytes):
for i, j in zip(val1, val2):
result |= i ^ j
else:
for i, j in zip(val1, val2):
result |= (ord(i) ... | safe_compare_digest method.
:param val1: string or bytes for compare
:type val1: str | bytes
:param val2: string or bytes for compare
:type val2: str | bytes |
4,104 | def sort_schemas(schemas):
def keyfun(v):
x = SQL_SCHEMA_REGEXP.match(v).groups()
return (int(x[0]), x[1], int(x[2]) if x[2] else None,
x[3] if x[3] else , int(x[4]))
return sorted(schemas, key=keyfun) | Sort a list of SQL schemas in order |
4,105 | def check_model(self, max_paths=1, max_path_length=5):
results = []
for stmt in self.statements:
result = self.check_statement(stmt, max_paths, max_path_length)
results.append((stmt, result))
return results | Check all the statements added to the ModelChecker.
Parameters
----------
max_paths : Optional[int]
The maximum number of specific paths to return for each Statement
to be explained. Default: 1
max_path_length : Optional[int]
The maximum length of spe... |
4,106 | def update_serviceprofile(self, host_id, vlan_id):
ucsm_ip = self.get_ucsm_ip_for_host(host_id)
if not ucsm_ip:
LOG.info(
, str(host_id))
return False
service_profile = self.ucsm_sp_dict.get((ucsm_ip, host_id))
if service_profile:
... | Top level method to update Service Profiles on UCS Manager.
Calls all the methods responsible for the individual tasks that
ultimately result in a vlan_id getting programed on a server's
ethernet ports and the Fabric Interconnect's network ports. |
4,107 | def _init_map(self):
QuestionFilesFormRecord._init_map(self)
FirstAngleProjectionFormRecord._init_map(self)
super(MultiChoiceOrthoQuestionFormRecord, self)._init_map() | stub |
4,108 | def _loadf(ins):
output = _float_oper(ins.quad[2])
output.extend(_fpush())
return output | Loads a floating point value from a memory address.
If 2nd arg. start with '*', it is always treated as
an indirect value. |
4,109 | def values_update(self, range, params=None, body=None):
url = SPREADSHEET_VALUES_URL % (self.id, quote(range))
r = self.client.request(, url, params=params, json=body)
return r.json() | Lower-level method that directly calls `spreadsheets.values.update <https://developers.google.com/sheets/api/reference/rest/v4/spreadsheets.values/update>`_.
:param str range: The `A1 notation <https://developers.google.com/sheets/api/guides/concepts#a1_notation>`_ of the values to update.
:param dict ... |
4,110 | def _build_file_writer(cls, session: AppSession):
args = session.args
if args.delete_after:
return session.factory.new()
elif args.output_document:
session.factory.class_map[] = SingleDocumentWriter
return session.factory.new(, args.output_documen... | Create the File Writer.
Returns:
FileWriter: An instance of :class:`.writer.BaseFileWriter`. |
4,111 | def _control(self, state):
if not self._subscription_is_recent():
self._subscribe()
cmd = MAGIC + CONTROL + self._mac + PADDING_1 + PADDING_2 + state
_LOGGER.debug("Sending new state to %s: %s", self.host, ord(state))
ack_state = self._udp_transact(cmd, se... | Control device state.
Possible states are ON or OFF.
:param state: Switch to this state. |
4,112 | def assert_valid_rule_class(clazz):
if not (issubclass(clazz, rules.LineRule) or issubclass(clazz, rules.CommitRule)):
msg = u"User-defined rule class must extend from {1}.{2} or {1}.{3}"
raise UserRuleError(msg.format(clazz.__name__, rules.CommitRule.__module__,
... | Asserts that a given rule clazz is valid by checking a number of its properties:
- Rules must extend from LineRule or CommitRule
- Rule classes must have id and name string attributes.
The options_spec is optional, but if set, it must be a list of gitlint Options.
- Rule classes must have a valid... |
4,113 | def p_values(self, p):
if len(p) == 1:
p[0] = list()
else:
p[1].append(p[2])
p[0] = p[1] | values :
| values value VALUE_SEPARATOR
| values value |
4,114 | def generate_one(self):
weights = [self.probability_func(self.generated[element])
for element in self.domain]
element = random.choices(self.domain, weights=weights)[0]
self.generated[element] += 1
return element | Generate a single element.
Returns
-------
element
An element from the domain.
Examples
-------
>>> generator = RepellentGenerator(['a', 'b'])
>>> gen_item = generator.generate_one()
>>> gen_item in ['a', 'b']
True |
4,115 | def _write_git_file_and_module_config(cls, working_tree_dir, module_abspath):
git_file = osp.join(working_tree_dir, )
rela_path = osp.relpath(module_abspath, start=working_tree_dir)
if is_win:
if osp.isfile(git_file):
os.remove(git_file)
with open(git... | Writes a .git file containing a (preferably) relative path to the actual git module repository.
It is an error if the module_abspath cannot be made into a relative path, relative to the working_tree_dir
:note: will overwrite existing files !
:note: as we rewrite both the git file as well as the ... |
4,116 | def uninstall(self):
if self.installed:
sys.meta_path.remove(self)
import_list = []
for name in self.__loaded_modules:
del sys.modules[name]
import_list.append(name)
for name in import_list:
__import__(name)
se... | Uninstall the module finder. If not installed, this will do nothing.
After uninstallation, none of the newly loaded modules will be
decorated (that is, everything will be back to normal). |
4,117 | def start(self):
if self.mode == "manual":
return
if self.ipython_dir != :
self.ipython_dir = os.path.abspath(os.path.expanduser(self.ipython_dir))
if self.log:
stdout = open(os.path.join(self.ipython_dir, "{0}.controller.out".format(self.profile))... | Start the controller. |
4,118 | def host_info_getter(func, name=None):
name = name or func.__name__
host_info_gatherers[name] = func
return func | The decorated function is added to the process of collecting the host_info.
This just adds the decorated function to the global
``sacred.host_info.host_info_gatherers`` dictionary.
The functions from that dictionary are used when collecting the host info
using :py:func:`~sacred.host_info.get_host_info`... |
4,119 | def ParseOptions(cls, options, configuration_object):
if not isinstance(configuration_object, tools.CLITool):
raise errors.BadConfigObject(
)
filter_collection = getattr(
configuration_object, , None)
if not filter_collection:
raise errors.BadConfigObject(
)
... | Parses and validates options.
Args:
options (argparse.Namespace): parser options.
configuration_object (CLITool): object to be configured by the argument
helper.
Raises:
BadConfigObject: when the configuration object is of the wrong type. |
4,120 | def parse_timezone(matches, default_timezone=UTC):
if matches["timezone"] == "Z":
return UTC
if matches["timezone"] is None:
return default_timezone
sign = matches["tz_sign"]
hours = to_int(matches, "tz_hour")
minutes = to_int(matches, "tz_minute", default_to_zer... | Parses ISO 8601 time zone specs into tzinfo offsets |
4,121 | def description(self, request, id, description):
request.data = json.dumps({
"description": description
})
return self.send(request, id).json()[] | Updates the description of a gist
Arguments:
request: an initial request object
id: the id of the gist we want to edit the description for
description: the new description |
4,122 | def size(self, filename: str) -> int:
yield from self._control_stream.write_command(Command(, filename))
reply = yield from self._control_stream.read_reply()
self.raise_if_not_match(, ReplyCodes.file_status, reply)
try:
return int(reply.text.strip())
excep... | Get size of file.
Coroutine. |
4,123 | def create(self):
self._init_tables()
self._populate_from_lines(self.iterator)
self._update_relations()
self._finalize() | Calls various methods sequentially in order to fully build the
database. |
4,124 | def user_parse(data):
yield , data.get()
yield , data.get()
yield , data.get()
yield , "https://cdn.discordapp.com/avatars/{}/{}.png".format(
data.get(), data.get()) | Parse information from the provider. |
4,125 | def lemke_howson(g, init_pivot=0, max_iter=10**6, capping=None,
full_output=False):
try:
N = g.N
except:
raise TypeError()
if N != 2:
raise NotImplementedError()
payoff_matrices = g.payoff_arrays
nums_actions = g.nums_actions
total_num = sum(nums_ac... | Find one mixed-action Nash equilibrium of a 2-player normal form
game by the Lemke-Howson algorithm [2]_, implemented with
"complementary pivoting" (see, e.g., von Stengel [3]_ for details).
Parameters
----------
g : NormalFormGame
NormalFormGame instance with 2 players.
init_pivot : s... |
4,126 | def jsonify(resource):
response = flask.jsonify(resource.to_dict())
response = add_link_headers(response, resource.links())
return response | Return a Flask ``Response`` object containing a
JSON representation of *resource*.
:param resource: The resource to act as the basis of the response |
4,127 | def object_exists_in_project(obj_id, proj_id):
if obj_id is None:
raise ValueError("Expected obj_id to be a string")
if proj_id is None:
raise ValueError("Expected proj_id to be a string")
if not is_container_id(proj_id):
raise ValueError( % (proj_id,))
return try_call(dxpy.... | :param obj_id: object ID
:type obj_id: str
:param proj_id: project ID
:type proj_id: str
Returns True if the specified data object can be found in the specified
project. |
4,128 | def score(self, X, y=None, **kwargs):
y_pred = self.predict(X)
scores = precision_recall_fscore_support(y, y_pred)
self.support_score_ = scores[-1]
scores = list(scores)
scores[-1] = scores[-1] / scores[-1].sum()
scores = m... | Generates the Scikit-Learn classification report.
Parameters
----------
X : ndarray or DataFrame of shape n x m
A matrix of n instances with m features
y : ndarray or Series of length n
An array or series of target or class values
Returns
------... |
4,129 | def run_with_reloader(main_func, extra_files=None, interval=1):
import signal
signal.signal(signal.SIGTERM, lambda *args: sys.exit(0))
if os.environ.get() == :
thread.start_new_thread(main_func, ())
try:
reloader_loop(extra_files, interval)
except KeyboardInterrupt:
... | Run the given function in an independent python interpreter. |
4,130 | def get_data_source(self):
product_type = self.product_id.split()[1]
if product_type.endswith() or product_type == :
return DataSource.SENTINEL2_L1C
if product_type.endswith() or product_type == :
return DataSource.SENTINEL2_L2A
raise ValueError(.format(s... | The method determines data source from product ID.
:return: Data source of the product
:rtype: DataSource
:raises: ValueError |
4,131 | def record_run(record_type, print_session_id, **kwds):
if print_session_id and record_type != :
raise RuntimeError(
)
cfstore = ConfigStore()
json.dump(data, fp) | Record shell history. |
4,132 | def vm_ip(cls, vm_id):
vm_info = cls.info(vm_id)
for iface in vm_info[]:
if iface[] == :
continue
for ip in iface[]:
return ip[], ip[] | Return the first usable ip address for this vm.
Returns a (version, ip) tuple. |
4,133 | def save_pointings(self):
import tkFileDialog
f=tkFileDialog.asksaveasfile()
i=0
if self.pointing_format.get()==:
f.write()
if self.pointing_format.get()==:
f.write("index\n")
for pointing in self.pointings:
i=i+1
name=pointing... | Print the currently defined FOVs |
4,134 | def autodiscover():
from django.conf import settings
for application in settings.INSTALLED_APPS:
module = import_module(application)
if module_has_submodule(module, ):
emails = import_module( % application)
try:
import_module( % application)
... | Imports all available previews classes. |
4,135 | def minimum_needs_section_header_element(feature, parent):
_ = feature, parent
header = minimum_needs_section_header[]
return header.capitalize() | Retrieve minimum needs section header string from definitions. |
4,136 | def is_descendant_of_vault(self, id_, vault_id):
if self._catalog_session is not None:
return self._catalog_session.is_descendant_of_catalog(id_=id_, catalog_id=vault_id)
return self._hierarchy_session.is_descendant(id_=id_, descendant_id=vault_id) | Tests if an ``Id`` is a descendant of a vault.
arg: id (osid.id.Id): an ``Id``
arg: vault_id (osid.id.Id): the ``Id`` of a vault
return: (boolean) - ``true`` if the ``id`` is a descendant of
the ``vault_id,`` ``false`` otherwise
raise: NotFound - ``vault_id`` not... |
4,137 | def get_docker_network(self, container_id, all_stats):
network_new = {}
try:
netcounters = all_stats["networks"]
except KeyError as e:
logger.debug("docker plugin - Cannot grab NET usage for container {} ({})".format(container_id, ... | Return the container network usage using the Docker API (v1.0 or higher).
Input: id is the full container id
Output: a dict {'time_since_update': 3000, 'rx': 10, 'tx': 65}.
with:
time_since_update: number of seconds elapsed between the latest grab
rx: Number of byte rece... |
4,138 | def location(self):
try:
return self.data.get().get()
except (KeyError, AttributeError):
return self.device_status_simple() | Return the location of the printer. |
4,139 | def _run_code(code, run_globals, init_globals=None,
mod_name=None, mod_fname=None,
mod_loader=None, pkg_name=None):
if init_globals is not None:
run_globals.update(init_globals)
run_globals.update(__name__ = mod_name,
__file__ = mod_fname,
... | Helper to run code in nominated namespace |
4,140 | def open():
global _MATLAB_RELEASE
if is_win:
ret = MatlabConnection()
ret.open()
return ret
else:
if settings.MATLAB_PATH != :
matlab_path = settings.MATLAB_PATH +
elif _MATLAB_RELEASE != :
matlab_path = discover_location(_MATLAB_RELEASE... | Opens MATLAB using specified connection (or DCOM+ protocol on Windows)where matlab_location |
4,141 | def listar(self, id_divisao=None, id_ambiente_logico=None):
url =
if is_valid_int_param(id_divisao) and not is_valid_int_param(
id_ambiente_logico):
url = + str(id_divisao) +
elif is_valid_int_param(id_divisao) and is_valid_int_param(id_ambiente_logico):... | Lista os ambientes filtrados conforme parâmetros informados.
Se os dois parâmetros têm o valor None então retorna todos os ambientes.
Se o id_divisao é diferente de None então retorna os ambientes filtrados
pelo valor de id_divisao.
Se o id_divisao e id_ambiente_logico são diferentes de... |
4,142 | def _get_relationships(model):
relationships = []
for name, relationship in inspect(model).relationships.items():
class_ = relationship.mapper.class_
if relationship.uselist:
rel = ListRelationship(name, relation=class_.__name__)
else:
rel = Relationship(name... | Gets the necessary relationships for the resource
by inspecting the sqlalchemy model for relationships.
:param DeclarativeMeta model: The SQLAlchemy ORM model.
:return: A tuple of Relationship/ListRelationship instances
corresponding to the relationships on the Model.
:rtype: tuple |
4,143 | def configure_callbacks(app):
@app.before_request
def before_request():
from flask import session
session[] = helper.generate_menusystem()
print session[] | Configure application callbacks |
4,144 | def _read_input_csv(in_file):
with io.open(in_file, newline=None) as in_handle:
reader = csv.reader(in_handle)
next(reader)
for line in reader:
if line:
(fc_id, lane, sample_id, genome, barcode) = line[:5]
yield fc_id, lane, sample_id, genom... | Parse useful details from SampleSheet CSV file. |
4,145 | def _evaluate(self,R,z,phi=0.,t=0.):
if self.alpha == 2.:
return nu.log(R**2.+z**2.)/2.
else:
return -(R**2.+z**2.)**(1.-self.alpha/2.)/(self.alpha-2.) | NAME:
_evaluate
PURPOSE:
evaluate the potential at R,z
INPUT:
R - Galactocentric cylindrical radius
z - vertical height
phi - azimuth
t - time
OUTPUT:
Phi(R,z)
HISTORY:
2010-07-10 - Started - Bovy (NY... |
4,146 | def check(self, check_url=None):
if check_url is not None:
self.check_url = self._normalize_check_url(check_url)
response = None
sleeped = 0.0
t = datetime.now()
while not response:
try:
response = requests.get(self.check_url, v... | Checks whether a server is running.
:param str check_url:
URL where to check whether the server is running.
Default is ``"http://{self.host}:{self.port}"``. |
4,147 | def get_sof_term(self, C, rup):
if rup.rake <= -45.0 and rup.rake >= -135.0:
return C["FN_UM"]
elif rup.rake > 45.0 and rup.rake < 135.0:
return C["FRV_UM"]
else:
return 0.0 | In the case of the upper mantle events separate coefficients
are considered for normal, reverse and strike-slip |
4,148 | def __grabHotkeys(self):
c = self.app.configManager
hotkeys = c.hotKeys + c.hotKeyFolders
for item in c.globalHotkeys:
if item.enabled:
self.__enqueue(self.__grabHotkey, item.hotKey, item.modifiers, self.rootWindow)
if self.__needsMu... | Run during startup to grab global and specific hotkeys in all open windows |
4,149 | def _get_upsert_sql(queryset, model_objs, unique_fields, update_fields, returning,
ignore_duplicate_updates=True, return_untouched=False):
model = queryset.model
all_fields = [
field for field in model._meta.fields
if field.column != model._meta.pk.name or not fiel... | Generates the postgres specific sql necessary to perform an upsert (ON CONFLICT)
INSERT INTO table_name (field1, field2)
VALUES (1, 'two')
ON CONFLICT (unique_field) DO UPDATE SET field2 = EXCLUDED.field2; |
4,150 | def dump(bqm, fp, vartype_header=False):
for triplet in _iter_triplets(bqm, vartype_header):
fp.write( % triplet) | Dump a binary quadratic model to a string in COOrdinate format. |
4,151 | def run_command(self, command, arg=None, is_eval=False, member_id=None):
logger.debug("run_command({command}, {arg}, {is_eval}, {member_id})".format(**locals()))
mode = is_eval and or
hostname = None
if isinstance(member_id, int):
hostname = self.member_id_to_host(... | run command on replica set
if member_id is specified command will be execute on this server
if member_id is not specified command will be execute on the primary
Args:
command - command string
arg - command argument
is_eval - if True execute command as eval
... |
4,152 | def genesis_block_audit(genesis_block_stages, key_bundle=GENESIS_BLOCK_SIGNING_KEYS):
gpg2_path = find_gpg2()
if gpg2_path is None:
raise Exception()
log.debug(.format(len(key_bundle)))
res = load_signing_keys(gpg2_path, [key_bundle[kid] for kid in key_bundle])
if not res:
rais... | Verify the authenticity of the stages of the genesis block, optionally with a given set of keys.
Return True if valid
Return False if not |
4,153 | def _Rforce(self,R,z,phi=0.,t=0.):
return -R/(R**2.+z**2.)**(self.alpha/2.) | NAME:
_Rforce
PURPOSE:
evaluate the radial force for this potential
INPUT:
R - Galactocentric cylindrical radius
z - vertical height
phi - azimuth
t - time
OUTPUT:
the radial force
HISTORY:
2010-07-10... |
4,154 | def create_row_to_some_id_col_mapping(id_array):
original_order_unique_ids = get_original_order_unique_ids(id_array)
rows_to_ids = (id_array[:, None] ==
original_order_unique_ids[None, :]).astype(int)
return rows_to_ids | Parameters
----------
id_array : 1D ndarray.
All elements of the array should be ints representing some id related
to the corresponding row.
Returns
-------
rows_to_ids : 2D scipy sparse array.
Will map each row of id_array to the unique values of `id_array`. The
col... |
4,155 | def on_change_checkout(self):
checkout_date = time.strftime(dt)
checkin_date = time.strftime(dt)
if not (checkout_date and checkin_date):
return {: {}}
delta = timedelta(days=1)
dat_a = time.strptime(checkout_date, dt)[:5]
addDays = datetime(*dat_a) +... | When you change checkout or checkin update dummy field
-----------------------------------------------------------
@param self: object pointer
@return: raise warning depending on the validation |
4,156 | def create_server(self, *args, **kwargs):
if not in kwargs:
kwargs[] = False
s = super(HPNova, self).create_server(*args, **kwargs)
return fix_hp_addrs(s) | Wraps :meth:`bang.providers.openstack.Nova.create_server` to apply
hpcloud specialization, namely pulling IP addresses from the hpcloud's
non-standard return values. |
4,157 | def rnn(bptt, vocab_size, num_embed, nhid, num_layers, dropout, num_proj, batch_size):
state_names = []
data = S.var()
weight = S.var("encoder_weight", stype=)
embed = S.sparse.Embedding(data=data, weight=weight, input_dim=vocab_size,
output_dim=num_embed, name=, spar... | word embedding + LSTM Projected |
4,158 | def _tf_restore_batch_dims(x, num_nonbatch_dims, prototype):
assert x.shape.ndims == 1 + num_nonbatch_dims
new_shape = (
prototype.shape.as_list()[:-num_nonbatch_dims] + x.shape.as_list()[1:])
assert None not in new_shape
if new_shape != x.shape.as_list():
x = tf.reshape(x, new_shape)
return x | Reverse op of _tf_flatten_batch_dims.
Un-flatten the first dimension of x to match all but the last
num_nonbatch_dims dimensions of prototype.
Args:
x: a tf.Tensor with 1 + num_nonbatch_dims dimensions
num_nonbatch_dims: an integer
prototype: a tf.Tensor
Returns:
a tf.Tensor |
4,159 | def stop_subscribe(self):
asyncio.gather(*asyncio.Task.all_tasks()).cancel()
self.event_loop.stop()
self.event_loop.close() | This function is used to stop the event loop created when subscribe is called. But this function doesn't
stop the thread and should be avoided until its completely developed. |
4,160 | def add_resource(
self,
base_rule,
base_view,
alternate_view=None,
alternate_rule=None,
id_rule=None,
app=None,
):
if alternate_view:
if not alternate_rule:
id_rule = id_rule or DEFAULT_ID_RULE
alter... | Add route or routes for a resource.
:param str base_rule: The URL rule for the resource. This will be
prefixed by the API prefix.
:param base_view: Class-based view for the resource.
:param alternate_view: If specified, an alternate class-based view for
the resource. Usu... |
4,161 | def search_commits(self, query, sort=github.GithubObject.NotSet, order=github.GithubObject.NotSet, **qualifiers):
assert isinstance(query, (str, unicode)), query
url_parameters = dict()
if sort is not github.GithubObject.NotSet:
assert sort in (, ), sort
url_pa... | :calls: `GET /search/commits <http://developer.github.com/v3/search>`_
:param query: string
:param sort: string ('author-date', 'committer-date')
:param order: string ('asc', 'desc')
:param qualifiers: keyword dict query qualifiers
:rtype: :class:`github.PaginatedList.PaginatedLi... |
4,162 | def clonemedium(medium,
uuid_in=None,
file_in=None,
uuid_out=None,
file_out=None,
mformat=None,
variant=None,
existing=False,
**kwargs):
hypervisor
params =
valid_mediums = (, , )... | Clone a new VM from an existing VM
CLI Example:
.. code-block:: bash
salt 'hypervisor' vboxmanage.clonemedium <name> <new_name> |
4,163 | def save(self, inplace=True):
modified_data = self._modified_data()
if bool(modified_data):
extra = {
: self.__class__.__name__,
: {
: self.id,
: modified_data
}
}
logger.... | Saves all modification to the marker on the server.
:param inplace Apply edits on the current instance or get a new one.
:return: Marker instance. |
4,164 | def _to_dict(self, node):
if node == BLANK_NODE:
return {}
node_type = self._get_node_type(node)
if is_key_value_type(node_type):
nibbles = without_terminator(unpack_to_nibbles(node[0]))
key = b.join([to_string(x) for x in nibbles])
if n... | convert (key, value) stored in this and the descendant nodes
to dict items.
:param node: node in form of list, or BLANK_NODE
.. note::
Here key is in full form, rather than key of the individual node |
4,165 | def get_hkr_state(self):
self.update()
try:
return {
126.5: ,
127.0: ,
self.eco_temperature: ,
self.comfort_temperature:
}[self.target_temperature]
except KeyError:
return | Get the thermostate state. |
4,166 | def drawDisplay( self, painter, option, rect, text ):
painter.setBrush(Qt.NoBrush)
painter.drawText(rect.left() + 3,
rect.top(),
rect.width() - 3,
rect.height(),
option.displayAlignment,
... | Handles the display drawing for this delegate.
:param painter | <QPainter>
option | <QStyleOption>
rect | <QRect>
text | <str> |
4,167 | async def _notify_update(self, name, change_type, change_info=None, directed_client=None):
for monitor in self._monitors:
try:
result = monitor(name, change_type, change_info, directed_client=directed_client)
if inspect.isawaitable(result):
... | Notify updates on a service to anyone who cares. |
4,168 | def gzip_dir(path, compresslevel=6):
for f in os.listdir(path):
full_f = os.path.join(path, f)
if not f.lower().endswith("gz"):
with open(full_f, ) as f_in, \
GzipFile(.format(full_f), ,
compresslevel=compresslevel) as f_out:
... | Gzips all files in a directory. Note that this is different from
shutil.make_archive, which creates a tar archive. The aim of this method
is to create gzipped files that can still be read using common Unix-style
commands like zless or zcat.
Args:
path (str): Path to directory.
compressl... |
4,169 | def get_instance(self, payload):
return FaxMediaInstance(self._version, payload, fax_sid=self._solution[], ) | Build an instance of FaxMediaInstance
:param dict payload: Payload response from the API
:returns: twilio.rest.fax.v1.fax.fax_media.FaxMediaInstance
:rtype: twilio.rest.fax.v1.fax.fax_media.FaxMediaInstance |
4,170 | def __analizar_evento(self, ret):
"Comprueba y extrae el wvento informativo si existen en la respuesta XML"
evt = ret.get()
if evt:
self.Eventos = [evt]
self.Evento = "%(codigo)s: %(descripcion)s" % evt | Comprueba y extrae el wvento informativo si existen en la respuesta XML |
4,171 | def _parse_saved_model(path):
path_to_pb = _get_saved_model_proto_path(path)
file_content = tf_v1.gfile.Open(path_to_pb, "rb").read()
saved_model = saved_model_pb2.SavedModel()
try:
saved_model.ParseFromString(file_content)
except message.DecodeError as e:
raise IOError("Cannot parse file %s: %s... | Reads the savedmodel.pb file containing `SavedModel`. |
4,172 | def check_user(self, todays_facts):
interval = self.conf_notify_interval
if interval <= 0 or interval >= 121:
return
now = dt.datetime.now()
message = None
last_activity = todays_facts[-1] if todays_facts else None
if last_activity and not... | check if we need to notify user perhaps |
4,173 | def get_notifications(self, **params):
response = self._get(, , params=params)
return self._make_api_object(response, Notification) | https://developers.coinbase.com/api/v2#list-notifications |
4,174 | def get_queryset(self):
queryset = super(IndexView, self).get_queryset()
search_form = self.get_search_form()
if search_form.is_valid():
query_str = search_form.cleaned_data.get(, ).strip()
queryset = self.model.objects.search(query_str)
return ... | Returns queryset instance.
:rtype: django.db.models.query.QuerySet. |
4,175 | def extrap_sec(data, dist, depth, w1=1.0, w2=0):
from scipy.interpolate import interp1d
new_data1 = []
for row in data:
mask = ~np.isnan(row)
if mask.any():
y = row[mask]
if y.size == 1:
row = np.repeat(y, len(mask))
else:
... | Extrapolates `data` to zones where the shallow stations are shadowed by
the deep stations. The shadow region usually cannot be extrapolates via
linear interpolation.
The extrapolation is applied using the gradients of the `data` at a certain
level.
Parameters
----------
data : array_like
... |
4,176 | def get_keys_from_ldap(self, username=None):
result_dict = {}
filter = []
if username is not None:
filter.append(.format(username))
attributes = [, ]
results = self.client.search(filter, attributes)
for result in results:
result_dict[resul... | Fetch keys from ldap.
Args:
username Username associated with keys to fetch (optional)
Returns:
Array of dictionaries in '{username: [public keys]}' format |
4,177 | def pack(fmt, *args, **kwargs):
endian, target = kwargs.get(), kwargs.get()
endian = endian if endian is not None else target.endian if target is not None else pwnypack.target.target.endian
if fmt and fmt[0] not in :
if endian is pwnypack.target.Target.Endian.little:
fmt = + fmt
... | pack(fmt, v1, v2, ..., endian=None, target=None)
Return a string containing the values v1, v2, ... packed according to the
given format. The actual packing is performed by ``struct.pack`` but the
byte order will be set according to the given `endian`, `target` or
byte order of the global target.
A... |
4,178 | def set_fingerprint(fullpath, fingerprint=None):
try:
fingerprint = fingerprint or utils.file_fingerprint(fullpath)
record = model.FileFingerprint.get(file_path=fullpath)
if record:
record.set(fingerprint=fingerprint,
file_mtime=os.stat(fullpath).st_m... | Set the last known modification time for a file |
4,179 | def transform(self, vector):
if isinstance(vector, RDD):
vector = vector.map(_convert_to_vector)
else:
vector = _convert_to_vector(vector)
return self.call("transform", vector) | Applies transformation on a vector or an RDD[Vector].
.. note:: In Python, transform cannot currently be used within
an RDD transformation or action.
Call transform directly on the RDD instead.
:param vector: Vector or RDD of Vector to be transformed. |
4,180 | def get(self, timeout=None, block=True):
_vv and IOLOG.debug(,
self, timeout, block)
self._lock.acquire()
try:
if self.closed:
raise LatchError()
i = len(self._sleeping)
if len(self._queue) > i:
... | Return the next enqueued object, or sleep waiting for one.
:param float timeout:
If not :data:`None`, specifies a timeout in seconds.
:param bool block:
If :data:`False`, immediately raise
:class:`mitogen.core.TimeoutError` if the latch is empty.
:raises mi... |
4,181 | def Update(self, attribute=None):
client_id = self.urn.Split()[0]
if attribute == "CONTAINS":
flow_id = flow.StartAFF4Flow(
client_id=client_id,
flow_name="ListDirectory",
pathspec=self.real_pathspec,
not... | Refresh an old attribute.
Note that refreshing the attribute is asynchronous. It does not change
anything about the current object - you need to reopen the same URN some
time later to get fresh data.
Attributes: CONTAINS - Refresh the content of the directory listing.
Args:
attribute: An at... |
4,182 | def uniform_spacings(N):
z = np.cumsum(-np.log(random.rand(N + 1)))
return z[:-1] / z[-1] | Generate ordered uniform variates in O(N) time.
Parameters
----------
N: int (>0)
the expected number of uniform variates
Returns
-------
(N,) float ndarray
the N ordered variates (ascending order)
Note
----
This is equivalent to::
from numpy import rando... |
def running_apps(device_id):
    """Return the running apps for *device_id* as JSON (HTTP GET).

    Aborts with 403 for an invalid device id and 404 for an
    unknown device.
    """
    if not is_valid_device_id(device_id):
        abort(403)
    try:
        device = devices[device_id]
    except KeyError:
        abort(404)
    return jsonify(running_apps=device.running_apps)
4,184 | def f_remove_child(self, name, recursive=False, predicate=None):
if name not in self._children:
raise ValueError( %
(self.v_full_name, name))
else:
child = self._children[name]
if (name not in self._links and
n... | Removes a child of the group.
Note that groups and leaves are only removed from the current trajectory in RAM.
If the trajectory is stored to disk, this data is not affected. Thus, removing children
can be only be used to free RAM memory!
If you want to free memory on disk via your sto... |
4,185 | def transform(self, audio_f=None, jam=None, y=None, sr=None, crop=False):
if y is None:
if audio_f is None:
raise ParameterError(
)
y, sr = librosa.load(audio_f, sr=sr, mono=True)
if sr is None:
... | Apply the transformations to an audio file, and optionally JAMS object.
Parameters
----------
audio_f : str
Path to audio file
jam : optional, `jams.JAMS`, str or file-like
Optional JAMS object/path to JAMS file/open file descriptor.
If provided, th... |
4,186 | def type_errors(self, context=None):
try:
results = self._infer_augassign(context=context)
return [
result
for result in results
if isinstance(result, util.BadBinaryOperationMessage)
]
except exceptions.Inferenc... | Get a list of type errors which can occur during inference.
Each TypeError is represented by a :class:`BadBinaryOperationMessage` ,
which holds the original exception.
:returns: The list of possible type errors.
:rtype: list(BadBinaryOperationMessage) |
4,187 | def __get_ml_configuration_status(self, job_id):
failure_message = "Get status on ml configuration failed"
response = self._get_success_json(self._get(
+ job_id + , None, failure_message=failure_message))[
]
return response | After invoking the create_ml_configuration async method, you can use this method to
check on the status of the builder job.
:param job_id: The identifier returned from create_ml_configuration
:return: Job status |
def add(self, pattern):
    """Decorator factory: register the decorated function under *pattern*.

    The decorated function is appended to ``self.functions`` as a
    ``(function, pattern)`` pair and returned unchanged.
    """
    def register(func):
        self.functions.append((func, pattern))
        return func
    return register
4,189 | def pool_delete(storage_pool, logger):
path = etree.fromstring(storage_pool.XMLDesc(0)).find().text
volumes_delete(storage_pool, logger)
try:
storage_pool.destroy()
except libvirt.libvirtError:
logger.exception("Unable to delete storage pool.")
try:
if os.path.exists(p... | Storage Pool deletion, removes all the created disk images within the pool and the pool itself. |
4,190 | def update_floatingip_statuses_cfg(self, context, router_id, fip_statuses):
with context.session.begin(subtransactions=True):
for (floatingip_id, status) in six.iteritems(fip_statuses):
LOG.debug("New status for floating IP %(floatingip_id)s: "
"%(s... | Update operational status for one or several floating IPs.
This is called by Cisco cfg agent to update the status of one or
several floatingips.
:param context: contains user information
:param router_id: id of router associated with the floatingips
:param router_id: dict with ... |
4,191 | def _get_centered_z1pt0(self, sites):
mean_z1pt0 = (-7.15 / 4.) * np.log(((sites.vs30) ** 4. + 570.94 ** 4.)
/ (1360 ** 4. + 570.94 ** 4.))
centered_z1pt0 = sites.z1pt0 - np.exp(mean_z1pt0)
return centered_z1pt0 | Get z1pt0 centered on the Vs30- dependent avarage z1pt0(m)
California and non-Japan regions |
4,192 | def finalize_canonical_averages(
number_of_nodes, ps, canonical_averages, alpha,
):
spanning_cluster = (
(
in
canonical_averages.dtype.names
) and
in canonical_averages.dtype.names
)
ret = np.empty_like(
canonical_averages,
dt... | Finalize canonical averages |
def extras_msg(extras):
    """Create an error-message fragment for extra items or properties.

    Returns a ``(listing, verb)`` tuple: a comma-separated string of
    ``repr``'d extras, and "was"/"were" matching its plurality.
    """
    verb = "was" if len(extras) == 1 else "were"
    listing = ", ".join(repr(extra) for extra in extras)
    return listing, verb
4,194 | def check_token_payment(name, token_price, stacks_payment_info):
token_units = stacks_payment_info[]
tokens_paid = stacks_payment_info[]
tokens_paid = int(tokens_paid)
if token_units != TOKEN_TYPE_STACKS:
log.warning(.format(token_units, TOKEN_TYPE_STACKS))
return {: False}
... | Check a token payment was enough and was of the right type
Return {'status': True, 'tokens_paid': ..., 'token_units': ...} if so
Return {'status': False} if not |
4,195 | def get(self, names, country_id=None, language_id=None, retheader=False):
responses = [
self._get_chunk(name_chunk, country_id, language_id)
for name_chunk
in _chunked(names, Genderize.BATCH_SIZE)
]
data = list(chain.from_iterable(
respons... | Look up gender for a list of names.
Can optionally refine search with locale info.
May make multiple requests if there are more names than
can be retrieved in one call.
:param names: List of names.
:type names: Iterable[str]
:param country_id: Optional ISO 3166-1 alpha-2... |
4,196 | def presnyields(self, *cycles, **keyw):
abund_list = []
xm_list = []
if ("xm" in keyw) == False:
keyw["xm"] = "mass"
if ("abund" in keyw) == False:
keyw["abund"] = "iso_massf"
if ("mrem" in keyw) == False:
mrem = 0.
else:
... | This function calculates the presupernova yields of a full
structure profile from a remnant mass, mrem, to the surface.
Parameters
----------
cycles : variadic tuple
cycle[0] is the cycle to perform the presupernova yields
calculations on. If cycle[1] is also sp... |
4,197 | def add_instance(model, _commit=True, **kwargs):
try:
model = get_model(model)
except ImportError:
return None
instance = model(**kwargs)
db.session.add(instance)
try:
if _commit:
db.session.commit()
else:
db.session.flush()
retu... | Add instance to database.
:param model: a string, model name in rio.models
:param _commit: control whether commit data to database or not. Default True.
:param \*\*kwargs: persisted data.
:return: instance id. |
4,198 | def addSuffixToExtensions(toc):
new_toc = TOC()
for inm, fnm, typ in toc:
if typ in (, ):
binext = os.path.splitext(fnm)[1]
if not os.path.splitext(inm)[1] == binext:
inm = inm + binext
new_toc.append((inm, fnm, typ))
return new_toc | Returns a new TOC with proper library suffix for EXTENSION items. |
def add(self, command_template, job_class):
    """Build a JobTemplate from *command_template* and push it onto the queue."""
    template_job = JobTemplate(
        command_template.alias,
        command_template=command_template,
        depends_on=command_template.depends_on,
        queue=self.queue,
        job_class=job_class,
    )
    self.queue.push(template_job)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.