Unnamed: 0 int64 0 389k | code stringlengths 26 79.6k | docstring stringlengths 1 46.9k |
|---|---|---|
364,000 | def by_classifiers(cls, session, classifiers):
return cls.find(session,
join=(cls.classifiers,),
where=(Classifier.name.in_(classifiers),),
) | Get releases for given classifiers.
:param session: SQLAlchemy session
:type session: :class:`sqlalchemy.Session`
:param classifiers: classifiers
:type classifiers: unicode
:return: release instances
:rtype: generator of :class:`pyshop.models.Release` |
364,001 | def get_template_loaders():
try:
from django.template.engine import Engine
except ImportError:
Engine = None
if Engine:
try:
engine = Engine.get_default()
except ImproperlyConfigured:
loaders = []
else:
loaders = engine.temp... | Compatibility method to fetch the template loaders.
Source: https://github.com/django-debug-toolbar/django-debug-toolbar/blob/ece1c2775af108a92a0ef59636266b49e286e916/debug_toolbar/compat.py |
364,002 | def min(self):
return round(np.min(self.array), self.precision)\
if len(self.array) else None | -> #float :func:numpy.min of the timing intervals |
364,003 | def register(cls):
SOME-MSG
key = (cls.msgtype, cls.revision)
if key in index:
raise ProtocolError("Duplicate message specification encountered: %r" % key)
index[key] = cls
return cls | Decorator to add a Message (and its revision) to the Protocol index.
Example:
.. code-block:: python
@register
class some_msg_1(Message):
msgtype = 'SOME-MSG'
revision = 1
@classmethod
def create(cls, **metadata):
... |
364,004 | def get_user_deliveryserver(self, domainid, serverid):
return self.api_call(
ENDPOINTS[][],
dict(domainid=domainid, serverid=serverid)) | Get a user delivery server |
364,005 | def __autoconnect_signals(self):
dic = {}
for name in dir(self):
method = getattr(self, name)
if (not isinstance(method, collections.Callable)):
continue
assert(name not in dic)
dic[name] = method
for xml in self... | This is called during view registration, to autoconnect
signals in glade file with methods within the controller |
364,006 | def get_rtc_table(self):
rtc_table = self._global_tables.get(RF_RTC_UC)
if not rtc_table:
rtc_table = RtcTable(self._core_service, self._signal_bus)
self._global_tables[RF_RTC_UC] = rtc_table
self._tables[(None, RF_RTC_UC)] = rtc_table
return... | Returns global RTC table.
Creates the table if it does not exist. |
364,007 | def _dir2(obj, pref=, excl=(), slots=None, itor=):
if slots:
if hasattr(obj, slots):
s = {}
for c in type(obj).mro():
for a in getattr(c, slots, ()):
if hasattr(obj, a):
s.set... | Return an attribute name, object 2-tuple for certain
attributes or for the ``__slots__`` attributes of the
given object, but not both. Any iterator referent
objects are returned with the given name if the
latter is non-empty. |
364,008 | def validate_config(self, organization, config, actor=None):
if config.get():
client = self.get_client(actor)
try:
repo = client.get_repo(config[])
except Exception as e:
self.raise_error(e)
else:
config[] =... | ```
if config['foo'] and not config['bar']:
raise PluginError('You cannot configure foo with bar')
return config
``` |
364,009 | def http_request(self, path="/", method="GET", host=None, port=None, json=False, data=None):
host = host or
port = port or 8080
url = get_url(host=host, port=port, path=path)
return self.http_session.request(method, url, json=json, data=data) | perform a HTTP request
:param path: str, path within the request, e.g. "/api/version"
:param method: str, HTTP method
:param host: str, if None, set to 127.0.0.1
:param port: str or int, if None, set to 8080
:param json: bool, should we expect json?
:param data: data to ... |
364,010 | def scale_WCS(self,pixel_scale,retain=True):
retain
_ratio = pixel_scale / self.pscale
self.naxis1 /= _ratio
self.naxis2 /= _ratio
self.crpix1 = self.naxis1/2.
self.crpix2 = self.naxis2/2.
if retain:
self.cd11 *= _ratio
... | Scale the WCS to a new pixel_scale. The 'retain' parameter
[default value: True] controls whether or not to retain the original
distortion solution in the CD matrix. |
364,011 | def inline(self) -> str:
return "{0}:{1}:{2}:{3}".format(self.pubkey_from, self.pubkey_to,
self.timestamp.number, self.signatures[0]) | Return inline document string
:return: |
364,012 | def apply_uncertainties(self, branch_ids, source_group):
branchset = self.root_branchset
branchsets_and_uncertainties = []
branch_ids = list(branch_ids[::-1])
while branchset is not None:
branch = branchset.get_branch_by_id(branch_ids.pop(-1))
if not bra... | Parse the path through the source model logic tree and return
"apply uncertainties" function.
:param branch_ids:
List of string identifiers of branches, representing the path
through source model logic tree.
:param source_group:
A group of sources
:re... |
364,013 | def best_matches(self, target, choices):
all = self.all_matches
try:
matches = next(all(target, choices, group=True))
for match in matches:
yield match
except StopIteration:
pass | \
Get all the first choices listed from best match to worst match,
optionally grouping on equal matches.
:param target:
:param choices:
:param group: |
364,014 | def reload_pimms():
import sys, six
try: from importlib import reload
except: from imp import reload
reload(sys.modules[])
reload(sys.modules[])
reload(sys.modules[])
reload(sys.modules[])
reload(sys.modules[])
reload(sys.modules[])
return sys.modules[] | reload_pimms() reloads the entire pimms module and returns it. |
364,015 | def complete_credit_note(self, credit_note_it, complete_dict):
return self._create_put_request(
resource=CREDIT_NOTES,
billomat_id=credit_note_it,
command=COMPLETE,
send_data=complete_dict
) | Completes an credit note
:param complete_dict: the complete dict with the template id
:param credit_note_it: the credit note id
:return: Response |
364,016 | def str_between(str_, startstr, endstr):
r
if startstr is None:
startpos = 0
else:
startpos = str_.find(startstr) + len(startstr)
if endstr is None:
endpos = None
else:
endpos = str_.find(endstr)
if endpos == -1:
endpos = None
newstr = str_[sta... | r"""
gets substring between two sentianl strings
Example:
>>> # DISABLE_DOCTEST
>>> from utool.util_str import * # NOQA
>>> import utool as ut
>>> str_ = '\n INSERT INTO vsone(\n'
>>> startstr = 'INSERT'
>>> endstr = '('
>>> result = str_between(s... |
364,017 | def UsesArtifact(self, artifacts):
if isinstance(artifacts, string_types):
return artifacts in self.artifacts
else:
return any(True for artifact in artifacts if artifact in self.artifacts) | Determines if the check uses the specified artifact.
Args:
artifacts: Either a single artifact name, or a list of artifact names
Returns:
True if the check uses a specific artifact. |
364,018 | def add_slave_server(self):
master = self.args.config[1]
slave = self.args.add_slave
self.reset_server_env(slave, self.configure)
if self.prompt_check("Update package at slave server"):
self.update_source_list()
self.common_update_sys()
... | 添加slave服务器
:return: |
364,019 | def iwls(y, x, family, offset, y_fix,
ini_betas=None, tol=1.0e-8, max_iter=200, wi=None):
n_iter = 0
diff = 1.0e6
if ini_betas is None:
betas = np.zeros((x.shape[1], 1), np.float)
else:
betas = ini_betas
if isinstance(family, Binomial):
y = family.link._clean(... | Iteratively re-weighted least squares estimation routine
Parameters
----------
y : array
n*1, dependent variable
x : array
n*k, designs matrix of k independent variables
family : family object
probability models: Gauss... |
364,020 | def pid(self):
try:
with open(self.pidfile, ) as pidfile:
try:
pid = int(pidfile.read().strip())
except ValueError:
return None
try:
os.kill(pid, 0)
except OSE... | Get the pid which represents a daemonized process.
The result should be None if the process is not running. |
364,021 | def channel(self, channel_id=None, auto_encode_decode=True):
try:
return self.channels[channel_id]
except KeyError:
return self.Channel(self, channel_id,
auto_encode_decode=auto_encode_decode) | Fetch a Channel object identified by the numeric channel_id, or
create that object if it doesn't already exist. See Channel for meaning
of auto_encode_decode. If the channel already exists, the auto_* flag
will not be updated. |
364,022 | def remove_remap_file(filename):
global file2file_remap
if filename in file2file_remap:
retval = file2file_remap[filename]
del file2file_remap[filename]
return retval
return None | Remove any mapping for *filename* and return that if it exists |
364,023 | def consumer(self, name):
return type(self)(self.database, self.name, self.keys, name) | Create a new consumer for the :py:class:`ConsumerGroup`.
:param name: name of consumer
:returns: a :py:class:`ConsumerGroup` using the given consumer name. |
364,024 | def try_enqueue(conn, queue_name, msg):
logger.debug(, queue_name)
qurl = conn.get_queue_url(QueueName=queue_name)[]
logger.debug(, qurl)
resp = conn.send_message(
QueueUrl=qurl,
MessageBody=msg,
DelaySeconds=0
)
logger.debug(, queue_name,
resp[])
... | Try to enqueue a message. If it succeeds, return the message ID.
:param conn: SQS API connection
:type conn: :py:class:`botocore:SQS.Client`
:param queue_name: name of queue to put message in
:type queue_name: str
:param msg: JSON-serialized message body
:type msg: str
:return: message ID
... |
364,025 | def add_threat_list(self, threat_list):
q =
params = [threat_list.threat_type, threat_list.platform_type, threat_list.threat_entry_type]
with self.get_cursor() as dbc:
dbc.execute(q, params) | Add threat list entry if it does not exist. |
364,026 | def search(term, lang=None):
r = requests.get(
"http://anisearch.outrance.pl/index.php",
params={
"task": "search",
"query": term,
"langs": "ja,x-jat,en" if lang is None else .join(lang)
}
)
if r.status... | As a convenient alternative to downloading and parsing a dump,
This function will instead query the AID search provided by Eloyard.
This is the same information available at http://anisearch.outrance.pl/.
:param str term: Search Term
:param list lang: A list of language codes which dete... |
364,027 | def save(self, *args, **kwargs):
self.clean()
if not self.slug:
self.slug = slugify(self.name)
super(SpecialCoverage, self).save(*args, **kwargs)
if self.query and self.query != {}:
self._save_percolator() | Saving ensures that the slug, if not set, is set to the slugified name. |
364,028 | def add_new_refs(cls, manifest, current_project, node, macros):
manifest = manifest.deepcopy(config=current_project)
raise dbt.exceptions.raise_duplicate_resource_name(
manifest.nodes[node.unique_id], node
)
manifest.nodes[node.unique_id] = node
... | Given a new node that is not in the manifest, copy the manifest and
insert the new node into it as if it were part of regular ref
processing |
364,029 | def cleanup(self, **kwargs):
params = kwargs.get()
LOG.info("asa_cleanup: tenant %(tenant)s %(in_vlan)d %(out_vlan)d"
" %(in_ip)s %(in_mask)s %(out_ip)s %(out_mask)s",
{: params.get(),
: params.get(),
: params.get(),
... | cleanup ASA context for an edge tenant pair. |
364,030 | def _validate_config(self, config):
if config is None:
raise ValueError("OIDCFrontend conf canNone{}' for OpenID Connect frontend.".format(k)) | Validates that all necessary config parameters are specified.
:type config: dict[str, dict[str, Any] | str]
:param config: the module config |
364,031 | def manage_beacons(self, tag, data):
func = data.get(, None)
name = data.get(, None)
beacon_data = data.get(, None)
include_pillar = data.get(, None)
include_opts = data.get(, None)
funcs = {: (, (name, beacon_data)),
: (, (name, beacon_data)),
... | Manage Beacons |
364,032 | def set_char(key, value):
global _chars
category = _get_char_category(key)
if not category:
raise KeyError
_chars[category][key] = value | Updates charters used to render components. |
364,033 | def callers(variant_obj, category=):
calls = set()
for caller in CALLERS[category]:
if variant_obj.get(caller[]):
calls.add((caller[], variant_obj[caller[]]))
return list(calls) | Return info about callers. |
364,034 | def to_dict(self, ignore_none: bool=True, force_value: bool=True, ignore_empty: bool=False) -> dict:
return traverse_dict(self._dict, ignore_none, force_value, ignore_empty) | From instance to dict
:param ignore_none: Properties which is None are excluded if True
:param force_value: Transform to value using to_value (default: str()) of ValueTransformer which inherited if True
:param ignore_empty: Properties which is empty are excluded if True
:return: Dict
... |
364,035 | def getmessage(self) -> str:
image = {}
for key, default in vars(self.__class__).items():
if not key.startswith() and key != and (not key in vars(QueueMessage).items()):
if isinstance(default, datetime.date):
... | parse self into unicode string as message content |
364,036 | def BackAssign(cls,
other_entity_klass,
this_entity_backpopulate_field,
other_entity_backpopulate_field,
is_many_to_one=False):
data = dict()
for _, other_klass in other_entity_klass.Subclasses():
other_fiel... | Assign defined one side mapping relationship to other side.
For example, each employee belongs to one department, then one department
includes many employees. If you defined each employee's department,
this method will assign employees to ``Department.employees`` field.
This is an one t... |
364,037 | def lp7(self, reaction_subset):
if self._zl is None:
self._add_maximization_vars()
positive = set(reaction_subset) - self._flipped
negative = set(reaction_subset) & self._flipped
v = self._v.set(positive)
zl = self._zl.set(positive)
cs = self._prob... | Approximately maximize the number of reaction with flux.
This is similar to FBA but approximately maximizing the number of
reactions in subset with flux > epsilon, instead of just maximizing the
flux of one particular reaction. LP7 prefers "flux splitting" over
"flux concentrating". |
364,038 | def bandpass(s, f1, f2, order=2, fs=1000.0, use_filtfilt=False):
b, a = butter(order, [f1 * 2 / fs, f2 * 2 / fs], btype=)
if use_filtfilt:
return filtfilt(b, a, s)
return lfilter(b, a, s) | -----
Brief
-----
For a given signal s passes the frequencies within a certain range (between f1 and f2) and rejects (attenuates) the
frequencies outside that range by applying a Butterworth digital filter.
-----------
Description
-----------
Signals may have frequency components of mul... |
364,039 | def add_called_sequence(self, section, name, sequence, qstring):
event_group = .format(section)
if not event_group in self.handle.handle[.format(self.group_name)]:
self.handle.add_analysis_subgroup(self.group_name, event_group)
fastq_text = .format(name, sequence, qstring)
... | Add basecalled sequence data
:param section: ['template', 'complement' or '2D']
:param name: The record ID to use for the fastq.
:param sequence: The called sequence.
:param qstring: The quality string. |
364,040 | def info(self, section=None):
cmd = [b]
if section:
cmd.append(section)
return self._execute(cmd, format_callback=common.format_info_response) | The INFO command returns information and statistics about the server
in a format that is simple to parse by computers and easy to read by
humans.
The optional parameter can be used to select a specific section of
information:
- server: General information about the Redis se... |
364,041 | def removereadergroup(self, group):
hresult, hcontext = SCardEstablishContext(SCARD_SCOPE_USER)
if 0 != hresult:
raise error(
+ \
SCardGetErrorMessage(hresult))
try:
hresult = SCardForgetReaderGroup(hcontext, group)
i... | Remove a reader group |
364,042 | def _filter_repeating_items(download_list):
unique_requests_map = {}
mapping_list = []
unique_download_list = []
for download_request in download_list:
if download_request not in unique_requests_map:
unique_requests_map[download_request] = len(unique_... | Because of data_filter some requests in download list might be the same. In order not to download them again
this method will reduce the list of requests. It will also return a mapping list which can be used to
reconstruct the previous list of download requests.
:param download_list: List of do... |
364,043 | def find_minimal_node(self, node_head, discriminator):
min_key = lambda cur_node: cur_node.data[discriminator]
stack = []
candidates = []
isFinished = False
while isFinished is False:
if node_head is not None:
stack.append(no... | !
@brief Find minimal node in line with coordinate that is defined by discriminator.
@param[in] node_head (node): Node of KD tree from that search should be started.
@param[in] discriminator (uint): Coordinate number that is used for comparison.
@return (node) Min... |
364,044 | def login(self, username=None, password=None, login_url=None,
auth_url=None):
self._username = username or self._username
self._password = password or self._password
self.login_url = login_url or self.login_url
self.auth_url = auth_url or self.auth_url
if s... | This will automatically log the user into the pre-defined account
Feel free to overwrite this with an endpoint on endpoint load
:param username: str of the user name to login in as
:param password: str of the password to login as
:param login_url: str of the url for t... |
364,045 | def cancelScannerSubscription(self, dataList: ScanDataList):
self.client.cancelScannerSubscription(dataList.reqId)
self.wrapper.endSubscription(dataList) | Cancel market data subscription.
https://interactivebrokers.github.io/tws-api/market_scanners.html
Args:
dataList: The scan data list that was obtained from
:meth:`.reqScannerSubscription`. |
364,046 | def build_state_assignment(self, runnable, regime, state_assignment):
return [.format(\
state_assignment.variable,
self.build_expression_from_tree(runnable,
regime,
state_assignment.expressi... | Build state assignment code.
@param state_assignment: State assignment object
@type state_assignment: lems.model.dynamics.StateAssignment
@return: Generated state assignment code
@rtype: string |
364,047 | def create_api_key(self, body, **kwargs):
kwargs[] = True
if kwargs.get():
return self.create_api_key_with_http_info(body, **kwargs)
else:
(data) = self.create_api_key_with_http_info(body, **kwargs)
return data | Create a new API key. # noqa: E501
An endpoint for creating a new API key. **Example usage:** `curl -X POST https://api.us-east-1.mbedcloud.com/v3/api-keys -d '{\"name\": \"MyKey1\"}' -H 'content-type: application/json' -H 'Authorization: Bearer API_KEY'` # noqa: E501
This method makes a synchronou... |
364,048 | def build_tree(self, data, tagname, attrs=None, depth=0):
r
if data is None:
data =
indent = ( % (self.__options[] * depth)) if self.__options[] else
if isinstance(data, utils.DictTypes):
if self.__options[] and self.check_structure(data.keys()):
... | r"""Build xml tree.
:param data: data for build xml.
:param tagname: element tag name.
:param attrs: element attributes. Default:``None``.
:type attrs: dict or None
:param depth: element depth of the hierarchy. Default:``0``.
:type depth: int |
364,049 | def get_queryset(self):
"Restrict to a single kind of event, if any, and include Venue data."
qs = super().get_queryset()
kind = self.get_event_kind()
if kind is not None:
qs = qs.filter(kind=kind)
qs = qs.select_related()
return qs | Restrict to a single kind of event, if any, and include Venue data. |
364,050 | def get(self):
if self.num_inst == 0:
return (self.name, float())
else:
return (self.name, self.sum_metric / self.num_inst) | Gets the current evaluation result.
Returns
-------
names : list of str
Name of the metrics.
values : list of float
Value of the evaluations. |
364,051 | def build_from_file(self, dockerfile, tag, **kwargs):
with DockerContext(dockerfile, finalize=True) as ctx:
return self.build_from_context(ctx, tag, **kwargs) | Builds a docker image from the given :class:`~dockermap.build.dockerfile.DockerFile`. Use this as a shortcut to
:meth:`build_from_context`, if no extra data is added to the context.
:param dockerfile: An instance of :class:`~dockermap.build.dockerfile.DockerFile`.
:type dockerfile: dockermap.bu... |
364,052 | def get_topology(self):
topology = {: self._name,
: ,
: {},
: ,
: }
if self._links:
topology[][] = self._links
if self._nodes:
topology[][] = self._nodes
if self._servers:
... | Get the converted topology ready for JSON encoding
:return: converted topology assembled into a single dict
:rtype: dict |
364,053 | def port_profile_vlan_profile_switchport_mode_vlan_mode(self, **kwargs):
config = ET.Element("config")
port_profile = ET.SubElement(config, "port-profile", xmlns="urn:brocade.com:mgmt:brocade-port-profile")
name_key = ET.SubElement(port_profile, "name")
name_key.text = kwargs.po... | Auto Generated Code |
364,054 | def lang_items(self, lang=None):
if lang is None:
lang = self.language
yield from self.cache.setdefault(lang, {}).items() | Yield pairs of (id, string) for the given language. |
364,055 | def bullet_ant():
locals().update(default())
import pybullet_envs
env =
max_length = 1000
steps = 3e7
update_every = 60
return locals() | Configuration for PyBullet's ant task. |
364,056 | def to_reminders(self, ical, label=None, priority=None, tags=None,
tail=None, sep=" ", postdate=None, posttime=None):
if not hasattr(ical, ):
return
reminders = [self.to_remind(vevent, label, priority, tags, tail, sep,
postd... | Return Remind commands for all events of a iCalendar |
364,057 | def x_at_y(self, y, reverse=False):
logger.info(,
{"name": self.__class__, "y": y, "rev": reverse})
x_handle, y_handle = self.x, self.y
if reverse:
x_handle, y_handle = self.x[::-1], self.y[::-1]
cond = y_handle >= y
ind... | Calculates inverse profile - for given y returns x such that f(x) = y
If given y is not found in the self.y, then interpolation is used.
By default returns first result looking from left,
if reverse argument set to True,
looks from right. If y is outside range of self.y
then np.n... |
364,058 | def digital_write_pullup(pin_num, value, hardware_addr=0):
_get_pifacedigital(hardware_addr).gppub.bits[pin_num].value = value | Writes the value to the input pullup specified.
.. note:: This function is for familiarality with users of other types of
IO board. Consider accessing the ``gppub`` attribute of a
PiFaceDigital object:
>>> pfd = PiFaceDigital(hardware_addr)
>>> hex(pfd.gppub.value)
0xff
>... |
364,059 | def assign_interval(data):
if not dd.get_coverage_interval(data):
vrs = dd.get_variant_regions_merged(data)
callable_file = dd.get_sample_callable(data)
if vrs:
callable_size = pybedtools.BedTool(vrs).total_coverage()
else:
callable_size = pybedtools.BedT... | Identify coverage based on percent of genome covered and relation to targets.
Classifies coverage into 3 categories:
- genome: Full genome coverage
- regional: Regional coverage, like exome capture, with off-target reads
- amplicon: Amplication based regional coverage without off-target reads |
364,060 | def translate_src(src, cortex):
src_new = np.array(np.where(np.in1d(cortex, src))[0], dtype=np.int32)
return src_new | Convert source nodes to new surface (without medial wall). |
364,061 | def _tot_unhandled_hosts_by_state(self, state):
return sum(1 for h in self.hosts if h.state == state and h.state_type == u and
h.is_problem and not h.problem_has_been_acknowledged) | Generic function to get the number of unhandled problem hosts in the specified state
:param state: state to filter on
:type state:
:return: number of host in state *state* and which are not acknowledged problems
:rtype: int |
364,062 | def encode_xml(obj, E=None):
if E is None:
E = default_E
elif isinstance(E, str):
E = ElementMaker(namespace=xml.openmath_ns,
nsmap={ E: xml.openmath_ns })
name = ""
attr = {}
children = []
if isinstance(obj, om.CDBaseAttribute) and obj.cdbase is ... | Encodes an OpenMath object as an XML node.
:param obj: OpenMath object (or related item) to encode as XML.
:type obj: OMAny
:param ns: Namespace prefix to use for
http://www.openmath.org/OpenMath", or None if default namespace.
:type ns: str, None
:return: The XML node representing the Op... |
364,063 | def match(self, node):
u
if node.type == self.syms.term:
div_idx = find_division(node)
if div_idx is not False:
if not is_floaty(node, div_idx):
return clone_div_operands(node, div_idx)
... | u"""
Since the tree needs to be fixed once and only once if and only if it
matches, we can start discarding matches after the first. |
364,064 | def delete_autostart_entry():
autostart_file = Path(common.AUTOSTART_DIR) / "autokey.desktop"
if autostart_file.exists():
autostart_file.unlink()
_logger.info("Deleted old autostart entry: {}".format(autostart_file)) | Remove a present autostart entry. If none is found, nothing happens. |
364,065 | def make_compute_file(self):
string = ""
try:
vardict_items = self.vardict.iteritems()
except AttributeError:
vardict_items = self.vardict.items()
for key, val in vardict_items:
default_format = get_default_format(val)
... | Make the compute file from the self.vardict and self.vardictformat |
364,066 | def flush(self):
if len(self.rows) != 0:
self.table.mutate_rows(self.rows)
self.total_mutation_count = 0
self.total_size = 0
self.rows = [] | Sends the current. batch to Cloud Bigtable.
For example:
.. literalinclude:: snippets.py
:start-after: [START bigtable_batcher_flush]
:end-before: [END bigtable_batcher_flush] |
364,067 | def show_download_links(self):
if self.options.file_type == "all" and self.version == "dev":
self.options.file_type = "svn"
if self.options.file_type == "svn":
version = "dev"
else:
if self.version:
version = self.ve... | Query PyPI for pkg download URI for a packge
@returns: 0 |
364,068 | def initialize(self):
if self._sdrClassifier is None:
self._sdrClassifier = SDRClassifierFactory.create(
steps=self.stepsList,
alpha=self.alpha,
verbosity=self.verbosity,
implementation=self.implementation,
) | Overrides :meth:`nupic.bindings.regions.PyRegion.PyRegion.initialize`.
Is called once by NuPIC before the first call to compute().
Initializes self._sdrClassifier if it is not already initialized. |
364,069 | def display_required_items(msg_type):
print("Configure a profile for: " + msg_type)
print("You will need the following information:")
for k, v in CONFIG[msg_type]["settings"].items():
print(" * " + v)
print("Authorization/credentials required:")
for k, v in CONFIG[msg_type]["auth"].it... | Display the required items needed to configure a profile for the given
message type.
Args:
:msg_type: (str) message type to create config entry. |
364,070 | def shard_filename(path, tag, shard_num, total_shards):
return os.path.join(
path, "%s-%s-%s-%.5d-of-%.5d" % (_PREFIX, _ENCODE_TAG, tag, shard_num, total_shards)) | Create filename for data shard. |
364,071 | def cmd_arp_sniff(iface):
conf.verb = False
if iface:
conf.iface = iface
print("Waiting for ARP packets...", file=sys.stderr)
sniff(filter="arp", store=False, prn=procpkt) | Listen for ARP packets and show information for each device.
Columns: Seconds from last packet | IP | MAC | Vendor
Example:
\b
1 192.168.0.1 a4:08:f5:19:17:a4 Sagemcom Broadband SAS
7 192.168.0.2 64:bc:0c:33:e5:57 LG Electronics (Mobile Communications)
2 192.168.0.5 00:c... |
364,072 | def get_lldp_neighbor_detail_output_lldp_neighbor_detail_remote_management_address(self, **kwargs):
config = ET.Element("config")
get_lldp_neighbor_detail = ET.Element("get_lldp_neighbor_detail")
config = get_lldp_neighbor_detail
output = ET.SubElement(get_lldp_neighbor_detail, ... | Auto Generated Code |
364,073 | def consumption(self):
consumption = defaultdict(float)
for load in self.graph.nodes_by_attribute():
for sector, val in load.consumption.items():
consumption[sector] += val
return pd.Series(consumption) | Consumption in kWh per sector for whole grid
Returns
-------
:pandas:`pandas.Series<series>`
Indexed by demand sector |
364,074 | def open(self, *, autocommit=False):
if self.connection is not None:
raise Exception("Connection already set")
self.connection = self.core.open()
self.autocommit = autocommit
if self._search_path:
self._configure_connection(
"search_path",
self._search_path)
return s... | Sets the connection with the core's open method.
:param autocommit: the default autocommit state
:type autocommit: boolean
:return: self |
364,075 | def image_members(self):
uri = "/%s/members" % self.uri_base
resp, resp_body = self.api.method_get(uri)
return resp_body | Returns a json-schema document that represents an image members entity
(a container of member entities). |
364,076 | def woodbury_chol(self):
if self._woodbury_chol is None:
if self._woodbury_inv is not None:
winv = np.atleast_3d(self._woodbury_inv)
self._woodbury_chol = np.zeros(winv.shape)
for p in range(winv.shape[-1]):
se... | return $L_{W}$ where L is the lower triangular Cholesky decomposition of the Woodbury matrix
$$
L_{W}L_{W}^{\top} = W^{-1}
W^{-1} := \texttt{Woodbury inv}
$$ |
364,077 | def read_pot_status(self):
a = self.cnxn.xfer([0x13])[0]
sleep(10e-3)
res = []
for i in range(4):
res.append(self.cnxn.xfer([0x00])[0])
sleep(0.1)
return {
: res[0],
: res[1],
: ... | Read the status of the digital pot. Firmware v18+ only.
The return value is a dictionary containing the following as
unsigned 8-bit integers: FanON, LaserON, FanDACVal, LaserDACVal.
:rtype: dict
:Example:
>>> alpha.read_pot_status()
{
'LaserDACVal': 230,
... |
364,078 | def delete(self):
try:
self._api.table_delete(self._name_parts)
except google.datalab.utils.RequestException:
pass
except Exception as e:
raise e
return not self.exists() | Delete the table.
Returns:
True if the Table no longer exists; False otherwise. |
364,079 | def token(self, value):
if value and not isinstance(value, Token):
value = Token(value)
self._token = value | Setter to convert any token dict into Token instance |
364,080 | async def auto_add(self, device, recursive=None, automount=True):
device, created = await self._find_device_losetup(device)
if created and recursive is False:
return device
if device.is_luks_cleartext and self.udisks.version_info >= (2, 7, 0):
await sleep(1.5) ... | Automatically attempt to mount or unlock a device, but be quiet if the
device is not supported.
:param device: device object, block device path or mount path
:param bool recursive: recursively mount and unlock child devices
:returns: whether all attempted operations succeeded |
364,081 | def _default_capacity(self, value):
if value is not None:
return value
if self.default_return_capacity or self.rate_limiters:
return INDEXES
return NONE | Get the value for ReturnConsumedCapacity from provided value |
364,082 | def _reader_thread_func(self, read_stdout: bool) -> None:
if read_stdout:
read_stream = self._proc.stdout
write_stream = self._stdout
else:
read_stream = self._proc.stderr
write_stream = self._stderr
assert read_stream is not Non... | Thread function that reads a stream from the process
:param read_stdout: if True, then this thread deals with stdout. Otherwise it deals with stderr. |
364,083 | def create_gtk_grid(self, row_spacing=6, col_spacing=6, row_homogenous=False, col_homogenous=True):
grid_lang = Gtk.Grid()
grid_lang.set_column_spacing(row_spacing)
grid_lang.set_row_spacing(col_spacing)
grid_lang.set_border_width(12)
grid_lang.set_row_homogeneous(row_ho... | Function creates a Gtk Grid with spacing
and homogeous tags |
364,084 | def get_reply(self, method, reply):
reply = self.replyfilter(reply)
sax = Parser()
replyroot = sax.parse(string=reply)
plugins = PluginContainer(self.options().plugins)
plugins.message.parsed(reply=replyroot)
soapenv = replyroot.getChild()
soapenv.promote... | Process the I{reply} for the specified I{method} by sax parsing the
I{reply} and then unmarshalling into python object(s).
@param method: The name of the invoked method.
@type method: str
@param reply: The reply XML received after invoking the specified
method.
@type ... |
364,085 | def update_or_create(cls, **kwargs):
keys = kwargs.pop() if in kwargs else []
filter_kwargs = subdict(kwargs, keys)
if filter_kwargs == {}:
obj = None
else:
obj = cls.first(**filter_kwargs)
if obj is not None:
for key, value in kwargs... | Checks if an instance already exists by filtering with the
kwargs. If yes, updates the instance with new kwargs and
returns that instance. If not, creates a new
instance with kwargs and returns it.
Args:
**kwargs: The keyword arguments which are used for filtering
... |
364,086 | def process(self):
for locale in self._fields.keys():
self._client._put(
"{0}/files/{1}/process".format(
self.__class__.base_url(
self.space.id,
self.id,
environment_id=self._environ... | Calls the process endpoint for all locales of the asset.
API reference: https://www.contentful.com/developers/docs/references/content-management-api/#/reference/assets/asset-processing |
364,087 | def select_segments_by_definer(segment_file, segment_name=None, ifo=None):
from glue.ligolw.ligolw import LIGOLWContentHandler as h; lsctables.use_in(h)
indoc = ligolw_utils.load_filename(segment_file, False, contenthandler=h)
segment_table = table.get_table(indoc, )
seg_def_table = table.get_tab... | Return the list of segments that match the segment name
Parameters
----------
segment_file: str
path to segment xml file
segment_name: str
Name of segment
ifo: str, optional
Returns
-------
seg: list of segments |
364,088 | def _parse_time_to_freeze(time_to_freeze_str):
if time_to_freeze_str is None:
time_to_freeze_str = datetime.datetime.utcnow()
if isinstance(time_to_freeze_str, datetime.datetime):
time_to_freeze = time_to_freeze_str
elif isinstance(time_to_freeze_str, datetime.date):
time_to_fr... | Parses all the possible inputs for freeze_time
:returns: a naive ``datetime.datetime`` object |
364,089 | def import_all_modules():
for module_folder in get_installed_modules():
try:
if module_folder in module_data:
raise seash_exceptions.ModuleImportError("Module already imported")
module_data[module_folder] = import_module(module_folder)
except seash_exceptions.ModuleImportError, e:
... | <Purpose>
Imports all modules within the modules folder. This should only be called once
throughout the entire execution of seash.
<Side Effects>
Modules that don't have collisions will have their commanddicts and
helptexts loaded and returned.
<Exceptions>
ImportError: There is a... |
def backspace(self, n=1, interval=0, pre_dl=None, post_dl=None):
    """Press the backspace key *n* times.

    :param n: how many times to tap backspace.
    :param interval: pause between consecutive taps.
    :param pre_dl: optional delay before the first tap.
    :param post_dl: optional delay after the last tap.
    """
    keyboard = self.k
    self.delay(pre_dl)
    keyboard.tap_key(keyboard.backspace_key, n, interval)
    self.delay(post_dl)
def proto_01_01_HP010(abf=exampleABF):
    """Hyperpolarization step protocol: run the membrane test, validate the
    sweep, and save the resulting plot tagged "tau".
    """
    membrane = swhlab.memtest
    membrane.memtest(abf)
    membrane.checkSweep(abf)
    swhlab.plot.save(abf, tag="tau")
364,092 | def register_all_shape_checker(shape_checker_function,
arg_types,
exclude=(),
ignore_existing=False):
for t1 in arg_types:
for t2 in arg_types:
if (t1, t2) in exclude:
continue
if ignore_existing an... | Register a gradient adder for all combinations of given types.
This is a convenience shorthand for calling register_add_grad when registering
gradient adders for multiple types that can be interchanged for the purpose
of addition.
Args:
shape_checker_function: A shape checker, see register_shape_checker.
... |
364,093 | def _apply_axes_mapping(self, target, inverse=False):
if len(target) != self.ndim:
raise ValueError(
.format(len(target),
self.ndim))
if inverse:
axis_map = self._inverse_axes_map
... | Apply the transposition to the target iterable.
Parameters
----------
target - iterable
The iterable to transpose. This would be suitable for things
such as a shape as well as a list of ``__getitem__`` keys.
inverse - bool
Whether to map old dimension... |
def gatk_type(self):
    """Retrieve type of GATK jar, allowing support for older GATK lite.

    Returns "gatk4" for 4.x-series jars, "restricted" for the 2.4+
    licensed GATK releases, and "lite" for GATK-lite (2.3.x and earlier).

    The version string comes from ``self.gatk_major_version()`` and is
    compared numerically component by component; this replaces the
    deprecated ``distutils.version.LooseVersion`` (PEP 632 — distutils
    was removed in Python 3.12) while preserving its ordering for
    dotted numeric versions.
    """
    def _numeric_version(version_str):
        # Keep the leading run of numeric components; "-" and "_" act as
        # separators, matching how LooseVersion split them.
        components = []
        for piece in version_str.replace("-", ".").replace("_", ".").split("."):
            if not piece.isdigit():
                break
            components.append(int(piece))
        return tuple(components)

    version = _numeric_version(self.gatk_major_version())
    if version > (3, 9):
        return "gatk4"
    elif version > (2, 3):
        return "restricted"
    else:
        return "lite"
364,095 | def get(self, path):
service_name = gssapi.Name(.format(self.url.netloc),
gssapi.NameType.hostbased_service)
ctx = gssapi.SecurityContext(usage="initiate", name=service_name)
data = b64encode(ctx.step()).decode()
connection =... | Perform a GET request with GSSAPI authentication |
364,096 | def create_namespaced_ingress(self, namespace, body, **kwargs):
kwargs[] = True
if kwargs.get():
return self.create_namespaced_ingress_with_http_info(namespace, body, **kwargs)
else:
(data) = self.create_namespaced_ingress_with_http_info(namespace, body, **kw... | create_namespaced_ingress # noqa: E501
create an Ingress # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_namespaced_ingress(namespace, body, async_req=True)
>>> resul... |
364,097 | def do_dice_roll():
options = get_options()
dice = Dice(options.sides)
rolls = [dice.roll() for n in range(options.number)]
for roll in rolls:
print(, roll)
if options.number > 1:
print(, sum(rolls)) | Roll n-sided dice and return each result and the total |
364,098 | def set_contributor_details(self, contdetails):
if not isinstance(contdetails, bool):
raise TwitterSearchException(1008)
self.arguments.update({:
if contdetails
else }) | Sets 'contributor_details' parameter used to enhance the \
contributors element of the status response to include \
the screen_name of the contributor. By default only \
the user_id of the contributor is included
:param contdetails: Boolean triggering the usage of the parameter
... |
364,099 | def get_languages(self):
if self._languages:
return self._languages
self._languages = cache.get(self.PAGE_LANGUAGES_KEY % (self.id))
if self._languages is not None:
return self._languages
languages = [c[] for
c in Content.obje... | Return a list of all used languages for this page. |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.