code stringlengths 75 104k | docstring stringlengths 1 46.9k |
|---|---|
def set_doc_ids(self, doc_ids):
""" Build xml documents from a list of document ids.
Args:
doc_ids -- A document id or a lost of those.
"""
if isinstance(doc_ids, list):
self.set_documents(dict.fromkeys(doc_ids))
else:
self.set_documen... | Build xml documents from a list of document ids.
Args:
doc_ids -- A document id or a lost of those. |
def iterGet(self, objectType, *args, **coolArgs) :
"""Same as get. But retuns the elements one by one, much more efficient for large outputs"""
for e in self._makeLoadQuery(objectType, *args, **coolArgs).iterRun() :
if issubclass(objectType, pyGenoRabaObjectWrapper) :
yield objectType(wrapped_object_and_bag... | Same as get. But retuns the elements one by one, much more efficient for large outputs |
def bandit(self, choice_rewards):
"""
Multi-armed bandit method which chooses the arm for which the upper
confidence bound (UCB) of expected reward is greatest.
If there are multiple arms with the same UCB1 index, then one is chosen
at random.
An explanation is here:
... | Multi-armed bandit method which chooses the arm for which the upper
confidence bound (UCB) of expected reward is greatest.
If there are multiple arms with the same UCB1 index, then one is chosen
at random.
An explanation is here:
https://www.cs.bham.ac.uk/internal/courses/robot... |
def create_storage_policy(policy_name, policy_dict, service_instance=None):
'''
Creates a storage policy.
Supported capability types: scalar, set, range.
policy_name
Name of the policy to create.
The value of the argument will override any existing name in
``policy_dict``.
... | Creates a storage policy.
Supported capability types: scalar, set, range.
policy_name
Name of the policy to create.
The value of the argument will override any existing name in
``policy_dict``.
policy_dict
Dictionary containing the changes to apply to the policy.
(... |
def newDocPI(self, name, content):
"""Creation of a processing instruction element. """
ret = libxml2mod.xmlNewDocPI(self._o, name, content)
if ret is None:raise treeError('xmlNewDocPI() failed')
__tmp = xmlNode(_obj=ret)
return __tmp | Creation of a processing instruction element. |
def make_tables(grammar, precedence):
"""Generates the ACTION and GOTO tables for the grammar.
Returns:
action - dict[state][lookahead] = (action, ...)
goto - dict[state][just_reduced] = new_state
"""
ACTION = {}
GOTO = {}
labels = {}
d... | Generates the ACTION and GOTO tables for the grammar.
Returns:
action - dict[state][lookahead] = (action, ...)
goto - dict[state][just_reduced] = new_state |
def on_data(self, ws, message, message_type, fin):
"""
Callback executed when message is received from the server.
:param ws: Websocket client
:param message: utf-8 string which we get from the server.
:param message_type: Message type which is either ABNF.OPCODE_TEXT or ABNF.OP... | Callback executed when message is received from the server.
:param ws: Websocket client
:param message: utf-8 string which we get from the server.
:param message_type: Message type which is either ABNF.OPCODE_TEXT or ABNF.OPCODE_BINARY
:param fin: continue flag. If 0, the data continues... |
def extract():
"""Extract melting points from patents."""
Paragraph.parsers = [CompoundParser(), ChemicalLabelParser(), MpParser()]
Table.parsers = []
patents = []
for root, dirs, files in os.walk('../examples/mp/grants'):
for filename in files:
if not filename.endswith('.xml'):
... | Extract melting points from patents. |
def from_expr(cls, expr):
"""Instantiate proto-expression from the given Expression"""
return cls(expr.args, expr.kwargs, cls=expr.__class__) | Instantiate proto-expression from the given Expression |
async def cluster_reset_all_nodes(self, soft=True):
"""
Send CLUSTER RESET to all nodes in the cluster
If 'soft' is True then it will send 'SOFT' argument
If 'soft' is False then it will send 'HARD' argument
Sends to all nodes in the cluster
"""
option = 'SOFT' ... | Send CLUSTER RESET to all nodes in the cluster
If 'soft' is True then it will send 'SOFT' argument
If 'soft' is False then it will send 'HARD' argument
Sends to all nodes in the cluster |
def reset_case(self):
""" Returns the case to its original state.
"""
for bus in self.market.case.buses:
bus.p_demand = self.pdemand[bus]
for task in self.tasks:
for g in task.env.generators:
g.p = task.env._g0[g]["p"]
g.p_max = tas... | Returns the case to its original state. |
def validate_lv_districts(session, nw):
'''Validate if total load of a grid in a pkl file is what expected from LV districts
Parameters
----------
session : sqlalchemy.orm.session.Session
Database session
nw:
The network
Returns
-------
DataFrame
compare_by... | Validate if total load of a grid in a pkl file is what expected from LV districts
Parameters
----------
session : sqlalchemy.orm.session.Session
Database session
nw:
The network
Returns
-------
DataFrame
compare_by_district
DataFrame
compare_by_load... |
def has_index(self, name):
"""
Returns whether this table has an Index with the given name.
:param name: The index name
:type name: str
:rtype: bool
"""
name = self._normalize_identifier(name)
return name in self._indexes | Returns whether this table has an Index with the given name.
:param name: The index name
:type name: str
:rtype: bool |
def start_range(self):
"""Similar to the junction range but don't need to check for leftmost or rightmost"""
if len(self._exons) == 0: return None
return GenomicRange(self._exons[0].chr,
min([x.start for x in self._exons]),# must be part of junction
max([x.start for x in self... | Similar to the junction range but don't need to check for leftmost or rightmost |
def CNOT(control, target):
"""Produces a controlled-NOT (controlled-X) gate::
CNOT = [[1, 0, 0, 0],
[0, 1, 0, 0],
[0, 0, 0, 1],
[0, 0, 1, 0]]
This gate applies to two qubit arguments to produce the controlled-not gate instruction.
:param control: Th... | Produces a controlled-NOT (controlled-X) gate::
CNOT = [[1, 0, 0, 0],
[0, 1, 0, 0],
[0, 0, 0, 1],
[0, 0, 1, 0]]
This gate applies to two qubit arguments to produce the controlled-not gate instruction.
:param control: The control qubit.
:param target... |
def delete(network):
"""libvirt network cleanup.
@raise: libvirt.libvirtError.
"""
try:
network.destroy()
except libvirt.libvirtError as error:
raise RuntimeError("Unable to destroy network: {}".format(error)) | libvirt network cleanup.
@raise: libvirt.libvirtError. |
def get_and_check_tasks_for(context, task, msg_prefix=''):
"""Given a parent task, return the reason the parent task was spawned.
``.taskcluster.yml`` uses this to know whether to spawn an action,
cron, or decision task definition. ``tasks_for`` must be a valid one defined in the context.
Args:
... | Given a parent task, return the reason the parent task was spawned.
``.taskcluster.yml`` uses this to know whether to spawn an action,
cron, or decision task definition. ``tasks_for`` must be a valid one defined in the context.
Args:
task (dict): the task definition.
msg_prefix (str): the... |
def attach(self, file):
"""Attaches the queried record with `file` and returns the response after validating the response
:param file: File to attach to the record
:raise:
:NoResults: if query returned no results
:MultipleResults: if query returned more than one result (... | Attaches the queried record with `file` and returns the response after validating the response
:param file: File to attach to the record
:raise:
:NoResults: if query returned no results
:MultipleResults: if query returned more than one result (currently not supported)
:r... |
def removeIndividual(self, individual):
"""
Removes the specified individual from this repository.
"""
q = models.Individual.delete().where(
models.Individual.id == individual.getId())
q.execute() | Removes the specified individual from this repository. |
def connected_channel(self):
""" Returns the voice channel the player is connected to. """
if not self.channel_id:
return None
return self._lavalink.bot.get_channel(int(self.channel_id)) | Returns the voice channel the player is connected to. |
def fetch_stackexchange(
dataset,
test_set_fraction=0.2,
min_training_interactions=1,
data_home=None,
indicator_features=True,
tag_features=False,
download_if_missing=True,
):
"""
Fetch a dataset from the `StackExchange network <http://stackexchange.com/>`_.
The datasets contain... | Fetch a dataset from the `StackExchange network <http://stackexchange.com/>`_.
The datasets contain users answering questions: an interaction is defined as a user
answering a given question.
The following datasets from the StackExchange network are available:
- CrossValidated: From stats.stackexchang... |
def _GetContents(self):
"""Read the directory, making sure we close the file if the format
is bad."""
try:
self._RealGetContents()
except BadZipfile:
if not self._filePassed:
self.fp.close()
self.fp = None
raise | Read the directory, making sure we close the file if the format
is bad. |
def _client(self, host, port, unix_socket, auth):
"""Return a redis client for the configuration.
:param str host: redis host
:param int port: redis port
:rtype: redis.Redis
"""
db = int(self.config['db'])
timeout = int(self.config['timeout'])
try:
cli = redis.Redis... | Return a redis client for the configuration.
:param str host: redis host
:param int port: redis port
:rtype: redis.Redis |
def _get_lib_modules(self, full):
"""Returns a list of the modules in the same folder as the one being wrapped for
compilation as a linked library.
:arg full: when True, all the code files in the source file's directory are considered
as dependencies; otherwise only those explicitly n... | Returns a list of the modules in the same folder as the one being wrapped for
compilation as a linked library.
:arg full: when True, all the code files in the source file's directory are considered
as dependencies; otherwise only those explicitly needed are kept. |
def forall(self, method):
"""
IT IS EXPECTED THE method ACCEPTS (value, coord, cube), WHERE
value - VALUE FOUND AT ELEMENT
coord - THE COORDINATES OF THE ELEMENT (PLEASE, READ ONLY)
cube - THE WHOLE CUBE, FOR USE IN WINDOW FUNCTIONS
"""
for c in self._all_combos()... | IT IS EXPECTED THE method ACCEPTS (value, coord, cube), WHERE
value - VALUE FOUND AT ELEMENT
coord - THE COORDINATES OF THE ELEMENT (PLEASE, READ ONLY)
cube - THE WHOLE CUBE, FOR USE IN WINDOW FUNCTIONS |
def parse_environment(fields, context, topics):
"""Resolve the be.yaml environment key
Features:
- Lists, e.g. ["/path1", "/path2"]
- Environment variable references, via $
- Replacement field references, e.g. {key}
- Topic references, e.g. {1}
"""
def _resolve_environ... | Resolve the be.yaml environment key
Features:
- Lists, e.g. ["/path1", "/path2"]
- Environment variable references, via $
- Replacement field references, e.g. {key}
- Topic references, e.g. {1} |
def matches(property_name, regex, *, present_optional=False, message=None):
"""Returns a Validation that checks a property against a regex."""
def check(val):
"""Checks that a value matches a scope-enclosed regex."""
if not val:
return present_optional
else:
return True if rege... | Returns a Validation that checks a property against a regex. |
def greet(event: str):
"""Greets appropriately (from http://blog.ketchum.com/how-to-write-10-common-holiday-greetings/) """
greetings = "Happy"
if event == "Christmas":
greetings = "Merry"
if event == "Kwanzaa":
greetings = "Joyous"
if event == "wishes":
greetings = "Warm"
... | Greets appropriately (from http://blog.ketchum.com/how-to-write-10-common-holiday-greetings/) |
def set_xlimits_widgets(self, set_min=True, set_max=True):
"""Populate axis limits GUI with current plot values."""
xmin, xmax = self.tab_plot.ax.get_xlim()
if set_min:
self.w.x_lo.set_text('{0}'.format(xmin))
if set_max:
self.w.x_hi.set_text('{0}'.format(xmax)) | Populate axis limits GUI with current plot values. |
def __has_language(self, bundleId, languageId):
"""Returns ``True`` if the bundle has the language, ``False`` otherwise
"""
return True if self.__get_language_data(bundleId=bundleId,
languageId=languageId) \
else False | Returns ``True`` if the bundle has the language, ``False`` otherwise |
def get_form_kwargs(self, **kwargs):
''' Pass along the request data to the form '''
kwargs = super(PrivateLessonStudentInfoView, self).get_form_kwargs(**kwargs)
kwargs['request'] = self.request
kwargs['payAtDoor'] = self.payAtDoor
return kwargs | Pass along the request data to the form |
def template_to_dict_find(item, debug=0):
"""
DEPRECATED: Returns infobox parsetree value using etree.find()
Older template_to_dict() algorithm, uses etree.xpath() to "lookup"
or find specific elements, but fails to include tail text in the
order it is found, and does not _exclude_ <ext> tags (refe... | DEPRECATED: Returns infobox parsetree value using etree.find()
Older template_to_dict() algorithm, uses etree.xpath() to "lookup"
or find specific elements, but fails to include tail text in the
order it is found, and does not _exclude_ <ext> tags (references,
etc.). Compare to template_to_dict_iter(). |
def burstColumn(self, column, columnMatchingSegments, prevActiveCells,
prevWinnerCells, learn):
"""
Activates all of the cells in an unpredicted active column, chooses a winner
cell, and, if learning is turned on, learns on one segment, growing a new
segment if necessary.
@param c... | Activates all of the cells in an unpredicted active column, chooses a winner
cell, and, if learning is turned on, learns on one segment, growing a new
segment if necessary.
@param column (int)
Index of bursting column.
@param columnMatchingSegments (iter)
Matching segments in this column, or N... |
def validate(self, value):
"""Validate value."""
len_ = len(value)
if self.minimum_value is not None and len_ < self.minimum_value:
tpl = "Value '{val}' length is lower than allowed minimum '{min}'."
raise ValidationError(tpl.format(
val=value, min=self.m... | Validate value. |
def _write(self, data):
"""
Note: print()-statements cause to multiple write calls.
(write('line') and write('\n')). Of course we don't want to call
`run_in_terminal` for every individual call, because that's too
expensive, and as long as the newline hasn't been... | Note: print()-statements cause to multiple write calls.
(write('line') and write('\n')). Of course we don't want to call
`run_in_terminal` for every individual call, because that's too
expensive, and as long as the newline hasn't been written, the
text itself is a... |
def convertDay(self, day, prefix="", weekday=False):
"""Convert a datetime object representing a day into a human-ready
string that can be read, spoken aloud, etc.
Args:
day (datetime.date): A datetime object to be converted into text.
prefix (str): An optional argument ... | Convert a datetime object representing a day into a human-ready
string that can be read, spoken aloud, etc.
Args:
day (datetime.date): A datetime object to be converted into text.
prefix (str): An optional argument that prefixes the converted
string. For example,... |
def is_pinyin(s):
"""Check if *s* consists of valid Pinyin."""
re_pattern = ('(?:%(word)s|[ \t%(punctuation)s])+' %
{'word': zhon.pinyin.word,
'punctuation': zhon.pinyin.punctuation})
return _is_pattern_match(re_pattern, s) | Check if *s* consists of valid Pinyin. |
def _printDescription(self, hrlinetop=True):
"""generic method to print out a description"""
if hrlinetop:
self._print("----------------")
NOTFOUND = "[not found]"
if self.currentEntity:
obj = self.currentEntity['object']
label = obj.bestLabel() or NOT... | generic method to print out a description |
def _string_to_record_type(string):
'''
Return a string representation of a DNS record type to a
libcloud RecordType ENUM.
:param string: A record type, e.g. A, TXT, NS
:type string: ``str``
:rtype: :class:`RecordType`
'''
string = string.upper()
record_type = getattr(RecordType, ... | Return a string representation of a DNS record type to a
libcloud RecordType ENUM.
:param string: A record type, e.g. A, TXT, NS
:type string: ``str``
:rtype: :class:`RecordType` |
def addBarcodesToIdentifier(read, UMI, cell):
'''extract the identifier from a read and append the UMI and
cell barcode before the first space'''
read_id = read.identifier.split(" ")
if cell == "":
read_id[0] = read_id[0] + "_" + UMI
else:
read_id[0] = read_id[0] + "_" + cell + "_"... | extract the identifier from a read and append the UMI and
cell barcode before the first space |
def _get_ckptmgr_process(self):
''' Get the command to start the checkpoint manager process'''
ckptmgr_main_class = 'org.apache.heron.ckptmgr.CheckpointManager'
ckptmgr_ram_mb = self.checkpoint_manager_ram / (1024 * 1024)
ckptmgr_cmd = [os.path.join(self.heron_java_home, "bin/java"),
... | Get the command to start the checkpoint manager process |
def db990(self, value=None):
""" Corresponds to IDD Field `db990`
Dry-bulb temperature corresponding to 90.0% annual cumulative
frequency of occurrence (cold conditions)
Args:
value (float): value for IDD Field `db990`
Unit: C
if `value` is N... | Corresponds to IDD Field `db990`
Dry-bulb temperature corresponding to 90.0% annual cumulative
frequency of occurrence (cold conditions)
Args:
value (float): value for IDD Field `db990`
Unit: C
if `value` is None it will not be checked against the
... |
def _readintbe(self, length, start):
"""Read bits and interpret as a big-endian signed int."""
if length % 8:
raise InterpretError("Big-endian integers must be whole-byte. "
"Length = {0} bits.", length)
return self._readint(length, start) | Read bits and interpret as a big-endian signed int. |
def get_exception_information(self, index):
"""
@type index: int
@param index: Index into the exception information block.
@rtype: int
@return: Exception information DWORD.
"""
if index < 0 or index > win32.EXCEPTION_MAXIMUM_PARAMETERS:
raise IndexE... | @type index: int
@param index: Index into the exception information block.
@rtype: int
@return: Exception information DWORD. |
def interrupt(self, data=None):
"""
中断处理
:param data: 要响应的数据,不传即不响应
:return:
"""
self.interrupted = True
if data is not None:
return self.write(data)
else:
return True | 中断处理
:param data: 要响应的数据,不传即不响应
:return: |
def _create_destination(self, server_id, dest_url, owned):
"""
Create a listener destination instance in the Interop namespace of a
WBEM server and return that instance.
In order to catch any changes the server applies, the instance is
retrieved again using the instance path ret... | Create a listener destination instance in the Interop namespace of a
WBEM server and return that instance.
In order to catch any changes the server applies, the instance is
retrieved again using the instance path returned by instance creation.
Parameters:
server_id (:term:`s... |
def absent(name, profile="splunk"):
'''
Ensure a search is absent
.. code-block:: yaml
API Error Search:
splunk_search.absent
The following parameters are required:
name
This is the name of the search in splunk
'''
ret = {
'name': name,
'changes'... | Ensure a search is absent
.. code-block:: yaml
API Error Search:
splunk_search.absent
The following parameters are required:
name
This is the name of the search in splunk |
def strip_tx_attenuation(self, idx):
"""strip(1 byte) tx_attenuation
:idx: int
:return: int
idx
:return: int
"""
idx = Radiotap.align(idx, 2)
tx_attenuation, = struct.unpack_from('<H', self._rtap, idx)
return idx + 2, tx_attenuation | strip(1 byte) tx_attenuation
:idx: int
:return: int
idx
:return: int |
def GenCatchallState(self):
"""Generate string matching state rules.
This sets up initial state handlers that cover both the 'INITIAL' state
and the intermediate content between fields.
The lexer acts on items with precedence:
- continuation characters: use the fast forward state rules.
- ... | Generate string matching state rules.
This sets up initial state handlers that cover both the 'INITIAL' state
and the intermediate content between fields.
The lexer acts on items with precedence:
- continuation characters: use the fast forward state rules.
- field separators: finalize processi... |
def format(self, number, **kwargs):
"""Format a given number.
Format a number, with comma-separated thousands and
custom precision/decimal places
Localise by overriding the precision and thousand / decimal separators
2nd parameter `precision` can be an object matching `settings... | Format a given number.
Format a number, with comma-separated thousands and
custom precision/decimal places
Localise by overriding the precision and thousand / decimal separators
2nd parameter `precision` can be an object matching `settings.number`
Args:
number (TYP... |
def _write_to_file(self, fileinfo, filename):
"""Low-level function for writing text of editor to file.
Args:
fileinfo: FileInfo object associated to editor to be saved
filename: str with filename to save to
This is a low-level function that only saves the text t... | Low-level function for writing text of editor to file.
Args:
fileinfo: FileInfo object associated to editor to be saved
filename: str with filename to save to
This is a low-level function that only saves the text to file in the
correct encoding without doing any ... |
def validate(self):
"""Checks whether this OmapiStartupMessage matches the implementation.
@raises OmapiError:
"""
if self.implemented_protocol_version != self.protocol_version:
raise OmapiError("protocol mismatch")
if self.implemented_header_size != self.header_size:
raise OmapiError("header size misma... | Checks whether this OmapiStartupMessage matches the implementation.
@raises OmapiError: |
def K_diaphragm_valve_Crane(D=None, fd=None, style=0):
r'''Returns the loss coefficient for a diaphragm valve of either weir
(`style` = 0) or straight-through (`style` = 1) as shown in [1]_.
.. math::
K = K_1 = K_2 = N\cdot f_d
For style 0 (weir), N = 149; for style 1 (straight... | r'''Returns the loss coefficient for a diaphragm valve of either weir
(`style` = 0) or straight-through (`style` = 1) as shown in [1]_.
.. math::
K = K_1 = K_2 = N\cdot f_d
For style 0 (weir), N = 149; for style 1 (straight through), N = 39.
Parameters
----------
D... |
def _get_spark_app_ids(self, running_apps, requests_config, tags):
"""
Traverses the Spark application master in YARN to get a Spark application ID.
Return a dictionary of {app_id: (app_name, tracking_url)} for Spark applications
"""
spark_apps = {}
for app_id, (app_name... | Traverses the Spark application master in YARN to get a Spark application ID.
Return a dictionary of {app_id: (app_name, tracking_url)} for Spark applications |
def get_house_conn_gen_load(graph, node):
"""
Get generation capacity/ peak load of neighboring house connected to main
branch
Parameters
----------
graph : :networkx:`NetworkX Graph Obj< >`
Directed graph
node : graph node
Node of the main branch of LV grid
Returns
... | Get generation capacity/ peak load of neighboring house connected to main
branch
Parameters
----------
graph : :networkx:`NetworkX Graph Obj< >`
Directed graph
node : graph node
Node of the main branch of LV grid
Returns
-------
:any:`list`
A list containing two... |
def obj_assd(result, reference, voxelspacing=None, connectivity=1):
"""
Average symmetric surface distance.
Computes the average symmetric surface distance (ASSD) between the binary objects in
two images.
Parameters
----------
result : array_like
Input data containing objec... | Average symmetric surface distance.
Computes the average symmetric surface distance (ASSD) between the binary objects in
two images.
Parameters
----------
result : array_like
Input data containing objects. Can be any type but will be converted
into binary: background where ... |
def _wait_non_ressources(self, callback):
"""This get started as a thread, and waits for the data lock to be freed then advertise itself to the SelectableSelector using the callback""" # noqa: E501
self.trigger = threading.Lock()
self.was_ended = False
self.trigger.acquire()
sel... | This get started as a thread, and waits for the data lock to be freed then advertise itself to the SelectableSelector using the callback |
def get_service_display_name(name):
"""
Get the service display name for the given service name.
@see: L{get_service}
@type name: str
@param name: Service unique name. You can get this value from the
C{ServiceName} member of the service descriptors returned by
... | Get the service display name for the given service name.
@see: L{get_service}
@type name: str
@param name: Service unique name. You can get this value from the
C{ServiceName} member of the service descriptors returned by
L{get_services} or L{get_active_services}.
... |
def convertLatLngToPixelXY(self, lat, lng, level):
'''
returns the x and y values of the pixel corresponding to a latitude
and longitude.
'''
mapSize = self.getMapDimensionsByZoomLevel(level)
lat = self.clipValue(lat, self.min_lat, self.max_lat)
lng = self.clipVa... | returns the x and y values of the pixel corresponding to a latitude
and longitude. |
def teardown_socket(s):
"""Shuts down and closes a socket."""
try:
s.shutdown(socket.SHUT_WR)
except socket.error:
pass
finally:
s.close() | Shuts down and closes a socket. |
def frames(
self,
*,
callers: Optional[Union[str, List[str]]] = None,
callees: Optional[Union[str, List[str]]] = None,
kind: Optional[TraceKind] = None,
limit: Optional[int] = 10,
):
"""Display trace frames independent of the current issue.
Parameters... | Display trace frames independent of the current issue.
Parameters (all optional):
callers: str or list[str] filter traces by this caller name
callees: str or list[str] filter traces by this callee name
kind: precondition|postcondition the type of tra... |
def search_dashboard_entities(self, **kwargs): # noqa: E501
"""Search over a customer's non-deleted dashboards # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api... | Search over a customer's non-deleted dashboards # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.search_dashboard_entities(async_req=True)
>>> result = thread.g... |
async def insert_news(self, **params):
"""Inserts news for account
Accepts:
- event_type
- cid
- access_string (of buyer)
- buyer_pubkey
- buyer address
- owner address
- price
- offer type
- coin ID
Returns:
- dict with result
"""
logging.debug("\n\n [+] -- Setting news debuggin... | Inserts news for account
Accepts:
- event_type
- cid
- access_string (of buyer)
- buyer_pubkey
- buyer address
- owner address
- price
- offer type
- coin ID
Returns:
- dict with result |
def visit_named_list(self, _, children):
"""Manage a list, represented by a ``.resources.List`` instance.
This list is populated with data from the result of the ``FILTERS``.
Arguments
---------
_ (node) : parsimonious.nodes.Node.
children : list
- 0: for ``... | Manage a list, represented by a ``.resources.List`` instance.
This list is populated with data from the result of the ``FILTERS``.
Arguments
---------
_ (node) : parsimonious.nodes.Node.
children : list
- 0: for ``FILTERS``: list of instances of ``.resources.Field``... |
def is_empty(self):
'''Returns True if all titleInfo subfields are not set or
empty; returns False if any of the fields are not empty.'''
return not bool(self.title or self.subtitle or self.part_number \
or self.part_name or self.non_sort or self.type) | Returns True if all titleInfo subfields are not set or
empty; returns False if any of the fields are not empty. |
def output(data, **kwargs): # pylint: disable=unused-argument
'''
Read in the dict structure generated by the salt key API methods and
print the structure.
'''
color = salt.utils.color.get_colors(
__opts__.get('color'),
__opts__.get('color_theme'))
strip_colors = __opts_... | Read in the dict structure generated by the salt key API methods and
print the structure. |
def use_plenary_grade_entry_view(self):
"""Pass through to provider GradeEntryLookupSession.use_plenary_grade_entry_view"""
self._object_views['grade_entry'] = PLENARY
# self._get_provider_session('grade_entry_lookup_session') # To make sure the session is tracked
for session in self._ge... | Pass through to provider GradeEntryLookupSession.use_plenary_grade_entry_view |
def avl_new_top(t1, t2, top, direction=0):
"""
if direction == 0:
(t1, t2) is (left, right)
if direction == 1:
(t1, t2) is (right, left)
"""
top.parent = None
assert top.parent is None, str(top.parent.value)
top.set_child(direction, t1)
top.set_child(1 - direction, t2)
... | if direction == 0:
(t1, t2) is (left, right)
if direction == 1:
(t1, t2) is (right, left) |
def terminate_processes(pid_list):
"""Terminate a list of processes by sending to each of them a SIGTERM signal,
pre-emptively checking if its PID might have been reused.
Parameters
----------
pid_list : list
A list of process identifiers identifying active processes.
"""
... | Terminate a list of processes by sending to each of them a SIGTERM signal,
pre-emptively checking if its PID might have been reused.
Parameters
----------
pid_list : list
A list of process identifiers identifying active processes. |
def to_(self, off_pts):
"""Reverse of :meth:`from_`."""
off_pts = np.asarray(off_pts, dtype=np.float)
has_z = (off_pts.shape[-1] > 2)
# scale according to current settings
scale_pt = [self.viewer._org_scale_x, self.viewer._org_scale_y]
if has_z:
scale_pt.appe... | Reverse of :meth:`from_`. |
def refactor_string(self, data, name):
"""Refactor a given input string.
Args:
data: a string holding the code to be refactored.
name: a human-readable name for use in error/log messages.
Returns:
An AST corresponding to the refactored input stream; None if
... | Refactor a given input string.
Args:
data: a string holding the code to be refactored.
name: a human-readable name for use in error/log messages.
Returns:
An AST corresponding to the refactored input stream; None if
there were errors during the parse. |
def select_delay_factor(self, delay_factor):
"""
Choose the greater of delay_factor or self.global_delay_factor (default).
In fast_cli choose the lesser of delay_factor of self.global_delay_factor.
:param delay_factor: See __init__: global_delay_factor
:type delay_factor: int
... | Choose the greater of delay_factor or self.global_delay_factor (default).
In fast_cli choose the lesser of delay_factor of self.global_delay_factor.
:param delay_factor: See __init__: global_delay_factor
:type delay_factor: int |
def validate(self):
"""Validate that the GremlinFoldedContextField is correctly representable."""
if not isinstance(self.fold_scope_location, FoldScopeLocation):
raise TypeError(u'Expected FoldScopeLocation fold_scope_location, got: {} {}'.format(
type(self.fold_scope_locatio... | Validate that the GremlinFoldedContextField is correctly representable. |
def create_server(initialize=True):
"""Create a server"""
with provider() as p:
host_string = p.create_server()
if initialize:
env.host_string = host_string
initialize_server() | Create a server |
def create_script_fact(self):
"""
appends the CREATE TABLE, index etc to self.ddl_text
"""
self.ddl_text += '---------------------------------------------\n'
self.ddl_text += '-- CREATE Fact Table - ' + self.fact_table + '\n'
self.ddl_text += '----------------------------... | appends the CREATE TABLE, index etc to self.ddl_text |
def result(self, *args, **kwargs):
"""
Construye la consulta SQL
"""
prettify = kwargs.get('pretty', False)
sql = 'CREATE %s %s' % (self._type, self._class)
if prettify:
sql += '\n'
else:
sql += ' '
if self._type.lower() ... | Construye la consulta SQL |
def get_bonds(input_group):
"""Utility function to get indices (in pairs) of the bonds."""
out_list = []
for i in range(len(input_group.bond_order_list)):
out_list.append((input_group.bond_atom_list[i * 2], input_group.bond_atom_list[i * 2 + 1],))
return out_list | Utility function to get indices (in pairs) of the bonds. |
def format_file_node(import_graph, node, indent):
"""Prettyprint nodes based on their provenance."""
f = import_graph.provenance[node]
if isinstance(f, resolve.Direct):
out = '+ ' + f.short_path
elif isinstance(f, resolve.Local):
out = ' ' + f.short_path
elif isinstance(f, resolve.S... | Prettyprint nodes based on their provenance. |
def merge_lists(*args):
"""Merge an arbitrary number of lists into a single list and dedupe it
Args:
*args: Two or more lists
Returns:
A deduped merged list of all the provided lists as a single list
"""
out = {}
for contacts in filter(None, args):
for contact in conta... | Merge an arbitrary number of lists into a single list and dedupe it
Args:
*args: Two or more lists
Returns:
A deduped merged list of all the provided lists as a single list |
def check_existens_of_staging_tag_in_remote_repo():
"""
This method will check, if the given tag exists as a staging tag in the remote repository.
The intention is, that every tag, which should be deployed on a production envirnment,
has to be deployed on a staging environment before.
... | This method will check, if the given tag exists as a staging tag in the remote repository.
The intention is that every tag which should be deployed on a production environment
has to be deployed on a staging environment before. |
def bind_to_uniform_block(self, binding=0, *, offset=0, size=-1) -> None:
'''
Bind the buffer to a uniform block.
Args:
binding (int): The uniform block binding.
Keyword Args:
offset (int): The offset.
size (int): The size. Va... | Bind the buffer to a uniform block.
Args:
binding (int): The uniform block binding.
Keyword Args:
offset (int): The offset.
size (int): The size. Value ``-1`` means all. |
def p_BIT_ix(p):
""" asm : bitop expr COMMA reg8_I
| bitop pexpr COMMA reg8_I
"""
bit = p[2].eval()
if bit < 0 or bit > 7:
error(p.lineno(3), 'Invalid bit position %i. Must be in [0..7]' % bit)
p[0] = None
return
p[0] = Asm(p.lineno(3), '%s %i,%s' % (p[1], bit, p... | asm : bitop expr COMMA reg8_I
| bitop pexpr COMMA reg8_I |
def create_chunker(self, chunk_size):
"""Create a chunker performing content-defined chunking (CDC) using Rabin Karp's rolling hash scheme with a
specific, expected chunk size.
Args:
chunk_size (int): (Expected) target chunk size.
Returns:
BaseChunker: A chunker... | Create a chunker performing content-defined chunking (CDC) using Rabin Karp's rolling hash scheme with a
specific, expected chunk size.
Args:
chunk_size (int): (Expected) target chunk size.
Returns:
BaseChunker: A chunker object. |
def specify_data_set(self, x_input, y_input):
"""
Define input to ACE.
Parameters
----------
x_input : list
list of iterables, one for each independent variable
y_input : array
the dependent observations
"""
self.x = x_input
... | Define input to ACE.
Parameters
----------
x_input : list
list of iterables, one for each independent variable
y_input : array
the dependent observations |
def _edge_opposite_point(self, tri, i):
""" Given a triangle, return the edge that is opposite point i.
Vertexes are returned in the same orientation as in tri.
"""
ind = tri.index(i)
return (tri[(ind+1) % 3], tri[(ind+2) % 3]) | Given a triangle, return the edge that is opposite point i.
Vertexes are returned in the same orientation as in tri. |
def vperp(a, b):
"""
Find the component of a vector that is perpendicular to a second
vector. All vectors are 3-dimensional.
http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/vperp_c.html
:param a: The vector whose orthogonal component is sought.
:type a: 3-Element Array of floats
:... | Find the component of a vector that is perpendicular to a second
vector. All vectors are 3-dimensional.
http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/vperp_c.html
:param a: The vector whose orthogonal component is sought.
:type a: 3-Element Array of floats
:param b: The vector used as t... |
def aveknt(t, k):
"""Compute the running average of `k` successive elements of `t`. Return the averaged array.
Parameters:
t:
Python list or rank-1 array
k:
int, >= 2, how many successive elements to average
Returns:
rank-1 array, averaged data. If k > len(t), returns a zero-length arr... | Compute the running average of `k` successive elements of `t`. Return the averaged array.
Parameters:
t:
Python list or rank-1 array
k:
int, >= 2, how many successive elements to average
Returns:
rank-1 array, averaged data. If k > len(t), returns a zero-length array.
Caveat:
This is ... |
def hs_mux(sel, ls_hsi, hso):
""" [Many-to-one] Multiplexes a list of input handshake interfaces
sel - (i) selects an input handshake interface to be connected to the output
ls_hsi - (i) list of input handshake tuples (ready, valid)
hso - (o) output handshake tuple (ready, ... | [Many-to-one] Multiplexes a list of input handshake interfaces
sel - (i) selects an input handshake interface to be connected to the output
ls_hsi - (i) list of input handshake tuples (ready, valid)
hso - (o) output handshake tuple (ready, valid) |
def get_info(self):
"""
Query the GenePattern server for metadata regarding this job and assign
that metadata to the properties on this GPJob object. Including:
* Task Name
* LSID
* User ID
* Job Number
* Status
* Date Submi... | Query the GenePattern server for metadata regarding this job and assign
that metadata to the properties on this GPJob object. Including:
* Task Name
* LSID
* User ID
* Job Number
* Status
* Date Submitted
* URL of Log Files
... |
def DeleteMessageHandlerRequests(self, requests, cursor=None):
"""Deletes a list of message handler requests from the database."""
query = "DELETE FROM message_handler_requests WHERE request_id IN ({})"
request_ids = set([r.request_id for r in requests])
query = query.format(",".join(["%s"] * len(reque... | Deletes a list of message handler requests from the database. |
def favorites_add(photo_id):
    """Add a photo to the authenticated user's favorites.

    Args:
        photo_id: Identifier of the photo to favorite.

    Returns:
        True (unconditionally; errors are presumably raised by ``_dopost``
        — confirm against its implementation).
    """
    _dopost('flickr.favorites.add', auth=True, photo_id=photo_id)
    return True
def GetFormatSpecification(cls):
"""Retrieves the format specification.
Returns:
FormatSpecification: format specification.
"""
format_specification = specification.FormatSpecification(cls.NAME)
format_specification.AddNewSignature(b'SCCA', offset=4)
format_specification.AddNewSignature(b... | Retrieves the format specification.
Returns:
FormatSpecification: format specification. |
def get_gc_book(self):
""" Returns the GnuCash db session """
if not self.gc_book:
gc_db = self.config.get(ConfigKeys.gnucash_book_path)
if not gc_db:
raise AttributeError("GnuCash book path not configured.")
# check if this is the abs file exists
... | Returns the GnuCash db session |
def execute_prebuild_script(self):
"""
Parse and execute the prebuild_script from the zappa_settings.
"""
(pb_mod_path, pb_func) = self.prebuild_script.rsplit('.', 1)
try: # Prefer prebuild script in working directory
if pb_mod_path.count('.') >= 1: # Prebuild sc... | Parse and execute the prebuild_script from the zappa_settings. |
def genl_ctrl_resolve_grp(sk, family_name, grp_name):
"""Resolve Generic Netlink family group name.
https://github.com/thom311/libnl/blob/libnl3_2_25/lib/genl/ctrl.c#L471
Looks up the family object and resolves the group name to the numeric group identifier.
Positional arguments:
sk -- Generic Ne... | Resolve Generic Netlink family group name.
https://github.com/thom311/libnl/blob/libnl3_2_25/lib/genl/ctrl.c#L471
Looks up the family object and resolves the group name to the numeric group identifier.
Positional arguments:
sk -- Generic Netlink socket (nl_sock class instance).
family_name -- nam... |
def replace_termcodes(self, string, from_part=False, do_lt=True,
special=True):
r"""Replace any terminal code strings by byte sequences.
The returned sequences are Nvim's internal representation of keys,
for example:
<esc> -> '\x1b'
<cr> -> '\r'
... | r"""Replace any terminal code strings by byte sequences.
The returned sequences are Nvim's internal representation of keys,
for example:
<esc> -> '\x1b'
<cr> -> '\r'
<c-l> -> '\x0c'
<up> -> '\x80ku'
The returned sequences can be used as input to `feedkeys`. |
def list_nodes_min(call=None):
'''
Return a list of the VMs that are on the provider. Only a list of VM names and
their state is returned. This is the minimum amount of information needed to
check for existing VMs.
.. versionadded:: 2015.8.0
CLI Example:
.. code-block:: bash
salt... | Return a list of the VMs that are on the provider. Only a list of VM names and
their state is returned. This is the minimum amount of information needed to
check for existing VMs.
.. versionadded:: 2015.8.0
CLI Example:
.. code-block:: bash
salt-cloud -f list_nodes_min my-linode-config
... |
def get_all_current_trains(self, train_type=None, direction=None):
"""Returns all trains that are due to start in the next 10 minutes
@param train_type: ['mainline', 'suburban', 'dart']
"""
params = None
if train_type:
url = self.api_base_url + 'getCurrentTrainsXML_Wi... | Returns all trains that are due to start in the next 10 minutes
@param train_type: ['mainline', 'suburban', 'dart'] |
def create_role(self, role_name, mount_point='approle', **kwargs):
"""POST /auth/<mount_point>/role/<role name>
:param role_name:
:type role_name:
:param mount_point:
:type mount_point:
:param kwargs:
:type kwargs:
:return:
:rtype:
"""
... | POST /auth/<mount_point>/role/<role name>
:param role_name:
:type role_name:
:param mount_point:
:type mount_point:
:param kwargs:
:type kwargs:
:return:
:rtype: |
async def getRecentErrors(self, *args, **kwargs):
"""
Look up the most recent errors in the provisioner across all worker types
Return a list of recent errors encountered
This method gives output: ``v1/errors.json#``
This method is ``experimental``
"""
return ... | Look up the most recent errors in the provisioner across all worker types
Return a list of recent errors encountered
This method gives output: ``v1/errors.json#``
This method is ``experimental`` |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.