code stringlengths 75 104k | docstring stringlengths 1 46.9k |
|---|---|
def nmap_scan():
"""
Scans the given hosts with nmap.
"""
# Create the search and config objects
hs = HostSearch()
config = Config()
# Static options to be able to figure out what options to use depending on the input the user gives.
nmap_types = ['top10', 'top100', 'custom', 'top10... | Scans the given hosts with nmap. |
def ensure_mapping_format(variables):
""" ensure variables are in mapping format.
Args:
variables (list/dict): original variables
Returns:
dict: ensured variables in dict format
Examples:
>>> variables = [
{"a": 1},
{"b": 2}
]
... | ensure variables are in mapping format.
Args:
variables (list/dict): original variables
Returns:
dict: ensured variables in dict format
Examples:
>>> variables = [
{"a": 1},
{"b": 2}
]
>>> print(ensure_mapping_format(variables))
... |
def paint(self, painter, option, index):
"""Paint checkbox and text
_
|_| My label
"""
body_rect = QtCore.QRectF(option.rect)
check_rect = QtCore.QRectF(body_rect)
check_rect.setWidth(check_rect.height())
check_rect.adjust(6, 6, -6, -6)
check... | Paint checkbox and text
_
|_| My label |
def parse(self, url):
"""
Return a configuration dict from a URL
"""
parsed_url = urlparse.urlparse(url)
try:
default_config = self.CONFIG[parsed_url.scheme]
except KeyError:
raise ValueError(
'unrecognised URL scheme for {}: {}'.fo... | Return a configuration dict from a URL |
def call_multiple_modules(module_gen):
"""Call each module
module_gen should be a iterator
"""
for args_seq in module_gen:
module_name_or_path = args_seq[0]
with replace_sys_args(args_seq):
if re.match(VALID_PACKAGE_RE, module_name_or_path):
runpy.run_module(... | Call each module
module_gen should be a iterator |
def _setEncoderParams(self):
"""
Set the radius, resolution and range. These values are updated when minval
and/or maxval change.
"""
self.rangeInternal = float(self.maxval - self.minval)
self.resolution = float(self.rangeInternal) / (self.n - self.w)
self.radius = self.w * self.resolution... | Set the radius, resolution and range. These values are updated when minval
and/or maxval change. |
def convert_money(amount, currency_from, currency_to):
    """Convert ``amount`` between two currencies.

    The raw conversion is delegated to ``base_convert_money``; the result
    is wrapped in a :class:`moneyed.Money` denominated in ``currency_to``.
    """
    converted = base_convert_money(amount, currency_from, currency_to)
    return moneyed.Money(converted, currency_to)
def _find_usage_cloudtrail(self):
"""Calculate current usage for CloudTrail related metrics"""
trail_list = self.conn.describe_trails()['trailList']
trail_count = len(trail_list) if trail_list else 0
for trail in trail_list:
data_resource_count = 0
if self.conn.... | Calculate current usage for CloudTrail related metrics |
def has_access(user, required_roles, match_all=True):
"""Check if the user meets the role requirements. If mode is set to AND, all the provided roles must apply
Args:
user (:obj:`User`): User object
required_roles (`list` of `str`): List of roles that the user must have applied
match_al... | Check if the user meets the role requirements. If mode is set to AND, all the provided roles must apply
Args:
user (:obj:`User`): User object
required_roles (`list` of `str`): List of roles that the user must have applied
match_all (`bool`): If true, all the required_roles must be applied t... |
def _load_item(self, key):
'''Load the specified item from the [flask] section. Type is
determined by the type of the equivalent value in app.default_config
or string if unknown.'''
key_u = key.upper()
default = current_app.default_config.get(key_u)
# One of the defaul... | Load the specified item from the [flask] section. Type is
determined by the type of the equivalent value in app.default_config
or string if unknown. |
def plot_burstness(corpus, B, **kwargs):
"""
Generate a figure depicting burstness profiles for ``feature``.
Parameters
----------
B
Returns
-------
fig : :class:`matplotlib.figure.Figure`
Examples
--------
.. code-block:: python
>>> from tethne.analyze.corpus imp... | Generate a figure depicting burstness profiles for ``feature``.
Parameters
----------
B
Returns
-------
fig : :class:`matplotlib.figure.Figure`
Examples
--------
.. code-block:: python
>>> from tethne.analyze.corpus import burstness
>>> fig = plot_burstness(corpus,... |
def get_provisioned_table_write_units(table_name):
""" Returns the number of provisioned write units for the table
:type table_name: str
:param table_name: Name of the DynamoDB table
:returns: int -- Number of write units
"""
try:
desc = DYNAMODB_CONNECTION.describe_table(table_name)
... | Returns the number of provisioned write units for the table
:type table_name: str
:param table_name: Name of the DynamoDB table
:returns: int -- Number of write units |
def replace(self, *args, **kwargs):
"""
replace(lower=None, upper=None, lower_inc=None, upper_inc=None)
Returns a new instance of self with the given arguments replaced. It
takes the exact same arguments as the constructor.
>>> intrange(1, 5).replace(upper=10)
i... | replace(lower=None, upper=None, lower_inc=None, upper_inc=None)
Returns a new instance of self with the given arguments replaced. It
takes the exact same arguments as the constructor.
>>> intrange(1, 5).replace(upper=10)
intrange([1,10))
>>> intrange(1, 10).replace(... |
def __check_table_rules(configuration):
""" Do some basic checks on the configuration """
for table_name in configuration['tables']:
table = configuration['tables'][table_name]
# Check that increase/decrease units is OK
valid_units = ['percent', 'units']
if table['increase_reads_... | Do some basic checks on the configuration |
def markdown(text, html=False, valid_tags=GFM_TAGS):
"""
Return Markdown rendered text using GitHub Flavoured Markdown,
with HTML escaped and syntax-highlighting enabled.
"""
if text is None:
return None
if html:
return Markup(sanitize_html(markdown_convert_html(gfm(text)), valid... | Return Markdown rendered text using GitHub Flavoured Markdown,
with HTML escaped and syntax-highlighting enabled. |
def distance_matrix(a, b, periodic):
    '''Calculate a distance matrix between coordinate sets ``a`` and ``b``.

    :param a: coordinate array -- presumably shape (n, d); confirm with callers
    :param b: coordinate array -- presumably shape (m, d)
    :param periodic: periodic-box specification forwarded to ``periodic_distance``
    :return: pairwise distances as computed by ``periodic_distance`` with
        ``b`` broadcast against ``a``
    '''
    # Removed the original no-op statement ``a = a``.
    # Insert a new axis so `a` and `b` broadcast into a pairwise grid.
    b = b[:, np.newaxis]
    return periodic_distance(a, b, periodic)
def add_dataset(self, name=None, label=None,
x_column_label=None, y_column_label=None, index=None, control=False):
"""Add a dataset to a specific plot.
This method adds a dataset to a plot. Its functional use is imperative
to the plot generation. It handles adding new files ... | Add a dataset to a specific plot.
This method adds a dataset to a plot. Its functional use is imperative
to the plot generation. It handles adding new files as well
as indexing to files that are added to other plots.
All Args default to None. However, these are note the defaults
... |
def check(text):
"""Suggest the preferred forms."""
err = "misc.waxed"
msg = u"The modifier following 'waxed' must be an adj.: '{}' is correct"
waxes = ["wax", "waxes", "waxed", "waxing"]
modifiers = [("ebullient", "ebulliently"),
("ecstatic", "ecstatically"),
("el... | Suggest the preferred forms. |
def flush(self):
"""
Flush the write buffers of the stream if applicable.
"""
if self._writable:
with self._seek_lock:
self._flush_raw_or_buffered()
# Clear the buffer
self._write_buffer = bytearray(self._buffer_size)
... | Flush the write buffers of the stream if applicable. |
def convert_weights_to_numpy(weights_dict):
    """Convert a weights mapping to plain numpy arrays.

    Strips the ``arg:``/``aux:`` key prefixes and converts each value via
    its ``asnumpy()`` method.

    :param weights_dict: mapping of (possibly prefixed) parameter names to
        NDArray-like objects exposing ``asnumpy()``
    :return: dict of cleaned names to numpy arrays
    """
    # A dict comprehension avoids building an intermediate list of tuples.
    return {k.replace("arg:", "").replace("aux:", ""): v.asnumpy()
            for k, v in weights_dict.items()}
def get_component_product(self, other):
    """Return the element-wise (component) product of this vector and
    ``other`` as a new ``Point``."""
    px = self.x * other.x
    py = self.y * other.y
    return Point(px, py)
def _handle_lrr(self, data):
"""
Handle Long Range Radio messages.
:param data: LRR message to parse
:type data: string
:returns: :py:class:`~alarmdecoder.messages.LRRMessage`
"""
msg = LRRMessage(data)
if not self._ignore_lrr_states:
self._... | Handle Long Range Radio messages.
:param data: LRR message to parse
:type data: string
:returns: :py:class:`~alarmdecoder.messages.LRRMessage` |
def set(self, key, val, bucket):
    """Store ``val`` under ``key`` inside ``bucket``.

    WARN: an existing entry for the same key is overwritten with the
    new value.
    """
    # setdefault creates the bucket mapping on first use.
    self._cache.setdefault(bucket, {})[key] = val
def _send_request(self, xml_request):
""" Send the prepared XML request block to the CPS using the corect protocol.
Args:
xml_request -- A fully formed xml request string for the CPS.
Returns:
The raw xml response string.
Raises:
... | Send the prepared XML request block to the CPS using the corect protocol.
Args:
xml_request -- A fully formed xml request string for the CPS.
Returns:
The raw xml response string.
Raises:
ConnectionError -- Can't establish a connecti... |
def child_task(self, q, l, gq, gl):
'''child process - this holds GUI elements'''
mp_util.child_close_fds()
from ..lib import wx_processguard
from ..lib.wx_loader import wx
from MAVProxy.modules.mavproxy_misseditor import missionEditorFrame
self.app = wx.App(Fal... | child process - this holds GUI elements |
def return_markers(self):
"""Reads the notes of the Ktlx recordings.
"""
ent_file = self._filename.with_suffix('.ent')
if not ent_file.exists():
ent_file = self._filename.with_suffix('.ent.old')
try:
ent_notes = _read_ent(ent_file)
except (FileNo... | Reads the notes of the Ktlx recordings. |
def get_student_email(cmd_args, endpoint=''):
"""Attempts to get the student's email. Returns the email, or None."""
log.info("Attempting to get student email")
if cmd_args.local:
return None
access_token = authenticate(cmd_args, endpoint=endpoint, force=False)
if not access_token:
r... | Attempts to get the student's email. Returns the email, or None. |
def to_unicode(string):
    """Coerce ``string`` into a unicode text object.

    Bytes are decoded as UTF-8, text objects pass through untouched and
    anything else is stringified with the interpreter's native text type.
    """
    if isinstance(string, six.text_type):
        return string
    if isinstance(string, six.binary_type):
        return string.decode('utf8')
    if six.PY2:
        return unicode(string)  # noqa: F821 -- only reachable on Python 2
    return str(string)
def compute_avg_adj_deg(G):
r"""
Compute the average adjacency degree for each node.
The average adjacency degree is the average of the degrees of a node and
its neighbors.
Parameters
----------
G: Graph
Graph on which the statistic is extracted
"""
return np.sum(np.dot(G.A... | r"""
Compute the average adjacency degree for each node.
The average adjacency degree is the average of the degrees of a node and
its neighbors.
Parameters
----------
G: Graph
Graph on which the statistic is extracted |
def get_asset_lookup_session_for_repository(self, repository_id=None, proxy=None):
"""Gets the ``OsidSession`` associated with the asset lookup service for the
given repository.
arg: repository_id (osid.id.Id): the ``Id`` of the repository
arg: proxy (osid.proxy.Proxy): a proxy
... | Gets the ``OsidSession`` associated with the asset lookup service for the
given repository.
arg: repository_id (osid.id.Id): the ``Id`` of the repository
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.AssetLookupSession) - an
``AssetLookupSession``... |
def batch_annotate_files(
self,
requests,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Service that performs image detection and annotation for a batch of files.
Now only "applica... | Service that performs image detection and annotation for a batch of files.
Now only "application/pdf", "image/tiff" and "image/gif" are supported.
This service will extract at most the first 10 frames (gif) or pages
(pdf or tiff) from each file provided and perform detection and annotation
... |
def load(source, **kwargs) -> JsonObj:
""" Deserialize a JSON source.
:param source: a URI, File name or a .read()-supporting file-like object containing a JSON document
:param kwargs: arguments. see: json.load for details
:return: JsonObj representing fp
"""
if isinstance(source, str):
... | Deserialize a JSON source.
:param source: a URI, File name or a .read()-supporting file-like object containing a JSON document
:param kwargs: arguments. see: json.load for details
:return: JsonObj representing fp |
def earthquake_contour_preprocessor(impact_function):
"""Preprocessor to create contour from an earthquake
:param impact_function: Impact function to run.
:type impact_function: ImpactFunction
:return: The contour layer.
:rtype: QgsMapLayer
"""
contour_path = create_smooth_contour(impact_f... | Preprocessor to create contour from an earthquake
:param impact_function: Impact function to run.
:type impact_function: ImpactFunction
:return: The contour layer.
:rtype: QgsMapLayer |
def get_range_info(array, component):
    """Return min/max/name metadata for one component of ``array``."""
    value_range = array.GetRange(component)
    return {
        'min': value_range[0],
        'max': value_range[1],
        'component': array.GetComponentName(component),
    }
def extern_store_bytes(self, context_handle, bytes_ptr, bytes_len):
    """Given a context and raw bytes, return a new Handle to represent the content."""
    context = self._ffi.from_handle(context_handle)
    raw = binary_type(self._ffi.buffer(bytes_ptr, bytes_len))
    return context.to_value(raw)
def ComputeRoot(hashes):
"""
Compute the root hash.
Args:
hashes (list): the list of hashes to build the root from.
Returns:
bytes: the root hash.
"""
if not len(hashes):
raise Exception('Hashes must have length')
if len(hashe... | Compute the root hash.
Args:
hashes (list): the list of hashes to build the root from.
Returns:
bytes: the root hash. |
def get_agent(msg):
    """Handy hack to handle legacy messages where 'agent' was a list."""
    agent = msg['msg']['agent']
    # Legacy payloads wrapped the agent in a single-element list.
    return agent[0] if isinstance(agent, list) else agent
def choice(self, board: Union[chess.Board, int], *, minimum_weight: int = 1, exclude_moves: Container[chess.Move] = (), random=random) -> Entry:
"""
Uniformly selects a random entry for the given position.
:raises: :exc:`IndexError` if no entries are found.
"""
chosen_entry = No... | Uniformly selects a random entry for the given position.
:raises: :exc:`IndexError` if no entries are found. |
def mkdir(path, mode=0o755, delete=False):
"""Make a directory.
Create a leaf directory and all intermediate ones.
Works like ``mkdir``, except that any intermediate path segment (not just
the rightmost) will be created if it does not exist. This is recursive.
Args:
path (str): Directory t... | Make a directory.
Create a leaf directory and all intermediate ones.
Works like ``mkdir``, except that any intermediate path segment (not just
the rightmost) will be created if it does not exist. This is recursive.
Args:
path (str): Directory to create
mode (int): Directory mode
... |
def _path_pair(self, s):
"""Parse two paths separated by a space."""
# TODO: handle a space in the first path
if s.startswith(b'"'):
parts = s[1:].split(b'" ', 1)
else:
parts = s.split(b' ', 1)
if len(parts) != 2:
self.abort(errors.BadFormat, '... | Parse two paths separated by a space. |
def releases(self):
r"""
A dictionary that maps release identifiers to :class:`Release` objects.
Here's an example based on a mirror of the git project's repository
which shows the last ten releases based on tags, where each release
identifier captures a tag without its 'v' pref... | r"""
A dictionary that maps release identifiers to :class:`Release` objects.
Here's an example based on a mirror of the git project's repository
which shows the last ten releases based on tags, where each release
identifier captures a tag without its 'v' prefix:
>>> from pprint... |
def ec(ns=None, cn=None, di=None, lo=None, iq=None, ico=None):
# pylint: disable=redefined-outer-name
"""
This function is a wrapper for
:meth:`~pywbem.WBEMConnection.EnumerateClasses`.
Enumerate the subclasses of a class, or the top-level classes in a
namespace.
Parameters:
ns (:te... | This function is a wrapper for
:meth:`~pywbem.WBEMConnection.EnumerateClasses`.
Enumerate the subclasses of a class, or the top-level classes in a
namespace.
Parameters:
ns (:term:`string`):
Name of the CIM namespace to be used (case independent).
If `None`, defaults to the... |
def matches(self, stream):
"""Check if this selector matches the given stream
Args:
stream (DataStream): The stream to check
Returns:
bool: True if this selector matches the stream
"""
if self.match_type != stream.stream_type:
return False
... | Check if this selector matches the given stream
Args:
stream (DataStream): The stream to check
Returns:
bool: True if this selector matches the stream |
def put_abs(self, r, c, ch):
    '''Write one character at 1-indexed screen position (r, c).'''
    # Clamp coordinates into the valid 1-based screen range.
    row = constrain(r, 1, self.rows)
    col = constrain(c, 1, self.cols)
    # Keep only the first character, decoding bytes input first.
    if isinstance(ch, bytes):
        first = self._decode(ch)[0]
    else:
        first = ch[0]
    self.w[row - 1][col - 1] = first
def _do_close(self):
"""Tear down this object, after we've agreed to close
with the server."""
AMQP_LOGGER.debug('Closed channel #%d', self.channel_id)
self.is_open = False
channel_id, self.channel_id = self.channel_id, None
connection, self.connection = self.connection, ... | Tear down this object, after we've agreed to close
with the server. |
def bucket_to_dataframe(name, buckets, append_name=None):
'''A function that turns elasticsearch aggregation buckets into dataframes
:param name: The name of the bucket (will be a column in the dataframe)
:type name: str
:param bucket: a bucket from elasticsearch results
:type bucke... | A function that turns elasticsearch aggregation buckets into dataframes
:param name: The name of the bucket (will be a column in the dataframe)
:type name: str
:param bucket: a bucket from elasticsearch results
:type bucket: list[dict]
:returns: pandas.DataFrame |
def credit_card_number(self, card_type=None):
    """Return a valid credit card number, optionally of ``card_type``."""
    card = self._credit_card_type(card_type)
    chosen_prefix = self.random_element(card.prefixes)
    return self._generate_number(self.numerify(chosen_prefix), card.length)
def _search_ldap(self, ldap, con, username):
"""
Searches LDAP for user, assumes ldap_search is set.
:param ldap: The ldap module reference
:param con: The ldap connection
:param username: username to match with auth_ldap_uid_field
:return: ldap objec... | Searches LDAP for user, assumes ldap_search is set.
:param ldap: The ldap module reference
:param con: The ldap connection
:param username: username to match with auth_ldap_uid_field
:return: ldap object array |
def multi_muscle_align(data, samples, ipyclient):
"""
Sends the cluster bits to nprocessors for muscle alignment. They return
with indel.h5 handles to be concatenated into a joint h5.
"""
LOGGER.info("starting alignments")
## get client
lbview = ipyclient.load_balanced_view()
start = ti... | Sends the cluster bits to nprocessors for muscle alignment. They return
with indel.h5 handles to be concatenated into a joint h5. |
def approveproposal(self, proposal_ids, account=None, approver=None, **kwargs):
""" Approve Proposal
:param list proposal_id: Ids of the proposals
:param str appprover: The account or key to use for approval
(defaults to ``account``)
:param str account: (opti... | Approve Proposal
:param list proposal_id: Ids of the proposals
:param str appprover: The account or key to use for approval
(defaults to ``account``)
:param str account: (optional) the account to allow access
to (defaults to ``default_account``) |
def get_combo(self, symbol):
""" get group by child symbol """
for parent, legs in self.instrument_combos.items():
if symbol == parent or symbol in legs.keys():
return {
"parent": self.get_instrument(parent),
"legs": legs,
... | get group by child symbol |
def _JModule(spec, javaname):
    """ (internal) Front end for creating a java module dynamically """
    factory = _JImportFactory(spec, javaname)
    return factory(spec.name)
def can_overlap(self, contig, strand=None):
    """
    Is this locus on the same contig and (optionally) on the same strand?
    """
    # Guard clause: a different contig can never overlap.
    if not self.on_contig(contig):
        return False
    return strand is None or self.on_strand(strand)
def populateViewTree(self, view):
'''
Populates the View tree.
'''
vuid = view.getUniqueId()
text = view.__smallStr__()
if view.getParent() is None:
self.viewTree.insert('', Tkinter.END, vuid, text=text)
else:
self.viewTree.insert(view.get... | Populates the View tree. |
def query_one(cls, *args, **kwargs):
    """ Same as collection.find_one, but return Document then dict """
    raw = cls._coll.find_one(*args, **kwargs)
    # Falsy results (no match) yield None, matching find_one's contract.
    return cls.from_storage(raw) if raw else None
def urljoin(base, path=None):
    """Join a base url with a relative path."""
    if path is None:
        return base
    # Without the trailing slash, urllib would replace the last path
    # segment (/foo/bar + baz -> /foo/baz) instead of appending.
    root = base if base.endswith('/') else base + '/'
    return urllib.parse.urljoin(root, str(path))
def read(self, path, ext=None, start=None, stop=None, recursive=False, npartitions=None):
"""
Sets up Spark RDD across S3 or GS objects specified by dataPath.
Returns RDD of <string bucket keyname, string buffer> k/v pairs.
"""
from .utils import connection_with_anon, connection... | Sets up Spark RDD across S3 or GS objects specified by dataPath.
Returns RDD of <string bucket keyname, string buffer> k/v pairs. |
def open_netcdf_writer(self, flatten=False, isolate=False, timeaxis=1):
    """Prepare a new |NetCDFInterface| object for writing data."""
    # Normalise the option types before handing them to the interface.
    self._netcdf_writer = netcdftools.NetCDFInterface(
        flatten=bool(flatten), isolate=bool(isolate), timeaxis=int(timeaxis))
def _add_text_ngrams(self, witness, minimum, maximum):
"""Adds n-gram data from `witness` to the data store.
:param witness: witness to get n-grams from
:type witness: `WitnessText`
:param minimum: minimum n-gram size
:type minimum: `int`
:param maximum: maximum n-gram s... | Adds n-gram data from `witness` to the data store.
:param witness: witness to get n-grams from
:type witness: `WitnessText`
:param minimum: minimum n-gram size
:type minimum: `int`
:param maximum: maximum n-gram size
:type maximum: `int` |
def _get_motor_parameters(json_file):
"""Returns a dictionary with joints as keys, and a description (dict) of each joint as value"""
with open(json_file) as motor_fd:
global_config = json.load(motor_fd)
motors = global_config["motors"]
# Returned dict
motor_config = {}
# Add motor to ... | Returns a dictionary with joints as keys, and a description (dict) of each joint as value |
def has_nrows(
state,
incorrect_msg="Your query returned a table with {{n_stu}} row{{'s' if n_stu > 1 else ''}} while it should return a table with {{n_sol}} row{{'s' if n_sol > 1 else ''}}.",
):
"""Test whether the student and solution query results have equal numbers of rows.
Args:
incorr... | Test whether the student and solution query results have equal numbers of rows.
Args:
incorrect_msg: If specified, this overrides the automatically generated feedback message
in case the number of rows in the student and solution query don't match. |
def _hijacked_run_baton_query(
self, baton_binary: BatonBinary, program_arguments: List[str]=None, input_data: Any=None) -> List[Dict]:
"""
Hijacked `run_baton_query` method with hijacking to add the `--recursive` flag to calls to `baton-chmod` that
originate from code called from fr... | Hijacked `run_baton_query` method with hijacking to add the `--recursive` flag to calls to `baton-chmod` that
originate from code called from frames with the ids in `self._hijack_frame_ids`.
:param baton_binary: see `BatonRunner.run_baton_query`
:param program_arguments: see `BatonRunner.run_bat... |
def _axis(self, axis):
"""
Return the corresponding labels taking into account the axis.
The axis could be horizontal (0) or vertical (1).
"""
return self.df.columns if axis == 0 else self.df.index | Return the corresponding labels taking into account the axis.
The axis could be horizontal (0) or vertical (1). |
def dump(obj, attributes = True, _refset = None):
"Show full value of a data object"
if _refset is None:
_refset = set()
if obj is None:
return None
elif isinstance(obj, DataObject):
if id(obj) in _refset:
attributes = False
else:
_refset.a... | Show full value of a data object |
def expr_str(expr, sc_expr_str_fn=standard_sc_expr_str):
"""
Returns the string representation of the expression 'expr', as in a Kconfig
file.
Passing subexpressions of expressions to this function works as expected.
sc_expr_str_fn (default: standard_sc_expr_str):
This function is called for... | Returns the string representation of the expression 'expr', as in a Kconfig
file.
Passing subexpressions of expressions to this function works as expected.
sc_expr_str_fn (default: standard_sc_expr_str):
This function is called for every symbol/choice (hence "sc") appearing in
the expression, ... |
def pid_exists(pid):
    """Determine whether ``pid`` is present in the system process table.

    Sending signal 0 performs error checking without delivering a signal;
    EPERM means the process exists but belongs to another user.
    """
    try:
        os.kill(pid, 0)
    except OSError as exc:
        return exc.errno == errno.EPERM
    return True
def sequence_to_graph(G, seq, color='black'):
"""
Automatically construct graph given a sequence of characters.
"""
for x in seq:
if x.endswith("_1"): # Mutation
G.node(x, color=color, width="0.1", shape="circle", label="")
else:
G.node(x, color=color)
for a,... | Automatically construct graph given a sequence of characters. |
def is_left(point0, point1, point2):
""" Tests if a point is Left|On|Right of an infinite line.
Ported from the C++ version: on http://geomalgorithms.com/a03-_inclusion.html
.. note:: This implementation only works in 2-dimensional space.
:param point0: Point P0
:param point1: Point P1
:param... | Tests if a point is Left|On|Right of an infinite line.
Ported from the C++ version: on http://geomalgorithms.com/a03-_inclusion.html
.. note:: This implementation only works in 2-dimensional space.
:param point0: Point P0
:param point1: Point P1
:param point2: Point P2
:return:
>0 for... |
def arg_tup_to_dict(argument_tuples):
"""Given a set of argument tuples, set their value in a data dictionary if not blank"""
data = dict()
for arg_name, arg_val in argument_tuples:
if arg_val is not None:
if arg_val is True:
arg_val = 'true'
elif arg_val is F... | Given a set of argument tuples, set their value in a data dictionary if not blank |
def remove(src, rel, dst):
"""
Returns an SQL statement that removes edges from
the SQL backing store. Either `src` or `dst` may
be specified, even both.
:param src: The source node.
:param rel: The relation.
:param dst: The destination node.
"""
smt = 'DELETE FROM %s' % rel
que... | Returns an SQL statement that removes edges from
the SQL backing store. Either `src` or `dst` may
be specified, even both.
:param src: The source node.
:param rel: The relation.
:param dst: The destination node. |
def ipoib_interfaces():
"""Return a list of IPOIB capable ethernet interfaces"""
interfaces = []
for interface in network_interfaces():
try:
driver = re.search('^driver: (.+)$', subprocess.check_output([
'ethtool', '-i',
interface]), re.M).group(1)
... | Return a list of IPOIB capable ethernet interfaces |
def extract_cookies(self, response, request, referrer_host=None):
'''Wrapped ``extract_cookies``.
Args:
response: An instance of :class:`.http.request.Response`.
request: An instance of :class:`.http.request.Request`.
referrer_host (str): An hostname or IP address of... | Wrapped ``extract_cookies``.
Args:
response: An instance of :class:`.http.request.Response`.
request: An instance of :class:`.http.request.Request`.
referrer_host (str): An hostname or IP address of the referrer
URL. |
def print_tree(
expr, attr='operands', padding='', exclude_type=None, depth=None,
unicode=True, srepr_leaves=False, _last=False, _root=True, _level=0,
_print=True):
"""Print a tree representation of the structure of `expr`
Args:
expr (Expression): expression to render
at... | Print a tree representation of the structure of `expr`
Args:
expr (Expression): expression to render
attr (str): The attribute from which to get the children of `expr`
padding (str): Whitespace by which the entire tree is idented
exclude_type (type): Type (or list of types) which sh... |
def deploy_directory(directory, auth=None):
"""Deploy all files in a given directory.
:param str directory: the path to a directory
:param tuple[str] auth: A pair of (str username, str password) to give to the auth keyword of the constructor of
:class:`artifactory.ArtifactoryPat... | Deploy all files in a given directory.
:param str directory: the path to a directory
:param tuple[str] auth: A pair of (str username, str password) to give to the auth keyword of the constructor of
:class:`artifactory.ArtifactoryPath`. Defaults to the result of :func:`get_arty_auth`... |
def get_current_cmus():
"""
Get the current song from cmus.
"""
result = subprocess.run('cmus-remote -Q'.split(' '), check=True,
stdout=subprocess.PIPE, stderr=subprocess.DEVNULL)
info = {}
for line in result.stdout.decode().split('\n'):
line = line.split(' ')... | Get the current song from cmus. |
def get_frame_locals(stepback=0):
    """Return the ``locals`` dictionary of the frame ``stepback`` levels up.

    :param int stepback: how many frames to step back from the caller
    :rtype: dict
    """
    with Frame(stepback=stepback) as target_frame:
        result = target_frame.f_locals
    return result
def getEdgeDirected(self, networkId, edgeId, verbose=None):
"""
Returns true if the edge specified by the `edgeId` and `networkId` parameters is directed.
:param networkId: SUID of the network containing the edge
:param edgeId: SUID of the edge
:param verbose: print more
... | Returns true if the edge specified by the `edgeId` and `networkId` parameters is directed.
:param networkId: SUID of the network containing the edge
:param edgeId: SUID of the edge
:param verbose: print more
:returns: 200: successful operation |
def volumes_from(self, value):
"""
:param value:
:return:
"""
volumes_from = []
if isinstance(value, list):
for volume_from in value:
if not isinstance(volume_from, six.string_types):
raise TypeError("each bind must be a st... | :param value:
:return: |
def overlay_gateway_map_vlan_vni_mapping_vid(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
overlay_gateway = ET.SubElement(config, "overlay-gateway", xmlns="urn:brocade.com:mgmt:brocade-tunnels")
name_key = ET.SubElement(overlay_gateway, "name")
... | Auto Generated Code |
def render_fields(dictionary,
*fields,
**opts):
'''
This function works similarly to
:mod:`render_field <salt.modules.napalm_formula.render_field>` but for a
list of fields from the same dictionary, rendering, indenting and
distributing them on separate lines.
... | This function works similarly to
:mod:`render_field <salt.modules.napalm_formula.render_field>` but for a
list of fields from the same dictionary, rendering, indenting and
distributing them on separate lines.
dictionary
The dictionary to traverse.
fields
A list of field names or pa... |
def GetCustomJsonFieldMapping(message_type, python_name=None, json_name=None):
"""Return the appropriate remapping for the given field, or None."""
return _FetchRemapping(message_type, 'field',
python_name=python_name, json_name=json_name,
mappings=_JSON_FIE... | Return the appropriate remapping for the given field, or None. |
def invenio_query_factory(parser=None, walkers=None):
"""Create a parser returning Elastic Search DSL query instance."""
parser = parser or Main
walkers = walkers or [PypegConverter()]
walkers.append(ElasticSearchDSL())
def invenio_query(pattern):
query = pypeg2.parse(pattern, parser, white... | Create a parser returning Elastic Search DSL query instance. |
def default_values_of(func):
    """Return the names of `func`'s parameters that either carry a default
    value or are not plain positional-or-keyword parameters (i.e. *args,
    keyword-only arguments, and **kwargs are always included)."""
    params = inspect.signature(func).parameters
    names = []
    for name, param in params.items():
        has_default = param.default is not inspect.Parameter.empty
        is_special = param.kind != inspect.Parameter.POSITIONAL_OR_KEYWORD
        if has_default or is_special:
            names.append(name)
    return names
def window(self, vec):
"""Apply a window to the coefficients defined by *vec*. *vec* must
have length *nmax* + 1. This is good way to filter the pattern by
windowing in the coefficient domain.
Example::
>>> vec = numpy.linspace(0, 1, c.nmax + 1)
>>> c.w... | Apply a window to the coefficients defined by *vec*. *vec* must
have length *nmax* + 1. This is good way to filter the pattern by
windowing in the coefficient domain.
Example::
>>> vec = numpy.linspace(0, 1, c.nmax + 1)
>>> c.window(vec)
Args:
... |
def capture_vm_image(self, service_name, deployment_name, role_name, options):
'''
Creates a copy of the operating system virtual hard disk (VHD) and all
of the data VHDs that are associated with the Virtual Machine, saves
the VHD copies in the same storage location as the original VHDs,... | Creates a copy of the operating system virtual hard disk (VHD) and all
of the data VHDs that are associated with the Virtual Machine, saves
the VHD copies in the same storage location as the original VHDs, and
registers the copies as a VM Image in the image repository that is
associated ... |
def cancel(**kwargs):
"""Cancels work items based on their criteria.
Args:
**kwargs: Same parameters as the query() method.
Returns:
The number of tasks that were canceled.
"""
task_list = _query(**kwargs)
for task in task_list:
task.status = WorkQueue.CANCELED
... | Cancels work items based on their criteria.
Args:
**kwargs: Same parameters as the query() method.
Returns:
The number of tasks that were canceled. |
def build_mutation_pruner_plugin() -> LaserPlugin:
    """Construct and return a fresh mutation pruner plugin instance."""
    # NOTE(review): imported at call time rather than module level —
    # presumably to defer a heavy or cyclic dependency; confirm before moving.
    from mythril.laser.ethereum.plugins.implementations.mutation_pruner import (
        MutationPruner,
    )
    plugin = MutationPruner()
    return plugin
def read_random_state(self, group=None):
"""Reads the state of the random number generator from the file.
Parameters
----------
group : str
Name of group to read random state from.
Returns
-------
tuple
A tuple with 5 elements that can be... | Reads the state of the random number generator from the file.
Parameters
----------
group : str
Name of group to read random state from.
Returns
-------
tuple
A tuple with 5 elements that can be passed to numpy.set_state. |
def compare_outputs(expected, output, **kwargs):
"""
Compares expected values and output.
Returns None if no error, an exception message otherwise.
"""
SkipDim1 = kwargs.pop("SkipDim1", False)
NoProb = kwargs.pop("NoProb", False)
Dec4 = kwargs.pop("Dec4", False)
Dec3 = kwargs.pop("Dec3",... | Compares expected values and output.
Returns None if no error, an exception message otherwise. |
def load_file(self, file_path, share_name, directory_name, file_name, **kwargs):
"""
Upload a file to Azure File Share.
:param file_path: Path to the file to load.
:type file_path: str
:param share_name: Name of the share.
:type share_name: str
:param directory_n... | Upload a file to Azure File Share.
:param file_path: Path to the file to load.
:type file_path: str
:param share_name: Name of the share.
:type share_name: str
:param directory_name: Name of the directory.
:type directory_name: str
:param file_name: Name of the f... |
def _dispatch(name, *args, **kwargs):
"""
Dispatch to apply.
"""
def outer(self, *args, **kwargs):
def f(x):
x = self._shallow_copy(x, groupby=self._groupby)
return getattr(x, name)(*args, **kwargs)
return self._groupby.apply(f)
... | Dispatch to apply. |
def addChild(self, child_id):
"""Add a child to current workitem
:param child_id: the child workitem id/number
(integer or equivalent string)
"""
self.log.debug("Try to add a child <Workitem %s> to current "
"<Workitem %s>",
chi... | Add a child to current workitem
:param child_id: the child workitem id/number
(integer or equivalent string) |
def institute(context, institute_id, sanger_recipient, coverage_cutoff, frequency_cutoff,
display_name, remove_sanger):
"""
Update an institute
"""
adapter = context.obj['adapter']
LOG.info("Running scout update institute")
try:
adapter.update_institute(
i... | Update an institute |
def removeDuplicates(inFileName, outFileName) :
"""removes duplicated lines from a 'inFileName' CSV file, the results are witten in 'outFileName'"""
f = open(inFileName)
legend = f.readline()
data = ''
h = {}
h[legend] = 0
lines = f.readlines()
for l in lines :
if not h.has_key(l) :
h[l] = 0
data +=... | removes duplicated lines from a 'inFileName' CSV file, the results are witten in 'outFileName |
def to_comm(self, light_request=False):
'''
Convert `self` to :class:`.Archive`.
Returns:
obj: :class:`.Archive` instance.
'''
data = None
if not light_request:
tmp_fn = path_to_zip(self.dir_pointer)
data = read_as_base64(tmp_fn)
... | Convert `self` to :class:`.Archive`.
Returns:
obj: :class:`.Archive` instance. |
def get_roles(self, principal, object=None, no_group_roles=False):
"""Get all the roles attached to given `principal`, on a given
`object`.
:param principal: a :class:`User` or :class:`Group`
:param object: an :class:`Entity`
:param no_group_roles: If `True`, return only direc... | Get all the roles attached to given `principal`, on a given
`object`.
:param principal: a :class:`User` or :class:`Group`
:param object: an :class:`Entity`
:param no_group_roles: If `True`, return only direct roles, not roles
acquired through group membership. |
def default(self, obj):
"""Overriding the default JSONEncoder.default for NDB support."""
obj_type = type(obj)
# NDB Models return a repr to calls from type().
if obj_type not in self._ndb_type_encoding:
if hasattr(obj, '__metaclass__'):
obj_type = obj.__metaclass__
else:
# T... | Overriding the default JSONEncoder.default for NDB support. |
def convert_date(value, parameter):
'''
Converts to datetime.date:
'', '-', None convert to parameter default
The first matching format in settings.DATE_INPUT_FORMATS converts to datetime
'''
value = _check_default(value, parameter, ( '', '-', None ))
if value is None or isinstance(v... | Converts to datetime.date:
'', '-', None convert to parameter default
The first matching format in settings.DATE_INPUT_FORMATS converts to datetime |
def read(self, istream, kmip_version=enums.KMIPVersion.KMIP_1_0):
"""
Read the data encoding the Digest object and decode it into its
constituent parts.
Args:
istream (Stream): A data stream containing encoded object data,
supporting a read method; usually a ... | Read the data encoding the Digest object and decode it into its
constituent parts.
Args:
istream (Stream): A data stream containing encoded object data,
supporting a read method; usually a BytearrayStream object.
kmip_version (KMIPVersion): An enumeration definin... |
def save_modules():
"""
Context in which imported modules are saved.
Translates exceptions internal to the context into the equivalent exception
outside the context.
"""
saved = sys.modules.copy()
with ExceptionSaver() as saved_exc:
yield saved
sys.modules.update(saved)
# r... | Context in which imported modules are saved.
Translates exceptions internal to the context into the equivalent exception
outside the context. |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.