| code (string, 75–104k chars) | docstring (string, 1–46.9k chars) |
|---|---|
def validate_edge_direction(edge_direction):
"""Ensure the provided edge direction is either "in" or "out"."""
if not isinstance(edge_direction, six.string_types):
raise TypeError(u'Expected string edge_direction, got: {} {}'.format(
type(edge_direction), edge_direction))
if... | Ensure the provided edge direction is either "in" or "out". |
def django_include(context, template_name, **kwargs):
'''
Mako tag to include a Django template withing the current DMP (Mako) template.
Since this is a Django template, it is search for using the Django search
algorithm (instead of the DMP app-based concept).
See https://docs.djangoproject.com/en/2... | Mako tag to include a Django template withing the current DMP (Mako) template.
Since this is a Django template, it is search for using the Django search
algorithm (instead of the DMP app-based concept).
See https://docs.djangoproject.com/en/2.1/topics/templates/.
The current context is sent to the incl... |
def sync_main(async_main, config_path=None, default_config=None,
should_validate_task=True, loop_function=asyncio.get_event_loop):
"""Entry point for scripts using scriptworker.
This function sets up the basic needs for a script to run. More specifically:
* it creates the scriptworker con... | Entry point for scripts using scriptworker.
This function sets up the basic needs for a script to run. More specifically:
* it creates the scriptworker context and initializes it with the provided config
* the path to the config file is either taken from `config_path` or from `sys.argv[1]`.
... |
def submit_url(self, url, params=None, _extra_params=None):
    """Submit a website for analysis.

    :param url: the URL to submit for analysis.
    :param params: optional dict of user parameters; validated via
        ``_check_user_parameters`` and never mutated (a shallow copy is
        taken before ``url`` is added).
    :param _extra_params: optional dict of extra, non-user parameters
        forwarded verbatim to ``_submit``.
    :return: whatever ``_submit`` returns for this request.
    """
    # Mutable default arguments ({}) are shared across calls; use None
    # sentinels and build fresh dicts per call instead.
    if params is None:
        params = {}
    if _extra_params is None:
        _extra_params = {}
    self._check_user_parameters(params)
    params = copy.copy(params)
    params['url'] = url
    return self._submit(params, _extra_params=_extra_params)
def proxy_global(name, no_expand_macro=False, fname='func', args=()):
"""
Used to automatically asrootpy ROOT's thread local variables
"""
if no_expand_macro: # pragma: no cover
# handle older ROOT versions without _ExpandMacroFunction wrapping
@property
def gSomething_no_func(s... | Used to automatically asrootpy ROOT's thread local variables |
def get_threads_where_participant_is_active(self, participant_id):
""" Gets all the threads in which the current participant is involved. The method excludes threads where the participant has left. """
participations = Participation.objects.\
filter(participant__id=participant_id).\
... | Gets all the threads in which the current participant is involved. The method excludes threads where the participant has left. |
def decode_intervals(self, encoded, duration=None, multi=True, sparse=False,
transition=None, p_state=None, p_init=None):
'''Decode labeled intervals into (start, end, value) triples
Parameters
----------
encoded : np.ndarray, shape=(n_frames, m)
Fra... | Decode labeled intervals into (start, end, value) triples
Parameters
----------
encoded : np.ndarray, shape=(n_frames, m)
Frame-level annotation encodings as produced by
``encode_intervals``
duration : None or float > 0
The max duration of the annota... |
def pre_execute(self, execution, context):
"""Make sure the named directory is created if possible"""
path = self._fspath
if path:
path = path.format(
benchmark=context.benchmark,
api=execution['category'],
**execution.get('metas', {})
... | Make sure the named directory is created if possible |
def standardize(self, x):
"""Apply the normalization configuration to a batch of inputs.
# Arguments
x: batch of inputs to be normalized.
# Returns
The inputs, normalized.
"""
if self.preprocessing_function:
x = self.preprocessing_function(x)... | Apply the normalization configuration to a batch of inputs.
# Arguments
x: batch of inputs to be normalized.
# Returns
The inputs, normalized. |
def series_index(self, series):
    """
    Return the integer index of *series* in this sequence.

    Membership is determined by identity (``is``), not by equality.
    Raises ``ValueError`` when *series* is not present.
    """
    matches = (position for position, candidate in enumerate(self)
               if candidate is series)
    try:
        return next(matches)
    except StopIteration:
        raise ValueError('series not in chart data object')
def min(self):
    """Return the minimum of ``self``.

    Computed as the smallest of the per-element minima.

    See Also
    --------
    numpy.amin
    max
    """
    return np.min([part.ufuncs.min() for part in self.elem])
def loads(astring):
"""Decompress and deserialize string into Python object via marshal."""
try:
return marshal.loads(zlib.decompress(astring))
except zlib.error as e:
raise SerializerError(
'Cannot decompress object ("{}")'.format(str(e))
)
... | Decompress and deserialize string into Python object via marshal. |
def _update_tree_store(self):
""" Updates TreeStore of the Gtk.ListView according internal combo knowledge gained by
_update_internal_data_base function call.
"""
self.list_store.clear()
if self.view_dict['transitions_internal'] and isinstance(self.model, ContainerStateModel) an... | Updates TreeStore of the Gtk.ListView according internal combo knowledge gained by
_update_internal_data_base function call. |
def show_progress(self):
""" whether to show the progress of heavy calculations on this object. """
from pyemma import config
# no value yet, obtain from config
if not hasattr(self, "_show_progress"):
val = config.show_progress_bars
self._show_progress = val
... | whether to show the progress of heavy calculations on this object. |
def _Rforce(self,R,phi=0.,t=0.):
"""
NAME:
_Rforce
PURPOSE:
evaluate the radial force for this potential
INPUT:
R - Galactocentric cylindrical radius
phi - azimuth
t - time
OUTPUT:
the radial force
HISTORY:... | NAME:
_Rforce
PURPOSE:
evaluate the radial force for this potential
INPUT:
R - Galactocentric cylindrical radius
phi - azimuth
t - time
OUTPUT:
the radial force
HISTORY:
2010-11-24 - Written - Bovy (NYU) |
def complete_server(self, text, line, begidx, endidx):
    """Tab-completion hook: list server commands starting with *text*."""
    matching = []
    for command in PsiturkShell.server_commands:
        if command.startswith(text):
            matching.append(command)
    return matching
def query_pager_by_slug(slug, current_page_num=1, tag='', order=False):
'''
Query pager via category slug.
'''
cat_rec = MCategory.get_by_slug(slug)
if cat_rec:
cat_id = cat_rec.uid
else:
return None
# The flowing code is valid.
if... | Query pager via category slug. |
def Beach(fm, linewidth=2, facecolor='b', bgcolor='w', edgecolor='k',
alpha=1.0, xy=(0, 0), width=200, size=100, nofill=False,
zorder=100, axes=None):
"""
Return a beach ball as a collection which can be connected to an
current matplotlib axes instance (ax.add_collection).
S1, D1, a... | Return a beach ball as a collection which can be connected to an
current matplotlib axes instance (ax.add_collection).
S1, D1, and R1, the strike, dip and rake of one of the focal planes, can
be vectors of multiple focal mechanisms.
:param fm: Focal mechanism that is either number of mechanisms (NM) b... |
def setColor( self, color ):
"""
Convenience method to set the border, fill and highlight colors based
on the inputed color.
:param color | <QColor>
"""
# sets the border color as the full value
self.setBorderColor(color)
#... | Convenience method to set the border, fill and highlight colors based
on the inputed color.
:param color | <QColor> |
def _get_previous_open_tag(self, obj):
"""
Return the open tag of the previous sibling
"""
prev_instance = self.get_previous_instance(obj)
if prev_instance and prev_instance.plugin_type == self.__class__.__name__:
return prev_instance.glossary.get('open_tag') | Return the open tag of the previous sibling |
def derive_single_object_url_pattern(slug_url_kwarg, path, action):
    """
    Build the URL regex pattern for a single-object view.

    When *slug_url_kwarg* is truthy the pattern captures a slug under that
    keyword name; otherwise it captures a numeric ``pk``.
    """
    if slug_url_kwarg:
        capture = '(?P<%s>[^/]+)' % slug_url_kwarg
    else:
        capture = r'(?P<pk>\d+)'
    return '^%s/%s/%s/$' % (path, action, capture)
def to_dict(self):
"""
Creates a dictionary representing the state of this position.
Returns a dict object of the form:
"""
return {
'sid': self.asset,
'amount': self.amount,
'cost_basis': self.cost_basis,
'last_sale_price': self.la... | Creates a dictionary representing the state of this position.
Returns a dict object of the form: |
def _sort_lows_and_highs(func):
"Decorator for extract_cycles"
@functools.wraps(func)
def wrapper(*args, **kwargs):
for low, high, mult in func(*args, **kwargs):
if low < high:
yield low, high, mult
else:
yield high, low, mult
return wrappe... | Decorator for extract_cycles |
def _set_static_ag_ipv6_config(self, v, load=False):
"""
Setter method for static_ag_ipv6_config, mapped from YANG variable /rbridge_id/ipv6/static_ag_ipv6_config (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_static_ag_ipv6_config is considered as a private... | Setter method for static_ag_ipv6_config, mapped from YANG variable /rbridge_id/ipv6/static_ag_ipv6_config (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_static_ag_ipv6_config is considered as a private
method. Backends looking to populate this variable should
... |
def configurar_interface_de_rede(self, configuracao):
"""Sobrepõe :meth:`~satcfe.base.FuncoesSAT.configurar_interface_de_rede`.
:return: Uma resposta SAT padrão.
:rtype: satcfe.resposta.padrao.RespostaSAT
"""
resp = self._http_post('configurarinterfacederede',
co... | Sobrepõe :meth:`~satcfe.base.FuncoesSAT.configurar_interface_de_rede`.
:return: Uma resposta SAT padrão.
:rtype: satcfe.resposta.padrao.RespostaSAT |
def set_prefs(prefs):
"""This function is called before opening the project"""
# Specify which files and folders to ignore in the project.
# Changes to ignored resources are not added to the history and
# VCSs. Also they are not returned in `Project.get_files()`.
# Note that ``?`` and ``*`` match ... | This function is called before opening the project |
def age(self):
""" Returns the user's age, determined by their birthdate()
"""
if not self.birthdate():
return -1
adjuster = 0
today = date.today()
birthday = self.birthdate()
if today.month == birthday.month:
if today.day < birthday.day:
... | Returns the user's age, determined by their birthdate() |
def tag_values(request):
"""
Get tags types and values with localized names
language:
language of tags
"""
data = defaultdict(lambda: {"values": {}})
for tag in Tag.objects.filter(lang=get_language(request)):
data[tag.type]["name"] = tag.type_name
data[tag.type]["values"]... | Get tags types and values with localized names
language:
language of tags |
def send_keys(self, keys, wait=True):
"""
Send a raw key sequence to *Vim*.
.. note:: *Vim* style key sequence notation (like ``<Esc>``)
is not recognized.
Use escaped characters (like ``'\033'``) instead.
Example:
>>> import headlessvim
... | Send a raw key sequence to *Vim*.
.. note:: *Vim* style key sequence notation (like ``<Esc>``)
is not recognized.
Use escaped characters (like ``'\033'``) instead.
Example:
>>> import headlessvim
>>> with headlessvim.open() as vim:
... v... |
def _split_path(path, seps=PATH_SEPS):
"""
Parse path expression and return list of path items.
:param path: Path expression may contain separator chars.
:param seps: Separator char candidates.
:return: A list of keys to fetch object[s] later.
>>> assert _split_path('') == []
>>> assert _s... | Parse path expression and return list of path items.
:param path: Path expression may contain separator chars.
:param seps: Separator char candidates.
:return: A list of keys to fetch object[s] later.
>>> assert _split_path('') == []
>>> assert _split_path('/') == [''] # JSON Pointer spec expects... |
def loadUnStructuredGrid(filename): # not tested
"""Load a ``vtkunStructuredGrid`` object from file and return a ``Actor(vtkActor)`` object."""
reader = vtk.vtkUnstructuredGridReader()
reader.SetFileName(filename)
reader.Update()
gf = vtk.vtkUnstructuredGridGeometryFilter()
gf.SetInputConnectio... | Load a ``vtkunStructuredGrid`` object from file and return a ``Actor(vtkActor)`` object. |
def arrays2wcxf(C):
"""Convert a dictionary with Wilson coefficient names as keys and
numbers or numpy arrays as values to a dictionary with a Wilson coefficient
name followed by underscore and numeric indices as keys and numbers as
values. This is needed for the output in WCxf format."""
d = {}
... | Convert a dictionary with Wilson coefficient names as keys and
numbers or numpy arrays as values to a dictionary with a Wilson coefficient
name followed by underscore and numeric indices as keys and numbers as
values. This is needed for the output in WCxf format. |
def iflat_nodes(self, status=None, op="==", nids=None):
"""
Generators that produces a flat sequence of nodes.
if status is not None, only the tasks with the specified status are selected.
nids is an optional list of node identifiers used to filter the nodes.
"""
nids = a... | Generators that produces a flat sequence of nodes.
if status is not None, only the tasks with the specified status are selected.
nids is an optional list of node identifiers used to filter the nodes. |
def find_item_project(self, eitem):
"""
Find the project for a enriched item
:param eitem: enriched item for which to find the project
:return: the project entry (a dictionary)
"""
# get the data source name relying on the cfg section name, if null use the connector name
... | Find the project for a enriched item
:param eitem: enriched item for which to find the project
:return: the project entry (a dictionary) |
def minion_pub(self, load):
'''
Publish a command initiated from a minion, this method executes minion
restrictions so that the minion publication will only work if it is
enabled in the config.
The configuration on the master allows minions to be matched to
salt functions... | Publish a command initiated from a minion, this method executes minion
restrictions so that the minion publication will only work if it is
enabled in the config.
The configuration on the master allows minions to be matched to
salt functions, so the minions can only publish allowed salt f... |
def median1d(self, name, return_errors=False):
""" Return median 1d marginalized parameters
Parameters
----------
name: str
The name of the parameter requested
return_errors: Optional, {bool, False}
If true, return a second and third parameter that repres... | Return median 1d marginalized parameters
Parameters
----------
name: str
The name of the parameter requested
return_errors: Optional, {bool, False}
If true, return a second and third parameter that represents the
lower and upper 90% error on the param... |
def html_single_plot(self,abfID,launch=False,overwrite=False):
"""create ID_plot.html of just intrinsic properties."""
if type(abfID) is str:
abfID=[abfID]
for thisABFid in cm.abfSort(abfID):
parentID=cm.parent(self.groups,thisABFid)
saveAs=os.path.abspath("%s... | create ID_plot.html of just intrinsic properties. |
def blockstack_tx_filter( tx ):
"""
Virtualchain tx filter function:
* only take txs whose OP_RETURN payload starts with 'id'
"""
if not 'nulldata' in tx:
return False
if tx['nulldata'] is None:
return False
payload = binascii.unhexlify( tx['nulldata'] )
if payload.... | Virtualchain tx filter function:
* only take txs whose OP_RETURN payload starts with 'id' |
def check_version():
"""Sanity check version information for corrupt virtualenv symlinks
"""
if sys.version_info[0:3] == PYTHON_VERSION_INFO[0:3]:
return
sys.exit(
ansi.error() + ' your virtual env points to the wrong python version. '
'This is likely because you ... | Sanity check version information for corrupt virtualenv symlinks |
def _print_speed(self):
'''Print the current speed.'''
if self._bandwidth_meter.num_samples:
speed = self._bandwidth_meter.speed()
if self._human_format:
file_size_str = wpull.string.format_size(speed)
else:
file_size_str = '{:.1f} b'.... | Print the current speed. |
def form_field(self):
    "Build and return the configured form field instance."
    # NOTE(review): ``unicode`` implies Python 2 — confirm before porting.
    options = dict(required=False, label=unicode(self), widget=self.widget)
    options.update(self.extra)
    return self.field_class(**options)
def create_namespace(self, namespace):
"""
Create the specified CIM namespace in the WBEM server and
update this WBEMServer object to reflect the new namespace
there.
This method attempts the following approaches for creating the
namespace, in order, until an approach su... | Create the specified CIM namespace in the WBEM server and
update this WBEMServer object to reflect the new namespace
there.
This method attempts the following approaches for creating the
namespace, in order, until an approach succeeds:
1. Namespace creation as described in the ... |
def hicpro_mapping_chart (self):
""" Generate the HiC-Pro Aligned reads plot """
# Specify the order of the different possible categories
keys = OrderedDict()
keys['Full_Alignments_Read'] = { 'color': '#005ce6', 'name': 'Full reads Alignments' }
keys['Trimmed_Alignments_Read']... | Generate the HiC-Pro Aligned reads plot |
def match_file(filename):
    """Return True if file is okay for modifying/recursing."""
    if os.path.basename(filename).startswith('.'):
        # Skip hidden files and directories.
        return False
    # Recurse into directories; otherwise only touch .rst files.
    return os.path.isdir(filename) or filename.lower().endswith('.rst')
def show_instances(server, cim_class):
"""
Display the instances of the CIM_Class defined by cim_class. If the
namespace is None, use the interop namespace. Search all namespaces for
instances except for CIM_RegisteredProfile
"""
if cim_class == 'CIM_RegisteredProfile':
for inst in serve... | Display the instances of the CIM_Class defined by cim_class. If the
namespace is None, use the interop namespace. Search all namespaces for
instances except for CIM_RegisteredProfile |
def _FindLargestIdPostfixNumber(self, schedule):
"""Finds the largest integer used as the ending of an id in the schedule.
Args:
schedule: The schedule to check.
Returns:
The maximum integer used as an ending for an id.
"""
postfix_number_re = re.compile('(\d+)$')
def ExtractPostf... | Finds the largest integer used as the ending of an id in the schedule.
Args:
schedule: The schedule to check.
Returns:
The maximum integer used as an ending for an id. |
def image_uuid(pil_img):
"""
UNSAFE: DEPRICATE: JPEG IS NOT GAURENTEED TO PRODUCE CONSITENT VALUES ON
MULTIPLE MACHINES image global unique id
References:
http://stackoverflow.com/questions/23565889/jpeg-images-have-different-pixel-values-across-multiple-devices
"""
print('WARNING DO N... | UNSAFE: DEPRICATE: JPEG IS NOT GAURENTEED TO PRODUCE CONSITENT VALUES ON
MULTIPLE MACHINES image global unique id
References:
http://stackoverflow.com/questions/23565889/jpeg-images-have-different-pixel-values-across-multiple-devices |
def create_config(config_path="scriptworker.yaml"):
"""Create a config from DEFAULT_CONFIG, arguments, and config file.
Then validate it and freeze it.
Args:
config_path (str, optional): the path to the config file. Defaults to
"scriptworker.yaml"
Returns:
tuple: (config ... | Create a config from DEFAULT_CONFIG, arguments, and config file.
Then validate it and freeze it.
Args:
config_path (str, optional): the path to the config file. Defaults to
"scriptworker.yaml"
Returns:
tuple: (config frozendict, credentials dict)
Raises:
SystemEx... |
def add_pagination_meta(self, params, meta):
"""Extend default meta dictionary value with pagination hints.
Note:
This method handler attaches values to ``meta`` dictionary without
changing it's reference. This means that you should never replace
``meta`` dictionary ... | Extend default meta dictionary value with pagination hints.
Note:
This method handler attaches values to ``meta`` dictionary without
changing it's reference. This means that you should never replace
``meta`` dictionary with any other dict instance but simply modify
... |
def upload(self, fileobj, tileset, name=None, patch=False, callback=None, bypass=False):
"""Upload data and create a Mapbox tileset
Effectively replicates the Studio upload feature. Returns a
Response object, the json() of which returns a dict with upload
metadata.
Parameters
... | Upload data and create a Mapbox tileset
Effectively replicates the Studio upload feature. Returns a
Response object, the json() of which returns a dict with upload
metadata.
Parameters
----------
fileobj: file object or str
A filename or a Python file object... |
def _get_ordering_field_lookup(self, field_name):
"""
get real model field to order by
"""
field = field_name
get_field = getattr(self, "get_%s_ordering_field" % field_name, None)
if get_field:
field = get_field()
return field | get real model field to order by |
def _model_foreign(ins):
""" Get foreign keys info
:type ins: sqlalchemy.orm.mapper.Mapper
:rtype: list[SaForeignkeyDoc]
"""
fks = []
for t in ins.tables:
fks.extend([
SaForeignkeyDoc(
key=fk.column.key,
target=fk.target_fullname,
... | Get foreign keys info
:type ins: sqlalchemy.orm.mapper.Mapper
:rtype: list[SaForeignkeyDoc] |
def _wrap_class(request_handler, validator):
"""Decorate each HTTP verb method to check if the request is authenticated
:param request_handler: a tornado.web.RequestHandler instance
"""
METHODS = ['get', 'post', 'put', 'head', 'options', 'delete', 'patch']
for name in METHODS:
method = geta... | Decorate each HTTP verb method to check if the request is authenticated
:param request_handler: a tornado.web.RequestHandler instance |
def clear(self, decorated_function=None):
""" :meth:`WCacheStorage.clear` method implementation (Clears statistics also)
"""
if decorated_function is not None and decorated_function in self._storage:
self._storage.pop(decorated_function)
else:
self._storage.clear()
if self.__statistic is True:
self.... | :meth:`WCacheStorage.clear` method implementation (Clears statistics also) |
def _gti_dirint_lt_90(poa_global, aoi, aoi_lt_90, solar_zenith, solar_azimuth,
times, surface_tilt, surface_azimuth, pressure=101325.,
use_delta_kt_prime=True, temp_dew=None, albedo=.25,
model='perez', model_perez='allsitescomposite1990',
... | GTI-DIRINT model for AOI < 90 degrees. See Marion 2015 Section 2.1.
See gti_dirint signature for parameter details. |
def is_course_run_enrollable(course_run):
"""
Return true if the course run is enrollable, false otherwise.
We look for the following criteria:
- end is greater than now OR null
- enrollment_start is less than now OR null
- enrollment_end is greater than now OR null
"""
now = datetime.d... | Return true if the course run is enrollable, false otherwise.
We look for the following criteria:
- end is greater than now OR null
- enrollment_start is less than now OR null
- enrollment_end is greater than now OR null |
def findall(text):
"""Find all the timestrings within a block of text.
>>> timestring.findall("once upon a time, about 3 weeks ago, there was a boy whom was born on august 15th at 7:20 am. epic.")
[
('3 weeks ago,', <timestring.Date 2014-02-09 00:00:00 4483019280>),
('august 15th at 7:20 am', <ti... | Find all the timestrings within a block of text.
>>> timestring.findall("once upon a time, about 3 weeks ago, there was a boy whom was born on august 15th at 7:20 am. epic.")
[
('3 weeks ago,', <timestring.Date 2014-02-09 00:00:00 4483019280>),
('august 15th at 7:20 am', <timestring.Date 2014-08-15 0... |
def save(self):
"""
Creates a new user and account. Returns the newly created user.
"""
username, email, password = (self.cleaned_data['username'],
self.cleaned_data['email'],
self.cleaned_data['password... | Creates a new user and account. Returns the newly created user. |
def unstack_annotations(annotations_sframe, num_rows=None):
"""
Converts object detection annotations (ground truth or predictions) to
unstacked format (an `SArray` where each element is a list of object
instances).
Parameters
----------
annotations_sframe: SFrame
An `SFrame` with s... | Converts object detection annotations (ground truth or predictions) to
unstacked format (an `SArray` where each element is a list of object
instances).
Parameters
----------
annotations_sframe: SFrame
An `SFrame` with stacked predictions, produced by the
`stack_annotations` function... |
def events(cls, filters):
"""Retrieve events details from status.gandi.net."""
current = filters.pop('current', False)
current_params = []
if current:
current_params = [('current', 'true')]
filter_url = uparse.urlencode(sorted(list(filters.items())) + current_params)... | Retrieve events details from status.gandi.net. |
async def async_poller(client, initial_response, deserialization_callback, polling_method):
"""Async Poller for long running operations.
:param client: A msrest service client. Can be a SDK client and it will be casted to a ServiceClient.
:type client: msrest.service_client.ServiceClient
:param initial... | Async Poller for long running operations.
:param client: A msrest service client. Can be a SDK client and it will be casted to a ServiceClient.
:type client: msrest.service_client.ServiceClient
:param initial_response: The initial call response
:type initial_response: msrest.universal_http.ClientRespon... |
def note_list(self, body_matches=None, post_id=None, post_tags_match=None,
creator_name=None, creator_id=None, is_active=None):
"""Return list of notes.
Parameters:
body_matches (str): The note's body matches the given terms.
post_id (int): A specific post.
... | Return list of notes.
Parameters:
body_matches (str): The note's body matches the given terms.
post_id (int): A specific post.
post_tags_match (str): The note's post's tags match the given terms.
creator_name (str): The creator's name. Exact match.
cr... |
def model_to_select_list(model_class, filter_dict=None, q_filter=None):
"""
只选择 id 和 name,用来做列表选择
:param model_class:
:param filter_dict:
:param q_filter:
:return:
"""
if filter_dict is None:
filter_dict = {}
if q_filter is not None:
filter_list = [q_filter]
else... | 只选择 id 和 name,用来做列表选择
:param model_class:
:param filter_dict:
:param q_filter:
:return: |
def determinize(m):
"""Determinizes a finite automaton."""
if not m.is_finite():
raise TypeError("machine must be a finite automaton")
transitions = collections.defaultdict(lambda: collections.defaultdict(set))
alphabet = set()
for transition in m.get_transitions():
[[lstate], read]... | Determinizes a finite automaton. |
def hpforest(self, data: ['SASdata', str] = None,
freq: str = None,
id: str = None,
input: [str, list, dict] = None,
save: str = None,
score: [str, bool, 'SASdata'] = True,
target: [str, list, dict] = None,
... | Python method to call the HPFOREST procedure
Documentation link:
https://support.sas.com/documentation/solutions/miner/emhp/14.1/emhpprcref.pdf
:param data: SASdata object or string. This parameter is required.
:parm freq: The freq variable can only be a string type.
:parm id: ... |
def _repr_html_row_(self, keys):
"""
Jupyter Notebook magic repr function as a row – used by
``Legend._repr_html_()``.
"""
tr, th, c = '', '', ''
r = '<td style="{stl}">{v}</td>'
h = '<th>{k}</th>'
for k in keys:
v = self.__dict__.get(k)
... | Jupyter Notebook magic repr function as a row – used by
``Legend._repr_html_()``. |
def check_denovo_input(inputfile, params):
"""
Check if an input file is valid, which means BED, narrowPeak or FASTA
"""
background = params["background"]
input_type = determine_file_type(inputfile)
if input_type == "fasta":
valid_bg = FA_VALID_BGS
elif input_type in ["... | Check if an input file is valid, which means BED, narrowPeak or FASTA |
def re_evaluate(local_dict=None):
"""Re-evaluate the previous executed array expression without any check.
This is meant for accelerating loops that are re-evaluating the same
expression repeatedly without changing anything else than the operands.
If unsure, use evaluate() which is safer.
Paramete... | Re-evaluate the previous executed array expression without any check.
This is meant for accelerating loops that are re-evaluating the same
expression repeatedly without changing anything else than the operands.
If unsure, use evaluate() which is safer.
Parameters
----------
local_dict : dicti... |
def write_by_templ(templ, target, sub_value, safe=False):
"""根据模版写入文件。
:param str templ: 模版文件所在路径。
:param str target: 要写入的文件所在路径。
:param dict sub_value: 被替换的内容。
"""
templ_txt = read_file(templ)
txt = None
if safe:
txt = Template(templ_txt).safe_substitute(sub_value)
else:
... | 根据模版写入文件。
:param str templ: 模版文件所在路径。
:param str target: 要写入的文件所在路径。
:param dict sub_value: 被替换的内容。 |
def _imply_options(self):
'''
Some options enable others automatically
'''
self.no_upload = self.no_upload or self.to_stdout or self.offline
self.auto_update = self.auto_update and not self.offline
if (self.analyze_container or
self.analyze_file or
s... | Some options enable others automatically |
def process_flat_files(id_mappings_file, complexes_file=None, ptm_file=None,
ppi_file=None, seq_file=None, motif_window=7):
"""Get INDRA Statements from HPRD data.
Of the arguments, `id_mappings_file` is required, and at least one of
`complexes_file`, `ptm_file`, and `ppi_file` must ... | Get INDRA Statements from HPRD data.
Of the arguments, `id_mappings_file` is required, and at least one of
`complexes_file`, `ptm_file`, and `ppi_file` must also be given. If
`ptm_file` is given, `seq_file` must also be given.
Note that many proteins (> 1,600) in the HPRD content are associated with
... |
def get_n_excluded_patches(self):
"""
Gets number of excluded patches from patches_base:
#patches_base=1.0.0+THIS_NUMBER
"""
base = self.get_patches_base()
if not base:
return 0
p = base.rfind('+')
if p == -1:
return 0
try:
... | Gets number of excluded patches from patches_base:
#patches_base=1.0.0+THIS_NUMBER |
def remove_info_file():
"""Remove the current process's TensorBoardInfo file, if it exists.
If the file does not exist, no action is taken and no error is raised.
"""
try:
os.unlink(_get_info_file_path())
except OSError as e:
if e.errno == errno.ENOENT:
# The user may have wiped their temporary... | Remove the current process's TensorBoardInfo file, if it exists.
If the file does not exist, no action is taken and no error is raised. |
def removeFile(file):
    """Ask the user for confirmation, then delete *file* via ``rm -r``."""
    answer = speech.question("Are you sure you want to remove " + file + "? (Y/N): ")
    if "y" not in answer:
        speech.speak("Okay, I won't remove " + file + ".")
        return
    speech.speak("Removing " + file + " with the 'rm' command.")
    subprocess.call(["rm", "-r", file])
def topfnfile(self, fileobj):
    """
    Write this cache object to *fileobj* as a plain-text PFN file, one
    entry path per line, then close the file object.

    :param fileobj: a writable, closeable file-like object.
    """
    # Python-2-only `print >>fileobj, entry.path` replaced with an explicit
    # write so the code also runs on Python 3; output is identical.
    for entry in self:
        fileobj.write('%s\n' % entry.path)
    fileobj.close()
def run_file(path_or_file, context=None):
''' Context must be EvalJS object. Runs given path as a JS program. Returns (eval_value, context).
'''
if context is None:
context = EvalJs()
if not isinstance(context, EvalJs):
raise TypeError('context must be the instance of EvalJs')
eval_v... | Context must be EvalJS object. Runs given path as a JS program. Returns (eval_value, context). |
def get_books_for_schedule(self, schedule):
"""
Returns a dictionary of data. SLNs are the keys, an array of Book
objects are the values.
"""
slns = self._get_slns(schedule)
books = {}
for sln in slns:
try:
section_books = self.get_b... | Returns a dictionary of data. SLNs are the keys, an array of Book
objects are the values. |
def handle_errors(
cls, message, *format_args,
re_raise=True, exception_class=Exception,
do_finally=None, do_except=None, do_else=None,
**format_kwds
):
"""
provides a context manager that will intercept exceptions and repackage
them as Buzz in... | provides a context manager that will intercept exceptions and repackage
them as Buzz instances with a message attached:
.. code-block:: python
with Buzz.handle_errors("It didn't work"):
some_code_that_might_raise_an_exception()
:param: message: The message to attach... |
def recordAndPropagate(self, request: Request, clientName):
    """
    Add *request* to the set of tracked requests, propagate it on behalf
    of *clientName*, and then attempt to forward it.

    :param request: the request to record and pass along
    :param clientName: name of the client the request originated from
    """
    # Order matters: record first so that propagation and forwarding
    # both observe the request as already tracked.
    self.requests.add(request)
    self.propagate(request, clientName)
    self.tryForwarding(request)
:param request:
:param clientName: |
def is_cf_trajectory(nc, variable):
'''
Returns true if the variable is a CF trajectory feature type
:param netCDF4.Dataset nc: An open netCDF dataset
:param str variable: name of the variable to check
'''
# x(i, o), y(i, o), z(i, o), t(i, o)
# X(i, o)
dims = nc.variables[variable].dime... | Returns true if the variable is a CF trajectory feature type
:param netCDF4.Dataset nc: An open netCDF dataset
:param str variable: name of the variable to check |
def parse_pkcs12(data, password=None):
"""
Parses a PKCS#12 ANS.1 DER-encoded structure and extracts certs and keys
:param data:
A byte string of a DER-encoded PKCS#12 file
:param password:
A byte string of the password to any encrypted data
:raises:
ValueError - when any ... | Parses a PKCS#12 ANS.1 DER-encoded structure and extracts certs and keys
:param data:
A byte string of a DER-encoded PKCS#12 file
:param password:
A byte string of the password to any encrypted data
:raises:
ValueError - when any of the parameters are of the wrong type or value
... |
def percent(self, value) -> 'Gap':
    """Set the gap as a percentage value and return *self* for chaining.

    :param value: numeric percentage; validated by ``raise_not_number``
    :raises: whatever ``raise_not_number`` raises for non-numeric input
    """
    # Reject non-numeric input before formatting the CSS-style value.
    raise_not_number(value)
    self.gap = '{}%'.format(value)
    return self
def synchronize(self, graph_data=None):
"""
Synchronize ``facebook_username``, ``first_name``, ``middle_name``,
``last_name`` and ``birthday`` with Facebook.
:param graph_data: Optional pre-fetched graph data
"""
profile = graph_data or self.graph.get('me')
self... | Synchronize ``facebook_username``, ``first_name``, ``middle_name``,
``last_name`` and ``birthday`` with Facebook.
:param graph_data: Optional pre-fetched graph data |
def dtype_contract(input_dtype=None, output_dtype=None):
"""Function decorator for specifying input and/or output array dtypes."""
def wrap(function):
@wraps(function)
def wrapped_function(*args, **kwargs):
if input_dtype is not None:
check_dtype(args[0], input_dtype)... | Function decorator for specifying input and/or output array dtypes. |
def predecessors(self, node, exclude_compressed=True):
"""
Returns the list of predecessors of a given node
Parameters
----------
node : str
The target node
exclude_compressed : boolean
If true, compressed nodes are excluded from the predecessors... | Returns the list of predecessors of a given node
Parameters
----------
node : str
The target node
exclude_compressed : boolean
If true, compressed nodes are excluded from the predecessors list
Returns
-------
list
List of pre... |
def enumeration(*values, **kwargs):
''' Create an |Enumeration| object from a sequence of values.
Call ``enumeration`` with a sequence of (unique) strings to create an
Enumeration object:
.. code-block:: python
#: Specify the horizontal alignment for rendering text
TextAlign = enumera... | Create an |Enumeration| object from a sequence of values.
Call ``enumeration`` with a sequence of (unique) strings to create an
Enumeration object:
.. code-block:: python
#: Specify the horizontal alignment for rendering text
TextAlign = enumeration("left", "right", "center")
Args:
... |
def print_all():
"""
Prints all currently defined configurations.
"""
# read configuration file
_, conf = read_latoolscfg()
default = conf['DEFAULT']['config']
pstr = '\nCurrently defined LAtools configurations:\n\n'
for s in conf.sections():
if s == default:
pstr +... | Prints all currently defined configurations. |
def get_pr(pr_num, config=None, repo=DEFAULT_REPO, raw=False):
"""
Get the payload for the given PR number. Let exceptions bubble up.
"""
response = requests.get(PR_ENDPOINT.format(repo, pr_num), auth=get_auth_info(config))
if raw:
return response
else:
response.raise_for_status... | Get the payload for the given PR number. Let exceptions bubble up. |
def nu_max(self, *args):
    """Asteroseismic frequency of maximum power, nu_max, in uHz.

    Scaling relation reference:
    https://arxiv.org/pdf/1312.3853v1.pdf, Eq (3)
    """
    # Evaluate the stellar properties once each, in the same order as before.
    mass = self.mass(*args)
    radius = self.radius(*args)
    teff = self.Teff(*args)
    # Grouping kept as 3120 * (M / D) to preserve floating-point results.
    return 3120. * (mass / (radius ** 2 * np.sqrt(teff / 5777.)))
reference: https://arxiv.org/pdf/1312.3853v1.pdf, Eq (3) |
def _download_astorb(
self):
"""*download the astorb database file*
**Key Arguments:**
- ``astorbgz`` -- path to the downloaded astorb database file
"""
self.log.info('starting the ``_download_astorb`` method')
# DOWNLOAD ASTORB
url = self.settin... | *download the astorb database file*
**Key Arguments:**
- ``astorbgz`` -- path to the downloaded astorb database file |
def strain(self, ifo, duration=32, sample_rate=4096):
""" Return strain around the event
Currently this will return the strain around the event in the smallest
format available. Selection of other data is not yet available.
Parameters
----------
ifo: str
The... | Return strain around the event
Currently this will return the strain around the event in the smallest
format available. Selection of other data is not yet available.
Parameters
----------
ifo: str
The name of the observatory you want strain for. Ex. H1, L1, V1
... |
def _parse_materials(header, views):
"""
Convert materials and images stored in a GLTF header
and buffer views to PBRMaterial objects.
Parameters
------------
header : dict
Contains layout of file
views : (n,) bytes
Raw data
Returns
------------
materials : list
... | Convert materials and images stored in a GLTF header
and buffer views to PBRMaterial objects.
Parameters
------------
header : dict
Contains layout of file
views : (n,) bytes
Raw data
Returns
------------
materials : list
List of trimesh.visual.texture.Material object... |
def delete_all(self, filter, force=False, timeout=-1):
"""
Deletes all resources from the appliance that match the provided filter.
Args:
filter:
A general filter/query string to narrow the list of items deleted.
force:
If set to true, the... | Deletes all resources from the appliance that match the provided filter.
Args:
filter:
A general filter/query string to narrow the list of items deleted.
force:
If set to true, the operation completes despite any problems with network connectivity or erro... |
def basic_params1():
"""A set of basic hyperparameters."""
return hparam.HParams(
# If the problem consists of variable-length sequences
# (see problem.batch_size_means_tokens()), then this is the number
# of tokens per batch per GPU or per TPU core. Otherwise, this is
# the number of examp... | A set of basic hyperparameters. |
def _connect(self):
"""
Establish a connection to the master process's UNIX listener socket,
constructing a mitogen.master.Router to communicate with the master,
and a mitogen.parent.Context to represent it.
Depending on the original transport we should emulate, trigger one of
... | Establish a connection to the master process's UNIX listener socket,
constructing a mitogen.master.Router to communicate with the master,
and a mitogen.parent.Context to represent it.
Depending on the original transport we should emulate, trigger one of
the _connect_*() service calls de... |
def get_geocode(city, state, street_address="", zipcode=""):
"""
For a given location or object, takes address data and returns
latitude and longitude coordinates from Google geocoding service
get_geocode(self, street_address="1709 Grand Ave.", state="MO", zip="64112")
Returns a tuple of (lat, long)... | For given location or object, takes address data and returns
latitude and longitude coordinates from Google geocoding service
get_geocode(self, street_address="1709 Grand Ave.", state="MO", zip="64112")
Returns a tuple of (lat, long)
Most times you'll want to join the return. |
def _file_where(user_id, api_path):
    """
    Build a WHERE clause selecting the file at *api_path* owned by *user_id*.
    """
    # Split the API path into its containing directory and base name.
    parent, basename = split_api_filepath(api_path)
    return and_(
        files.c.name == basename,
        files.c.user_id == user_id,
        files.c.parent_name == parent,
    )
def console_hline(
con: tcod.console.Console,
x: int,
y: int,
l: int,
flag: int = BKGND_DEFAULT,
) -> None:
"""Draw a horizontal line on the console.
This always uses the character 196, the horizontal line character.
.. deprecated:: 8.5
Use :any:`Console.hline` instead.
"""... | Draw a horizontal line on the console.
This always uses the character 196, the horizontal line character.
.. deprecated:: 8.5
Use :any:`Console.hline` instead. |
def random_forest_error(forest, X_train, X_test, inbag=None,
calibrate=True, memory_constrained=False,
memory_limit=None):
"""
Calculate error bars from scikit-learn RandomForest estimators.
RandomForest is a regressor or classifier object
this variance c... | Calculate error bars from scikit-learn RandomForest estimators.
RandomForest is a regressor or classifier object
this variance can be used to plot error bars for RandomForest objects
Parameters
----------
forest : RandomForest
Regressor or Classifier object.
X_train : ndarray
... |
def _idx_table_by_num(tables):
"""
Switch tables to index-by-number
:param dict tables: Metadata
:return list _tables: Metadata
"""
logger_jsons.info("enter idx_table_by_num")
_tables = []
for name, table in tables.items():
try:
# Get the modified table data
... | Switch tables to index-by-number
:param dict tables: Metadata
:return list _tables: Metadata |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.