Search is not available for this dataset
text (string, lengths 75 – 104k) |
|---|
def Kdp(scatterer):
"""
Specific differential phase (K_dp) for the current setup.
Args:
scatterer: a Scatterer instance.
Returns:
K_dp [deg/km].
NOTE: This only returns the correct value if the particle diameter and
wavelength are given in [mm]. The scatterer object should be ... |
def mg_refractive(m, mix):
"""Maxwell-Garnett EMA for the refractive index.
Args:
m: Tuple of the complex refractive indices of the media.
mix: Tuple of the volume fractions of the media, len(mix)==len(m)
(if sum(mix)!=1, these are taken relative to sum(mix))
Returns:
The ... |
def bruggeman_refractive(m, mix):
"""Bruggeman EMA for the refractive index.
For instructions, see mg_refractive in this module, except this routine
only works for two components.
"""
f1 = mix[0]/sum(mix)
f2 = mix[1]/sum(mix)
e1 = m[0]**2
e2 = m[1]**2
a = -2*(f1+f2)
b = (2*f1*e1... |
def ice_refractive(file):
"""
Interpolator for the refractive indices of ice.
Inputs:
File to read the refractive index lookup table from.
This is supplied as "ice_refr.dat", retrieved from
http://www.atmos.washington.edu/ice_optical_constants/
Returns:
A callable object th... |
def dsr_thurai_2007(D_eq):
"""
Drop shape relationship function from Thurai2007
(http://dx.doi.org/10.1175/JTECH2051.1) paper.
Arguments:
D_eq: Drop volume-equivalent diameter (mm)
Returns:
r: The vertical-to-horizontal drop axis ratio. Note: the Scatterer class
expects hori... |
def feature_importances(data, top_n=None, feature_names=None):
"""
Get and order feature importances from a scikit-learn model
or from an array-like structure.
If data is a scikit-learn model with sub-estimators (e.g. RandomForest,
AdaBoost) the function will compute the standard deviation of each
... |
def _group_by(data, criteria):
"""
Group objects in data using a function or a key
"""
if isinstance(criteria, str):
criteria_str = criteria
def criteria(x):
return x[criteria_str]
res = defaultdict(list)
for element in data:
key = criteria(element)
... |
def _get_params_value(params):
"""
Given an iterator (k1, k2), returns a function that when called
with an object obj returns a tuple of the form:
((k1, obj.parameters[k1]), (k2, obj.parameters[k2]))
"""
# sort params for consistency
ord_params = sorted(params)
def fn(obj):
... |
def _product(k, v):
    """
    Cartesian product of two objects, wrapping any non-iterable
    argument in a single-element list first.
    """
    ks = k if _can_iterate(k) else [k]
    vs = v if _can_iterate(v) else [v]
    return list(product(ks, vs))
def _mapping_to_tuple_pairs(d):
"""
Convert a mapping object (such as a dictionary) to tuple pairs,
using its keys and values to generate the pairs and then generating
all possible combinations between those
e.g. {1: (1,2,3)} -> (((1, 1),), ((1, 2),), ((1, 3),))
"""
# order t... |
def learning_curve(train_scores, test_scores, train_sizes, ax=None):
"""Plot a learning curve
Plot a metric vs number of examples for the training and test set
Parameters
----------
train_scores : array-like
Scores for the training set
test_scores : array-like
Scores for the te... |
def precision_recall(y_true, y_score, ax=None):
"""
Plot precision-recall curve.
Parameters
----------
y_true : array-like, shape = [n_samples]
Correct target values (ground truth).
y_score : array-like, shape = [n_samples] or [n_samples, 2] for binary
classification or [n... |
def _precision_recall(y_true, y_score, ax=None):
"""
Plot precision-recall curve.
Parameters
----------
y_true : array-like, shape = [n_samples]
Correct target values (ground truth).
y_score : array-like, shape = [n_samples]
Target scores (estimator predictions).
ax : matplo... |
def _precision_recall_multi(y_true, y_score, ax=None):
"""
Plot precision-recall curve.
Parameters
----------
y_true : array-like, shape = [n_samples, n_classes]
Correct target values (ground truth).
y_score : array-like, shape = [n_samples, n_classes]
Target scores (estimator p... |
def precision_at(y_true, y_score, proportion, ignore_nas=False):
'''
Calculates precision at a given proportion.
Only supports binary classification.
'''
# Sort scores in descending order
scores_sorted = np.sort(y_score)[::-1]
# Based on the proportion, get the index to split the data
#... |
def __precision(y_true, y_pred):
'''
Precision metric tolerant to unlabeled data in y_true,
NA values are ignored for the precision calculation
'''
# make copies of the arrays to avoid modifying the original ones
y_true = np.copy(y_true)
y_pred = np.copy(y_pred)
# precision = tp... |
def labels_at(y_true, y_score, proportion, normalize=False):
'''
Return the number of labels encountered in the top X proportion
'''
# Get indexes of scores sorted in descending order
indexes = np.argsort(y_score)[::-1]
# Sort true values in the same order
y_true_sorted = y_true[indexe... |
def validation_curve(train_scores, test_scores, param_range, param_name=None,
semilogx=False, ax=None):
"""Plot a validation curve
Plot a metric vs hyperpameter values for the training and test set
Parameters
----------
train_scores : array-like
Scores for the training... |
def confusion_matrix(y_true, y_pred, target_names=None, normalize=False,
cmap=None, ax=None):
"""
Plot confustion matrix.
Parameters
----------
y_true : array-like, shape = [n_samples]
Correct target values (ground truth).
y_pred : array-like, shape = [n_samples]
... |
def feature_importances(data, top_n=None, feature_names=None, ax=None):
"""
Get and order feature importances from a scikit-learn model
or from an array-like structure. If data is a scikit-learn model with
sub-estimators (e.g. RandomForest, AdaBoost) the function will compute the
standard deviation ... |
def precision_at_proportions(y_true, y_score, ax=None):
"""
Plot precision values at different proportions.
Parameters
----------
y_true : array-like
Correct target values (ground truth).
y_score : array-like
Target scores (estimator predictions).
ax : matplotlib Axes
... |
def feature_importances(data, top_n=None, feature_names=None):
"""
Get and order feature importances from a scikit-learn model
or from an array-like structure.
If data is a scikit-learn model with sub-estimators (e.g. RandomForest,
AdaBoost) the function will compute the standard deviation of each
... |
def block_parser(part, rgxin, rgxout, fmtin, fmtout):
"""
part is a string of ipython text, comprised of at most one
input, one output, comments, and blank lines. The block parser
parses the text into a list of::
blocks = [ (TOKEN0, data0), (TOKEN1, data1), ...]
where TOKEN is one of [COMME... |
def process_block(self, block):
"""
process block from the block_parser and return a list of processed lines
"""
ret = []
output = None
input_lines = None
lineno = self.IP.execution_count
input_prompt = self.promptin % lineno
output_prompt = self.... |
def grid_search(grid_scores, change, subset=None, kind='line', cmap=None,
ax=None):
"""
Plot results from a sklearn grid search by changing two parameters at most.
Parameters
----------
grid_scores : list of named tuples
Results from a sklearn grid search (get them using the... |
def confusion_matrix(self):
    """Confusion matrix plot
    """
    ax = _gen_ax()
    return plot.confusion_matrix(self.y_true, self.y_pred,
                                 self.target_names, ax=ax)
def roc(self):
    """ROC plot
    """
    ax = _gen_ax()
    return plot.roc(self.y_true, self.y_score, ax=ax)
def precision_recall(self):
    """Precision-recall plot
    """
    ax = _gen_ax()
    return plot.precision_recall(self.y_true, self.y_score, ax=ax)
def feature_importances(self):
    """Feature importances plot
    """
    ax = _gen_ax()
    return plot.feature_importances(
        self.estimator, feature_names=self.feature_names, ax=ax)
def feature_importances_table(self):
    """Feature importances table
    """
    # deferred (function-scope) import, kept from the original — presumably
    # avoids a circular import at module load time; confirm before moving it
    from . import table
    return table.feature_importances(
        self.estimator, feature_names=self.feature_names)
def precision_at_proportions(self):
    """Precision at proportions plot
    """
    ax = _gen_ax()
    return plot.precision_at_proportions(self.y_true, self.y_score,
                                         ax=ax)
def generate_report(self, template, path=None, style=None):
"""
Generate HTML report
Parameters
----------
template : markdown-formatted string or path to the template
file used for rendering the report. Any attribute of this
object can be included in th... |
def roc(y_true, y_score, ax=None):
"""
Plot ROC curve.
Parameters
----------
y_true : array-like, shape = [n_samples]
Correct target values (ground truth).
y_score : array-like, shape = [n_samples] or [n_samples, 2] for binary
classification or [n_samples, n_classes] for m... |
def _roc(y_true, y_score, ax=None):
"""
Plot ROC curve for binary classification.
Parameters
----------
y_true : array-like, shape = [n_samples]
Correct target values (ground truth).
y_score : array-like, shape = [n_samples]
Target scores (estimator predictions).
ax: matplot... |
def _roc_multi(y_true, y_score, ax=None):
"""
Plot ROC curve for multi classification.
Parameters
----------
y_true : array-like, shape = [n_samples, n_classes]
Correct target values (ground truth).
y_score : array-like, shape = [n_samples, n_classes]
Target scores (estimator pr... |
async def get_connection(self, container):
'''
Get an exclusive connection, useful for blocked commands and transactions.
You must call release or shutdown (not recommanded) to return the connection after use.
:param container: routine container
:return... |
async def execute_command(self, container, *args):
'''
Execute command on Redis server:
- For (P)SUBSCRIBE/(P)UNSUBSCRIBE, the command is sent to the subscribe connection.
It is recommended to use (p)subscribe/(p)unsubscribe method instead of directly call the command
- F... |
async def subscribe(self, container, *keys):
'''
Subscribe to specified channels
:param container: routine container
:param *keys: subscribed channels
:returns: list of event matchers for the specified channels
'''
await self._get_subscr... |
async def unsubscribe(self, container, *keys):
'''
Unsubscribe specified channels. Every subscribed key should be unsubscribed exactly once, even if duplicated subscribed.
:param container: routine container
:param \*keys: subscribed channels
'''
await s... |
async def psubscribe(self, container, *keys):
'''
Subscribe to specified globs
:param container: routine container
:param \*keys: subscribed globs
:returns: list of event matchers for the specified globs
'''
await self._get_subscribe_co... |
async def shutdown(self, container, force=False):
'''
Shutdown all connections. Exclusive connections created by get_connection will shutdown after release()
'''
p = self._connpool
self._connpool = []
self._shutdown = True
if self._defaultconn:
p.appen... |
def subscribe_state_matcher(self, container, connected = True):
'''
Return a matcher to match the subscribe connection status.
:param container: a routine container. NOTICE: this method is not a routine.
:param connected: if True, the matcher matches connection up. If F... |
def _escape_path(key):
    '''
    Escape '/', '\\' — and the escape character '$' itself — in key,
    so the result can be used as a single path segment.

    Mapping: '$' -> '$_', '/' -> '$+', '\\' -> '$$'.
    '''
    # '$' must be replaced first: otherwise the '$' characters introduced
    # by the '/' and '\\' replacements would themselves be escaped again.
    return _tobytes(key).replace(b'$', b'$_').replace(b'/', b'$+').replace(b'\\', b'$$')
def reassemble_options(payload):
'''
Reassemble partial options to options, returns a list of dhcp_option
DHCP options are basically `|tag|length|value|` structure. When an
option is longer than 255 bytes, it can be splitted into multiple
structures with the same tag. The splitted structures mu... |
def build_options(payload, options, maxsize = 576, overload = OVERLOAD_FILE | OVERLOAD_SNAME, allowpartial = True):
'''
Split a list of options
This is the reverse operation of `reassemble_options`, it splits `dhcp_option` into
`dhcp_option_partial` if necessary, and set overload option if field ov... |
def create_option_from_value(tag, value):
"""
Set DHCP option with human friendly value
"""
dhcp_option.parser()
fake_opt = dhcp_option(tag = tag)
for c in dhcp_option.subclasses:
if c.criteria(fake_opt):
if hasattr(c, '_parse_from_value'):
return c(tag = tag,... |
def create_dhcp_options(input_dict, ignoreError = False, generateNone = False):
"""
Try best to create dhcp_options from human friendly values, ignoring
invalid values
"""
retdict = {}
for k,v in dict(input_dict).items():
try:
if generateNone and v is None:
re... |
def with_indices(*args):
'''
Create indices for an event class. Every event class must be decorated with this decorator.
'''
def decorator(cls):
for c in cls.__bases__:
if hasattr(c, '_indicesNames'):
cls._classnameIndex = c._classnameIndex + 1
for i i... |
def two_way_difference(self, b, extra_add = (), extra_remove = ()):
"""
Return (self - b, b - self)
"""
if self is b:
return ((), ())
if isinstance(b, DiffRef_):
extra_remove = extra_remove + b.add
b = b.origin
if extra_add == extra_rem... |
def getTypename(cls):
'''
:returns: return the proper name to match
'''
if cls is Event:
return None
else:
for c in cls.__bases__:
if issubclass(c, Event):
if c is Event:
return cls._getTypename()... |
def createMatcher(cls, *args, **kwargs):
'''
:param _ismatch: user-defined function ismatch(event) for matching test
:param \*args: indices
:param \*\*kwargs: index_name=index_value for matching criteria
'''
if kwargs and not args:
return EventMatcher(tuple(ge... |
async def limit(self, use = 1):
"""
Acquire "resources", wait until enough "resources" are acquired. For each loop,
`limit` number of "resources" are permitted.
:param use: number of "resouces" to be used.
:return: True if is limited
"""
c = self... |
async def run_task(self, container, task, newthread = False):
"Run task() in task pool. Raise an exception or return the return value"
e = TaskEvent(self, task=task, newthread = newthread)
await container.wait_for_send(e)
ev = await TaskDoneEvent.createMatcher(e)
if hasattr(ev, '... |
async def run_gen_task(self, container, gentask, newthread = True):
"Run generator gentask() in task pool, yield customized events"
e = TaskEvent(self, gen_task = gentask, newthread = newthread)
await container.wait_for_send(e)
ev = await TaskDoneEvent.createMatcher(e)
if hasattr... |
async def run_async_task(self, container, asynctask, newthread = True):
"Run asynctask(sender) in task pool, call sender(events) to send customized events, return result"
e = TaskEvent(self, async_task = asynctask, newthread = newthread)
await container.wait_for_send(e)
ev = await TaskDo... |
async def lock(self, container = None):
"Wait for lock acquire"
if container is None:
container = RoutineContainer.get_container(self.scheduler)
if self.locked:
pass
elif self.lockroutine:
await LockedEvent.createMatcher(self)
else:
... |
def trylock(self):
"Try to acquire lock and return True; if cannot acquire the lock at this moment, return False."
if self.locked:
return True
if self.lockroutine:
return False
waiter = self.scheduler.send(LockEvent(self.context, self.key, self))
if waiter... |
def beginlock(self, container):
"Start to acquire lock in another routine. Call trylock or lock later to acquire the lock. Call unlock to cancel the lock routine"
if self.locked:
return True
if self.lockroutine:
return False
self.lockroutine = container.subroutine... |
def unlock(self):
    """Release the key: cancel any pending lock routine and drop the lock."""
    routine = self.lockroutine
    if routine:
        routine.close()
        self.lockroutine = None
    if self.locked:
        self.locked = False
        matcher = LockEvent.createMatcher(self.context, self.key, self)
        self.scheduler.ignore(matcher)
def create(self):
"""
Create the subqueue to change the default behavior of Lock to semaphore.
"""
self.queue = self.scheduler.queue.addSubQueue(self.priority, LockEvent.createMatcher(self.context, self.key),
maxdefault = self.size, defaultQueueCl... |
async def destroy(self, container = None):
"""
Destroy the created subqueue to change the behavior back to Lock
"""
if container is None:
container = RoutineContainer(self.scheduler)
if self.queue is not None:
await container.syscall_noreturn(syscall_remov... |
async def shutdown(self, force = False, connmark = -1):
    '''
    Can call without delegate
    '''
    # NOTE(review): the default is -1, so this branch only fires when the
    # caller explicitly passes connmark=None to mean "current connmark";
    # -1 presumably has its own meaning downstream — confirm.
    if connmark is None:
        connmark = self.connmark
    event = ConnectionControlEvent(self, ConnectionControlEvent.SHUTDOWN,
                                   force, connmark)
    self.scheduler.emergesend(event)
async def reconnect(self, force = True, connmark = None):
    '''
    Can call without delegate
    '''
    # None means "the current connection mark"
    if connmark is None:
        connmark = self.connmark
    event = ConnectionControlEvent(self, ConnectionControlEvent.RECONNECT,
                                   force, connmark)
    self.scheduler.emergesend(event)
async def reset(self, force = True, connmark = None):
    '''
    Can call without delegate
    '''
    # None means "the current connection mark"
    if connmark is None:
        connmark = self.connmark
    event = ConnectionControlEvent(self, ConnectionControlEvent.RESET,
                                   force, connmark)
    self.scheduler.emergesend(event)
async def write(self, event, ignoreException = True):
'''
Can call without delegate
'''
connmark = self.connmark
if self.connected:
def _until():
if not self.connected or self.connmark != connmark:
return True
r = await ... |
async def shutdown(self, connmark = -1):
    '''
    Can call without delegate
    '''
    # NOTE(review): with default -1 this branch only fires on an explicit
    # connmark=None; -1 presumably means "any connmark" downstream — confirm.
    if connmark is None:
        connmark = self.connmark
    event = ConnectionControlEvent(self, ConnectionControlEvent.SHUTDOWN,
                                   True, connmark)
    self.scheduler.emergesend(event)
async def stoplisten(self, connmark = -1):
    '''
    Can call without delegate
    '''
    if connmark is None:
        connmark = self.connmark
    event = ConnectionControlEvent(self, ConnectionControlEvent.STOPLISTEN,
                                   True, connmark)
    self.scheduler.emergesend(event)
async def startlisten(self, connmark = -1):
    '''
    Can call without delegate
    '''
    if connmark is None:
        connmark = self.connmark
    event = ConnectionControlEvent(self, ConnectionControlEvent.STARTLISTEN,
                                   True, connmark)
    self.scheduler.emergesend(event)
def default_start():
"""
Use `sys.argv` for starting parameters. This is the entry-point of `vlcp-start`
"""
(config, daemon, pidfile, startup, fork) = parsearg()
if config is None:
if os.path.isfile('/etc/vlcp.conf'):
config = '/etc/vlcp.conf'
else:
print('/e... |
def close(self):
    """Stop the output stream; any further download still proceeds."""
    stream = self.stream
    if stream:
        stream.close(self.scheduler)
        self.stream = None
async def shutdown(self):
"Force stop the output stream, if there are more data to download, shutdown the connection"
if self.stream:
if not self.stream.dataeof and not self.stream.dataerror:
self.stream.close(self.scheduler)
await self.connection.shutdown()
... |
def createphysicalnetwork(type, create_processor = partial(default_processor, excluding=('id', 'type')),
reorder_dict = default_iterate_dict):
"""
:param type: physical network type
:param create_processor: create_processor(physicalnetwork, walk, write, \*, paramet... |
def deletephysicalnetwork(check_processor = default_physicalnetwork_delete_check,
reorder_dict = default_iterate_dict):
"""
:param check_processor: check_processor(physicalnetwork, physicalnetworkmap, walk, write, \*, parameters)
"""
def walker(walk, write, timestamp, par... |
def createphysicalport(create_processor = partial(default_processor, excluding=('vhost', 'systemid',
'bridge', 'name',
'physicalnetwork')),
... |
def updatephysicalport(update_processor = partial(default_processor, excluding=('vhost', 'systemid',
'bridge', 'name'),
disabled=('physicalnetwork',)),
... |
def deletephysicalport(check_processor=_false_processor,
reorder_dict = default_iterate_dict):
"""
:param check_processor: check_processor(physicalport, physicalnetwork, physicalnetworkmap,
walk, write \*, parameters)
"""
def walker(walk, write, timesta... |
def createlogicalnetwork(create_processor = partial(default_processor, excluding=('id', 'physicalnetwork')),
reorder_dict = default_iterate_dict):
"""
:param create_processor: create_processor(logicalnetwork, logicalnetworkmap, physicalnetwork,
physicalnetwo... |
def deletelogicalnetwork(check_processor=default_logicalnetwork_delete_check,
reorder_dict = default_iterate_dict):
"""
:param check_processor: check_processor(logicalnetwork, logicalnetworkmap,
physicalnetwork, physicalnetworkmap,
... |
async def restart_walk(self):
    """
    Force a re-walk
    """
    # already pending: nothing more to do
    if self._restartwalk:
        return
    self._restartwalk = True
    notification = FlowUpdaterNotification(
        self, FlowUpdaterNotification.STARTWALK)
    await self.wait_for_send(notification)
async def _dataobject_update_detect(self, _initialkeys, _savedresult):
"""
Coroutine that wait for retrieved value update notification
"""
def expr(newvalues, updatedvalues):
if any(v.getkey() in _initialkeys for v in updatedvalues if v is not None):
return Tr... |
def updateobjects(self, updatedvalues):
"""
Force a update notification on specified objects, even if they are not actually updated
in ObjectDB
"""
if not self._updatedset:
self.scheduler.emergesend(FlowUpdaterNotification(self, FlowUpdaterNotification.DATAUPDATED))
... |
async def _flowupdater(self):
"""
Coroutine calling `updateflow()`
"""
lastresult = set(v for v in self._savedresult if v is not None and not v.isdeleted())
flowupdate = FlowUpdaterNotification.createMatcher(self, FlowUpdaterNotification.FLOWUPDATE)
while True:
... |
async def main(self):
"""
Main coroutine
"""
try:
lastkeys = set()
dataupdate = FlowUpdaterNotification.createMatcher(self, FlowUpdaterNotification.DATAUPDATED)
startwalk = FlowUpdaterNotification.createMatcher(self, FlowUpdaterNotification.STARTWALK)
... |
def syscall_direct(*events):
    '''
    Build a syscall function that hands each given event straight to the
    core processor, bypassing the normal queue.
    This should never be used for normal events.
    '''
    def _syscall(scheduler, processor):
        # scheduler is part of the syscall signature but is not needed here
        for ev in events:
            processor(ev)
    return _syscall
def syscall_generator(generator):
    '''
    Build a syscall function that processes every event produced by calling
    ``generator()``. This should never be used for normal events.
    '''
    def _syscall(scheduler, processor):
        # scheduler is part of the syscall signature but is not needed here
        for ev in generator():
            processor(ev)
    return _syscall
def syscall_clearqueue(queue):
'''
Clear a queue
'''
def _syscall(scheduler, processor):
qes, qees = queue.clear()
events = scheduler.queue.unblockqueue(queue)
for e in events:
scheduler.eventtree.remove(e)
for e in qes:
processor(e)
for e ... |
def syscall_removequeue(queue, index):
'''
Remove subqueue `queue[index]` from queue.
'''
def _syscall(scheduler, processor):
events = scheduler.queue.unblockqueue(queue[index])
for e in events:
scheduler.eventtree.remove(e)
qes, qees = queue.removeSubQueue(index)
... |
def syscall_clearremovequeue(queue, index):
'''
Clear the subqueue `queue[index]` and remove it from queue.
'''
def _syscall(scheduler, processor):
qes, qees = queue[index].clear()
events = scheduler.queue.unblockqueue(queue[index])
for e in events:
scheduler.eventtre... |
def register(self, matchers, runnable):
'''
Register an iterator(runnable) to scheduler and wait for events
:param matchers: sequence of EventMatchers
:param runnable: an iterator that accept send method
:param daemon: if True, the runnable will be regi... |
def unregister(self, matchers, runnable):
'''
Unregister an iterator(runnable) and stop waiting for events
:param matchers: sequence of EventMatchers
:param runnable: an iterator that accept send method
'''
for m in matchers:
self.matchtree.r... |
def unregisterall(self, runnable):
'''
Unregister all matches and detach the runnable. Automatically called when runnable returns StopIteration.
'''
if runnable in self.registerIndex:
for m in self.registerIndex[runnable]:
self.matchtree.remove(m, runnable)
... |
def ignore(self, matcher):
    '''
    Unblock and ignore the matched events, if any.
    '''
    for ev in self.eventtree.findAndRemove(matcher):
        self.queue.unblock(ev)
        ev.canignore = True
def quit(self, daemononly = False):
    '''
    Send quit event to quit the main loop (no-op if already quitting)
    '''
    if self.quitting:
        return
    self.quitting = True
    quit_event = SystemControlEvent(SystemControlEvent.QUIT,
                                    daemononly=daemononly)
    self.queue.append(quit_event, True)
def setTimer(self, start, interval = None):
'''
Generate a TimerEvent on specified time
:param start: offset for time from now (seconds), or datetime for a fixed time
:param interval: if not None, the timer is regenerated by interval seconds.
:returns: ... |
def registerPolling(self, fd, options = POLLING_IN|POLLING_OUT, daemon = False):
'''
register a polling file descriptor
:param fd: file descriptor or socket object
:param options: bit mask flags. Polling object should ignore the incompatible flag.
'''
se... |
def unregisterPolling(self, fd, daemon = False):
    '''
    Unregister a polling file descriptor
    :param fd: file descriptor or socket object
    :param daemon: passed through to the poller — presumably the daemon
                   flag used at registration time; confirm against registerPolling
    '''
    self.polling.unregister(fd, daemon)
def setDaemon(self, runnable, isdaemon, noregister = False):
'''
If a runnable is a daemon, it will not keep the main loop running. The main loop will end when all alived runnables are daemons.
'''
if not noregister and runnable not in self.registerIndex:
self.register((), ru... |
def syscall(self, func):
'''
Call the func in core context (main loop).
func should like::
def syscall_sample(scheduler, processor):
something...
where processor is a function which accept an event. When calling processor, scheduler directly... |
def main(self, installsignal = True, sendinit = True):
'''
Start main loop
'''
if installsignal:
sigterm = signal(SIGTERM, self._quitsignal)
sigint = signal(SIGINT, self._quitsignal)
try:
from signal import SIGUSR1
sigus... |
def getservers(self, vhost = None):
'''
Return current servers
:param vhost: return only servers of vhost if specified. '' to return only default servers.
None for all servers.
'''
if vhost is not None:
return [s for s in self.connection... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.