content stringlengths 22 815k | id int64 0 4.91M |
|---|---|
def test_analyze(request,hash,db_name):
"""
Get features of a sequence, using the sequence's sha-1 hash as the
identifier.
"""
db = blat.models.Feature_Database.objects.get(name=db_name)
sequence = blat.models.Sequence.objects.get(db=db,hash=hash)
ts = int(time.mktime(sequence.modified.timet... | 5,350,100 |
def _collect_scaling_groups(owner: str) -> List:
"""Collect autoscaling groups that contain key `ES_role` and belong to the specified owner"""
client = boto3.client("autoscaling")
print("Collecting scaling groups")
resp = client.describe_auto_scaling_groups()
assert "NextToken" not in resp, "did no... | 5,350,101 |
def check_for_updates():
"""
Check for the existence of newer versions of the client, reporting both current release version and development
version.
For help installing development versions of the client, see the docs for
:py:mod:`synapseclient` or the `README.md <https://github.com/Sage-Bionetwor... | 5,350,102 |
def generate(env):
# NOTE: SCons requires the use of this name, which fails gpylint.
"""SCons entry point for this tool."""
env = env # Silence gpylint
__builtin__.AddTargetGroup = AddTargetGroup
__builtin__.AddTargetHelp = AddTargetHelp
__builtin__.GetTargetGroups = GetTargetGroups
__builtin__.GetTa... | 5,350,103 |
def cross_entropy_loss(logits, labels, label_smoothing=0., dtype=jnp.float32):
"""Compute cross entropy for logits and labels w/ label smoothing
Args:
logits: [batch, length, num_classes] float array.
labels: categorical labels [batch, length] int array.
label_smoothing: label smoothing ... | 5,350,104 |
def set_optimizer(name, model, learning_rate):
"""
Specify which optimizer to use during training.
Initialize a torch.optim optimizer for the given model based on the specified name and learning rate.
Parameters
----------
name : string or None, default = 'adam'
The name of the torch.o... | 5,350,105 |
def get_http_url(server_path, get_path):
"""
Вариант с использованием httplib напрямую; ничем не лучше urllib2
server_path = "example.com"
get_path = "/some_path"
"""
# urllib - более высокого уровня библиотека, которая в случае http использует
# httplib;
# используем httplib ради л... | 5,350,106 |
def not_found_view(request):
    """Render the application's "Not Found" page for *request*.

    Uses the request's context model and the ``not_found`` content tile.
    """
    return render_main_template(
        request.context, request, contenttile='not_found')
def top_filtering(
logits, top_k=0, top_p=0.0, threshold=-float("Inf"), filter_value=-float("Inf")
):
""" Filter a distribution of logits using top-k, top-p (nucleus) and/or threshold filtering
Args:
logits: logits distribution shape (vocabulary size)
top_k: <=0: no filtering, >0... | 5,350,108 |
def get_physical_type(obj):
"""
Return the physical type that corresponds to a unit (or another
physical type representation).
Parameters
----------
obj : quantity-like or `~astropy.units.PhysicalType`-like
An object that (implicitly or explicitly) has a corresponding
physical t... | 5,350,109 |
def reverse(list):
    """Return a new list or string with the elements or characters of
    *list* in reverse order.

    Strings are reversed via slicing; any other sequence is reversed and
    wrapped by the project's ``_list`` constructor.
    """
    if isinstance(list, str):
        return list[::-1]
    return _list(reversed(list))
def PubMedDiabetes(
directed: bool = False,
preprocess: bool = True,
load_nodes: bool = True,
verbose: int = 2,
cache: bool = True,
cache_path: str = "graphs/linqs",
version: str = "latest",
**additional_graph_kwargs: Dict
) -> Graph:
"""Return new instance of the PubMedDiabetes grap... | 5,350,111 |
def run_dti_artifact_correction(subject_list, session_list):
""" Attach the FSL-based diffusion MRI artifact detection and correction
workflow to the `main_wf`.
Parameters
----------
main_wf: nipype Workflow
wf_name: str
Name of the preprocessing workflow
params: dict with paramet... | 5,350,112 |
def find_neighbor_indices(atoms, probe, k):
"""
Returns list of indices of atoms within probe distance to atom k.
"""
neighbor_indices = []
atom_k = atoms[k]
radius = atom_k.radius + probe + probe
indices = list(range(k))
indices = indices + list(range(k+1, len(atoms)))
for i in ind... | 5,350,113 |
def genDir(EAs):
    """
    Generate the projection direction given the euler angles. Since the image
    is in the x-y plane, the projection direction is given by R(EA)*z where
    z = (0,0,1)

    Parameters
    ----------
    EAs : iterable of Euler-angle tuples
        Each entry is unpacked into ``rotmat3D_EA`` (defined elsewhere).

    Returns
    -------
    numpy.ndarray
        One direction vector per input angle set; each row is the third
        column of the corresponding rotation matrix.
    """
    # The third column of R(EA) equals R(EA) @ (0, 0, 1).
    dir_vec = np.array([rotmat3D_EA(*EA)[:, 2] for EA in EAs])
    return dir_vec
def plainica(x, reducedim=0.99, backend=None, random_state=None):
""" Source decomposition with ICA.
Apply ICA to the data x, with optional PCA dimensionality reduction.
Parameters
----------
x : array, shape (n_trials, n_channels, n_samples) or (n_channels, n_samples)
data set
reduced... | 5,350,115 |
def make_spectrum_layout(obj, spectra, user, device, width, smoothing, smooth_number):
"""
Helper function that takes the object, spectra and user info,
as well as the total width of the figure,
and produces one layout for a spectrum plot.
This can be used once for each tab on the spectrum plot,
... | 5,350,116 |
def all_equal(values: list) -> bool:
    """Check that all values in the given list are equal.

    An empty list is vacuously equal, so ``True`` is returned (the
    previous implementation raised ``IndexError`` on ``values[0]``).

    Parameters
    ----------
    values : list
        Values to compare; elements only need to support ``==``.

    Returns
    -------
    bool
        True when every element equals the first one (or the list is empty).
    """
    if not values:
        return True
    first = values[0]
    # Keep the original operand order (first == v) in case of custom __eq__.
    return all(first == v for v in values)
def _tester(func, *args):
"""
Tests function ``func`` on arguments and returns first positive.
>>> _tester(lambda x: x%3 == 0, 1, 2, 3, 4, 5, 6)
3
>>> _tester(lambda x: x%3 == 0, 1, 2)
None
:param func: function(arg)->boolean
:param args: other arguments
:return: something or none
... | 5,350,118 |
def test_deadlock(config):
"""test when two backends try to extract state of each other"""
acon1, acon2 = n_async_connect(config, 2)
acurs1 = acon1.cursor()
acurs2 = acon2.cursor()
while True:
acurs1.callproc('pg_query_state', (acon2.get_backend_pid(),))
acurs2.callproc('pg_query_state', (acon1.get_backend_p... | 5,350,119 |
def new_revision(partno):
"""
Presents the form to add a new revision, and creates it upon POST submit
"""
_load_if_released(partno) # ensures the component exists and is released
form = RevisionForm(request.form)
if request.method == 'POST' and form.validate_on_submit():
now = datetime... | 5,350,120 |
def fresnel_parameter(rays, diffraction_points):
""" returns the fresnel diffraction parameter (always as a positive)
Parameters
----------
rays : [n] list of shapely LineString (3d)
diffraction_points: [n] list of Points (3d)
diffraction point which the ray is rounding
Returns
---... | 5,350,121 |
def update_compilers_object(
new_compiler: Dict[str, Any], contract_type: str, previous_compilers: List[Dict[str, Any]]
) -> Iterable[Dict[str, Any]]:
"""
Updates a manifest's top-level `compilers` with a new compiler information object.
- If compiler version already exists, we just update the compiler'... | 5,350,122 |
def test_webapp_no_locales():
    """Test that locales are not required.

    Removes both ``default_locale`` and ``locales`` from the fixture
    manifest and asserts that validation still passes.
    """
    err = ErrorBundle(listed=False)
    data = _get_json()
    # Strip all locale information from the manifest fixture.
    del data["default_locale"]
    del data["locales"]
    _detect(err, data)
    assert not err.failed()
def validate(net, val_data, ctx, eval_metric):
"""Test on validation dataset."""
eval_metric.reset()
# set nms threshold and topk constraint
net.set_nms(nms_thresh=0.45, nms_topk=400)
net.hybridize()
for batch in val_data:
data = gluon.utils.split_and_load(batch[0], ctx_list=ctx, batch_a... | 5,350,124 |
def add_unique_geom_id(point_gdf: gpd.GeoDataFrame, log: Logger=None) -> gpd.GeoDataFrame:
"""Adds an unique identifier (string) to GeoDataFrame of points based on point locations (x/y).
"""
point_gdf[S.xy_id] = [f'{str(round(geom.x, 1))}_{str(round(geom.y, 1))}' for geom in point_gdf[S.geometry]]
uniq... | 5,350,125 |
def get_data_parallel_rank():
    """Return my rank for the data parallel group.

    Delegates to the module-level ``_TOPOLOGY`` object; assumes the
    topology has been initialized elsewhere before this is called.
    """
    return _TOPOLOGY.get_data_parallel_rank()
def entropy(data):
    """
    Compute the Shannon entropy, a measure of uncertainty.

    Parameters
    ----------
    data : sized iterable of non-negative numbers
        Counts (or weights) of each outcome; they are normalized by
        their sum to form a probability distribution.

    Returns
    -------
    float or None
        Shannon entropy in nats (natural log), or ``None`` when `data`
        is empty or all counts are zero (no distribution to measure).
    """
    if len(data) == 0:
        return None
    n = sum(data)
    if n == 0:
        # All-zero counts: normalization is undefined, avoid ZeroDivisionError.
        return None
    # Skip zero counts: 0 * log(0) is taken as 0 by convention, and the
    # previous implementation raised ValueError on math.log(0).
    return -sum((i / n) * math.log(i / n) for i in data if i)
def transform_mtl_to_stac(metadata: dict) -> Item:
"""
Handle USGS MTL as a dict and return a STAC item.
NOT IMPLEMENTED
Issues include:
- There's no reference to UTM Zone or any other CRS info in the MTL
- There's no absolute file path or reference to a URI to find data.
"""
L... | 5,350,128 |
def alexnet(pretrained=False):
"""AlexNet model architecture from the
`"One weird trick..." <https://arxiv.org/abs/1404.5997>`_ paper.
Args:
pretrained (bool): If True, returns a model pre-trained on ImageNet
"""
model = AlexNet()
if pretrained:
model_path = './model/alexnet.pth.... | 5,350,129 |
def main() :
"""
main():
parameters:
-----------
None
description:
------------
Creates a connection to the GPU underneath and holds it in "bc".
Next, we access the "colab" environment S3 bucket (which holds a parquet
file) and creates a table... | 5,350,130 |
def generate_random_targets(labels: Tensor, num_classes: int) -> Tensor:
"""
Generates one random target in (num_classes - 1) possibilities for each label that is different from the original
label.
Parameters
----------
labels: Tensor
Original labels. Generated targets will be different... | 5,350,131 |
def gpsdio_bigquery_schema(ctx, schemafile, cols):
"""
Produce a BigQuery schema for gpsdio data.
Produces a BigQuery schema for use together with gpsdio data in a
CSV container format (as supported by gpsdio-csv).
You must supply the same column list as used in the CSV!
"""
cols = cols.s... | 5,350,132 |
def recall_k(sent_im_dist, im_labels, ks=(1, 5, 10)):
"""
Compute recall at given ks.
"""
im_labels = tf.cast(im_labels, tf.bool)
def retrieval_recall(dist, labels, k):
# Use negative distance to find the index of
# the smallest k elements in each row.
pred = tf.nn.top_k(-dist, k=k)[1]
# ... | 5,350,133 |
def get_suggestion(project_slug, lang_slug, version_slug, pagename, user):
"""
| # | project | version | language | What to show |
| 1 | 0 | 0 | 0 | Error message |
| 2 | 0 | 0 | 1 | Error message (Can't happen) |
| 3 | 0 | 1 | 0 | Error messa... | 5,350,134 |
def a_test_predict_is_length():
    """
    Tests that the prediction IS dataframe length is equal to the number of steps h
    """
    # `data` is a module-level fixture defined elsewhere in this file.
    model = ARIMA(data=data, ar=2, ma=2, family=Exponential())
    x = model.fit()
    # In-sample prediction over h=5 steps should produce exactly 5 rows.
    assert(model.predict_is(h=5).shape[0] == 5)
def resample_data_or_seg(data, new_shape, is_seg, axis=None, order=3, do_separate_z=False, cval=0, order_z=0):
"""
separate_z=True will resample with order 0 along z
:param data:
:param new_shape:
:param is_seg:
:param axis:
:param order:
:param do_separate_z:
:param cval:
:param... | 5,350,136 |
def get_eval(appdir, config):
    """Build a `core.Evaluation` for *appdir* from the given `GlobalConfig`.

    The evaluation is parameterized by the config's client, repetition
    counts, and simulation flag.
    """
    return core.Evaluation(
        appdir,
        config.client,
        config.reps,
        config.test_reps,
        config.simulate,
    )
def load_queue_from_disk(filename):
"""
Load the old queue from disk when started. Old messages that weren't
posted yet are read from the queue and processed.
"""
if os.path.exists(filename):
log.msg("Loading queue from %s" % filename)
try:
with closing(open(filename, 'r'... | 5,350,138 |
def write_user_edges_to_file(edges):
"""
Given a list of edges `edges` (each edge of the form (f,g), where `f,g` are homogeneous
in 4 variables), save this list to the `user_edges` file to be read by the
main programs. The names of the variables are changed to `x,y,z,w`.
"""
R = PolynomialRing(Q... | 5,350,139 |
def ask_ok(title="Confirm", message=""):
"""Ask the user to confirm something via an ok-cancel question.
Parameters:
title (str): the text to show as the window title.
message (str): the message to show in the body of the dialog.
Returns:
bool: Whether the user selected "OK".
"... | 5,350,140 |
def indexate(points):
"""
Create an array of unique points and indexes into this array.
Arguments:
points: A sequence of 3-tuples
Returns:
An array of indices and a sequence of unique 3-tuples.
"""
pd = {}
indices = tuple(pd.setdefault(tuple(p), len(pd)) for p in points)
... | 5,350,141 |
def get_handle(endpoint_name,
relative_slo_ms=None,
absolute_slo_ms=None,
missing_ok=False):
"""Retrieve RayServeHandle for service endpoint to invoke it from Python.
Args:
endpoint_name (str): A registered service endpoint.
relative_slo_ms(float): S... | 5,350,142 |
def help_command(update: Update, context: CallbackContext) -> None:
"""Send a message when the command /help is issued."""
message = r"К вашим услугам\! Вот что я умею:" + "\n\n"
message += r"/fact \- расскажу интересный факт" + "\n"
message += r"/cat \- отправлю картинку котика" + "\n"
message += r... | 5,350,143 |
def test_get_parser(ap):
    """Test getting argument parser.

    ``ap`` is a mocked ArgumentParser fixture; after parsing the command
    line, the expected number of arguments should have been registered.
    """
    import sattools.processing.showsat
    sattools.processing.showsat.parse_cmdline()
    # NOTE(review): the expected count (9) tracks the number of CLI options
    # registered by parse_cmdline; update it when options change.
    assert ap.return_value.add_argument.call_count == 9
def senderPlusReceiver(dataBitArray):
"""[senderPlusReceiver function]
This function is used to send the data Bits
Sender -> sends the bits in the array
Receiver(in this case the user) -> ack the bit received using
"ack" or
"Ack" or
"ACK"
as the use input
This function is built in such a w... | 5,350,145 |
def create_symbolic_controller(states, inputs):
""""Returns a dictionary with keys that are the joint torque inputs and
the values are the controller expressions. This can be used to convert
the symbolic equations of motion from 0 = f(x', x, u, t) to a closed
loop form 0 = f(x', x, t).
Parameters
... | 5,350,146 |
def subprocess_main(framework=None):
"""
Please keep this file compatible with python2 in order to check user python version.
This function checks that Inference Engine Python API available and working as expected
and then in sub-process it executes main_<fw>.py files. Due to some OSs speci... | 5,350,147 |
def is_gene_name(instance):
    """This SHOULD check a webservice at HGNC/MGI for validation, but for now this just returns True always.."""
    # `ignored` (project helper) silences the unused-argument warning.
    ignored(instance)
    return True
def install_package_family(pkg):
    """
    Extract the device family from an image file name.

    :param: pkg ie asr900rsp2-universal.03.13.03.S.154-3.S3-ext.bin
    :return: device_type of the installed image ie asr900, or None when
        the name does not contain an ``asr<digits>`` token
    """
    match = re.search(r'(asr\d+)\w*', pkg)
    return match.group(1) if match else None
def generate_visible(tower_height, heightmap):
"""Trace a ray and determine if a region is viewable.
Args:
tower_height: the elevation in meters above sea level of your antenna
heightmap: an enumerable of heights in a given direction
Returns:
an enumerable of True/False for visibility
... | 5,350,150 |
def get_dendritic_mask_path_from_sessid(maindir, sessid, runtype="prod",
check=True):
"""
get_dendritic_mask_path_from_sessid(maindir, sessid)
Returns path to dendritic mask file for the specified session.
Required args:
- maindir (str): main directory
... | 5,350,151 |
def test_attachment_blank(tmpdir):
"""Attachment header without a filename is an error."""
template_path = Path(tmpdir/"template.txt")
template_path.write_text(textwrap.dedent(u"""\
TO: to@test.com
FROM: from@test.com
ATTACHMENT:
Hello world
"""))
template_message = ... | 5,350,152 |
def shift(arr):
"""
Shifts all rows of boxs back one row
:param
arr: 2D Array of VPython Box elements
:return:
No return, this will adjust the input arr to allow for a new row to be added
"""
for i in range(99, 0, -1):
for j in range(99, -1, -1):
arr[i][j].hei... | 5,350,153 |
def sam_to_bam(samfile, bamfile, samtools_bin="samtools"):
"""
Convert SAM to sorted and indexed BAM.
Args:
samfile: Input Sam
bamfile: Output Bam (without .bam extension)
samtools_bin: path to samtools binary
"""
with tempfile.NamedTemporaryFile() as tmp_bam:
# sam... | 5,350,154 |
def abbn_min_vol():
    """Model constant "Ab-bn min vol".

    Original equation: ``25.6``; units and limits unspecified in the
    source model.

    Returns
    -------
    float
        Always 25.6.
    """
    return 25.6
def clear(self: Client, player: str = None, item_name: str = None,
data: int = None, max_count: int = None) -> str:
"""Clears items from player inventory, including
items being dragged by the player.
Bedrock Edition implementation.
"""
return self.run('clear', player, item_name, data, max... | 5,350,156 |
def load_glove_embeddings(dim, vocab):
"""
Load GloVe embedding vectors for all words in our vocabulary.
https://machinelearningmastery.com/use-word-embedding-layers-deep-learning-keras/
Parameters
----------
dim : int
Dimension of GloVe embeddings. Can be 50, 100, 200 and 300.
voca... | 5,350,157 |
def check_vat_number(vat_number, country_code=None):
"""Check if a VAT number is valid.
If possible, the VAT number will be checked against available registries.
:param vat_number: VAT number to validate.
:param country_code:
Optional country code. Should be supplied if known, as there is no
... | 5,350,158 |
def argextrema(y, separate=True):
"""
Deprecated in favor of argrel{min|max} in scypy.signal to get separate
extrema in about the same CPU time.
If you need a list of
all relative extrema in order, using this with separate=False takes about
half the time as by combining the scipy
functions ... | 5,350,159 |
def examine_normal_mode(r_mol: RDKitMol,
p_mol: RDKitMol,
ts_xyz: np.array,
disp: np.array,
amplitude: Union[float, list] = 0.25,
weights: Union[bool, np.array] = True,
verbose... | 5,350,160 |
def generate_sprites(factor_dist, num_sprites=1):
"""Create callable that samples sprites from a factor distribution.
Args:
factor_dist: The factor distribution from which to sample. Should be an
instance of factor_distributions.AbstractDistribution.
num_sprites: Int or callable returning int. Number... | 5,350,161 |
def recode_from_index_mapper(meta, series, index_mapper, append):
"""
Convert a {value: logic} map to a {value: index} map.
This function takes a mapper of {key: logic} entries and resolves
the logic statements using the given meta/data to return a mapper
of {key: index}. The indexes returned can b... | 5,350,162 |
def update_dynamoDB(global_table_name, query_id, slack_data, current_date):
"""
update jarvis data to dynamoDB, seperate by queryId,
:param query_id: query attached together by + sign
:param slack_data: a list
:return:
"""
# convert list to string for dynamodb storage
slack_data = json.d... | 5,350,163 |
def get_variables():
    """Loads ODAHU config as Robot variable.

    Returns a dict with a single ``CONFIG`` key mapping every variable
    name listed in ``config.ALL_VARIABLES`` to its current value on the
    ``config`` module.
    """
    return {'CONFIG': {var: getattr(config, var) for var in config.ALL_VARIABLES}}
def is_number(char: Text) -> bool:
    """Return True when *char* is found within the ASCII digits 0-9.

    Note: this is a substring test against ``string.digits``, matching
    the original contract (the empty string is therefore truthy).
    """
    ascii_digits = string.digits
    return char in ascii_digits
def sliceData(data, slicebox=[None,None,None,None]):
"""
Sum 2d data along both axes and return 1d datasets
**Inputs**
data (sans2d) : data in
slicebox (range?:xy): region over which to integrate (in data coordinates)
**Returns**
xout (sans1d) : xslice
yout (sans1d) : yslice
... | 5,350,166 |
def set_plugin_temporarily_enabled(folder=None):
""" Disables the plugin globally or for folder.
Folder can be a view """
if folder is None:
if is_plugin_globally_disabled():
plugin_disabled_for_folders.remove("*global")
else:
if isinstance(folder, sublime.View):
... | 5,350,167 |
def GetChangeUrl(host, change):
  """Given a Gerrit host name and change ID, returns a URL for the change."""
  template = '%s://%s/a/changes/%s'
  return template % (GERRIT_PROTOCOL, host, change)
def _ignored_jenkins_node_names() -> List[str]:
"""
Ignore nodes with these names
:return: Config list
"""
return json.loads(os.environ['IGNORED_JENKINS_NODE_NAMES']) | 5,350,169 |
def recipe_clone_message(recipe):
    """
    Build the template context for rendering the recipe clone message.
    """
    return {"recipe": recipe}
def read_expression_file(file):
"""Reads a file with the expression profiles."""
D = []
genes = []
with open(file) as fp:
firstline = fp.readline()
classes = [c.strip() for c in firstline.split("\t")[1:]]
for line in fp.readlines():
items = [w.strip() for w in line.sp... | 5,350,171 |
def test_svd_soln():
"""
test SVD decomposition of solution by generating SVD, saving to file and reloading
"""
from proteus.deim_utils import read_snapshots,generate_svd_decomposition
ns = get_burgers_ns("test_svd_soln",T=0.1,nDTout=10,archive_pod_res=True)
failed = ns.calculateSolution("... | 5,350,172 |
def train(X, Y, n_h, num_iterations=10000, print_cost=False):
"""
定义神经网络模型,把之前的操作合并到一起
Args:
X: 输入值
Y: 真实值
n_h: 隐藏层大小/节点数
num_iterations: 训练次数
print_cost: 设置为True,则每1000次训练打印一次成本函数值
Return:
parameters: 模型训练所得参数,用于预测
"""
np.random.seed(3)
n_x... | 5,350,173 |
def get_drawdowns(cum_returns):
"""
Computes the drawdowns of the cumulative returns.
Parameters
----------
cum_returns : Series or DataFrame, required
a Series or DataFrame of cumulative returns
Returns
-------
Series or DataFrame
"""
cum_returns = cum_returns[cum_retu... | 5,350,174 |
def cmd_instance_create(context, classname, options):
"""
Create an instance and submit to wbemserver.
If successful, this operation returns the new instance name. Otherwise
it raises an exception
"""
ns = options['namespace'] or context.conn.default_namespace
try:
class_ = ... | 5,350,175 |
def test_flow_udp(serializer, options, tx_port, rx_port, api):
"""UDP Flow test traffic configuration
"""
udp_endpoint = PortTxRx(tx_port_name=tx_port.name,
rx_port_name=rx_port.name)
test_dscp = Priority(
Dscp(phb=Pattern(Dscp.PHB_CS7, ingress_result_name='phb')))
... | 5,350,176 |
def calc_recipe_quantity_ratio(
first_month: str,
first_recipe: str,
second_recipe: str,
file_name: str,
second_month: str = None) -> float:
"""
A function which calculates the ratio of quantity between two months.
:param first_month: str
:param first_r... | 5,350,177 |
def teacher_add_to_db():
"""Adds a teacher to database
Returns:
Redirect: Redirects to teachers list route
"""
if request.method == "POST":
fet_name = request.form["fet_name"]
fullname = request.form["fullname"]
teacher_email = request.form["t_email"]
try:
... | 5,350,178 |
def authorize(app_id, channel_id, team_id):
"""Just double check if this app is invoked from the expected app/channel/team"""
if app_id != SLACK_APP_ID:
return f"app ID {app_id}"
if team_id not in SLACK_TEAM_IDS:
return f"team ID {team_id}"
if channel_id not in SLACK_CHANNEL_IDS:
... | 5,350,179 |
def deal_one_card():
    """Draw one random card value from a fresh blackjack deck.

    Aces count as 11 and all face cards count as 10.
    """
    deck = [11, 2, 3, 4, 5, 6, 7, 8, 9, 10, 10, 10, 10]
    return random.choice(deck)
def _run(bot, inputs, logger, server_override=None, botengine_override=None):
"""
Run the given bot with the given parameters
:param bot: bot to run
:param inputs: the input JSON from the bot server
:param logger: logger object
:param server_override: Override the server URL with the known serve... | 5,350,181 |
def grey_pal(start=0.2, end=0.8):
"""
Utility for creating continuous grey scale palette
Parameters
----------
start : float
grey value at low end of palette
end : float
grey value at high end of palette
Returns
-------
out : function
Continuous color palett... | 5,350,182 |
def prepare(path, data_id):
"""Process each dataset based on individual characteristics
Args:
path to pull data from
"""
# assert type(train) == bool, 'Wrong train/test selection input'
if train:
suffix = "_train"
else:
suffix = "_test"
if dataset == "synapse":
... | 5,350,183 |
def process_watchdog(function_process, process_info_path, process_time, action):
"""
Monitoring function for the file transfer processes spawned off using Multiprocessing.
It will monitor if the process has either finished or has gone over it's processing time.
Parameters
----------
function_pr... | 5,350,184 |
def tags_to_matrix(events_df, tags_df, top_tags):
"""Converts tags to feature matrix
Args:
events_df: Events dataset
tags_df: Tags dataset
top_tags: Tags to include
Returns:
Feature matrix for tags
"""
# Combine tags into lists
tags = tags_df.groupby('id')['tag'... | 5,350,185 |
def get_parameter_value_and_validate_return_type(
domain: Optional[Domain] = None,
parameter_reference: Optional[Union[Any, str]] = None,
expected_return_type: Optional[Union[type, tuple]] = None,
variables: Optional[ParameterContainer] = None,
parameters: Optional[Dict[str, ParameterContainer]] = N... | 5,350,186 |
async def test_oppio_discovery_webhook(opp, aioclient_mock, oppio_client):
"""Test discovery webhook."""
aioclient_mock.get(
"http://127.0.0.1/discovery/testuuid",
json={
"result": "ok",
"data": {
"service": "mqtt",
"uuid": "test",
... | 5,350,187 |
def add_leaf_to_edge(t):
    """
    Returns a `Shape` instance with a new root; both a new leaf and the input `Shape` pend from it.
    :param t: `Shape` instance.
    :return: `Shape` instance.
    """
    # New root with exactly two children: a fresh leaf and the original shape.
    return Shape([Shape.LEAF, t])
def setup_cwl_logger(ti, level=None):
"""
Sets logging level of cwltool logger to correspond LOGGING_LEVEL
from airflow.cfg. Configures handler based on the task instance
to redirect output to the proper file. Suppresses those loggers
from cwltool or related packages that spam.
Note: maybe we wi... | 5,350,189 |
def create_simulation(parameter_values=None, experiment=None, make_inputs=False):
"""
Create a PyBaMM simulation set up for interation with liionpack
Parameters
----------
parameter_values : :class:`pybamm.ParameterValues`
The default is None.
experiment : :class:`pybamm.Experiment`
... | 5,350,190 |
def tarball(options=""):
"""Build the article tarball.
Args:
options (str, optional): Additional options to pass to Snakemake.
"""
snakefile = snakefile = Path("${SYW_PATH}") / "workflow" / "build.smk"
snakemake = f"SNAKEMAKE_OUTPUT_CACHE={paths.user().cache} SNAKEMAKE_RUN_TYPE='tarball... | 5,350,191 |
def test_parses_valid_outofrange_dates() -> None:
"""It parses valid dates outside the range supported by pandas.Timestamp."""
df = pd.DataFrame(
[
("1676-01-01", datetime.date(1676, 1, 1)),
("2263-12-31", datetime.date(2263, 12, 31)),
]
)
parsed = parse_date(df[0... | 5,350,192 |
def add(lexer: str) -> None:
"""Add a paste to pinnwand's database from stdin."""
if lexer not in utility.list_languages():
log.error("add: unknown lexer")
return
paste = database.Paste(
sys.stdin.read(), lexer=lexer, expiry=timedelta(days=1)
)
with database.session() as se... | 5,350,193 |
def _logistic_loss_and_grad(w, X, y, alpha, mask, sample_weight=None):
"""Computes the logistic loss and gradient.
Parameters
----------
w : ndarray, shape (n_features,) or (n_features + 1,)
Coefficient vector.
X : {array-like, sparse matrix}, shape (n_samples, n_features)
Training ... | 5,350,194 |
def verify_status_code(request_response: requests.Response) -> tuple:
"""Verify the status code of the post request to the search url and raise exceptions if the code is unexpected
:type request_response: requests.Response
:return:
"""
if request_response.status_code == 200:
return STATUS_C... | 5,350,195 |
def copynew(strpath, destdir):#need an arge for the base destindation directory
"""replicate the fs tree of strpath in destdir'
if srcpath file is newer or doesn't exist then copy it
- bug where mutliple runs are required to get everything synced
- picks up a new file with each run
... | 5,350,196 |
def instability_product_graphs(gra):
""" Determine if the species has look for functional group attachments that
could cause molecule instabilities
"""
# Build graphs for the detection scheme
rad_grp_dct = radical_group_dct(gra)
# Check for instability causing functional groups
prd_gra... | 5,350,197 |
def build_pdb_rmsd_matrix(pdb_paths, pdb_diff_path=None):
"""
Returns rmsd difference matrix for multiple pdb files.
Returns rmsd_list (3-item list), pdb_comp_amount (int).
Optional with pdb_diff_path return pdb_diff_comp(int).
"""
# make 3 column list or ndarray for x, y = (pdb1-n * pdb1-n) and... | 5,350,198 |
def handle_options():
"""
Define default options for a complete and automatic process
then check the command line arguments for parts of the process to skip
Returns:
auto: whether or not we accept user inputs on job and location
scrap: whether or not we do the scraping
working_d... | 5,350,199 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.