content stringlengths 22 815k | id int64 0 4.91M |
|---|---|
def diff(source: list):
    """
    Compute the first-order discrete differences of a 1-dimensional list.

    Parameters
    ----------
    source : list
        Sequence of numeric values; inputs shorter than 2 elements yield
        an empty result.

    Returns
    -------
    list
        Element-wise differences ``source[i] - source[i - 1]``, of length
        ``max(len(source) - 1, 0)``.

    TODO: Support higher orders and dimensions as required.
    """
    # Pair each element with its predecessor instead of indexing manually.
    return [curr - prev for prev, curr in zip(source, source[1:])]
def is_gzip(name):
    """Report whether *name* denotes a gzip-compressed file.

    A file counts as gzip-compressed when its name carries the
    conventional ``.gz`` extension."""
    suffix = ".gz"
    return name[len(name) - len(suffix):] == suffix
def _search_on_path(filenames):
"""Find file on system path."""
# http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52224
from os.path import exists, abspath, join
from os import pathsep, environ
search_path = environ["PATH"]
paths = search_path.split(pathsep)
for path in paths:
... | 5,348,802 |
def test_two_waypoints() -> None:
    """A freshly created world holds two waypoints and no graph yet."""
    waypoints = [
        ("station", 48.99420, 8.4003),
        ("castle", 49.0134, 8.4044),
    ]
    world = World()
    world.add_waypoints(waypoints)
    assert len(world.waypoints) == 2
    assert world.graph is None
def test_boto3_authentication_returns_exception() -> None:
"""
Tests that authentication returns an exception
"""
s3_client = Mock(assume_role=Mock(side_effect=get_client_error("AssumeRole", "AccessDenied", "S3")))
mfa = "273941"
username = "test.user"
role = "testRole"
account = "testAc... | 5,348,804 |
def test_partial_load__bad_column():
"""Crash if an invalid column name is given."""
stream = io.BytesIO(b"zyxwvut\0\xba\xdc\x0f\xfe\xe1\x5b\xad\x01")
with pytest.raises(ValueError) as errinfo:
BasicStruct.partial_load(stream, "lol")
assert str(errinfo.value) == "BasicStruct doesn't have a fie... | 5,348,805 |
def possible_sums_of(numbers: list) -> list:
    """Compute all pairwise sums of *numbers*, excluding self-pairs.

    Parameters
    ----------
    numbers : list
        Numeric values; fewer than two elements yields an empty result.

    Returns
    -------
    list
        ``numbers[i] + numbers[j]`` for every index pair ``i < j``, in the
        same order the nested-loop formulation produces.
    """
    # Local import keeps the module's top-level dependencies unchanged.
    from itertools import combinations
    return [a + b for a, b in combinations(numbers, 2)]
def normalize_norms(X, scale_factor=1, axis=0, by='sum'):
""" wrapper of `normalize_colsum` and `normalize_rowsum`
Parameters
----------
X:
a (sparse) matrix
scale_factor: numeric, None
if None, use the median of sum level as the scaling factor.
axis: int, {0, 1}
if axis... | 5,348,807 |
def full_solution(combined, prob_dists):
"""
combined: (w, n-1->n-w, 3, 3)
prob_dists: (n, 3, total_reads)
p[v,g,k] = prob of observing k of total_reads on ref if gneotype ig on varaint v
"""
N = len(combined[0])+1
best_idx, best_score = np.empty(N), -np.inf*np.ones(N)
for j, counts in e... | 5,348,808 |
def _generate_code_for_client(service: ProtoService, root: ProtoNode,
output: OutputFile) -> None:
"""Outputs client code for an RPC service."""
output.write_line('namespace nanopb {')
class_name = f'{service.cpp_namespace(root)}Client'
output.write_line(f'\nclass {class_... | 5,348,809 |
def download_url(url, filename):
    """Fetch *url* into *filename*, skipping the download when the
    target file already exists on disk."""
    if os.path.exists(filename):
        return
    print('Downloading {} ...'.format(filename))
    download_webfile(url, filename)
    print('Downloading complete.')
def ycbcr2bgr(img):
"""Convert a YCbCr image to BGR image.
The bgr version of ycbcr2rgb.
It implements the ITU-R BT.601 conversion for standard-definition
television. See more details in
https://en.wikipedia.org/wiki/YCbCr#ITU-R_BT.601_conversion.
It differs from a similar function in cv2.cvtCol... | 5,348,811 |
def main(config):
"""Read shapes, plot map
"""
data_path = config['paths']['data']
# data
output_file = os.path.join(config['paths']['figures'], 'network-air-map.png')
air_edge_file = os.path.join(data_path, 'network', 'air_edges.shp')
air_node_file = os.path.join(data_path, 'network', 'air... | 5,348,812 |
def tensorize_fg_coeffs(
data,
wgts,
fg_model_comps,
notebook_progressbar=False,
verbose=False,
):
"""Initialize foreground coefficient tensors from uvdata and modeling component dictionaries.
Parameters
----------
data: list
list of tf.Tensor objects, each with shape (ngrp... | 5,348,813 |
def main() -> None:
"""Execute the main routine."""
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument(
"--operation_dir",
help="path to the directory where temporary test files are stored; "
"if not specified, uses mkdtemp()")
args = parser.parse_args()
... | 5,348,814 |
def test_get_serializer_class():
    """
    The serializer class exposed by the view must be the one configured
    in the application settings.
    """
    assert views.UserCreateView().get_serializer_class() == app_settings.USER_SERIALIZER
def receive_incoming_bets():
    """
    Expose the current fixtures information to the front-end.
    """
    information = fixtures.fixtures_information
    return information
def fibonacci(n):
"""
object: fibonacci(n) returns the first n Fibonacci numbers in a list
input: n- the number used to calculate the fibonacci list
return: retList- the fibonacci list
"""
if type(n) != int:
print(n)
print(":input not an integer")
return False
if... | 5,348,817 |
def psnr_batch(_mse_batch_val):
"""
:param _mse_val_each: ndarray
:return: ndarray
Usage:
1) The Bath is deal with channel.
Thus, it is recommended to call mse_batch function before the psnr_batch function.
2) cumsum_psnr_rgb += (metric_.psnr_batch(_mse_batch_val=... | 5,348,818 |
def get_u0(u0_type, num_features):
"""return a polyhedral definition for U^0, B_mat and b_vec"""
assert u0_type in ["box", "positive_normed"]
if u0_type == "box":
B_mat, b_vec = U0_box(num_features)
if u0_type == "positive_normed":
B_mat, b_vec = U0_positive_normed(num_features)
r... | 5,348,819 |
def funcparser_callable_search_list(*args, caller=None, access="control", **kwargs):
"""
Usage: $objlist(#123)
Legacy alias for search with a return_list=True kwarg preset.
"""
return funcparser_callable_search(*args, caller=caller, access=access,
return_list=... | 5,348,820 |
def upcoming_movie_name(soup):
"""
Extracts the list of movies from BeautifulSoup object.
:param soup: BeautifulSoup object containing the html content.
:return: list of movie names
"""
movie_names = []
movie_name_tag = soup.find_all('h4')
for _movie in movie_name_tag:
_movie_result = _movie.find_all('a')
t... | 5,348,821 |
def options(*args, **kw):
    """Decorator marking the wrapped function as an OPTIONS request handler."""
    method = 'OPTIONS'
    return _make_handler_decorator(method, *args, **kw)
def lookup_vendor_name(mac_address):
"""
Translates the returned mac-address to a vendor
"""
url = "http://macvendors.co/api/%s" % mac_address
request = urllib2.Request(url, headers={'User-Agent': "API Browser"})
try:
response = urllib2.urlopen(request)
reader = codecs.getr... | 5,348,823 |
def train_on(text):
""" Return a dictionary whose keys are alle the tuple of len PREFIX
of consecutive words inside text, and whose value is the list of
every single word which follows that tuple inside the text. For ex:
{('Happy', 'birthday'): ['to', 'dear'] ...} """
words = text.split()
assert... | 5,348,824 |
def learning_rate_decay(alpha, decay_rate, global_step, decay_step):
"""learning_rate_decay: updates the learning rate using
inverse time decay in numpy
Args:
alpha : is the original learning rate
decay_rate : is the weight used to determine the
r... | 5,348,825 |
def update_studio(request):
"""updates the studio
"""
studio_id = request.params.get('studio_id')
studio = Studio.query.filter_by(id=studio_id).first()
name = request.params.get('name', None)
dwh = request.params.get('dwh', None)
wh_mon_start = get_time(request, 'mon_start')
wh_mon_end... | 5,348,826 |
def to_json(data, fpath):
    """
    Serialize *data* to *fpath* as pretty-printed, key-sorted JSON
    followed by a trailing newline.
    """
    payload = json.dumps(data, indent=2, sort_keys=True) + '\n'
    with open(fpath, 'w') as f:
        f.write(payload)
def flip_coin(num_of_experiments = 1000, num_of_flips = 30):
"""
Flip the coin `num_of_flips` times and repeat this experiment `num_of_experiments` times. And
return the number of heads grouped together in all the experiments.
"""
all_heads = []
for i in range(num_of_experiments):
heads ... | 5,348,828 |
def get_questions(
path: str,
uid2idx: dict = None,
path_data: Path = None,
) -> po.DataFrame:
"""
Identify correct answer text and filter out wrong distractors from question string
Get tokens and lemmas
Get explanation sentence ids and roles
"""
# Dropping questions without explanations hurts score
df = p... | 5,348,829 |
def simple_command(device, cmd_id, data=None, receive=True):
"""
Raises:
HIDException -> if reading/writing to the USB device failed:
KBProtocolException -> if the packet is too large
"""
cmd_packet = bytearray(EP_VENDOR_SIZE)
cmd_packet[0] = cmd_id
# Optional data component
if data... | 5,348,830 |
def dbopen(dbname, perm = 'r'):
    """Open a Datascope database.

    :param dbname: path/name of the Datascope database to open
    :param perm: permission string; 'r' (read-only) by default
    :return: a ``Dbptr`` handle wrapping the opened database
    """
    return Dbptr(dbname, perm)
def create(haproxy_lua_svc: HaproxyLuaFacade, **kwargs):
"""
Create a new lua
"""
json_payload = {
'lua': {
"enabled": kwargs['enabled'],
"name": kwargs['name'],
"description": kwargs['description'],
"content": kwargs['content'],
}
}
... | 5,348,832 |
def to_nx(dsk):
"""
Code mainly identical to dask.dot.to_graphviz and kept compatible.
"""
collapse_outputs = False
verbose = False
data_attributes = {}
function_attributes = {}
g = nx.DiGraph()
seen = set()
connected = set()
for k, v in dsk.items():
k_name = nam... | 5,348,833 |
def predict(model, img_base64):
    """
    Return the disease prediction the classifier produces for an image.

    Params:
        model: the neural network (classifier); must expose
            ``predict_disease``.
        img_base64: base64-encoded image payload handed to the model.
    """
    classifier = model
    return classifier.predict_disease(img_base64)
def get_hash(x: str):
    """Return the hexadecimal MD5 digest of the string *x*."""
    return hashlib.md5(x.encode()).hexdigest()
def pair_force(r1, r2, par1, par2, sigma_c, box, r_cut, lj=True, coulomb=True):
"""Compute the sum of the Lennard Jones force and the short ranged part
of the Coulomb force between two particles.
Arguments:
r1 (ndarray): A one dimensional numpy-array with d elements (position of... | 5,348,836 |
def delete_network_acl_entry(client, network_acl_id, num=100, egress=False, dry=True):
"""
Delete a network acl entry
https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/ec2.html#EC2.Client.delete_network_acl_entry
"""
try:
response = client.delete_network_acl_entry( E... | 5,348,837 |
def mp_run(data, process_num, func, *args):
""" run func with multi process
"""
level_start = time.time()
partn = max(len(data) / process_num, 1)
start = 0
p_idx = 0
ps = []
while start < len(data):
local_data = data[start:start + partn]
start += partn
p = mp.Proc... | 5,348,838 |
def _write_heat_cool_to_b18(list_dict, old_new_names, zone, b18_lines, string):
"""
Args:
list_dict:
old_new_names:
zone:
b18_lines:
string:
"""
for key in list_dict.keys():
if old_new_names[zone.Name.upper()][0] in key:
f_count = checkStr(b18_... | 5,348,839 |
def test_list_token_min_length_3_nistxml_sv_iv_list_token_min_length_4_4(mode, save_output, output_format):
"""
Type list/token is restricted by facet minLength with value 8.
"""
assert_bindings(
schema="nistData/list/token/Schema+Instance/NISTSchema-SV-IV-list-token-minLength-4.xsd",
in... | 5,348,840 |
def spell(corpus):
"""
Train a Spelling Normalizer
Parameters
----------
corpus : list of strings. Prefer to feed with malaya.load_malay_dictionary().
Returns
-------
SPELL_NORMALIZE: Trained malaya.normalizer._SPELL_NORMALIZE class
"""
if not isinstance(corpus, list):
... | 5,348,841 |
def check_PA_vector(angle_list, unit='deg'):
""" Checks if the angle list has the right format to avoid any bug in the
pca-adi algorithm. The right format complies to 3 criteria:
1) angles are expressed in degree
2) the angles are positive
3) there is no jump of more than 180 deg between c... | 5,348,842 |
def client_id_to_org_type_id(client_id):
"""
Client ID should be a string: "g:" + self._options['org'] + ":" +
self._options['type'] + ":" + self._options['id'],
"""
split = client_id.split(':')
if len(split) != 4:
raise InvalidClientId()
org = split[1]
... | 5,348,843 |
def get_total_implements():
    """Return the total number of implements requested across all loans.

    Sums the ``ammount_implements`` field (field name as spelled in the
    model) over every ``Loans`` row. Note this loads each row; it is a
    straight behavioral equivalent of the original accumulator loop.
    """
    return sum(loan.ammount_implements for loan in Loans.objects.all())
def add_menu(installdir: Path, name: Text) -> None:
"""Add a submenu to 'VNM' menu.
Parameters
----------
name : Text
The name of the submenu.
"""
logging.info(f"Adding submenu for '{name}'")
icon_path = installdir/f"icons/{name.split()[0]}.png"
icon_src = (Path(__file__).parent... | 5,348,845 |
def _parse_maybe_array(
type_name: str, innermost_type: Optional[Union[ast_nodes.ValueType,
ast_nodes.PointerType]]
) -> Union[ast_nodes.ValueType, ast_nodes.PointerType, ast_nodes.ArrayType]:
"""Internal-only helper that parses a type that may be an array type."... | 5,348,846 |
def ema_decay_schedule(
base_rate: jnp.ndarray,
step: jnp.ndarray,
total_steps: jnp.ndarray,
use_schedule: bool,
) -> jnp.ndarray:
"""Anneals decay rate to 1 with cosine schedule."""
if not use_schedule:
return base_rate
multiplier = _cosine_decay(step, total_steps, 1.)
return 1. - (1. - bas... | 5,348,847 |
def _kill_filter(mm: MergedMiningCoordinator, filter_fn: Callable[[MergedMiningStratumProtocol], bool]) -> int:
""" Kill all workers that the filter `fltr` returns true for.
"""
count = 0
for protocol in filter(filter_fn, mm.miner_protocols.values()):
count += 1
protocol.transport.abort... | 5,348,848 |
def get_node_layer_sort_preference(device_role):
"""Layer priority selection function
Layer sort preference is designed as numeric value.
This function identifies it by LAYERS_SORT_ORDER
object position by default. With numeric values,
the logic may be improved without changes on NeXt app side.
... | 5,348,849 |
def test_predictive_evaluation_0(model, test_interactions_ds):
"""Evaluation without counting None predictions."""
assert predictive_evaluation(model, test_interactions_ds, count_none_predictions=False,
n_test_predictions=None, skip_errors=True) == {'MSE': 0.6667, 'RMSE': 0.8165... | 5,348,850 |
def is_anagram(s,t):
    """Return True when strings s and t contain exactly the same
    characters with the same multiplicities (i.e. are anagrams).
    """
    # Tally characters: +1 for each char of s, -1 for each char of t.
    # The strings are anagrams exactly when every tally is back to zero.
    balance = {}
    for ch in s:
        balance[ch] = balance.get(ch, 0) + 1
    for ch in t:
        balance[ch] = balance.get(ch, 0) - 1
    return all(count == 0 for count in balance.values())
def read_dictionary(set_permutations):
"""
This function reads file "dictionary.txt" stored in FILE
and appends words in each line into a Python list
"""
global word_dict
with open('dictionary.txt') as f:
for line in f:
line = line.strip()
word_dict += line.split(',')
count = 0
for ans in set_permutati... | 5,348,852 |
def f1_score(labels, predict, name=None):
"""
Streaming f1 score.
"""
predictions = tf.floor(predict + 0.5)
with tf.variable_scope(name, 'f1', (labels, predictions)):
epsilon = 1e-7
_, tp = tf.metrics.true_positives(labels, predictions)
_, fn = tf.metrics.false_negatives(labe... | 5,348,853 |
async def test_turn_on_with_brightness(mock_send, hass):
"""Test the light turns on to the specified brightness."""
await hass.async_block_till_done()
await hass.services.async_call(
LIGHT_DOMAIN,
SERVICE_TURN_ON,
{ATTR_ENTITY_ID: ENTITY_1_LIGHT, ATTR_BRIGHTNESS: 60},
blocki... | 5,348,854 |
def test_notify_matrix_plugin_fetch(mock_post, mock_get):
"""
API: NotifyMatrix() Server Fetch/API Tests
"""
# Disable Throttling to speed testing
plugins.NotifyBase.request_rate_per_sec = 0
response_obj = {
'room_id': '!abc123:localhost',
'room_alias': '#abc123:localhost',
... | 5,348,855 |
def remove_quotes(fin, fout):
    """
    Copy the file *fin* to *fout*, removing every double-quote character.

    NOTE(review): the original docstring said quotes are removed only
    from lines containing an odd number of them, but the code has always
    stripped all quotes from every line unconditionally. Behavior is
    preserved and documented as-is -- confirm the intended semantics.

    :param fin: path of the input file to read
    :param fout: path of the output file (created/overwritten)
    """
    # Context managers close both handles even if a write raises; the
    # original leaked the handles on error and shadowed its parameters.
    with open(fin) as src, open(fout, "w") as dst:
        for line in src:
            dst.write(line.replace("\"", ""))
def site_summary_data(query, notime=True, extra="(1=1)"):
"""
Summary of jobs in different states for errors page to indicate if the errors caused by massive site failures or not
"""
summary = []
summaryResources = []
# remove jobstatus from the query
if 'jobstatus__in' in query:
del... | 5,348,857 |
def test_multiout_st(tmpdir):
""" testing a simple function that returns a numpy array, adding splitter"""
wf = Workflow("wf", input_spec=["val"], val=[0, 1, 2])
wf.add(arrayout(name="mo", val=wf.lzin.val))
wf.mo.split("val").combine("val")
wf.set_output([("array", wf.mo.lzout.b)])
wf.cache_dir... | 5,348,858 |
def load_solr(csv_file, solr_url):
"""
Load CSV file into Solr. solr_params are a dictionary of parameters
sent to solr on the index request.
"""
file_path = os.path.abspath(csv_file)
solr_params = {}
for fieldname in MULTIVALUE_FIELDNAMES:
tag_split = "f.%s.split" % fieldname
... | 5,348,859 |
def Epsilon(u):
    """Vector symmetric gradient.

    Computes ``Sym(Grad(u^T))`` -- the symmetric part of the gradient of
    the transposed field *u*. ``Sym`` and ``Grad`` are helpers defined
    elsewhere in this project; presumably *u* is a field/matrix object
    exposing ``transpose()`` -- TODO confirm against callers.
    """
    return Sym(Grad(u.transpose()))
def create_section_and_PCA(data: ML_prepare, labled: bool = False):
"""
Creates PCA for every section (organism group) of the data:
"all", "filaments", "total_counts", "various".
Using helper function "pca_plot".
Plots by the "y", results, whether labeled or not.
Parameters
----------
d... | 5,348,861 |
def at(seq, msg, cmd=None, *args, **kwargs):
    """Output the comwdg.

    Builds a translator for ``seq``, invokes it with the module-level
    ``COMWDG_CMD`` arguments, and calls the resulting object.

    NOTE(review): ``msg``, ``cmd``, ``*args`` and ``**kwargs`` are
    accepted but never used here -- presumably required by a shared
    handler signature; confirm before removing any of them.
    """
    return translator(seq)(*COMWDG_CMD)()
def get_directory(f):
"""Get a directory in the form of a list of entries."""
entries = []
while 1:
line = f.readline()
if not line:
print '(Unexpected EOF from server)'
break
if line[-2:] == CRLF:
line = line[:-2]
elif line[-1:] in CRLF:
... | 5,348,863 |
def pytest_configure(config):
"""pytest hook: Perform custom initialization at startup time.
Args:
config: The pytest configuration.
Returns:
Nothing.
"""
global log
global console
global ubconfig
test_py_dir = os.path.dirname(os.path.abspath(__file__))
source_dir... | 5,348,864 |
def init(provider=None):
"""
Runs through a questionnaire to set up your project's deploy settings
"""
if os.path.exists(DEPLOY_YAML):
_yellow("\nIt looks like you've already gone through the questionnaire.")
cont = prompt("Do you want to go through it again and overwrite the current one... | 5,348,865 |
def plot_data(options, data):
"""Plotting data en returning figure."""
color = 'cornflowerblue'
wip_col = options['column']
ax = plt.axes()
ax.xaxis.set_major_locator(mdates.DayLocator(interval=1))
ax.xaxis.set_major_formatter(mdates.DateFormatter('%y-%m-%d'))
ax.xaxis.grid(options['grid'])
... | 5,348,866 |
async def test_get_job_failure_reason_success(mock_client):
"""Assert get_job_failure_reason return failure reason as string"""
mock_client.return_value.__aenter__.return_value.describe_job_run.return_value = {
"jobRun": {
"failureReason": "Unknown",
"stateDetails": "TERMINATED",... | 5,348,867 |
def get_worker_status(worker):
"""Retrieve worker status by worker ID from redis."""
set_redis_worker_status_pool()
global WORKER_STATUS_POOL
# retrieve worker status
r = StrictRedis(connection_pool=WORKER_STATUS_POOL)
res = r.get(WORKER_STATUS_KEY_TMPL % worker)
return res.decode() if has... | 5,348,868 |
def lif_r_psc_aibs_converter(config, syn_tau=[5.5, 8.5, 2.8, 5.8]):
"""Creates a nest glif_lif_r_psc object"""
coeffs = config['coeffs']
threshold_params = config['threshold_dynamics_method']['params']
reset_params = config['voltage_reset_method']['params']
params = {'V_th': coeffs['th_inf'] * confi... | 5,348,869 |
def test_best_estimator():
"""Ensure that the best estimator is the one giving the best score (by
re-running it)"""
train_file = os.path.join(os.path.dirname(__file__), './u1_ml100k_train')
test_file = os.path.join(os.path.dirname(__file__), './u1_ml100k_test')
data = Dataset.load_from_folds([(trai... | 5,348,870 |
def pytest_collection_modifyitems(items, config):
"""
This function is called upon during the pytest test collection phase and allows for modification
of the test items within the list
"""
collect_only = config.getoption("--collect-only")
cassandra_dir = config.getoption("--cassandra-dir")
c... | 5,348,871 |
def get_bigwig_values(bigwig_path, chrom_name, chrom_end, chrom_start=0):
"""
Get the values for a genomic region of interest from a bigwig file.
:param bigwig_path: Path to the bigwig file
:param chrom_name: Chromosome name
:param chrom_end: chromosome end
:param chrom_start: chromosome start
... | 5,348,872 |
def dtool_config(files):
"""Provide default dtool config."""
logger = logging.getLogger(__name__)
# use environment variables instead of custom config file, see
# https://github.com/jic-dtool/dtoolcore/pull/17
# _original_environ = os.environ.copy()
# inject configuration into environment:
... | 5,348,873 |
def get_successors(state, maxwords):
"""Traverses state graph to find valid anagrams."""
terminal = len(state['chars']) == 0
# Check whether the state is invalid and should be pruned
if not is_valid(state['anagram'], terminal, maxwords):
return []
# If valid terminal state, stop search an... | 5,348,874 |
def read_and_parse_cdl_file(file_name):
"""
Reads relevant information from a "cdl" file
"""
if file_name is None:
return None
wl_map = {}
bl_map = {}
colclk_wl_map = {}
# Parse line-by-line
with open(file_name, "r") as fp:
for line in fp:
line = line.... | 5,348,875 |
def test_install_noneditable_git(script, tmpdir):
"""
Test installing from a non-editable git URL with a given tag.
"""
result = script.pip(
'install',
'git+https://github.com/pypa/pip-test-package.git'
'@0.1.1#egg=pip-test-package'
)
egg_info_folder = (
script.si... | 5,348,876 |
def test_4_arguments() -> None:
"""four arguments supplied when 3 expected"""
for extra in BAD_EXTRA_ARGUMENTS:
with pytest.raises(TypeError, match=re.compile(
"__init__[(][)] takes 4 positional arguments but {} were given".format(
len(extra) + 4))):
AutomataUnive... | 5,348,877 |
def detect_outlier_at_index(
srs: pd.Series,
idx: int,
n_samples: int,
z_score_threshold: float,
) -> bool:
"""
Check if a value at index `idx` in a series is an outlier.
The passed series is supposed to be ordered by increasing timestamps.
This function
- detects z-score window in... | 5,348,878 |
def _organize_parameter(parameter):
"""
Convert operation parameter message to its dict format.
Args:
parameter (OperationParameter): Operation parameter message.
Returns:
dict, operation parameter.
"""
parameter_result = dict()
parameter_keys = [
'mapStr',
... | 5,348,879 |
def colorpicker(request):
    """
    Controller rendering the color-picker page with the default
    MyParamColor context.
    """
    context = get_context(request, MyParamColor())
    return render(
        request, 'tethys_django_form_tutorial/colorpicker.html', context
    )
def _copy_part_from_other_model(model,
other_model,
part,
realization = None,
consolidate = True,
force = False,
cut_refs_to_uui... | 5,348,881 |
def flatten_in(iterable, pred=None):
"""Like flatten, but recurse also into tuples/lists not matching pred.
This makes also those items get the same flattening applied inside them.
Example::
is_nested = lambda e: all(isinstance(x, (list, tuple)) for x in e)
data = (((1, 2), ((3, 4), (5, 6... | 5,348,882 |
def test_non_callable_methods(monkeypatch, subtests):
"""
Tests that an exception is raised when any of the integration base methods are non-callable.
"""
from django_guid.config import Settings
from django_guid.integrations import SentryIntegration
mock_integration = SentryIntegration()
... | 5,348,883 |
def translate_date(default=defaults.get('language')):
"""Parse/translate a date."""
d = request.args.get('date')
if not d:
raise RuntimeError(_('Date is mandatory.'))
dest_lang = request.args.get('dest') if request.args.get('dest') else default
variation = request.args.get('variation') if r... | 5,348,884 |
def for_4():
    """Print a 9x7 grid of '*' characters shaped like the digit 4."""
    for row in range(9):
        cells = []
        for col in range(7):
            # Star on the rising diagonal (row+col == 6), the horizontal
            # bar (row 6), and the vertical stroke (col 5 below row 3).
            star = row + col == 6 or row == 6 or (col == 5 and row > 3)
            cells.append('*' if star else ' ')
        # The original printed each cell with end=' ' (cell then a space)
        # and finished each row with a bare print() newline.
        print(' '.join(cells) + ' ')
def ocaml_configure(
opam = None,
build = None,
switch = None,
# hermetic = False,
# verify = False,
# install = False,
# pin = False,
# force = False,
debug = False,
verbose = False
):
# is_rules_ocaml = False,
... | 5,348,886 |
def check_call(
*command: Any,
working_directory: Union[PathLike, str] = ".",
verbose: bool = False,
quoted: bool = False,
**kwargs: Any,
) -> Optional[str]:
"""Proxy for subprocess.check_call"""
return check_run(
*command, working_directory=working_directory, verbose=verbose, quoted... | 5,348,887 |
def fit_client(client: Client, weights: Weights) -> Tuple[Weights, int]:
    """Refine weights on a single client.

    Thin wrapper delegating to ``client.fit``. Per the annotation the
    result pairs the updated weights with an int -- presumably the number
    of local training examples; confirm against the ``Client`` interface.
    """
    return client.fit(weights)
def _find_rpms_in_packages(koji_api, name_list, major_minor):
"""
Given a list of package names, look up the RPMs that are built in them.
Of course, this is an inexact science to do generically; contents can
vary from build to build, and multiple packages could build the same RPM name.
We will first... | 5,348,889 |
def calculate_algorithm_tags(analyses):
"""
Calculate the algorithm tags (eg. "ip", True) that should be applied to a sample document based on a list of its
associated analyses.
:param analyses: the analyses to calculate tags for
:type analyses: list
:return: algorithm tags to apply to the sam... | 5,348,890 |
def open_json(filepath):
    """
    Load and return the parsed contents of a JSON file.

    :param filepath: path to the .json file
    :returns: the deserialized JSON value (list, dict, str, number, ...);
        the original docstring wrongly promised a list of str
    """
    # Explicit encoding: JSON is UTF-8 by spec; relying on the platform
    # default can fail on non-UTF-8 locales (e.g. Windows cp1252).
    with open(filepath, encoding="utf-8") as f:
        return json.load(f)
def test_account_registration_view_form_ok(client):
"""test valid form data on registration view"""
payload = {
"email": "testuser@gmail.com",
"password1": "UnMotDePasse12",
"password2": "UnMotDePasse12",
"username": "testuser",
}
response = client.post("/register/", payl... | 5,348,892 |
def remove_user_list():
"""
Endpoint to remove a specific list or a complete user
---
tags:
- User Methods
parameters:
- name: user
type: string
in: query
required: true
description: user you want to query
... | 5,348,893 |
def upload(f, content_type, token, api_key):
"""Upload a file with the given content type to Climate
This example supports files up to 5 MiB (5,242,880 bytes).
Returns The upload id if the upload is successful, False otherwise.
"""
uri = '{}/v4/uploads'.format(api_uri)
headers = {
'aut... | 5,348,894 |
def log_info_level(message):
    """
    Record *message* on the app's logger at INFO level, prefixed with
    the current timestamp from ``asctime()``.
    ================================================================
    Parameters
    ----------
    message : str
        Message to log.
    """
    app.logger.info(f'{asctime()}:{message}')
def get_node_name_centres(nodeset: Nodeset, coordinates_field: Field, name_field: Field):
"""
Find mean locations of node coordinate with the same names.
:param nodeset: Zinc Nodeset or NodesetGroup to search.
:param coordinates_field: The coordinate field to evaluate.
:param name_field: The name fi... | 5,348,896 |
def pred(a):
    """
    pred :: a -> a
    the predecessor of a value. For numeric types, pred subtracts 1.
    """
    # Dispatch through the project's Haskell-style Enum typeclass:
    # Enum[a] resolves the instance for a's type and its pred() yields
    # the preceding enumerable value.
    return Enum[a].pred(a)
def main():
    """
    Entry point of the app.

    Expects exactly one command-line argument (the server list file);
    otherwise prints usage and returns 1.
    """
    argv = sys.argv
    if len(argv) == 2:
        return process(server_list_file=argv[1])
    print(f"{argv[0]} [SERVER_LIST_FILE]")
    return 1
def pascal_to_snake(pascal_string):
    """Return a snake_string for a given PascalString.

    Converts in two steps via the module-private helpers:
    Pascal -> camel, then camel -> snake.

    NOTE(review): the final ``"".join(...)`` is a no-op if
    ``_camel_to_snake`` already returns a str; it only matters if that
    helper yields an iterable of fragments -- confirm its return type.
    """
    camel_string = _pascal_to_camel(pascal_string)
    snake_string = _camel_to_snake(camel_string)
    return "".join(snake_string)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.