content stringlengths 22 815k | id int64 0 4.91M |
|---|---|
def send_graph_tracebacks(destinations,
run_key,
origin_stack,
graph,
send_source=True):
"""Send the tracebacks of a graph execution call to debug server(s).
Args:
destinations: gRPC destination addresses, a... | 5,351,600 |
def deg2hms(x):
"""Transform degrees to *hours:minutes:seconds* strings.
Parameters
----------
x : float
The degree value c [0, 360) to be written as a sexagesimal string.
Returns
-------
out : str
The input angle written as a sexagesimal string, in the
form, hours:... | 5,351,601 |
def normalise_whitespace(row):
"""Return table row with normalised white space.
This involves stripping leading and trailing whitespace, as well as
consolidating white space to single spaces.
"""
pairs = (
(k, _normalise_cell(v))
for k, v in row.items())
return {
k: v fo... | 5,351,602 |
def test_eap_proto_otp_errors(dev, apdev):
"""EAP-OTP local error cases"""
def otp_handler2(ctx, req):
logger.info("otp_handler2 - RX " + req.encode("hex"))
if 'num' not in ctx:
ctx['num'] = 0
ctx['num'] = ctx['num'] + 1
if 'id' not in ctx:
ctx['id'] = 1
... | 5,351,603 |
def notice(message: str):
    """Emit a notification-level diagnostic.

    Args:
        message: Text of the notification to report.
    """
    formatted = 'notice: {}'.format(message)
    write(formatted)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Setup Lennox devices."""
from lennox_api import (
LennoxIComfortAPI,
LENNOX_FAHRENHEIT, LENNOX_CELSIUS,
)
MAP_UNIT = {
TEMP_CELSIUS: LENNOX_CELSIUS,
TEMP_FAHRENHEIT: LENNOX_FAHRENHEIT,
}
us... | 5,351,605 |
def test_deprecated_base_class(caplog):
    """Subclassing the deprecated LockDevice base must log a warning."""

    class CustomLock(lock.LockDevice):
        """Minimal subclass used only to trigger the deprecation log."""

    CustomLock()
    expected = "LockDevice is deprecated, modify CustomLock"
    assert expected in caplog.text
def pivot_proportions(df, groups, responses, weights=1):
"""
Pivot data to show the breakdown of responses for each group.
Parameters:
df: a pandas DataFrame with data to be aggregated
groups: the name of the column containing the groups to partition by
respones: the name of the column th... | 5,351,607 |
def row_stack(a1, a2):
"""
Stacks data from subsequent sweeps, while padding "empty" columns from
subsequent sweeps.
Inputs
------
a1: np.array
destination array
a2: np.array
array which is added onto the first array
Returns
-------
out: np.array
stacked d... | 5,351,608 |
def globalPrediction(vid, category_names, vid_probs, predicted_labels):
"""
Get a matrix of probabilities over the classes for the c3d features of
a video. Generate the top 3 predictions from the prob matrix
"""
anno_list = []
# Idea 1 : To form the hist over the categories, each bin ha... | 5,351,609 |
def GetApexServerStatus(api_key):
"""
get the status of Apex Legends servers.
:param api_key: The API key to use.
Warning
You must put either a clickable link to "https://apexlegendsstatus.com" OR have a message such as "Data from apexlegendsstatus.com" when displaying data coming from this API. You... | 5,351,610 |
def test_ot3_remote_everything_commit_id_build_args(
ot3_remote_everything_commit_id: RuntimeComposeFileModel,
opentrons_commit: str,
ot3_firmware_commit: str,
) -> None:
"""Test build args when all source-types are remote commit id.
Confirm that all build args are using the head of their individua... | 5,351,611 |
def construct_mdx(cube_name, rows, columns, contexts=None, suppress=None):
""" Method to construct MDX Query from
:param cube_name: Name of the Cube
:param rows: Dictionary of Dimension Names and Selections
:param columns: Dictionary of Dimension Names and Selections (Dimension-MDX, List of Elementna... | 5,351,612 |
def test_data_change_ndisplay_points(viewer_factory):
"""Test change data calls for points layer with ndisplay change."""
view, viewer = viewer_factory()
np.random.seed(0)
data = 20 * np.random.random((10, 3))
layer = viewer.add_points(data)
visual = view.layer_to_visual[layer]
@patch.obje... | 5,351,613 |
def mock_train_model(spark_context, testserver):
"""Pre-condition: worker.update_one is assumed to be working."""
inq = Queue()
outq = Queue()
job = get_job()
job['urls'] = [testserver.url]
db = get_fake_mongo_client().ophicleide
db.models.insert_one(job)
inq.put(job)
update_model... | 5,351,614 |
def calc_number_of_children(*args):
"""
calc_number_of_children(loc, tif, dont_deref_ptr=False) -> int
Calculate max number of lines of a formatted c data, when expanded (
'PTV_EXPAND' ).
@param loc: location of the data ( ALOC_STATIC or ALOC_CUSTOM )
(C++: const argloc_t &)
@param ti... | 5,351,615 |
def _entity_namespace_key(entity, key):
"""Return an entry from an entity_namespace.
Raises :class:`_exc.InvalidRequestError` rather than attribute error
on not found.
"""
ns = entity.entity_namespace
try:
return getattr(ns, key)
except AttributeError as err:
util.raise_(... | 5,351,616 |
def get_messages(mtype, read=False, uid=None):
""" Returns query for messages. If `read` is True it only queries for unread messages """
query = Message.select().where(Message.mtype << mtype)
query = query.where(Message.receivedby == current_user.uid if not uid else uid)
if read:
query = query.w... | 5,351,617 |
def coords_to_indices(coords, top, left, csx, csy, shape, preserve_out_of_bounds=False):
"""
Convert coordinates to array indices using the given specs.
Coordinates outside of the shape are not returned.
:param coords: Tuple of coordinates in the form ([x...], [y...])
:param top: Top coordinate o... | 5,351,618 |
def test_validate_flow_definition_multiple_validity_errors():
"""Confirm that validity checks can report multiple errors."""
schema = {
# "StartAt" is missing
"States": {
"bogus": {},
},
}
with pytest.raises(flows_client.FlowValidationError) as raised:
flows_... | 5,351,619 |
def sanitize_vcf_file(vcf_file, out_file, snp_log_file, sample_log_file, logging, min_count=1, max_missing=0.25,
max_alt_states=4, disruptive_threshold=1,window_size=30,max_snps=2):
"""
Filter a user provided vcf and write a filtered vcf file
Parameters
----------
vcf_file [str... | 5,351,620 |
def geq(column: str, value: Optional[Any]) -> str:
    """Build a SQL ``column >= value`` predicate, or ``'1'`` when unbounded.

    ``None`` means "no lower bound" and yields the always-true predicate
    ``'1'``.  String values are single-quoted; other values are
    interpolated verbatim.

    >>> geq("col", None)
    '1'
    >>> geq("col", 1)
    'col >= 1'
    >>> geq("col", "1")
    "col >= '1'"
    >>> geq("col", 0)
    'col >= 0'
    """
    # Bug fix: the original used `if not value`, which silently dropped
    # legitimate falsy bounds such as 0 or "". Only None disables the bound.
    if value is None:
        return "1"
    if isinstance(value, str):
        # NOTE(review): naive quoting -- assumes value contains no quotes;
        # not safe against SQL injection for untrusted input.
        return f"{column} >= '{value}'"
    return f"{column} >= {value}"
def count_model_param_and_flops(model, dct_layer_sparse_facto_op=None):
"""
Return the number of params and the number of flops of 2DConvolutional Layers and Dense Layers for both the base model and the compressed model.
:return:
"""
from keras.layers import Conv2D, Dense
from palmnet.layers i... | 5,351,622 |
def longest_match(list1, list2):
"""
Find the length of the longest substring match between list1 and list2.
>>> longest_match([], [])
0
>>> longest_match('test', 'test')
4
>>> longest_match('test', 'toast')
2
>>> longest_match('supercalifragilisticexpialidocious', 'mystical califor... | 5,351,623 |
def solr_clear_index(app, args):
"""Removes all data from a Solr index. Equivalent to removing the
`data/index` directory while Solr is stopped. You can optionally specify
the id of the Plone site as the first command line argument.
"""
_get_site(app, args) # calls setSite so queryUtility works
... | 5,351,624 |
def deserialize_config(data, **kwargs):
"""Create instance of a JobConfiguration from a dict.
Parameters
----------
data : dict
Dictionary loaded from a serialized config file.
Returns
-------
JobConfiguration
"""
registry = Registry()
config_module = data["configurati... | 5,351,625 |
def pmat06(date1, date2):
"""
Wrapper for ERFA function ``eraPmat06``.
Parameters
----------
date1 : double array
date2 : double array
Returns
-------
rbp : double array
Notes
-----
The ERFA documentation is below.
- - - - - - - - - -
e r a P m a t 0 6
- ... | 5,351,626 |
def _splitData(data):
"""Takes either a cursor or result set and returns result set and list of columns."""
if hasattr(data, 'fetchall'):
rows = data.fetchall()
cols = data.columns()
elif isinstance(data, list):
rows = data
if hasattr(rows[0], '_fields'):
cols = r... | 5,351,627 |
def printSingles(hints):
"""Prints if a cell has a row/column/section constrained choice.
Useful when there are no cells with only one valid input. Highlights rows,
columns, and sections where certain inputs that aren't constrained by
validity, but the cell is only location in their row/column/section ... | 5,351,628 |
def get_cpu_count():
"""
Try and estimate the number of CPU on the host. First using multiprocessing
native function, other using content of /proc/cpuinfo. If none of those
methods did work, 4 is returned.
"""
try:
import multiprocessing
cpucount = multiprocessing.cpu_count()
... | 5,351,629 |
def get_relation_data(collection, relation_paths):
"""Prepare relations for usage inside extend_relations."""
out = []
for path in relation_paths:
promote = path.get("promote", False)
numpy_path = []
for step in path["steps"]:
if isinstance(step, str):
ste... | 5,351,630 |
def generate_paddle_quads():
"""
This function builds a matrix of paddles, each row in the matrix
represents the paddle skin (four colors) and each column represents
the size.
"""
paddle_base_width = 32
paddle_height = 16
x = 0
y = paddle_height * 4
spritesheet = []
for _ ... | 5,351,631 |
def cut(img):
"""
Applies central horizontal threshold in Fourier spectrum
"""
# Apply fourier transform and shift
img_fft = fftn(img)
img_fft_shift = fftshift(img_fft)
# Print spectrum before
plt.imshow(np.abs(img_fft_shift), cmap='gray', norm=LogNorm(vmin=5))
plt.show()
# Fi... | 5,351,632 |
def csrmm2(m, n, k, descrA, csrValA, csrRowPtrA, csrColIndA, B, handle=None,
C=None, nnz=None, transA=CUSPARSE_OPERATION_NON_TRANSPOSE,
transB=CUSPARSE_OPERATION_NON_TRANSPOSE, alpha=1.0, beta=0.0,
ldb=None, ldc=None, check_inputs=True):
""" multiply two sparse matrices: C = transA... | 5,351,633 |
def is_translated(path):
""" Checks if all files in the translation has at least one translation.
Arguments:
path (str): path to po-file
Returns: True if all files in translation has at least one translation,
otherwise False.
"""
po = polib.pofile(path)
files = []
for ... | 5,351,634 |
def pylm_component(name):
    """Class decorator registering a component class with lightmetrica.

    The decorated class is registered with its immediate (first) base
    class under ``name`` via the base's ``reg`` hook, then returned
    unchanged.

    :param name: Registration key for the component.
    """
    def decorator(cls):
        # Idiom fix: the original named this parameter `object`, shadowing
        # the builtin. Register with the primary base class.
        base = cls.__bases__[0]
        base.reg(cls, name)
        return cls
    return decorator
def dice_score(input_mask, target_mask, eps=1e-5):
"""
input mask: (B * K, HW) #probabilities [0, 1]
target_mask: (B * K, HW) #binary
"""
dims = tuple(range(1, input_mask.ndimension()))
intersections = torch.sum(input_mask * target_mask, dims) #(B, N)
cardinalities = torch.sum(input_mask + ... | 5,351,636 |
def append_write(filename="", text=""):
    """Append *text* to a UTF-8 text file and return the character count.

    The file is created if it does not already exist.
    """
    with open(filename, "a", encoding="utf-8") as out_file:
        out_file.write(text)
    return len(text)
def test_post_data(client):
    """
    Verify the app echoes the message field of a simple JSON POST.
    """
    payload = '{"message": "Hi, Friend!"}'
    response = client.post(
        "/",
        data=payload,
        headers={"Content-Type": "application/json"},
    )
    assert response.data == b"Hi, Friend!"
def psnr(img1, img2):
    """Return the PSNR (in dB) between two images, assuming peak value 1."""
    mse = torch.mean((img1 - img2) ** 2)
    # PSNR = 10 * log10(peak^2 / MSE), with peak = 1.
    return 10 * torch.log10(1.0 / mse)
def is_scalar(dims):
    """
    Tell whether a dims specification is effectively a scalar,
    i.e. its total dimension is 1.
    """
    total_dim = np.prod(flatten(dims))
    return total_dim == 1
def check_list_increasing(list_to_check: Union[list, tuple]):
"""
Check that a list does not decrease across any two consecutive elements.
Args:
list_to_check: The iterable that should be non-decreasing
"""
assert all(
list_to_check[i] <= list_to_check[i + 1] for i in range(len(lis... | 5,351,641 |
def applyMinv(obj, inputs, shape_cache):
"""Simple wrapper around a component's applyMinv where we can reshape the
arrays for each input and expand any needed array elements into full arrays.
"""
inputkeys = sorted(inputs.keys())
for key in inputkeys:
pre_process_dicts(obj, key, inputs, sha... | 5,351,642 |
def get_para_input(arg):
    """Resolve *arg* to an absolute input-directory path.

    A single trailing slash is stripped and backslashes are normalised
    to forward slashes.
    """
    path = os.path.abspath(arg)
    if str(path).endswith('/'):
        path = path[:-1]
    path = path.replace('\\', '/')
    return path
def get_county_data():
"""Get the raw data from coronavirus-tracker-api.herokuapp.com."""
url = ('https://coronavirus-tracker-api.herokuapp.com/v2/locations?source=csbs')
raw_data = None
while raw_data is None:
try:
raw_data = json.loads(requests.request('GET', url, verify=False).text)
except:
... | 5,351,644 |
def r2lm(measured_y, estimated_y):
"""
r^2 based on the latest measured y-values (r2lm)
Calculate r^2 based on the latest measured y-values. Measured_y and estimated_y must be vectors.
Parameters
----------
measured_y: numpy.array or pandas.DataFrame
estimated_y: numpy.array or pa... | 5,351,645 |
def G_to_NX_sparse(X, Y):
    """Convert sparse adjacency triplets into NetworkX directed graphs.

    X[n] is an iterable of (source, target, weight) edges; Y supplies the
    number of samples. Returns the graph list together with Y unchanged.
    """
    graphs = []
    for idx in range(len(Y)):
        digraph = nx.DiGraph()
        for src, dst, weight in X[idx]:
            digraph.add_edge(src, dst, weight=weight)
        graphs.append(digraph)
    return graphs, Y
def pmf(k, n, a, b, loc=0):
    """JAX implementation of scipy.stats.betabinom.pmf."""
    log_prob = logpmf(k, n, a, b, loc)
    return lax.exp(log_prob)
def discrete_bottleneck(x,
hidden_size,
z_size,
filter_size,
name,
mode=None,
startup_steps=50000,
bottleneck_kind='dvq',
num_bl... | 5,351,648 |
def binarize_categorical(x, ids):
    """Replace the categorical columns given by *ids* with binary one-hot columns.

    The encoded columns are appended after the remaining (non-categorical)
    columns, in the order of *ids*.
    """
    encoded = [binarize_categorical_feature(x[:, idx:idx + 1]) for idx in ids]
    remaining = np.delete(x, ids, axis=1)
    return np.hstack([remaining] + encoded)
def test_simple_launch(debugger_api: _DebuggerAPI, rcc: Rcc, rcc_config_location: str):
"""
This is an integrated test of the debug adapter. It communicates with it as if it was
VSCode.
"""
from robocorp_ls_core.debug_adapter_core.dap.dap_schema import TerminatedEvent
from robocorp_ls_core.debug... | 5,351,650 |
def import_json_dataset(fileset):
    """Return a list of imported raw JSON data for every file in the fileset."""
    return [import_json_data(path) for path in fileset]
def angle_difference(angle1, angle2):
"""
Calculates the difference between the given angles in clockwise direction as radians.
:param angle1: float
:param angle2: float
:return: float; between 0 and 2*Pi
"""
if (angle1 > 0 and angle2 >= 0) and angle1 > angle2:
return abs(angle1 - a... | 5,351,652 |
def p_attribute_dict(p):
    """
    attribute_dict : attribute attribute_dict
                   | attribute
    """
    # NOTE: the docstring above is a PLY/yacc grammar rule read at runtime
    # by the parser generator -- do not reformat or reword it.
    # p[1] is an attribute mapping; p[2], present only for the two-symbol
    # production, is the dict accumulated from the rest of the list.
    attribute, *attr_dict = p[1:]
    if attr_dict:
        # Two-symbol production: unpack the single accumulated dict.
        attr_dict, = attr_dict
    else:
        # Single-attribute production: start a fresh accumulator.
        attr_dict = dict()
    # Merge the head attribute; on duplicate keys the head overwrites
    # entries coming from the tail of the list.
    attr_dict.update(attribute)
    p[0] = attr_dict
def merge_nd(nd_cdp, nd_lldp):
""" Merge CDP and LLDP data into one structure """
neis = dict()
nd = list()
for n in nd_lldp:
neis[(n['local_device_id'], n['remote_device_id'], n['local_int'], n['remote_int'])] = n
for n in nd_cdp:
# Always prefer CDP, but grab description from L... | 5,351,654 |
def combine_divisions(division):
"""Return the new pattern after the rules have been applied to every division"""
size = int(sqrt(len(division)))
matrix = []
for r in xrange(size):
matrix.append([])
for c in xrange(r * size, (r + 1) * size):
matrix[len(matrix) - 1].append(di... | 5,351,655 |
def is_repair(expr):
"""
判断赋值表达式是否出现过滤函数,如果已经过滤,停止污点回溯,判定漏洞已修复
:param expr: 赋值表达式
:return:
"""
is_re = False # 是否修复,默认值是未修复
global is_repair_functions
if expr in is_repair_functions:
logger.debug("[AST] function {} in is_repair_functions, The vulnerability does not exist ".form... | 5,351,656 |
def modified_greedy(sentences,
tokenized,
model,
stopwords,
original_indices,
sent_representations,
objective_function,
min_sentence_length):
"""Implementation of the MMR summa... | 5,351,657 |
def noiseless(rho, unitary):
    """Return the noiseless measurement prediction for state *rho*.

    Evolves rho under *unitary* and thresholds the (0, 0) element of the
    result: outcome 0 when that element is >= ~0.5, outcome 1 otherwise.
    Returns (outcome, element).
    """
    evolved = unitary @ rho @ unitary.conj().T
    prob0 = evolved[0, 0]
    outcome = 0 if prob0 >= 0.49999999 else 1
    return outcome, prob0
def enter_text_slow(box, text):
    """Type *text* into *box* one character at a time.

    ForJoy fails to register input when send_keys delivers the whole
    string at once, so each keystroke is followed by a wait().
    """
    for char in text:
        box.send_keys(char)
        wait()
def __get_service_info_from_thrift(root_path, idl_service, need_test_methods):
"""从指定IDL_Service和request_config配置表中,获取测试方法和Request的映射表"""
customized_request_config = yaml.load(
open(os.path.join(root_path, 'test_red', 'request_config.yaml')))
method_request = collections.OrderedDict()
idl_meth... | 5,351,660 |
def energy_calc(p, t):
"""
Calculates energy from power and time using the formula:
energy = power * time
Parameters
----------
p: Int or float
The power value of the equation.
t: Int or float
The time value of the equation (seconds).
Returns
-------
Int
... | 5,351,661 |
def copy_data_to_device(device, data, destination, filename=None):
""" Copies data into a device and creates a file to store that data.
Args:
data ('str'): The data to be copied
destination ('str'): Folder of where to store file
filename ('str'): Name of the file cre... | 5,351,662 |
def api_images_list_json(version):
"""
Return Docker Image listing https://docs.docker.com/engine/api/v1.41/#tag/Image
:param version: Docker API version
:return: string of fake images associated with honeypot.
"""
logging.info("images-list - %s, %s, %s, %s, %s" % (
version, request.rem... | 5,351,663 |
def _handle_eval_return(self, result, col, as_pyranges, subset):
"""Handle return from eval.
If col is set, add/update cols. If subset is True, use return series to subset PyRanges.
Otherwise return PyRanges or dict of data."""
if as_pyranges:
if not result:
return pr.PyRanges()
... | 5,351,664 |
def ecg_ppg_adxl_temp():
"""
This is a testcase template.
Create a copy of this function, rename the function name and description to use this
:return:
"""
capture_time = 40 # seconds
exp_val = 0 # TODO: These variables need to be updated
act_val = 0 # TODO: These variables need to be... | 5,351,665 |
def compute_sigma0_sparse(V, dX, W_sensors, W_points, W_observations, column_dict):
"""
Computes the resulting standard deviation of the residuals for the current state of the bundle network.
Parameters
----------
V : ndarray
An array of residuals of the difference between registered meas... | 5,351,666 |
def version_callback() -> Any:
    """Print the package version, then return it."""
    message = f"version: {version}"
    print(message)
    return version
def fill(bitdef, value):
"""
Fill undefined bits with a value.
For example ``1..0100.1`` becomes ``111010011`` when filled with 1s.
Args:
bitdef (str): The bitdef to fill.
value (str): The value to fill with, "0" or "1".
Returns:
str: The filled bitdef.
"""
output ... | 5,351,668 |
def get_cman_info(state):
"""
Will take a list of congressmen and return the relevant attributes
:param congress_list: list of divs that contain congress data
:param state: state you are scraping
:return: list of relevant scraped attributes
"""
cman_attrs = []
abbrev = states[state]
... | 5,351,669 |
def concatenate_data(data, field='normalized_data'):
"""
Concatenate trial data in a list of dictionaries
:param data: nested dict, contains all trial infos
:param field: str, dict key in info dict in general data structure
:return:
"""
time_series = np.concatenate([info[field] for info in d... | 5,351,670 |
def _find_weight_ops(op, graph, weights):
""" Find the vars come from operators with weight.
"""
pre_ops = graph.pre_ops(op)
for pre_op in pre_ops:
### if depthwise conv is one of elementwise's input,
### add it into this same search space
if _is_depthwise(pre_op):
f... | 5,351,671 |
def test_try_for_unmatched_fails():
"""Bladerunner "hits enter" up to 3 times to try to guess the prompt."""
runner = Bladerunner()
server = Mock()
server.expect = Mock(side_effect=pexpect.TIMEOUT("fake"))
server.before = bytes_or_string("mock output")
with patch.object(runner, "send_interrupt... | 5,351,672 |
def get_GUI_presets_dict():
"""Return a dictionary of all of the available potential functions."""
preset_dict = {'cosine_potential': np.array([3.14, -6.28, 12.57, 0.01, 0,
0, 0, 0]).astype(str),
'two_gaussian_potential': np.array([2.67, -4, 4,... | 5,351,673 |
def _escapeEnds(original):
"""Comment, function end.
Escape comment end, because non-greedy becomes greedy in context. Example:
blockCommentNonGreedy = '(\s*/\*[\s\S]+?\*/\s*){0,1}?'
"""
original = _escapeWildCard(original)
commentEscaped = original \
.replace(commentEndEscape, commentE... | 5,351,674 |
def handle_bad_request(error: BadRequest) -> Response:
"""Render the base 400 error page."""
rendered = render_template("base/400.html", error=error,
pagetitle="400 Bad Request")
response: Response = make_response(rendered)
response.status_code = status.BAD_REQUEST
ret... | 5,351,675 |
def top_k_top_p_filtering(
logits: torch.FloatTensor,
top_k: int = 0,
top_p: float = 1.0,
filter_value: float = -float("Inf"),
min_tokens_to_keep: int = 1,
) -> torch.FloatTensor:
"""
Filter a distribution of logits using top-k and/or nucleus (top-p) filtering
Args:
logits: logi... | 5,351,676 |
def parseFimo(fimoFile, strand):
""" parse the fimo.txt file
Args:
the fimo.txt file
strand = single or double
Returns:
fimoDict: a dict between motif ID and a list of sequences it occurs in
"""
#dict to store for each motif list of seqs that it occurs in
fimoDict = {}
#read the fimo.txt file
with open... | 5,351,677 |
def pad_batch_dimension_for_multiple_chains(
observed_time_series, model, chain_batch_shape):
""""Expand the observed time series with extra batch dimension(s)."""
# Running with multiple chains introduces an extra batch dimension. In
# general we also need to pad the observed time series with a matching batc... | 5,351,678 |
def get_bytes(size: Union[str, int]) -> int:
"""Converts string representation of bytes to a number of bytes.
If an integer is passed, it is returned as is (no conversion).
Args:
size (Union[str, int]): A string or integer representation of bytes to be converted.
(e... | 5,351,679 |
def port_translation_func(req: AdvancedDataTypeRequest) -> AdvancedDataTypeResponse:
"""
Convert a passed in AdvancedDataTypeRequest to a AdvancedDataTypeResponse
"""
resp: AdvancedDataTypeResponse = {
"values": [],
"error_message": "",
"display_value": "",
"valid_filter_... | 5,351,680 |
def test_filter_env_remove():
    """
    Verify the ~xxx syntax: elements whose key does not match are removed.
    """
    config = YamlConfig(env='xx')
    filtered = config.filter_env({'~foo': 'bar'})
    assert filtered == {}
def stop(cli):
"""Wrapper function for the relevant RPC function call.
Args:
cli (str): Full path to cli binary associated with coin.
Returns:
String: String containing the command output.
"""
command = DAEMON_STOP_COMMAND.format(cli)
return subpro... | 5,351,682 |
def get_two_diff_order_index(start=0, stop=1, order=True, diff=True):
"""
Returns two integers from a range, they can be:
put in order (default) or unordered
always different(default) or can be repeated
start - integer (default = 0)
stop - integer (default= 1)
order - boolean ( defau... | 5,351,683 |
def setup(args):
"""
Create configs and perform basic setups.
"""
cfg = config.get_cfg()
if args.gpus is not None:
gpus = args.gpus
else:
gpus = []
gpus_str = ""
for g in gpus:
gpus_str += str(g) + ","
gpus_str = gpus_str[:-1]
os.environ['CUDA_VISIBLE_DEV... | 5,351,684 |
def GriewankRosenbrock(arr: np.ndarray, seed: int = 0) -> float:
"""Implementation for BBOB GriewankRosenbrock function."""
dim = len(arr)
r_x = np.matmul(_R(dim, seed, b"R"), arr)
# Slightly off BBOB documentation in order to center optima at origin.
# Should be: max(1.0, (dim**0.5) / 8.0) * r_x + 0.5 * np.o... | 5,351,685 |
def listCurrentAuctionsByKeyword(username, keyword):
"""Listar os leilões que estão a decorrer"""
try:
valid = utils.validateTypes([keyword], [str])
if not valid:
return jsonify({'erro': 404})
auctions = db.listAuctions(keyword)
if auctions == "noResults":
... | 5,351,686 |
def list_artifacts(arn=None, type=None, nextToken=None):
"""
Gets information about artifacts.
See also: AWS API Documentation
Examples
The following example lists screenshot artifacts for a specific run.
Expected Output:
:example: response = client.list_artifacts(
arn='str... | 5,351,687 |
def knife(bot, trigger):
    """Tell someone a wonderful fact about cats.

    Reads one random line from ~/knife_scraper/cat_facts.txt and says it.
    """
    import os

    # Bug fix: open() does not expand "~"; expanduser is required for the
    # path to resolve to the user's home directory.
    path = os.path.expanduser("~/knife_scraper/cat_facts.txt")
    # Context manager guarantees the handle is closed (the original leaked it).
    with open(path) as facts_file:
        fact_list = facts_file.readlines()
    if not fact_list:
        # randint(0, -1) would raise ValueError on an empty facts file.
        return
    fact_num = randint(0, len(fact_list) - 1)
    bot.say('{}'.format(fact_list[fact_num].strip()))
def _get_relative_maddir(maddir, port):
""" Return a relative path version of maddir
GPDB and HAWQ installations have a symlink outside of GPHOME that
links to the current GPHOME. After a DB upgrade, this symlink is updated to
the new GPHOME.
'maddir_lib', which uses the absolute path of GPHOME, i... | 5,351,689 |
def hash_dict(data: Dict) -> int:
"""
Hashes a Dictionary recursively.
List values are converted to Tuples.
WARNING: Hashing nested dictionaries is expensive.
"""
cleaned_dict: Dict = {}
def _clean_dict(data: Dict) -> Dict:
d: Dict = {}
for k, v in data.items():
... | 5,351,690 |
def sele():
"""身份验证的JSID获取.
Return:
若获取成功,则返回JSID字符串,
若获取失败,则返回空字符串""
"""
logger = logging.getLogger("sele.py")
logger.info("Start sele")
try:
# phantomjs请求头设置
dcap = dict(DesiredCapabilities.PHANTOMJS)
dcap["phantomjs.page.settings.userAgent"] = (
... | 5,351,691 |
def instrument_packages_ip_template(instrument, ip_version, template_name=None):
"""
Retrieves the specified instrument package template metadata
:param instrument: instrument used to make observation
:type instrument: str
:param ip_version: ip version description here
:type ip_version: float
... | 5,351,692 |
def add_feature_metadata(id, description, type):
"""Generate RDF metadata for a feature
:param id: if used to identify the feature
:param description: feature description
:param type: feature type
:return: rdflib graph after loading the feature
"""
g = Graph()
feature_uri = URIRef(OPEN... | 5,351,693 |
def test_select_nodes_by_layer():
"""Are nodes correctly filtered based on their layer?"""
ddg = dg.DiscourseDocumentGraph(namespace='test')
assert len(ddg) == 1
add_tokens(ddg, ['The', 'dog', 'barks', '.'])
assert len(ddg) == 5
# don't filter any nodes
all_node_ids = list(dg.select_nodes_b... | 5,351,694 |
def sink(input_flow_direction_raster):
"""
Creates a raster layer identifying all sinks or areas of internal drainage.
The value type for the Sink function output raster layer is floating point.
For more information, see
https://pro.arcgis.com/en/pro-app/help/data/imagery/sink-function.htm
P... | 5,351,695 |
def bad_multi_examples_per_input_estimator_out_of_range_input_refs(
export_path, eval_export_path):
"""Like the above (good) estimator, but the input_refs is out of range."""
estimator = tf.estimator.Estimator(model_fn=_model_fn)
estimator.train(input_fn=_train_input_fn, steps=1)
return util.export_model_a... | 5,351,696 |
def vector_to_diagonal(v):
    """Convert a vector to a diagonal matrix.

    Returns a square list-of-lists matrix whose diagonal holds the
    elements of *v* and whose off-diagonal entries are 0. An empty
    vector yields an empty matrix.
    """
    n = len(v)
    # Idiomatic comprehension replaces the original two-pass build
    # (zero matrix construction followed by diagonal assignment).
    return [[v[i] if i == j else 0 for j in range(n)] for i in range(n)]
def _get_filenames(path: Union[str, Path], media_type: MediaType) -> List[str]:
"""
Get filenames from a directory or a path to a file.
:param path: Path to the file or to the location that contains files.
:param media_type: Type of the media (image or video)
:example:
>>> path = "../images... | 5,351,698 |
def method(method_class):
"""Decorator to use to mark an API method.
When invoking L{Registry.scan} the classes marked with this decorator
will be added to the registry.
@param method_class: The L{Method} class to register.
"""
def callback(scanner, name, method_class):
if method_clas... | 5,351,699 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.