content stringlengths 22 815k | id int64 0 4.91M |
|---|---|
def create_output(whole_exome_indel_list, whole_exome_snp_list, coding_exons_indel_list, coding_exons_snp_list):
"""
This function creates the output file.
:param whole_exome_indel_list: A list the values for a single output line.
:type whole_exome_indel_list: list
:param whole_exome_snp_list: A li... | 5,352,700 |
def get_offset(sample_time):
"""
Find simple offsett values.
During the sample time of this function
the BBB with the magnetometer on should be rotated
along all axis.
sample_time is in seconds
"""
start = time.clock()
mag_samples = []
mag_max = [0,0,0]
mag_min = [0,0,0]
... | 5,352,701 |
def get_zips(directory: str) -> list:
"""
Return a the ZIP from a specified directory after running
some sanity checks
"""
zips = {}
for file in [os.path.join(dp, file) for dp, dn, fn in os.walk(directory) for file in fn]:
if file.split('.')[-1] != 'zip':
continue
... | 5,352,702 |
def get_multimode_2d_dist(num_modes: int = 1, scale: float = 1.0):
"""Get a multimodal distribution of Gaussians."""
angles = jnp.linspace(0, jnp.pi * 2, num_modes + 1)
angles = angles[:-1]
x, y = jnp.cos(angles) * scale / 2., jnp.sin(angles) * scale / 2.
loc = jnp.array([x, y]).T
scale = jnp.ones((num_mode... | 5,352,703 |
def residual_squared_error(data_1, data_2):
"""
Calculation the residual squared error between two arrays.
Parameters
----------
data: numpy array
Data
calc: numpy array
Calculated values
Return
------
rse: float
residual squared error
"""
RSS = np.... | 5,352,704 |
def by_index(pot):
""" Build a new potential where the keys of the potential dictionary
correspond to the indices along values of n-dimensional grids,
rather than, possibly, the coordinate values of the grids themselves.
Key Transformation:
((grid_val_i, grid_val_j, ...)_i,) -> ((i,... | 5,352,705 |
def current_user():
"""Получить текущего пользователя или отредактировать профиль"""
user = get_user_from_request()
if request.method == "POST":
json = request.get_json()
user.email = json.get("email", user.email)
user.name = json.get("name", user.name)
user.about = sanitiz... | 5,352,706 |
def writelines_infile(filename, lines):
    """Write *lines* to *filename*, replacing any existing contents.

    :param filename: path of the file to (over)write.
    :param lines: iterable of strings; newlines are NOT added automatically
        (same contract as ``file.writelines``).
    """
    with open(filename, "w") as out_file:
        out_file.writelines(lines)
def test_impossible_traveler_det_distance_bad_dest_coords(mocker):
"""
Given:
Coordinations lists to calculate distances between, when the two are different
When:
Calculating impossible traveler distances
Then
Raise an error
"""
with pytest.raises(ValueError) as e:
... | 5,352,708 |
async def async_union_polygons(bal_name, geom_list):
    """Union a set of polygons & return the resulting multipolygon.

    :param bal_name: label used only in the timing log line.
    :param geom_list: iterable of geometries passed to ``unary_union``.
    :return: the unioned geometry.
    """
    # NOTE(review): no await inside — runs synchronously despite being a
    # coroutine; presumably scheduled on an executor by the caller. Confirm.
    t0 = datetime.now()
    merged = unary_union(geom_list)
    print(f"\t - {bal_name} : set of polygons unioned: {datetime.now() - t0}")
    return merged
async def test_websocket_get_triggers(hass, hass_ws_client, device_reg, entity_reg):
"""Test we get the expected triggers from a light through websocket."""
await async_setup_component(hass, "device_automation", {})
config_entry = MockConfigEntry(domain="test", data={})
config_entry.add_to_hass(hass)
... | 5,352,710 |
def _error_text(because: str, text: str, backend: usertypes.Backend) -> str:
"""Get an error text for the given information."""
other_backend, other_setting = _other_backend(backend)
if other_backend == usertypes.Backend.QtWebKit:
warning = ("<i>Note that QtWebKit hasn't been updated since "
... | 5,352,711 |
def COUNT(condition: pd.DataFrame, n: int):
"""the number of days fits the 'condition' in the past n days
Args:
condition (pd.DataFrame): dataframe index by date time(level 0) and asset(level 1), containing bool values
n (int): the number of past days
"""
return condition.rollin... | 5,352,712 |
def Mat33_nrow():
    """Mat33_nrow() -> int

    SWIG-generated wrapper delegating to the native ``_simbody`` extension.
    Presumably returns 3, the row count of a 3x3 matrix — confirm against
    the SimTK binding.
    """
    return _simbody.Mat33_nrow()
def test_build_instru23():
    """USE_PL=0 PL_OVERLOAD_NEW_DELETE=0"""
    # NOTE: the docstring doubles as the build-flag string handed to
    # build_target, so its exact text is load-bearing — do not edit it.
    build_target("testprogram", test_build_instru23.__doc__)
def isstruct(ob):
    """Return True if *ob* advertises itself as an SSDF struct.

    An object marks itself via the ``__is_ssdf_struct__`` attribute; any
    object without that marker (or with a falsy marker) is not a struct.
    """
    return bool(getattr(ob, '__is_ssdf_struct__', False))
def where_between(field_name, start_date, end_date):
"""
Return the bit of query for the dates interval.
"""
str = """ {0} between date_format('{1}', '%%Y-%%c-%%d %%H:%%i:%%S')
and date_format('{2}', '%%Y-%%c-%%d 23:%%i:%%S')
""" .format( field_name,
... | 5,352,716 |
def to_console_formatted_string(data: dict) -> str:
"""..."""
def make_line(key: str) -> str:
if key.startswith('__cauldron_'):
return ''
data_class = getattr(data[key], '__class__', data[key])
data_type = getattr(data_class, '__name__', type(data[key]))
value = '{... | 5,352,717 |
def cluster_instance_get_info_ajax(request, c_id):
"""
get cluster instance status
"""
dic = {"res": True, "info":None, "err":None}
instance_id = request.GET.get("instance_id")
require_vnc = request.GET.get("require_vnc")
if require_vnc == "true":
require_vnc = True
else:
require_vnc = False
if instance_id... | 5,352,718 |
def timeit(verbose=False):
"""
Time functions via decoration. Optionally output time to stdout.
Parameters:
-----------
verbose : bool
Example Usage:
>>> @timeit(verbose=True)
>>> def foo(*args, **kwargs): pass
"""
def _timeit(f):
@wraps(f)
def wrapper(*args, **... | 5,352,719 |
async def _iter_reference_values(
client: redis.ResourceClient,
index: redis.ResourceIndex,
key: _RedisKeyT,
*,
window_size: int = DEFAULT_WINDOW_SIZE,
match: typing.Optional[str] = None,
) -> typing.AsyncIterator[typing.Iterator[typing.Optional[bytes]]]:
"""Asynchronously iterate over slice... | 5,352,720 |
def signal_period(peaks, sampling_rate=1000, desired_length=None,
interpolation_order="cubic"):
"""Calculate signal period from a series of peaks.
Parameters
----------
peaks : list, array, DataFrame, Series or dict
The samples at which the peaks occur. If an array is passed i... | 5,352,721 |
def model_co_group_by_key_tuple(email_list, phone_list, output_path):
"""Applying a CoGroupByKey Transform to a tuple.
URL: https://cloud.google.com/dataflow/model/group-by-key
"""
import google.cloud.dataflow as df
from google.cloud.dataflow.utils.options import PipelineOptions
p = df.Pipeline(options=Pip... | 5,352,722 |
def get_recommendation(anime_name, cosine_sim, clean_anime, anime_index):
"""
Getting pairwise similarity scores for all anime in the data frame.
The function returns the top 10 most similar anime to the given query.
"""
idx = anime_index[anime_name]
sim_scores = list(enumerate(cosine_sim[idx]))... | 5,352,723 |
def get_nwb_metadata(experiment_id):
"""
Collects metadata based on the experiment id and converts the weight to a float.
This is needed for further export to nwb_converter.
This function also validates, that all metadata is nwb compatible.
:param experiment_id: The experiment id given by the us... | 5,352,724 |
def string_to_value_error_mark(string: str) -> Tuple[float, Union[float, None], str]:
"""
Convert string to float and error.
Parameters
----------
string : str
DESCRIPTION.
Returns
-------
value : float
Value.
error : float
Error.
"""
value, error, ... | 5,352,725 |
def _children_with_tags(element, tags):
"""Returns child elements of the given element whose tag is in a given list.
Args:
element: an ElementTree.Element.
tags: a list of strings that are the tags to look for in child elements.
Returns:
an iterable of ElementTree.Element instances, which are the ch... | 5,352,726 |
def train_and_eval():
"""Train and evaluate StackOver NWP task."""
logging.info('Show FLAGS for debugging:')
for f in HPARAM_FLAGS:
logging.info('%s=%s', f, FLAGS[f].value)
hparam_dict = collections.OrderedDict([
(name, FLAGS[name].value) for name in HPARAM_FLAGS
])
if FLAGS.experiment_type == '... | 5,352,727 |
def align_with_known_width(val, width: int, lowerBitCntToAlign: int):
    """
    Does same as :func:`~.align` just with the known width of val
    """
    # Keep only the bits of val above the alignment boundary, within width.
    keep_mask = mask(width - lowerBitCntToAlign) << lowerBitCntToAlign
    return val & keep_mask
def get_email_from_request(request):
    """Use cpg-utils to extract user from already-authenticated request headers.

    :raises web.HTTPForbidden: when no user can be extracted from the headers.
    """
    user = get_user_from_headers(request.headers)
    if user:
        return user
    raise web.HTTPForbidden(reason='Invalid authorization header')
def test_get_authorization_header(authorizer):
    """
    Get authorization header, confirms expected value
    """
    expected = f"Bearer {TOKEN}"
    assert authorizer.get_authorization_header() == expected
def get_kernels(params: List[Tuple[str, int, int, int, int]]) -> List[np.ndarray]:
"""
Create list of kernels
:param params: list of tuples with following format ("kernel name", angle, multiplier, rotation angle)
:return: list of kernels
"""
kernels = [] # type: List[np.ndarray]
for param i... | 5,352,731 |
def forward_propagation(propagation_start_node, func, x):
"""A forward propagation starting at the `propagation_start_node` and
wrapping the all the composition operations along the way.
Parameters
----------
propagation_start_node : Node
The node where the gradient function (or anything si... | 5,352,732 |
def main():
"""
Main function of the script.
"""
args = parse_args()
if args.version:
print("{v}".format(v=__version__))
return 0
config = ConfigFile(args.config_file, CONFIG_FILE_SCHEMA)
if args.help_config:
print(config.help())
return 0
if args.conf... | 5,352,733 |
def read_entities():
    """Fetch all entities (name and id fields only) and return them as a JSON response."""
    entities = Entity.objects.only('name', 'id')
    return build_response.sent_json(entities.to_json())
def available(unit, item) -> bool:
"""
If any hook reports false, then it is false
"""
for skill in unit.skills:
for component in skill.components:
if component.defines('available'):
if component.ignore_conditional or condition(skill, unit):
if not... | 5,352,735 |
def step_impl(context, easting, northing, index):
"""
:type context: behave.runner.Context
:param easting: The easting value from the trajectory at index.
:type easting: float
:param northing: The northing value for the trajectory at index.
:type northing: float
:param index: The index of th... | 5,352,736 |
def cleanArray(arr):
"""Clean an array or list from unsupported objects for plotting.
Objects are replaced by None, which is then converted to NaN.
"""
try:
return np.asarray(arr, float)
except ValueError:
return np.array([x if isinstance(x, number_types) else None
... | 5,352,737 |
def translate_node_in_object_space(node, translation_list, **kwargs):
    """
    Translates given node with the given translation vector
    :param node: str
    :param translation_list: list(float, float, float)
    :raises NotImplementedError: always — this is a stub, presumably meant
        to be overridden by a DCC-specific backend (confirm against callers).
    """
    raise NotImplementedError()
def test_datetime(snapshot):
    """Simple test with datetime"""
    # A fixed date keeps the snapshot comparison deterministic across runs.
    expect = datetime.datetime(2017, 11, 19)
    snapshot.assert_match(expect)
def parse_args():
"""Main function for parsing args. Utilizes the 'check_config'
function from the config module to ensure an API key has been
passed. If not, user is prompted to conduct initial configuration
for pwea.
If a valid configuration is found (there is currently no validity
check for ... | 5,352,740 |
def test_tcp_server_ssl(sdc_builder, sdc_executor):
"""Runs a test using the TCP server origin pipeline with Enable TLS set and asserts that the file is received"""
expected_msg = get_expected_message(TCP_SSL_FILE_PATH)
# Start TCP server pipeline.
pipeline_builder = sdc_builder.get_pipeline_builder()
... | 5,352,741 |
def check_in_federated_context():
"""Checks if the current context is a `tff.program.FederatedContext`."""
context_stack = get_context_stack.get_context_stack()
if not isinstance(context_stack.current, FederatedContext):
raise ValueError(
'Expected the current context to be a `tff.program.FederatedCon... | 5,352,742 |
def _ensure_args(G, source, method, directed,
return_predecessors, unweighted, overwrite, indices):
"""
Ensures the args passed in are usable for the API api_name and returns the
args with proper defaults if not specified, or raises TypeError or
ValueError if incorrectly specified.
... | 5,352,743 |
def read_siemens_scil_b0():
""" Load Siemens 1.5T b0 image form the scil b0 dataset.
Returns
-------
img : obj,
Nifti1Image
"""
file = pjoin(dipy_home,
'datasets_multi-site_all_companies',
'1.5T',
'Siemens',
'b0.nii.gz'... | 5,352,744 |
def twin_primes():
    """
    Yield the twin primes: primes at distance two from another prime.
    OEIS A001097
    """
    gen = primes()
    lo, mid, hi = next(gen), next(gen), next(gen)
    while True:
        # mid is a twin if its predecessor or successor prime is 2 away.
        if mid - lo == 2 or hi - mid == 2:
            yield mid
        lo, mid, hi = mid, hi, next(gen)
def gdpcleaner(gdpdata: pd.DataFrame):
"""
Author: Gabe Fairbrother
Remove spurious columns, Rename relevant columns, Remove NaNs
Parameters
----------
gdpdata: DataFrame
a loaded dataframe based on a downloaded Open Government GDP at basic prices dataset (https://open.canada.ca/en/open... | 5,352,746 |
def sort_with_num(path):
    """Extract leading numbers in a file name for numerical sorting.

    :param path: path-like object exposing a ``name`` attribute
        (e.g. ``pathlib.Path``).
    :return: the integer value of the leading digit run, or 0 when the
        name does not start with a digit.
    """
    # Raw string avoids the invalid '\d' escape warning; re.match already
    # anchors at the start, so no '^' is needed.
    match = re.match(r'\d+', path.name)
    return int(match[0]) if match else 0
def ship_new(name):
    """Creates a new ship."""
    message = f"Created ship {name}"
    click.echo(message)
def test_get_representative_trip_for_route(request):
"""
Tests that parameters are read
"""
print("\n--Starting:", request.node.name)
transit_network = Transit.load_all_gtfs_feeds(
path = os.path.join(root_dir, "data", "external", "gtfs", "2015"),
roadway_network= roadway_network,
... | 5,352,749 |
def prob8(cur: sqlite3.Cursor) -> pd.DataFrame:
"""Give a list of the services which connect the stops 'Craiglockhart' and
'Tollcross'.
Parameters
----------
cur (sqlite3.Cursor) : The cursor for the database we're accessing.
Returns
-------
(pd.DataFrame) : Table with the ... | 5,352,750 |
def get_command(name):
    """Return the command produced by the factory registered under *name*."""
    command_factory = COMMANDS[name]
    return command_factory()
def start_adash(self):
"""Start Adash in background inside VM."""
adash_start_command = (
"./adash-linux-x86_64 --bindhost 0.0.0.0 -bindport 8081 -notoken &"
)
with When(
"Connect to VM and run the Adash in background",
description=f"{adash_start_command}",
):
ba... | 5,352,752 |
def _str_struct(a):
"""converts the structure to a string for logging purposes."""
shape_dtype = lambda x: (jnp.asarray(x).shape, str(jnp.asarray(x).dtype))
return str(jax.tree_map(shape_dtype, a)) | 5,352,753 |
def get_moved_files(dir_path: str) -> Set:
"""
获取要移动的文件(夹),包括:
- 文件夹
- 损坏的图片
- 非图像文件
- 重复的图片
"""
removed_files = set()
file_map = {}
for file in os.listdir(dir_path):
file_path = os.path.join(dir_path, file)
# 过滤文件
if os.path.isfile(file_pa... | 5,352,754 |
def delete_interface_address(
api_client, interface_id, address_id, **kwargs
): # noqa: E501
"""delete_interface_address # noqa: E501
Delete interface address details # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_re... | 5,352,755 |
async def set_promo(message: types.Message, state: FSMContext):
"""
Команда /setpromo
"""
arg = message.get_args()
if not arg:
return await message.answer(_("Укажите аргумент: промокод. Например: <pre>/set_promo my-promo-code</pre>"),
parse_mode="HTML")
... | 5,352,756 |
def plot_waterfall(*sigObjs, step=10, xLim:list=None,
Pmin=20, Pmax=None, tmin=0, tmax=None, azim=-72, elev=14,
cmap='jet', winPlot=False, waterfallPlot=True, fill=True,
lines=False, alpha=1, figsize=(20, 8), winAlpha=0,
removeGridLines=Fal... | 5,352,757 |
def write_output(
payload: Union[dict, List[dict]],
out_handle: TextIO,
fmt: str = "json",
compact: bool = False,
indent: int = 4,
) -> None:
"""Writes the given dictionary as JSON or YAML to the output handle.
:param payload: Payload to write.
:param out_handle: Output handle.
:par... | 5,352,758 |
def play_game(game):
"""Run the checkers game
This is the entry function that runs the entire game.
"""
game.show_rules()
game.get_player_names()
while True:
curr_player = game.black_name if game.blacks_turn else \
game.white_name
# Prompt for player input
mo... | 5,352,759 |
def test_pytest_really_fails():
"""Make sure pytest fails due to incorrect expected output in the .md.
Generate a pytest that will assert.
"""
simulator_status = verify.one_example(
"phmdoctest tests/unexpected_output.md --outfile discarded.py",
want_file_name=None,
pytest_optio... | 5,352,760 |
def indices(input_file):
"""
Parse the index file or target file and return a list of values.
:return:
"""
index_list = []
line_num = 0
index_file = list(csv.reader(open(input_file), delimiter='\t'))
for line in index_file:
line_num += 1
col_count = len(line)
if... | 5,352,761 |
def parse_arguments():
"""
Parse the arguments from the user
"""
parser = argparse.ArgumentParser(
description="omeClust visualization script.\n",
formatter_class=argparse.RawTextHelpFormatter)
parser.add_argument(
"adist",
help="the input file D*N, Rows: D feature... | 5,352,762 |
def checkSequences(numl, rowlen, seqlen):
"""In a square of numbers, represented by the list with
the given row length, look for the top product with length
of seqlen"""
listl=len(numl)
collen=math.ceil(listl/rowlen)
seqind=seqlen-1
log.debug("List length, collen: %d, %d", listl, collen)
... | 5,352,763 |
def has_same_facts(ruler_intervals1, ruler_intervals2, D):
"""
Check whether the two same-pattern ruler lists have the same facts at each corresponding ruler-interval
Args:
ruler_intervals1: a list of ruler-intervals
ruler_intervals2: a list of ruler-intervals
D: contain all relation... | 5,352,764 |
def listenInput(username):
"""Listens user input and returns formatted messages (PeerMessage) using generators."""
print('Starting listening user input...')
while (True):
msgToSend = input()
if (len(msgToSend) == 0):
continue
timeStr = datetime.datetime.now().strftime('%... | 5,352,765 |
def named_keywords_params_func2(name, age, *info, qq, **kwargs):
    """
    Demo of keyword-only parameters after a var-positional parameter.

    When the signature already contains a var-positional parameter
    (``*info``), the keyword-only parameters that follow (``qq``) no
    longer need the bare ``*`` separator.
    """
    print(name, age, *info, qq, kwargs)
def make_example_scripts_docs(spth, npth, rpth):
"""
Generate rst docs from example scripts. Arguments `spth`, `npth`,
and `rpth` are the top-level scripts directory, the top-level
notebooks directory, and the top-level output directory within the
docs respectively.
"""
# Ensure that outpu... | 5,352,767 |
def main():
""" """
undet = argument_parse()
print 'Start\t|\tCheck incorrect index'
fq_list = split_fastq(undet)
print 'Process\t|\tAnalysis undetermined data'
combined_df = multi_process(fq_list)
sorted_combined_df = combined_df.sort_values(
by='count',
ascending=F... | 5,352,768 |
def ruleset_delete(p_engine, p_username, rulesetname, envname):
"""
Delete ruleset from Masking engine
param1: p_engine: engine name from configuration
param2: rulesetname: ruleset name
return 0 if added, non 0 for error
"""
return ruleset_worker(p_engine=p_engine, p_username=p_username, ru... | 5,352,769 |
def tree_gzw(flag, width, mbl='-', xmin='-', xmax='-', ymin='-', ymax='-', logpath=None, outdir=None, shellscript=None):
"""
| Phase unwrapping tree generation (GZW algorithm)
| Copyright 2008, Gamma Remote Sensing, v3.6 5-Sep-2008 clw/uw
Parameters
----------
flag:
(input) phase unwrap... | 5,352,770 |
def upload_to_bucket(client, src_path, dest_bucket_name, dest_path):
"""Upload a file or a directory (recursively) from local file system to specified bucket.
Args:
client: Google Cloud storage client object to ask resources.
src_path: Path to the local file or directory you want to send
... | 5,352,771 |
def config_from_file(file_name):
    """Read *file_name* and return its parsed JSON contents."""
    with open(file_name) as handle:
        return ujson.load(handle)
def setMinGap(typeID, minGap):
    """setMinGap(string, double) -> None
    Sets the offset (gap to front vehicle if halting) of vehicles of this type.
    """
    # Sends VAR_MINGAP on the vehicle-type variable domain over the TraCI
    # command channel, encoded as a double payload.
    traci._sendDoubleCmd(tc.CMD_SET_VEHICLETYPE_VARIABLE, tc.VAR_MINGAP, typeID, minGap)
def build_yaml_object(
dataset_id: str,
table_id: str,
config: dict,
schema: dict,
metadata: dict = dict(),
columns_schema: dict = dict(),
partition_columns: list = list(),
):
"""Build a dataset_config.yaml or table_config.yaml
Args:
dataset_id (str): The dataset id.
... | 5,352,774 |
def read_candidate_data_list(file, path=IEDC_paths.candidates):
"""
Will read a candidate file and return its data.
:param file: Filename of the file to process
:param path: Path of the file
:return: Dictionary of dataframes for metadata, classifications, and data
"""
# make it a proper pat... | 5,352,775 |
def make_cumulative(frame, filedate, unit):
"""Create a cumulative graph of cases over time"""
gb = frame.groupby("Accurate_Episode_Date").agg(patients=("Row_ID", "count"))
gb = gb.resample("D").last().fillna(0).reset_index()
max_date = gb["Accurate_Episode_Date"].max().strftime("%Y-%m-%d")
gb["cumu... | 5,352,776 |
def most_common(l):
""" Helper function.
:l: List of strings.
:returns: most common string.
"""
# another way to get max of list?
#from collections import Counter
#data = Counter(your_list_in_here)
#data.most_common() # Returns all unique items and their counts
#data.most_... | 5,352,777 |
def reduce_avg(reduce_target, lengths, dim):
"""
Args:
reduce_target : shape(d_0, d_1,..,d_dim, .., d_k)
lengths : shape(d0, .., d_(dim-1))
dim : which dimension to average, should be a python number
"""
shape_of_lengths = lengths.get_shape()
shape_of_target = reduce_target.g... | 5,352,778 |
def _near_mod_2pi(e, t, atol=_DEFAULT_ATOL):
    """Returns whether a value, e, translated by t, is equal to 0 mod 2 * pi."""
    # Delegates to the generic modular comparison with the modulus fixed
    # to a full turn (2*pi).
    return _near_mod_n(e, t, n=2 * np.pi, atol=atol)
def create_test_votes_data(votes_table):
"""
Populates a votes table with test data
:param votes_table: table object
"""
votes_table.put_item(
Item={
"User": "user_1",
"TopicKey": "project_a/topic_aaa",
"ProjectName": "project_a",
"Topic": "top... | 5,352,780 |
def create_returns_similarity(strategy: QFSeries, benchmark: QFSeries, mean_normalization: bool = True,
std_normalization: bool = True, frequency: Frequency = None) -> KDEChart:
"""
Creates a new returns similarity chart. The frequency is determined by the specified returns series.... | 5,352,781 |
def info(path):
"""
Debug sideload status.
"""
_echo_table("System Info:", [system_info()])
_echo_table("Word Installation:", [office_installation("word")])
net_shares = get_net_shares()
_echo_table("Net Shares:", net_shares)
title = rf"HKEY_CURRENT_USER\{SUBKEY_OFFICE}\{OFFICE_SUBKEY... | 5,352,782 |
def apply_colormap_on_image(org_im, activation, colormap_name='viridis', alpha=.4, thresh=30):
"""
Apply heatmap on image
Args:
org_img (PIL img): Original image
activation_map (numpy arr): Activation map (grayscale) 0-255
colormap_name (str): Name of the colormap
"""
imp... | 5,352,783 |
def player_stats(cli, nick, chan, rest):
"""Gets the stats for the given player and role or a list of role totals if no role is given."""
if (chan != nick and var.LAST_PSTATS and var.PSTATS_RATE_LIMIT and
var.LAST_PSTATS + timedelta(seconds=var.PSTATS_RATE_LIMIT) >
datetime.now()):
... | 5,352,784 |
def _cli():
"""
command line interface
:return:
"""
parser = generate_parser()
args = parser.parse_args()
return interface(args.bids_dir,
args.output_dir,
args.aseg,
args.subject_list,
args.session_list,
... | 5,352,785 |
def run_experiment(parpath, subjname, run_id):
"""
Main function to run total experient.
For each new participant, a new run sequence will be generated
and saved into the output pickle file.
run_num selected from 1-8.
"""
if not os.path.isfile(pjoin(parpath, 'RecordStimuli', subjname... | 5,352,786 |
def statusize():
"""Posts a status from the web."""
db = get_session(current_app)
user_id = session.get('user_id')
if not user_id:
return forbidden('You must be logged in to statusize!')
user = db.query(User).get(user_id)
message = request.form.get('message', '')
if not message:
... | 5,352,787 |
def savedata():
"""
This function will put data in output file if given.
"""
# for item in tldSorting(finalset):
# print(termcolor.colored(item, color='green', attrs=['bold']))
# if ipv4list:
# print(termcolor.colored("\nGot Some IPv4 addresses:\n",
# ... | 5,352,788 |
def urlsafe_b64decode_nopadding(val):
    """Deal with unpadded urlsafe base64."""
    # The decoder tolerates surplus '=' characters, so over-padding with
    # three is simpler than computing the exact padding length.
    padded = str(val) + '==='
    return base64.urlsafe_b64decode(padded)
def _subimg_bbox(img, subimage, xc, yc):
"""
Find the x/y bounding-box pixel coordinates in ``img`` needed to
add ``subimage``, centered at ``(xc, yc)``, to ``img``. Returns
``None`` if the ``subimage`` would extend past the ``img``
boundary.
"""
ys, xs = subimage.shape
y, x = img.shap... | 5,352,790 |
def draw_rectangle(faces, img):
""" Draws the box and text around the intruder's face
Keyword arguments:
faces -- frames that have faces detected
img -- the frame itself, this is what we are drawing on.
"""
for (x, y, w, h) in faces:
cv2.rectangle(img, (x, y), (x + w, y + h), (0, 0, 255... | 5,352,791 |
def _ToDatetimeObject(date_str):
"""Converts a string into datetime object.
Args:
date_str: (str) A date and optional time for the oldest article
allowed. This should be in ISO 8601 format. (yyyy-mm-dd)
Returns:
datetime.datetime Object.
Raises:
ValueError: Invalid date for... | 5,352,792 |
def select_most_uncertain_patch(x_image_pl, y_label_pl, fb_pred, ed_pred, fb_prob_mean_bald, kernel_window, stride_size,
already_select_image_index, previously_selected_binary_mask, num_most_uncert_patch,
method):
"""This function is used to acquire th... | 5,352,793 |
def submit_extraction(connector, host, key, datasetid, extractorname):
"""Submit dataset for extraction by given extractor.
Keyword arguments:
connector -- connector information, used to get missing parameters and send status updates
host -- the clowder host, including http and port, should end with a ... | 5,352,794 |
def fix_sys_path():
# XXX - MarkH had the bright idea *after* writing this that we should
# ensure the CVS version of SpamBayes is *not* used to resolve SB imports.
# This would allow us to effectively test the distutils setup script, so
# any modules or files missing from the installed version raise er... | 5,352,795 |
def fcmp(x, y, precision):
    """fcmp(x, y, precision) -> -1, 0, or 1

    Three-way float comparison: 0 when x and y differ by less than
    *precision*, otherwise -1 or 1 by sign of the difference.
    """
    diff = x - y
    if math.fabs(diff) < precision:
        return 0
    return -1 if diff < 0 else 1
def destroy_nodes(cluster):
"""Call deleteinstance on all nodes in cluster."""
for z in cluster.keys():
for node in cluster[z]:
print "...deleting node %s in zone %s" % (node['name'], node['zone'])
_ = subprocess.call(["gcutil",
"--service_version=%s" % API_VE... | 5,352,797 |
def parse_vcf_line(line):
"""
Args:
line (str): line in VCF file obj.
Returns:
parsed_line_lst (lst): with tuple elem (chr, pos, ref, alt)
Example:
deletion
pos 123456789012
reference ATTAGTAGATGT
deletion ATTA---GATGT
VCF:
... | 5,352,798 |
def new_auth():
    """
    Performs the new authentication song and dance. Waves the dead chicken in the air in just the right way.
    @see: https://api-portal.digikey.com/node/188
    """
    # Two-step flow: step one yields a code consumed by step two.
    code = invoke_auth_magic_one()
    invoke_auth_magic_two(code)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.