content stringlengths 22 815k | id int64 0 4.91M |
|---|---|
def lemmatize(
nlp: Optional[Language] = None, name="lemmatize"
) -> ops.base.SpacyBasedOperation:
"""Helper function to return SpacyBasedOperation for lemmatizing.
This operation returns a stream.DataStream where each item is a string after
being lemmatized.
Parameters
----------
nlp : Opt... | 5,350,300 |
def mean_absolute_deviation(curve1: np.ndarray, curve2: np.ndarray, *args):
    """Return the mean absolute deviation between two curves.

    Extra positional arguments are accepted (so the function matches a
    common metric-callable interface) but are ignored.

    Parameters
    ----------
    curve1, curve2 : np.ndarray
        Arrays of equal (broadcastable) shape to compare element-wise.

    Returns
    -------
    float
        Mean of the element-wise absolute differences.
    """
    return np.mean(np.abs(curve1 - curve2))
def get_segtype_hops(seg_type, connector=None): # pragma: no cover
"""
Request list of segments by type used to construct paths.
:param seg_type: The type of PathSegmentType requested.
:returns: List of SCIONDSegTypeHopReplyEntry objects.
"""
global _connector
if not connector:
con... | 5,350,302 |
def main():
    """Run the parser on the file named by the first command-line argument."""
    target = optParse()
    parser = Doxypy()
    parser.parseFile(target)
def build_moderation_channel_embed(ctx, channel, action):
"""
Builds a moderation embed which display some information about the mod channel being created/removed
:param ctx: The discord context
:param channel: The channel to be created/removed
:param action: either "Added" or "Removed" to tell the... | 5,350,304 |
def print_settings(settings):
"""
This function returns the harmonic approximation settings .
Returns
-------
text: str
Pretty-printed settings for the current Quantas run.
"""
text = '\nCalculator: Equation of state (EoS) fitting\n'
text += '\nMeasurement units\n'
text +... | 5,350,305 |
def run_analysis(output, stimtype="gabors", comp="surp", ctrl=False,
CI=0.95, alg="sklearn", parallel=False, all_scores_df=None):
"""
run_analysis(output)
Calculates statistics on scores from runs for each specific analysis
criteria and saves them in the summary scores dataframe.
... | 5,350,306 |
def decrypt_ballots_with_all_guardians(
ballots: List[Dict], guardians: List[Dict], context: Dict
) -> Dict:
"""
Decrypt all ballots using the guardians.
Runs the decryption in batches, rather than all at once.
"""
ballots_per_batch = 2
decrypted_ballots: Dict = {}
for batch in batch_li... | 5,350,307 |
def getRealItemScenePos(item):
"""
Returns item's real position in scene space. Mostly for e.g. stranditems.
This will change as the root item is moved round the scene,
but should not change when zooming.
"""
view = pathview()
try:
vhitem = item.virtualHelixItem()
linepos = l... | 5,350,308 |
def get_default_upload_mode():
    """Return the default upload mode string.

    :return: the default mode as defined by the ``api`` module
    """
    default_mode = api.MODE_DEFAULT
    return default_mode
def remove_multi_whitespace(string_or_list):
    """Collapse runs of whitespace (spaces, tabs, newlines) into single spaces.

    :param string_or_list: a string, or a list of strings
    :return: the cleaned string, or a list of cleaned strings (matching the
        input's type)
    """
    # isinstance instead of `type(x) == str`: idiomatic and subclass-safe.
    if isinstance(string_or_list, str):
        return ' '.join(string_or_list.split())
    return [' '.join(item.split()) for item in string_or_list]
def validate_board(board: list) -> bool:
"""
Checks if board fits the rules. If fits returns True, else False.
>>> validate_board(["**** ****","***1 ****","** 3****","* 4 1****",\
" 9 5 "," 6 83 *","3 1 **"," 8 2***"," 2 ****"])
False
"""
if check_rows(board) and\
c... | 5,350,311 |
def test_copying_in_on_modified_block(
action_block_factory,
create_temp_files,
module_factory,
):
"""Module should copy properly."""
file1, file2, file3, file4 = create_temp_files(4)
file2.write_text('original')
file4.write_text('some other content')
action_block = action_block_factory... | 5,350,312 |
def get_H(m, n):
"""Calculate the distance of each point of the m, n matrix from the center"""
u = np.array([i if i <= m / 2 else m - i for i in range(m)],
dtype=np.float32)
v = np.array([i if i <= m / 2 else m - i for i in range(m)],
dtype=np.float32)
v.shape = n, 1
... | 5,350,313 |
def create_users(xml_filename, test_mode=False, verbose=False):
"""
Import OET cruise record XML file and create django auth users from the list of participants
:param filename: the name of the XML file
:return: the number of users created
"""
num_created = 0
cruise_record = xml2struct(xml_... | 5,350,314 |
def parse_variants(ref: str, call: List[str], pos: List[str],
results: Dict[str, int]):
""" Parse the variants and add to results """
call_variant = set(call)
pos_variant = set(pos)
# The types of concordant calls are counted separately
if call_variant == pos_variant:
# ... | 5,350,315 |
def learn(infile, outfile, apply_model, minimum_abundance_ratio, maximum_sec_shift, cb_decoys, xeval_fraction, xeval_num_iter, ss_initial_fdr, ss_iteration_fdr, ss_num_iter, xgb_autotune, parametric, pfdr, pi0_lambda, pi0_method, pi0_smooth_df, pi0_smooth_log_pi0, lfdr_truncate, lfdr_monotone, lfdr_transformation, lfdr... | 5,350,316 |
def writeJSONFile(filename,JSONDocument):
""" Writes a JSON document to a named file
Parameters
----------
filename : str
name of the file
JSONDocument : str
JSON document to write to the file
Returns
-------
True
"""
filename='data/'+filename
with open(fil... | 5,350,317 |
def read_readme():
    """Return the README content for packaging.

    When README.rst is absent (the usual case outside of a release
    checkout), fall back to this module's short docstring.
    """
    try:
        content = local_file('README.rst')
    except IOError:
        content = __doc__
    return content
def get_data_loader(dataset, dataset_dir, batch_size, workers=8, is_training=False):
""" Create data loader. """
return data.DataLoader(
get_dataset(dataset, is_training=is_training, dataset_dir=dataset_dir),
batch_size=batch_size,
shuffle=is_training,
num_workers=workers,
... | 5,350,319 |
def listwhom(detailed=False):
"""Return the list of currently avalailable databases for covid19
data in PyCoA.
The first one is the default one.
If detailed=True, gives information location of each given database.
"""
try:
if int(detailed):
df = pd.DataFrame(get_db_list_d... | 5,350,320 |
def paginate(
gcp_resource: Callable,
method_name: str,
items_name: str,
subitems_name: str = None,
exclude_region_resources: bool = False,
**kwargs,
) -> Iterable:
"""Paginate GCP API list and aggregatedList results.
Args:
gcp_resource: GCP resource on which we do our paging
... | 5,350,321 |
def project(s):
    """Maps (x,y,z) coordinates to planar-simplex."""
    # s may be a sequence of points or a single point; try the sequence
    # path first and fall back on failure (IndexError covers numpy arrays).
    try:
        return unzip(map(project_point, s))
    except (TypeError, IndexError):
        return project_point(s)
def point_at_angle_on_ellipse(
phi: ArrayLike, coefficients: ArrayLike
) -> NDArray:
"""
Return the coordinates of the point at angle :math:`\\phi` in degrees on
the ellipse with given canonical form coefficients.
Parameters
----------
phi
Point at angle :math:`\\phi` in degrees to ... | 5,350,323 |
def append_file(source, dest, seek=None):
"""
This function appends the contents of the source file into the dest file. If a seek value is specified, 0s are inserted from the end of the dest file until the seek value is reached.
"""
with open(source, 'rb') as in_file:
data = in_file.read()
with open(dest, 'ab'... | 5,350,324 |
def handle_row(row, data, a_tags):
"""
iterates through each post in the row
:param row: object of the specific row
:param data: dict format of the final data
:param a_tags: array of strings containing href's
:return: None
"""
single_post = defaultdict()
try:
for ... | 5,350,325 |
def fatal_errors():
    """Wrap an entire program so schema errors are reported in an
    easy-to-read way instead of as tracebacks.
    """
    try:
        yield
    except FatalSchemaError as exc:
        u.err(f"{exc.context}\n{exc.message}\n\n")
        sys.exit(1)
    except s.SchemaError as exc:
        u.err(f"\n{exc.code}\n\n")
        sys.exit(1)
def bmm(tensor1, tensor2):
"""
Performs a batch matrix-matrix product of this tensor
and tensor2. Both tensors must be 3D containing equal number
of matrices.
If this is a (b x n x m) Tensor, batch2 is a (b x m x p) Tensor,
Result will be a (b x n x p) Tensor.
Parameters
----------
... | 5,350,327 |
def test_check_custom_attr():
"""Test if Doc and Token custom attributes exist"""
doc = nlp("Joey doesnt share food")
token = doc[0]
# add new custom attributes
doc.set_attribute(name="doc_tag", value="doc_value")
token.set_attribute(name="token_tag", value="token_value")
# check if the c... | 5,350,328 |
def parse_variable(config, v):
"""Parse a configuration variable from a string that should look like 'key = value'
and write that value to config[key].
:param config: The configuration dict to wich to write the key,value pair
:param v: A string of the form 'key = value'
"""
if '=' not in... | 5,350,329 |
def get_monotask_from_macrotask(monotask_type, macrotask):
    """Return the first Monotask of the given type from the Macrotask.

    Raises StopIteration if the Macrotask holds no Monotask of that type.
    """
    matching = (m for m in macrotask.monotasks if isinstance(m, monotask_type))
    return next(matching)
def vivo_query(query, parms):
"""
A new VIVO query function using SPARQLWrapper. Tested with Stardog, UF VIVO and Dbpedia
:param query: SPARQL query. VIVO PREFIX will be added
:param parms: dictionary with query parms: queryuri, username and password
:return: result object, typically JSON
:rt... | 5,350,331 |
def get_services_by_type(service_type, db_session):
# type: (Str, Session) -> Iterable[models.Service]
"""
Obtains all services that correspond to requested service-type.
"""
ax.verify_param(service_type, not_none=True, not_empty=True, http_error=HTTPBadRequest,
msg_on_fail="Inva... | 5,350,332 |
def copy_arch(arch, library_dir, libgfortran, libquadmath):
"""Copy libraries specific to a given architecture.
Args:
arch (str): The architecture being copied.
library_dir (str): The directory containing the dynamic libraries.
libgfortran (str): The name (not path) of the ``libgfortran... | 5,350,333 |
def getREADMEforDescription(readmePath=os.path.join(os.path.abspath(os.path.dirname(__file__)), 'README.md')):
"""Use the Markdown from the file for the package's long_description.
long_description_content_type should be 'text/markdown' in this case.
This is why we need the README to be in the MANIFEST.in file.
... | 5,350,334 |
def init_db():
    """Initializes the database.

    Reads the bundled SQL schema file via the Flask app and executes it on
    a cursor from the module-level connection helper.
    NOTE: Python 2 syntax (print statements).
    """
    try:
        with _get_conn().cursor() as cursor:
            # execute schema sql file
            with app.open_resource('db/schema/0001/user.sql', mode='r') as f:
                sql = f.read()
            print sql
            result = cursor.execute(sql)
            print result
    finally:
        # NOTE(review): _get_conn() is called AGAIN here — unless it returns
        # a cached/singleton connection, this closes a different connection
        # than the one used above; confirm _get_conn()'s caching behavior.
        print _get_conn().close()
def abstractable(cls):
"""
A class decorator that scoops up AbstractValueRange class properties in order
to create .validate and .abstract methods for the class. Note that properties
added after the class is defined aren't counted. Each AbstractValueRange
found is is also replaced with a class instance constr... | 5,350,336 |
def summary1c(sequence):
"""
What comes in: A sequence of integers, all >= 2.
What goes out:
-- Returns the sum of INDICES of the items in the sequence
that are prime.
Side effects: None.
Examples:
-- If the given sequence is [20, 23, 29, 30, 33, 29, 100, 2, 4],
th... | 5,350,337 |
def choose_page(btn_click_list: List[Tuple[int, str]]) -> str:
"""
Given a list of tuples of (num_clicks, next_page) choose the next_page that
corresponds to exactly 1 num_clicks.
This is to help with deciding which page to go to next when clicking on one
of many buttons on a page.
The expecta... | 5,350,338 |
def get_content(request, path=''):
"""Get content from datastore as requested on the url path
Args:
path - comes without leading slash. / added in code
"""
content = StaticContent.get_by_key_name("/%s" % path)
if not content:
if path == '':
# Nothing generated y... | 5,350,339 |
def reconstruct_entity(input_examples, entitys_iter):
""" the entitys_iter contains the prediction entity of the splited examples.
We need to reconstruct the complete entitys for each example in input_examples.
and return the results as dictionary.
input_examples: each should contains (start, end) indic... | 5,350,340 |
def sudo_password_handler(spawn, context):
""" Password handler for sudo command
Need a better place for 'sudo' password, using line_password as workaround
"""
credentials = context.get('credentials')
if credentials:
try:
spawn.sendline(
to_plaintext(credentials[S... | 5,350,341 |
def ensure_valid_schema(spec: Dict) -> List[str]:
"""
Ensure that the provided spec has no schema errors.
Returns a list with all the errors found.
"""
error_messages = []
validator = cerberus.Validator(yaml.safe_load(SNOWFLAKE_SPEC_SCHEMA))
validator.validate(spec)
for entity_type, er... | 5,350,342 |
def test__dialect__base_parse_struct(
dialect,
sqlfile,
code_only,
yamlfile,
yaml_loader,
):
"""For given test examples, check parsed structure against yaml."""
parsed: Optional[BaseSegment] = parse_example_file(dialect, sqlfile)
actual_hash = compute_parse_tree_hash(parsed)
# Load t... | 5,350,343 |
def realord(s, pos=0):
"""
Returns the unicode of a character in a unicode string, taking surrogate pairs into account
"""
if s is None:
return None
code = ord(s[pos])
if code >= 0xD800 and code < 0xDC00:
if len(s) <= pos + 1:
print("realord warning: missing surro... | 5,350,344 |
def forceAspect(ax,aspect=1):
"""
Forces the aspect ratio to be equal
Copy of Yann's answer to the SO question:
http://stackoverflow.com/questions/7965743/\
how-can-i-set-the-aspect-ratio-in-matplotlib
:param ax:
:param aspect:
"""
im = ax.get_images()
extent = im[0].get_ex... | 5,350,345 |
def evaluateSpectral(left_state,right_state,xy):
"""Use this method to compute the Roe Average.
q(state)
q[0] = rho
q[1] = rho*u
q[2] = rho*v
q[3] = rho*e
"""
spec_state = numpy.zeros(left_state.shape)
rootrhoL = numpy.sqrt(left_state[0])
rootrhoR = numpy.sqrt(right_state[0])
... | 5,350,346 |
def get_multidata_bbg(requests):
"""function for multiple asynchronous refdata requests, returns a
dictionary of the form correlationID:result.
Function Parameters
----------
requests : dictionary of correlationID:request pairs. CorrelationIDs
are unique integers (cannot reuse until ... | 5,350,347 |
def specific_kinetic_energy(particles):
"""
Returns the specific kinetic energy of each particle in the set.
>>> from amuse.datamodel import Particles
>>> particles = Particles(2)
>>> particles.vx = [1.0, 1.0] | units.ms
>>> particles.vy = [0.0, 0.0] | units.ms
>>> particles.vz = [0.0, 0.0]... | 5,350,348 |
def parse_loot_percentage(text):
    """Parse a loot percentage string, e.g. 'Roubo: 50%' becomes 0.5."""
    raw_value = text.split(':')[1].strip("%")
    return float(raw_value) / 100
def major_minor_change(old_version, new_version):
    """Check whether a major or minor version change occurred.

    :param old_version: object with ``major`` and ``minor`` attributes
    :param new_version: object with ``major`` and ``minor`` attributes
    :return: True if either component differs, else False
    """
    # Direct boolean return instead of the redundant
    # `if cond: return True / return False` pattern.
    return (old_version.major != new_version.major
            or old_version.minor != new_version.minor)
def request_mnu_data() -> Iterator[MnuData]:
    """
    Request info from the NHK website, parsing each JSON entry as it arrives.
    """
    for entry in request_mnu_json():
        yield parse_json_entry(entry)
def list_all_queues(path: str, vhost: Optional[str] = '/') -> List[Dict]:
"""Send a request to RabbitMQ api to list all the data queues.
Args:
path: Path to the RabbitMQ management api to send the request to.
vhost: Virtual host of the RabbitMQ.
Returns:
List of all the data queues.
... | 5,350,352 |
def submit_images_local(c):
"""This command isn't implemented please modify to use.
The call below will work for submitting jobs to execute locally on a GPU.
Here we also map a volume to the docker container executing locally. This is the
location we tell our script to look for our training and valida... | 5,350,353 |
def get_tf_model_variables(config_path, init_checkpoint):
"""Return tf model parameters in a dictionary format.
Args:
config_path: path to TF model configuration file
init_checkpoint: path to saved TF model checkpoint
Returns:
tf_config: dictionary tf model configurations
tf_variables: dictionar... | 5,350,354 |
def org_unit_type_filter(queryset, passed_in_org_types):
    """Narrow *queryset* to the requested organisational unit types.

    Each entry in *passed_in_org_types* is applied as a successive
    ``filter(org_unit_type_id=...)`` call; an empty list leaves the
    queryset untouched.
    """
    for org_type in passed_in_org_types:
        queryset = queryset.filter(org_unit_type_id=org_type)
    return queryset
def circle_and_rectangle():
"""
-- Constructs an rg.RoseWindow.
-- Constructs and draws a rg.Circle and rg.Rectangle
on the window such that:
-- They fit in the window and are easily visible.
-- The rg.Circle is filled with 'blue'
-- Prints (on the console, on SEPARATE lines) ... | 5,350,356 |
def is_str_digit(n: str) -> bool:
    """Report whether *n* parses as a number (any float literal)."""
    try:
        float(n)
    except ValueError:
        return False
    return True
def draw_all_poly_detection(im_array, detections, class_names, scale, cfg, threshold=0.2):
"""
visualize all detections in one image
:param im_array: [b=1 c h w] in rgb
:param detections: [ numpy.ndarray([[x1 y1 x2 y2 score]]) for j in classes ]
:param class_names: list of names in imdb
:param s... | 5,350,358 |
def test_requests():
"""WebService_test.test_requests()
Use TestApp to confirm correct response status, status int,
and content-type.
"""
app = webtest.TestApp(WebService(TestFactory()))
# Check invalid request (bad values)
response = app.get("/?id=bad", expect_errors=True)
assert_equal... | 5,350,359 |
def generate_dswx_layers(input_list, output_file,
hls_thresholds = None,
dem_file=None,
output_interpreted_band=None,
output_rgb_file=None,
output_infrared_rgb_file=None,
... | 5,350,360 |
def range_check_function(bottom, top):
"""Returns a function that checks if bottom <= arg < top, allowing bottom and/or top to be None"""
import operator
if top is None:
if bottom is None:
# Can't currently happen (checked before calling this), but let's do something reasonable
return lambda _: True
else:
... | 5,350,361 |
def filter_employee():
""" When the client requests a specific employee.
Valid queries:
?employeeid=<employeeid>
Returns: json representation of product.
"""
query_parameters = request.args
conn = psycopg2.connect(DATABASE_URL, sslmode='require')
cursor = conn.cursor()
lookup_... | 5,350,362 |
def update_ufos(ai_settings,stats,sb,screen,ship,ufos,bullets):
"""update the position of ufos"""
check_fleet_edges(ai_settings, ufos)
ufos.update()
#检测ufos和飞船之间的撞船
if pygame.sprite.spritecollideany(ship,ufos):
ship_hit(ai_settings,stats,sb,screen,ship,ufos,bullets)
#检测ufos是否撞... | 5,350,363 |
def ParallelLSTDQ(D,env,w,damping=0.001,ncpus=None):
"""
D : source of samples (s,a,r,s',a')
env: environment contianing k,phi,gamma
w : weights for the linear policy evaluation
damping : keeps the result relatively stable
ncpus : the number of cpus to use
"""
if ncpus:
nproces... | 5,350,364 |
def get_value_from_settings_with_default_string(wf, value, default_value):
    """Look up *value* in the workflow settings, falling back to a default.

    :param wf: workflow object exposing a ``settings`` mapping
    :param value: settings key to look up
    :param default_value: returned unchanged when the key is missing
    :return: ``str(settings[value]['value'])`` or *default_value*
    """
    try:
        entry = wf.settings[value]['value']
    except KeyError:
        return default_value
    return str(entry)
def jsonify(records):
    """Convert an iterable of asyncpg Record rows into plain dicts.

    :param records: iterable of objects exposing ``.items()``
    :return: list of dicts, one per record
    """
    return [dict(record.items()) for record in records]
def acceptExtin(in_f, out_f, addr=None):
""" Create a command source with the given fds as input and output. """
# Force new versions to be loaded.
#
# deep_reload(Hub)
nubID = g.nubIDs.gimme()
d = Hub.ASCIIReplyDecoder(debug=1)
e = Hub.ASCIICmdEncoder(debug=1, sendCommander=True)
nub... | 5,350,367 |
def write_commits(commit_content, file_name):
    """Serialize commit content as JSON into the named file.

    :param commit_content: JSON-serializable commit data
    :param file_name: path of the output file (overwritten if present)
    :return: none
    """
    with open(file_name, "w") as out_file:
        json.dump(commit_content, out_file)
def smoothing_cross_entropy(logits,
labels,
vocab_size,
confidence,
gaussian=False,
zero_pad=True):
"""Cross entropy with label smoothing to limit over-confidence.
A... | 5,350,369 |
def test_ass_style_list_double_parenthood() -> None:
"""Test that style insertion cannot reclaim parenthood from another list."""
style = AssStyle(name="dummy style")
styles1 = AssStyleList()
styles2 = AssStyleList()
styles1.append(style)
with pytest.raises(TypeError):
styles2.append(sty... | 5,350,370 |
def get_db_matching_location_interurban(latitude, longitude) -> dict:
"""
extracts location from db by closest geo point to location found, using road number if provided and limits to
requested resolution
:param latitude: location latitude
:param longitude: location longitude
"""
def get_bo... | 5,350,371 |
def get(settings_obj, key, default=None, callback=None):
"""
Return a Sublime Text plugin setting value.
Parameters:
settings_obj - a sublime.Settings object or a dictionary containing
settings
key - the name of the setting
default - the default value to... | 5,350,372 |
def parse_json_confing(config_file):
"""Parse JSON for config
JSON will can look like this:
{
"request_type": "server",
"comboBase": "www.cdn.com"
"base": "/base/path", //build directory
"js_path": "js", //path relative to base
"css_path": "path/to/css", //... | 5,350,373 |
def site():
    """Render and return the main front-end page."""
    return render.html("index")
def validate_user_options(args):
"""Check the user has provided suitable operational options
:param args: cmd-line args parser
Return nothing
"""
logger = logging.getLogger(__name__)
if 'genbank' in args.source:
logger.info("Extract GenBank protein sequences")
if 'uni... | 5,350,375 |
def parse_vad_label(line, frame_size: float = 0.032, frame_shift: float = 0.008):
"""Parse VAD information in each line, and convert it to frame-wise VAD label.
Args:
line (str): e.g. "0.2,3.11 3.48,10.51 10.52,11.02"
frame_size (float): frame size (in seconds) that is used when
... | 5,350,376 |
def preprocess_image(img, img_width, img_height):
"""Preprocesses the image before feeding it into the ML model"""
x = get_square_image(img)
x = np.asarray(img.resize((img_width, img_height))).astype(np.float32)
x_transposed = x.transpose((2,0,1))
x_batchified = np.expand_dims(x_transposed, axis=0)
... | 5,350,377 |
def Pose_2_KUKA(H):
"""Converts a pose (4x4 matrix) to an XYZABC KUKA target (Euler angles), required by KUKA KRC controllers.
:param H: pose
:type H: :class:`.Mat`
.. seealso:: :class:`.Mat`, :func:`~robodk.TxyzRxyz_2_Pose`, :func:`~robodk.Pose_2_TxyzRxyz`, :func:`~robodk.Pose_2_ABB`, :func:`~r... | 5,350,378 |
def skip_regenerate_image(request: FixtureRequest) -> Optional[str]:
    """Resolve 'skip_regenerate_image' from the test param, the CLI/config
    option, or None — enabling parametrization of the same cli option.
    """
    option_value = _request_param_or_config_option_or_default(
        request, 'skip_regenerate_image', None)
    return option_value
def executeGC(img_name, filename, gamma):
"""
:type img_name: str, the file name of the single channel image
:type filename: str, the name for saving
:type gamma: float
"""
img = cv.imread(img_name, 0)
img = img.astype(np.float)
r, c = img.shape
for i in range(r):
for j in ra... | 5,350,380 |
def parse_config(tool_name, key_col_name, value_col_name):
"""Parses the "execute" field for the given tool from installation config
file.
Parameters:
tool_name: Tool name to search from file.
Raises:
STAPLERerror if config file does not exists.
STAPLERerror if tool value can no... | 5,350,381 |
def path_leaf(path):
    """Return the final component (file or directory name) of a path.

    Handles paths with a trailing separator by falling back to the
    last component of the head.

    :param str path: path to extract the leaf from
    :return str: the leaf name
    """
    head, tail = ntpath.split(path)
    if tail:
        return tail
    return ntpath.basename(head)
def delete_cache_clusters(
cluster_ids: List[str],
final_snapshot_id: str = None,
configuration: Configuration = None,
secrets: Secrets = None,
) -> List[AWSResponse]:
"""
Deletes one or more cache clusters and creates a final snapshot
Parameters:
cluster_ids: list: a list of one o... | 5,350,383 |
def define_styleGenerator(content_nc: int, style_nc: int, n_c: int, n_blocks=4, norm='instance', use_dropout=False, padding_type='zero', cbam=False, gpu_ids=[]):
"""
This ResNet applies the encoded style from the style tensor onto the given content tensor.
Parameters:
----------
- content_nc (i... | 5,350,384 |
def answer():
"""
answer
"""
# logger
M_LOG.info("answer")
if "answer" == flask.request.form["type"]:
# save answer
gdct_data["answer"] = {"id": flask.request.form["id"],
"type": flask.request.form["type"],
"sdp": fla... | 5,350,385 |
def load_requirements():
    """Read requirements from requirements-to-freeze.txt so they are not
    duplicated in more than one file.
    """
    parsed = parse_requirements('requirements-to-freeze.txt', session=False)
    return [str(item.req) for item in parsed]
def assert_is_dot_format(dot):
""" Checks that the dot is usable by graphviz. """
# We launch a process calling graphviz to render the dot. If the exit code is not 0 we assume that the syntax
# wasn't good
def run_graph(dot):
""" Runs graphviz to see if the syntax is good. """
graph = A... | 5,350,387 |
def discover(type=None, regex=None, paths=None):
"""Find and return available plug-ins
This function looks for files within paths registered via
:func:`register_plugin_path` and those added to `PYBLISHPLUGINPATH`.
It determines *type* - :class:`Selector`, :class:`Validator`,
:class:`Extractor` or ... | 5,350,388 |
def create_index(column_names, unique=False):
"""
Create a new index of the columns in column_names, where column_names is
a list of strings. If unique is True, it will be a
unique index.
"""
connection = _State.connection()
_State.reflect_metadata()
table_name = _State.table.name
t... | 5,350,389 |
def next_permutation(a):
"""Generate the lexicographically next permutation inplace.
https://en.wikipedia.org/wiki/Permutation#Generation_in_lexicographic_order
Return false if there is no next permutation.
"""
# Find the largest index i such that a[i] < a[i + 1]. If no such
# index exists, the... | 5,350,390 |
def normalize(v):
    """Scale a vector to unit length.

    :param v: input vector
    :return: the vector divided by its Euclidean norm
    """
    from numpy.linalg import norm
    magnitude = norm(v)
    return v / magnitude
def replace_hyphen_by_romaji(text):
"""
長音「ー」などを仮名に置換する。
"""
# error check
if len(text) < 2:
return ""
while "-" in list(text) or "~" in list(text):
text_ = text
if (text[0] == "-" or text[0] == "~") and len(text) >= 2:
text = text[2:]
continue
... | 5,350,392 |
def data_source_iterator(config: Dict[str, Any]) -> Iterable[DataFrame]:
"""Load all data tables defined by the provided config file."""
for source in config["sources"]:
try:
yield read_data(config["schema"], source["state"], source["url"])
print(f"Data successfully downloaded fo... | 5,350,393 |
def aucroc_ic50(df,threshold=500):
"""
Compute AUC ROC for predictions and targets in DataFrame, based on a given threshold
Parameters
----------
df : pandas.DataFrame with predictons in column "preds" and targets in column "targs" in nM
threshold: float, binding affinty threshold for binders in... | 5,350,394 |
def infer_getattr(node, context=None):
"""Understand getattr calls
If one of the arguments is an Uninferable object, then the
result will be an Uninferable object. Otherwise, the normal attribute
lookup will be done.
"""
obj, attr = _infer_getattr_args(node, context)
if (
obj is uti... | 5,350,395 |
def get_plugin(molcapsule: 'PyObject *', plug_no: 'int') -> "PyObject *":
    """get_plugin(molcapsule, plug_no) -> PyObject *

    Thin auto-generated (SWIG-style) wrapper: all real work happens in the
    compiled _libpymolfile extension.
    """
    # NOTE(review): argument semantics are defined by the C extension —
    # presumably molcapsule wraps the loaded molfile plugin table and
    # plug_no indexes into it; confirm against _libpymolfile's source.
    return _libpymolfile.get_plugin(molcapsule, plug_no)
def apply_input(dut, f):
"""Apply DuT input stimulus."""
# start the module
dut.active_i <= 1
# get file size
f_size = f.size()
# iterate over all 64 bit input data words
for i in range(f_size*8/BIT_WIDTH_INPUT):
# wait for fifo to become not full
while True:
yi... | 5,350,397 |
def fake_redis_con():
"""
Purpose:
Create Fake Redis Connection To Test With
Args:
N/A
Return:
fake_redis_con (Pytest Fixture (FakeRedis Connection Obj)): Fake redis connection
that simulates redis functionality for testing
"""
return fakeredis.FakeStrictRedi... | 5,350,398 |
def verbose(function, *args, **kwargs):
"""Improved verbose decorator to allow functions to override log-level
Do not call this directly to set global verbosrity level, instead use
set_log_level().
Parameters
----------
function - function
Function to be decorated to allow for overridi... | 5,350,399 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.