id_within_dataset
int64
1
55.5k
snippet
stringlengths
19
14.2k
tokens
listlengths
6
1.63k
nl
stringlengths
6
352
split_within_dataset
stringclasses
1 value
is_duplicated
bool
2 classes
4,581
def for_me(conditions, myself): if (not conditions.audience_restriction): return True for restriction in conditions.audience_restriction: if (not restriction.audience): continue for audience in restriction.audience: if (audience.text.strip() == myself): return True else: pass return False
[ "def", "for_me", "(", "conditions", ",", "myself", ")", ":", "if", "(", "not", "conditions", ".", "audience_restriction", ")", ":", "return", "True", "for", "restriction", "in", "conditions", ".", "audience_restriction", ":", "if", "(", "not", "restriction", ...
am i among the intended audiences .
train
true
4,582
def octahedron(radius, dtype=np.uint8): n = ((2 * radius) + 1) (Z, Y, X) = np.mgrid[(- radius):radius:(n * 1j), (- radius):radius:(n * 1j), (- radius):radius:(n * 1j)] s = ((np.abs(X) + np.abs(Y)) + np.abs(Z)) return np.array((s <= radius), dtype=dtype)
[ "def", "octahedron", "(", "radius", ",", "dtype", "=", "np", ".", "uint8", ")", ":", "n", "=", "(", "(", "2", "*", "radius", ")", "+", "1", ")", "(", "Z", ",", "Y", ",", "X", ")", "=", "np", ".", "mgrid", "[", "(", "-", "radius", ")", ":"...
generates a octahedron-shaped structuring element .
train
false
4,583
def test_read_space_delimiter(): table = '\n Name --Phone- ----TCP-----\n John 555-1234 192.168.1.10\n Mary 555-2134 192.168.1.12\n Bob 555-4527 192.168.1.9\n' dat = ascii.read(table, Reader=ascii.FixedWidth, guess=False, delimiter=' ') assert_equal(tuple(dat.dtype.names), ('Name', '--Phone-', '---...
[ "def", "test_read_space_delimiter", "(", ")", ":", "table", "=", "'\\n Name --Phone- ----TCP-----\\n John 555-1234 192.168.1.10\\n Mary 555-2134 192.168.1.12\\n Bob 555-4527 192.168.1.9\\n'", "dat", "=", "ascii", ".", "read", "(", "table", ",", "Reader", "=", "a...
table with space delimiter .
train
false
4,584
def getindex(ndim, ind, strides): ret = 0 for i in range(ndim): ret += (strides[i] * ind[i]) return ret
[ "def", "getindex", "(", "ndim", ",", "ind", ",", "strides", ")", ":", "ret", "=", "0", "for", "i", "in", "range", "(", "ndim", ")", ":", "ret", "+=", "(", "strides", "[", "i", "]", "*", "ind", "[", "i", "]", ")", "return", "ret" ]
convert multi-dimensional index to the position in the flat list .
train
false
4,585
def fixed2csv(f, schema, output=None, **kwargs): streaming = (True if output else False) if (not streaming): output = six.StringIO() try: encoding = kwargs['encoding'] except KeyError: encoding = None writer = agate.csv.writer(output) reader = FixedWidthReader(f, schema, encoding=encoding) writer.writerows...
[ "def", "fixed2csv", "(", "f", ",", "schema", ",", "output", "=", "None", ",", "**", "kwargs", ")", ":", "streaming", "=", "(", "True", "if", "output", "else", "False", ")", "if", "(", "not", "streaming", ")", ":", "output", "=", "six", ".", "String...
convert a fixed-width file to csv using a csv-formatted schema description .
train
false
4,587
def _wait_for_job(linode_id, job_id, timeout=300, quiet=True): interval = 5 iterations = int((timeout / interval)) for i in range(0, iterations): jobs_result = _query('linode', 'job.list', args={'LinodeID': linode_id})['DATA'] if ((jobs_result[0]['JOBID'] == job_id) and (jobs_result[0]['HOST_SUCCESS'] == 1)): ...
[ "def", "_wait_for_job", "(", "linode_id", ",", "job_id", ",", "timeout", "=", "300", ",", "quiet", "=", "True", ")", ":", "interval", "=", "5", "iterations", "=", "int", "(", "(", "timeout", "/", "interval", ")", ")", "for", "i", "in", "range", "(", ...
wait for a job to return .
train
false
4,588
def _CheckFacetDiscoveryLimit(facet_limit): if (facet_limit is None): return None else: return _CheckInteger(facet_limit, 'discover_facet_limit', upper_bound=MAXIMUM_FACETS_TO_RETURN)
[ "def", "_CheckFacetDiscoveryLimit", "(", "facet_limit", ")", ":", "if", "(", "facet_limit", "is", "None", ")", ":", "return", "None", "else", ":", "return", "_CheckInteger", "(", "facet_limit", ",", "'discover_facet_limit'", ",", "upper_bound", "=", "MAXIMUM_FACET...
checks the facet limit is an integer within range .
train
false
4,589
def createoutputdirs(outputs): for output in list(outputs.values()): dirname = os.path.dirname(output) if (not os.path.isdir(dirname)): os.makedirs(dirname)
[ "def", "createoutputdirs", "(", "outputs", ")", ":", "for", "output", "in", "list", "(", "outputs", ".", "values", "(", ")", ")", ":", "dirname", "=", "os", ".", "path", ".", "dirname", "(", "output", ")", "if", "(", "not", "os", ".", "path", ".", ...
create all output directories .
train
false
4,590
def flocker_volume_options(cls): original_parameters = getattr(cls, 'optParameters', []) cls.optParameters = (original_parameters + [['config', None, DEFAULT_CONFIG_PATH.path, 'The path to the Flocker volume configuration file, containing the node ID of the Flocker volume service on this node. This file will be creat...
[ "def", "flocker_volume_options", "(", "cls", ")", ":", "original_parameters", "=", "getattr", "(", "cls", ",", "'optParameters'", ",", "[", "]", ")", "cls", ".", "optParameters", "=", "(", "original_parameters", "+", "[", "[", "'config'", ",", "None", ",", ...
a class decorator to add volumeservice specific command line options to flocker commands .
train
false
4,591
def getAddIndexedLoops(loop, vertexes, zList): indexedLoops = [] for z in zList: indexedLoop = getAddIndexedLoop(loop, vertexes, z) indexedLoops.append(indexedLoop) return indexedLoops
[ "def", "getAddIndexedLoops", "(", "loop", ",", "vertexes", ",", "zList", ")", ":", "indexedLoops", "=", "[", "]", "for", "z", "in", "zList", ":", "indexedLoop", "=", "getAddIndexedLoop", "(", "loop", ",", "vertexes", ",", "z", ")", "indexedLoops", ".", "...
get and add indexed loops .
train
false
4,592
def _filterargs(source): argsregex = "}\\('(.*)', *(\\d+), *(\\d+), *'(.*)'\\.split\\('\\|'\\), *(\\d+), *(.*)\\)\\)" args = re.search(argsregex, source, re.DOTALL).groups() try: return (args[0], args[3].split('|'), int(args[1]), int(args[2])) except ValueError: raise UnpackingError('Corrupted p.a.c.k.e.r. data...
[ "def", "_filterargs", "(", "source", ")", ":", "argsregex", "=", "\"}\\\\('(.*)', *(\\\\d+), *(\\\\d+), *'(.*)'\\\\.split\\\\('\\\\|'\\\\), *(\\\\d+), *(.*)\\\\)\\\\)\"", "args", "=", "re", ".", "search", "(", "argsregex", ",", "source", ",", "re", ".", "DOTALL", ")", "...
juice from a source file the four args needed by decoder .
train
false
4,593
def is_dyad(frac): if (isinstance(frac, numbers.Integral) and (frac >= 0)): return True elif (isinstance(frac, Fraction) and (frac >= 0) and is_power2(frac.denominator)): return True else: return False
[ "def", "is_dyad", "(", "frac", ")", ":", "if", "(", "isinstance", "(", "frac", ",", "numbers", ".", "Integral", ")", "and", "(", "frac", ">=", "0", ")", ")", ":", "return", "True", "elif", "(", "isinstance", "(", "frac", ",", "Fraction", ")", "and"...
test if frac is a nonnegative dyadic fraction or integer .
train
false
4,594
def has_course_started(start_date): return (datetime.now(utc) > start_date)
[ "def", "has_course_started", "(", "start_date", ")", ":", "return", "(", "datetime", ".", "now", "(", "utc", ")", ">", "start_date", ")" ]
given a courses start datetime .
train
false
4,595
def idzp_rid(eps, m, n, matveca): proj = np.empty(((m + 1) + ((2 * n) * (min(m, n) + 1))), dtype=np.complex128, order='F') (k, idx, proj, ier) = _id.idzp_rid(eps, m, n, matveca, proj) if ier: raise _RETCODE_ERROR proj = proj[:(k * (n - k))].reshape((k, (n - k)), order='F') return (k, idx, proj)
[ "def", "idzp_rid", "(", "eps", ",", "m", ",", "n", ",", "matveca", ")", ":", "proj", "=", "np", ".", "empty", "(", "(", "(", "m", "+", "1", ")", "+", "(", "(", "2", "*", "n", ")", "*", "(", "min", "(", "m", ",", "n", ")", "+", "1", ")...
compute id of a complex matrix to a specified relative precision using random matrix-vector multiplication .
train
false
4,596
@register.tag def regroup(parser, token): bits = token.split_contents() if (len(bits) != 6): raise TemplateSyntaxError("'regroup' tag takes five arguments") target = parser.compile_filter(bits[1]) if (bits[2] != 'by'): raise TemplateSyntaxError("second argument to 'regroup' tag must be 'by'") if (bits[4] != 'a...
[ "@", "register", ".", "tag", "def", "regroup", "(", "parser", ",", "token", ")", ":", "bits", "=", "token", ".", "split_contents", "(", ")", "if", "(", "len", "(", "bits", ")", "!=", "6", ")", ":", "raise", "TemplateSyntaxError", "(", "\"'regroup' tag ...
regroups a list of alike objects by a common attribute .
train
false
4,597
def add_permissions(user, permissions): for permission in permissions: (app_label, __, codename) = permission.partition('.') perm = Permission.objects.get(content_type__app_label=app_label, codename=codename) user.user_permissions.add(perm)
[ "def", "add_permissions", "(", "user", ",", "permissions", ")", ":", "for", "permission", "in", "permissions", ":", "(", "app_label", ",", "__", ",", "codename", ")", "=", "permission", ".", "partition", "(", "'.'", ")", "perm", "=", "Permission", ".", "...
grant permissions to the passed user .
train
false
4,599
def check_dataset_edition_permission(authorize_get=False): def inner(view_func): def decorate(request, *args, **kwargs): dataset = kwargs.get('dataset') if ((dataset is not None) and (not (authorize_get and (request.method == 'GET')))): Job.objects.can_edit_or_exception(request, dataset.coordinator) ret...
[ "def", "check_dataset_edition_permission", "(", "authorize_get", "=", "False", ")", ":", "def", "inner", "(", "view_func", ")", ":", "def", "decorate", "(", "request", ",", "*", "args", ",", "**", "kwargs", ")", ":", "dataset", "=", "kwargs", ".", "get", ...
decorator ensuring that the user has the permissions to modify a dataset .
train
false
4,600
def HT_TRENDMODE(ds, count): return call_talib_with_ds(ds, count, talib.HT_TRENDMODE)
[ "def", "HT_TRENDMODE", "(", "ds", ",", "count", ")", ":", "return", "call_talib_with_ds", "(", "ds", ",", "count", ",", "talib", ".", "HT_TRENDMODE", ")" ]
hilbert transform - trend vs cycle mode .
train
false
4,601
def clear_compatversion_cache_on_delete(sender, instance, **kw): try: if (not (instance.addon.type == amo.ADDON_EXTENSION)): return except ObjectDoesNotExist: return if (not kw.get('raw')): instance.addon.invalidate_d2c_versions()
[ "def", "clear_compatversion_cache_on_delete", "(", "sender", ",", "instance", ",", "**", "kw", ")", ":", "try", ":", "if", "(", "not", "(", "instance", ".", "addon", ".", "type", "==", "amo", ".", "ADDON_EXTENSION", ")", ")", ":", "return", "except", "Ob...
clears compatversion cache when version deleted .
train
false
4,603
def try_deserialize_handler(serialized_handler): if serialized_handler: return pickle.loads(serialized_handler)
[ "def", "try_deserialize_handler", "(", "serialized_handler", ")", ":", "if", "serialized_handler", ":", "return", "pickle", ".", "loads", "(", "serialized_handler", ")" ]
reverse function of try_serialize_handler .
train
false
4,604
def AssignScriptMaps(script_maps, target, update='replace'): script_map_func = ('_AssignScriptMaps' + update.capitalize()) try: script_map_func = eval(script_map_func) except NameError: msg = ("Unknown ScriptMapUpdate option '%s'" % update) raise ConfigurationError(msg) script_maps = [str(s) for s in script_m...
[ "def", "AssignScriptMaps", "(", "script_maps", ",", "target", ",", "update", "=", "'replace'", ")", ":", "script_map_func", "=", "(", "'_AssignScriptMaps'", "+", "update", ".", "capitalize", "(", ")", ")", "try", ":", "script_map_func", "=", "eval", "(", "sc...
updates iis with the supplied script map information .
train
false
4,605
def wait_set(name, value, profile=None): return {'name': name, 'changes': {}, 'result': True, 'comment': ''}
[ "def", "wait_set", "(", "name", ",", "value", ",", "profile", "=", "None", ")", ":", "return", "{", "'name'", ":", "name", ",", "'changes'", ":", "{", "}", ",", "'result'", ":", "True", ",", "'comment'", ":", "''", "}" ]
set a key in etcd only if the watch statement calls it .
train
false
4,606
def _validate_core_properties(properties): props_string = '' for (prop_name, prop_value) in six.iteritems(properties): if (prop_name in STRING_PROPS_LIST): if (not isinstance(prop_value, six.string_types)): raise ValueError((('In option "properties", core property "' + prop_name) + '" value must be a string'...
[ "def", "_validate_core_properties", "(", "properties", ")", ":", "props_string", "=", "''", "for", "(", "prop_name", ",", "prop_value", ")", "in", "six", ".", "iteritems", "(", "properties", ")", ":", "if", "(", "prop_name", "in", "STRING_PROPS_LIST", ")", "...
internal function to validate core properties .
train
true
4,608
def _is_author(cc_content, context): return (context['cc_requester']['id'] == cc_content['user_id'])
[ "def", "_is_author", "(", "cc_content", ",", "context", ")", ":", "return", "(", "context", "[", "'cc_requester'", "]", "[", "'id'", "]", "==", "cc_content", "[", "'user_id'", "]", ")" ]
return true if the requester authored the given content .
train
false
4,609
def location_contact(): return s3_rest_controller(hide_filter=False)
[ "def", "location_contact", "(", ")", ":", "return", "s3_rest_controller", "(", "hide_filter", "=", "False", ")" ]
restful crud controller for community contacts .
train
false
4,610
def local_diff_branch(): remote = os.getenv(LOCAL_REMOTE_ENV) branch = os.getenv(LOCAL_BRANCH_ENV) if ((remote is not None) and (branch is not None)): return ('%s/%s' % (remote, branch))
[ "def", "local_diff_branch", "(", ")", ":", "remote", "=", "os", ".", "getenv", "(", "LOCAL_REMOTE_ENV", ")", "branch", "=", "os", ".", "getenv", "(", "LOCAL_BRANCH_ENV", ")", "if", "(", "(", "remote", "is", "not", "None", ")", "and", "(", "branch", "is...
get a remote branch to diff against in a local checkout .
train
false
4,611
def make_fasta(rec): gi = rec.annotations.get('gi', '') org = rec.annotations.get('organism', '') date = rec.annotations.get('date', '') head = ('>gi:%s, id:%s, org:%s, date:%s\n' % (gi, rec.id, org, date)) body = '\n'.join(textwrap.wrap(rec.seq.data, width=80)) return (head, body)
[ "def", "make_fasta", "(", "rec", ")", ":", "gi", "=", "rec", ".", "annotations", ".", "get", "(", "'gi'", ",", "''", ")", "org", "=", "rec", ".", "annotations", ".", "get", "(", "'organism'", ",", "''", ")", "date", "=", "rec", ".", "annotations", ...
creates fasta format from a record .
train
false
4,612
def find_css_class_with_wait(context, css_class, **kwargs): return _find_elem_with_wait(context, (By.CLASS_NAME, css_class), **kwargs)
[ "def", "find_css_class_with_wait", "(", "context", ",", "css_class", ",", "**", "kwargs", ")", ":", "return", "_find_elem_with_wait", "(", "context", ",", "(", "By", ".", "CLASS_NAME", ",", "css_class", ")", ",", "**", "kwargs", ")" ]
tries to find an element with given css class with an explicit timeout .
train
false
4,613
def degrees(radians=0, arcminutes=0, arcseconds=0): deg = 0.0 if radians: deg = math.degrees(radians) if arcminutes: deg += (arcminutes / arcmin(degrees=1.0)) if arcseconds: deg += (arcseconds / arcsec(degrees=1.0)) return deg
[ "def", "degrees", "(", "radians", "=", "0", ",", "arcminutes", "=", "0", ",", "arcseconds", "=", "0", ")", ":", "deg", "=", "0.0", "if", "radians", ":", "deg", "=", "math", ".", "degrees", "(", "radians", ")", "if", "arcminutes", ":", "deg", "+=", ...
return the degrees of the two node sets in the bipartite graph b .
train
true
4,615
def poweroff(): return shutdown()
[ "def", "poweroff", "(", ")", ":", "return", "shutdown", "(", ")" ]
power server off cli example: .
train
false
4,616
def package_relationship_delete(context, data_dict): model = context['model'] user = context['user'] (id, id2, rel) = _get_or_bust(data_dict, ['subject', 'object', 'type']) pkg1 = model.Package.get(id) pkg2 = model.Package.get(id2) if (not pkg1): raise NotFound(('Subject package %r was not found.' % id)) if (n...
[ "def", "package_relationship_delete", "(", "context", ",", "data_dict", ")", ":", "model", "=", "context", "[", "'model'", "]", "user", "=", "context", "[", "'user'", "]", "(", "id", ",", "id2", ",", "rel", ")", "=", "_get_or_bust", "(", "data_dict", ","...
delete a dataset relationship .
train
false
4,617
def logout_user(): user = _get_user() if ('user_id' in session): session.pop('user_id') if ('_fresh' in session): session.pop('_fresh') cookie_name = current_app.config.get('REMEMBER_COOKIE_NAME', COOKIE_NAME) if (cookie_name in request.cookies): session['remember'] = 'clear' user_logged_out.send(current_ap...
[ "def", "logout_user", "(", ")", ":", "user", "=", "_get_user", "(", ")", "if", "(", "'user_id'", "in", "session", ")", ":", "session", ".", "pop", "(", "'user_id'", ")", "if", "(", "'_fresh'", "in", "session", ")", ":", "session", ".", "pop", "(", ...
logs out the current .
train
true
4,618
def doctest(*paths, **kwargs): subprocess = kwargs.pop('subprocess', True) rerun = kwargs.pop('rerun', 0) print_counter = (lambda i: (print(('rerun %d' % (rerun - i))) if (rerun - i) else None)) if subprocess: for i in range(rerun, (-1), (-1)): print_counter(i) ret = run_in_subprocess_with_hash_randomizatio...
[ "def", "doctest", "(", "*", "paths", ",", "**", "kwargs", ")", ":", "subprocess", "=", "kwargs", ".", "pop", "(", "'subprocess'", ",", "True", ")", "rerun", "=", "kwargs", ".", "pop", "(", "'rerun'", ",", "0", ")", "print_counter", "=", "(", "lambda"...
execute sphinx doctest target .
train
false
4,619
def __build_word_numeral(*args): re_ = None for word_list in args: for word in word_list: if (not re_): re_ = '(?:(?=\\w+)' else: re_ += '|' re_ += word re_ += ')' return re_
[ "def", "__build_word_numeral", "(", "*", "args", ")", ":", "re_", "=", "None", "for", "word_list", "in", "args", ":", "for", "word", "in", "word_list", ":", "if", "(", "not", "re_", ")", ":", "re_", "=", "'(?:(?=\\\\w+)'", "else", ":", "re_", "+=", "...
build word numeral regexp from list .
train
false
4,620
def reshape_axes(axes, shape, newshape): if (len(axes) != len(shape)): raise ValueError('axes do not match shape') if (product(shape) != product(newshape)): raise ValueError(('can not reshape %s to %s' % (shape, newshape))) if ((not axes) or (not newshape)): return '' lendiff = max(0, (len(shape) - len(newsha...
[ "def", "reshape_axes", "(", "axes", ",", "shape", ",", "newshape", ")", ":", "if", "(", "len", "(", "axes", ")", "!=", "len", "(", "shape", ")", ")", ":", "raise", "ValueError", "(", "'axes do not match shape'", ")", "if", "(", "product", "(", "shape",...
return axes matching new shape .
train
true
4,621
def parse_header(line): plist = _parse_header_params((';' + line)) key = plist.pop(0).lower().decode(u'ascii') pdict = {} for p in plist: i = p.find('=') if (i >= 0): name = p[:i].strip().lower().decode(u'ascii') value = p[(i + 1):].strip() if ((len(value) >= 2) and (value[:1] == value[(-1):] == '"')):...
[ "def", "parse_header", "(", "line", ")", ":", "plist", "=", "_parse_header_params", "(", "(", "';'", "+", "line", ")", ")", "key", "=", "plist", ".", "pop", "(", "0", ")", ".", "lower", "(", ")", ".", "decode", "(", "u'ascii'", ")", "pdict", "=", ...
parse a content-type like header .
train
false
4,622
def set_log_format(log_format, server=_DEFAULT_SERVER): setting = 'LogPluginClsid' log_format_types = get_log_format_types() format_id = log_format_types.get(log_format, None) if (not format_id): message = "Invalid log format '{0}' specified. Valid formats: {1}".format(log_format, log_format_types.keys()) raise...
[ "def", "set_log_format", "(", "log_format", ",", "server", "=", "_DEFAULT_SERVER", ")", ":", "setting", "=", "'LogPluginClsid'", "log_format_types", "=", "get_log_format_types", "(", ")", "format_id", "=", "log_format_types", ".", "get", "(", "log_format", ",", "N...
set the active log format for the smtp virtual server .
train
true
4,624
def _fit_binary(estimator, X, y, classes=None): unique_y = np.unique(y) if (len(unique_y) == 1): if (classes is not None): if (y[0] == (-1)): c = 0 else: c = y[0] warnings.warn(('Label %s is present in all training examples.' % str(classes[c]))) estimator = _ConstantPredictor().fit(X, unique_y) ...
[ "def", "_fit_binary", "(", "estimator", ",", "X", ",", "y", ",", "classes", "=", "None", ")", ":", "unique_y", "=", "np", ".", "unique", "(", "y", ")", "if", "(", "len", "(", "unique_y", ")", "==", "1", ")", ":", "if", "(", "classes", "is", "no...
fit a single binary estimator .
train
false
4,625
def _enable_privilege(privilege_name): return _change_privilege_state(privilege_name, True)
[ "def", "_enable_privilege", "(", "privilege_name", ")", ":", "return", "_change_privilege_state", "(", "privilege_name", ",", "True", ")" ]
enables the named privilege for this process .
train
false
4,627
def _launch(appfile): _finder.open(_application_file(('ID ', appfile)))
[ "def", "_launch", "(", "appfile", ")", ":", "_finder", ".", "open", "(", "_application_file", "(", "(", "'ID '", ",", "appfile", ")", ")", ")" ]
open a file thru the finder .
train
false
4,628
def test_transform_data(): (n_sensors, n_vertices, n_times) = (10, 20, 4) kernel = rng.randn(n_vertices, n_sensors) sens_data = rng.randn(n_sensors, n_times) vertices = np.arange(n_vertices) data = np.dot(kernel, sens_data) for (idx, tmin_idx, tmax_idx) in zip([None, np.arange((n_vertices // 2), n_vertices)], [No...
[ "def", "test_transform_data", "(", ")", ":", "(", "n_sensors", ",", "n_vertices", ",", "n_times", ")", "=", "(", "10", ",", "20", ",", "4", ")", "kernel", "=", "rng", ".", "randn", "(", "n_vertices", ",", "n_sensors", ")", "sens_data", "=", "rng", "....
test applying linear transform to data .
train
false
4,629
def _window_too_small(): return Window(TokenListControl.static([(Token.WindowTooSmall, u' Window too small... ')]))
[ "def", "_window_too_small", "(", ")", ":", "return", "Window", "(", "TokenListControl", ".", "static", "(", "[", "(", "Token", ".", "WindowTooSmall", ",", "u' Window too small... '", ")", "]", ")", ")" ]
create a window that displays the window too small text .
train
false
4,631
def event_source_mapping_absent(name, EventSourceArn, FunctionName, region=None, key=None, keyid=None, profile=None): ret = {'name': None, 'result': True, 'comment': '', 'changes': {}} desc = __salt__['boto_lambda.describe_event_source_mapping'](EventSourceArn=EventSourceArn, FunctionName=FunctionName, region=region,...
[ "def", "event_source_mapping_absent", "(", "name", ",", "EventSourceArn", ",", "FunctionName", ",", "region", "=", "None", ",", "key", "=", "None", ",", "keyid", "=", "None", ",", "profile", "=", "None", ")", ":", "ret", "=", "{", "'name'", ":", "None", ...
ensure event source mapping with passed properties is absent .
train
true
4,632
def RetryNoBackoff(callable_func, retry_notify_func, delay=5, max_tries=200): return RetryWithBackoff(callable_func, retry_notify_func, delay, 1, delay, max_tries)
[ "def", "RetryNoBackoff", "(", "callable_func", ",", "retry_notify_func", ",", "delay", "=", "5", ",", "max_tries", "=", "200", ")", ":", "return", "RetryWithBackoff", "(", "callable_func", ",", "retry_notify_func", ",", "delay", ",", "1", ",", "delay", ",", ...
calls a function multiple times .
train
false
4,634
def version_handler(min_ver, max_ver=None): def decorator(f): min_version_float = float(min_ver) if max_ver: max_version_float = float(max_ver) else: max_version_float = float(max_version_string()) qualified_name = _fully_qualified_name(f) VERSIONED_METHODS[qualified_name].append((min_version_float, ma...
[ "def", "version_handler", "(", "min_ver", ",", "max_ver", "=", "None", ")", ":", "def", "decorator", "(", "f", ")", ":", "min_version_float", "=", "float", "(", "min_ver", ")", "if", "max_ver", ":", "max_version_float", "=", "float", "(", "max_ver", ")", ...
decorator for versioning api methods .
train
false
4,636
def get_deprecated_login_lock_out_by_combination_browser_user_agent(): return AUTH.LOGIN_LOCK_OUT_BY_COMBINATION_BROWSER_USER_AGENT_AND_IP.get()
[ "def", "get_deprecated_login_lock_out_by_combination_browser_user_agent", "(", ")", ":", "return", "AUTH", ".", "LOGIN_LOCK_OUT_BY_COMBINATION_BROWSER_USER_AGENT_AND_IP", ".", "get", "(", ")" ]
return value of deprecated login_lock_out_by_combination_browser_user_agent_and_ip config .
train
false
4,637
def cors_tool(): req_head = cherrypy.request.headers resp_head = cherrypy.response.headers resp_head['Access-Control-Allow-Origin'] = req_head.get('Origin', '*') resp_head['Access-Control-Expose-Headers'] = 'GET, POST' resp_head['Access-Control-Allow-Credentials'] = 'true' if (cherrypy.request.method == 'OPTIONS'...
[ "def", "cors_tool", "(", ")", ":", "req_head", "=", "cherrypy", ".", "request", ".", "headers", "resp_head", "=", "cherrypy", ".", "response", ".", "headers", "resp_head", "[", "'Access-Control-Allow-Origin'", "]", "=", "req_head", ".", "get", "(", "'Origin'",...
handle both simple and complex cors requests add cors headers to each response .
train
false
4,638
def test_ast_valid_let(): can_compile(u'(let [a b])') can_compile(u'(let [a 1])') can_compile(u'(let [a 1 b None])')
[ "def", "test_ast_valid_let", "(", ")", ":", "can_compile", "(", "u'(let [a b])'", ")", "can_compile", "(", "u'(let [a 1])'", ")", "can_compile", "(", "u'(let [a 1 b None])'", ")" ]
make sure ast can compile valid let .
train
false
4,639
def test_hsl_to_rgb_part_9(): assert (hsl_to_rgb(120, 20, 50) == (102, 153, 102)) assert (hsl_to_rgb(120, 60, 50) == (51, 204, 51)) assert (hsl_to_rgb(120, 100, 50) == (0, 255, 0))
[ "def", "test_hsl_to_rgb_part_9", "(", ")", ":", "assert", "(", "hsl_to_rgb", "(", "120", ",", "20", ",", "50", ")", "==", "(", "102", ",", "153", ",", "102", ")", ")", "assert", "(", "hsl_to_rgb", "(", "120", ",", "60", ",", "50", ")", "==", "(",...
test hsl to rgb color function .
train
false
4,641
def _CheckOffset(offset): return _CheckInteger(offset, 'offset', zero_ok=True, upper_bound=MAXIMUM_SEARCH_OFFSET)
[ "def", "_CheckOffset", "(", "offset", ")", ":", "return", "_CheckInteger", "(", "offset", ",", "'offset'", ",", "zero_ok", "=", "True", ",", "upper_bound", "=", "MAXIMUM_SEARCH_OFFSET", ")" ]
checks the offset in document list is an integer within range .
train
false
4,642
def env_func(f, argtypes): f.argtypes = argtypes f.restype = None f.errcheck = check_envelope return f
[ "def", "env_func", "(", "f", ",", "argtypes", ")", ":", "f", ".", "argtypes", "=", "argtypes", "f", ".", "restype", "=", "None", "f", ".", "errcheck", "=", "check_envelope", "return", "f" ]
for getting ogrenvelopes .
train
false
4,644
def delete_cached_file(filename): global _FILE_CACHE if (filename in _FILE_CACHE): del _FILE_CACHE[filename]
[ "def", "delete_cached_file", "(", "filename", ")", ":", "global", "_FILE_CACHE", "if", "(", "filename", "in", "_FILE_CACHE", ")", ":", "del", "_FILE_CACHE", "[", "filename", "]" ]
delete cached file if present .
train
false
4,645
def test_hsl_to_rgb_part_7(): assert (hsl_to_rgb(0, 20, 50) == (153, 102, 102)) assert (hsl_to_rgb(0, 60, 50) == (204, 51, 51)) assert (hsl_to_rgb(0, 100, 50) == (255, 0, 0))
[ "def", "test_hsl_to_rgb_part_7", "(", ")", ":", "assert", "(", "hsl_to_rgb", "(", "0", ",", "20", ",", "50", ")", "==", "(", "153", ",", "102", ",", "102", ")", ")", "assert", "(", "hsl_to_rgb", "(", "0", ",", "60", ",", "50", ")", "==", "(", "...
test hsl to rgb color function .
train
false
4,646
def render_pep440(pieces): if pieces['closest-tag']: rendered = pieces['closest-tag'] if (pieces['distance'] or pieces['dirty']): rendered += plus_or_dot(pieces) rendered += ('%d.g%s' % (pieces['distance'], pieces['short'])) if pieces['dirty']: rendered += '.dirty' else: rendered = ('0+untagged.%d....
[ "def", "render_pep440", "(", "pieces", ")", ":", "if", "pieces", "[", "'closest-tag'", "]", ":", "rendered", "=", "pieces", "[", "'closest-tag'", "]", "if", "(", "pieces", "[", "'distance'", "]", "or", "pieces", "[", "'dirty'", "]", ")", ":", "rendered",...
build up version string .
train
true
4,648
def get_plugin(name): if (internal._REGISTRY is None): raise NoSuchPluginException(("Could not load plugin {0} because the registry either hasn't " + 'been loaded or has just been unloaded.'.format(name))) return internal._REGISTRY[name]
[ "def", "get_plugin", "(", "name", ")", ":", "if", "(", "internal", ".", "_REGISTRY", "is", "None", ")", ":", "raise", "NoSuchPluginException", "(", "(", "\"Could not load plugin {0} because the registry either hasn't \"", "+", "'been loaded or has just been unloaded.'", "...
get an instance of a active plugin by name .
train
false
4,649
def _format_issue(issue): ret = {'id': issue.get('id'), 'issue_number': issue.get('number'), 'state': issue.get('state'), 'title': issue.get('title'), 'user': issue.get('user').get('login'), 'html_url': issue.get('html_url')} assignee = issue.get('assignee') if assignee: assignee = assignee.get('login') labels = ...
[ "def", "_format_issue", "(", "issue", ")", ":", "ret", "=", "{", "'id'", ":", "issue", ".", "get", "(", "'id'", ")", ",", "'issue_number'", ":", "issue", ".", "get", "(", "'number'", ")", ",", "'state'", ":", "issue", ".", "get", "(", "'state'", ")...
helper function to format api return information into a more manageable and useful dictionary for issue information .
train
true
4,650
def remove_handler(key, handler): if (type(key) is Predicate): key = key.name getattr(Q, key).remove_handler(handler)
[ "def", "remove_handler", "(", "key", ",", "handler", ")", ":", "if", "(", "type", "(", "key", ")", "is", "Predicate", ")", ":", "key", "=", "key", ".", "name", "getattr", "(", "Q", ",", "key", ")", ".", "remove_handler", "(", "handler", ")" ]
removes a handler from the ask system .
train
false
4,652
def iterModules(): return theSystemPath.iterModules()
[ "def", "iterModules", "(", ")", ":", "return", "theSystemPath", ".", "iterModules", "(", ")" ]
iterate all modules and top-level packages on the global python path .
train
false
4,653
def irange(start, end): return range(start, (end + 1))
[ "def", "irange", "(", "start", ",", "end", ")", ":", "return", "range", "(", "start", ",", "(", "end", "+", "1", ")", ")" ]
inclusive range from start to end irange -> 1 .
train
false
4,654
def divmod_by_constant(builder, val, divisor): assert (divisor > 0) divisor = val.type(divisor) one = val.type(1) quot = alloca_once(builder, val.type) with builder.if_else(is_neg_int(builder, val)) as (if_neg, if_pos): with if_pos: quot_val = builder.sdiv(val, divisor) builder.store(quot_val, quot) with...
[ "def", "divmod_by_constant", "(", "builder", ",", "val", ",", "divisor", ")", ":", "assert", "(", "divisor", ">", "0", ")", "divisor", "=", "val", ".", "type", "(", "divisor", ")", "one", "=", "val", ".", "type", "(", "1", ")", "quot", "=", "alloca...
compute the of *val* divided by the constant positive *divisor* .
train
false
4,655
def bellman_ford_predecessor_and_distance(G, source, target=None, cutoff=None, weight='weight'): if (source not in G): raise nx.NodeNotFound(('Node %s is not found in the graph' % source)) weight = _weight_function(G, weight) if any(((weight(u, v, d) < 0) for (u, v, d) in G.selfloop_edges(data=True))): raise nx....
[ "def", "bellman_ford_predecessor_and_distance", "(", "G", ",", "source", ",", "target", "=", "None", ",", "cutoff", "=", "None", ",", "weight", "=", "'weight'", ")", ":", "if", "(", "source", "not", "in", "G", ")", ":", "raise", "nx", ".", "NodeNotFound"...
compute shortest path lengths and predecessors on shortest paths in weighted graphs .
train
false
4,656
def test_album_info(*args, **kwargs): track_info = TrackInfo(title=u'new title', track_id=u'trackid', index=0) album_info = AlbumInfo(artist=u'artist', album=u'album', tracks=[track_info], album_id=u'albumid', artist_id=u'artistid') return iter([album_info])
[ "def", "test_album_info", "(", "*", "args", ",", "**", "kwargs", ")", ":", "track_info", "=", "TrackInfo", "(", "title", "=", "u'new title'", ",", "track_id", "=", "u'trackid'", ",", "index", "=", "0", ")", "album_info", "=", "AlbumInfo", "(", "artist", ...
create an albuminfo object for testing .
train
false
4,658
def _normalizeargs(sequence, output=None): if (output is None): output = [] cls = sequence.__class__ if ((InterfaceClass in cls.__mro__) or (Implements in cls.__mro__)): output.append(sequence) else: for v in sequence: _normalizeargs(v, output) return output
[ "def", "_normalizeargs", "(", "sequence", ",", "output", "=", "None", ")", ":", "if", "(", "output", "is", "None", ")", ":", "output", "=", "[", "]", "cls", "=", "sequence", ".", "__class__", "if", "(", "(", "InterfaceClass", "in", "cls", ".", "__mro...
normalize declaration arguments normalization arguments might contain declarions .
train
false
4,659
@mock_ec2 def test_igw_filter_by_tags(): conn = boto.connect_vpc(u'the_key', u'the_secret') igw1 = conn.create_internet_gateway() igw2 = conn.create_internet_gateway() igw1.add_tag(u'tests', u'yes') result = conn.get_all_internet_gateways(filters={u'tag:tests': u'yes'}) result.should.have.length_of(1) result[0]....
[ "@", "mock_ec2", "def", "test_igw_filter_by_tags", "(", ")", ":", "conn", "=", "boto", ".", "connect_vpc", "(", "u'the_key'", ",", "u'the_secret'", ")", "igw1", "=", "conn", ".", "create_internet_gateway", "(", ")", "igw2", "=", "conn", ".", "create_internet_g...
internet gateway filter by vpc id .
train
false
4,660
def load_sorts(): g = globals() if g['_sorting_init']: return def gt(a, b): return (a > b) default_sort = quicksort.make_jit_quicksort() reversed_sort = quicksort.make_jit_quicksort(lt=gt) g['run_default_sort'] = default_sort.run_quicksort g['run_reversed_sort'] = reversed_sort.run_quicksort g['_sorting_ini...
[ "def", "load_sorts", "(", ")", ":", "g", "=", "globals", "(", ")", "if", "g", "[", "'_sorting_init'", "]", ":", "return", "def", "gt", "(", "a", ",", "b", ")", ":", "return", "(", "a", ">", "b", ")", "default_sort", "=", "quicksort", ".", "make_j...
load quicksort lazily .
train
false
4,661
def require_support_permission(func): @wraps(func) def inner(request, *args, **kwargs): if has_access(request.user, 'support', 'global'): return func(request, *args, **kwargs) else: return HttpResponseForbidden() return login_required(inner)
[ "def", "require_support_permission", "(", "func", ")", ":", "@", "wraps", "(", "func", ")", "def", "inner", "(", "request", ",", "*", "args", ",", "**", "kwargs", ")", ":", "if", "has_access", "(", "request", ".", "user", ",", "'support'", ",", "'globa...
view decorator that requires the user to have permission to use the support ui .
train
false
4,662
def find_missing(input_list): total = ((len(input_list) * (len(input_list) + 1)) / 2) summed = 0 for element in input_list: summed += element missing = (total - summed) return missing
[ "def", "find_missing", "(", "input_list", ")", ":", "total", "=", "(", "(", "len", "(", "input_list", ")", "*", "(", "len", "(", "input_list", ")", "+", "1", ")", ")", "/", "2", ")", "summed", "=", "0", "for", "element", "in", "input_list", ":", ...
find the missing number in shuffled list .
train
false
4,663
def hostinterface_get(hostids, **connection_args): conn_args = _login(**connection_args) try: if conn_args: method = 'hostinterface.get' params = {'output': 'extend'} if hostids: params.setdefault('hostids', hostids) params = _params_extend(params, **connection_args) ret = _query(method, params, ...
[ "def", "hostinterface_get", "(", "hostids", ",", "**", "connection_args", ")", ":", "conn_args", "=", "_login", "(", "**", "connection_args", ")", "try", ":", "if", "conn_args", ":", "method", "=", "'hostinterface.get'", "params", "=", "{", "'output'", ":", ...
retrieve host groups according to the given parameters .
train
false
4,664
@utils.arg('monitor_id', metavar='<monitor-id>', help='ID of the monitor to upload to an image') @utils.arg('--force', metavar='<True|False>', help="Optional flag to indicate whether to upload a monitor even if it's attached to an instance. (Default=False)", default=False) @utils.arg('--container-format', metavar='<con...
[ "@", "utils", ".", "arg", "(", "'monitor_id'", ",", "metavar", "=", "'<monitor-id>'", ",", "help", "=", "'ID of the monitor to upload to an image'", ")", "@", "utils", ".", "arg", "(", "'--force'", ",", "metavar", "=", "'<True|False>'", ",", "help", "=", "\"Op...
upload monitor to image service as image .
train
false
4,666
def uninstall(pecls): if isinstance(pecls, six.string_types): pecls = [pecls] return _pecl('uninstall {0}'.format(_cmd_quote(' '.join(pecls))))
[ "def", "uninstall", "(", "pecls", ")", ":", "if", "isinstance", "(", "pecls", ",", "six", ".", "string_types", ")", ":", "pecls", "=", "[", "pecls", "]", "return", "_pecl", "(", "'uninstall {0}'", ".", "format", "(", "_cmd_quote", "(", "' '", ".", "joi...
remove one or more packages .
train
true
4,667
def returner_argspec(module=''): returners_ = salt.loader.returners(__opts__, []) return salt.utils.argspec_report(returners_, module)
[ "def", "returner_argspec", "(", "module", "=", "''", ")", ":", "returners_", "=", "salt", ".", "loader", ".", "returners", "(", "__opts__", ",", "[", "]", ")", "return", "salt", ".", "utils", ".", "argspec_report", "(", "returners_", ",", "module", ")" ]
return the argument specification of functions in salt returner modules .
train
true
4,668
@treeio_login_required @handle_response_format def currency_edit(request, currency_id, response_format='html'): currency = get_object_or_404(Currency, pk=currency_id) if ((not request.user.profile.has_permission(currency, mode='w')) and (not request.user.profile.is_admin('treeio_finance'))): return user_denied(requ...
[ "@", "treeio_login_required", "@", "handle_response_format", "def", "currency_edit", "(", "request", ",", "currency_id", ",", "response_format", "=", "'html'", ")", ":", "currency", "=", "get_object_or_404", "(", "Currency", ",", "pk", "=", "currency_id", ")", "if...
currency edit .
train
false
4,669
@pytest.mark.django_db def test_make_aware_use_tz_false(settings): settings.USE_TZ = False datetime_object = datetime(2016, 1, 2, 21, 52, 25) assert timezone.is_naive(datetime_object) datetime_aware = make_aware(datetime_object) assert timezone.is_naive(datetime_aware)
[ "@", "pytest", ".", "mark", ".", "django_db", "def", "test_make_aware_use_tz_false", "(", "settings", ")", ":", "settings", ".", "USE_TZ", "=", "False", "datetime_object", "=", "datetime", "(", "2016", ",", "1", ",", "2", ",", "21", ",", "52", ",", "25",...
tests datetimes are left intact if use_tz is not in effect .
train
false
4,670
def binary_partitions(n): from math import ceil, log pow = int((2 ** ceil(log(n, 2)))) sum = 0 partition = [] while pow: if ((sum + pow) <= n): partition.append(pow) sum += pow pow >>= 1 last_num = ((len(partition) - 1) - (n & 1)) while (last_num >= 0): (yield partition) if (partition[last_num] == ...
[ "def", "binary_partitions", "(", "n", ")", ":", "from", "math", "import", "ceil", ",", "log", "pow", "=", "int", "(", "(", "2", "**", "ceil", "(", "log", "(", "n", ",", "2", ")", ")", ")", ")", "sum", "=", "0", "partition", "=", "[", "]", "wh...
generates the binary partition of n .
train
false
4,671
def results_extractor(train_obj): return DD()
[ "def", "results_extractor", "(", "train_obj", ")", ":", "return", "DD", "(", ")" ]
default results extractor that does nothing .
train
false
4,672
def _cert_file(name, cert_type): return os.path.join(LE_LIVE, name, '{0}.pem'.format(cert_type))
[ "def", "_cert_file", "(", "name", ",", "cert_type", ")", ":", "return", "os", ".", "path", ".", "join", "(", "LE_LIVE", ",", "name", ",", "'{0}.pem'", ".", "format", "(", "cert_type", ")", ")" ]
return expected path of a lets encrypt live cert .
train
true
4,674
def unregister_webapi_capabilities(capabilities_id): try: del _registered_capabilities[capabilities_id] except KeyError: logging.error(u'Failed to unregister unknown web API capabilities "%s".', capabilities_id) raise KeyError((u'"%s" is not a registered web API capabilities set' % capabilities_id))
[ "def", "unregister_webapi_capabilities", "(", "capabilities_id", ")", ":", "try", ":", "del", "_registered_capabilities", "[", "capabilities_id", "]", "except", "KeyError", ":", "logging", ".", "error", "(", "u'Failed to unregister unknown web API capabilities \"%s\".'", ",...
unregisters a previously registered set of web api capabilities .
train
false
4,675
def parse_troubleshooting(troubleshooting_json): if (not troubleshooting_json): return None try: parsed = json.loads(troubleshooting_json) except ValueError: return None spec = (((), dict), (('accessibility',), dict), (('accessibility', 'isActive'), bool), (('application',), dict), (('application', 'name'), b...
[ "def", "parse_troubleshooting", "(", "troubleshooting_json", ")", ":", "if", "(", "not", "troubleshooting_json", ")", ":", "return", "None", "try", ":", "parsed", "=", "json", ".", "loads", "(", "troubleshooting_json", ")", "except", "ValueError", ":", "return",...
normalizes the troubleshooting data from question .
train
false
4,676
def addToThreadsRemove(extrusionHalfWidth, nestedRings, oldOrderedLocation, skein, threadSequence): while (len(nestedRings) > 0): getTransferClosestNestedRing(extrusionHalfWidth, nestedRings, oldOrderedLocation, skein, threadSequence)
[ "def", "addToThreadsRemove", "(", "extrusionHalfWidth", ",", "nestedRings", ",", "oldOrderedLocation", ",", "skein", ",", "threadSequence", ")", ":", "while", "(", "len", "(", "nestedRings", ")", ">", "0", ")", ":", "getTransferClosestNestedRing", "(", "extrusionH...
add to threads from the last location from nested rings .
train
false
4,677
def _CopyProperties(target_dict, source_dict): for (key, value) in source_dict['properties'].items(): assert ((key not in target_dict['properties']) or (target_dict['properties'][key] == value)), (source_dict, target_dict) target_dict['properties'][key] = deepcopy(value)
[ "def", "_CopyProperties", "(", "target_dict", ",", "source_dict", ")", ":", "for", "(", "key", ",", "value", ")", "in", "source_dict", "[", "'properties'", "]", ".", "items", "(", ")", ":", "assert", "(", "(", "key", "not", "in", "target_dict", "[", "'...
deep copies properties in source_dict[properties] to target_dict[properties] .
train
false
4,679
def user_chpass(user, host='localhost', password=None, password_hash=None, allow_passwordless=False, unix_socket=None, password_column=None, **connection_args): args = {} if (password is not None): password_sql = 'PASSWORD(%(password)s)' args['password'] = password elif (password_hash is not None): password_sq...
[ "def", "user_chpass", "(", "user", ",", "host", "=", "'localhost'", ",", "password", "=", "None", ",", "password_hash", "=", "None", ",", "allow_passwordless", "=", "False", ",", "unix_socket", "=", "None", ",", "password_column", "=", "None", ",", "**", "...
change password for a cluster admin or a database user .
train
false
4,682
def getFirstTranslatorFileNameUnmodified(fileName): if (fileName != ''): return fileName unmodified = getGNUTranslatorFilesUnmodified() if (len(unmodified) == 0): print 'There are no unmodified gcode files in this folder.' return '' return unmodified[0]
[ "def", "getFirstTranslatorFileNameUnmodified", "(", "fileName", ")", ":", "if", "(", "fileName", "!=", "''", ")", ":", "return", "fileName", "unmodified", "=", "getGNUTranslatorFilesUnmodified", "(", ")", "if", "(", "len", "(", "unmodified", ")", "==", "0", ")...
get the first file name from the translators in the import plugins folder .
train
false
4,683
def postfixes(seq): n = len(seq) for i in range(n): (yield seq[((n - i) - 1):])
[ "def", "postfixes", "(", "seq", ")", ":", "n", "=", "len", "(", "seq", ")", "for", "i", "in", "range", "(", "n", ")", ":", "(", "yield", "seq", "[", "(", "(", "n", "-", "i", ")", "-", "1", ")", ":", "]", ")" ]
generate all postfixes of a sequence .
train
false
4,685
def is_unavailable_exception(e): try: if ((e.errcode == (-1)) or (e.headers is None)): return True exc_mess = e.headers.get('X-exception') except AttributeError: exc_mess = str(e) if (exc_mess and ('temporarily unavailable' in exc_mess.lower())): return True
[ "def", "is_unavailable_exception", "(", "e", ")", ":", "try", ":", "if", "(", "(", "e", ".", "errcode", "==", "(", "-", "1", ")", ")", "or", "(", "e", ".", "headers", "is", "None", ")", ")", ":", "return", "True", "exc_mess", "=", "e", ".", "he...
returns true if the given protocolerror is the product of a server-side exception caused by the temporarily unavailable response sometimes given by operations on non-blocking sockets .
train
false
4,687
def symmath_check_simple(expect, ans, adict={}, symtab=None, extra_options=None): options = {'__MATRIX__': False, '__ABC__': False, '__LOWER__': False} if extra_options: options.update(extra_options) for op in options: if (op in expect): expect = expect.replace(op, '') options[op] = True expect = expect.r...
[ "def", "symmath_check_simple", "(", "expect", ",", "ans", ",", "adict", "=", "{", "}", ",", "symtab", "=", "None", ",", "extra_options", "=", "None", ")", ":", "options", "=", "{", "'__MATRIX__'", ":", "False", ",", "'__ABC__'", ":", "False", ",", "'__...
check a symbolic mathematical expression using sympy .
train
false
4,688
def get_tests_from_fs(parent_dir, control_pattern, add_noncompliant=False): tests = {} profilers = False if ('client/profilers' in parent_dir): profilers = True for dir in [parent_dir]: files = recursive_walk(dir, control_pattern) for file in files: if (('__init__.py' in file) or ('.svn' in file)): con...
[ "def", "get_tests_from_fs", "(", "parent_dir", ",", "control_pattern", ",", "add_noncompliant", "=", "False", ")", ":", "tests", "=", "{", "}", "profilers", "=", "False", "if", "(", "'client/profilers'", "in", "parent_dir", ")", ":", "profilers", "=", "True", ...
find control files in file system and load a list with their info .
train
false
4,689
def getNewRepository(): return ExportRepository()
[ "def", "getNewRepository", "(", ")", ":", "return", "ExportRepository", "(", ")" ]
get new repository .
train
false
4,690
def current_request(): return getattr(_thread_local, u'request', None)
[ "def", "current_request", "(", ")", ":", "return", "getattr", "(", "_thread_local", ",", "u'request'", ",", "None", ")" ]
retrieves the request from the current thread .
train
false
4,692
def b64encode(t): return base64.b64encode(t)
[ "def", "b64encode", "(", "t", ")", ":", "return", "base64", ".", "b64encode", "(", "t", ")" ]
encode a string using base64 .
train
false
4,693
def GetRealPath(filename): if os.path.isabs(filename): return filename if (filename.startswith('./') or filename.startswith('../')): return os.path.abspath(filename) path = os.getenv('PATH', '') for directory in path.split(':'): tryname = os.path.join(directory, filename) if os.path.exists(tryname): if (...
[ "def", "GetRealPath", "(", "filename", ")", ":", "if", "os", ".", "path", ".", "isabs", "(", "filename", ")", ":", "return", "filename", "if", "(", "filename", ".", "startswith", "(", "'./'", ")", "or", "filename", ".", "startswith", "(", "'../'", ")",...
given an executable filename .
train
true
4,694
@requires_sklearn def test_ica_rank_reduction(): raw = read_raw_fif(raw_fname).crop(0.5, stop).load_data() picks = pick_types(raw.info, meg=True, stim=False, ecg=False, eog=False, exclude='bads')[:10] n_components = 5 max_pca_components = len(picks) for n_pca_components in [6, 10]: with warnings.catch_warnings(r...
[ "@", "requires_sklearn", "def", "test_ica_rank_reduction", "(", ")", ":", "raw", "=", "read_raw_fif", "(", "raw_fname", ")", ".", "crop", "(", "0.5", ",", "stop", ")", ".", "load_data", "(", ")", "picks", "=", "pick_types", "(", "raw", ".", "info", ",", ...
test recovery ica rank reduction .
train
false
4,695
def _encode_datetime(name, value, dummy0, dummy1): millis = _datetime_to_millis(value) return ((' DCTB ' + name) + _PACK_LONG(millis))
[ "def", "_encode_datetime", "(", "name", ",", "value", ",", "dummy0", ",", "dummy1", ")", ":", "millis", "=", "_datetime_to_millis", "(", "value", ")", "return", "(", "(", "' DCTB '", "+", "name", ")", "+", "_PACK_LONG", "(", "millis", ")", ")" ]
encode datetime .
train
true
4,696
@pytest.fixture def import_fake(monkeypatch): fake = ImportFake() monkeypatch.setattr('builtins.__import__', fake.fake_import) monkeypatch.setattr('qutebrowser.utils.version.importlib.import_module', fake.fake_importlib_import) return fake
[ "@", "pytest", ".", "fixture", "def", "import_fake", "(", "monkeypatch", ")", ":", "fake", "=", "ImportFake", "(", ")", "monkeypatch", ".", "setattr", "(", "'builtins.__import__'", ",", "fake", ".", "fake_import", ")", "monkeypatch", ".", "setattr", "(", "'q...
fixture to patch imports using importfake .
train
false
4,697
def _reset_config(app): user = factories.Sysadmin() env = {'REMOTE_USER': user['name'].encode('ascii')} app.post(url=url_for(controller='admin', action='reset_config'), extra_environ=env)
[ "def", "_reset_config", "(", "app", ")", ":", "user", "=", "factories", ".", "Sysadmin", "(", ")", "env", "=", "{", "'REMOTE_USER'", ":", "user", "[", "'name'", "]", ".", "encode", "(", "'ascii'", ")", "}", "app", ".", "post", "(", "url", "=", "url...
reset config via action .
train
false
4,699
@register.simple_tag def check_description(check): try: return escape(CHECKS[check].description) except KeyError: return escape(check)
[ "@", "register", ".", "simple_tag", "def", "check_description", "(", "check", ")", ":", "try", ":", "return", "escape", "(", "CHECKS", "[", "check", "]", ".", "description", ")", "except", "KeyError", ":", "return", "escape", "(", "check", ")" ]
returns check description .
train
false
4,700
@contextmanager def check_exact_number_of_calls(object_with_method, method_name, num_calls): with check_number_of_calls(object_with_method, method_name, num_calls, num_calls): (yield)
[ "@", "contextmanager", "def", "check_exact_number_of_calls", "(", "object_with_method", ",", "method_name", ",", "num_calls", ")", ":", "with", "check_number_of_calls", "(", "object_with_method", ",", "method_name", ",", "num_calls", ",", "num_calls", ")", ":", "(", ...
instruments the given method on the given object to verify the number of calls to the method is exactly equal to num_calls .
train
false
4,701
def method_params(doc): doclines = doc.splitlines() if ('Args:' in doclines): begin = doclines.index('Args:') if ('Returns:' in doclines[(begin + 1):]): end = doclines.index('Returns:', begin) args = doclines[(begin + 1):end] else: args = doclines[(begin + 1):] parameters = [] for line in args: ...
[ "def", "method_params", "(", "doc", ")", ":", "doclines", "=", "doc", ".", "splitlines", "(", ")", "if", "(", "'Args:'", "in", "doclines", ")", ":", "begin", "=", "doclines", ".", "index", "(", "'Args:'", ")", "if", "(", "'Returns:'", "in", "doclines",...
document the parameters of a method .
train
false
4,702
def iter_multipart_mime_documents(wsgi_input, boundary, read_chunk_size=4096): boundary = ('--' + boundary) blen = (len(boundary) + 2) try: got = wsgi_input.readline(blen) while (got == '\r\n'): got = wsgi_input.readline(blen) except (IOError, ValueError) as e: raise swift.common.exceptions.ChunkReadError(...
[ "def", "iter_multipart_mime_documents", "(", "wsgi_input", ",", "boundary", ",", "read_chunk_size", "=", "4096", ")", ":", "boundary", "=", "(", "'--'", "+", "boundary", ")", "blen", "=", "(", "len", "(", "boundary", ")", "+", "2", ")", "try", ":", "got"...
given a multi-part-mime-encoded input file object and boundary .
train
false
4,703
@command('(open|view)\\s*(\\d{1,4})') def open_view_bynum(action, num): srt = sorted(g.userpl) name = srt[(int(num) - 1)] open_save_view(action, name)
[ "@", "command", "(", "'(open|view)\\\\s*(\\\\d{1,4})'", ")", "def", "open_view_bynum", "(", "action", ",", "num", ")", ":", "srt", "=", "sorted", "(", "g", ".", "userpl", ")", "name", "=", "srt", "[", "(", "int", "(", "num", ")", "-", "1", ")", "]", ...
open or view a saved playlist by number .
train
false
4,704
def is_container_agent_running(node): d = node.run_script('service_running', 'flocker-container-agent') def not_existing(failure): failure.trap(ProcessTerminated) return False d.addCallbacks((lambda result: True), not_existing) return d
[ "def", "is_container_agent_running", "(", "node", ")", ":", "d", "=", "node", ".", "run_script", "(", "'service_running'", ",", "'flocker-container-agent'", ")", "def", "not_existing", "(", "failure", ")", ":", "failure", ".", "trap", "(", "ProcessTerminated", "...
check if the container agent is running on the specified node .
train
false