id_within_dataset
int64
1
55.5k
snippet
stringlengths
19
14.2k
tokens
listlengths
6
1.63k
nl
stringlengths
6
352
split_within_dataset
stringclasses
1 value
is_duplicated
bool
2 classes
21,730
def all_shortest_paths(G, source, target, weight=None): if (weight is not None): (pred, dist) = nx.dijkstra_predecessor_and_distance(G, source, weight=weight) else: pred = nx.predecessor(G, source) if (source not in G): raise nx.NodeNotFound('Source {} is not in G'.format(source)) if (target not in pred): raise nx.NetworkXNoPath() stack = [[target, 0]] top = 0 while (top >= 0): (node, i) = stack[top] if (node == source): (yield [p for (p, n) in reversed(stack[:(top + 1)])]) if (len(pred[node]) > i): top += 1 if (top == len(stack)): stack.append([pred[node][i], 0]) else: stack[top] = [pred[node][i], 0] else: stack[(top - 1)][1] += 1 top -= 1
[ "def", "all_shortest_paths", "(", "G", ",", "source", ",", "target", ",", "weight", "=", "None", ")", ":", "if", "(", "weight", "is", "not", "None", ")", ":", "(", "pred", ",", "dist", ")", "=", "nx", ".", "dijkstra_predecessor_and_distance", "(", "G",...
compute all shortest paths in the graph .
train
false
21,731
def cyimport(import_path): if HAVE_CYTHON: import pyximport (py_importer, pyx_importer) = pyximport.install() mod = __import__(import_path, fromlist=[True]) pyximport.uninstall(py_importer, pyx_importer) return mod
[ "def", "cyimport", "(", "import_path", ")", ":", "if", "HAVE_CYTHON", ":", "import", "pyximport", "(", "py_importer", ",", "pyx_importer", ")", "=", "pyximport", ".", "install", "(", ")", "mod", "=", "__import__", "(", "import_path", ",", "fromlist", "=", ...
import a cython module if available .
train
false
21,732
def test_hubble(): data.hubble_deep_field()
[ "def", "test_hubble", "(", ")", ":", "data", ".", "hubble_deep_field", "(", ")" ]
test that "hubble" image can be loaded .
train
false
21,733
def aticq(ri, di, astrom): pos = erfa.s2c(ri, di) ppr = erfa.trxp(astrom[u'bpn'], pos) d = np.zeros_like(ppr) for j in range(2): before = norm((ppr - d)) after = erfa.ab(before, astrom[u'v'], astrom[u'em'], astrom[u'bm1']) d = (after - before) pnat = norm((ppr - d)) d = np.zeros_like(pnat) for j in range(5): before = norm((pnat - d)) after = erfa.ld(1.0, before, before, astrom[u'eh'], astrom[u'em'], 5e-08) d = (after - before) pco = norm((pnat - d)) (rc, dc) = erfa.c2s(pco) return (erfa.anp(rc), dc)
[ "def", "aticq", "(", "ri", ",", "di", ",", "astrom", ")", ":", "pos", "=", "erfa", ".", "s2c", "(", "ri", ",", "di", ")", "ppr", "=", "erfa", ".", "trxp", "(", "astrom", "[", "u'bpn'", "]", ",", "pos", ")", "d", "=", "np", ".", "zeros_like", ...
a slightly modified version of the erfa function eraaticq .
train
false
21,737
def require_prebuilt_dist(func): @wraps(func) def wrapper_func(self, args): ctx = self.ctx ctx.set_archs(self._archs) ctx.prepare_build_environment(user_sdk_dir=self.sdk_dir, user_ndk_dir=self.ndk_dir, user_android_api=self.android_api, user_ndk_ver=self.ndk_version) dist = self._dist if dist.needs_build: info_notify('No dist exists that meets your requirements, so one will be built.') build_dist_from_args(ctx, dist, args) func(self, args) return wrapper_func
[ "def", "require_prebuilt_dist", "(", "func", ")", ":", "@", "wraps", "(", "func", ")", "def", "wrapper_func", "(", "self", ",", "args", ")", ":", "ctx", "=", "self", ".", "ctx", "ctx", ".", "set_archs", "(", "self", ".", "_archs", ")", "ctx", ".", ...
decorator for toolchaincl methods .
train
false
21,738
def test_cache_update_metadata(config_stub, tmpdir): config_stub.data = {'storage': {'cache-size': 1024}, 'general': {'private-browsing': False}} url = 'http://qutebrowser.org' disk_cache = cache.DiskCache(str(tmpdir)) preload_cache(disk_cache, url, 'foo') assert (disk_cache.cacheSize() > 0) metadata = QNetworkCacheMetaData() metadata.setUrl(QUrl(url)) assert metadata.isValid() disk_cache.updateMetaData(metadata) assert (disk_cache.metaData(QUrl(url)) == metadata)
[ "def", "test_cache_update_metadata", "(", "config_stub", ",", "tmpdir", ")", ":", "config_stub", ".", "data", "=", "{", "'storage'", ":", "{", "'cache-size'", ":", "1024", "}", ",", "'general'", ":", "{", "'private-browsing'", ":", "False", "}", "}", "url", ...
test updating the meta data for an existing cache entry .
train
false
21,739
def _relpath_posix(path, start=os.path.curdir): if (not path): raise ValueError('no path specified') start_list = os.path.abspath(start).split(os.path.sep) path_list = os.path.abspath(path).split(os.path.sep) i = len(os.path.commonprefix([start_list, path_list])) rel_list = (([os.path.pardir] * (len(start_list) - i)) + path_list[i:]) if (not rel_list): return os.path.curdir return os.path.join(*rel_list)
[ "def", "_relpath_posix", "(", "path", ",", "start", "=", "os", ".", "path", ".", "curdir", ")", ":", "if", "(", "not", "path", ")", ":", "raise", "ValueError", "(", "'no path specified'", ")", "start_list", "=", "os", ".", "path", ".", "abspath", "(", ...
return a relative version of a path .
train
true
21,740
@utils.synchronized(SERIAL_LOCK) def release_port(host, port): ALLOCATED_PORTS.discard((host, port))
[ "@", "utils", ".", "synchronized", "(", "SERIAL_LOCK", ")", "def", "release_port", "(", "host", ",", "port", ")", ":", "ALLOCATED_PORTS", ".", "discard", "(", "(", "host", ",", "port", ")", ")" ]
release tcp port to be used next time .
train
false
21,741
def splittag(url): (path, delim, tag) = url.rpartition('#') if delim: return (path, tag) return (url, None)
[ "def", "splittag", "(", "url", ")", ":", "(", "path", ",", "delim", ",", "tag", ")", "=", "url", ".", "rpartition", "(", "'#'", ")", "if", "delim", ":", "return", "(", "path", ",", "tag", ")", "return", "(", "url", ",", "None", ")" ]
splittag --> /path .
train
false
21,743
def dict_to_qs(dct): itms = [('%s=%s' % (key, val)) for (key, val) in list(dct.items()) if (val is not None)] return '&'.join(itms)
[ "def", "dict_to_qs", "(", "dct", ")", ":", "itms", "=", "[", "(", "'%s=%s'", "%", "(", "key", ",", "val", ")", ")", "for", "(", "key", ",", "val", ")", "in", "list", "(", "dct", ".", "items", "(", ")", ")", "if", "(", "val", "is", "not", "N...
takes a dictionary and uses it to create a query string .
train
true
21,744
@utils.arg('server', metavar='<server>', help=_('Name or ID of server.')) def do_shelve_offload(cs, args): _find_server(cs, args.server).shelve_offload()
[ "@", "utils", ".", "arg", "(", "'server'", ",", "metavar", "=", "'<server>'", ",", "help", "=", "_", "(", "'Name or ID of server.'", ")", ")", "def", "do_shelve_offload", "(", "cs", ",", "args", ")", ":", "_find_server", "(", "cs", ",", "args", ".", "s...
remove a shelved server from the compute node .
train
false
21,745
def set_num_instances_async(instances, module=None, version=None): def _ResultHook(rpc): mapped_errors = [modules_service_pb.ModulesServiceError.INVALID_VERSION, modules_service_pb.ModulesServiceError.TRANSIENT_ERROR] _CheckAsyncResult(rpc, mapped_errors, {}) if (not isinstance(instances, (long, int))): raise TypeError("'instances' arg must be of type long or int.") request = modules_service_pb.SetNumInstancesRequest() request.set_instances(instances) if module: request.set_module(module) if version: request.set_version(version) response = modules_service_pb.SetNumInstancesResponse() return _MakeAsyncCall('SetNumInstances', request, response, _ResultHook)
[ "def", "set_num_instances_async", "(", "instances", ",", "module", "=", "None", ",", "version", "=", "None", ")", ":", "def", "_ResultHook", "(", "rpc", ")", ":", "mapped_errors", "=", "[", "modules_service_pb", ".", "ModulesServiceError", ".", "INVALID_VERSION"...
returns a userrpc to set the number of instances on the module version .
train
false
21,747
def test_oss_sample_wt_fit(): oss = OneSidedSelection(random_state=RND_SEED) assert_raises(RuntimeError, oss.sample, X, Y)
[ "def", "test_oss_sample_wt_fit", "(", ")", ":", "oss", "=", "OneSidedSelection", "(", "random_state", "=", "RND_SEED", ")", "assert_raises", "(", "RuntimeError", ",", "oss", ".", "sample", ",", "X", ",", "Y", ")" ]
test either if an error is raised when sample is called before fitting .
train
false
21,748
def complete_command(text): global rline_mpstate return rline_mpstate.command_map.keys()
[ "def", "complete_command", "(", "text", ")", ":", "global", "rline_mpstate", "return", "rline_mpstate", ".", "command_map", ".", "keys", "(", ")" ]
returns a list of valid commands starting with the first argument .
train
false
21,749
def validate_float_or_None(s): if ((s is None) or (s == u'None')): return None try: return float(s) except ValueError: raise ValueError((u'Could not convert "%s" to float or None' % s))
[ "def", "validate_float_or_None", "(", "s", ")", ":", "if", "(", "(", "s", "is", "None", ")", "or", "(", "s", "==", "u'None'", ")", ")", ":", "return", "None", "try", ":", "return", "float", "(", "s", ")", "except", "ValueError", ":", "raise", "Valu...
convert s to float .
train
false
21,750
def delete_ipsecpolicy(ipsecpolicy, profile=None): conn = _auth(profile) return conn.delete_ipsecpolicy(ipsecpolicy)
[ "def", "delete_ipsecpolicy", "(", "ipsecpolicy", ",", "profile", "=", "None", ")", ":", "conn", "=", "_auth", "(", "profile", ")", "return", "conn", ".", "delete_ipsecpolicy", "(", "ipsecpolicy", ")" ]
deletes the specified ipsecpolicy cli example: .
train
false
21,753
def manifestations_from_deployment(deployment, dataset_id): for node in deployment.nodes.itervalues(): if (dataset_id in node.manifestations): (yield (node.manifestations[dataset_id], node))
[ "def", "manifestations_from_deployment", "(", "deployment", ",", "dataset_id", ")", ":", "for", "node", "in", "deployment", ".", "nodes", ".", "itervalues", "(", ")", ":", "if", "(", "dataset_id", "in", "node", ".", "manifestations", ")", ":", "(", "yield", ...
extract all other manifestations of the supplied dataset_id from the supplied deployment .
train
false
21,754
def is_private_address(address): if (not is_valid_ipv4_address(address)): raise ValueError(("'%s' isn't a valid IPv4 address" % address)) if (address.startswith('10.') or address.startswith('192.168.') or address.startswith('127.')): return True if address.startswith('172.'): second_octet = int(address.split('.')[1]) if ((second_octet >= 16) and (second_octet <= 31)): return True return False
[ "def", "is_private_address", "(", "address", ")", ":", "if", "(", "not", "is_valid_ipv4_address", "(", "address", ")", ")", ":", "raise", "ValueError", "(", "(", "\"'%s' isn't a valid IPv4 address\"", "%", "address", ")", ")", "if", "(", "address", ".", "start...
checks if the ipv4 address is in a range belonging to the local network or loopback .
train
false
21,755
def ValidatePropertyKey(name, value): if (not value.has_id_or_name()): raise datastore_errors.BadValueError(('Incomplete key found for reference property %s.' % name))
[ "def", "ValidatePropertyKey", "(", "name", ",", "value", ")", ":", "if", "(", "not", "value", ".", "has_id_or_name", "(", ")", ")", ":", "raise", "datastore_errors", ".", "BadValueError", "(", "(", "'Incomplete key found for reference property %s.'", "%", "name", ...
raises an exception if the supplied datastore .
train
false
21,756
def make_accept(key): guid = '258EAFA5-E914-47DA-95CA-C5AB0DC85B11' return sha1(('%s%s' % (key, guid))).digest().encode('base64').strip()
[ "def", "make_accept", "(", "key", ")", ":", "guid", "=", "'258EAFA5-E914-47DA-95CA-C5AB0DC85B11'", "return", "sha1", "(", "(", "'%s%s'", "%", "(", "key", ",", "guid", ")", ")", ")", ".", "digest", "(", ")", ".", "encode", "(", "'base64'", ")", ".", "st...
create an "accept" response for a given key .
train
false
21,757
def ceil(mat, target=None): if (not target): target = mat err_code = _eigenmat.apply_ceil(mat.p_mat, target.p_mat) if err_code: raise generate_exception(err_code) return target
[ "def", "ceil", "(", "mat", ",", "target", "=", "None", ")", ":", "if", "(", "not", "target", ")", ":", "target", "=", "mat", "err_code", "=", "_eigenmat", ".", "apply_ceil", "(", "mat", ".", "p_mat", ",", "target", ".", "p_mat", ")", "if", "err_cod...
apply the ceil function to each element of the matrix mat .
train
false
21,759
def module_description_list(head): r = [] if head: item = head while item: item = item.contents r.append((item.name, item.shortname, item.longname, item.help)) item = item.next libvlc_module_description_list_release(head) return r
[ "def", "module_description_list", "(", "head", ")", ":", "r", "=", "[", "]", "if", "head", ":", "item", "=", "head", "while", "item", ":", "item", "=", "item", ".", "contents", "r", ".", "append", "(", "(", "item", ".", "name", ",", "item", ".", ...
convert a moduledescription linked list to a python list .
train
true
21,761
@register.tag def permission_form(parser, token): return PermissionFormNode.handle_token(parser, token, approved=True)
[ "@", "register", ".", "tag", "def", "permission_form", "(", "parser", ",", "token", ")", ":", "return", "PermissionFormNode", ".", "handle_token", "(", "parser", ",", "token", ",", "approved", "=", "True", ")" ]
renders an "add permissions" form for the given object .
train
false
21,762
def arg_split(s, posix=False, strict=True): lex = shlex.shlex(s, posix=posix) lex.whitespace_split = True lex.commenters = '' tokens = [] while True: try: tokens.append(next(lex)) except StopIteration: break except ValueError: if strict: raise tokens.append(lex.token) break return tokens
[ "def", "arg_split", "(", "s", ",", "posix", "=", "False", ",", "strict", "=", "True", ")", ":", "lex", "=", "shlex", ".", "shlex", "(", "s", ",", "posix", "=", "posix", ")", "lex", ".", "whitespace_split", "=", "True", "lex", ".", "commenters", "="...
split a command lines arguments in a shell-like manner .
train
false
21,763
@must_be_logged_in def dataverse_user_config_get(auth, **kwargs): user_addon = auth.user.get_addon('dataverse') user_has_auth = False if user_addon: user_has_auth = user_addon.has_auth return ({'result': {'userHasAuth': user_has_auth, 'urls': {'create': api_url_for('dataverse_add_user_account'), 'accounts': api_url_for('dataverse_account_list')}, 'hosts': DEFAULT_HOSTS}}, http.OK)
[ "@", "must_be_logged_in", "def", "dataverse_user_config_get", "(", "auth", ",", "**", "kwargs", ")", ":", "user_addon", "=", "auth", ".", "user", ".", "get_addon", "(", "'dataverse'", ")", "user_has_auth", "=", "False", "if", "user_addon", ":", "user_has_auth", ...
view for getting a json representation of the logged-in users dataverse user settings .
train
false
21,764
def add_default_fields(elem, default_fields): for field in default_fields.get(elem.tag, []): if (elem.find(field) is None): SubElement(elem, field) for child in elem: add_default_fields(child, default_fields)
[ "def", "add_default_fields", "(", "elem", ",", "default_fields", ")", ":", "for", "field", "in", "default_fields", ".", "get", "(", "elem", ".", "tag", ",", "[", "]", ")", ":", "if", "(", "elem", ".", "find", "(", "field", ")", "is", "None", ")", "...
add blank elements and subelements specified in default_fields .
train
false
21,765
def find_homologs(query_file, subject_genome, e_value, max_hits, working_dir, blast_mat_root, wordsize, percent_aligned, extra_params={}, require_hit=False, DEBUG=True): start_time = time() raw_blast_output = [] seqs = open(query_file, 'U').readlines() if DEBUG: print ('BLASTING %s vs. %s' % (query_file, subject_genome)) blast_db = subject_genome raw_output_data = blast_genome(seqs, blast_db, e_value, max_hits, wordsize, working_dir, blast_mat_root, extra_params, DEBUG=DEBUG) if DEBUG: print 'Length of raw BLAST results:', len(raw_output_data) curr_blast_result = BlastResult(raw_output_data) align_filter = make_percent_align_filter(percent_aligned) (filtered_ids, removed_ids) = query_ids_from_blast_result(curr_blast_result, align_filter, DEBUG=DEBUG) return (raw_output_data, filtered_ids, removed_ids)
[ "def", "find_homologs", "(", "query_file", ",", "subject_genome", ",", "e_value", ",", "max_hits", ",", "working_dir", ",", "blast_mat_root", ",", "wordsize", ",", "percent_aligned", ",", "extra_params", "=", "{", "}", ",", "require_hit", "=", "False", ",", "D...
blast query_file against subject_genome query_file -- .
train
false
21,766
def _ParseQueueYaml(unused_self, root_path): if (root_path is None): return None for queueyaml in ('queue.yaml', 'queue.yml'): try: fh = open(os.path.join(root_path, queueyaml), 'r') except IOError: continue try: queue_info = queueinfo.LoadSingleQueue(fh) return queue_info finally: fh.close() return None
[ "def", "_ParseQueueYaml", "(", "unused_self", ",", "root_path", ")", ":", "if", "(", "root_path", "is", "None", ")", ":", "return", "None", "for", "queueyaml", "in", "(", "'queue.yaml'", ",", "'queue.yml'", ")", ":", "try", ":", "fh", "=", "open", "(", ...
loads the queue .
train
false
21,767
def check_abstract_methods(base, subclass): for attrname in dir(base): if attrname.startswith('_'): continue attr = getattr(base, attrname) if is_abstract_method(attr): oattr = getattr(subclass, attrname) if is_abstract_method(oattr): raise Exception(('%s.%s not overridden' % (subclass.__name__, attrname)))
[ "def", "check_abstract_methods", "(", "base", ",", "subclass", ")", ":", "for", "attrname", "in", "dir", "(", "base", ")", ":", "if", "attrname", ".", "startswith", "(", "'_'", ")", ":", "continue", "attr", "=", "getattr", "(", "base", ",", "attrname", ...
raises assertionerror if subclass does not override a method on base that is marked as an abstract method .
train
false
21,769
def odnoklassniki_api(backend, data, api_url, public_key, client_secret, request_type='oauth'): data.update({'application_key': public_key, 'format': 'JSON'}) if (request_type == 'oauth'): data['sig'] = odnoklassniki_oauth_sig(data, client_secret) elif (request_type == 'iframe_session'): data['sig'] = odnoklassniki_iframe_sig(data, data['session_secret_key']) elif (request_type == 'iframe_nosession'): data['sig'] = odnoklassniki_iframe_sig(data, client_secret) else: msg = 'Unknown request type {0}. How should it be signed?' raise AuthFailed(backend, msg.format(request_type)) return backend.get_json((api_url + 'fb.do'), params=data)
[ "def", "odnoklassniki_api", "(", "backend", ",", "data", ",", "api_url", ",", "public_key", ",", "client_secret", ",", "request_type", "=", "'oauth'", ")", ":", "data", ".", "update", "(", "{", "'application_key'", ":", "public_key", ",", "'format'", ":", "'...
calls odnoklassniki rest api method URL .
train
false
21,770
def add_wsgi_middleware(application): return webapp._config_handle.add_wsgi_middleware(application)
[ "def", "add_wsgi_middleware", "(", "application", ")", ":", "return", "webapp", ".", "_config_handle", ".", "add_wsgi_middleware", "(", "application", ")" ]
wrap wsgi middleware around a wsgi application object .
train
false
21,771
def monomial_min(*monoms): M = list(monoms[0]) for N in monoms[1:]: for (i, n) in enumerate(N): M[i] = min(M[i], n) return tuple(M)
[ "def", "monomial_min", "(", "*", "monoms", ")", ":", "M", "=", "list", "(", "monoms", "[", "0", "]", ")", "for", "N", "in", "monoms", "[", "1", ":", "]", ":", "for", "(", "i", ",", "n", ")", "in", "enumerate", "(", "N", ")", ":", "M", "[", ...
returns minimal degree for each variable in a set of monomials .
train
false
21,773
def codeword(bits): return (int(bits, 2), len(bits))
[ "def", "codeword", "(", "bits", ")", ":", "return", "(", "int", "(", "bits", ",", "2", ")", ",", "len", "(", "bits", ")", ")" ]
return tuple rather than list .
train
false
21,775
def print_url_destination_goal_details(goal_details): print('------ Url Destination Goal -------') print(('Goal URL = %s' % goal_details.get('url'))) print(('Case Sensitive = %s' % goal_details.get('caseSensitive'))) print(('Match Type = %s' % goal_details.get('matchType'))) print(('First Step Required = %s' % goal_details.get('firstStepRequired'))) print('------ Url Destination Goal Steps -------') for goal_step in goal_details.get('steps', []): print(('Step Number = %s' % goal_step.get('number'))) print(('Step Name = %s' % goal_step.get('name'))) print(('Step URL = %s' % goal_step.get('url'))) if (not goal_details.get('steps')): print('No Steps Configured')
[ "def", "print_url_destination_goal_details", "(", "goal_details", ")", ":", "print", "(", "'------ Url Destination Goal -------'", ")", "print", "(", "(", "'Goal URL = %s'", "%", "goal_details", ".", "get", "(", "'url'", ")", ")", ")", "print", "(", "(", ...
prints all the url destination goal type info .
train
false
21,776
def test_lex_expression_integer(): objs = tokenize('(foo 2)') assert (objs == [HyExpression([HySymbol('foo'), HyInteger(2)])])
[ "def", "test_lex_expression_integer", "(", ")", ":", "objs", "=", "tokenize", "(", "'(foo 2)'", ")", "assert", "(", "objs", "==", "[", "HyExpression", "(", "[", "HySymbol", "(", "'foo'", ")", ",", "HyInteger", "(", "2", ")", "]", ")", "]", ")" ]
make sure expressions can produce integers .
train
false
21,777
def arrayize(seq): array = [] for item in seq: assert (type(item) is str) escaped = item.replace('\\', '\\\\').replace('"', '\\"') quoted = (('"' + escaped) + '"') array.append(quoted) joined = ', '.join(array) return (('{' + joined) + '}')
[ "def", "arrayize", "(", "seq", ")", ":", "array", "=", "[", "]", "for", "item", "in", "seq", ":", "assert", "(", "type", "(", "item", ")", "is", "str", ")", "escaped", "=", "item", ".", "replace", "(", "'\\\\'", ",", "'\\\\\\\\'", ")", ".", "repl...
given a sequence of str .
train
false
21,780
def get_tag_uri(url, date): bits = urlparse(url) d = '' if (date is not None): d = (',%s' % datetime_safe.new_datetime(date).strftime('%Y-%m-%d')) return ('tag:%s%s:%s/%s' % (bits.hostname, d, bits.path, bits.fragment))
[ "def", "get_tag_uri", "(", "url", ",", "date", ")", ":", "bits", "=", "urlparse", "(", "url", ")", "d", "=", "''", "if", "(", "date", "is", "not", "None", ")", ":", "d", "=", "(", "',%s'", "%", "datetime_safe", ".", "new_datetime", "(", "date", "...
creates a taguri .
train
false
21,781
def file_upload_quota(request): request.upload_handlers.insert(0, QuotaUploadHandler()) return file_upload_echo(request)
[ "def", "file_upload_quota", "(", "request", ")", ":", "request", ".", "upload_handlers", ".", "insert", "(", "0", ",", "QuotaUploadHandler", "(", ")", ")", "return", "file_upload_echo", "(", "request", ")" ]
dynamically add in an upload handler .
train
false
21,782
@webserviceHandle('/reloadmodule') def reloadmodule(): for child in GlobalObject().root.childsmanager._childs.values(): child.callbackChildNotForResult('sreload') return 'reload'
[ "@", "webserviceHandle", "(", "'/reloadmodule'", ")", "def", "reloadmodule", "(", ")", ":", "for", "child", "in", "GlobalObject", "(", ")", ".", "root", ".", "childsmanager", ".", "_childs", ".", "values", "(", ")", ":", "child", ".", "callbackChildNotForRes...
reload module .
train
false
21,784
@pytest.mark.parametrize('text, deleted, rest', [('test delete|foobar', 'delete', 'test |foobar'), ('test delete |foobar', 'delete ', 'test |foobar'), ('open -t github.com/foo/bar |', 'bar ', 'open -t github.com/foo/|'), ('open -t |github.com/foo/bar', '-t ', 'open |github.com/foo/bar'), ('open foo/bar.baz|', 'bar.baz', 'open foo/|')]) def test_rl_unix_filename_rubout(lineedit, bridge, text, deleted, rest): _validate_deletion(lineedit, bridge, bridge.rl_unix_filename_rubout, text, deleted, rest)
[ "@", "pytest", ".", "mark", ".", "parametrize", "(", "'text, deleted, rest'", ",", "[", "(", "'test delete|foobar'", ",", "'delete'", ",", "'test |foobar'", ")", ",", "(", "'test delete |foobar'", ",", "'delete '", ",", "'test |foobar'", ")", ",", "(", "'open -t...
delete filename segment and see if it comes back with yank .
train
false
21,785
def initial_nodes_to_merge(tree): to_process = set([]) for n in tree.tips(): sibs_are_tips = [s.istip() for s in n.siblings()] if all(sibs_are_tips): to_process.add(n.Parent) return to_process
[ "def", "initial_nodes_to_merge", "(", "tree", ")", ":", "to_process", "=", "set", "(", "[", "]", ")", "for", "n", "in", "tree", ".", "tips", "(", ")", ":", "sibs_are_tips", "=", "[", "s", ".", "istip", "(", ")", "for", "s", "in", "n", ".", "sibli...
determine what nodes are safe to process first the first nodes to process are those internal nodes that have tips as children .
train
false
21,786
def test_scharr_zeros(): result = filters.scharr(np.zeros((10, 10)), np.ones((10, 10), bool)) assert np.all((result < 1e-16))
[ "def", "test_scharr_zeros", "(", ")", ":", "result", "=", "filters", ".", "scharr", "(", "np", ".", "zeros", "(", "(", "10", ",", "10", ")", ")", ",", "np", ".", "ones", "(", "(", "10", ",", "10", ")", ",", "bool", ")", ")", "assert", "np", "...
scharr on an array of all zeros .
train
false
21,787
def get_serials1(path=STORAGE1): if (not os.path.isfile(path)): return [] storage = parse_preference(path) salt = storage.get('AmazonSaltKey') if (salt and (len(salt) == 16)): obfuscation = AndroidObfuscationV2(a2b_hex(salt)) else: obfuscation = AndroidObfuscation() def get_value(key): encrypted_key = obfuscation.encrypt(key) encrypted_value = storage.get(encrypted_key) if encrypted_value: return obfuscation.decrypt(encrypted_value) return '' try: dsnid = get_value('DsnId') except: sys.stderr.write('cannot get DsnId\n') return [] try: tokens = set(get_value('kindle.account.tokens').split(',')) except: return [] serials = [] if dsnid: serials.append(dsnid) for token in tokens: if token: serials.append(('%s%s' % (dsnid, token))) serials.append(token) return serials
[ "def", "get_serials1", "(", "path", "=", "STORAGE1", ")", ":", "if", "(", "not", "os", ".", "path", ".", "isfile", "(", "path", ")", ")", ":", "return", "[", "]", "storage", "=", "parse_preference", "(", "path", ")", "salt", "=", "storage", ".", "g...
get serials from androids shared preference xml .
train
false
21,788
def instance_update_and_get_original(context, instance_uuid, values, columns_to_join=None, expected=None): rv = IMPL.instance_update_and_get_original(context, instance_uuid, values, columns_to_join=columns_to_join, expected=expected) return rv
[ "def", "instance_update_and_get_original", "(", "context", ",", "instance_uuid", ",", "values", ",", "columns_to_join", "=", "None", ",", "expected", "=", "None", ")", ":", "rv", "=", "IMPL", ".", "instance_update_and_get_original", "(", "context", ",", "instance_...
set the given properties on an instance and update it .
train
false
21,789
def list_subnets(call=None, kwargs=None): if (call == 'action'): raise SaltCloudSystemExit('The avail_sizes function must be called with -f or --function, or with the --list-sizes option') global netconn if (not netconn): netconn = get_conn(NetworkManagementClient) if (('group' in kwargs) and ('resource_group' not in kwargs)): kwargs['resource_group'] = kwargs['group'] if ('resource_group' not in kwargs): raise SaltCloudSystemExit('A resource_group must be specified as "group" or "resource_group"') if ('network' not in kwargs): raise SaltCloudSystemExit('A "network" must be specified') region = get_location() bank = 'cloud/metadata/azurearm/{0}/{1}'.format(region, kwargs['network']) ret = {} subnets = netconn.subnets.list(kwargs['resource_group'], kwargs['network']) for subnet in subnets: ret[subnet.name] = make_safe(subnet) ret[subnet.name]['ip_configurations'] = {} for ip_ in subnet.ip_configurations: comps = ip_.id.split('/') name = comps[(-1)] ret[subnet.name]['ip_configurations'][name] = make_safe(ip_) ret[subnet.name]['ip_configurations'][name]['subnet'] = subnet.name ret[subnet.name]['resource_group'] = kwargs['resource_group'] return ret
[ "def", "list_subnets", "(", "call", "=", "None", ",", "kwargs", "=", "None", ")", ":", "if", "(", "call", "==", "'action'", ")", ":", "raise", "SaltCloudSystemExit", "(", "'The avail_sizes function must be called with -f or --function, or with the --list-sizes option'", ...
fetches a list of all networks for a tenant cli example: .
train
false
21,792
def move_by_idmap(map, **kwargs): def task_id_in_map(body, message): return map.get(body[u'id']) return move(task_id_in_map, limit=len(map), **kwargs)
[ "def", "move_by_idmap", "(", "map", ",", "**", "kwargs", ")", ":", "def", "task_id_in_map", "(", "body", ",", "message", ")", ":", "return", "map", ".", "get", "(", "body", "[", "u'id'", "]", ")", "return", "move", "(", "task_id_in_map", ",", "limit", ...
move tasks by matching from a task_id: queue mapping .
train
false
21,794
def parse_content_type_header(content_type): if (';' in content_type): split = content_type.split(';') media = split[0] options = {} for pair in split[1:]: split_pair = pair.split('=', 1) if (len(split_pair) != 2): continue key = split_pair[0].strip() value = split_pair[1].strip() options[key] = value else: media = content_type options = {} result = (media, options) return result
[ "def", "parse_content_type_header", "(", "content_type", ")", ":", "if", "(", "';'", "in", "content_type", ")", ":", "split", "=", "content_type", ".", "split", "(", "';'", ")", "media", "=", "split", "[", "0", "]", "options", "=", "{", "}", "for", "pa...
parse and normalize request content type and return a tuple with the content type and the options .
train
false
21,796
def _get_xdg_cache_dir(): path = os.environ.get('XDG_CACHE_HOME') if (path is None): path = _get_home() if (path is not None): path = os.path.join(path, '.cache', 'matplotlib') return path
[ "def", "_get_xdg_cache_dir", "(", ")", ":", "path", "=", "os", ".", "environ", ".", "get", "(", "'XDG_CACHE_HOME'", ")", "if", "(", "path", "is", "None", ")", ":", "path", "=", "_get_home", "(", ")", "if", "(", "path", "is", "not", "None", ")", ":"...
returns the xdg cache directory .
train
false
21,797
def IgnoreHeadersRewriter(response): for h in _IGNORE_RESPONSE_HEADERS: if (h in response.headers): del response.headers[h]
[ "def", "IgnoreHeadersRewriter", "(", "response", ")", ":", "for", "h", "in", "_IGNORE_RESPONSE_HEADERS", ":", "if", "(", "h", "in", "response", ".", "headers", ")", ":", "del", "response", ".", "headers", "[", "h", "]" ]
ignore specific response headers .
train
false
21,798
def adjust_sff_cycles(sff_data, num_cycles): num_flows = (num_cycles * 4) (header, reads) = sff_data h = header.copy() h['number_of_flows_per_read'] = num_flows h['header_length'] = (num_flows + 40) h['index_offset'] = 0 h['index_length'] = 0 h['flow_chars'] = ('TACG' * num_cycles) read_clip_keys = ['clip_qual_left', 'clip_qual_right', 'clip_adapter_left', 'clip_adapter_right'] def adjust_read(read): r = read.copy() r['flowgram_values'] = read['flowgram_values'][:num_flows] enumerated_flow_indices = list(enumerate(_cumulative_sum(read['flow_index_per_base']))) num_bases = 6789 for (base_idx, flow_idx) in reversed(enumerated_flow_indices): num_bases = (base_idx + 1) if (flow_idx <= num_flows): break r['number_of_bases'] = num_bases r['flow_index_per_base'] = read['flow_index_per_base'][:num_bases] r['Bases'] = read['Bases'][:num_bases] r['quality_scores'] = read['quality_scores'][:num_bases] for key in read_clip_keys: if (r[key] > num_bases): r[key] = num_bases return r return (h, imap(adjust_read, reads))
[ "def", "adjust_sff_cycles", "(", "sff_data", ",", "num_cycles", ")", ":", "num_flows", "=", "(", "num_cycles", "*", "4", ")", "(", "header", ",", "reads", ")", "=", "sff_data", "h", "=", "header", ".", "copy", "(", ")", "h", "[", "'number_of_flows_per_re...
modify the number of cycles in a set of sff data .
train
false
21,799
def basicAuth(realm): assert ('"' not in realm), 'Realms cannot contain the " (quote) character.' return ('Basic realm="%s"' % realm)
[ "def", "basicAuth", "(", "realm", ")", ":", "assert", "(", "'\"'", "not", "in", "realm", ")", ",", "'Realms cannot contain the \" (quote) character.'", "return", "(", "'Basic realm=\"%s\"'", "%", "realm", ")" ]
challengenes the client for a basic authentication .
train
false
21,800
def reset(): _runtime.reset()
[ "def", "reset", "(", ")", ":", "_runtime", ".", "reset", "(", ")" ]
return a string that resets the cgi and browser to a known state .
train
false
21,801
def build_datastore_path(datastore_name, path): return ('[%s] %s' % (datastore_name, path))
[ "def", "build_datastore_path", "(", "datastore_name", ",", "path", ")", ":", "return", "(", "'[%s] %s'", "%", "(", "datastore_name", ",", "path", ")", ")" ]
build the datastore compliant path .
train
false
21,803
def set_time_server(time_server='time.apple.com'): cmd = 'systemsetup -setnetworktimeserver {0}'.format(time_server) salt.utils.mac_utils.execute_return_success(cmd) return (time_server in get_time_server())
[ "def", "set_time_server", "(", "time_server", "=", "'time.apple.com'", ")", ":", "cmd", "=", "'systemsetup -setnetworktimeserver {0}'", ".", "format", "(", "time_server", ")", "salt", ".", "utils", ".", "mac_utils", ".", "execute_return_success", "(", "cmd", ")", ...
designates a network time server .
train
true
21,804
def document_model_driven_signature(section, name, operation_model, include=None, exclude=None): params = {} if operation_model.input_shape: params = operation_model.input_shape.members parameter_names = list(params.keys()) if (include is not None): for member in include: parameter_names.append(member.name) if (exclude is not None): for member in exclude: if (member in parameter_names): parameter_names.remove(member) signature_params = '' if parameter_names: signature_params = '**kwargs' section.style.start_sphinx_py_method(name, signature_params)
[ "def", "document_model_driven_signature", "(", "section", ",", "name", ",", "operation_model", ",", "include", "=", "None", ",", "exclude", "=", "None", ")", ":", "params", "=", "{", "}", "if", "operation_model", ".", "input_shape", ":", "params", "=", "oper...
documents the signature of a model-driven method .
train
false
21,805
def register_pickle(): def pickle_dumps(obj, dumper=pickle.dumps): return dumper(obj, protocol=pickle_protocol) registry.register('pickle', pickle_dumps, unpickle, content_type='application/x-python-serialize', content_encoding='binary')
[ "def", "register_pickle", "(", ")", ":", "def", "pickle_dumps", "(", "obj", ",", "dumper", "=", "pickle", ".", "dumps", ")", ":", "return", "dumper", "(", "obj", ",", "protocol", "=", "pickle_protocol", ")", "registry", ".", "register", "(", "'pickle'", ...
the fastest serialization method .
train
false
21,806
def LINEARREG_INTERCEPT(ds, count, timeperiod=(- (2 ** 31))): return call_talib_with_ds(ds, count, talib.LINEARREG_INTERCEPT, timeperiod)
[ "def", "LINEARREG_INTERCEPT", "(", "ds", ",", "count", ",", "timeperiod", "=", "(", "-", "(", "2", "**", "31", ")", ")", ")", ":", "return", "call_talib_with_ds", "(", "ds", ",", "count", ",", "talib", ".", "LINEARREG_INTERCEPT", ",", "timeperiod", ")" ]
linear regression intercept .
train
false
21,807
def a_valid_tap(tap): regex = re.compile('^([\\w-]+)/(homebrew-)?([\\w-]+)$') return regex.match(tap)
[ "def", "a_valid_tap", "(", "tap", ")", ":", "regex", "=", "re", ".", "compile", "(", "'^([\\\\w-]+)/(homebrew-)?([\\\\w-]+)$'", ")", "return", "regex", ".", "match", "(", "tap", ")" ]
returns true if the tap is valid .
train
false
21,809
def get_datacenter_id(): return config.get_cloud_config_value('datacenter_id', get_configured_provider(), __opts__, search_global=False)
[ "def", "get_datacenter_id", "(", ")", ":", "return", "config", ".", "get_cloud_config_value", "(", "'datacenter_id'", ",", "get_configured_provider", "(", ")", ",", "__opts__", ",", "search_global", "=", "False", ")" ]
return datacenter id from provider configuration .
train
false
21,810
def FindStartOfExpressionInLine(line, endpos, stack): i = endpos while (i >= 0): char = line[i] if (char in ')]}'): stack.append(char) elif (char == '>'): if ((i > 0) and ((line[(i - 1)] == '-') or Match('\\s>=\\s', line[(i - 1):]) or Search('\\boperator\\s*$', line[0:i]))): i -= 1 else: stack.append('>') elif (char == '<'): if ((i > 0) and (line[(i - 1)] == '<')): i -= 1 elif (stack and (stack[(-1)] == '>')): stack.pop() if (not stack): return (i, None) elif (char in '([{'): while (stack and (stack[(-1)] == '>')): stack.pop() if (not stack): return ((-1), None) if (((char == '(') and (stack[(-1)] == ')')) or ((char == '[') and (stack[(-1)] == ']')) or ((char == '{') and (stack[(-1)] == '}'))): stack.pop() if (not stack): return (i, None) else: return ((-1), None) elif (char == ';'): while (stack and (stack[(-1)] == '>')): stack.pop() if (not stack): return ((-1), None) i -= 1 return ((-1), stack)
[ "def", "FindStartOfExpressionInLine", "(", "line", ",", "endpos", ",", "stack", ")", ":", "i", "=", "endpos", "while", "(", "i", ">=", "0", ")", ":", "char", "=", "line", "[", "i", "]", "if", "(", "char", "in", "')]}'", ")", ":", "stack", ".", "a...
find position at the matching startchar .
train
true
21,812
def imshow_collection(ic, plugin=None, **plugin_args): return call_plugin('imshow_collection', ic, plugin=plugin, **plugin_args)
[ "def", "imshow_collection", "(", "ic", ",", "plugin", "=", "None", ",", "**", "plugin_args", ")", ":", "return", "call_plugin", "(", "'imshow_collection'", ",", "ic", ",", "plugin", "=", "plugin", ",", "**", "plugin_args", ")" ]
display all images in the collection .
train
false
21,813
def image_from_path(path): with lopen(path, u'rb') as f: return image_from_data(f.read())
[ "def", "image_from_path", "(", "path", ")", ":", "with", "lopen", "(", "path", ",", "u'rb'", ")", "as", "f", ":", "return", "image_from_data", "(", "f", ".", "read", "(", ")", ")" ]
load an image from the specified path .
train
false
21,814
def empty_if_not_sysadmin(key, data, errors, context): from ckan.lib.navl.validators import empty user = context.get('user') ignore_auth = context.get('ignore_auth') if (ignore_auth or (user and authz.is_sysadmin(user))): return empty(key, data, errors, context)
[ "def", "empty_if_not_sysadmin", "(", "key", ",", "data", ",", "errors", ",", "context", ")", ":", "from", "ckan", ".", "lib", ".", "navl", ".", "validators", "import", "empty", "user", "=", "context", ".", "get", "(", "'user'", ")", "ignore_auth", "=", ...
only sysadmins may pass this value .
train
false
21,815
def verbose_clean_up_f(f): deletion_list = [l.strip() for l in f] remove_all(deletion_list) print 'Post-run clean-up complete.' return True
[ "def", "verbose_clean_up_f", "(", "f", ")", ":", "deletion_list", "=", "[", "l", ".", "strip", "(", ")", "for", "l", "in", "f", "]", "remove_all", "(", "deletion_list", ")", "print", "'Post-run clean-up complete.'", "return", "True" ]
removes list of files in f f: file containing list of filepaths example f: f1 .
train
false
21,817
def start_server(data_stream, port=5557, hwm=20): logging.basicConfig(level='INFO') context = zmq.Context() socket = context.socket(zmq.PUSH) socket.set_hwm(hwm) socket.bind('tcp://*:{}'.format(port)) it = data_stream logger.info('server started') while True: try: data = next(it) stop = False logger.debug('sending {} arrays'.format(len(data))) except StopIteration: it = data_stream data = None stop = True logger.debug('sending StopIteration') send_arrays(socket, data, stop=stop)
[ "def", "start_server", "(", "data_stream", ",", "port", "=", "5557", ",", "hwm", "=", "20", ")", ":", "logging", ".", "basicConfig", "(", "level", "=", "'INFO'", ")", "context", "=", "zmq", ".", "Context", "(", ")", "socket", "=", "context", ".", "so...
start all instances for the given version of the server .
train
true
21,818
def make_loc_files(file_creator, size=None): if size: body = ('*' * size) else: body = 'This is a test.' filename1 = file_creator.create_file(os.path.join('some_directory', 'text1.txt'), body) filename2 = file_creator.create_file(os.path.join('some_directory', 'another_directory', 'text2.txt'), body) filename1 = six.text_type(filename1) filename2 = six.text_type(filename2) return [filename1, filename2, os.path.dirname(filename2), os.path.dirname(filename1)]
[ "def", "make_loc_files", "(", "file_creator", ",", "size", "=", "None", ")", ":", "if", "size", ":", "body", "=", "(", "'*'", "*", "size", ")", "else", ":", "body", "=", "'This is a test.'", "filename1", "=", "file_creator", ".", "create_file", "(", "os"...
this sets up the test by making a directory named some_directory .
train
false
21,819
@contextmanager def set_siteconfig_settings(settings): siteconfig = SiteConfiguration.objects.get_current() old_settings = {} for (setting, value) in six.iteritems(settings): old_settings[setting] = siteconfig.get(setting) siteconfig.set(setting, value) siteconfig.save() load_site_config() try: (yield) finally: for (setting, value) in six.iteritems(old_settings): siteconfig.set(setting, value) siteconfig.save() load_site_config()
[ "@", "contextmanager", "def", "set_siteconfig_settings", "(", "settings", ")", ":", "siteconfig", "=", "SiteConfiguration", ".", "objects", ".", "get_current", "(", ")", "old_settings", "=", "{", "}", "for", "(", "setting", ",", "value", ")", "in", "six", "....
a context manager to toggle site configuration settings .
train
false
21,821
def get_dependent_dists(dists, dist): if (dist not in dists): raise DistlibException((u'given distribution %r is not a member of the list' % dist.name)) graph = make_graph(dists) dep = [dist] todo = graph.reverse_list[dist] while todo: d = todo.pop() dep.append(d) for succ in graph.reverse_list[d]: if (succ not in dep): todo.append(succ) dep.pop(0) return dep
[ "def", "get_dependent_dists", "(", "dists", ",", "dist", ")", ":", "if", "(", "dist", "not", "in", "dists", ")", ":", "raise", "DistlibException", "(", "(", "u'given distribution %r is not a member of the list'", "%", "dist", ".", "name", ")", ")", "graph", "=...
recursively generate a list of distributions from *dists* that are dependent on *dist* .
train
true
21,822
def find_router_gw_port(context, cluster, router_id): results = query_lrouter_lports(cluster, router_id, relations='LogicalPortAttachment') for lport in results: if ('_relations' in lport): attachment = lport['_relations'].get('LogicalPortAttachment') if (attachment and (attachment.get('type') == 'L3GatewayAttachment')): return lport
[ "def", "find_router_gw_port", "(", "context", ",", "cluster", ",", "router_id", ")", ":", "results", "=", "query_lrouter_lports", "(", "cluster", ",", "router_id", ",", "relations", "=", "'LogicalPortAttachment'", ")", "for", "lport", "in", "results", ":", "if",...
retrieves the external gateway port for a nvp logical router .
train
false
21,823
def docx_path(name): return absjoin(test_file_dir, ('%s.docx' % name))
[ "def", "docx_path", "(", "name", ")", ":", "return", "absjoin", "(", "test_file_dir", ",", "(", "'%s.docx'", "%", "name", ")", ")" ]
return the absolute path to test .
train
false
21,824
@map_project_slug @map_subproject_slug def redirect_page_with_filename(request, project, subproject, filename): return HttpResponseRedirect(resolve((subproject or project), filename=filename))
[ "@", "map_project_slug", "@", "map_subproject_slug", "def", "redirect_page_with_filename", "(", "request", ",", "project", ",", "subproject", ",", "filename", ")", ":", "return", "HttpResponseRedirect", "(", "resolve", "(", "(", "subproject", "or", "project", ")", ...
redirect /page/file .
train
false
21,825
@debug @timeit def calculate_series(e, x, logx=None): from sympy.polys import cancel for t in e.lseries(x, logx=logx): t = cancel(t) if (t.has(exp) and t.has(log)): t = powdenest(t) if t.simplify(): break return t
[ "@", "debug", "@", "timeit", "def", "calculate_series", "(", "e", ",", "x", ",", "logx", "=", "None", ")", ":", "from", "sympy", ".", "polys", "import", "cancel", "for", "t", "in", "e", ".", "lseries", "(", "x", ",", "logx", "=", "logx", ")", ":"...
calculates at least one term of the series of "e" in "x" .
train
false
21,826
def _getExperimentDescriptionSchema(): installPath = os.path.dirname(os.path.abspath(__file__)) schemaFilePath = os.path.join(installPath, 'experimentDescriptionSchema.json') return json.loads(open(schemaFilePath, 'r').read())
[ "def", "_getExperimentDescriptionSchema", "(", ")", ":", "installPath", "=", "os", ".", "path", ".", "dirname", "(", "os", ".", "path", ".", "abspath", "(", "__file__", ")", ")", "schemaFilePath", "=", "os", ".", "path", ".", "join", "(", "installPath", ...
returns the experiment description schema .
train
true
21,827
@xmlrpc_func(returns='struct[]', args=['string', 'string', 'string']) def get_authors(apikey, username, password): authenticate(username, password) return [author_structure(author) for author in Author.objects.filter(is_staff=True)]
[ "@", "xmlrpc_func", "(", "returns", "=", "'struct[]'", ",", "args", "=", "[", "'string'", ",", "'string'", ",", "'string'", "]", ")", "def", "get_authors", "(", "apikey", ",", "username", ",", "password", ")", ":", "authenticate", "(", "username", ",", "...
return the published authors .
train
true
21,828
def scoreatpercentile(data, percentile): per = np.array(percentile) cdf = empiricalcdf(data) interpolator = interpolate.interp1d(np.sort(cdf), np.sort(data)) return interpolator((per / 100.0))
[ "def", "scoreatpercentile", "(", "data", ",", "percentile", ")", ":", "per", "=", "np", ".", "array", "(", "percentile", ")", "cdf", "=", "empiricalcdf", "(", "data", ")", "interpolator", "=", "interpolate", ".", "interp1d", "(", "np", ".", "sort", "(", ...
calculate the score at the given per percentile of the sequence a .
train
false
21,829
def modifyStrings(o, modFunct, titlesRefs, namesRefs, charactersRefs): if isinstance(o, (unicode, str)): return modFunct(o, titlesRefs, namesRefs, charactersRefs) elif isinstance(o, (list, tuple, dict)): _stillorig = 1 if isinstance(o, (list, tuple)): keys = xrange(len(o)) else: keys = o.keys() for i in keys: v = o[i] if isinstance(v, (unicode, str)): if _stillorig: o = copy(o) _stillorig = 0 o[i] = modFunct(v, titlesRefs, namesRefs, charactersRefs) elif isinstance(v, (list, tuple)): modifyStrings(o[i], modFunct, titlesRefs, namesRefs, charactersRefs) return o
[ "def", "modifyStrings", "(", "o", ",", "modFunct", ",", "titlesRefs", ",", "namesRefs", ",", "charactersRefs", ")", ":", "if", "isinstance", "(", "o", ",", "(", "unicode", ",", "str", ")", ")", ":", "return", "modFunct", "(", "o", ",", "titlesRefs", ",...
modify a string .
train
false
21,830
def getLabelString(dictionary): for key in dictionary: labelIndex = key.find('label') if (labelIndex >= 0): return dictionary[key] return ''
[ "def", "getLabelString", "(", "dictionary", ")", ":", "for", "key", "in", "dictionary", ":", "labelIndex", "=", "key", ".", "find", "(", "'label'", ")", "if", "(", "labelIndex", ">=", "0", ")", ":", "return", "dictionary", "[", "key", "]", "return", "'...
get the label string for the dictionary .
train
false
21,831
def test_pkl_yaml_src_field(): try: (fd, fn) = mkstemp() close(fd) o = DumDum() o.x = ('a', 'b', 'c') serial.save(fn, o) yaml = (("!pkl: '" + fn) + "'\n") loaded = load(yaml) assert (loaded.x == ('a', 'b', 'c')) assert (loaded.yaml_src == yaml) finally: os.remove(fn)
[ "def", "test_pkl_yaml_src_field", "(", ")", ":", "try", ":", "(", "fd", ",", "fn", ")", "=", "mkstemp", "(", ")", "close", "(", "fd", ")", "o", "=", "DumDum", "(", ")", "o", ".", "x", "=", "(", "'a'", ",", "'b'", ",", "'c'", ")", "serial", "....
tests a regression where yaml_src wasnt getting correctly set on pkls .
train
false
21,833
def _integer_rational_reconstruction(c, m, domain): if (c < 0): c += m (r0, s0) = (m, domain.zero) (r1, s1) = (c, domain.one) bound = sqrt((m / 2)) while (r1 >= bound): quo = (r0 // r1) (r0, r1) = (r1, (r0 - (quo * r1))) (s0, s1) = (s1, (s0 - (quo * s1))) if (abs(s1) >= bound): return None if (s1 < 0): (a, b) = ((- r1), (- s1)) elif (s1 > 0): (a, b) = (r1, s1) else: return None field = domain.get_field() return (field(a) / field(b))
[ "def", "_integer_rational_reconstruction", "(", "c", ",", "m", ",", "domain", ")", ":", "if", "(", "c", "<", "0", ")", ":", "c", "+=", "m", "(", "r0", ",", "s0", ")", "=", "(", "m", ",", "domain", ".", "zero", ")", "(", "r1", ",", "s1", ")", ...
reconstruct a rational number frac a b from .
train
false
21,834
def volume_type_qos_associations_get(context, qos_specs_id, inactive=False): return IMPL.volume_type_qos_associations_get(context, qos_specs_id, inactive)
[ "def", "volume_type_qos_associations_get", "(", "context", ",", "qos_specs_id", ",", "inactive", "=", "False", ")", ":", "return", "IMPL", ".", "volume_type_qos_associations_get", "(", "context", ",", "qos_specs_id", ",", "inactive", ")" ]
get volume types that are associated with specific qos specs .
train
false
21,835
def fateman_poly_F_1(n): Y = [Symbol(('y_' + str(i))) for i in range(0, (n + 1))] (y_0, y_1) = (Y[0], Y[1]) u = (y_0 + Add(*[y for y in Y[1:]])) v = ((y_0 ** 2) + Add(*[(y ** 2) for y in Y[1:]])) F = ((u + 1) * (u + 2)).as_poly(*Y) G = ((v + 1) * (((((-3) * y_1) * (y_0 ** 2)) + (y_1 ** 2)) - 1)).as_poly(*Y) H = Poly(1, *Y) return (F, G, H)
[ "def", "fateman_poly_F_1", "(", "n", ")", ":", "Y", "=", "[", "Symbol", "(", "(", "'y_'", "+", "str", "(", "i", ")", ")", ")", "for", "i", "in", "range", "(", "0", ",", "(", "n", "+", "1", ")", ")", "]", "(", "y_0", ",", "y_1", ")", "=", ...
fatemans gcd benchmark: trivial gcd .
train
false
21,836
def parse_http_line(http_line, http_methods): http_line_split = http_line.split() method = '' path = '' if (len(http_line_split) > 1): method = http_line_split[0] path = http_line_split[1] if ((method + ' ') not in http_methods): method = None path = None return (method, path)
[ "def", "parse_http_line", "(", "http_line", ",", "http_methods", ")", ":", "http_line_split", "=", "http_line", ".", "split", "(", ")", "method", "=", "''", "path", "=", "''", "if", "(", "len", "(", "http_line_split", ")", ">", "1", ")", ":", "method", ...
parse the header with the http method in it .
train
false
21,837
def jd2jcal(jd): j = (jd + 1402) k = ipart(((j - 1) / 1461.0)) l = (j - (1461.0 * k)) n = (ipart(((l - 1) / 365.0)) - ipart((l / 1461.0))) i = ((l - (365.0 * n)) + 30.0) j = ipart(((80.0 * i) / 2447.0)) day = (i - ipart(((2447.0 * j) / 80.0))) i = ipart((j / 11.0)) month = ((j + 2) - (12.0 * i)) year = ((((4 * k) + n) + i) - 4716.0) return (int(year), int(month), int(day))
[ "def", "jd2jcal", "(", "jd", ")", ":", "j", "=", "(", "jd", "+", "1402", ")", "k", "=", "ipart", "(", "(", "(", "j", "-", "1", ")", "/", "1461.0", ")", ")", "l", "=", "(", "j", "-", "(", "1461.0", "*", "k", ")", ")", "n", "=", "(", "i...
julian calendar date for the given julian date .
train
false
21,838
def reverse_upsize_quota_delta(context, instance): return resize_quota_delta(context, instance.new_flavor, instance.old_flavor, (-1), (-1))
[ "def", "reverse_upsize_quota_delta", "(", "context", ",", "instance", ")", ":", "return", "resize_quota_delta", "(", "context", ",", "instance", ".", "new_flavor", ",", "instance", ".", "old_flavor", ",", "(", "-", "1", ")", ",", "(", "-", "1", ")", ")" ]
calculate deltas required to reverse a prior upsizing quota adjustment .
train
false
21,840
def replace_course_urls(course_id, block, view, frag, context): return wrap_fragment(frag, static_replace.replace_course_urls(frag.content, course_id))
[ "def", "replace_course_urls", "(", "course_id", ",", "block", ",", "view", ",", "frag", ",", "context", ")", ":", "return", "wrap_fragment", "(", "frag", ",", "static_replace", ".", "replace_course_urls", "(", "frag", ".", "content", ",", "course_id", ")", "...
replace /course/$stuff urls with /courses/$course_id/$stuff urls text: the text to replace course_module: a coursedescriptor returns: text with the links replaced .
train
false
21,841
def create_manifest(manifest_path, index_file, annot_dir, image_dir, root_dir): records = [] with open(index_file) as f: for img in f: tag = img.rstrip(os.linesep) image = os.path.join(image_dir, (tag + '.jpg')) annot = os.path.join(annot_dir, (tag + '.json')) assert os.path.exists(image), 'Path {} not found'.format(image) assert os.path.exists(annot), 'Path {} not found'.format(annot) records.append((os.path.relpath(image, root_dir), os.path.relpath(annot, root_dir))) np.savetxt(manifest_path, records, fmt='%s,%s')
[ "def", "create_manifest", "(", "manifest_path", ",", "index_file", ",", "annot_dir", ",", "image_dir", ",", "root_dir", ")", ":", "records", "=", "[", "]", "with", "open", "(", "index_file", ")", "as", "f", ":", "for", "img", "in", "f", ":", "tag", "="...
based on a pascalvoc index file .
train
false
21,842
def p_exclusive_or_expression_2(t): pass
[ "def", "p_exclusive_or_expression_2", "(", "t", ")", ":", "pass" ]
exclusive_or_expression : exclusive_or_expression xor and_expression .
train
false
21,845
@handle_response_format @treeio_login_required def stream_delete(request, stream_id, response_format='html'): stream = get_object_or_404(MessageStream, pk=stream_id) if (not request.user.profile.has_permission(stream, mode='w')): return user_denied(request, message="You don't have access to this Stream", response_format=response_format) if request.POST: if ('delete' in request.POST): if ('trash' in request.POST): stream.trash = True stream.save() else: stream.delete() return HttpResponseRedirect('/messaging/') elif ('cancel' in request.POST): return HttpResponseRedirect(reverse('messaging_stream_view', args=[stream.id])) context = _get_default_context(request) context.update({'stream': stream}) return render_to_response('messaging/stream_delete', context, context_instance=RequestContext(request), response_format=response_format)
[ "@", "handle_response_format", "@", "treeio_login_required", "def", "stream_delete", "(", "request", ",", "stream_id", ",", "response_format", "=", "'html'", ")", ":", "stream", "=", "get_object_or_404", "(", "MessageStream", ",", "pk", "=", "stream_id", ")", "if"...
delete stream page .
train
false
21,846
def setup_proximity_component(hass, name, config): ignored_zones = config.get(CONF_IGNORED_ZONES) proximity_devices = config.get(CONF_DEVICES) tolerance = config.get(CONF_TOLERANCE) proximity_zone = name unit_of_measurement = config.get(CONF_UNIT_OF_MEASUREMENT, hass.config.units.length_unit) zone_id = 'zone.{}'.format(proximity_zone) proximity = Proximity(hass, proximity_zone, DEFAULT_DIST_TO_ZONE, DEFAULT_DIR_OF_TRAVEL, DEFAULT_NEAREST, ignored_zones, proximity_devices, tolerance, zone_id, unit_of_measurement) proximity.entity_id = '{}.{}'.format(DOMAIN, proximity_zone) proximity.update_ha_state() track_state_change(hass, proximity_devices, proximity.check_proximity_state_change) return True
[ "def", "setup_proximity_component", "(", "hass", ",", "name", ",", "config", ")", ":", "ignored_zones", "=", "config", ".", "get", "(", "CONF_IGNORED_ZONES", ")", "proximity_devices", "=", "config", ".", "get", "(", "CONF_DEVICES", ")", "tolerance", "=", "conf...
set up individual proximity component .
train
false
21,847
def split_and_validate_path(request, minsegs=1, maxsegs=None, rest_with_last=False): try: segs = split_path(unquote(request.path), minsegs, maxsegs, rest_with_last) validate_device_partition(segs[0], segs[1]) return segs except ValueError as err: raise HTTPBadRequest(body=str(err), request=request, content_type='text/plain')
[ "def", "split_and_validate_path", "(", "request", ",", "minsegs", "=", "1", ",", "maxsegs", "=", "None", ",", "rest_with_last", "=", "False", ")", ":", "try", ":", "segs", "=", "split_path", "(", "unquote", "(", "request", ".", "path", ")", ",", "minsegs...
utility function to split and validate the request path .
train
false
21,848
def get_client(host, port=None, timeout=None, use_ssl=False, username=None, password=None, tenant=None, auth_url=None, auth_strategy=None, auth_token=None, region=None, is_silent_upload=False, insecure=False): if (auth_url or os.getenv('OS_AUTH_URL')): force_strategy = 'keystone' else: force_strategy = None creds = {'username': (username or os.getenv('OS_AUTH_USER', os.getenv('OS_USERNAME'))), 'password': (password or os.getenv('OS_AUTH_KEY', os.getenv('OS_PASSWORD'))), 'tenant': (tenant or os.getenv('OS_AUTH_TENANT', os.getenv('OS_TENANT_NAME'))), 'auth_url': (auth_url or os.getenv('OS_AUTH_URL')), 'strategy': (force_strategy or auth_strategy or os.getenv('OS_AUTH_STRATEGY', 'noauth')), 'region': (region or os.getenv('OS_REGION_NAME'))} if ((creds['strategy'] == 'keystone') and (not creds['auth_url'])): msg = '--os_auth_url option or OS_AUTH_URL environment variable required when keystone authentication strategy is enabled\n' raise exception.ClientConfigurationError(msg) return CacheClient(host=host, port=port, timeout=timeout, use_ssl=use_ssl, auth_tok=(auth_token or os.getenv('OS_TOKEN')), creds=creds, insecure=insecure)
[ "def", "get_client", "(", "host", ",", "port", "=", "None", ",", "timeout", "=", "None", ",", "use_ssl", "=", "False", ",", "username", "=", "None", ",", "password", "=", "None", ",", "tenant", "=", "None", ",", "auth_url", "=", "None", ",", "auth_st...
builds an http client authenticated with the service account credentials .
train
false
21,849
def get_users(): try: recs = psutil.users() return [dict(x._asdict()) for x in recs] except AttributeError: try: import utmp result = [] while True: rec = utmp.utmpaccess.getutent() if (rec is None): return result elif (rec[0] == 7): started = rec[8] if isinstance(started, tuple): started = started[0] result.append({'name': rec[4], 'terminal': rec[2], 'started': started, 'host': rec[5]}) except ImportError: return False
[ "def", "get_users", "(", ")", ":", "try", ":", "recs", "=", "psutil", ".", "users", "(", ")", "return", "[", "dict", "(", "x", ".", "_asdict", "(", ")", ")", "for", "x", "in", "recs", "]", "except", "AttributeError", ":", "try", ":", "import", "u...
list all users .
train
true
21,850
def _is_mapped_class(entity): insp = inspection.inspect(entity, False) return ((insp is not None) and hasattr(insp, 'mapper') and (insp.is_mapper or insp.is_aliased_class))
[ "def", "_is_mapped_class", "(", "entity", ")", ":", "insp", "=", "inspection", ".", "inspect", "(", "entity", ",", "False", ")", "return", "(", "(", "insp", "is", "not", "None", ")", "and", "hasattr", "(", "insp", ",", "'mapper'", ")", "and", "(", "i...
return true if the given object is a mapped class .
train
false
21,852
def _CheckGeoPoint(geo_point): if (not isinstance(geo_point, GeoPoint)): raise TypeError(('geo_point must be a GeoPoint, got %s' % geo_point.__class__.__name__)) return geo_point
[ "def", "_CheckGeoPoint", "(", "geo_point", ")", ":", "if", "(", "not", "isinstance", "(", "geo_point", ",", "GeoPoint", ")", ")", ":", "raise", "TypeError", "(", "(", "'geo_point must be a GeoPoint, got %s'", "%", "geo_point", ".", "__class__", ".", "__name__", ...
checks geo_point is a geopoint and returns it .
train
false
21,853
def _get_list_from_email_marketing_provider(sailthru_client): try: sailthru_get_response = sailthru_client.api_get('list', {}) except SailthruClientError as exc: log.error('Exception attempting to get list from Sailthru - %s', unicode(exc)) return {} if (not sailthru_get_response.is_ok()): error = sailthru_get_response.get_error() log.info('Error attempting to read list record from Sailthru: %s', error.get_message()) return {} list_map = dict() for user_list in sailthru_get_response.json['lists']: list_map[user_list.get('name')] = user_list return list_map
[ "def", "_get_list_from_email_marketing_provider", "(", "sailthru_client", ")", ":", "try", ":", "sailthru_get_response", "=", "sailthru_client", ".", "api_get", "(", "'list'", ",", "{", "}", ")", "except", "SailthruClientError", "as", "exc", ":", "log", ".", "erro...
get sailthru list .
train
false
21,855
def integer_powers(exprs): terms = {} for term in exprs: for j in terms: a = cancel((term / j)) if a.is_Rational: terms[j].append((term, a)) break else: terms[term] = [(term, S(1))] newterms = {} for term in terms: common_denom = reduce(ilcm, [i.as_numer_denom()[1] for (_, i) in terms[term]]) newterm = (term / common_denom) newmults = [(i, (j * common_denom)) for (i, j) in terms[term]] newterms[newterm] = newmults return sorted(iter(newterms.items()), key=(lambda item: item[0].sort_key()))
[ "def", "integer_powers", "(", "exprs", ")", ":", "terms", "=", "{", "}", "for", "term", "in", "exprs", ":", "for", "j", "in", "terms", ":", "a", "=", "cancel", "(", "(", "term", "/", "j", ")", ")", "if", "a", ".", "is_Rational", ":", "terms", "...
rewrites a list of expressions as integer multiples of each other .
train
false
21,856
def test_provider_export_as_json(): assert json.dumps(dict(providers))
[ "def", "test_provider_export_as_json", "(", ")", ":", "assert", "json", ".", "dumps", "(", "dict", "(", "providers", ")", ")" ]
provider dict should be exportable as json .
train
false
21,857
def replace_extension(filename, new_extension): if (isinstance(filename, (str, unicode)) and (u'.' in filename)): (basename, _, _) = filename.rpartition(u'.') if basename: return u'{0}.{1}'.format(basename, new_extension) return filename
[ "def", "replace_extension", "(", "filename", ",", "new_extension", ")", ":", "if", "(", "isinstance", "(", "filename", ",", "(", "str", ",", "unicode", ")", ")", "and", "(", "u'.'", "in", "filename", ")", ")", ":", "(", "basename", ",", "_", ",", "_"...
replace the extension of the provided filename with a new extension .
train
false
21,858
def _get_ppa_info_from_launchpad(owner_name, ppa_name): lp_url = 'https://launchpad.net/api/1.0/~{0}/+archive/{1}'.format(owner_name, ppa_name) request = _Request(lp_url, headers={'Accept': 'application/json'}) lp_page = _urlopen(request) return json.load(lp_page)
[ "def", "_get_ppa_info_from_launchpad", "(", "owner_name", ",", "ppa_name", ")", ":", "lp_url", "=", "'https://launchpad.net/api/1.0/~{0}/+archive/{1}'", ".", "format", "(", "owner_name", ",", "ppa_name", ")", "request", "=", "_Request", "(", "lp_url", ",", "headers", ...
idea from softwareproperties .
train
true
21,859
def get_repository_by_name(app, name): return get_repository_query(app).filter_by(name=name).first()
[ "def", "get_repository_by_name", "(", "app", ",", "name", ")", ":", "return", "get_repository_query", "(", "app", ")", ".", "filter_by", "(", "name", "=", "name", ")", ".", "first", "(", ")" ]
get a repository from the database via name .
train
false