id_within_dataset
int64
1
55.5k
snippet
stringlengths
19
14.2k
tokens
listlengths
6
1.63k
nl
stringlengths
6
352
split_within_dataset
stringclasses
1 value
is_duplicated
bool
2 classes
47,761
def removeGcodeFile(gcodeFilePath):
    """Delete the gcode file at *gcodeFilePath* unless it is protected.

    Paths not containing 'alterations' are removed unconditionally;
    paths inside an alterations area are only removed when they also
    lack 'example_'.
    """
    if ('alterations' not in gcodeFilePath):
        os.remove(gcodeFilePath)
        print ('removeGeneratedFiles deleted ' + gcodeFilePath)
        return
    if ('example_' not in gcodeFilePath):
        os.remove(gcodeFilePath)
        print ('removeGeneratedFiles deleted ' + gcodeFilePath)
[ "def", "removeGcodeFile", "(", "gcodeFilePath", ")", ":", "if", "(", "'alterations'", "not", "in", "gcodeFilePath", ")", ":", "os", ".", "remove", "(", "gcodeFilePath", ")", "print", "(", "'removeGeneratedFiles deleted '", "+", "gcodeFilePath", ")", "return", "i...
remove gcode file .
train
false
47,762
def filter_list(complete_list, browsers):
    """Filter *complete_list* against the *browsers* lookup table.

    complete_list -- rows whose entry[1]/entry[2] expose text_content()
    browsers -- mapping of browser name -> list of version strings;
        mutated: a matched version is removed so it cannot match twice

    Returns a dict mapping browser name to a list of matched
    (entry1_text, entry2_text) tuples.
    """
    table = {}
    for entry in complete_list:
        candidate = (entry[1].text_content(), entry[2].text_content())
        for name in browsers:
            found = False
            # NOTE(review): only candidate[1] is searched for both name and
            # version; candidate[0] is carried along for output only.
            if (name.lower() in candidate[1].lower()):
                for version in browsers[name]:
                    if (version.lower() in candidate[1].lower()):
                        if (table.get(name) is None):
                            table[name] = []
                        table[name].append(candidate)
                        browsers[name].remove(version)
                        found = True
                        break
            if found:
                break
    return table
[ "def", "filter_list", "(", "complete_list", ",", "browsers", ")", ":", "table", "=", "{", "}", "for", "entry", "in", "complete_list", ":", "candidate", "=", "(", "entry", "[", "1", "]", ".", "text_content", "(", ")", ",", "entry", "[", "2", "]", ".",...
filter the received list based on a look up table .
train
false
47,763
def entoken(textstream, positions=False, chars=False, start_pos=0, start_char=0, **kwargs):
    """Yield a Token object for each unicode string in *textstream*.

    One Token instance is reused and mutated on every iteration, so
    consumers must copy it if they keep references past a step.

    positions/chars -- when true, maintain token position / char offsets
    start_pos/start_char -- initial counter values
    kwargs -- forwarded to the Token constructor
    """
    pos = start_pos
    char = start_char
    t = Token(positions=positions, chars=chars, **kwargs)
    for text in textstream:
        t.text = text
        if positions:
            t.pos = pos
            pos += 1
        if chars:
            t.startchar = char
            char = (char + len(text))
            t.endchar = char
        (yield t)
[ "def", "entoken", "(", "textstream", ",", "positions", "=", "False", ",", "chars", "=", "False", ",", "start_pos", "=", "0", ",", "start_char", "=", "0", ",", "**", "kwargs", ")", ":", "pos", "=", "start_pos", "char", "=", "start_char", "t", "=", "To...
takes a sequence of unicode strings and yields a series of token objects .
train
false
47,764
def _get_queryset(klass): if hasattr(klass, '_default_manager'): return klass._default_manager.all() return klass
[ "def", "_get_queryset", "(", "klass", ")", ":", "if", "hasattr", "(", "klass", ",", "'_default_manager'", ")", ":", "return", "klass", ".", "_default_manager", ".", "all", "(", ")", "return", "klass" ]
return a queryset or a manager .
train
false
47,765
def read_bad_channels(fid, node):
    """Read the list of bad-channel names from a FIFF tree.

    fid -- open fif file handle; node -- tree node to search under.

    Returns a list of channel names (possibly empty). If several
    FIFFB_MNE_BAD_CHANNELS nodes carry data, the last one wins.
    """
    nodes = dir_tree_find(node, FIFF.FIFFB_MNE_BAD_CHANNELS)
    bads = []
    if (len(nodes) > 0):
        for node in nodes:
            tag = find_tag(fid, node, FIFF.FIFF_MNE_CH_NAME_LIST)
            if ((tag is not None) and (tag.data is not None)):
                bads = tag.data.split(':')
    return bads
[ "def", "read_bad_channels", "(", "fid", ",", "node", ")", ":", "nodes", "=", "dir_tree_find", "(", "node", ",", "FIFF", ".", "FIFFB_MNE_BAD_CHANNELS", ")", "bads", "=", "[", "]", "if", "(", "len", "(", "nodes", ")", ">", "0", ")", ":", "for", "node",...
read bad channels .
train
false
47,766
def passwd(passwd, user='', alg='sha1', realm=None):
    """Hash a password, optionally in HTTP-digest ``user:realm:passwd`` form
    (the format used by Tomcat's digest utility).

    passwd -- the plain-text password
    user   -- user name; only used when *realm* is given
    alg    -- name of a hashlib algorithm (e.g. 'sha1', 'md5')
    realm  -- optional realm; when set, ``user:realm:passwd`` is hashed

    Returns the hex digest string, or False when *alg* does not name a
    hashlib algorithm.
    """
    if not hasattr(hashlib, alg):
        return False
    # BUG FIX: the original stored the constructor returned by getattr()
    # and called .update() on the function itself (a TypeError at runtime)
    # instead of instantiating a hash object first.
    digest = getattr(hashlib, alg)()
    if realm:
        data = '{0}:{1}:{2}'.format(user, realm, passwd)
    else:
        data = passwd
    if isinstance(data, str):
        # hashlib requires bytes input on Python 3
        data = data.encode('utf-8')
    digest.update(data)
    return digest.hexdigest()
[ "def", "passwd", "(", "passwd", ",", "user", "=", "''", ",", "alg", "=", "'sha1'", ",", "realm", "=", "None", ")", ":", "digest", "=", "(", "(", "hasattr", "(", "hashlib", ",", "alg", ")", "and", "getattr", "(", "hashlib", ",", "alg", ")", ")", ...
this function replaces the $catalina_home/bin/digest .
train
true
47,767
def genVersion(*args, **kwargs):
    """Render the body of a ``_version.py`` file for incremental's Version,
    returned as ASCII-encoded bytes; args/kwargs go to Version()."""
    source = 'from incremental import Version\n__version__=%r' % Version(*args, **kwargs)
    return source.encode('ascii')
[ "def", "genVersion", "(", "*", "args", ",", "**", "kwargs", ")", ":", "return", "(", "'from incremental import Version\\n__version__=%r'", "%", "Version", "(", "*", "args", ",", "**", "kwargs", ")", ")", ".", "encode", "(", "'ascii'", ")" ]
a convenience for generating _version .
train
false
47,769
def write_qual_line(demultiplexed_qual_f, qual_seq, label_line, keep_barcode, bc_len):
    """Write one quality-score record in 60-scores-per-line FASTA-qual format.

    demultiplexed_qual_f -- open writable file object
    qual_seq -- sequence of integer quality scores
    label_line -- text for the '>' header line
    keep_barcode -- when false, the first *bc_len* scores are dropped
    """
    seq = qual_seq if keep_barcode else qual_seq[bc_len:]
    width = 60
    rendered_lines = []
    for start in range(0, len(seq), width):
        chunk = seq[start:start + width]
        rendered_lines.append(' '.join(str(score) for score in chunk))
    demultiplexed_qual_f.write('>%s\n' % label_line)
    demultiplexed_qual_f.write('\n'.join(rendered_lines))
    demultiplexed_qual_f.write('\n')
[ "def", "write_qual_line", "(", "demultiplexed_qual_f", ",", "qual_seq", ",", "label_line", ",", "keep_barcode", ",", "bc_len", ")", ":", "if", "keep_barcode", ":", "final_seq", "=", "qual_seq", "else", ":", "final_seq", "=", "qual_seq", "[", "bc_len", ":", "]"...
writes quality score sequence out in proper format demultiplexed_qual_f: open file object to write label/qual scores to .
train
false
47,770
@vm_ref_cache_from_instance
def get_vm_ref(session, instance):
    """Get a vSphere VM reference for *instance*: by uuid first, then by
    name; results are cached by the decorator.

    Raises exception.InstanceNotFound when both lookups fail.
    """
    uuid = instance.uuid
    vm_ref = (search_vm_ref_by_identifier(session, uuid) or _get_vm_ref_from_name(session, instance.name))
    if (vm_ref is None):
        raise exception.InstanceNotFound(instance_id=uuid)
    return vm_ref
[ "@", "vm_ref_cache_from_instance", "def", "get_vm_ref", "(", "session", ",", "instance", ")", ":", "uuid", "=", "instance", ".", "uuid", "vm_ref", "=", "(", "search_vm_ref_by_identifier", "(", "session", ",", "uuid", ")", "or", "_get_vm_ref_from_name", "(", "ses...
get reference to the vm through uuid or vm name .
train
false
47,771
def validate_next_url(next_url):
    """Return True when *next_url* is safe to redirect to.

    Rejects protocol-relative URLs ('//...') and anything that is neither
    site-relative nor on one of the configured trusted hosts.
    """
    if next_url.startswith('//'):
        return False
    allowed = (
        next_url[0] == '/'
        or next_url.startswith(settings.DOMAIN)
        or next_url.startswith(settings.CAS_SERVER_URL)
        or next_url.startswith(settings.MFR_SERVER_URL)
    )
    return allowed
[ "def", "validate_next_url", "(", "next_url", ")", ":", "if", "next_url", ".", "startswith", "(", "'//'", ")", ":", "return", "False", "if", "(", "not", "(", "(", "next_url", "[", "0", "]", "==", "'/'", ")", "or", "next_url", ".", "startswith", "(", "...
non-view helper function that checks next_url .
train
false
47,772
def getVersion():
    """Return the version string read from the VERSION file in REPO_DIR,
    with surrounding whitespace stripped."""
    version_path = os.path.join(REPO_DIR, 'VERSION')
    with open(version_path, 'r') as version_file:
        return version_file.read().strip()
[ "def", "getVersion", "(", ")", ":", "with", "open", "(", "os", ".", "path", ".", "join", "(", "REPO_DIR", ",", "'VERSION'", ")", ",", "'r'", ")", "as", "versionFile", ":", "return", "versionFile", ".", "read", "(", ")", ".", "strip", "(", ")" ]
get version from local file .
train
true
47,774
def format_dot(dotfilename, format=None):
    """Convert *dotfilename* via graphviz: ``dot -T<format> -O '<file>'``.

    Raises IOError with a clearer message when the 'dot' executable is
    missing; re-raises other IOErrors; logs on success.
    """
    cmd = (u"dot -T%s -O '%s'" % (format, dotfilename))
    try:
        CommandLine(cmd).run()
    except IOError as ioe:
        if (u'could not be found' in str(ioe)):
            raise IOError(u"Cannot draw directed graph; executable 'dot' is unavailable")
        else:
            raise ioe
    else:
        logger.info((u'Converting dotfile: %s to %s format' % (dotfilename, format)))
[ "def", "format_dot", "(", "dotfilename", ",", "format", "=", "None", ")", ":", "cmd", "=", "(", "u\"dot -T%s -O '%s'\"", "%", "(", "format", ",", "dotfilename", ")", ")", "try", ":", "CommandLine", "(", "cmd", ")", ".", "run", "(", ")", "except", "IOEr...
dump a directed graph .
train
false
47,775
def _StartIOLoop(callback):
    """Start the current tornado IOLoop with *callback* scheduled first.

    Best-effort: tries to switch AsyncHTTPClient to the pycurl
    implementation and logs (without failing) when that is unavailable.
    """
    io_loop = ioloop.IOLoop.current()
    try:
        httpclient.AsyncHTTPClient.configure('tornado.curl_httpclient.CurlAsyncHTTPClient')
        httpclient.AsyncHTTPClient(io_loop=io_loop, max_clients=100)
    except:
        # deliberate broad catch: falling back to the default client is fine
        logging.exception('failed to configure tornado AsyncHTTPClient to use pycurl')
    io_loop.add_callback(callback)
    io_loop.start()
[ "def", "_StartIOLoop", "(", "callback", ")", ":", "io_loop", "=", "ioloop", ".", "IOLoop", ".", "current", "(", ")", "try", ":", "httpclient", ".", "AsyncHTTPClient", ".", "configure", "(", "'tornado.curl_httpclient.CurlAsyncHTTPClient'", ")", "httpclient", ".", ...
creates a new ioloop object .
train
false
47,776
def set_win32_cert_path():
    """Point tornado's simple_httpclient at the bundled CA certificate file.

    Uses the frozen executable's directory for packaged builds
    (sys.frozen), otherwise the in-repo packaging path next to this module.
    """
    if hasattr(sys, 'frozen'):
        prog_path = os.path.dirname(sys.executable)
        crt_path = os.path.join(prog_path, 'ca-certificates.crt')
    else:
        cur_path = os.path.dirname(__file__)
        crt_path = os.path.join(cur_path, 'packaging', 'datadog-agent', 'win32', 'install_files', 'ca-certificates.crt')
    # imported here so tornado is only required when this is actually called
    import tornado.simple_httpclient
    log.info(('Windows certificate path: %s' % crt_path))
    tornado.simple_httpclient._DEFAULT_CA_CERTS = crt_path
[ "def", "set_win32_cert_path", "(", ")", ":", "if", "hasattr", "(", "sys", ",", "'frozen'", ")", ":", "prog_path", "=", "os", ".", "path", ".", "dirname", "(", "sys", ".", "executable", ")", "crt_path", "=", "os", ".", "path", ".", "join", "(", "prog_...
in order to use tornado .
train
false
47,777
def scp_between_remotes(src, dst, port, s_passwd, d_passwd, s_name, d_name, s_path, d_path, limit='', log_filename=None, timeout=600, src_inter=None, dst_inter=None):
    """Copy files from one remote host to another via scp.

    src/dst -- source/destination addresses; IPv6 link-local ('fe80...')
        addresses require src_inter/dst_inter interface names
    port -- ssh port; s_*/d_* -- user names, passwords and paths per side
    limit -- optional bandwidth limit (passed as ``-l``)
    timeout -- seconds allowed for the transfer

    Returns whatever remote_scp returns.
    """
    if limit:
        limit = ('-l %s' % limit)
    if (src and src.lower().startswith('fe80')):
        if (not src_inter):
            # NOTE(review): SCPError receives two positional strings here,
            # so the second is a separate arg, not message concatenation —
            # probably a missing '+'; left as-is to preserve behavior.
            raise SCPError('When using ipv6 linklocal address must assign ', 'the interface the neighbour attache')
        src = ('%s%%%s' % (src, src_inter))
    if (dst and dst.lower().startswith('fe80')):
        if (not dst_inter):
            raise SCPError('When using ipv6 linklocal address must assign ', 'the interface the neighbour attache')
        dst = ('%s%%%s' % (dst, dst_inter))
    command = ('scp -v -o UserKnownHostsFile=/dev/null -o PreferredAuthentications=password -r %s -P %s %s@\\[%s\\]:%s %s@\\[%s\\]:%s' % (limit, port, s_name, src, s_path, d_name, dst, d_path))
    password_list = []
    password_list.append(s_passwd)
    password_list.append(d_passwd)
    return remote_scp(command, password_list, log_filename, timeout)
[ "def", "scp_between_remotes", "(", "src", ",", "dst", ",", "port", ",", "s_passwd", ",", "d_passwd", ",", "s_name", ",", "d_name", ",", "s_path", ",", "d_path", ",", "limit", "=", "''", ",", "log_filename", "=", "None", ",", "timeout", "=", "600", ",",...
copy files from a remote host to another remote host .
train
false
47,778
def getNewRepository():
    """Return a fresh ExportRepository instance (repository constructor hook)."""
    return ExportRepository()
[ "def", "getNewRepository", "(", ")", ":", "return", "ExportRepository", "(", ")" ]
get the repository constructor .
train
false
47,781
def get_all_headers(message, key):
    """Return every value of header *key* from *message*, delegating to
    the message object's get_all() (email/http message semantics)."""
    return message.get_all(key)
[ "def", "get_all_headers", "(", "message", ",", "key", ")", ":", "return", "message", ".", "get_all", "(", "key", ")" ]
given an httpmessage .
train
false
47,782
def get_rnd_shuffle(builder):
    """Return (declaring if needed) the LLVM function 'numba_rnd_shuffle',
    typed void(rnd_state_ptr_t), in the current module.

    The state-pointer argument is tagged 'nocapture' as an optimizer hint.
    """
    fnty = ir.FunctionType(ir.VoidType(), (rnd_state_ptr_t,))
    fn = builder.function.module.get_or_insert_function(fnty, 'numba_rnd_shuffle')
    fn.args[0].add_attribute('nocapture')
    return fn
[ "def", "get_rnd_shuffle", "(", "builder", ")", ":", "fnty", "=", "ir", ".", "FunctionType", "(", "ir", ".", "VoidType", "(", ")", ",", "(", "rnd_state_ptr_t", ",", ")", ")", "fn", "=", "builder", ".", "function", ".", "module", ".", "get_or_insert_functi...
get the internal function to shuffle the mt state .
train
false
47,783
def filePathDelta(origin, destination):
    """Express destination.path relative to origin.path.

    Both arguments expose a ``.path`` string attribute. Returns a list of
    segments: one '..' for each origin segment past the common prefix,
    followed by destination's remaining segments.
    """
    fromParts = origin.path.split(os.sep)
    toParts = destination.path.split(os.sep)
    shared = 0
    for fromSeg, toSeg in zip(fromParts, toParts):
        if fromSeg != toSeg:
            break
        shared += 1
    ups = ['..'] * (len(fromParts) - shared)
    return ups + toParts[shared:]
[ "def", "filePathDelta", "(", "origin", ",", "destination", ")", ":", "commonItems", "=", "0", "path1", "=", "origin", ".", "path", ".", "split", "(", "os", ".", "sep", ")", "path2", "=", "destination", ".", "path", ".", "split", "(", "os", ".", "sep"...
return a list of strings that represent c{destination} as a path relative to c{origin} .
train
false
47,784
def servermods():
    """Return the modules compiled into the Apache server binary.

    Runs ``<httpd> -l`` via salt's cmd.run and keeps the stripped lines
    containing '.c' (module source names such as 'mod_so.c').
    """
    cmd = '{0} -l'.format(_detect_os())
    ret = []
    out = __salt__['cmd.run'](cmd).splitlines()
    for line in out:
        if (not line):
            continue
        if ('.c' in line):
            ret.append(line.strip())
    return ret
[ "def", "servermods", "(", ")", ":", "cmd", "=", "'{0} -l'", ".", "format", "(", "_detect_os", "(", ")", ")", "ret", "=", "[", "]", "out", "=", "__salt__", "[", "'cmd.run'", "]", "(", "cmd", ")", ".", "splitlines", "(", ")", "for", "line", "in", "...
return list of modules compiled into the server cli example: .
train
true
47,785
def java_import(jvm_view, import_str):
    """Import the package or class *import_str* into a py4j JVM view.

    Builds the JVMVIEW import wire command, sends it through the view's
    gateway client, and returns the converted response.
    """
    gateway_client = jvm_view._gateway_client
    command = ((((((proto.JVMVIEW_COMMAND_NAME + proto.JVM_IMPORT_SUB_COMMAND_NAME) + jvm_view._id) + u'\n') + escape_new_line(import_str)) + u'\n') + proto.END_COMMAND_PART)
    answer = gateway_client.send_command(command)
    return_value = get_return_value(answer, gateway_client, None, None)
    return return_value
[ "def", "java_import", "(", "jvm_view", ",", "import_str", ")", ":", "gateway_client", "=", "jvm_view", ".", "_gateway_client", "command", "=", "(", "(", "(", "(", "(", "(", "proto", ".", "JVMVIEW_COMMAND_NAME", "+", "proto", ".", "JVM_IMPORT_SUB_COMMAND_NAME", ...
imports the package or class specified by import_str in the jvm view namespace .
train
false
47,789
def process_projection_requirements(figure, *args, **kwargs):
    """Resolve the projection requested via add_axes/add_subplot/gca kwargs.

    Handles the polar=/projection= interplay (they must agree), maps a
    projection name or None to its class, and lets objects providing
    ``_as_mpl_axes`` contribute extra kwargs.

    Returns (projection_class, remaining_kwargs, figure_key).
    Raises ValueError for conflicting polar/projection and TypeError for
    an unsupported projection object.
    """
    ispolar = kwargs.pop(u'polar', False)
    projection = kwargs.pop(u'projection', None)
    if ispolar:
        if ((projection is not None) and (projection != u'polar')):
            raise ValueError((u'polar=True, yet projection=%r. Only one of these arguments should be supplied.' % projection))
        projection = u'polar'
    if (projection == u'polar'):
        # polar axes take a resolution argument; default it to 1
        kwargs.setdefault(u'resolution', 1)
    if (isinstance(projection, six.string_types) or (projection is None)):
        projection_class = get_projection_class(projection)
    elif hasattr(projection, u'_as_mpl_axes'):
        (projection_class, extra_kwargs) = projection._as_mpl_axes()
        kwargs.update(**extra_kwargs)
    else:
        raise TypeError((u'projection must be a string, None or implement a _as_mpl_axes method. Got %r' % projection))
    key = figure._make_key(*args, **kwargs)
    return (projection_class, kwargs, key)
[ "def", "process_projection_requirements", "(", "figure", ",", "*", "args", ",", "**", "kwargs", ")", ":", "ispolar", "=", "kwargs", ".", "pop", "(", "u'polar'", ",", "False", ")", "projection", "=", "kwargs", ".", "pop", "(", "u'projection'", ",", "None", ...
handle the args/kwargs to for add_axes/add_subplot/gca .
train
false
47,790
def get_user_scope(module, position):
    """Return the syntactic scope containing *position* within *module*.

    If some statement covers the position, its parent scope (including
    flows) is returned; otherwise the children are scanned recursively
    for the innermost Scope/Flow spanning the position, falling back to
    the module itself.
    """
    user_stmt = module.get_statement_for_position(position)
    if (user_stmt is None):
        def scan(scope):
            # recurse first so the innermost matching scope wins
            for s in scope.children:
                if (s.start_pos <= position <= s.end_pos):
                    if isinstance(s, (tree.Scope, tree.Flow)):
                        return (scan(s) or s)
                    elif (s.type in ('suite', 'decorated')):
                        return scan(s)
            return None
        return (scan(module) or module)
    else:
        return user_stmt.get_parent_scope(include_flows=True)
[ "def", "get_user_scope", "(", "module", ",", "position", ")", ":", "user_stmt", "=", "module", ".", "get_statement_for_position", "(", "position", ")", "if", "(", "user_stmt", "is", "None", ")", ":", "def", "scan", "(", "scope", ")", ":", "for", "s", "in...
returns the scope in which the user resides .
train
false
47,791
def dup_expand(polys, K):
    """Multiply together several dense polynomials over domain K.

    An empty sequence yields the constant one polynomial ``[K.one]``.
    """
    if not polys:
        return [K.one]
    product = polys[0]
    for factor in polys[1:]:
        product = dup_mul(product, factor, K)
    return product
[ "def", "dup_expand", "(", "polys", ",", "K", ")", ":", "if", "(", "not", "polys", ")", ":", "return", "[", "K", ".", "one", "]", "f", "=", "polys", "[", "0", "]", "for", "g", "in", "polys", "[", "1", ":", "]", ":", "f", "=", "dup_mul", "(",...
multiply together several polynomials in k[x] .
train
false
47,793
@contextmanager
def check_mongo_calls(num_finds=0, num_sends=None):
    """Context manager asserting exact mongo call counts inside the block.

    num_finds -- exact expected number of 'find' calls
    num_sends -- exact expected number of 'send_message' calls
        (None disables that check)

    Delegates to check_mongo_calls_range with min == max for each count.
    """
    with check_mongo_calls_range(num_finds, num_finds, num_sends, num_sends):
        (yield)
[ "@", "contextmanager", "def", "check_mongo_calls", "(", "num_finds", "=", "0", ",", "num_sends", "=", "None", ")", ":", "with", "check_mongo_calls_range", "(", "num_finds", ",", "num_finds", ",", "num_sends", ",", "num_sends", ")", ":", "(", "yield", ")" ]
instruments the given store to count the number of calls to find and the number of calls to send_message which is for insert .
train
false
47,795
@app.route('/cookies')
def view_cookies(hide_env=True):
    """Return the request's cookies as JSON.

    Cookies listed in ENV_COOKIES are stripped unless the request carries
    a 'show_env' query argument.
    """
    cookies = dict(request.cookies.items())
    if (hide_env and ('show_env' not in request.args)):
        for key in ENV_COOKIES:
            try:
                del cookies[key]
            except KeyError:
                # env cookie not present: nothing to hide
                pass
    return jsonify(cookies=cookies)
[ "@", "app", ".", "route", "(", "'/cookies'", ")", "def", "view_cookies", "(", "hide_env", "=", "True", ")", ":", "cookies", "=", "dict", "(", "request", ".", "cookies", ".", "items", "(", ")", ")", "if", "(", "hide_env", "and", "(", "'show_env'", "no...
returns cookie data .
train
true
47,796
def _parse_acl(acl, user, group): comps = acl.split(':') vals = {} vals['type'] = 'acl' if (comps[0] == 'default'): vals['type'] = 'default' comps.pop(0) if ((comps[0] == 'user') and (not comps[1])): comps[1] = user elif ((comps[0] == 'group') and (not comps[1])): comps[1] = group vals[comps[0]] = comps[1] octal = 0 vals['permissions'] = {} if ('r' in comps[2]): octal += 4 vals['permissions']['read'] = True else: vals['permissions']['read'] = False if ('w' in comps[2]): octal += 2 vals['permissions']['write'] = True else: vals['permissions']['write'] = False if ('x' in comps[2]): octal += 1 vals['permissions']['execute'] = True else: vals['permissions']['execute'] = False vals['octal'] = octal return vals
[ "def", "_parse_acl", "(", "acl", ",", "user", ",", "group", ")", ":", "comps", "=", "acl", ".", "split", "(", "':'", ")", "vals", "=", "{", "}", "vals", "[", "'type'", "]", "=", "'acl'", "if", "(", "comps", "[", "0", "]", "==", "'default'", ")"...
parse a single acl rule .
train
true
47,798
def make_datadog_service(options):
    """Build a DatadogMetricsService for talking to Datadog.

    Reads DATADOG_APP_KEY / DATADOG_API_KEY from the environment (raises
    KeyError when absent) and resolves options['host'] (or the local
    host, when falsy) to a fully-qualified domain name.
    """
    app_key = os.environ['DATADOG_APP_KEY']
    api_key = os.environ['DATADOG_API_KEY']
    host = socket.getfqdn((options['host'] or ''))
    return DatadogMetricsService(api_key=api_key, app_key=app_key, host=host)
[ "def", "make_datadog_service", "(", "options", ")", ":", "app_key", "=", "os", ".", "environ", "[", "'DATADOG_APP_KEY'", "]", "api_key", "=", "os", ".", "environ", "[", "'DATADOG_API_KEY'", "]", "host", "=", "socket", ".", "getfqdn", "(", "(", "options", "...
create a datadog service instance for interacting with datadog .
train
false
47,799
def _get_added_exploration_change_list(exploration_id):
    """Return a one-entry change list adding *exploration_id* to a
    collection (cmd CMD_ADD_COLLECTION_NODE)."""
    return [{'cmd': collection_domain.CMD_ADD_COLLECTION_NODE, 'exploration_id': exploration_id}]
[ "def", "_get_added_exploration_change_list", "(", "exploration_id", ")", ":", "return", "[", "{", "'cmd'", ":", "collection_domain", ".", "CMD_ADD_COLLECTION_NODE", ",", "'exploration_id'", ":", "exploration_id", "}", "]" ]
generates a change list for adding an exploration to a collection .
train
false
47,800
def getMatrixTetragrid(prefix, xmlElement):
    """Build the matrix tetragrid for *xmlElement* by applying the C, M,
    Matrix and R attribute readers in sequence, each receiving the grid
    produced so far (starting from None)."""
    matrixTetragrid = getMatrixTetragridC(None, prefix, xmlElement)
    matrixTetragrid = getMatrixTetragridM(matrixTetragrid, prefix, xmlElement)
    matrixTetragrid = getMatrixTetragridMatrix(matrixTetragrid, prefix, xmlElement)
    matrixTetragrid = getMatrixTetragridR(matrixTetragrid, prefix, xmlElement)
    return matrixTetragrid
[ "def", "getMatrixTetragrid", "(", "prefix", ",", "xmlElement", ")", ":", "matrixTetragrid", "=", "getMatrixTetragridC", "(", "None", ",", "prefix", ",", "xmlElement", ")", "matrixTetragrid", "=", "getMatrixTetragridM", "(", "matrixTetragrid", ",", "prefix", ",", "...
get the matrix tetragrid from the xmlelement .
train
false
47,801
def load_context(context):
    """Populate *context* in place from the saved context file.

    Reads the UTF-8 file at _get_context_filepath() line by line and
    feeds each line to execute(); a missing file is a silent no-op.
    """
    file_path = _get_context_filepath()
    if os.path.exists(file_path):
        with io.open(file_path, encoding='utf-8') as f:
            for line in f:
                execute(line, context)
[ "def", "load_context", "(", "context", ")", ":", "file_path", "=", "_get_context_filepath", "(", ")", "if", "os", ".", "path", ".", "exists", "(", "file_path", ")", ":", "with", "io", ".", "open", "(", "file_path", ",", "encoding", "=", "'utf-8'", ")", ...
load a context object in place from user data directory .
train
true
47,802
def dup_rr_prs_gcd(f, g, K):
    """Compute polynomial GCD over ring K using subresultant PRS.

    f, g -- dense polynomials; K -- coefficient ring.

    Returns (h, cff, cfg): the gcd h plus the cofactors f/h and g/h.
    The gcd carries the gcd of the inputs' contents and is sign-normalized
    so its leading coefficient is non-negative.
    """
    # fast path for trivial cases (zero/constant inputs)
    result = _dup_rr_trivial_gcd(f, g, K)
    if (result is not None):
        return result
    (fc, F) = dup_primitive(f, K)
    (gc, G) = dup_primitive(g, K)
    c = K.gcd(fc, gc)
    # last subresultant is (an associate of) the gcd of the primitive parts
    h = dup_subresultants(F, G, K)[(-1)]
    (_, h) = dup_primitive(h, K)
    if K.is_negative(dup_LC(h, K)):
        c = (- c)
    h = dup_mul_ground(h, c, K)
    cff = dup_quo(f, h, K)
    cfg = dup_quo(g, h, K)
    return (h, cff, cfg)
[ "def", "dup_rr_prs_gcd", "(", "f", ",", "g", ",", "K", ")", ":", "result", "=", "_dup_rr_trivial_gcd", "(", "f", ",", "g", ",", "K", ")", "if", "(", "result", "is", "not", "None", ")", ":", "return", "result", "(", "fc", ",", "F", ")", "=", "du...
computes polynomial gcd using subresultants over a ring .
train
false
47,803
def openblas_threads_text():
    """Return the C header snippet declaring the OpenBLAS thread-control
    functions (set/get thread counts) for inclusion in generated C code."""
    header = '\n extern "C"\n {\n void openblas_set_num_threads(int);\n void goto_set_num_threads(int);\n int openblas_get_num_threads(void);\n }\n '
    return header
[ "def", "openblas_threads_text", "(", ")", ":", "header", "=", "'\\n extern \"C\"\\n {\\n void openblas_set_num_threads(int);\\n void goto_set_num_threads(int);\\n int openblas_get_num_threads(void);\\n }\\n '", "return", "header" ]
c header for openblas threads interface .
train
false
47,804
def make_hidden_alias(argument_table, existing_name, alias_name):
    """Register *alias_name* as an undocumented alias of *existing_name*.

    The copied argument is marked hidden. When the original is required,
    both become optional at parse time (either may satisfy the need) while
    the original is still rendered as required in the docs.
    """
    current = argument_table[existing_name]
    copy_arg = _copy_argument(argument_table, existing_name, alias_name)
    copy_arg._UNDOCUMENTED = True
    if current.required:
        copy_arg.required = False
        current.required = False
        current._DOCUMENT_AS_REQUIRED = True
[ "def", "make_hidden_alias", "(", "argument_table", ",", "existing_name", ",", "alias_name", ")", ":", "current", "=", "argument_table", "[", "existing_name", "]", "copy_arg", "=", "_copy_argument", "(", "argument_table", ",", "existing_name", ",", "alias_name", ")",...
create a hidden alias for an existing argument .
train
false
47,805
def detached(logfile=None, pidfile=None, uid=None, gid=None, umask=0, workdir=None, **opts):
    """Prepare detaching the current process into the background.

    Checks platform support (the 'resource' module), resets SIGCLD, drops
    to uid/gid, eagerly touches the logfile and acquires the pidfile lock
    (each skipped when None), and returns a DaemonContext for the caller
    to enter.

    Raises RuntimeError when the platform cannot detach.
    """
    if (not resource):
        raise RuntimeError('This platform does not support detach.')
    workdir = (os.getcwd() if (workdir is None) else workdir)
    signals.reset('SIGCLD')
    set_effective_user(uid=uid, gid=gid)
    # touch/lock eagerly so failures surface before daemonizing
    (logfile and open(logfile, 'a').close())
    (pidfile and create_pidlock(pidfile))
    return DaemonContext(umask=umask, workdir=workdir)
[ "def", "detached", "(", "logfile", "=", "None", ",", "pidfile", "=", "None", ",", "uid", "=", "None", ",", "gid", "=", "None", ",", "umask", "=", "0", ",", "workdir", "=", "None", ",", "**", "opts", ")", ":", "if", "(", "not", "resource", ")", ...
detach the current process in the background .
train
false
47,806
def coerce_to_dtype(dtype, value):
    """Coerce *value* to the given numpy dtype.

    datetime64 dtypes are special-cased: only day ('[D]') and nanosecond
    ('[ns]') resolutions are supported; other datetime64 units raise
    TypeError. Everything else goes through ``dtype.type(value)``.
    """
    name = dtype.name
    if not name.startswith('datetime64'):
        return dtype.type(value)
    if name == 'datetime64[D]':
        return make_datetime64D(value)
    if name == 'datetime64[ns]':
        return make_datetime64ns(value)
    raise TypeError("Don't know how to coerce values of dtype %s" % dtype)
[ "def", "coerce_to_dtype", "(", "dtype", ",", "value", ")", ":", "name", "=", "dtype", ".", "name", "if", "name", ".", "startswith", "(", "'datetime64'", ")", ":", "if", "(", "name", "==", "'datetime64[D]'", ")", ":", "return", "make_datetime64D", "(", "v...
make a value with the specified numpy dtype .
train
true
47,807
def filename_priority(filename, cover_names):
    """Sort key for cover-image files: the indices of every entry of
    *cover_names* that occurs as a substring of *filename*."""
    positions = []
    for position, candidate in enumerate(cover_names):
        if candidate in filename:
            positions.append(position)
    return positions
[ "def", "filename_priority", "(", "filename", ",", "cover_names", ")", ":", "return", "[", "idx", "for", "(", "idx", ",", "x", ")", "in", "enumerate", "(", "cover_names", ")", "if", "(", "x", "in", "filename", ")", "]" ]
sort order for image names .
train
false
47,808
def pcasvd(data, keepdim=0, demean=True):
    """Principal components of *data* computed via SVD.

    data -- (nobs, nvars) array-like
    keepdim -- number of components to keep; 0 keeps all components and
        returns the input unchanged as xreduced
    demean -- subtract column means before decomposing

    Returns (xreduced, factors, evals, vecs), each truncated to keepdim.
    """
    (nobs, nvars) = data.shape
    x = np.array(data)
    if demean:
        m = x.mean(0)
    else:
        m = 0
    x -= m
    (U, s, v) = np.linalg.svd(x.T, full_matrices=1)
    factors = np.dot(U.T, x.T).T
    if keepdim:
        # reconstruct data from the kept components
        xreduced = (np.dot(factors[:, :keepdim], U[:, :keepdim].T) + m)
    else:
        xreduced = data
        keepdim = nvars
        # NOTE(review): dead tuple below is a leftover of a debug print
        ('print reassigning keepdim to max', keepdim)
    evals = ((s ** 2) / (x.shape[0] - 1))
    return (xreduced, factors[:, :keepdim], evals[:keepdim], U[:, :keepdim])
[ "def", "pcasvd", "(", "data", ",", "keepdim", "=", "0", ",", "demean", "=", "True", ")", ":", "(", "nobs", ",", "nvars", ")", "=", "data", ".", "shape", "x", "=", "np", ".", "array", "(", "data", ")", "if", "demean", ":", "m", "=", "x", ".", ...
principal components with svd parameters data : ndarray .
train
false
47,811
def run_test_on_partitions(job, test, partitions, mountpoint_func, tag, fs_opt, do_fsck=True, **dargs):
    """Run *test* across several partitions sharing filesystem options.

    Applies fs_opt to every partition, mounts them in parallel via
    mountpoint_func, runs the job's test against the first partition's
    mountpoint, unmounts all, and optionally fscks.
    """
    for p in partitions:
        p.set_fs_options(fs_opt)
    parallel(partitions, 'setup_before_test', mountpoint_func=mountpoint_func)
    # the test is pointed at the first partition's mountpoint only
    mountpoint = mountpoint_func(partitions[0])
    job.run_test(test, tag=tag, partitions=partitions, dir=mountpoint, **dargs)
    parallel(partitions, 'unmount')
    if do_fsck:
        parallel(partitions, 'fsck')
[ "def", "run_test_on_partitions", "(", "job", ",", "test", ",", "partitions", ",", "mountpoint_func", ",", "tag", ",", "fs_opt", ",", "do_fsck", "=", "True", ",", "**", "dargs", ")", ":", "for", "p", "in", "partitions", ":", "p", ".", "set_fs_options", "(...
run a test that requires multiple partitions .
train
false
47,813
def parseXML(xml):
    """Parse an XML string with BeautifulStoneSoup (XHTML entity
    conversion) and return parseTags() of its first tag, or None when the
    document is empty or has no tags."""
    xmlObj = BeautifulSoup.BeautifulStoneSoup(xml, convertEntities=BeautifulSoup.BeautifulStoneSoup.XHTML_ENTITIES)
    if xmlObj:
        mainTag = xmlObj.find()
        if mainTag:
            return parseTags(mainTag)
    return None
[ "def", "parseXML", "(", "xml", ")", ":", "xmlObj", "=", "BeautifulSoup", ".", "BeautifulStoneSoup", "(", "xml", ",", "convertEntities", "=", "BeautifulSoup", ".", "BeautifulStoneSoup", ".", "XHTML_ENTITIES", ")", "if", "xmlObj", ":", "mainTag", "=", "xmlObj", ...
parse a xml string .
train
false
47,814
def test_blackbody_synphot():
    """Check blackbody_nu against IRAF synphot bbfunc reference values.

    A 5000 K blackbody is scaled by the solid angle of a solar-radius
    sphere at 1 kpc; selected wavelengths are compared at 1% tolerance.
    """
    fac = ((np.pi * ((const.R_sun / const.kpc) ** 2)) * u.sr)
    # suppress numpy warnings from extreme exponents at short wavelengths
    with np.errstate(all=u'ignore'):
        flux = (blackbody_nu(([100, 1, 1000, 10000.0, 100000.0] * u.AA), 5000) * fac)
    assert (flux.unit == FNU)
    assert (np.log10(flux[0].value) < (-143))
    np.testing.assert_allclose(flux.value[1:], [0, 2.01950807e-34, 3.78584515e-26, 1.90431881e-27], rtol=0.01)
[ "def", "test_blackbody_synphot", "(", ")", ":", "fac", "=", "(", "(", "np", ".", "pi", "*", "(", "(", "const", ".", "R_sun", "/", "const", ".", "kpc", ")", "**", "2", ")", ")", "*", "u", ".", "sr", ")", "with", "np", ".", "errstate", "(", "al...
test that it is consistent with iraf synphot bbfunc .
train
false
47,815
def _get_modules_conf():
    """Return the kernel modules config file path: the salt-managed
    systemd drop-in on systemd hosts, otherwise /etc/modules."""
    if ('systemd' in __grains__):
        return '/etc/modules-load.d/salt_managed.conf'
    return '/etc/modules'
[ "def", "_get_modules_conf", "(", ")", ":", "if", "(", "'systemd'", "in", "__grains__", ")", ":", "return", "'/etc/modules-load.d/salt_managed.conf'", "return", "'/etc/modules'" ]
return location of modules config file .
train
false
47,816
def _process_file(full_path, template_linters, options, summary_results, out): num_violations = 0 directory = os.path.dirname(full_path) file_name = os.path.basename(full_path) for template_linter in template_linters: results = template_linter.process_file(directory, file_name) results.print_results(options, summary_results, out)
[ "def", "_process_file", "(", "full_path", ",", "template_linters", ",", "options", ",", "summary_results", ",", "out", ")", ":", "num_violations", "=", "0", "directory", "=", "os", ".", "path", ".", "dirname", "(", "full_path", ")", "file_name", "=", "os", ...
for each linter .
train
false
47,817
def pattern_convert(grammar, raw_node_info):
    """Convert raw (type, value, context, children) node info into a
    pytree Node (when it has children or names a nonterminal symbol)
    or a pytree Leaf otherwise."""
    node_type, value, context, children = raw_node_info
    is_internal = bool(children) or node_type in grammar.number2symbol
    if is_internal:
        return pytree.Node(node_type, children, context=context)
    return pytree.Leaf(node_type, value, context=context)
[ "def", "pattern_convert", "(", "grammar", ",", "raw_node_info", ")", ":", "(", "type", ",", "value", ",", "context", ",", "children", ")", "=", "raw_node_info", "if", "(", "children", "or", "(", "type", "in", "grammar", ".", "number2symbol", ")", ")", ":...
converts raw node information to a node or leaf instance .
train
true
47,818
def getElementsByTagName(iNode, name):
    """Return all descendants of *iNode* (including iNode itself) whose
    nodeName equals *name*, in depth-first pre-order.

    iNode -- DOM-like node exposing .nodeName and .childNodes
    name  -- tag name to match
    """
    from collections import deque
    matches = []
    # FIXES: the original named its work list 'slice' (shadowing the
    # builtin) and used list.pop(0), which is O(n) per pop; deque.popleft
    # is O(1). Traversal order is preserved: prepending children keeps
    # depth-first pre-order.
    pending = deque([iNode])
    while pending:
        node = pending.popleft()
        if node.nodeName == name:
            matches.append(node)
        pending.extendleft(reversed(node.childNodes))
    return matches
[ "def", "getElementsByTagName", "(", "iNode", ",", "name", ")", ":", "matches", "=", "[", "]", "matches_append", "=", "matches", ".", "append", "slice", "=", "[", "iNode", "]", "while", "(", "len", "(", "slice", ")", ">", "0", ")", ":", "c", "=", "s...
return a list of all child elements of c{inode} with a name matching c{name} .
train
false
47,819
def round_to_seconds(dt):
    """Round datetime *dt* to the nearest whole second.

    Microsecond values of 500000 and above round up; below round down.
    """
    bump = 1 if dt.microsecond >= 500000 else 0
    return dt + datetime.timedelta(seconds=bump, microseconds=-dt.microsecond)
[ "def", "round_to_seconds", "(", "dt", ")", ":", "rounding", "=", "0", "if", "(", "dt", ".", "microsecond", ">=", "500000", ")", ":", "rounding", "=", "1", "return", "(", "dt", "+", "datetime", ".", "timedelta", "(", "0", ",", "rounding", ",", "(", ...
round a datetime to the nearest second .
train
false
47,820
def test_ascii_fancy():
    """Slugifying a punctuated ASCII sentence yields the expected slug and
    a unicode string instance."""
    o = nikola.utils.slugify(u'The quick brown fox jumps over the lazy dog!-123.456', lang=u'en')
    assert (o == u'the-quick-brown-fox-jumps-over-the-lazy-dog-123456')
    assert isinstance(o, nikola.utils.unicode_str)
[ "def", "test_ascii_fancy", "(", ")", ":", "o", "=", "nikola", ".", "utils", ".", "slugify", "(", "u'The quick brown fox jumps over the lazy dog!-123.456'", ",", "lang", "=", "u'en'", ")", "assert", "(", "o", "==", "u'the-quick-brown-fox-jumps-over-the-lazy-dog-123456'",...
test an ascii string .
train
false
47,823
def _mkdir(newdir): if os.path.isdir(newdir): pass elif os.path.isfile(newdir): raise OSError(("a file with the same name as the desired dir, '%s', already exists." % newdir)) else: (head, tail) = os.path.split(newdir) if (head and (not os.path.isdir(head))): _mkdir(head) if tail: os.mkdir(newdir)
[ "def", "_mkdir", "(", "newdir", ")", ":", "if", "os", ".", "path", ".", "isdir", "(", "newdir", ")", ":", "pass", "elif", "os", ".", "path", ".", "isfile", "(", "newdir", ")", ":", "raise", "OSError", "(", "(", "\"a file with the same name as the desired...
works the way a good mkdir should :) - already exists .
train
true
47,824
def need():
    """RESTful CRUD controller for the 'need' resource; delegates entirely
    to s3_rest_controller()."""
    return s3_rest_controller()
[ "def", "need", "(", ")", ":", "return", "s3_rest_controller", "(", ")" ]
restful crud controller .
train
false
47,825
def _removeOldBackRefs(senderkey, signal, receiver, receivers):
    """Drop a previous registration of *receiver* from *receivers*.

    Guards against a receiver being registered twice for the same
    (signal, sender) leaking memory: the old entry is removed and, when
    the receiver appears in no other receiver list for the signal, its
    back reference is killed.

    Returns True when an old back reference was killed, else False.
    """
    try:
        index = receivers.index(receiver)
    except ValueError:
        # receiver was not previously registered
        return False
    else:
        oldReceiver = receivers[index]
        del receivers[index]
        found = 0
        signals = connections.get(signal)
        if (signals is not None):
            # look for the same receiver object under other signals
            for (sig, recs) in connections.get(signal, {}).iteritems():
                if (sig != signal):
                    for rec in recs:
                        if (rec is oldReceiver):
                            found = 1
                            break
            if (not found):
                _killBackref(oldReceiver, senderkey)
                return True
        return False
[ "def", "_removeOldBackRefs", "(", "senderkey", ",", "signal", ",", "receiver", ",", "receivers", ")", ":", "try", ":", "index", "=", "receivers", ".", "index", "(", "receiver", ")", "except", "ValueError", ":", "return", "False", "else", ":", "oldReceiver", ...
kill old sendersback references from receiver this guards against multiple registration of the same receiver for a given signal and sender leaking memory as old back reference records build up .
train
true
47,828
def draw():
    """Redraw the current figure by asking its figure manager's canvas to draw."""
    get_current_fig_manager().canvas.draw()
[ "def", "draw", "(", ")", ":", "get_current_fig_manager", "(", ")", ".", "canvas", ".", "draw", "(", ")" ]
redraw the current figure .
train
false
47,831
def _get_table_str(table): table_str = '' col_size = [max((len(str(val)) for val in column)) for column in zip(*table)] for line in table: table_str += '\n' table_str += ' '.join(('{0:<{1}}'.format(val, col_size[i]) for (i, val) in enumerate(line))) return table_str
[ "def", "_get_table_str", "(", "table", ")", ":", "table_str", "=", "''", "col_size", "=", "[", "max", "(", "(", "len", "(", "str", "(", "val", ")", ")", "for", "val", "in", "column", ")", ")", "for", "column", "in", "zip", "(", "*", "table", ")",...
pretty print a table provided as a list of lists .
train
false
47,835
def json_hook(obj): if ('$dt' in obj): x = obj['$dt'] return datetime.strptime(x, '%Y-%m-%dT%H:%M:%S.%f') return obj
[ "def", "json_hook", "(", "obj", ")", ":", "if", "(", "'$dt'", "in", "obj", ")", ":", "x", "=", "obj", "[", "'$dt'", "]", "return", "datetime", ".", "strptime", "(", "x", ",", "'%Y-%m-%dT%H:%M:%S.%f'", ")", "return", "obj" ]
json object hook .
train
false
47,836
def bi_zeros(nt): kf = 2 if ((not isscalar(nt)) or (floor(nt) != nt) or (nt <= 0)): raise ValueError('nt must be a positive integer scalar.') return specfun.airyzo(nt, kf)
[ "def", "bi_zeros", "(", "nt", ")", ":", "kf", "=", "2", "if", "(", "(", "not", "isscalar", "(", "nt", ")", ")", "or", "(", "floor", "(", "nt", ")", "!=", "nt", ")", "or", "(", "nt", "<=", "0", ")", ")", ":", "raise", "ValueError", "(", "'nt...
compute nt zeros and values of the airy function bi and its derivative .
train
false
47,837
def l2norm(a): return np.sqrt(np.sum((np.abs(a) ** 2)))
[ "def", "l2norm", "(", "a", ")", ":", "return", "np", ".", "sqrt", "(", "np", ".", "sum", "(", "(", "np", ".", "abs", "(", "a", ")", "**", "2", ")", ")", ")" ]
return the *l2* norm of *a* .
train
false
47,838
@register.as_tag def signup_form(*args): return get_profile_form()()
[ "@", "register", ".", "as_tag", "def", "signup_form", "(", "*", "args", ")", ":", "return", "get_profile_form", "(", ")", "(", ")" ]
returns the signup form: {% signup_form as form %} {{ form }} .
train
false
47,840
def getSubDirectories(testdir): subdirs = [os.path.join(testdir, d) for d in filter(os.path.isdir, [os.path.join(testdir, dd) for dd in os.listdir(testdir)])] for d in copy(subdirs): subdirs.extend(getSubDirectories(os.path.join(testdir, d))) return subdirs
[ "def", "getSubDirectories", "(", "testdir", ")", ":", "subdirs", "=", "[", "os", ".", "path", ".", "join", "(", "testdir", ",", "d", ")", "for", "d", "in", "filter", "(", "os", ".", "path", ".", "isdir", ",", "[", "os", ".", "path", ".", "join", ...
recursively builds a list of all subdirectories in the test suite .
train
false
47,842
def pathlist2filename(pathlist): fullpath = '' for elem in pathlist: fullpath = os.path.join(fullpath, elem) try: return fullpath.decode('utf-8') except UnicodeDecodeError: charenc = chardet.detect(fullpath)['encoding'] return fullpath.decode(charenc)
[ "def", "pathlist2filename", "(", "pathlist", ")", ":", "fullpath", "=", "''", "for", "elem", "in", "pathlist", ":", "fullpath", "=", "os", ".", "path", ".", "join", "(", "fullpath", ",", "elem", ")", "try", ":", "return", "fullpath", ".", "decode", "("...
convert a multi-file torrent file path entry to a filename .
train
false
47,845
def test_power_schedule(backend_default): sch = PowerSchedule(step_config=2, change=0.5) target_lr = [1.0, 1.0, 0.5, 0.5, 0.25, 0.25, 0.125, 0.125] for (e, lr) in enumerate(target_lr): assert np.allclose(lr, sch.get_learning_rate(learning_rate=1.0, epoch=e))
[ "def", "test_power_schedule", "(", "backend_default", ")", ":", "sch", "=", "PowerSchedule", "(", "step_config", "=", "2", ",", "change", "=", "0.5", ")", "target_lr", "=", "[", "1.0", ",", "1.0", ",", "0.5", ",", "0.5", ",", "0.25", ",", "0.25", ",", ...
test the powerschedule class .
train
false
47,846
@testing.requires_testing_data def test_morph_source_spaces(): src = read_source_spaces(fname_fs) src_morph = read_source_spaces(fname_morph) src_morph_py = morph_source_spaces(src, 'sample', subjects_dir=subjects_dir) _compare_source_spaces(src_morph, src_morph_py, mode='approx')
[ "@", "testing", ".", "requires_testing_data", "def", "test_morph_source_spaces", "(", ")", ":", "src", "=", "read_source_spaces", "(", "fname_fs", ")", "src_morph", "=", "read_source_spaces", "(", "fname_morph", ")", "src_morph_py", "=", "morph_source_spaces", "(", ...
test morphing of source spaces .
train
false
47,851
@world.absorb def css_has_text(css_selector, text, index=0, strip=False): if text: wait_for((lambda _: css_text(css_selector, index=index))) actual_text = css_text(css_selector, index=index) if strip: actual_text = actual_text.strip() text = text.strip() return (actual_text == text)
[ "@", "world", ".", "absorb", "def", "css_has_text", "(", "css_selector", ",", "text", ",", "index", "=", "0", ",", "strip", "=", "False", ")", ":", "if", "text", ":", "wait_for", "(", "(", "lambda", "_", ":", "css_text", "(", "css_selector", ",", "in...
return a boolean indicating whether the element with css_selector has text .
train
false
47,852
def _formatparam(param, value=None, quote=True): if ((value is not None) and (len(value) > 0)): if isinstance(value, tuple): param += '*' value = utils.encode_rfc2231(value[2], value[0], value[1]) if (quote or tspecials.search(value)): return ('%s="%s"' % (param, utils.quote(value))) else: return ('%s=%s' % (param, value)) else: return param
[ "def", "_formatparam", "(", "param", ",", "value", "=", "None", ",", "quote", "=", "True", ")", ":", "if", "(", "(", "value", "is", "not", "None", ")", "and", "(", "len", "(", "value", ")", ">", "0", ")", ")", ":", "if", "isinstance", "(", "val...
convenience function to format and return a key=value pair .
train
true
47,853
def get_test_data(delta=0.05): from matplotlib.mlab import bivariate_normal x = y = np.arange((-3.0), 3.0, delta) (X, Y) = np.meshgrid(x, y) Z1 = bivariate_normal(X, Y, 1.0, 1.0, 0.0, 0.0) Z2 = bivariate_normal(X, Y, 1.5, 0.5, 1, 1) Z = (Z2 - Z1) X = (X * 10) Y = (Y * 10) Z = (Z * 500) return (X, Y, Z)
[ "def", "get_test_data", "(", "delta", "=", "0.05", ")", ":", "from", "matplotlib", ".", "mlab", "import", "bivariate_normal", "x", "=", "y", "=", "np", ".", "arange", "(", "(", "-", "3.0", ")", ",", "3.0", ",", "delta", ")", "(", "X", ",", "Y", "...
return a tuple x .
train
false
47,854
def _breadcrumbs(location=None, first_caption='Planet Earth'): breadcrumbs = [(first_caption, reverse(locations))] if (location is not None): for loc in location.path: url = reverse(locations, args=(loc.uid,)) breadcrumbs.append((loc, url)) return breadcrumbs
[ "def", "_breadcrumbs", "(", "location", "=", "None", ",", "first_caption", "=", "'Planet Earth'", ")", ":", "breadcrumbs", "=", "[", "(", "first_caption", ",", "reverse", "(", "locations", ")", ")", "]", "if", "(", "location", "is", "not", "None", ")", "...
return the breadcrumb trail leading to location .
train
false
47,855
def _compress(compress): if (compress in ('bz2', 'bzip2', 'j')): compression = 'j' ext = 'bz2' elif (compress in ('gz', 'gzip', 'z')): compression = 'z' ext = 'gz' elif (compress in ('xz', 'a', 'J')): compression = 'J' ext = 'xz' return (compression, ext)
[ "def", "_compress", "(", "compress", ")", ":", "if", "(", "compress", "in", "(", "'bz2'", ",", "'bzip2'", ",", "'j'", ")", ")", ":", "compression", "=", "'j'", "ext", "=", "'bz2'", "elif", "(", "compress", "in", "(", "'gz'", ",", "'gzip'", ",", "'z...
resolve compression flags .
train
true
47,856
@verbose def make_ad_hoc_cov(info, verbose=None): info = pick_info(info, pick_types(info, meg=True, eeg=True, exclude=[])) info._check_consistency() grad_std = 5e-13 mag_std = 2e-14 eeg_std = 2e-07 logger.info(('Using standard noise values (MEG grad : %6.1f fT/cm MEG mag : %6.1f fT EEG : %6.1f uV)' % ((10000000000000.0 * grad_std), (1000000000000000.0 * mag_std), (1000000.0 * eeg_std)))) data = np.zeros(len(info['ch_names'])) for (meg, eeg, val) in zip(('grad', 'mag', False), (False, False, True), (grad_std, mag_std, eeg_std)): data[pick_types(info, meg=meg, eeg=eeg)] = (val * val) return Covariance(data, info['ch_names'], info['bads'], info['projs'], nfree=0)
[ "@", "verbose", "def", "make_ad_hoc_cov", "(", "info", ",", "verbose", "=", "None", ")", ":", "info", "=", "pick_info", "(", "info", ",", "pick_types", "(", "info", ",", "meg", "=", "True", ",", "eeg", "=", "True", ",", "exclude", "=", "[", "]", ")...
create an ad hoc noise covariance .
train
false
47,858
def cache_ns_key(namespace, increment=False): ns_key = ('ns:%s' % namespace) if increment: try: ns_val = cache.incr(ns_key) except ValueError: log.info(('Cache increment failed for key: %s. Resetting.' % ns_key)) ns_val = epoch(datetime.datetime.now()) cache.set(ns_key, ns_val, None) else: ns_val = cache.get(ns_key) if (ns_val is None): ns_val = epoch(datetime.datetime.now()) cache.set(ns_key, ns_val, None) return ('%s:%s' % (ns_val, ns_key))
[ "def", "cache_ns_key", "(", "namespace", ",", "increment", "=", "False", ")", ":", "ns_key", "=", "(", "'ns:%s'", "%", "namespace", ")", "if", "increment", ":", "try", ":", "ns_val", "=", "cache", ".", "incr", "(", "ns_key", ")", "except", "ValueError", ...
returns a key with namespace value appended .
train
false
47,860
def rmsle(actual, predicted): return np.sqrt(msle(actual, predicted))
[ "def", "rmsle", "(", "actual", ",", "predicted", ")", ":", "return", "np", ".", "sqrt", "(", "msle", "(", "actual", ",", "predicted", ")", ")" ]
computes the root mean squared log error .
train
false
47,861
def wrap_exceptions(fun): def wrapper(self, *args, **kwargs): try: return fun(self, *args, **kwargs) except EnvironmentError as err: if (self.pid == 0): if (0 in pids()): raise AccessDenied(self.pid, self._name) else: raise if (err.errno in (errno.ENOENT, errno.ESRCH)): if (not pid_exists(self.pid)): raise NoSuchProcess(self.pid, self._name) else: raise ZombieProcess(self.pid, self._name, self._ppid) if (err.errno in (errno.EPERM, errno.EACCES)): raise AccessDenied(self.pid, self._name) raise return wrapper
[ "def", "wrap_exceptions", "(", "fun", ")", ":", "def", "wrapper", "(", "self", ",", "*", "args", ",", "**", "kwargs", ")", ":", "try", ":", "return", "fun", "(", "self", ",", "*", "args", ",", "**", "kwargs", ")", "except", "EnvironmentError", "as", ...
call callable into a try/except clause and translate enoent .
train
false
47,862
def register_optimizer(name, opt): if (name in predefined_optimizers): raise ValueError(('Optimizer name already taken: %s' % name)) predefined_optimizers[name] = opt
[ "def", "register_optimizer", "(", "name", ",", "opt", ")", ":", "if", "(", "name", "in", "predefined_optimizers", ")", ":", "raise", "ValueError", "(", "(", "'Optimizer name already taken: %s'", "%", "name", ")", ")", "predefined_optimizers", "[", "name", "]", ...
add a optimizer which can be referred to by name in mode .
train
false
47,865
def njit(*args, **kws): if ('nopython' in kws): warnings.warn('nopython is set for njit and is ignored', RuntimeWarning) if ('forceobj' in kws): warnings.warn('forceobj is set for njit and is ignored', RuntimeWarning) kws.update({'nopython': True}) return jit(*args, **kws)
[ "def", "njit", "(", "*", "args", ",", "**", "kws", ")", ":", "if", "(", "'nopython'", "in", "kws", ")", ":", "warnings", ".", "warn", "(", "'nopython is set for njit and is ignored'", ",", "RuntimeWarning", ")", "if", "(", "'forceobj'", "in", "kws", ")", ...
equivalent to jit see documentation for jit function/decorator for full description .
train
false
47,868
def output_xml(data, code, headers=None): resp = make_response(dumps({'response': data}), code) resp.headers.extend((headers or {})) return resp
[ "def", "output_xml", "(", "data", ",", "code", ",", "headers", "=", "None", ")", ":", "resp", "=", "make_response", "(", "dumps", "(", "{", "'response'", ":", "data", "}", ")", ",", "code", ")", "resp", ".", "headers", ".", "extend", "(", "(", "hea...
makes a flask response with a xml encoded body .
train
true
47,869
def getAbsoluteFolderPath(filePath, folderName=''): absoluteFolderPath = os.path.dirname(os.path.abspath(filePath)) if (folderName == ''): return absoluteFolderPath return os.path.join(absoluteFolderPath, folderName)
[ "def", "getAbsoluteFolderPath", "(", "filePath", ",", "folderName", "=", "''", ")", ":", "absoluteFolderPath", "=", "os", ".", "path", ".", "dirname", "(", "os", ".", "path", ".", "abspath", "(", "filePath", ")", ")", "if", "(", "folderName", "==", "''",...
get the absolute folder path .
train
false
47,870
def values(): data = salt.config.master_config(__opts__['conf_file']) return data
[ "def", "values", "(", ")", ":", "data", "=", "salt", ".", "config", ".", "master_config", "(", "__opts__", "[", "'conf_file'", "]", ")", "return", "data" ]
returns a dictionary of field names to statistical values for a particular category id defined in idmap .
train
false
47,871
def isNumPosStrValue(value): return ((value and isinstance(value, basestring) and value.isdigit() and (int(value) > 0)) or (isinstance(value, int) and (value > 0)))
[ "def", "isNumPosStrValue", "(", "value", ")", ":", "return", "(", "(", "value", "and", "isinstance", "(", "value", ",", "basestring", ")", "and", "value", ".", "isdigit", "(", ")", "and", "(", "int", "(", "value", ")", ">", "0", ")", ")", "or", "("...
returns true if value is a string with a positive integer representation .
train
false
47,872
def file_descriptors_used(pid): try: pid = int(pid) if (pid < 0): raise IOError(("Process pids can't be negative: %s" % pid)) except (ValueError, TypeError): raise IOError(('Process pid was non-numeric: %s' % pid)) try: return len(os.listdir(('/proc/%i/fd' % pid))) except Exception as exc: raise IOError(('Unable to check number of file descriptors used: %s' % exc))
[ "def", "file_descriptors_used", "(", "pid", ")", ":", "try", ":", "pid", "=", "int", "(", "pid", ")", "if", "(", "pid", "<", "0", ")", ":", "raise", "IOError", "(", "(", "\"Process pids can't be negative: %s\"", "%", "pid", ")", ")", "except", "(", "Va...
provides the number of file descriptors currently being used by a process .
train
false
47,873
def locale_details(request, locale_code, leader_form=None, reviewer_form=None, editor_form=None): locale = get_object_or_404(Locale, locale=locale_code) leaders = locale.leaders.all().select_related('profile') reviewers = locale.reviewers.all().select_related('profile') editors = locale.editors.all().select_related('profile') active = active_contributors(from_date=(date.today() - timedelta(days=90)), locale=locale_code) user_can_edit = _user_can_edit(request.user, locale) return render(request, 'wiki/locale_details.html', {'locale': locale, 'leaders': leaders, 'reviewers': reviewers, 'editors': editors, 'active': active, 'user_can_edit': user_can_edit, 'leader_form': (leader_form or AddUserForm()), 'reviewer_form': (reviewer_form or AddUserForm()), 'editor_form': (editor_form or AddUserForm())})
[ "def", "locale_details", "(", "request", ",", "locale_code", ",", "leader_form", "=", "None", ",", "reviewer_form", "=", "None", ",", "editor_form", "=", "None", ")", ":", "locale", "=", "get_object_or_404", "(", "Locale", ",", "locale", "=", "locale_code", ...
show the locale details page .
train
false
47,876
def save_signal(sender, instance, **kw): if (not kw.get('raw')): save_translations(make_key(instance))
[ "def", "save_signal", "(", "sender", ",", "instance", ",", "**", "kw", ")", ":", "if", "(", "not", "kw", ".", "get", "(", "'raw'", ")", ")", ":", "save_translations", "(", "make_key", "(", "instance", ")", ")" ]
use this signal on a model to iterate through all the translations added to the hold queue and save them all .
train
false
47,877
def ensure_workdir_exists(context): ensure_context_attribute_exists(context, 'workdir', None) if (not context.workdir): context.workdir = os.path.abspath(WORKDIR) pathutil.ensure_directory_exists(context.workdir)
[ "def", "ensure_workdir_exists", "(", "context", ")", ":", "ensure_context_attribute_exists", "(", "context", ",", "'workdir'", ",", "None", ")", "if", "(", "not", "context", ".", "workdir", ")", ":", "context", ".", "workdir", "=", "os", ".", "path", ".", ...
ensures that the work directory exists .
train
true
47,878
def vrrp_config_change(app, instance_name, priority=None, advertisement_interval=None, preempt_mode=None, accept_mode=None): config_change = vrrp_event.EventVRRPConfigChangeRequest(instance_name, priority, advertisement_interval, preempt_mode, accept_mode) return app.send_event(vrrp_event.VRRP_MANAGER_NAME, config_change)
[ "def", "vrrp_config_change", "(", "app", ",", "instance_name", ",", "priority", "=", "None", ",", "advertisement_interval", "=", "None", ",", "preempt_mode", "=", "None", ",", "accept_mode", "=", "None", ")", ":", "config_change", "=", "vrrp_event", ".", "Even...
change configuration of an instance .
train
true
47,879
def pnio_update_config(config): conf.contribs['PNIO_RTC'].update(config)
[ "def", "pnio_update_config", "(", "config", ")", ":", "conf", ".", "contribs", "[", "'PNIO_RTC'", "]", ".", "update", "(", "config", ")" ]
update the pnio rtc config .
train
false
47,881
@contextlib.contextmanager def unset_organization(): qapp = QApplication.instance() orgname = qapp.organizationName() qapp.setOrganizationName(None) try: (yield) finally: qapp.setOrganizationName(orgname)
[ "@", "contextlib", ".", "contextmanager", "def", "unset_organization", "(", ")", ":", "qapp", "=", "QApplication", ".", "instance", "(", ")", "orgname", "=", "qapp", ".", "organizationName", "(", ")", "qapp", ".", "setOrganizationName", "(", "None", ")", "tr...
temporarily unset qapplication .
train
false
47,882
def getLoopsWithCorners(corners, importRadius, loops, pointTable): for corner in corners: if (corner not in pointTable): addWithLeastLength(importRadius, loops, corner) pointTable[corner] = None return euclidean.getSimplifiedLoops(loops, importRadius)
[ "def", "getLoopsWithCorners", "(", "corners", ",", "importRadius", ",", "loops", ",", "pointTable", ")", ":", "for", "corner", "in", "corners", ":", "if", "(", "corner", "not", "in", "pointTable", ")", ":", "addWithLeastLength", "(", "importRadius", ",", "lo...
add corners to the loops .
train
false
47,884
def _all_tags(repo): return [x for x in repo.tags() if (x[0] != 'tip')]
[ "def", "_all_tags", "(", "repo", ")", ":", "return", "[", "x", "for", "x", "in", "repo", ".", "tags", "(", ")", "if", "(", "x", "[", "0", "]", "!=", "'tip'", ")", "]" ]
returns all tags for the specified repo .
train
false
47,885
def debug_unittest(testclass): run_unittest(testclass, debug=1)
[ "def", "debug_unittest", "(", "testclass", ")", ":", "run_unittest", "(", "testclass", ",", "debug", "=", "1", ")" ]
debug tests from a unittest .
train
false
47,886
def fixed_ip_get_by_network_host(context, network_uuid, host): return IMPL.fixed_ip_get_by_network_host(context, network_uuid, host)
[ "def", "fixed_ip_get_by_network_host", "(", "context", ",", "network_uuid", ",", "host", ")", ":", "return", "IMPL", ".", "fixed_ip_get_by_network_host", "(", "context", ",", "network_uuid", ",", "host", ")" ]
get fixed ip for a host in a network .
train
false
47,887
def build_article(url='', config=None, **kwargs): config = (config or Configuration()) config = extend_config(config, kwargs) url = (url or '') a = Article(url, config=config) return a
[ "def", "build_article", "(", "url", "=", "''", ",", "config", "=", "None", ",", "**", "kwargs", ")", ":", "config", "=", "(", "config", "or", "Configuration", "(", ")", ")", "config", "=", "extend_config", "(", "config", ",", "kwargs", ")", "url", "=...
returns a constructed article object without downloading or parsing .
train
false
47,889
def S_hac_simple(x, nlags=None, weights_func=weights_bartlett): if (x.ndim == 1): x = x[:, None] n_periods = x.shape[0] if (nlags is None): nlags = int(np.floor((4 * ((n_periods / 100.0) ** (2.0 / 9.0))))) weights = weights_func(nlags) S = (weights[0] * np.dot(x.T, x)) for lag in range(1, (nlags + 1)): s = np.dot(x[lag:].T, x[:(- lag)]) S += (weights[lag] * (s + s.T)) return S
[ "def", "S_hac_simple", "(", "x", ",", "nlags", "=", "None", ",", "weights_func", "=", "weights_bartlett", ")", ":", "if", "(", "x", ".", "ndim", "==", "1", ")", ":", "x", "=", "x", "[", ":", ",", "None", "]", "n_periods", "=", "x", ".", "shape", ...
inner covariance matrix for hac sandwich assumes we have a single time series with zero axis consecutive .
train
false
47,890
def check_program(prog): tmp = init_app('which {0}'.format(prog)) if ((len(tmp) > 0) and ('/' in tmp)): return True else: return False
[ "def", "check_program", "(", "prog", ")", ":", "tmp", "=", "init_app", "(", "'which {0}'", ".", "format", "(", "prog", ")", ")", "if", "(", "(", "len", "(", "tmp", ")", ">", "0", ")", "and", "(", "'/'", "in", "tmp", ")", ")", ":", "return", "Tr...
check if program is installed and pathed properly .
train
false
47,892
def pop_bilateral(image, selem, out=None, mask=None, shift_x=False, shift_y=False, s0=10, s1=10): return _apply(bilateral_cy._pop, image, selem, out=out, mask=mask, shift_x=shift_x, shift_y=shift_y, s0=s0, s1=s1)
[ "def", "pop_bilateral", "(", "image", ",", "selem", ",", "out", "=", "None", ",", "mask", "=", "None", ",", "shift_x", "=", "False", ",", "shift_y", "=", "False", ",", "s0", "=", "10", ",", "s1", "=", "10", ")", ":", "return", "_apply", "(", "bil...
return the local number of pixels .
train
false
47,893
def _get_data(): raw = read_raw_fif(raw_fname).crop(0.0, 5.0).load_data() data_picks = pick_types(raw.info, meg=True, eeg=True) other_picks = pick_types(raw.info, meg=False, stim=True, eog=True) picks = np.sort(np.concatenate((data_picks[::16], other_picks))) raw = raw.pick_channels([raw.ch_names[p] for p in picks]) raw.info.normalize_proj() ecg = RawArray(np.zeros((1, len(raw.times))), create_info(['ECG 063'], raw.info['sfreq'], 'ecg')) for key in ('dev_head_t', 'buffer_size_sec', 'highpass', 'lowpass', 'dig'): ecg.info[key] = raw.info[key] raw.add_channels([ecg]) src = read_source_spaces(src_fname) trans = read_trans(trans_fname) sphere = make_sphere_model('auto', 'auto', raw.info) stc = _make_stc(raw, src) return (raw, src, stc, trans, sphere)
[ "def", "_get_data", "(", ")", ":", "raw", "=", "read_raw_fif", "(", "raw_fname", ")", ".", "crop", "(", "0.0", ",", "5.0", ")", ".", "load_data", "(", ")", "data_picks", "=", "pick_types", "(", "raw", ".", "info", ",", "meg", "=", "True", ",", "eeg...
helper to get the data array .
train
false
47,894
def _var_acf(coefs, sig_u): (p, k, k2) = coefs.shape assert (k == k2) A = util.comp_matrix(coefs) SigU = np.zeros(((k * p), (k * p))) SigU[:k, :k] = sig_u vecACF = L.solve((np.eye(((k * p) ** 2)) - np.kron(A, A)), vec(SigU)) acf = unvec(vecACF) acf = acf[:k].T.reshape((p, k, k)) return acf
[ "def", "_var_acf", "(", "coefs", ",", "sig_u", ")", ":", "(", "p", ",", "k", ",", "k2", ")", "=", "coefs", ".", "shape", "assert", "(", "k", "==", "k2", ")", "A", "=", "util", ".", "comp_matrix", "(", "coefs", ")", "SigU", "=", "np", ".", "ze...
compute autocovariance function acf_y(h) for h=1 .
train
false
47,895
def parse_multistring(db_string): if (not isinstance(db_string, basestring)): raise ValueError('Parsing into a multistring requires a string input.') strings = db_string.split(SEPARATOR) if (strings[(-1)] == PLURAL_PLACEHOLDER): strings = strings[:(-1)] plural = True else: plural = (len(strings) > 1) ms = multistring(strings, encoding='UTF-8') ms.plural = plural return ms
[ "def", "parse_multistring", "(", "db_string", ")", ":", "if", "(", "not", "isinstance", "(", "db_string", ",", "basestring", ")", ")", ":", "raise", "ValueError", "(", "'Parsing into a multistring requires a string input.'", ")", "strings", "=", "db_string", ".", ...
parses a db_string coming from the db into a multistring object .
train
false
47,896
def start_new_background_thread(target, args, kwargs=None): if (kwargs is None): kwargs = {} request = system_service_pb.StartBackgroundRequestRequest() response = system_service_pb.StartBackgroundRequestResponse() try: apiproxy_stub_map.MakeSyncCall('system', 'StartBackgroundRequest', request, response) except apiproxy_errors.ApplicationError as error: raise ERROR_MAP[error.application_error](error.error_detail) else: return background.EnqueueBackgroundThread(response.request_id(), target, args, kwargs)
[ "def", "start_new_background_thread", "(", "target", ",", "args", ",", "kwargs", "=", "None", ")", ":", "if", "(", "kwargs", "is", "None", ")", ":", "kwargs", "=", "{", "}", "request", "=", "system_service_pb", ".", "StartBackgroundRequestRequest", "(", ")",...
starts a new background thread .
train
false
47,897
def dictitems(d): return list(d.items())
[ "def", "dictitems", "(", "d", ")", ":", "return", "list", "(", "d", ".", "items", "(", ")", ")" ]
a pickleable version of dict .
train
false
47,898
def translations_for_field(field): if (field is None): return {} translation_id = getattr(field, 'id') qs = Translation.objects.filter(id=translation_id, localized_string__isnull=False) translations = dict(qs.values_list('locale', 'localized_string')) return translations
[ "def", "translations_for_field", "(", "field", ")", ":", "if", "(", "field", "is", "None", ")", ":", "return", "{", "}", "translation_id", "=", "getattr", "(", "field", ",", "'id'", ")", "qs", "=", "Translation", ".", "objects", ".", "filter", "(", "id...
return all the translations for a given field .
train
false
47,900
def device_memory_depends(devmem, *objs): depset = getattr(devmem, '_depends_', []) depset.extend(objs)
[ "def", "device_memory_depends", "(", "devmem", ",", "*", "objs", ")", ":", "depset", "=", "getattr", "(", "devmem", ",", "'_depends_'", ",", "[", "]", ")", "depset", ".", "extend", "(", "objs", ")" ]
add dependencies to the device memory .
train
false
47,901
def associate_qos_with_type(context, specs_id, type_id): try: get_qos_specs(context, specs_id) res = volume_types.get_volume_type_qos_specs(type_id) if res.get('qos_specs', None): if (res['qos_specs'].get('id') != specs_id): msg = (_('Type %(type_id)s is already associated with another qos specs: %(qos_specs_id)s') % {'type_id': type_id, 'qos_specs_id': res['qos_specs']['id']}) raise exception.InvalidVolumeType(reason=msg) else: db.qos_specs_associate(context, specs_id, type_id) except db_exc.DBError: LOG.exception(_LE('DB error:')) LOG.warning(_LW('Failed to associate qos specs %(id)s with type: %(vol_type_id)s'), dict(id=specs_id, vol_type_id=type_id)) raise exception.QoSSpecsAssociateFailed(specs_id=specs_id, type_id=type_id)
[ "def", "associate_qos_with_type", "(", "context", ",", "specs_id", ",", "type_id", ")", ":", "try", ":", "get_qos_specs", "(", "context", ",", "specs_id", ")", "res", "=", "volume_types", ".", "get_volume_type_qos_specs", "(", "type_id", ")", "if", "res", ".",...
associate qos_specs with volume type .
train
false
47,902
@commands(u'endmeeting') @example(u'.endmeeting') def endmeeting(bot, trigger): if (not ismeetingrunning(trigger.sender)): bot.say(u"Can't do that, start meeting first") return if (not ischair(trigger.nick, trigger.sender)): bot.say(u'Only meeting head or chairs can do that') return meeting_length = (time.time() - meetings_dict[trigger.sender][u'start']) bot.say((u'\x02Meeting ended!\x0f total meeting length %d seconds' % meeting_length)) logHTML_end(trigger.sender) htmllog_url = (meeting_log_baseurl + quote((((trigger.sender + u'/') + figure_logfile_name(trigger.sender)) + u'.html'))) logplain((u'Meeting ended by %s, total meeting length %d seconds' % (trigger.nick, meeting_length)), trigger.sender) bot.say((u'Meeting minutes: ' + htmllog_url)) meetings_dict[trigger.sender] = Ddict(dict) del meeting_actions[trigger.sender]
[ "@", "commands", "(", "u'endmeeting'", ")", "@", "example", "(", "u'.endmeeting'", ")", "def", "endmeeting", "(", "bot", ",", "trigger", ")", ":", "if", "(", "not", "ismeetingrunning", "(", "trigger", ".", "sender", ")", ")", ":", "bot", ".", "say", "(...
end a meeting .
train
false
47,904
def transpose_axes(data, axes, asaxes='CTZYX'): for ax in axes: if (ax not in asaxes): raise ValueError(('unknown axis %s' % ax)) shape = data.shape for ax in reversed(asaxes): if (ax not in axes): axes = (ax + axes) shape = ((1,) + shape) data = data.reshape(shape) data = data.transpose([axes.index(ax) for ax in asaxes]) return data
[ "def", "transpose_axes", "(", "data", ",", "axes", ",", "asaxes", "=", "'CTZYX'", ")", ":", "for", "ax", "in", "axes", ":", "if", "(", "ax", "not", "in", "asaxes", ")", ":", "raise", "ValueError", "(", "(", "'unknown axis %s'", "%", "ax", ")", ")", ...
return data with its axes permuted to match specified axes .
train
false