id_within_dataset
int64
1
55.5k
snippet
stringlengths
19
14.2k
tokens
listlengths
6
1.63k
nl
stringlengths
6
352
split_within_dataset
stringclasses
1 value
is_duplicated
bool
2 classes
42,531
def clear_subscribers():
    """Remove every entry from the module-level _SUBSCRIBERS mapping."""
    _SUBSCRIBERS.clear()
[ "def", "clear_subscribers", "(", ")", ":", "_SUBSCRIBERS", ".", "clear", "(", ")" ]
empty subscribers dictionary .
train
false
42,533
def list_pipelines(region=None, key=None, keyid=None, profile=None):
    """Return {'result': [pipeline ids]} for all AWS Data Pipeline pipelines,
    or {'error': message} when the API call fails.

    region/key/keyid/profile are forwarded to _get_client to build the
    boto client (salt-style credential resolution).
    """
    client = _get_client(region, key, keyid, profile)
    r = {}
    try:
        # Paginate: list_pipelines caps how many ids one call returns.
        paginator = client.get_paginator('list_pipelines')
        pipelines = []
        for page in paginator.paginate():
            pipelines += page['pipelineIdList']
        r['result'] = pipelines
    except (botocore.exceptions.BotoCoreError, botocore.exceptions.ClientError) as e:
        r['error'] = str(e)
    return r
[ "def", "list_pipelines", "(", "region", "=", "None", ",", "key", "=", "None", ",", "keyid", "=", "None", ",", "profile", "=", "None", ")", ":", "client", "=", "_get_client", "(", "region", ",", "key", ",", "keyid", ",", "profile", ")", "r", "=", "{...
get a list of pipeline ids and names for all pipelines .
train
true
42,534
def gen_diff_header(paths, modes, shas):
    """Yield the lines of a git-style blob diff header.

    paths: (old_path, new_path); modes: (old_mode, new_mode), each an int
    or None when that side is absent; shas: (old_sha, new_sha) passed to
    shortid for the index line.

    NOTE(review): the mode and trailing-mode yields are encoded to ascii
    bytes while the 'diff --git' line and final newline are yielded as-is,
    so path/sha inputs are presumably already bytes -- confirm with callers.
    """
    (old_path, new_path) = paths
    (old_mode, new_mode) = modes
    (old_sha, new_sha) = shas
    (yield (((('diff --git ' + old_path) + ' ') + new_path) + '\n'))
    if (old_mode != new_mode):
        if (new_mode is not None):
            if (old_mode is not None):
                (yield ('old mode %o\n' % old_mode).encode('ascii'))
            (yield ('new mode %o\n' % new_mode).encode('ascii'))
        else:
            # new_mode is None: the blob was deleted.
            (yield ('deleted mode %o\n' % old_mode).encode('ascii'))
    (yield ((('index ' + shortid(old_sha)) + '..') + shortid(new_sha)))
    if (new_mode is not None):
        (yield (' %o' % new_mode).encode('ascii'))
    (yield '\n')
[ "def", "gen_diff_header", "(", "paths", ",", "modes", ",", "shas", ")", ":", "(", "old_path", ",", "new_path", ")", "=", "paths", "(", "old_mode", ",", "new_mode", ")", "=", "modes", "(", "old_sha", ",", "new_sha", ")", "=", "shas", "(", "yield", "("...
write a blob diff header .
train
false
42,536
def collaboration_weighted_projected_graph(B, nodes):
    """Newman's collaboration-weighted projection of bipartite graph B
    onto *nodes*.

    Edge (u, v) gets weight sum(1 / (deg(n) - 1)) over the common
    neighbors n of u and v, skipping degree-1 neighbors.  Raises
    nx.NetworkXError for multigraphs; directed graphs use predecessor
    adjacency for the co-neighbor test.
    """
    if B.is_multigraph():
        raise nx.NetworkXError('not defined for multigraphs')
    if B.is_directed():
        pred = B.pred
        G = nx.DiGraph()
    else:
        pred = B.adj
        G = nx.Graph()
    G.graph.update(B.graph)
    # NOTE(review): B.node is the pre-2.0 networkx node-attribute API.
    G.add_nodes_from(((n, B.node[n]) for n in nodes))
    for u in nodes:
        unbrs = set(B[u])
        # Second neighbors of u (candidates for projected edges), minus u.
        nbrs2 = (set((n for nbr in unbrs for n in B[nbr])) - set([u]))
        for v in nbrs2:
            vnbrs = set(pred[v])
            common = (unbrs & vnbrs)
            weight = sum([(1.0 / (len(B[n]) - 1)) for n in common if (len(B[n]) > 1)])
            G.add_edge(u, v, weight=weight)
    return G
[ "def", "collaboration_weighted_projected_graph", "(", "B", ",", "nodes", ")", ":", "if", "B", ".", "is_multigraph", "(", ")", ":", "raise", "nx", ".", "NetworkXError", "(", "'not defined for multigraphs'", ")", "if", "B", ".", "is_directed", "(", ")", ":", "...
newmans weighted projection of b onto one of its node sets .
train
false
42,539
def _sort_fields(elem, orders_dicts): try: order = orders_dicts[elem.tag] except KeyError: pass else: tmp = sorted([((order.get(child.tag, 1000000000.0), i), child) for (i, child) in enumerate(elem)]) elem[:] = [child for (key, child) in tmp] for child in elem: if len(child): _sort_fields(child, orders_dicts)
[ "def", "_sort_fields", "(", "elem", ",", "orders_dicts", ")", ":", "try", ":", "order", "=", "orders_dicts", "[", "elem", ".", "tag", "]", "except", "KeyError", ":", "pass", "else", ":", "tmp", "=", "sorted", "(", "[", "(", "(", "order", ".", "get", ...
sort the children of elem .
train
false
42,541
@LocalProxy
def _proxy_stream():
    """Lazily resolve the error stream: the WSGI environ's 'wsgi.errors'
    when a request context is active, otherwise sys.stderr."""
    ctx = _request_ctx_stack.top
    if (ctx is not None):
        return ctx.request.environ['wsgi.errors']
    return sys.stderr
[ "@", "LocalProxy", "def", "_proxy_stream", "(", ")", ":", "ctx", "=", "_request_ctx_stack", ".", "top", "if", "(", "ctx", "is", "not", "None", ")", ":", "return", "ctx", ".", "request", ".", "environ", "[", "'wsgi.errors'", "]", "return", "sys", ".", "...
finds the most appropriate error stream for the application .
train
false
42,542
def get_project_from_domain(domain_ref):
    """Build a project ref acting as a domain from *domain_ref*.

    Returns a shallow copy of the incoming ref marked as a domain-level
    project: is_domain is forced to True while domain_id and parent_id
    are cleared.  The input mapping is left unmodified.
    """
    project_ref = dict(domain_ref)
    project_ref.update(is_domain=True, domain_id=None, parent_id=None)
    return project_ref
[ "def", "get_project_from_domain", "(", "domain_ref", ")", ":", "project_ref", "=", "domain_ref", ".", "copy", "(", ")", "project_ref", "[", "'is_domain'", "]", "=", "True", "project_ref", "[", "'domain_id'", "]", "=", "None", "project_ref", "[", "'parent_id'", ...
create a project ref from the provided domain ref .
train
false
42,543
def getRow():
    """Fetch all 'wsrep%' status rows from MySQL (Galera health check).

    Connects using the module-level mysqlHost/mysqlUser/mysqlPasswd
    settings and returns cursor.fetchall() for SHOW STATUS LIKE '%wsrep%'.
    Prints an error and exits the process with status 13 on any failure.

    NOTE(review): the bare except hides the real error, and db.close()
    is skipped on the error path -- worth tightening.  (Python 2 print
    statement; keep if the module still targets Python 2.)
    """
    try:
        db = mysql.connect(host=mysqlHost, user=mysqlUser, passwd=mysqlPasswd)
        cursor = db.cursor()
        cursor.execute("SHOW STATUS LIKE '%wsrep%'")
        result = cursor.fetchall()
    except:
        print 'Error: unable to fetch data - Check your configuration!'
        sys.exit(13)
    db.close()
    return result
[ "def", "getRow", "(", ")", ":", "try", ":", "db", "=", "mysql", ".", "connect", "(", "host", "=", "mysqlHost", ",", "user", "=", "mysqlUser", ",", "passwd", "=", "mysqlPasswd", ")", "cursor", "=", "db", ".", "cursor", "(", ")", "cursor", ".", "exec...
test connection .
train
false
42,545
def test_adjust_gamma_zero():
    """Gamma == 0 must map every pixel to the dtype maximum (white image)."""
    image = np.random.uniform(0, 255, (8, 8))
    result = exposure.adjust_gamma(image, 0)
    dtype = image.dtype.type
    # dtype_range[dtype][1] is the maximum representable value.
    assert_array_equal(result, dtype_range[dtype][1])
[ "def", "test_adjust_gamma_zero", "(", ")", ":", "image", "=", "np", ".", "random", ".", "uniform", "(", "0", ",", "255", ",", "(", "8", ",", "8", ")", ")", "result", "=", "exposure", ".", "adjust_gamma", "(", "image", ",", "0", ")", "dtype", "=", ...
white image should be returned for gamma equal to zero .
train
false
42,546
def s3_ownerstamp():
    """Return the record-ownership meta-fields as a tuple:
    (owned_by_user, owned_by_group, realm_entity).

    All three are hidden (readable=False, writable=False) reusable fields
    used to stamp ownership information onto records.
    """
    auth = current.auth
    utable = auth.settings.table_user
    # Individual owner: defaults to the logged-in user, if any.
    s3_meta_owned_by_user = S3ReusableField('owned_by_user', utable,
                                            readable=False,
                                            writable=False,
                                            requires=None,
                                            default=(current.session.auth.user.id if auth.is_logged_in() else None),
                                            represent=(lambda id: ((id and s3_auth_user_represent(id)) or current.messages.UNKNOWN_OPT)),
                                            ondelete='RESTRICT')
    # Owning role (auth_group id).
    s3_meta_owned_by_group = S3ReusableField('owned_by_group', 'integer',
                                             readable=False,
                                             writable=False,
                                             requires=None,
                                             default=None,
                                             represent=S3Represent(lookup='auth_group', fields=['role']))
    # Realm entity (pr_pentity id) that scopes the record's realm.
    s3_meta_realm_entity = S3ReusableField('realm_entity', 'integer',
                                           default=None,
                                           readable=False,
                                           writable=False,
                                           requires=None,
                                           represent=(lambda val: current.s3db.pr_pentity_represent(val)))
    return (s3_meta_owned_by_user(), s3_meta_owned_by_group(), s3_meta_realm_entity())
[ "def", "s3_ownerstamp", "(", ")", ":", "auth", "=", "current", ".", "auth", "utable", "=", "auth", ".", "settings", ".", "table_user", "s3_meta_owned_by_user", "=", "S3ReusableField", "(", "'owned_by_user'", ",", "utable", ",", "readable", "=", "False", ",", ...
record ownership meta-fields .
train
false
42,548
def getAroundLoop(begin, end, loop):
    """Return the arc of *loop* from index begin (inclusive) to end
    (exclusive), wrapping around the end of the list.

    When end <= begin the arc crosses the seam, so end is advanced by one
    full revolution before slicing.
    """
    size = len(loop)
    if end <= begin:
        end += size
    return [loop[index % size] for index in range(begin, end)]
[ "def", "getAroundLoop", "(", "begin", ",", "end", ",", "loop", ")", ":", "aroundLoop", "=", "[", "]", "if", "(", "end", "<=", "begin", ")", ":", "end", "+=", "len", "(", "loop", ")", "for", "pointIndex", "in", "xrange", "(", "begin", ",", "end", ...
get an arc around a loop .
train
false
42,549
def write_pid_to_pidfile(pidfile_path):
    """Atomically create *pidfile_path* and write the current PID into it.

    The file is opened with O_CREAT|O_EXCL so an existing pidfile raises
    OSError (EEXIST) instead of clobbering a running daemon's file.  The
    file is created with permissions 0o644 and contains the PID followed
    by a newline.
    """
    open_flags = (os.O_CREAT | os.O_EXCL | os.O_WRONLY)
    open_mode = 0o644  # was the magic decimal 420
    pidfile_fd = os.open(pidfile_path, open_flags, open_mode)
    # The context manager guarantees the descriptor is closed even if
    # the write fails (the original leaked the fd on error).
    with os.fdopen(pidfile_fd, 'w') as pidfile:
        pidfile.write('%s\n' % os.getpid())
[ "def", "write_pid_to_pidfile", "(", "pidfile_path", ")", ":", "open_flags", "=", "(", "(", "os", ".", "O_CREAT", "|", "os", ".", "O_EXCL", ")", "|", "os", ".", "O_WRONLY", ")", "open_mode", "=", "420", "pidfile_fd", "=", "os", ".", "open", "(", "pidfil...
write the pid in the named pid file .
train
true
42,550
def find_filter_class(filtername):
    """Look up a filter class by name.

    Built-in filters registered in FILTERS take precedence; otherwise
    the plugin filters are scanned.  Returns None when nothing matches.
    """
    try:
        return FILTERS[filtername]
    except KeyError:
        pass
    for name, cls in find_plugin_filters():
        if name == filtername:
            return cls
    return None
[ "def", "find_filter_class", "(", "filtername", ")", ":", "if", "(", "filtername", "in", "FILTERS", ")", ":", "return", "FILTERS", "[", "filtername", "]", "for", "(", "name", ",", "cls", ")", "in", "find_plugin_filters", "(", ")", ":", "if", "(", "name", ...
lookup a filter by name .
train
true
42,551
def has_tag(node, kind):
    """Return True if any entry under node['directory'] has .kind == kind."""
    return any(entry.kind == kind for entry in node['directory'])
[ "def", "has_tag", "(", "node", ",", "kind", ")", ":", "for", "d", "in", "node", "[", "'directory'", "]", ":", "if", "(", "d", ".", "kind", "==", "kind", ")", ":", "return", "True", "return", "False" ]
check if the node contains a tag of a given kind .
train
false
42,552
def setup_client(client_secret_file, server_public_file, endpoint='tcp://127.0.0.1:9000'):
    """Create a CURVE-authenticated DEALER socket connected to *endpoint*.

    Loads the client keypair from client_secret_file, pins the server's
    public key from server_public_file, then wraps the socket in a
    ZMQStream whose on_recv handler is client_msg_recvd.  Returns the
    stream.
    """
    client = zmq.Context.instance().socket(zmq.DEALER)
    (client_public, client_secret) = zmq.auth.load_certificate(client_secret_file)
    client.curve_secretkey = client_secret
    client.curve_publickey = client_public
    (server_public, _) = zmq.auth.load_certificate(server_public_file)
    client.curve_serverkey = server_public
    client.connect(endpoint)
    client_stream = zmqstream.ZMQStream(client)
    client_stream.on_recv(client_msg_recvd)
    return client_stream
[ "def", "setup_client", "(", "client_secret_file", ",", "server_public_file", ",", "endpoint", "=", "'tcp://127.0.0.1:9000'", ")", ":", "client", "=", "zmq", ".", "Context", ".", "instance", "(", ")", ".", "socket", "(", "zmq", ".", "DEALER", ")", "(", "clien...
setup a simple client with curve auth .
train
false
42,556
def doNotFailOnNetworkError(func):
    """Decorator turning one specific intermittent network FailTest into
    a SkipTest (see ticket 8753); any other FailTest is re-raised."""
    @functools.wraps(func)
    def wrapper(*a, **kw):
        try:
            func(*a, **kw)
        except FailTest as e:
            # Only the "Failed to get object inventory from ..." failure
            # is known to be a transient network problem.
            if e.args[0].startswith("'Failed to get object inventory from "):
                raise SkipTest('This test is prone to intermittent network errors. See ticket 8753. Exception was: {!r}'.format(e))
            raise
    return wrapper
[ "def", "doNotFailOnNetworkError", "(", "func", ")", ":", "@", "functools", ".", "wraps", "(", "func", ")", "def", "wrapper", "(", "*", "a", ",", "**", "kw", ")", ":", "try", ":", "func", "(", "*", "a", ",", "**", "kw", ")", "except", "FailTest", ...
a decorator which makes apibuilder tests not fail because of intermittent network failures -- namely .
train
false
42,558
def get_all_stats():
    """Collect platform stats from every AppController in the deployment.

    Returns {ip: stats_dict}; nodes whose SOAP call fails (SOAP error or
    socket error) are recorded as {JSONTags.ERROR: JSONTags.UNREACHABLE}.
    """
    all_stats = {}
    secret = appscale_info.get_secret()
    logging.debug('Retrieved deployment secret: {}'.format(secret))
    for ip in appscale_info.get_all_ips():
        appcontroller_endpoint = 'https://{}:{}'.format(ip, hermes_constants.APPCONTROLLER_PORT)
        logging.debug('Connecting to AC at: {}'.format(appcontroller_endpoint))
        server = SOAPpy.SOAPProxy(appcontroller_endpoint)
        try:
            all_stats[ip] = json.loads(server.get_all_stats(secret))
        except SOAPpy.SOAPException as soap_exception:
            logging.exception('Exception while performing SOAP call to {}'.format(appcontroller_endpoint))
            logging.exception(soap_exception)
            all_stats[ip] = {JSONTags.ERROR: JSONTags.UNREACHABLE}
        except socket_error as serr:
            logging.error('Socket error while performing SOAP call to {}'.format(appcontroller_endpoint))
            logging.error(serr)
            all_stats[ip] = {JSONTags.ERROR: JSONTags.UNREACHABLE}
    return all_stats
[ "def", "get_all_stats", "(", ")", ":", "all_stats", "=", "{", "}", "secret", "=", "appscale_info", ".", "get_secret", "(", ")", "logging", ".", "debug", "(", "'Retrieved deployment secret: {}'", ".", "format", "(", "secret", ")", ")", "for", "ip", "in", "a...
collects platform stats from all deployment nodes .
train
false
42,559
def expr_eval(expr):
    """Evaluate *expr* after test_expr has vetted its bytecode against
    the _EXPR_OPCODES whitelist, and return the expression's value.

    NOTE(review): safety rests entirely on the opcode whitelist passed
    to test_expr; do not relax _EXPR_OPCODES without review.
    """
    c = test_expr(expr, _EXPR_OPCODES)
    return unsafe_eval(c)
[ "def", "expr_eval", "(", "expr", ")", ":", "c", "=", "test_expr", "(", "expr", ",", "_EXPR_OPCODES", ")", "return", "unsafe_eval", "(", "c", ")" ]
expr_eval -> value restricted python expression evaluation evaluates a string that contains an expression that only uses python constants .
train
false
42,560
@environmentfilter
def do_random(environment, seq):
    """Return a random item from *seq*, or environment.undefined when the
    sequence is empty (choice raises IndexError on empty sequences)."""
    try:
        return choice(seq)
    except IndexError:
        return environment.undefined('No random item, sequence was empty.')
[ "@", "environmentfilter", "def", "do_random", "(", "environment", ",", "seq", ")", ":", "try", ":", "return", "choice", "(", "seq", ")", "except", "IndexError", ":", "return", "environment", ".", "undefined", "(", "'No random item, sequence was empty.'", ")" ]
return a random item from the sequence .
train
false
42,561
def resource_data_set(context, resource_id, key, value, redact=False):
    """Upsert a resource_data key/value row for *resource_id*.

    When redact is True the value is encrypted and the encryption method
    recorded in decrypt_method.  Returns the saved model instance.
    """
    if redact:
        (method, value) = crypt.encrypt(value)
    else:
        method = ''
    try:
        current = resource_data_get_by_key(context, resource_id, key)
    except exception.NotFound:
        # No existing row: start a fresh one keyed to this resource.
        current = models.ResourceData()
        current.key = key
        current.resource_id = resource_id
    current.redact = redact
    current.value = value
    current.decrypt_method = method
    current.save(session=context.session)
    return current
[ "def", "resource_data_set", "(", "context", ",", "resource_id", ",", "key", ",", "value", ",", "redact", "=", "False", ")", ":", "if", "redact", ":", "(", "method", ",", "value", ")", "=", "crypt", ".", "encrypt", "(", "value", ")", "else", ":", "met...
save resources key/value pair to database .
train
false
42,563
def format_certificate_for_user(username, cert):
    """Serialize a user's course certificate into a plain dict.

    download_url is only populated while the certificate status is
    'downloadable', falling back to get_certificate_url when the cert
    itself carries no URL.
    """
    return {'username': username,
            'course_key': cert.course_id,
            'type': cert.mode,
            'status': cert.status,
            'grade': cert.grade,
            'created': cert.created_date,
            'modified': cert.modified_date,
            'download_url': ((cert.download_url or get_certificate_url(cert.user.id, cert.course_id)) if (cert.status == CertificateStatuses.downloadable) else None)}
[ "def", "format_certificate_for_user", "(", "username", ",", "cert", ")", ":", "return", "{", "'username'", ":", "username", ",", "'course_key'", ":", "cert", ".", "course_id", ",", "'type'", ":", "cert", ".", "mode", ",", "'status'", ":", "cert", ".", "sta...
helper function to serialize an user certificate .
train
false
42,564
def collect_dirs(dir_paths, ignored_dir_paths=None):
    """Expand the globs in *dir_paths* (minus *ignored_dir_paths*) via
    icollect and return only the entries that are real directories."""
    valid_dirs = list(filter((lambda fname: os.path.isdir(fname[0])), icollect(dir_paths, ignored_dir_paths)))
    if valid_dirs:
        # icollect yields tuples; keep just the path component of each.
        (collected_dirs, _) = zip(*valid_dirs)
        return list(collected_dirs)
    else:
        return []
[ "def", "collect_dirs", "(", "dir_paths", ",", "ignored_dir_paths", "=", "None", ")", ":", "valid_dirs", "=", "list", "(", "filter", "(", "(", "lambda", "fname", ":", "os", ".", "path", ".", "isdir", "(", "fname", "[", "0", "]", ")", ")", ",", "icolle...
evaluate globs in directory paths and return all matching directories .
train
false
42,566
def authorize_client(client, auth_type=None, service=None, source=None, scopes=None, oauth_type=None, consumer_key=None, consumer_secret=None):
    """Thin convenience wrapper: delegate client authorization to a fresh
    SettingsUtil instance, forwarding every argument unchanged."""
    settings = SettingsUtil()
    return settings.authorize_client(client=client, auth_type=auth_type, service=service, source=source, scopes=scopes, oauth_type=oauth_type, consumer_key=consumer_key, consumer_secret=consumer_secret)
[ "def", "authorize_client", "(", "client", ",", "auth_type", "=", "None", ",", "service", "=", "None", ",", "source", "=", "None", ",", "scopes", "=", "None", ",", "oauth_type", "=", "None", ",", "consumer_key", "=", "None", ",", "consumer_secret", "=", "...
uses command line arguments .
train
false
42,567
def _PackForSuds(obj, factory):
    """Recursively convert *obj* into the structure suds expects.

    Empty dicts and None become suds.null(); dicts with an 'xsi_type'
    key are instantiated through the suds *factory* (with every declared
    parameter reset before the payload keys are copied in); other dicts
    and sequences recurse; anything else is handed to _RecurseOverObject
    and returned unchanged.
    """
    if (obj in ({}, None)):
        return suds.null()
    elif isinstance(obj, dict):
        if ('xsi_type' in obj):
            try:
                new_obj = factory.create(obj['xsi_type'])
            except suds.TypeNotFound:
                # Retry with the default namespace prefix.
                new_obj = factory.create(':'.join(['ns0', obj['xsi_type']]))
            # Reset declared params so stale factory defaults don't leak;
            # '.Type' params carry the xsi_type value itself.
            for (param, _) in new_obj:
                if param.endswith('.Type'):
                    setattr(new_obj, param, obj['xsi_type'])
                else:
                    setattr(new_obj, param, None)
            for key in obj:
                if (key == 'xsi_type'):
                    continue
                setattr(new_obj, key, _PackForSuds(obj[key], factory))
        else:
            new_obj = {}
            for key in obj:
                new_obj[key] = _PackForSuds(obj[key], factory)
        return new_obj
    elif isinstance(obj, (list, tuple)):
        return [_PackForSuds(item, factory) for item in obj]
    else:
        _RecurseOverObject(obj, factory)
        return obj
[ "def", "_PackForSuds", "(", "obj", ",", "factory", ")", ":", "if", "(", "obj", "in", "(", "{", "}", ",", "None", ")", ")", ":", "return", "suds", ".", "null", "(", ")", "elif", "isinstance", "(", "obj", ",", "dict", ")", ":", "if", "(", "'xsi_t...
packs soap input into the format we want for suds .
train
true
42,568
def rugplot(a, height=0.05, axis='x', ax=None, **kwargs):
    """Draw each value in *a* as a tick ("rug") along an axis.

    axis='y' (or vertical=True in kwargs) draws horizontal ticks via
    axhline; otherwise vertical ticks via axvline.  *height* is the tick
    length as an axes fraction.  Returns the Axes drawn on.
    """
    if (ax is None):
        ax = plt.gca()
    a = np.asarray(a)
    vertical = kwargs.pop('vertical', (axis == 'y'))
    func = (ax.axhline if vertical else ax.axvline)
    kwargs.setdefault('linewidth', 1)
    for pt in a:
        func(pt, 0, height, **kwargs)
    return ax
[ "def", "rugplot", "(", "a", ",", "height", "=", "0.05", ",", "axis", "=", "'x'", ",", "ax", "=", "None", ",", "**", "kwargs", ")", ":", "if", "(", "ax", "is", "None", ")", ":", "ax", "=", "plt", ".", "gca", "(", ")", "a", "=", "np", ".", ...
plot datapoints in an array as sticks on an axis .
train
false
42,569
def send_schedule_change(email, session_name, link):
    """Email a session-schedule-change notice.

    Skipped only when a SESSION_SCHEDULE message setting exists with
    mail disabled (mail_status != 1).
    """
    message_settings = MessageSettings.query.filter_by(action=SESSION_SCHEDULE).first()
    if ((not message_settings) or (message_settings.mail_status == 1)):
        send_email(to=email, action=SESSION_SCHEDULE, subject=MAILS[SESSION_SCHEDULE]['subject'].format(session_name=session_name), html=MAILS[SESSION_SCHEDULE]['message'].format(email=str(email), session_name=str(session_name), link=link))
[ "def", "send_schedule_change", "(", "email", ",", "session_name", ",", "link", ")", ":", "message_settings", "=", "MessageSettings", ".", "query", ".", "filter_by", "(", "action", "=", "SESSION_SCHEDULE", ")", ".", "first", "(", ")", "if", "(", "(", "not", ...
send schedule change in session .
train
false
42,571
def assert_raises_with_msg(exc_class, expected_msg, callable_obj, *args, **kwargs):
    """Assert that callable_obj(*args, **kwargs) raises *exc_class* with
    exactly *expected_msg*.

    Reports a failure when nothing is raised or when the message differs;
    exceptions of other types propagate unchanged.
    """
    try:
        callable_obj(*args, **kwargs)
    except exc_class as err:
        assert_equal(expected_msg, unic(err), 'Correct exception but wrong message')
    else:
        # exc_class may be a tuple/old-style spec without __name__.
        if hasattr(exc_class, '__name__'):
            exc_name = exc_class.__name__
        else:
            exc_name = str(exc_class)
        _report_failure(('%s not raised' % exc_name))
[ "def", "assert_raises_with_msg", "(", "exc_class", ",", "expected_msg", ",", "callable_obj", ",", "*", "args", ",", "**", "kwargs", ")", ":", "try", ":", "callable_obj", "(", "*", "args", ",", "**", "kwargs", ")", "except", "exc_class", "as", "err", ":", ...
similar to fail_unless_raises but also checks the exception message .
train
false
42,572
def slicer(dimensions):
    """Decorator factory: wrap an extractor so that calling it returns a
    Slicer over *dimensions* with the call's arguments pre-bound."""
    def decorator(extractor):
        @wraps(extractor)
        def build(*args, **kwargs):
            bound = partial(extractor, *args, **kwargs)
            return Slicer(dimensions=dimensions, extractor=bound)
        return build
    return decorator
[ "def", "slicer", "(", "dimensions", ")", ":", "def", "decorator", "(", "extractor", ")", ":", "@", "wraps", "(", "extractor", ")", "def", "make_slicer", "(", "*", "a", ",", "**", "k", ")", ":", "return", "Slicer", "(", "dimensions", "=", "dimensions", ...
slicer decorator .
train
false
42,573
@deprecated('The function distribute_covar_matrix_to_match_covariance_type is deprecated in 0.18 and will be removed in 0.20.')
def distribute_covar_matrix_to_match_covariance_type(tied_cv, covariance_type, n_components):
    """Expand the template covariance *tied_cv* into per-component
    covariances of the requested *covariance_type*.

    'spherical' -> (n_components, n_features) of the template's mean
    variance; 'tied' -> the template itself; 'diag' -> its diagonal tiled
    per component; 'full' -> the full matrix tiled per component.
    Raises ValueError for an unknown covariance_type.

    Fix: the deprecation message previously read "The functon ...typeis
    deprecated" (typo plus missing space).
    """
    if (covariance_type == 'spherical'):
        cv = np.tile((tied_cv.mean() * np.ones(tied_cv.shape[1])), (n_components, 1))
    elif (covariance_type == 'tied'):
        cv = tied_cv
    elif (covariance_type == 'diag'):
        cv = np.tile(np.diag(tied_cv), (n_components, 1))
    elif (covariance_type == 'full'):
        cv = np.tile(tied_cv, (n_components, 1, 1))
    else:
        raise ValueError(('covariance_type must be one of ' + "'spherical', 'tied', 'diag', 'full'"))
    return cv
[ "@", "deprecated", "(", "'The functon distribute_covar_matrix_to_match_covariance_typeis deprecated in 0.18 and will be removed in 0.20.'", ")", "def", "distribute_covar_matrix_to_match_covariance_type", "(", "tied_cv", ",", "covariance_type", ",", "n_components", ")", ":", "if", "("...
create all the covariance matrices from a given template .
train
true
42,574
def skip_if_browser(browser):
    """Decorator factory: skip the decorated test method when run under
    the named browser.

    The wrapped method raises unittest.SkipTest if self.browser.name
    equals *browser*; otherwise the original test runs unchanged.
    """
    skip_message = 'Skipping as this test will not work with {}'.format(browser)

    def decorator(test_function):
        @functools.wraps(test_function)
        def wrapper(self, *args, **kwargs):
            if self.browser.name == browser:
                raise unittest.SkipTest(skip_message)
            test_function(self, *args, **kwargs)
        return wrapper
    return decorator
[ "def", "skip_if_browser", "(", "browser", ")", ":", "def", "decorator", "(", "test_function", ")", ":", "@", "functools", ".", "wraps", "(", "test_function", ")", "def", "wrapper", "(", "self", ",", "*", "args", ",", "**", "kwargs", ")", ":", "if", "("...
method decorator that skips a test if browser is browser args: browser : name of internet browser returns: decorated function .
train
false
42,575
def scorers_processors():
    """Build the (scorer, processor) pairs used to parameterize tests.

    The plain ratio scorers are crossed with every processor; the
    W/Q/token-based scorers are pinned to the full_process variant
    (force_ascii matching their Unicode handling) that they require.
    """
    scorers = [fuzz.ratio, fuzz.partial_ratio]
    processors = [(lambda x: x), partial(utils.full_process, force_ascii=False), partial(utils.full_process, force_ascii=True)]
    splist = list(product(scorers, processors))
    splist.extend([(fuzz.WRatio, partial(utils.full_process, force_ascii=True)),
                   (fuzz.QRatio, partial(utils.full_process, force_ascii=True)),
                   (fuzz.UWRatio, partial(utils.full_process, force_ascii=False)),
                   (fuzz.UQRatio, partial(utils.full_process, force_ascii=False)),
                   (fuzz.token_set_ratio, partial(utils.full_process, force_ascii=True)),
                   (fuzz.token_sort_ratio, partial(utils.full_process, force_ascii=True)),
                   (fuzz.partial_token_set_ratio, partial(utils.full_process, force_ascii=True)),
                   (fuzz.partial_token_sort_ratio, partial(utils.full_process, force_ascii=True))])
    return splist
[ "def", "scorers_processors", "(", ")", ":", "scorers", "=", "[", "fuzz", ".", "ratio", ",", "fuzz", ".", "partial_ratio", "]", "processors", "=", "[", "(", "lambda", "x", ":", "x", ")", ",", "partial", "(", "utils", ".", "full_process", ",", "force_asc...
generate a list of (scorer, processor) pairs for testing .
train
false
42,576
def shapeFromPath(path, pen):
    """Return a QPainterPath outlining *path* as it would be drawn with
    *pen* (the stroke outline plus the path itself), e.g. for hit-testing.

    The stroker width is clamped to a tiny positive value because a
    zero width (cosmetic pen) would produce no stroke.
    """
    stroker = QPainterPathStroker()
    stroker.setCapStyle(pen.capStyle())
    stroker.setJoinStyle(pen.joinStyle())
    stroker.setMiterLimit(pen.miterLimit())
    stroker.setWidth(max(pen.widthF(), 1e-09))
    shape = stroker.createStroke(path)
    shape.addPath(path)
    return shape
[ "def", "shapeFromPath", "(", "path", ",", "pen", ")", ":", "stroker", "=", "QPainterPathStroker", "(", ")", "stroker", ".", "setCapStyle", "(", "pen", ".", "capStyle", "(", ")", ")", "stroker", ".", "setJoinStyle", "(", "pen", ".", "joinStyle", "(", ")",...
create a qpainterpath shape from the path drawn with pen .
train
false
42,578
def parse_to_aware_datetime(value):
    """Coerce *value* (a datetime or a parseable timestamp) to a
    timezone-aware datetime in UTC."""
    if isinstance(value, datetime.datetime):
        datetime_obj = value
    else:
        datetime_obj = parse_timestamp(value)
    if (datetime_obj.tzinfo is None):
        # Naive: assume it is already UTC; tag without shifting the time.
        datetime_obj = datetime_obj.replace(tzinfo=tzutc())
    else:
        # Aware: convert to UTC.
        datetime_obj = datetime_obj.astimezone(tzutc())
    return datetime_obj
[ "def", "parse_to_aware_datetime", "(", "value", ")", ":", "if", "isinstance", "(", "value", ",", "datetime", ".", "datetime", ")", ":", "datetime_obj", "=", "value", "else", ":", "datetime_obj", "=", "parse_timestamp", "(", "value", ")", "if", "(", "datetime...
converted the passed in value to a datetime object with tzinfo .
train
false
42,579
def whois_callers_caller():
    """Return inspect.getframeinfo for our caller's caller.

    The stack index 2 is load-bearing (0 = this frame, 1 = caller,
    2 = its caller); wrapping this helper in another function would
    shift the result.
    """
    import inspect
    frameObj = inspect.stack()[2][0]
    return inspect.getframeinfo(frameObj)
[ "def", "whois_callers_caller", "(", ")", ":", "import", "inspect", "frameObj", "=", "inspect", ".", "stack", "(", ")", "[", "2", "]", "[", "0", "]", "return", "inspect", ".", "getframeinfo", "(", "frameObj", ")" ]
returns: traceback namedtuple for our callers caller .
train
false
42,580
def update_settings(section, option):
    """React to a QWebSettings-relevant configuration change.

    private-browsing toggles the favicon database (disabled in private
    mode or when no cache dir exists); the ui scrollbar/stylesheet
    options regenerate the user stylesheet.  Everything else is pushed
    through the generic MAPPINGS update.
    """
    if ((section, option) == ('general', 'private-browsing')):
        cache_path = standarddir.cache()
        if (config.get('general', 'private-browsing') or (cache_path is None)):
            # Empty path disables the icon database entirely.
            QWebSettings.setIconDatabasePath('')
        else:
            QWebSettings.setIconDatabasePath(cache_path)
    elif ((section == 'ui') and (option in ['hide-scrollbar', 'user-stylesheet'])):
        _set_user_stylesheet()
    websettings.update_mappings(MAPPINGS, section, option)
[ "def", "update_settings", "(", "section", ",", "option", ")", ":", "if", "(", "(", "section", ",", "option", ")", "==", "(", "'general'", ",", "'private-browsing'", ")", ")", ":", "cache_path", "=", "standarddir", ".", "cache", "(", ")", "if", "(", "co...
update global settings when qwebsettings changed .
train
false
42,581
@requires_segment_info
def paste_indicator(pl, segment_info, text=u'PASTE'):
    """Powerline segment: show *text* while Vim's 'paste' option is set,
    otherwise render nothing (None)."""
    return (text if int(vim.eval(u'&paste')) else None)
[ "@", "requires_segment_info", "def", "paste_indicator", "(", "pl", ",", "segment_info", ",", "text", "=", "u'PASTE'", ")", ":", "return", "(", "text", "if", "int", "(", "vim", ".", "eval", "(", "u'&paste'", ")", ")", "else", "None", ")" ]
return a paste mode indicator .
train
false
42,582
@scopes.add_arg_scope
def global_step(device=''):
    """Return the graph's global step variable, creating it on *device*
    when the GLOBAL_STEP collection is still empty.

    The created variable is a scalar int64, zero-initialized,
    non-trainable, and registered in the restore/global-variable/
    global-step collections.
    """
    global_step_ref = tf.get_collection(tf.GraphKeys.GLOBAL_STEP)
    if global_step_ref:
        return global_step_ref[0]
    else:
        collections = [VARIABLES_TO_RESTORE, tf.GraphKeys.GLOBAL_VARIABLES, tf.GraphKeys.GLOBAL_STEP]
        with tf.device(variable_device(device, 'global_step')):
            return tf.get_variable('global_step', shape=[], dtype=tf.int64, initializer=tf.zeros_initializer, trainable=False, collections=collections)
[ "@", "scopes", ".", "add_arg_scope", "def", "global_step", "(", "device", "=", "''", ")", ":", "global_step_ref", "=", "tf", ".", "get_collection", "(", "tf", ".", "GraphKeys", ".", "GLOBAL_STEP", ")", "if", "global_step_ref", ":", "return", "global_step_ref",...
returns the global step variable .
train
true
42,583
def obj2dict(obj, processed=None):
    """Recursively convert *obj* into plain scalars/lists/dicts for display.

    Cycles are broken through the *processed* id-set (marked
    '<reference>'); objects are converted via their __dict__ with
    underscore-prefixed keys and function-like values dropped; anything
    without a __dict__ falls back to repr().  (Python 2: long/unicode.)

    NOTE(review): as written, an *obj* that is itself a dict takes the
    else branch and is repr()'d rather than recursed -- confirm whether
    the `hasattr` test was meant to be nested inside the isinstance check.
    """
    processed = (processed if (not (processed is None)) else set())
    if (obj is None):
        return None
    if isinstance(obj, (int, long, str, unicode, float, bool)):
        return obj
    if (id(obj) in processed):
        return '<reference>'
    processed.add(id(obj))
    if isinstance(obj, (list, tuple)):
        return [obj2dict(item, processed) for item in obj]
    if ((not isinstance(obj, dict)) and hasattr(obj, '__dict__')):
        obj = obj.__dict__
    else:
        return repr(obj)
    return dict(((key, obj2dict(value, processed)) for (key, value) in obj.items() if ((not key.startswith('_')) and (not (type(value) in (types.FunctionType, types.LambdaType, types.BuiltinFunctionType, types.BuiltinMethodType))))))
[ "def", "obj2dict", "(", "obj", ",", "processed", "=", "None", ")", ":", "processed", "=", "(", "processed", "if", "(", "not", "(", "processed", "is", "None", ")", ")", "else", "set", "(", ")", ")", "if", "(", "obj", "is", "None", ")", ":", "retur...
converts any object into a dict .
train
false
42,584
def _ensure_has_len(seq): try: len(seq) except TypeError: return list(seq) else: return seq
[ "def", "_ensure_has_len", "(", "seq", ")", ":", "try", ":", "len", "(", "seq", ")", "except", "TypeError", ":", "return", "list", "(", "seq", ")", "else", ":", "return", "seq" ]
if seq is an iterator .
train
false
42,585
def authorize_user_context(context, user_id):
    """Raise exception.Forbidden unless *context*, when it is a user
    context, carries a user_id matching *user_id*.

    Non-user contexts (e.g. admin/service) pass through untouched.
    """
    if is_user_context(context):
        if (not context.user_id):
            raise exception.Forbidden()
        elif (context.user_id != user_id):
            raise exception.Forbidden()
[ "def", "authorize_user_context", "(", "context", ",", "user_id", ")", ":", "if", "is_user_context", "(", "context", ")", ":", "if", "(", "not", "context", ".", "user_id", ")", ":", "raise", "exception", ".", "Forbidden", "(", ")", "elif", "(", "context", ...
ensures a request has permission to access the given user .
train
false
42,586
def reset():
    """Reset the datastore contents by delegating to the module _runtime."""
    _runtime.reset()
[ "def", "reset", "(", ")", ":", "_runtime", ".", "reset", "(", ")" ]
resets the db contents .
train
false
42,587
def set_caa_hostname(new_hostname):
    """Set the module-global base hostname used for Cover Art Archive
    requests."""
    global hostname
    hostname = new_hostname
[ "def", "set_caa_hostname", "(", "new_hostname", ")", ":", "global", "hostname", "hostname", "=", "new_hostname" ]
set the base hostname for cover art archive requests .
train
false
42,588
@webob.dec.wsgify
@microversion.version_handler(1.2)
@util.check_accept('application/json')
def get_resource_class(req):
    """Placement API handler: GET a single resource class by name.

    Requires microversion >= 1.2 and an application/json Accept header;
    responds with the serialized resource class as UTF-8 JSON.
    """
    name = util.wsgi_path_item(req.environ, 'name')
    context = req.environ['placement.context']
    rc = objects.ResourceClass.get_by_name(context, name)
    req.response.body = encodeutils.to_utf8(jsonutils.dumps(_serialize_resource_class(req.environ, rc)))
    req.response.content_type = 'application/json'
    return req.response
[ "@", "webob", ".", "dec", ".", "wsgify", "@", "microversion", ".", "version_handler", "(", "1.2", ")", "@", "util", ".", "check_accept", "(", "'application/json'", ")", "def", "get_resource_class", "(", "req", ")", ":", "name", "=", "util", ".", "wsgi_path...
get a single resource class .
train
false
42,589
def test_ipython_notebook(Chart, datas):
    """A populated chart must expose a non-empty SVG repr for notebooks."""
    chart = Chart()
    chart = make_data(chart, datas)
    assert chart._repr_svg_()
[ "def", "test_ipython_notebook", "(", "Chart", ",", "datas", ")", ":", "chart", "=", "Chart", "(", ")", "chart", "=", "make_data", "(", "chart", ",", "datas", ")", "assert", "chart", ".", "_repr_svg_", "(", ")" ]
test ipython notebook .
train
false
42,590
def read_user_dict(var_name, default_value):
    """Prompt (via click) the user for a JSON object for *var_name*.

    *default_value* must be a dict; accepting the prompt default (the
    literal string 'default') returns *default_value* unchanged, else
    the input parsed by process_json is returned.

    Raises TypeError when default_value is not a dict.  (Fix: the bare
    `raise TypeError` previously carried no message.)
    """
    if (not isinstance(default_value, dict)):
        raise TypeError('default_value for {!r} must be a dict, not {}'.format(var_name, type(default_value)))
    default_display = 'default'
    user_value = click.prompt(var_name, default=default_display, type=click.STRING, value_proc=process_json)
    if (user_value == default_display):
        return default_value
    return user_value
[ "def", "read_user_dict", "(", "var_name", ",", "default_value", ")", ":", "if", "(", "not", "isinstance", "(", "default_value", ",", "dict", ")", ")", ":", "raise", "TypeError", "default_display", "=", "'default'", "user_value", "=", "click", ".", "prompt", ...
prompt the user to provide a dictionary of data .
train
true
42,591
def process_file(filename):
    """Verify (and optionally rewrite) a single file.

    Runs checksum verification, compliance verification when
    OPTIONS.compliance is set, and update() when writing is requested
    with clean checksums (or OPTIONS.force).  Returns the total error
    count, or 1 when anything raised.
    """
    try:
        checksum_errors = verify_checksums(filename)
        if OPTIONS.compliance:
            compliance_errors = verify_compliance(filename)
        else:
            compliance_errors = 0
        if ((OPTIONS.write_file and (checksum_errors == 0)) or OPTIONS.force):
            update(filename)
        return (checksum_errors + compliance_errors)
    except Exception as e:
        log.error('EXCEPTION {!r} .. {}'.format(filename, e))
        return 1
[ "def", "process_file", "(", "filename", ")", ":", "try", ":", "checksum_errors", "=", "verify_checksums", "(", "filename", ")", "if", "OPTIONS", ".", "compliance", ":", "compliance_errors", "=", "verify_compliance", "(", "filename", ")", "else", ":", "compliance...
handle a single file .
train
false
42,593
def line_prefix(prefix, string):
    """Return *string* with *prefix* prepended to every line.

    Lines are taken from splitlines(), so trailing-newline information
    is normalized away and an empty input yields an empty result.
    """
    prefixed = [prefix + line for line in string.splitlines()]
    return '\n'.join(prefixed)
[ "def", "line_prefix", "(", "prefix", ",", "string", ")", ":", "return", "'\\n'", ".", "join", "(", "(", "(", "prefix", "+", "x", ")", "for", "x", "in", "string", ".", "splitlines", "(", ")", ")", ")" ]
return string with all lines prefixed by prefix .
train
false
42,594
def parse_config_h(fp, vars=None): if (vars is None): vars = {} import re define_rx = re.compile('#define ([A-Z][A-Za-z0-9_]+) (.*)\n') undef_rx = re.compile('/[*] #undef ([A-Z][A-Za-z0-9_]+) [*]/\n') while True: line = fp.readline() if (not line): break m = define_rx.match(line) if m: (n, v) = m.group(1, 2) try: v = int(v) except ValueError: pass vars[n] = v else: m = undef_rx.match(line) if m: vars[m.group(1)] = 0 return vars
[ "def", "parse_config_h", "(", "fp", ",", "vars", "=", "None", ")", ":", "if", "(", "vars", "is", "None", ")", ":", "vars", "=", "{", "}", "import", "re", "define_rx", "=", "re", ".", "compile", "(", "'#define ([A-Z][A-Za-z0-9_]+) (.*)\\n'", ")", "undef_r...
parse a config .
train
true
42,595
def strip_ns(xml, ns):
    """Parse *xml* (a string) and strip the '{ns}' namespace prefix from
    every element tag, returning the root element.

    Bug fix: the original ignored the *ns* argument and always stripped
    the module-level XMLNS namespace instead.
    """
    it = ET.iterparse(StringIO(xml))
    ns_prefix = ('{%s}' % ns)
    for (_, el) in it:
        if el.tag.startswith(ns_prefix):
            el.tag = el.tag[len(ns_prefix):]
    # iterparse exposes the document root once parsing is exhausted.
    return it.root
[ "def", "strip_ns", "(", "xml", ",", "ns", ")", ":", "it", "=", "ET", ".", "iterparse", "(", "StringIO", "(", "xml", ")", ")", "ns_prefix", "=", "(", "'{%s}'", "%", "XMLNS", ")", "for", "(", "_", ",", "el", ")", "in", "it", ":", "if", "el", "....
strip the provided name from tag names .
train
false
42,597
@receiver((post_save, post_delete), sender=CircuitTermination) def update_circuit(instance, **kwargs): Circuit.objects.filter(pk=instance.circuit_id).update(last_updated=timezone.now())
[ "@", "receiver", "(", "(", "post_save", ",", "post_delete", ")", ",", "sender", "=", "CircuitTermination", ")", "def", "update_circuit", "(", "instance", ",", "**", "kwargs", ")", ":", "Circuit", ".", "objects", ".", "filter", "(", "pk", "=", "instance", ...
when a circuittermination has been modified .
train
false
42,598
def get_sub_folders(session, ds_browser, ds_path): search_task = session._call_method(session.vim, 'SearchDatastore_Task', ds_browser, datastorePath=str(ds_path)) try: task_info = session._wait_for_task(search_task) except vexc.FileNotFoundException: return set() if hasattr(task_info.result, 'file'): return set([file.path for file in task_info.result.file]) return set()
[ "def", "get_sub_folders", "(", "session", ",", "ds_browser", ",", "ds_path", ")", ":", "search_task", "=", "session", ".", "_call_method", "(", "session", ".", "vim", ",", "'SearchDatastore_Task'", ",", "ds_browser", ",", "datastorePath", "=", "str", "(", "ds_...
return a set of subfolders for a path on a datastore .
train
false
42,599
def _uniquify_projs(projs, check_active=True, sort=True): final_projs = [] for proj in projs: if (not any((_proj_equal(p, proj, check_active) for p in final_projs))): final_projs.append(proj) my_count = count(len(final_projs)) def sorter(x): 'Sort in a nice way.' digits = [s for s in x['desc'] if s.isdigit()] if digits: sort_idx = int(digits[(-1)]) else: sort_idx = next(my_count) return (sort_idx, x['desc']) return (sorted(final_projs, key=sorter) if sort else final_projs)
[ "def", "_uniquify_projs", "(", "projs", ",", "check_active", "=", "True", ",", "sort", "=", "True", ")", ":", "final_projs", "=", "[", "]", "for", "proj", "in", "projs", ":", "if", "(", "not", "any", "(", "(", "_proj_equal", "(", "p", ",", "proj", ...
make unique projs .
train
false
42,600
@deco.keyword('Set Name Using Robot Name Attribute') def name_set_in_method_signature(a, b, *args, **kwargs): pass
[ "@", "deco", ".", "keyword", "(", "'Set Name Using Robot Name Attribute'", ")", "def", "name_set_in_method_signature", "(", "a", ",", "b", ",", "*", "args", ",", "**", "kwargs", ")", ":", "pass" ]
this makes sure that @deco .
train
false
42,602
def setgroups(groups): max_groups = None try: max_groups = os.sysconf(u'SC_NGROUPS_MAX') except Exception: pass try: return _setgroups_hack(groups[:max_groups]) except OSError as exc: if (exc.errno != errno.EPERM): raise if any(((group not in groups) for group in os.getgroups())): raise
[ "def", "setgroups", "(", "groups", ")", ":", "max_groups", "=", "None", "try", ":", "max_groups", "=", "os", ".", "sysconf", "(", "u'SC_NGROUPS_MAX'", ")", "except", "Exception", ":", "pass", "try", ":", "return", "_setgroups_hack", "(", "groups", "[", ":"...
set active groups from a list of group ids .
train
false
42,605
def p_postfix_expression_5(t): pass
[ "def", "p_postfix_expression_5", "(", "t", ")", ":", "pass" ]
postfix_expression : postfix_expression period id .
train
false
42,606
def load_features(jobs, host, port, path, tiles): features = [] lock = _Lock() args = (lock, host, port, path, tiles, features) threads = [Thread(target=load_tile_features, args=args) for i in range(jobs)] for thread in threads: thread.start() for thread in threads: thread.join() logging.debug(('Loaded %d features' % len(features))) return features
[ "def", "load_features", "(", "jobs", ",", "host", ",", "port", ",", "path", ",", "tiles", ")", ":", "features", "=", "[", "]", "lock", "=", "_Lock", "(", ")", "args", "=", "(", "lock", ",", "host", ",", "port", ",", "path", ",", "tiles", ",", "...
load data from tiles to features .
train
false
42,607
def provider_uri_rewrites(providers): return _load_provider_feature('uri_rewrites', providers)
[ "def", "provider_uri_rewrites", "(", "providers", ")", ":", "return", "_load_provider_feature", "(", "'uri_rewrites'", ",", "providers", ")" ]
load tuples from an ordered list of dotted-notation modules which contain a uri_rewrites function uri_rewrites should accept a list of rewrites and returns an augmented list of rewrites: this allows the addition of .
train
false
42,609
def custom_object_scope(*args): return CustomObjectScope(*args)
[ "def", "custom_object_scope", "(", "*", "args", ")", ":", "return", "CustomObjectScope", "(", "*", "args", ")" ]
provides a scope that changes to _global_custom_objects cannot escape .
train
false
42,610
def showScore(score): scoreDigits = [int(x) for x in list(str(score))] totalWidth = 0 for digit in scoreDigits: totalWidth += IMAGES['numbers'][digit].get_width() Xoffset = ((SCREENWIDTH - totalWidth) / 2) for digit in scoreDigits: SCREEN.blit(IMAGES['numbers'][digit], (Xoffset, (SCREENHEIGHT * 0.1))) Xoffset += IMAGES['numbers'][digit].get_width()
[ "def", "showScore", "(", "score", ")", ":", "scoreDigits", "=", "[", "int", "(", "x", ")", "for", "x", "in", "list", "(", "str", "(", "score", ")", ")", "]", "totalWidth", "=", "0", "for", "digit", "in", "scoreDigits", ":", "totalWidth", "+=", "IMA...
displays score in center of screen .
train
false
42,611
def before_new_template(force_new_urls): global new_style_url_tag, error_on_old_style_url_tag, errors new_style_url_tag = False error_on_old_style_url_tag = force_new_urls errors = []
[ "def", "before_new_template", "(", "force_new_urls", ")", ":", "global", "new_style_url_tag", ",", "error_on_old_style_url_tag", ",", "errors", "new_style_url_tag", "=", "False", "error_on_old_style_url_tag", "=", "force_new_urls", "errors", "=", "[", "]" ]
reset state ready for new template .
train
false
42,612
def run_all(plugin, args=''): data = _execute_cmd(plugin, args, 'cmd.run_all') return data
[ "def", "run_all", "(", "plugin", ",", "args", "=", "''", ")", ":", "data", "=", "_execute_cmd", "(", "plugin", ",", "args", ",", "'cmd.run_all'", ")", "return", "data" ]
run :mod:cmd .
train
false
42,613
def _normalize_query_parameter(value): if isinstance(value, Model): value = value.key() if (isinstance(value, datetime.date) and (not isinstance(value, datetime.datetime))): value = _date_to_datetime(value) elif isinstance(value, datetime.time): value = _time_to_datetime(value) return value
[ "def", "_normalize_query_parameter", "(", "value", ")", ":", "if", "isinstance", "(", "value", ",", "Model", ")", ":", "value", "=", "value", ".", "key", "(", ")", "if", "(", "isinstance", "(", "value", ",", "datetime", ".", "date", ")", "and", "(", ...
make any necessary type conversions to a query parameter .
train
false
42,614
def _pack(x): try: return list(x) except TypeError: return [x]
[ "def", "_pack", "(", "x", ")", ":", "try", ":", "return", "list", "(", "x", ")", "except", "TypeError", ":", "return", "[", "x", "]" ]
pack parts into a ssh key blob .
train
false
42,615
def _estimate_gaussian_parameters(X, resp, reg_covar, covariance_type): nk = (resp.sum(axis=0) + (10 * np.finfo(resp.dtype).eps)) means = (np.dot(resp.T, X) / nk[:, np.newaxis]) covariances = {'full': _estimate_gaussian_covariances_full, 'tied': _estimate_gaussian_covariances_tied, 'diag': _estimate_gaussian_covariances_diag, 'spherical': _estimate_gaussian_covariances_spherical}[covariance_type](resp, X, nk, means, reg_covar) return (nk, means, covariances)
[ "def", "_estimate_gaussian_parameters", "(", "X", ",", "resp", ",", "reg_covar", ",", "covariance_type", ")", ":", "nk", "=", "(", "resp", ".", "sum", "(", "axis", "=", "0", ")", "+", "(", "10", "*", "np", ".", "finfo", "(", "resp", ".", "dtype", "...
estimate the gaussian distribution parameters .
train
false
42,616
def setup_env(): os.environ['AUTH_DOMAIN'] = 'appscale.com' os.environ['USER_EMAIL'] = '' os.environ['USER_NICKNAME'] = '' os.environ['APPLICATION_ID'] = ''
[ "def", "setup_env", "(", ")", ":", "os", ".", "environ", "[", "'AUTH_DOMAIN'", "]", "=", "'appscale.com'", "os", ".", "environ", "[", "'USER_EMAIL'", "]", "=", "''", "os", ".", "environ", "[", "'USER_NICKNAME'", "]", "=", "''", "os", ".", "environ", "[...
setup virtualenv for bench .
train
false
42,617
def create_lti_user(lti_user_id, lti_consumer): edx_password = str(uuid.uuid4()) created = False while (not created): try: edx_user_id = generate_random_edx_username() edx_email = '{}@{}'.format(edx_user_id, settings.LTI_USER_EMAIL_DOMAIN) with transaction.atomic(): edx_user = User.objects.create_user(username=edx_user_id, password=edx_password, email=edx_email) edx_user_profile = UserProfile(user=edx_user) edx_user_profile.save() created = True except IntegrityError: pass lti_user = LtiUser(lti_consumer=lti_consumer, lti_user_id=lti_user_id, edx_user=edx_user) lti_user.save() return lti_user
[ "def", "create_lti_user", "(", "lti_user_id", ",", "lti_consumer", ")", ":", "edx_password", "=", "str", "(", "uuid", ".", "uuid4", "(", ")", ")", "created", "=", "False", "while", "(", "not", "created", ")", ":", "try", ":", "edx_user_id", "=", "generat...
generate a new user on the edx platform with a random username and password .
train
false
42,618
def _locate_repo_files(repo, rewrite=False): ret_val = [] files = [] conf_dirs = ['/etc/xbps.d/', '/usr/share/xbps.d/'] name_glob = '*.conf' regex = re.compile((('\\s*repository\\s*=\\s*' + repo) + '/?\\s*(#.*)?$')) for cur_dir in conf_dirs: files.extend(glob.glob((cur_dir + name_glob))) for filename in files: write_buff = [] with salt.utils.fopen(filename, 'r') as cur_file: for line in cur_file: if regex.match(line): ret_val.append(filename) else: write_buff.append(line) if (rewrite and (filename in ret_val)): if (len(write_buff) > 0): with salt.utils.fopen(filename, 'w') as rewrite_file: rewrite_file.write(''.join(write_buff)) else: os.remove(filename) return ret_val
[ "def", "_locate_repo_files", "(", "repo", ",", "rewrite", "=", "False", ")", ":", "ret_val", "=", "[", "]", "files", "=", "[", "]", "conf_dirs", "=", "[", "'/etc/xbps.d/'", ",", "'/usr/share/xbps.d/'", "]", "name_glob", "=", "'*.conf'", "regex", "=", "re",...
find what file a repo is called in .
train
false
42,620
def _parseClientSSL(**kwargs): from twisted.internet import ssl kwargs = _parseClientTCP(**kwargs) certKey = kwargs.pop('certKey', None) privateKey = kwargs.pop('privateKey', None) caCertsDir = kwargs.pop('caCertsDir', None) if (certKey is not None): certx509 = ssl.Certificate.loadPEM(FilePath(certKey).getContent()).original else: certx509 = None if (privateKey is not None): privateKey = ssl.PrivateCertificate.loadPEM(FilePath(privateKey).getContent()).privateKey.original else: privateKey = None if (caCertsDir is not None): verify = True caCerts = _loadCAsFromDir(FilePath(caCertsDir)) else: verify = False caCerts = None kwargs['sslContextFactory'] = ssl.CertificateOptions(method=ssl.SSL.SSLv23_METHOD, certificate=certx509, privateKey=privateKey, verify=verify, caCerts=caCerts) return kwargs
[ "def", "_parseClientSSL", "(", "**", "kwargs", ")", ":", "from", "twisted", ".", "internet", "import", "ssl", "kwargs", "=", "_parseClientTCP", "(", "**", "kwargs", ")", "certKey", "=", "kwargs", ".", "pop", "(", "'certKey'", ",", "None", ")", "privateKey"...
perform any argument value coercion necessary for ssl client parameters .
train
false
42,622
def get_limited_to(headers): policy_dict = dict() policy_dict['roles'] = headers.get('X-Roles', '').split(',') policy_dict['user_id'] = headers.get('X-User-Id') policy_dict['project_id'] = headers.get('X-Project-Id') rule_name = ('segregation' if _has_rule('segregation') else 'context_is_admin') if (not pecan.request.enforcer.enforce(rule_name, {}, policy_dict)): return (headers.get('X-User-Id'), headers.get('X-Project-Id')) return (None, None)
[ "def", "get_limited_to", "(", "headers", ")", ":", "policy_dict", "=", "dict", "(", ")", "policy_dict", "[", "'roles'", "]", "=", "headers", ".", "get", "(", "'X-Roles'", ",", "''", ")", ".", "split", "(", "','", ")", "policy_dict", "[", "'user_id'", "...
return the user and project the request should be limited to .
train
false
42,623
def dup_degree(f): if (not f): return (- oo) return (len(f) - 1)
[ "def", "dup_degree", "(", "f", ")", ":", "if", "(", "not", "f", ")", ":", "return", "(", "-", "oo", ")", "return", "(", "len", "(", "f", ")", "-", "1", ")" ]
return the leading degree of f in k[x] .
train
false
42,624
def numCores(): if hasattr(numCores, 'ncores'): return numCores.ncores try: numCores.ncores = int(quietRun('grep -c processor /proc/cpuinfo')) except ValueError: return 0 return numCores.ncores
[ "def", "numCores", "(", ")", ":", "if", "hasattr", "(", "numCores", ",", "'ncores'", ")", ":", "return", "numCores", ".", "ncores", "try", ":", "numCores", ".", "ncores", "=", "int", "(", "quietRun", "(", "'grep -c processor /proc/cpuinfo'", ")", ")", "exc...
returns number of cpu cores based on /proc/cpuinfo .
train
false
42,625
def split_escape(string, sep, maxsplit=None, escape_char='\\'): assert (len(sep) == 1) assert (len(escape_char) == 1) if isinstance(string, bytes): if isinstance(escape_char, text_type): escape_char = escape_char.encode('ascii') iter_ = iterbytes else: iter_ = iter if (maxsplit is None): maxsplit = len(string) empty = string[:0] result = [] current = empty escaped = False for char in iter_(string): if escaped: if ((char != escape_char) and (char != sep)): current += escape_char current += char escaped = False elif (char == escape_char): escaped = True elif ((char == sep) and (len(result) < maxsplit)): result.append(current) current = empty else: current += char result.append(current) return result
[ "def", "split_escape", "(", "string", ",", "sep", ",", "maxsplit", "=", "None", ",", "escape_char", "=", "'\\\\'", ")", ":", "assert", "(", "len", "(", "sep", ")", "==", "1", ")", "assert", "(", "len", "(", "escape_char", ")", "==", "1", ")", "if",...
like unicode/str/bytes .
train
true
42,628
def print_usage(actions): actions = actions.items() actions.sort() print ('usage: %s <action> [<options>]' % basename(sys.argv[0])) print (' %s --help' % basename(sys.argv[0])) print print 'actions:' for (name, (func, doc, arguments)) in actions: print (' %s:' % name) for line in doc.splitlines(): print (' %s' % line) if arguments: print for (arg, shortcut, default, argtype) in arguments: if isinstance(default, bool): print (' %s' % ((((shortcut and ('-%s, ' % shortcut)) or '') + '--') + arg)) else: print (' %-30s%-10s%s' % (((((shortcut and ('-%s, ' % shortcut)) or '') + '--') + arg), argtype, default)) print
[ "def", "print_usage", "(", "actions", ")", ":", "actions", "=", "actions", ".", "items", "(", ")", "actions", ".", "sort", "(", ")", "print", "(", "'usage: %s <action> [<options>]'", "%", "basename", "(", "sys", ".", "argv", "[", "0", "]", ")", ")", "p...
print the usage information .
train
true
42,629
def list_to_dict(a_list): new_dict = {} for (i, v) in enumerate(a_list): new_dict[i] = v return new_dict
[ "def", "list_to_dict", "(", "a_list", ")", ":", "new_dict", "=", "{", "}", "for", "(", "i", ",", "v", ")", "in", "enumerate", "(", "a_list", ")", ":", "new_dict", "[", "i", "]", "=", "v", "return", "new_dict" ]
convert a list to a dictionary .
train
false
42,630
def _comp_sum_eeg(beta, ctheta, lut_fun, n_fact): n_chunk = (50000000 // ((8 * max(n_fact.shape)) * 2)) lims = np.concatenate([np.arange(0, beta.size, n_chunk), [beta.size]]) s0 = np.empty(beta.shape) for (start, stop) in zip(lims[:(-1)], lims[1:]): coeffs = lut_fun(ctheta[start:stop]) betans = np.tile(beta[start:stop][:, np.newaxis], (1, n_fact.shape[0])) np.cumprod(betans, axis=1, out=betans) coeffs *= betans s0[start:stop] = np.dot(coeffs, n_fact) return s0
[ "def", "_comp_sum_eeg", "(", "beta", ",", "ctheta", ",", "lut_fun", ",", "n_fact", ")", ":", "n_chunk", "=", "(", "50000000", "//", "(", "(", "8", "*", "max", "(", "n_fact", ".", "shape", ")", ")", "*", "2", ")", ")", "lims", "=", "np", ".", "c...
lead field dot products using legendre polynomial series .
train
false
42,631
def scale_matrix(factor, origin=None, direction=None): if (direction is None): M = numpy.diag([factor, factor, factor, 1.0]) if (origin is not None): M[:3, 3] = origin[:3] M[:3, 3] *= (1.0 - factor) else: direction = unit_vector(direction[:3]) factor = (1.0 - factor) M = numpy.identity(4) M[:3, :3] -= (factor * numpy.outer(direction, direction)) if (origin is not None): M[:3, 3] = ((factor * numpy.dot(origin[:3], direction)) * direction) return M
[ "def", "scale_matrix", "(", "factor", ",", "origin", "=", "None", ",", "direction", "=", "None", ")", ":", "if", "(", "direction", "is", "None", ")", ":", "M", "=", "numpy", ".", "diag", "(", "[", "factor", ",", "factor", ",", "factor", ",", "1.0",...
return matrix to scale by factor around origin in direction .
train
true
42,632
def NodeName(node): if (node.type < 256): return token.tok_name[node.type] else: return pygram.python_grammar.number2symbol[node.type]
[ "def", "NodeName", "(", "node", ")", ":", "if", "(", "node", ".", "type", "<", "256", ")", ":", "return", "token", ".", "tok_name", "[", "node", ".", "type", "]", "else", ":", "return", "pygram", ".", "python_grammar", ".", "number2symbol", "[", "nod...
produce a string name for a given node .
train
false
42,633
def _blockdevicevolume_from_ebs_volume(ebs_volume): if ebs_volume.attachments: attached_to = unicode(ebs_volume.attachments[0]['InstanceId']) else: attached_to = None volume_dataset_id = _get_volume_tag(ebs_volume, DATASET_ID_LABEL) return BlockDeviceVolume(blockdevice_id=unicode(ebs_volume.id), size=int(GiB(ebs_volume.size).to_Byte().value), attached_to=attached_to, dataset_id=UUID(volume_dataset_id))
[ "def", "_blockdevicevolume_from_ebs_volume", "(", "ebs_volume", ")", ":", "if", "ebs_volume", ".", "attachments", ":", "attached_to", "=", "unicode", "(", "ebs_volume", ".", "attachments", "[", "0", "]", "[", "'InstanceId'", "]", ")", "else", ":", "attached_to",...
helper function to convert volume information from ebs format to flocker block device format .
train
false
42,634
def _estimate_gaussian_covariances_full(resp, X, nk, means, reg_covar): (n_components, n_features) = means.shape covariances = np.empty((n_components, n_features, n_features)) for k in range(n_components): diff = (X - means[k]) covariances[k] = (np.dot((resp[:, k] * diff.T), diff) / nk[k]) covariances[k].flat[::(n_features + 1)] += reg_covar return covariances
[ "def", "_estimate_gaussian_covariances_full", "(", "resp", ",", "X", ",", "nk", ",", "means", ",", "reg_covar", ")", ":", "(", "n_components", ",", "n_features", ")", "=", "means", ".", "shape", "covariances", "=", "np", ".", "empty", "(", "(", "n_componen...
estimate the full covariance matrices .
train
false
42,636
@utils.auth.requires_login @blueprint.route('/upload_archive', methods=['POST']) def upload_archive(): files = flask.request.files archive_file = get_tempfile(files['archive'], '.archive') if tarfile.is_tarfile(archive_file): archive = tarfile.open(archive_file, 'r') names = archive.getnames() elif zipfile.is_zipfile(archive_file): archive = zipfile.ZipFile(archive_file, 'r') names = archive.namelist() else: return (flask.jsonify({'status': 'Incorrect Archive Type'}), 500) if ('info.json' in names): tempdir = tempfile.mkdtemp() labels_file = None archive.extractall(path=tempdir) with open(os.path.join(tempdir, 'info.json')) as data_file: info = json.load(data_file) (valid, key) = validate_archive_keys(info) if (valid is False): return (flask.jsonify({'status': (("Missing Key '" + key) + "' in info.json")}), 500) weights_file = os.path.join(tempdir, info['snapshot file']) if ('model file' in info): model_file = os.path.join(tempdir, info['model file']) elif ('network file' in info): model_file = os.path.join(tempdir, info['network file']) else: return (flask.jsonify({'status': 'Missing model definition in info.json'}), 500) if ('labels file' in info): labels_file = os.path.join(tempdir, info['labels file']) job = PretrainedModelJob(weights_file, model_file, labels_file, info['framework'], username=utils.auth.get_username(), name=info['name']) scheduler.add_job(job) job.wait_completion() shutil.rmtree(tempdir, ignore_errors=True) return (flask.jsonify({'status': 'success'}), 200) else: return (flask.jsonify({'status': 'Missing or Incorrect json file'}), 500)
[ "@", "utils", ".", "auth", ".", "requires_login", "@", "blueprint", ".", "route", "(", "'/upload_archive'", ",", "methods", "=", "[", "'POST'", "]", ")", "def", "upload_archive", "(", ")", ":", "files", "=", "flask", ".", "request", ".", "files", "archiv...
a wrapper around the actual upload view function to clean up the temporary file afterwards .
train
false
42,637
def float_compare(value1, value2, precision_digits=None, precision_rounding=None): rounding_factor = _float_check_precision(precision_digits=precision_digits, precision_rounding=precision_rounding) value1 = float_round(value1, precision_rounding=rounding_factor) value2 = float_round(value2, precision_rounding=rounding_factor) delta = (value1 - value2) if float_is_zero(delta, precision_rounding=rounding_factor): return 0 return ((-1) if (delta < 0.0) else 1)
[ "def", "float_compare", "(", "value1", ",", "value2", ",", "precision_digits", "=", "None", ",", "precision_rounding", "=", "None", ")", ":", "rounding_factor", "=", "_float_check_precision", "(", "precision_digits", "=", "precision_digits", ",", "precision_rounding",...
compare value1 and value2 after rounding them according to the given precision .
train
false
42,639
def _switch_package_license_ids(old_ids, old_license_titles, map): new_ids = {} for (package_id, old_license_id) in old_ids.items(): if (old_license_id != None): old_license_title = old_license_titles[old_license_id] new_license_id = map[old_license_title] new_ids[package_id] = new_license_id print ('Switched license_id %s to %s' % (old_license_id, new_license_id)) return new_ids
[ "def", "_switch_package_license_ids", "(", "old_ids", ",", "old_license_titles", ",", "map", ")", ":", "new_ids", "=", "{", "}", "for", "(", "package_id", ",", "old_license_id", ")", "in", "old_ids", ".", "items", "(", ")", ":", "if", "(", "old_license_id", ...
returns a dict of new license ids .
train
false
42,642
def abs(a): return _abs(a)
[ "def", "abs", "(", "a", ")", ":", "return", "_abs", "(", "a", ")" ]
element-wise absolute value .
train
false
42,643
@register.simple_tag(takes_context=True) def avatar_urls(context, user, size, service_id=None): service = avatar_services.for_user(user, service_id) if (service is None): logging.error(u'Could not get a suitable avatar service for user %s.', user) urls = {} else: urls = {resolution: url for (resolution, url) in six.iteritems(service.get_avatar_urls(request=context[u'request'], user=user, size=size))} return mark_safe(json.dumps(urls))
[ "@", "register", ".", "simple_tag", "(", "takes_context", "=", "True", ")", "def", "avatar_urls", "(", "context", ",", "user", ",", "size", ",", "service_id", "=", "None", ")", ":", "service", "=", "avatar_services", ".", "for_user", "(", "user", ",", "s...
serialize the users avatar urls into a javascript object .
train
false
42,644
def quaternion_multiply(quaternion1, quaternion0): (w0, x0, y0, z0) = quaternion0 (w1, x1, y1, z1) = quaternion1 return numpy.array([(((((- x1) * x0) - (y1 * y0)) - (z1 * z0)) + (w1 * w0)), ((((x1 * w0) + (y1 * z0)) - (z1 * y0)) + (w1 * x0)), (((((- x1) * z0) + (y1 * w0)) + (z1 * x0)) + (w1 * y0)), ((((x1 * y0) - (y1 * x0)) + (z1 * w0)) + (w1 * z0))], dtype=numpy.float64)
[ "def", "quaternion_multiply", "(", "quaternion1", ",", "quaternion0", ")", ":", "(", "w0", ",", "x0", ",", "y0", ",", "z0", ")", "=", "quaternion0", "(", "w1", ",", "x1", ",", "y1", ",", "z1", ")", "=", "quaternion1", "return", "numpy", ".", "array",...
return multiplication of two quaternions .
train
true
42,645
def equateSphericalDotElevation(point, returnValue): radius = abs(point) if (radius <= 0.0): return azimuthComplex = point.dropAxis() azimuthRadius = abs(azimuthComplex) if (azimuthRadius <= 0.0): return elevationComplex = euclidean.getWiddershinsUnitPolar(math.radians(returnValue)) azimuthComplex *= ((radius / azimuthRadius) * elevationComplex.real) point.x = azimuthComplex.real point.y = azimuthComplex.imag point.z = (elevationComplex.imag * radius)
[ "def", "equateSphericalDotElevation", "(", "point", ",", "returnValue", ")", ":", "radius", "=", "abs", "(", "point", ")", "if", "(", "radius", "<=", "0.0", ")", ":", "return", "azimuthComplex", "=", "point", ".", "dropAxis", "(", ")", "azimuthRadius", "="...
get equation for spherical elevation .
train
false
42,647
def get_filterdate(filter_date, date_time): returnvalue = '' date_year = strftime('%Y', gmtime(date_time)) date_month = strftime('%m', gmtime(date_time)) date_day = strftime('%d', gmtime(date_time)) if ((filter_date == 'today') and (int(date_year) == int(localtime()[0])) and (int(date_month) == int(localtime()[1])) and (int(date_day) == int(localtime()[2]))): returnvalue = 'true' elif ((filter_date == 'thismonth') and (date_time >= (time() - 2592000))): returnvalue = 'true' elif ((filter_date == 'thisyear') and (int(date_year) == int(localtime()[0]))): returnvalue = 'true' elif ((filter_date == 'past7days') and (date_time >= (time() - 604800))): returnvalue = 'true' elif (filter_date == ''): returnvalue = 'true' return returnvalue
[ "def", "get_filterdate", "(", "filter_date", ",", "date_time", ")", ":", "returnvalue", "=", "''", "date_year", "=", "strftime", "(", "'%Y'", ",", "gmtime", "(", "date_time", ")", ")", "date_month", "=", "strftime", "(", "'%m'", ",", "gmtime", "(", "date_t...
get filterdate .
train
false
42,648
def graph_menus(): ret = MPMenuSubMenu('Graphs', []) for i in range(len(mestate.graphs)): g = mestate.graphs[i] path = g.name.split('/') name = path[(-1)] path = path[:(-1)] ret.add_to_submenu(path, MPMenuItem(name, name, ('# graph :%u' % i))) return ret
[ "def", "graph_menus", "(", ")", ":", "ret", "=", "MPMenuSubMenu", "(", "'Graphs'", ",", "[", "]", ")", "for", "i", "in", "range", "(", "len", "(", "mestate", ".", "graphs", ")", ")", ":", "g", "=", "mestate", ".", "graphs", "[", "i", "]", "path",...
return menu tree for graphs .
train
true
42,649
def _GetMSBuildPropertyGroup(spec, label, properties): group = ['PropertyGroup'] if label: group.append({'Label': label}) num_configurations = len(spec['configurations']) def GetEdges(node): edges = set() for value in sorted(properties[node].keys()): edges.update(set([v for v in MSVS_VARIABLE_REFERENCE.findall(value) if ((v in properties) and (v != node))])) return edges properties_ordered = gyp.common.TopologicallySorted(properties.keys(), GetEdges) for name in reversed(properties_ordered): values = properties[name] for (value, conditions) in sorted(values.iteritems()): if (len(conditions) == num_configurations): group.append([name, value]) else: for condition in conditions: group.append([name, {'Condition': condition}, value]) return [group]
[ "def", "_GetMSBuildPropertyGroup", "(", "spec", ",", "label", ",", "properties", ")", ":", "group", "=", "[", "'PropertyGroup'", "]", "if", "label", ":", "group", ".", "append", "(", "{", "'Label'", ":", "label", "}", ")", "num_configurations", "=", "len",...
returns a propertygroup definition for the specified properties .
train
false
42,652
def add_section(data, label, icon, items): if (not items): return data.append({u'label': label, u'icon': icon, u'items': items})
[ "def", "add_section", "(", "data", ",", "label", ",", "icon", ",", "items", ")", ":", "if", "(", "not", "items", ")", ":", "return", "data", ".", "append", "(", "{", "u'label'", ":", "label", ",", "u'icon'", ":", "icon", ",", "u'items'", ":", "item...
adds a section to the module data .
train
false
42,653
def acc_check(expected, got, rel_err=2e-15, abs_err=5e-323): if (math.isinf(expected) and (got == expected)): return None error = (got - expected) permitted_error = max(abs_err, (rel_err * abs(expected))) if (abs(error) < permitted_error): return None return 'error = {}; permitted error = {}'.format(error, permitted_error)
[ "def", "acc_check", "(", "expected", ",", "got", ",", "rel_err", "=", "2e-15", ",", "abs_err", "=", "5e-323", ")", ":", "if", "(", "math", ".", "isinf", "(", "expected", ")", "and", "(", "got", "==", "expected", ")", ")", ":", "return", "None", "er...
determine whether non-nan floats a and b are equal to within a rounding error .
train
false
42,654
def is_scalar_neg(builder, value): return _scalar_pred_against_zero(builder, value, functools.partial(builder.fcmp_ordered, '<'), '<')
[ "def", "is_scalar_neg", "(", "builder", ",", "value", ")", ":", "return", "_scalar_pred_against_zero", "(", "builder", ",", "value", ",", "functools", ".", "partial", "(", "builder", ".", "fcmp_ordered", ",", "'<'", ")", ",", "'<'", ")" ]
is *value* negative? assumes *value* is signed .
train
false
42,655
def decode_header(header): if hasattr(header, '_chunks'): return [(_charset._encode(string, str(charset)), str(charset)) for (string, charset) in header._chunks] if (not ecre.search(header)): return [(header, None)] words = [] for line in header.splitlines(): parts = ecre.split(line) first = True while parts: unencoded = parts.pop(0) if first: unencoded = unencoded.lstrip() first = False if unencoded: words.append((unencoded, None, None)) if parts: charset = parts.pop(0).lower() encoding = parts.pop(0).lower() encoded = parts.pop(0) words.append((encoded, encoding, charset)) droplist = [] for (n, w) in enumerate(words): if ((n > 1) and w[1] and words[(n - 2)][1] and words[(n - 1)][0].isspace()): droplist.append((n - 1)) for d in reversed(droplist): del words[d] decoded_words = [] for (encoded_string, encoding, charset) in words: if (encoding is None): decoded_words.append((encoded_string, charset)) elif (encoding == 'q'): word = email.quoprimime.header_decode(encoded_string) decoded_words.append((word, charset)) elif (encoding == 'b'): paderr = (len(encoded_string) % 4) if paderr: encoded_string += '==='[:(4 - paderr)] try: word = email.base64mime.decode(encoded_string) except binascii.Error: raise HeaderParseError('Base64 decoding error') else: decoded_words.append((word, charset)) else: raise AssertionError(('Unexpected encoding: ' + encoding)) collapsed = [] last_word = last_charset = None for (word, charset) in decoded_words: if isinstance(word, unicode): word = bytes(word, 'raw-unicode-escape') if (last_word is None): last_word = word last_charset = charset elif (charset != last_charset): collapsed.append((last_word, last_charset)) last_word = word last_charset = charset elif (last_charset is None): last_word += (' ' + word) else: last_word += word collapsed.append((last_word, last_charset)) return collapsed
[ "def", "decode_header", "(", "header", ")", ":", "if", "hasattr", "(", "header", ",", "'_chunks'", ")", ":", "return", "[", "(", "_charset", ".", "_encode", "(", "string", ",", "str", "(", "charset", ")", ")", ",", "str", "(", "charset", ")", ")", ...
make sure the header is an unicode string .
train
true
42,656
def connected_masters(): port = 4505 config_port = __salt__['config.get']('publish_port') if config_port: port = config_port connected_masters_ips = _remote_port_tcp(port) return connected_masters_ips
[ "def", "connected_masters", "(", ")", ":", "port", "=", "4505", "config_port", "=", "__salt__", "[", "'config.get'", "]", "(", "'publish_port'", ")", "if", "config_port", ":", "port", "=", "config_port", "connected_masters_ips", "=", "_remote_port_tcp", "(", "po...
return current connected masters .
train
false
42,657
def freemem(): return utils.run('sync && echo 3 > /proc/sys/vm/drop_caches')
[ "def", "freemem", "(", ")", ":", "return", "utils", ".", "run", "(", "'sync && echo 3 > /proc/sys/vm/drop_caches'", ")" ]
free useless memoery .
train
false
42,658
def format_index(index, start, end):
    """Expand *index* (a strftime pattern) into a comma-separated list with
    one entry per UTC day from *start* through *end*, inclusive.

    Both *start* and *end* must be timezone-aware datetimes; each is
    shifted by its own UTC offset before the day range is computed.
    An end date earlier than the start date yields an empty string.
    """
    # Normalize both endpoints to UTC wall-clock time.
    utc_start = start - start.utcoffset()
    utc_end = end - end.utcoffset()
    # Whole days between the two UTC dates; negative means no days at all.
    span = (utc_end.date() - utc_start.date()).days
    daily = [
        (utc_start + datetime.timedelta(days=offset)).strftime(index)
        for offset in range(span + 1)
    ]
    return ','.join(daily)
[ "def", "format_index", "(", "index", ",", "start", ",", "end", ")", ":", "start", "-=", "start", ".", "utcoffset", "(", ")", "end", "-=", "end", ".", "utcoffset", "(", ")", "indexes", "=", "[", "]", "while", "(", "start", ".", "date", "(", ")", "...
takes an index name pattern and expands it into a comma-separated list of daily index names covering the start-to-end date range .
train
false
42,659
def type_coerce(expression, type_):
    """Associate a SQL expression with a particular type, without rendering
    an explicit CAST in the emitted SQL.

    :param expression: a SQL expression, bound parameter, ClauseElement
        provider (``__clause_element__``), plain Python literal, or None.
    :param type_: a type class or instance; normalized to an instance first.
    """
    type_ = type_api.to_instance(type_)
    if hasattr(expression, u'__clause_element__'):
        # Unwrap ORM-level constructs to their Core clause and recurse.
        return type_coerce(expression.__clause_element__(), type_)
    elif isinstance(expression, BindParameter):
        # Clone so the caller's bind parameter is not mutated in place.
        bp = expression._clone()
        bp.type = type_
        return bp
    elif (not isinstance(expression, Visitable)):
        # Not a SQL construct at all: treat as a literal value.
        if (expression is None):
            return Null()
        else:
            return literal(expression, type_=type_)
    else:
        # Existing SQL construct: wrap in an anonymous Label carrying the
        # requested type.
        return Label(None, expression, type_=type_)
[ "def", "type_coerce", "(", "expression", ",", "type_", ")", ":", "type_", "=", "type_api", ".", "to_instance", "(", "type_", ")", "if", "hasattr", "(", "expression", ",", "u'__clause_element__'", ")", ":", "return", "type_coerce", "(", "expression", ".", "__...
associate a sql expression with a particular type .
train
false
42,661
def render_page(request, page, current_language, slug):
    """Render a CMS page to a TemplateResponse, enforcing view permissions
    and applying the page's X-Frame-Options policy.

    Returns the not-found handler's response when the user lacks view
    permission for the page.
    """
    template_name = get_template_from_request(request, page, no_current_page=True)
    context = {}
    context['lang'] = current_language
    context['current_page'] = page
    context['has_change_permissions'] = user_can_change_page(request.user, page)
    context['has_view_permissions'] = user_can_view_page(request.user, page)
    if (not context['has_view_permissions']):
        # Permission denied is handled like a missing page, not a 403.
        return _handle_no_page(request, slug)
    response = TemplateResponse(request, template_name, context)
    # Cache the rendered page only after the template has been rendered.
    response.add_post_render_callback(set_page_cache)
    xframe_options = page.get_xframe_options()
    # INHERIT (or no setting) leaves the middleware/site default in place.
    if ((xframe_options == Page.X_FRAME_OPTIONS_INHERIT) or (xframe_options is None)):
        return response
    # Any explicit page-level setting bypasses Django's XFrameOptionsMiddleware.
    response.xframe_options_exempt = True
    if (xframe_options == Page.X_FRAME_OPTIONS_ALLOW):
        # ALLOW means: exempt from middleware and send no header at all.
        return response
    elif (xframe_options == Page.X_FRAME_OPTIONS_SAMEORIGIN):
        response['X-Frame-Options'] = 'SAMEORIGIN'
    elif (xframe_options == Page.X_FRAME_OPTIONS_DENY):
        response['X-Frame-Options'] = 'DENY'
    return response
[ "def", "render_page", "(", "request", ",", "page", ",", "current_language", ",", "slug", ")", ":", "template_name", "=", "get_template_from_request", "(", "request", ",", "page", ",", "no_current_page", "=", "True", ")", "context", "=", "{", "}", "context", ...
renders a page .
train
false
42,662
def sew_messages_and_reactions(messages, reactions):
    """Attach each reaction dict to its parent message (in place) and
    return the messages as a list in their original order.

    Every message gains a 'reactions' list (empty when none match).
    A reaction whose 'message_id' has no corresponding message raises
    KeyError.
    """
    by_id = {}
    for msg in messages:
        msg['reactions'] = []
        by_id[msg['id']] = msg
    for rx in reactions:
        by_id[rx['message_id']]['reactions'].append(rx)
    # dicts preserve insertion order, so this keeps the input ordering.
    return list(by_id.values())
[ "def", "sew_messages_and_reactions", "(", "messages", ",", "reactions", ")", ":", "for", "message", "in", "messages", ":", "message", "[", "'reactions'", "]", "=", "[", "]", "converted_messages", "=", "{", "message", "[", "'id'", "]", ":", "message", "for", ...
given an iterable of messages and reactions , stitch the reactions into their messages .
train
false
42,663
def describe_file_set(modules):
    """Build a FileSet descriptor from the given Python modules.

    The descriptor's ``files`` field is assigned only when at least one
    module is provided; on empty input it is left unset.
    """
    described = [describe_file(module) for module in modules]
    file_set = FileSet()
    if described:
        file_set.files = described
    return file_set
[ "def", "describe_file_set", "(", "modules", ")", ":", "descriptor", "=", "FileSet", "(", ")", "file_descriptors", "=", "[", "]", "for", "module", "in", "modules", ":", "file_descriptors", ".", "append", "(", "describe_file", "(", "module", ")", ")", "if", ...
build a file set from the specified python modules .
train
true