id_within_dataset
int64
1
55.5k
snippet
stringlengths
19
14.2k
tokens
listlengths
6
1.63k
nl
stringlengths
6
352
split_within_dataset
stringclasses
1 value
is_duplicated
bool
2 classes
35,254
def get_statuses(pr, session): base_url = pr['_links']['statuses']['href'] statuses = [] for i in count(): new_statuses = fetch_page(base_url, i, session) if ((not new_statuses) or ('context' not in new_statuses[0])): break statuses.extend(new_statuses) by_context = {} for s in statuses: by_context.setdefault(s['context'], []).append(s) return map(final_status, by_context.values())
[ "def", "get_statuses", "(", "pr", ",", "session", ")", ":", "base_url", "=", "pr", "[", "'_links'", "]", "[", "'statuses'", "]", "[", "'href'", "]", "statuses", "=", "[", "]", "for", "i", "in", "count", "(", ")", ":", "new_statuses", "=", "fetch_page...
get all of the statuses for a pull request .
train
false
35,255
@click.command(u'remove-from-installed-apps') @click.argument(u'app') @pass_context def remove_from_installed_apps(context, app): from frappe.installer import remove_from_installed_apps for site in context.sites: try: frappe.init(site=site) frappe.connect() remove_from_installed_apps(app) finally: frappe.destroy()
[ "@", "click", ".", "command", "(", "u'remove-from-installed-apps'", ")", "@", "click", ".", "argument", "(", "u'app'", ")", "@", "pass_context", "def", "remove_from_installed_apps", "(", "context", ",", "app", ")", ":", "from", "frappe", ".", "installer", "imp...
remove app from sites installed-apps list .
train
false
35,257
def prepare_token_request(grant_type, body=u'', **kwargs): params = [(u'grant_type', grant_type)] if (u'scope' in kwargs): kwargs[u'scope'] = list_to_scope(kwargs[u'scope']) for k in kwargs: if kwargs[k]: params.append((unicode_type(k), kwargs[k])) return add_params_to_qs(body, params)
[ "def", "prepare_token_request", "(", "grant_type", ",", "body", "=", "u''", ",", "**", "kwargs", ")", ":", "params", "=", "[", "(", "u'grant_type'", ",", "grant_type", ")", "]", "if", "(", "u'scope'", "in", "kwargs", ")", ":", "kwargs", "[", "u'scope'", ...
prepare the access token request .
train
false
35,258
def getfslineno(obj): import _pytest._code try: code = _pytest._code.Code(obj) except TypeError: try: fn = (py.std.inspect.getsourcefile(obj) or py.std.inspect.getfile(obj)) except TypeError: return ('', (-1)) fspath = ((fn and py.path.local(fn)) or None) lineno = (-1) if fspath: try: (_, lineno) = findsource(obj) except IOError: pass else: fspath = code.path lineno = code.firstlineno assert isinstance(lineno, int) return (fspath, lineno)
[ "def", "getfslineno", "(", "obj", ")", ":", "import", "_pytest", ".", "_code", "try", ":", "code", "=", "_pytest", ".", "_code", ".", "Code", "(", "obj", ")", "except", "TypeError", ":", "try", ":", "fn", "=", "(", "py", ".", "std", ".", "inspect",...
return source location for the given object .
train
false
35,259
def get_anonymous_cart_from_token(token, cart_queryset=Cart.objects.all()): return cart_queryset.open().filter(token=token, user=None).first()
[ "def", "get_anonymous_cart_from_token", "(", "token", ",", "cart_queryset", "=", "Cart", ".", "objects", ".", "all", "(", ")", ")", ":", "return", "cart_queryset", ".", "open", "(", ")", ".", "filter", "(", "token", "=", "token", ",", "user", "=", "None"...
returns open anonymous cart with given token or none if not found .
train
false
35,260
def connect_cloudtrail(aws_access_key_id=None, aws_secret_access_key=None, **kwargs): from boto.cloudtrail.layer1 import CloudTrailConnection return CloudTrailConnection(aws_access_key_id=aws_access_key_id, aws_secret_access_key=aws_secret_access_key, **kwargs)
[ "def", "connect_cloudtrail", "(", "aws_access_key_id", "=", "None", ",", "aws_secret_access_key", "=", "None", ",", "**", "kwargs", ")", ":", "from", "boto", ".", "cloudtrail", ".", "layer1", "import", "CloudTrailConnection", "return", "CloudTrailConnection", "(", ...
connect to aws cloudtrail :type aws_access_key_id: string .
train
false
35,261
def register_module(mod): for f in dir(mod.lib): f = getattr(mod.lib, f) if isinstance(f, BuiltinFunctionType): _ool_func_types[f] = mod.ffi.typeof(f) addr = mod.ffi.addressof(mod.lib, f.__name__) _ool_func_ptr[f] = int(mod.ffi.cast('uintptr_t', addr)) _ffi_instances.add(mod.ffi)
[ "def", "register_module", "(", "mod", ")", ":", "for", "f", "in", "dir", "(", "mod", ".", "lib", ")", ":", "f", "=", "getattr", "(", "mod", ".", "lib", ",", "f", ")", "if", "isinstance", "(", "f", ",", "BuiltinFunctionType", ")", ":", "_ool_func_ty...
add typing for all functions in an out-of-line cffi module to the typemap .
train
false
35,265
def etc_services(attrs=None, where=None): return _osquery_cmd(table='etc_services', attrs=attrs, where=where)
[ "def", "etc_services", "(", "attrs", "=", "None", ",", "where", "=", "None", ")", ":", "return", "_osquery_cmd", "(", "table", "=", "'etc_services'", ",", "attrs", "=", "attrs", ",", "where", "=", "where", ")" ]
return etc_services information from osquery cli example: .
train
false
35,266
def periphery(G, e=None): if (e is None): e = eccentricity(G) diameter = max(e.values()) p = [v for v in e if (e[v] == diameter)] return p
[ "def", "periphery", "(", "G", ",", "e", "=", "None", ")", ":", "if", "(", "e", "is", "None", ")", ":", "e", "=", "eccentricity", "(", "G", ")", "diameter", "=", "max", "(", "e", ".", "values", "(", ")", ")", "p", "=", "[", "v", "for", "v", ...
return the periphery of the graph g .
train
false
35,267
def performance_log(request): event = {'ip': _get_request_header(request, 'REMOTE_ADDR'), 'referer': _get_request_header(request, 'HTTP_REFERER'), 'accept_language': _get_request_header(request, 'HTTP_ACCEPT_LANGUAGE'), 'event_source': 'browser', 'event': _get_request_value(request, 'event'), 'agent': _get_request_header(request, 'HTTP_USER_AGENT'), 'page': _get_request_value(request, 'page'), 'id': _get_request_value(request, 'id'), 'expgroup': _get_request_value(request, 'expgroup'), 'value': _get_request_value(request, 'value'), 'time': datetime.datetime.utcnow(), 'host': _get_request_header(request, 'SERVER_NAME')} log.info(json.dumps(event, cls=DateTimeJSONEncoder)) return HttpResponse(status=204)
[ "def", "performance_log", "(", "request", ")", ":", "event", "=", "{", "'ip'", ":", "_get_request_header", "(", "request", ",", "'REMOTE_ADDR'", ")", ",", "'referer'", ":", "_get_request_header", "(", "request", ",", "'HTTP_REFERER'", ")", ",", "'accept_language...
log when post call to "performance" url is made by a user .
train
false
35,268
def add_error_class(klass, code): if (not isinstance(code, python.str_types)): code = code.decode('utf-8') if (not isinstance(klass, python.class_types)): raise TypeError('klass must be a class type') mro = inspect.getmro(klass) if (not (Exception in mro)): raise TypeError('Error classes must subclass the __builtin__.Exception class') if (code in ERROR_CLASS_MAP): raise ValueError(('Code %s is already registered' % (code,))) ERROR_CLASS_MAP[code] = klass
[ "def", "add_error_class", "(", "klass", ",", "code", ")", ":", "if", "(", "not", "isinstance", "(", "code", ",", "python", ".", "str_types", ")", ")", ":", "code", "=", "code", ".", "decode", "(", "'utf-8'", ")", "if", "(", "not", "isinstance", "(", ...
maps an exception class to a string code .
train
true
35,269
def is_increasing(f, interval=S.Reals, symbol=None): f = sympify(f) free_sym = f.free_symbols if (symbol is None): if (len(free_sym) > 1): raise NotImplementedError('is_increasing has not yet been implemented for all multivariate expressions') if (len(free_sym) == 0): return True symbol = free_sym.pop() df = f.diff(symbol) df_nonneg_interval = solveset((df >= 0), symbol, domain=S.Reals) return interval.is_subset(df_nonneg_interval)
[ "def", "is_increasing", "(", "f", ",", "interval", "=", "S", ".", "Reals", ",", "symbol", "=", "None", ")", ":", "f", "=", "sympify", "(", "f", ")", "free_sym", "=", "f", ".", "free_symbols", "if", "(", "symbol", "is", "None", ")", ":", "if", "("...
returns if a function is increasing or not .
train
false
35,270
def classlock(f): def inner(self, *args, **kwargs): curframe = inspect.currentframe() calframe = inspect.getouterframes(curframe, 2) if calframe[1][1].endswith('database.py'): return f(self, *args, **kwargs) with self._lock: return f(self, *args, **kwargs) return inner
[ "def", "classlock", "(", "f", ")", ":", "def", "inner", "(", "self", ",", "*", "args", ",", "**", "kwargs", ")", ":", "curframe", "=", "inspect", ".", "currentframe", "(", ")", "calframe", "=", "inspect", ".", "getouterframes", "(", "curframe", ",", ...
classlock decorator .
train
false
35,272
def _loadDescriptionFile(descriptionPyPath): global g_descriptionImportCount if (not os.path.isfile(descriptionPyPath)): raise RuntimeError((('Experiment description file %s does not exist or ' + 'is not a file') % (descriptionPyPath,))) mod = imp.load_source(('pf_description%d' % g_descriptionImportCount), descriptionPyPath) g_descriptionImportCount += 1 if (not hasattr(mod, 'descriptionInterface')): raise RuntimeError(('Experiment description file %s does not define %s' % (descriptionPyPath, 'descriptionInterface'))) if (not isinstance(mod.descriptionInterface, expdescriptionapi.DescriptionIface)): raise RuntimeError((('Experiment description file %s defines %s but it ' + 'is not DescriptionIface-based') % (descriptionPyPath, name))) return mod
[ "def", "_loadDescriptionFile", "(", "descriptionPyPath", ")", ":", "global", "g_descriptionImportCount", "if", "(", "not", "os", ".", "path", ".", "isfile", "(", "descriptionPyPath", ")", ")", ":", "raise", "RuntimeError", "(", "(", "(", "'Experiment description f...
loads a description file and returns it as a module .
train
true
35,273
def _set_properties_for_image(context, image_ref, properties, purge_props=False, session=None): orig_properties = {} for prop_ref in image_ref.properties: orig_properties[prop_ref.name] = prop_ref for (name, value) in properties.iteritems(): prop_values = {'image_id': image_ref.id, 'name': name, 'value': value} if (name in orig_properties): prop_ref = orig_properties[name] _image_property_update(context, prop_ref, prop_values, session=session) else: image_property_create(context, prop_values, session=session) if purge_props: for key in orig_properties.keys(): if (key not in properties): prop_ref = orig_properties[key] image_property_delete(context, prop_ref, session=session)
[ "def", "_set_properties_for_image", "(", "context", ",", "image_ref", ",", "properties", ",", "purge_props", "=", "False", ",", "session", "=", "None", ")", ":", "orig_properties", "=", "{", "}", "for", "prop_ref", "in", "image_ref", ".", "properties", ":", ...
create or update a set of image_properties for a given image .
train
false
35,275
def hubCapacity(): from multiprocessing import Pool import itertools print 'Hub capacity test' p = Pool(2) results = p.map(worker, itertools.product([1, 2, 3, 4, 5, 6, 7, 8], xrange(1, 2000, 200))) f = open('results-numPerfect.11.22.10.txt', 'w') for (i, r) in enumerate(results): print >>f, ('{%d,%d,%d,%d,%d,%d,%d,%d,%d},' % r) f.close()
[ "def", "hubCapacity", "(", ")", ":", "from", "multiprocessing", "import", "Pool", "import", "itertools", "print", "'Hub capacity test'", "p", "=", "Pool", "(", "2", ")", "results", "=", "p", ".", "map", "(", "worker", ",", "itertools", ".", "product", "(",...
study hub capacity .
train
false
35,276
def _parse_response(header_data, ignore_bad_cookies=False, ignore_bad_attributes=True): cookie_dicts = [] for line in Definitions.EOL.split(header_data.strip()): if (not line): break cookie_dict = parse_one_response(line, ignore_bad_cookies=ignore_bad_cookies, ignore_bad_attributes=ignore_bad_attributes) if (not cookie_dict): continue cookie_dicts.append(cookie_dict) if (not cookie_dicts): if (not ignore_bad_cookies): raise InvalidCookieError(data=header_data) _report_invalid_cookie(header_data) return cookie_dicts
[ "def", "_parse_response", "(", "header_data", ",", "ignore_bad_cookies", "=", "False", ",", "ignore_bad_attributes", "=", "True", ")", ":", "cookie_dicts", "=", "[", "]", "for", "line", "in", "Definitions", ".", "EOL", ".", "split", "(", "header_data", ".", ...
turn one or more lines of set-cookie: header data into a list of dicts mapping attribute names to attribute values .
train
true
35,277
@with_setup(step_runner_environ) def test_steps_are_aware_of_its_definitions(): f = Feature.from_string(FEATURE1) feature_result = f.run() scenario_result = feature_result.scenario_results[0] for step in scenario_result.steps_passed: assert step.has_definition step1 = scenario_result.steps_passed[0] assert_equals(step1.defined_at.line, 124) assert_equals(step1.defined_at.file, core.fs.relpath(__file__.rstrip('c')))
[ "@", "with_setup", "(", "step_runner_environ", ")", "def", "test_steps_are_aware_of_its_definitions", "(", ")", ":", "f", "=", "Feature", ".", "from_string", "(", "FEATURE1", ")", "feature_result", "=", "f", ".", "run", "(", ")", "scenario_result", "=", "feature...
steps are aware of its definitions line numbers and file names .
train
false
35,278
def get_volume_type(ctxt, id, expected_fields=None): if (id is None): msg = _('id cannot be None') raise exception.InvalidVolumeType(reason=msg) if (ctxt is None): ctxt = context.get_admin_context() return db.volume_type_get(ctxt, id, expected_fields=expected_fields)
[ "def", "get_volume_type", "(", "ctxt", ",", "id", ",", "expected_fields", "=", "None", ")", ":", "if", "(", "id", "is", "None", ")", ":", "msg", "=", "_", "(", "'id cannot be None'", ")", "raise", "exception", ".", "InvalidVolumeType", "(", "reason", "="...
retrieves single volume type by id .
train
false
35,279
@requires_segment_info def frame_lister(pl, segment_info, full_stack=False, maxframes=3): if full_stack: initial_stack_length = 0 frames = segment_info[u'pdb'].stack else: initial_stack_length = segment_info[u'initial_stack_length'] frames = segment_info[u'pdb'].stack[initial_stack_length:] if (len(frames) > maxframes): frames = frames[(- maxframes):] return (({u'curframe': frame[0], u'initial_stack_length': initial_stack_length}, {}) for frame in frames)
[ "@", "requires_segment_info", "def", "frame_lister", "(", "pl", ",", "segment_info", ",", "full_stack", "=", "False", ",", "maxframes", "=", "3", ")", ":", "if", "full_stack", ":", "initial_stack_length", "=", "0", "frames", "=", "segment_info", "[", "u'pdb'",...
list all frames in segment_info format .
train
false
35,280
def exhaust_iterator(iterator): data = b('') try: chunk = b(next(iterator)) except StopIteration: chunk = b('') while (len(chunk) > 0): data += chunk try: chunk = b(next(iterator)) except StopIteration: chunk = b('') return data
[ "def", "exhaust_iterator", "(", "iterator", ")", ":", "data", "=", "b", "(", "''", ")", "try", ":", "chunk", "=", "b", "(", "next", "(", "iterator", ")", ")", "except", "StopIteration", ":", "chunk", "=", "b", "(", "''", ")", "while", "(", "len", ...
exhaust an iterator and return all data returned by it .
train
false
35,282
def at_server_stop(): pass
[ "def", "at_server_stop", "(", ")", ":", "pass" ]
this is called just before the server is shut down .
train
false
35,284
def pvariance(data, mu=None): if (iter(data) is data): data = list(data) n = len(data) if (n < 1): raise StatisticsError('pvariance requires at least one data point') (T, ss) = _ss(data, mu) return _convert((ss / n), T)
[ "def", "pvariance", "(", "data", ",", "mu", "=", "None", ")", ":", "if", "(", "iter", "(", "data", ")", "is", "data", ")", ":", "data", "=", "list", "(", "data", ")", "n", "=", "len", "(", "data", ")", "if", "(", "n", "<", "1", ")", ":", ...
return the population variance of data .
train
true
35,286
def makeFactory(configdict): pubkeyfile = os.path.join(_GAME_DIR, 'server', 'ssh-public.key') privkeyfile = os.path.join(_GAME_DIR, 'server', 'ssh-private.key') def chainProtocolFactory(username=None): return insults.ServerProtocol(configdict['protocolFactory'], *configdict.get('protocolConfigdict', (username,)), **configdict.get('protocolKwArgs', {})) rlm = PassAvatarIdTerminalRealm() rlm.transportFactory = TerminalSessionTransport_getPeer rlm.chainedProtocolFactory = chainProtocolFactory factory = ConchFactory(Portal(rlm)) factory.sessionhandler = configdict['sessions'] try: (publicKey, privateKey) = getKeyPair(pubkeyfile, privkeyfile) factory.publicKeys = {'ssh-rsa': publicKey} factory.privateKeys = {'ssh-rsa': privateKey} except Exception as err: print('getKeyPair error: {err}\n WARNING: Evennia could not auto-generate SSH keypair. Using conch default keys instead.\nIf this error persists, create {pub} and {priv} yourself using third-party tools.'.format(err=err, pub=pubkeyfile, priv=privkeyfile)) factory.services = factory.services.copy() factory.services['ssh-userauth'] = ExtraInfoAuthServer factory.portal.registerChecker(PlayerDBPasswordChecker(factory)) return factory
[ "def", "makeFactory", "(", "configdict", ")", ":", "pubkeyfile", "=", "os", ".", "path", ".", "join", "(", "_GAME_DIR", ",", "'server'", ",", "'ssh-public.key'", ")", "privkeyfile", "=", "os", ".", "path", ".", "join", "(", "_GAME_DIR", ",", "'server'", ...
creates the ssh server factory .
train
false
35,288
def _convertToNewStyle(newClass, oldInstance): if (oldInstance.__class__.__name__ == 'ExperimentHandler'): newHandler = psychopy.data.ExperimentHandler() else: newHandler = newClass([], 0) for thisAttrib in dir(oldInstance): if ('instancemethod' in str(type(getattr(oldInstance, thisAttrib)))): continue elif (thisAttrib == '__weakref__'): continue else: value = getattr(oldInstance, thisAttrib) setattr(newHandler, thisAttrib, value) return newHandler
[ "def", "_convertToNewStyle", "(", "newClass", ",", "oldInstance", ")", ":", "if", "(", "oldInstance", ".", "__class__", ".", "__name__", "==", "'ExperimentHandler'", ")", ":", "newHandler", "=", "psychopy", ".", "data", ".", "ExperimentHandler", "(", ")", "els...
converts un-pickled old-style compatibility classes to new-style ones by initializing a new-style class and copying the old compatibility instances attributes .
train
false
35,291
def template_info(template_name): if (template_name in _template_info_cache): t_data = _template_info_cache[template_name] return (t_data['template_path'], t_data['template_type']) template_path = find_template(template_name) if (not template_path): raise TemplateNotFound(('Template %s cannot be found' % template_name)) t_type = template_type(template_path) if (not config.get('debug', False)): t_data = {'template_path': template_path, 'template_type': t_type} _template_info_cache[template_name] = t_data return (template_path, t_type)
[ "def", "template_info", "(", "template_name", ")", ":", "if", "(", "template_name", "in", "_template_info_cache", ")", ":", "t_data", "=", "_template_info_cache", "[", "template_name", "]", "return", "(", "t_data", "[", "'template_path'", "]", ",", "t_data", "["...
returns the path and type for a template .
train
false
35,292
def text_param(registry, xml_parent, data): base_param(registry, xml_parent, data, True, 'hudson.model.TextParameterDefinition')
[ "def", "text_param", "(", "registry", ",", "xml_parent", ",", "data", ")", ":", "base_param", "(", "registry", ",", "xml_parent", ",", "data", ",", "True", ",", "'hudson.model.TextParameterDefinition'", ")" ]
yaml: text a text parameter .
train
false
35,293
@manager.command def createall(): db.create_all()
[ "@", "manager", ".", "command", "def", "createall", "(", ")", ":", "db", ".", "create_all", "(", ")" ]
creates database tables .
train
false
35,294
def get_LEA_seq_consensus_seqs(fwd_read_f, rev_read_f, map_f, output_dir, barcode_type, barcode_len, barcode_correction_fn, max_barcode_errors, min_consensus, max_cluster_ratio, min_difference_in_bcs, fwd_length, rev_length, min_reads_per_random_bc, min_difference_clusters, barcode_column, reverse_primer_column): (bc_to_sid, bc_to_fwd_primers, bc_to_rev_primers) = process_mapping_file(map_f, barcode_len, barcode_type, barcode_column, reverse_primer_column) (random_bc_lookup, random_bc_reads, random_bcs, barcode_errors_exceed_max_count, barcode_not_in_map_count, primer_mismatch_count, seq_too_short_count, input_seqs_count, total_seqs_kept) = read_fwd_rev_read(fwd_read_f, rev_read_f, bc_to_sid, barcode_len, barcode_correction_fn, bc_to_fwd_primers, bc_to_rev_primers, max_barcode_errors, fwd_length, rev_length) consensus_seq_lookup = get_consensus_seqs_lookup(random_bc_lookup, random_bc_reads, random_bcs, min_difference_in_bcs, min_reads_per_random_bc, output_dir, min_difference_clusters, max_cluster_ratio, min_consensus) log_out = format_lea_seq_log(input_seqs_count, barcode_errors_exceed_max_count, barcode_not_in_map_count, primer_mismatch_count, seq_too_short_count, total_seqs_kept) return (consensus_seq_lookup, log_out)
[ "def", "get_LEA_seq_consensus_seqs", "(", "fwd_read_f", ",", "rev_read_f", ",", "map_f", ",", "output_dir", ",", "barcode_type", ",", "barcode_len", ",", "barcode_correction_fn", ",", "max_barcode_errors", ",", "min_consensus", ",", "max_cluster_ratio", ",", "min_differ...
reads mapping file .
train
false
35,296
def StringToCMakeTargetName(a): return a.translate(string.maketrans(' /():."', '_______'))
[ "def", "StringToCMakeTargetName", "(", "a", ")", ":", "return", "a", ".", "translate", "(", "string", ".", "maketrans", "(", "' /():.\"'", ",", "'_______'", ")", ")" ]
converts the given string a to a valid cmake target name .
train
false
35,298
def _number_format(val, dec): return ((dec and ((('%.' + str(dec)) + 'f') % val)) or int(round(val)))
[ "def", "_number_format", "(", "val", ",", "dec", ")", ":", "return", "(", "(", "dec", "and", "(", "(", "(", "'%.'", "+", "str", "(", "dec", ")", ")", "+", "'f'", ")", "%", "val", ")", ")", "or", "int", "(", "round", "(", "val", ")", ")", ")...
return float with dec decimals; if dec is 0 .
train
false
35,300
def retention_policy_add(database, name, duration, replication, default=False, user=None, password=None, host=None, port=None): client = _client(user=user, password=password, host=host, port=port) client.create_retention_policy(name, duration, replication, database, default) return True
[ "def", "retention_policy_add", "(", "database", ",", "name", ",", "duration", ",", "replication", ",", "default", "=", "False", ",", "user", "=", "None", ",", "password", "=", "None", ",", "host", "=", "None", ",", "port", "=", "None", ")", ":", "clien...
add a retention policy .
train
true
35,301
def grab_certbot_log(): sudo('if [ -f /var/log/letsencrypt/letsencrypt.log ]; then cat /var/log/letsencrypt/letsencrypt.log; else echo "[novarlog]"; fi') sudo('if [ -f ./certbot.log ]; then cat ./certbot.log; else echo "[nolocallog]"; fi')
[ "def", "grab_certbot_log", "(", ")", ":", "sudo", "(", "'if [ -f /var/log/letsencrypt/letsencrypt.log ]; then cat /var/log/letsencrypt/letsencrypt.log; else echo \"[novarlog]\"; fi'", ")", "sudo", "(", "'if [ -f ./certbot.log ]; then cat ./certbot.log; else echo \"[nolocallog]\"; fi'",...
grabs letsencrypt .
train
false
35,302
def strip_ansi(source): return re.sub('\\033\\[(\\d|;)+?m', '', source)
[ "def", "strip_ansi", "(", "source", ")", ":", "return", "re", ".", "sub", "(", "'\\\\033\\\\[(\\\\d|;)+?m'", ",", "''", ",", "source", ")" ]
strip ansi sequences from text .
train
false
35,305
def update_version_number(file_obj, new_version_number): updated = u'{0}.updated_version_number'.format(file_obj.file_path) with zipfile.ZipFile(file_obj.file_path, 'r') as source: file_list = source.infolist() with zipfile.ZipFile(updated, 'w', zipfile.ZIP_DEFLATED) as dest: for file_ in file_list: content = source.read(file_.filename) if (file_.filename == 'install.rdf'): content = _update_version_in_install_rdf(content, new_version_number) if (file_.filename in ['package.json', 'manifest.json']): content = _update_version_in_json_manifest(content, new_version_number) dest.writestr(file_, content) shutil.move(updated, file_obj.file_path)
[ "def", "update_version_number", "(", "file_obj", ",", "new_version_number", ")", ":", "updated", "=", "u'{0}.updated_version_number'", ".", "format", "(", "file_obj", ".", "file_path", ")", "with", "zipfile", ".", "ZipFile", "(", "file_obj", ".", "file_path", ",",...
update the manifest to have the new version number .
train
false
35,306
def stripped_path_components(path, strip_prefixes): normalized_path = os.path.abspath(path) sorted_strip_prefixes = sorted(strip_prefixes, key=len, reverse=True) for bp in sorted_strip_prefixes: normalized_bp = os.path.abspath(bp) if (normalized_bp == '/'): continue if normalized_path.startswith(normalized_bp): prefix = normalized_path[:len(normalized_bp)] result = [] for p in normalized_path[len(normalized_bp):].split('/'): if p: prefix += '/' prefix += p result.append((p, prefix)) return result return path_components(path)
[ "def", "stripped_path_components", "(", "path", ",", "strip_prefixes", ")", ":", "normalized_path", "=", "os", ".", "path", ".", "abspath", "(", "path", ")", "sorted_strip_prefixes", "=", "sorted", "(", "strip_prefixes", ",", "key", "=", "len", ",", "reverse",...
strip any prefix in strip_prefixes from path and return a list of path components where each component is .
train
false
35,308
def get_global_size(*args, **kargs): raise _stub_error
[ "def", "get_global_size", "(", "*", "args", ",", "**", "kargs", ")", ":", "raise", "_stub_error" ]
opencl get_global_size() .
train
false
35,309
def _exit(code=0): try: code = int(code) except ValueError: pass raise SystemExit, code
[ "def", "_exit", "(", "code", "=", "0", ")", ":", "try", ":", "code", "=", "int", "(", "code", ")", "except", "ValueError", ":", "pass", "raise", "SystemExit", ",", "code" ]
internal function .
train
false
35,310
def win_get_unicode_stream(stream, excepted_fileno, output_handle, encoding): old_fileno = getattr(stream, 'fileno', (lambda : None))() if (old_fileno == excepted_fileno): from ctypes import windll, WINFUNCTYPE from ctypes.wintypes import DWORD, HANDLE GetStdHandle = WINFUNCTYPE(HANDLE, DWORD)(('GetStdHandle', windll.kernel32)) real_output_handle = GetStdHandle(DWORD(output_handle)) if win_handle_is_a_console(real_output_handle): return WinUnicodeConsoleOutput(real_output_handle, old_fileno, stream.name, encoding) return WinUnicodeOutput(stream, old_fileno, encoding)
[ "def", "win_get_unicode_stream", "(", "stream", ",", "excepted_fileno", ",", "output_handle", ",", "encoding", ")", ":", "old_fileno", "=", "getattr", "(", "stream", ",", "'fileno'", ",", "(", "lambda", ":", "None", ")", ")", "(", ")", "if", "(", "old_file...
returns a unicode-compatible stream .
train
false
35,311
@hook.command def munge(text): return formatting.munge(text)
[ "@", "hook", ".", "command", "def", "munge", "(", "text", ")", ":", "return", "formatting", ".", "munge", "(", "text", ")" ]
replaces characters in a string with visually similar characters to avoid pinging users in irc .
train
false
35,314
def enable_dhcp(): current = network() if (current['Network Settings']['DHCP_ENABLE']['VALUE'] == 'Y'): return True _xml = '<RIBCL VERSION="2.0">\n <LOGIN USER_LOGIN="adminname" PASSWORD="password">\n <RIB_INFO MODE="write">\n <MOD_NETWORK_SETTINGS>\n <DHCP_ENABLE value="Yes"/>\n </MOD_NETWORK_SETTINGS>\n </RIB_INFO>\n </LOGIN>\n </RIBCL>' return __execute_cmd('Enable_DHCP', _xml)
[ "def", "enable_dhcp", "(", ")", ":", "current", "=", "network", "(", ")", "if", "(", "current", "[", "'Network Settings'", "]", "[", "'DHCP_ENABLE'", "]", "[", "'VALUE'", "]", "==", "'Y'", ")", ":", "return", "True", "_xml", "=", "'<RIBCL VERSION=\"2.0\">\...
enable dhcp cli example: .
train
false
35,316
def assert_event_matches(expected, actual, tolerate=None): differences = get_event_differences(expected, actual, tolerate=tolerate) if (len(differences) > 0): debug_info = ['', 'Expected:', block_indent(expected), 'Actual:', block_indent(actual), 'Tolerating:', block_indent(EventMatchTolerates.default_if_not_defined(tolerate))] differences = [('* ' + d) for d in differences] message_lines = (differences + debug_info) raise AssertionError(('Unexpected differences found in structs:\n\n' + '\n'.join(message_lines)))
[ "def", "assert_event_matches", "(", "expected", ",", "actual", ",", "tolerate", "=", "None", ")", ":", "differences", "=", "get_event_differences", "(", "expected", ",", "actual", ",", "tolerate", "=", "tolerate", ")", "if", "(", "len", "(", "differences", "...
compare two event dictionaries .
train
false
35,317
def replace_file_special_chars(filename_path): return filename_path.replace(':', '_')
[ "def", "replace_file_special_chars", "(", "filename_path", ")", ":", "return", "filename_path", ".", "replace", "(", "':'", ",", "'_'", ")" ]
this is a *very* incomplete function which i added to fix a bug: URL and after realizing that it was very hard to perform a replace that worked for all platforms and when the thing to sanitize was a path+filename and not only a filename .
train
false
35,318
def award_badge(config, count, user): slug = config.get(count) if (not slug): return badge_class = BadgeClass.get_badge_class(slug=slug, issuing_component='openedx__course', create=False) if (not badge_class): return if (not badge_class.get_for_user(user)): badge_class.award(user)
[ "def", "award_badge", "(", "config", ",", "count", ",", "user", ")", ":", "slug", "=", "config", ".", "get", "(", "count", ")", "if", "(", "not", "slug", ")", ":", "return", "badge_class", "=", "BadgeClass", ".", "get_badge_class", "(", "slug", "=", ...
given one of the configurations for enrollments or completions .
train
false
35,320
def _community(G, u, community): node_u = G.node[u] try: return node_u[community] except KeyError: raise nx.NetworkXAlgorithmError('No community information')
[ "def", "_community", "(", "G", ",", "u", ",", "community", ")", ":", "node_u", "=", "G", ".", "node", "[", "u", "]", "try", ":", "return", "node_u", "[", "community", "]", "except", "KeyError", ":", "raise", "nx", ".", "NetworkXAlgorithmError", "(", ...
get the community of the given node .
train
false
35,321
def left_trim_lines(lines): lines_striped = list(zip(lines[1:], list(map(str.lstrip, lines[1:])))) lines_striped = list(filter(itemgetter(1), lines_striped)) indent = min(([(len(line) - len(striped)) for (line, striped) in lines_striped] + [sys.maxsize])) if (indent < sys.maxsize): return [line[indent:] for line in lines] else: return list(lines)
[ "def", "left_trim_lines", "(", "lines", ")", ":", "lines_striped", "=", "list", "(", "zip", "(", "lines", "[", "1", ":", "]", ",", "list", "(", "map", "(", "str", ".", "lstrip", ",", "lines", "[", "1", ":", "]", ")", ")", ")", ")", "lines_striped...
remove all unnecessary leading space from lines .
train
false
35,322
@require_POST @login_required def create_order(request): course_id = request.POST['course_id'] course_id = CourseKey.from_string(course_id) donation_for_course = request.session.get('donation_for_course', {}) contribution = request.POST.get('contribution', donation_for_course.get(unicode(course_id), 0)) try: amount = decimal.Decimal(contribution).quantize(decimal.Decimal('.01'), rounding=decimal.ROUND_DOWN) except decimal.InvalidOperation: return HttpResponseBadRequest(_('Selected price is not valid number.')) current_mode = None sku = request.POST.get('sku', None) if sku: try: current_mode = CourseMode.objects.get(sku=sku) except CourseMode.DoesNotExist: log.exception(u'Failed to find CourseMode with SKU [%s].', sku) if (not current_mode): paid_modes = CourseMode.paid_modes_for_course(course_id) if paid_modes: if (len(paid_modes) > 1): log.warn(u"Multiple paid course modes found for course '%s' for create order request", course_id) current_mode = paid_modes[0] if (not current_mode): log.warn(u"Create order requested for course '%s' without a paid mode.", course_id) return HttpResponseBadRequest(_("This course doesn't support paid certificates")) if CourseMode.is_professional_mode(current_mode): amount = current_mode.min_price if (amount < current_mode.min_price): return HttpResponseBadRequest(_('No selected price or selected price is below minimum.')) if current_mode.sku: payment_data = checkout_with_ecommerce_service(request.user, course_id, current_mode, request.POST.get('processor')) else: payment_data = checkout_with_shoppingcart(request, request.user, course_id, current_mode, amount) if ('processor' not in request.POST): payment_data = payment_data['payment_form_data'] return HttpResponse(json.dumps(payment_data), content_type='application/json')
[ "@", "require_POST", "@", "login_required", "def", "create_order", "(", "request", ")", ":", "course_id", "=", "request", ".", "POST", "[", "'course_id'", "]", "course_id", "=", "CourseKey", ".", "from_string", "(", "course_id", ")", "donation_for_course", "=", ...
helper method for creating an order for testing .
train
false
35,323
def topic_parent_document_link(obj): if (not obj.parent_topic): return '' url = reverse('admin:wiki_document_change', args=[obj.parent_topic.id]) return ('<a href="%s">Topic&nbsp;Parent&nbsp;(#%s)</a>' % (url, obj.parent_topic.id))
[ "def", "topic_parent_document_link", "(", "obj", ")", ":", "if", "(", "not", "obj", ".", "parent_topic", ")", ":", "return", "''", "url", "=", "reverse", "(", "'admin:wiki_document_change'", ",", "args", "=", "[", "obj", ".", "parent_topic", ".", "id", "]"...
html link to the parent document for admin change list .
train
false
35,324
def get_series(model, extra_field=None, source=None, **filters): extra = (() if (extra_field is None) else (extra_field,)) qs = model.search().order_by('-date').filter(**filters).values_dict('date', 'count', *extra) if source: qs = qs.source(source) for val in qs[:365]: date_ = parse(val['date'][0]).date() rv = dict(count=val['count'][0], date=date_, end=date_) if source: rv['data'] = extract(val[source]) elif extra_field: rv['data'] = extract(val[extra_field]) (yield rv)
[ "def", "get_series", "(", "model", ",", "extra_field", "=", "None", ",", "source", "=", "None", ",", "**", "filters", ")", ":", "extra", "=", "(", "(", ")", "if", "(", "extra_field", "is", "None", ")", "else", "(", "extra_field", ",", ")", ")", "qs...
get series .
train
false
35,325
def s2n_intel(string): x = 0 y = 0 for c in string: x = (x | (ord_(c) << y)) y += (+ 8) return x
[ "def", "s2n_intel", "(", "string", ")", ":", "x", "=", "0", "y", "=", "0", "for", "c", "in", "string", ":", "x", "=", "(", "x", "|", "(", "ord_", "(", "c", ")", "<<", "y", ")", ")", "y", "+=", "(", "+", "8", ")", "return", "x" ]
extract multi-byte integer in intel format .
train
true
35,326
def yenc_name_fixer(p): try: return p.decode('utf-8') except: return p.decode('cp1252', errors='replace').replace('?', '!')
[ "def", "yenc_name_fixer", "(", "p", ")", ":", "try", ":", "return", "p", ".", "decode", "(", "'utf-8'", ")", "except", ":", "return", "p", ".", "decode", "(", "'cp1252'", ",", "errors", "=", "'replace'", ")", ".", "replace", "(", "'?'", ",", "'!'", ...
return unicode name of 8bit ascii string .
train
false
35,327
def _report_new_cert(config, cert_path, fullchain_path): expiry = crypto_util.notAfter(cert_path).date() reporter_util = zope.component.getUtility(interfaces.IReporter) if fullchain_path: and_chain = 'and chain have' path = fullchain_path else: and_chain = 'has ' path = cert_path verbswitch = (' with the "certonly" option' if (config.verb == 'run') else '') msg = 'Congratulations! Your certificate {0} been saved at {1}. Your cert will expire on {2}. To obtain a new or tweaked version of this certificate in the future, simply run {3} again{4}. To non-interactively renew *all* of your certificates, run "{3} renew"'.format(and_chain, path, expiry, cli.cli_command, verbswitch) reporter_util.add_message(msg, reporter_util.MEDIUM_PRIORITY)
[ "def", "_report_new_cert", "(", "config", ",", "cert_path", ",", "fullchain_path", ")", ":", "expiry", "=", "crypto_util", ".", "notAfter", "(", "cert_path", ")", ".", "date", "(", ")", "reporter_util", "=", "zope", ".", "component", ".", "getUtility", "(", ...
reports the creation of a new certificate to the user .
train
false
35,328
def is_valid_ipv6_prefix(ipv6_prefix): if (not isinstance(ipv6_prefix, str)): return False tokens = ipv6_prefix.split('/') if (len(tokens) != 2): return False return (is_valid_ipv6(tokens[0]) and is_valid_ip_prefix(tokens[1], 128))
[ "def", "is_valid_ipv6_prefix", "(", "ipv6_prefix", ")", ":", "if", "(", "not", "isinstance", "(", "ipv6_prefix", ",", "str", ")", ")", ":", "return", "False", "tokens", "=", "ipv6_prefix", ".", "split", "(", "'/'", ")", "if", "(", "len", "(", "tokens", ...
returns true if given ipv6_prefix is a valid ipv6 prefix .
train
true
35,330
def test_escape_if_necessary_escapes_nothing_if_has_more_than_1_char(): assert_equals(strings.escape_if_necessary('NOT ESCAPED'), 'NOT ESCAPED')
[ "def", "test_escape_if_necessary_escapes_nothing_if_has_more_than_1_char", "(", ")", ":", "assert_equals", "(", "strings", ".", "escape_if_necessary", "(", "'NOT ESCAPED'", ")", ",", "'NOT ESCAPED'", ")" ]
escape if necessary does nothing if the string has more than 1 char .
train
false
35,331
@image_comparison(baseline_images=[u'tight_layout3']) def test_tight_layout3(): fig = plt.figure() ax1 = plt.subplot(221) ax2 = plt.subplot(223) ax3 = plt.subplot(122) example_plot(ax1) example_plot(ax2) example_plot(ax3) plt.tight_layout()
[ "@", "image_comparison", "(", "baseline_images", "=", "[", "u'tight_layout3'", "]", ")", "def", "test_tight_layout3", "(", ")", ":", "fig", "=", "plt", ".", "figure", "(", ")", "ax1", "=", "plt", ".", "subplot", "(", "221", ")", "ax2", "=", "plt", ".",...
test tight_layout for multiple subplots .
train
false
35,332
def update_entity_in_table(key, validated_entity, datastore): datastore.batch_put_entity(APP_ENTITY_TABLE, [key], APP_ENTITY_SCHEMA, validated_entity)
[ "def", "update_entity_in_table", "(", "key", ",", "validated_entity", ",", "datastore", ")", ":", "datastore", ".", "batch_put_entity", "(", "APP_ENTITY_TABLE", ",", "[", "key", "]", ",", "APP_ENTITY_SCHEMA", ",", "validated_entity", ")" ]
updates the app_entity_table with the valid entity .
train
false
35,334
def _calc_shared_phylotypes_multiple(otu_table, idxs): if (len(idxs) < 2): raise ValueError('calc_shared_phylotypes_multiple needs at least two sampleIDs to comapre') shared_phylos = ones(len(otu_table.ids(axis='observation'))) for id_ in idxs: shared_phylos = logical_and(shared_phylos, otu_table.data(id_, 'sample')) return shared_phylos.sum()
[ "def", "_calc_shared_phylotypes_multiple", "(", "otu_table", ",", "idxs", ")", ":", "if", "(", "len", "(", "idxs", ")", "<", "2", ")", ":", "raise", "ValueError", "(", "'calc_shared_phylotypes_multiple needs at least two sampleIDs to comapre'", ")", "shared_phylos", "...
calculate shared otus between several samples indexed by values in idxes .
train
false
35,335
def getNewRepository(): return ExportRepository()
[ "def", "getNewRepository", "(", ")", ":", "return", "ExportRepository", "(", ")" ]
get new repository .
train
false
35,336
def commit(): connection._commit() set_clean()
[ "def", "commit", "(", ")", ":", "connection", ".", "_commit", "(", ")", "set_clean", "(", ")" ]
to commit the changes loaded in the candidate configuration .
train
false
35,337
def dot_shape(x, y): if isinstance(x, LinearTransform): return dot_shape_from_shape(x, tuple(y.shape)) elif isinstance(y, LinearTransform): return dot_shape_from_shape(tuple(x.shape), y) else: raise TypeError('One of x or y should be a LinearTransform')
[ "def", "dot_shape", "(", "x", ",", "y", ")", ":", "if", "isinstance", "(", "x", ",", "LinearTransform", ")", ":", "return", "dot_shape_from_shape", "(", "x", ",", "tuple", "(", "y", ".", "shape", ")", ")", "elif", "isinstance", "(", "y", ",", "Linear...
return the linear transformation of y by x or x by y when one or both of x and y is a lineartransform instance parameters x : writeme y : writeme returns writeme .
train
false
35,338
def get_regions(service_name, region_cls=None, connection_cls=None): endpoints = load_regions() if (service_name not in endpoints): raise BotoClientError(("Service '%s' not found in endpoints." % service_name)) if (region_cls is None): region_cls = RegionInfo region_objs = [] for (region_name, endpoint) in endpoints.get(service_name, {}).items(): region_objs.append(region_cls(name=region_name, endpoint=endpoint, connection_cls=connection_cls)) return region_objs
[ "def", "get_regions", "(", "service_name", ",", "region_cls", "=", "None", ",", "connection_cls", "=", "None", ")", ":", "endpoints", "=", "load_regions", "(", ")", "if", "(", "service_name", "not", "in", "endpoints", ")", ":", "raise", "BotoClientError", "(...
given a service name .
train
false
35,340
def filter_mapping_file_from_mapping_f(mapping_f, sample_ids_to_keep, negate=False): (mapping_data, header, comments) = parse_mapping_file(mapping_f) filtered_mapping_data = [] sample_ids_to_keep = {}.fromkeys(sample_ids_to_keep) for mapping_datum in mapping_data: hit = (mapping_datum[0] in sample_ids_to_keep) if (hit and (not negate)): filtered_mapping_data.append(mapping_datum) elif ((not hit) and negate): filtered_mapping_data.append(mapping_datum) else: pass return format_mapping_file(header, filtered_mapping_data)
[ "def", "filter_mapping_file_from_mapping_f", "(", "mapping_f", ",", "sample_ids_to_keep", ",", "negate", "=", "False", ")", ":", "(", "mapping_data", ",", "header", ",", "comments", ")", "=", "parse_mapping_file", "(", "mapping_f", ")", "filtered_mapping_data", "=",...
filter rows from a metadata mapping file .
train
false
35,341
def format_timedelta(datetime_or_timedelta, granularity='second', threshold=0.85): return get_i18n().format_timedelta(datetime_or_timedelta, granularity, threshold)
[ "def", "format_timedelta", "(", "datetime_or_timedelta", ",", "granularity", "=", "'second'", ",", "threshold", "=", "0.85", ")", ":", "return", "get_i18n", "(", ")", ".", "format_timedelta", "(", "datetime_or_timedelta", ",", "granularity", ",", "threshold", ")" ...
wrapper around babels format_timedelta to make it user language aware .
train
false
35,344
@pytest.mark.usefixtures('break_getuser') @pytest.mark.skipif(sys.platform.startswith('win'), reason='no os.getuid on windows') def test_tmpdir_fallback_uid_not_found(testdir): testdir.makepyfile('\n import pytest\n def test_some(tmpdir):\n assert tmpdir.isdir()\n ') reprec = testdir.inline_run() reprec.assertoutcome(passed=1)
[ "@", "pytest", ".", "mark", ".", "usefixtures", "(", "'break_getuser'", ")", "@", "pytest", ".", "mark", ".", "skipif", "(", "sys", ".", "platform", ".", "startswith", "(", "'win'", ")", ",", "reason", "=", "'no os.getuid on windows'", ")", "def", "test_tm...
test that tmpdir works even if the current process's user id does not correspond to a valid user .
train
false
35,347
def check_env_cache(opts, env_cache): if (not os.path.isfile(env_cache)): return None try: with salt.utils.fopen(env_cache, 'rb') as fp_: log.trace('Returning env cache data from {0}'.format(env_cache)) serial = salt.payload.Serial(opts) return serial.load(fp_) except (IOError, OSError): pass return None
[ "def", "check_env_cache", "(", "opts", ",", "env_cache", ")", ":", "if", "(", "not", "os", ".", "path", ".", "isfile", "(", "env_cache", ")", ")", ":", "return", "None", "try", ":", "with", "salt", ".", "utils", ".", "fopen", "(", "env_cache", ",", ...
returns cached env names .
train
true
35,349
def test_side_effect_completion(): _GlobalNameSpace.SideEffectContainer.foo = 1 side_effect = get_completion('SideEffectContainer', _GlobalNameSpace.__dict__) assert isinstance(side_effect._definition.base, mixed.MixedObject) foo = get_completion('SideEffectContainer.foo', _GlobalNameSpace.__dict__) assert (foo.name == 'foo')
[ "def", "test_side_effect_completion", "(", ")", ":", "_GlobalNameSpace", ".", "SideEffectContainer", ".", "foo", "=", "1", "side_effect", "=", "get_completion", "(", "'SideEffectContainer'", ",", "_GlobalNameSpace", ".", "__dict__", ")", "assert", "isinstance", "(", ...
in the repl its possible to cause side effects that are not documented in python code .
train
false
35,350
def remote_get(cwd, remote='origin', user=None, password=None, redact_auth=True, ignore_retcode=False): cwd = _expand_path(cwd, user) all_remotes = remotes(cwd, user=user, password=password, redact_auth=redact_auth, ignore_retcode=ignore_retcode) if (remote not in all_remotes): raise CommandExecutionError("Remote '{0}' not present in git checkout located at {1}".format(remote, cwd)) return all_remotes[remote]
[ "def", "remote_get", "(", "cwd", ",", "remote", "=", "'origin'", ",", "user", "=", "None", ",", "password", "=", "None", ",", "redact_auth", "=", "True", ",", "ignore_retcode", "=", "False", ")", ":", "cwd", "=", "_expand_path", "(", "cwd", ",", "user"...
get the fetch and push url for a specific remote cwd the path to the git checkout remote : origin name of the remote to query user user under which to run the git command .
train
true
35,351
def send_registration_completion_email(email, request, realm_creation=False): prereg_user = create_preregistration_user(email, request, realm_creation) context = {'support_email': settings.ZULIP_ADMINISTRATOR, 'verbose_support_offers': settings.VERBOSE_SUPPORT_OFFERS} return Confirmation.objects.send_confirmation(prereg_user, email, additional_context=context, host=request.get_host())
[ "def", "send_registration_completion_email", "(", "email", ",", "request", ",", "realm_creation", "=", "False", ")", ":", "prereg_user", "=", "create_preregistration_user", "(", "email", ",", "request", ",", "realm_creation", ")", "context", "=", "{", "'support_emai...
send an email with a confirmation link to the provided e-mail so the user can complete their registration .
train
false
35,352
def maybe_signature(d, app=None, clone=False): if (d is not None): if isinstance(d, abstract.CallableSignature): if clone: d = d.clone() elif isinstance(d, dict): d = signature(d) if (app is not None): d._app = app return d
[ "def", "maybe_signature", "(", "d", ",", "app", "=", "None", ",", "clone", "=", "False", ")", ":", "if", "(", "d", "is", "not", "None", ")", ":", "if", "isinstance", "(", "d", ",", "abstract", ".", "CallableSignature", ")", ":", "if", "clone", ":",...
ensure obj is a signature .
train
false
35,354
def justTransportResponse(transport): return Response(('HTTP', 1, 1), 200, 'OK', _boringHeaders, transport)
[ "def", "justTransportResponse", "(", "transport", ")", ":", "return", "Response", "(", "(", "'HTTP'", ",", "1", ",", "1", ")", ",", "200", ",", "'OK'", ",", "_boringHeaders", ",", "transport", ")" ]
helper function for creating a response which uses the given transport .
train
false
35,355
def mobilityTest(): info('* Simple mobility test\n') net = Mininet(topo=LinearTopo(3), switch=MobilitySwitch) info('* Starting network:\n') net.start() printConnections(net.switches) info('* Testing network\n') net.pingAll() info('* Identifying switch interface for h1\n') (h1, old) = net.get('h1', 's1') for s in (2, 3, 1): new = net[('s%d' % s)] port = randint(10, 20) info('* Moving', h1, 'from', old, 'to', new, 'port', port, '\n') (hintf, sintf) = moveHost(h1, old, new, newPort=port) info('*', hintf, 'is now connected to', sintf, '\n') info('* Clearing out old flows\n') for sw in net.switches: sw.dpctl('del-flows') info('* New network:\n') printConnections(net.switches) info('* Testing connectivity:\n') net.pingAll() old = new net.stop()
[ "def", "mobilityTest", "(", ")", ":", "info", "(", "'* Simple mobility test\\n'", ")", "net", "=", "Mininet", "(", "topo", "=", "LinearTopo", "(", "3", ")", ",", "switch", "=", "MobilitySwitch", ")", "info", "(", "'* Starting network:\\n'", ")", "net", ".", ...
a simple test of mobility .
train
false
35,356
def saveProfileDifferenceFromDefault(filename): global settingsList profileParser = ConfigParser.ConfigParser() profileParser.add_section('profile') for set in settingsList: if (set.isPreference() or set.isMachineSetting() or set.isAlteration()): continue if (set.getDefault() == set.getValue()): continue profileParser.set('profile', set.getName(), set.getValue().encode('utf-8')) try: profileParser.write(open(filename, 'w')) except: print ('Failed to write profile file: %s' % filename)
[ "def", "saveProfileDifferenceFromDefault", "(", "filename", ")", ":", "global", "settingsList", "profileParser", "=", "ConfigParser", ".", "ConfigParser", "(", ")", "profileParser", ".", "add_section", "(", "'profile'", ")", "for", "set", "in", "settingsList", ":", ...
save the current profile to an ini file .
train
false
35,357
def annotate(**kwargs): if (not kwargs): raise ValueError('annotations must be provided as keyword arguments') def dec(f): if hasattr(f, '__annotations__'): for (k, v) in kwargs.items(): f.__annotations__[k] = v else: f.__annotations__ = kwargs return f return dec
[ "def", "annotate", "(", "**", "kwargs", ")", ":", "if", "(", "not", "kwargs", ")", ":", "raise", "ValueError", "(", "'annotations must be provided as keyword arguments'", ")", "def", "dec", "(", "f", ")", ":", "if", "hasattr", "(", "f", ",", "'__annotations_...
python 3 compatible function annotation for python 2 .
train
false
35,358
def smartos_build(): with settings(hide('running', 'stdout')): return run('uname -v')
[ "def", "smartos_build", "(", ")", ":", "with", "settings", "(", "hide", "(", "'running'", ",", "'stdout'", ")", ")", ":", "return", "run", "(", "'uname -v'", ")" ]
get the build of smartos .
train
false
35,361
def _filter_values(vals, vlist=None, must=False): if (not vlist): return vals if isinstance(vlist, basestring): vlist = [vlist] res = [] for val in vlist: if (val in vals): res.append(val) if must: if res: return res else: raise MissingValue('Required attribute value missing') else: return res
[ "def", "_filter_values", "(", "vals", ",", "vlist", "=", "None", ",", "must", "=", "False", ")", ":", "if", "(", "not", "vlist", ")", ":", "return", "vals", "if", "isinstance", "(", "vlist", ",", "basestring", ")", ":", "vlist", "=", "[", "vlist", ...
removes values from *vals* that does not appear in vlist .
train
false
35,362
def remove_document_permissions(apps, schema_editor): ContentType = apps.get_model(u'contenttypes.ContentType') Permission = apps.get_model(u'auth.Permission') document_content_type = ContentType.objects.get(model=u'document', app_label=u'wagtaildocs') Permission.objects.filter(content_type=document_content_type, codename__in=(u'add_document', u'change_document', u'delete_document')).delete()
[ "def", "remove_document_permissions", "(", "apps", ",", "schema_editor", ")", ":", "ContentType", "=", "apps", ".", "get_model", "(", "u'contenttypes.ContentType'", ")", "Permission", "=", "apps", ".", "get_model", "(", "u'auth.Permission'", ")", "document_content_typ...
reverse the above additions of permissions .
train
false
35,363
def install_pandas(): chdir(SRC_DIR) apt_command('build-dep pandas') run_command('pip install pandas')
[ "def", "install_pandas", "(", ")", ":", "chdir", "(", "SRC_DIR", ")", "apt_command", "(", "'build-dep pandas'", ")", "run_command", "(", "'pip install pandas'", ")" ]
docstring for install_pandas .
train
false
35,364
def SimplifyNode(node): if (not node.getType()): return SimplifyNode(node.children[0]) elif ((node.getType() == QueryParser.CONJUNCTION) and (node.getChildCount() == 1)): return SimplifyNode(node.children[0]) elif ((node.getType() == QueryParser.DISJUNCTION) and (node.getChildCount() == 1)): return SimplifyNode(node.children[0]) elif (((node.getType() == QueryParser.EQ) or (node.getType() == QueryParser.HAS)) and (node.getChildCount() == 1)): return SimplifyNode(node.children[0]) for (i, child) in enumerate(node.children): node.setChild(i, SimplifyNode(child)) return node
[ "def", "SimplifyNode", "(", "node", ")", ":", "if", "(", "not", "node", ".", "getType", "(", ")", ")", ":", "return", "SimplifyNode", "(", "node", ".", "children", "[", "0", "]", ")", "elif", "(", "(", "node", ".", "getType", "(", ")", "==", "Que...
simplifies the node removing singleton conjunctions and others .
train
false
35,366
def _redirect_with_params(url_name, *args, **kwargs): url = urlresolvers.reverse(url_name, args=args) params = parse.urlencode(kwargs, True) return '{0}?{1}'.format(url, params)
[ "def", "_redirect_with_params", "(", "url_name", ",", "*", "args", ",", "**", "kwargs", ")", ":", "url", "=", "urlresolvers", ".", "reverse", "(", "url_name", ",", "args", "=", "args", ")", "params", "=", "parse", ".", "urlencode", "(", "kwargs", ",", ...
helper method to create a redirect response with url params .
train
true
35,368
def _get_function_from_pyopengl(funcname): func = None try: func = getattr(_GL, funcname) except AttributeError: try: func = getattr(_FBO, funcname) except AttributeError: func = None if (not bool(func)): if funcname.endswith('f'): try: func = getattr(_GL, funcname[:(-1)]) except AttributeError: pass if (func is None): func = _make_unavailable_func(funcname) logger.warning(('warning: %s not available' % funcname)) return func
[ "def", "_get_function_from_pyopengl", "(", "funcname", ")", ":", "func", "=", "None", "try", ":", "func", "=", "getattr", "(", "_GL", ",", "funcname", ")", "except", "AttributeError", ":", "try", ":", "func", "=", "getattr", "(", "_FBO", ",", "funcname", ...
try getting the given function from pyopengl .
train
true
35,370
@register.simple_tag(takes_context=True) def escape_format_html(context): return format_html('Hello {0}!', context['name'])
[ "@", "register", ".", "simple_tag", "(", "takes_context", "=", "True", ")", "def", "escape_format_html", "(", "context", ")", ":", "return", "format_html", "(", "'Hello {0}!'", ",", "context", "[", "'name'", "]", ")" ]
a tag that uses format_html .
train
false
35,371
def measure_partial(qubit, bits, format='sympy', normalize=True): m = qubit_to_matrix(qubit, format) if isinstance(bits, (int, Integer)): bits = (int(bits),) if (format == 'sympy'): if normalize: m = m.normalized() possible_outcomes = _get_possible_outcomes(m, bits) output = [] for outcome in possible_outcomes: prob_of_outcome = 0 prob_of_outcome += (outcome.H * outcome)[0] if (prob_of_outcome != 0): if normalize: next_matrix = matrix_to_qubit(outcome.normalized()) else: next_matrix = matrix_to_qubit(outcome) output.append((next_matrix, prob_of_outcome)) return output else: raise NotImplementedError("This function can't handle non-sympy matrix formats yet")
[ "def", "measure_partial", "(", "qubit", ",", "bits", ",", "format", "=", "'sympy'", ",", "normalize", "=", "True", ")", ":", "m", "=", "qubit_to_matrix", "(", "qubit", ",", "format", ")", "if", "isinstance", "(", "bits", ",", "(", "int", ",", "Integer"...
perform a partial ensemble measure on the specified qubits .
train
false
35,372
def blend_channels_multiply(bottom_chan, top_chan): return (bottom_chan[:, :] * top_chan[:, :])
[ "def", "blend_channels_multiply", "(", "bottom_chan", ",", "top_chan", ")", ":", "return", "(", "bottom_chan", "[", ":", ",", ":", "]", "*", "top_chan", "[", ":", ",", ":", "]", ")" ]
return combination of bottom and top channels .
train
false
35,373
def restore_caches(old): global FS_CACHE, MR_CACHE (FS_CACHE, MR_CACHE) = old
[ "def", "restore_caches", "(", "old", ")", ":", "global", "FS_CACHE", ",", "MR_CACHE", "(", "FS_CACHE", ",", "MR_CACHE", ")", "=", "old" ]
restores caches from the result of a previous clear_caches call .
train
false
35,375
def delete_quota(tenant_id, profile=None): conn = _auth(profile) return conn.delete_quota(tenant_id)
[ "def", "delete_quota", "(", "tenant_id", ",", "profile", "=", "None", ")", ":", "conn", "=", "_auth", "(", "profile", ")", "return", "conn", ".", "delete_quota", "(", "tenant_id", ")" ]
delete the specified tenants quota value cli example: .
train
false
35,377
def multiclass_hinge_loss(predictions, targets, delta=1): num_cls = predictions.shape[1] if (targets.ndim == (predictions.ndim - 1)): targets = theano.tensor.extra_ops.to_one_hot(targets, num_cls) elif (targets.ndim != predictions.ndim): raise TypeError('rank mismatch between targets and predictions') corrects = predictions[targets.nonzero()] rest = theano.tensor.reshape(predictions[(1 - targets).nonzero()], ((-1), (num_cls - 1))) rest = theano.tensor.max(rest, axis=1) return theano.tensor.nnet.relu(((rest - corrects) + delta))
[ "def", "multiclass_hinge_loss", "(", "predictions", ",", "targets", ",", "delta", "=", "1", ")", ":", "num_cls", "=", "predictions", ".", "shape", "[", "1", "]", "if", "(", "targets", ".", "ndim", "==", "(", "predictions", ".", "ndim", "-", "1", ")", ...
computes the multi-class hinge loss between predictions and targets .
train
false
35,378
def p_multiplicative_expression_4(t): pass
[ "def", "p_multiplicative_expression_4", "(", "t", ")", ":", "pass" ]
multiplicative_expression : multiplicative_expression mod cast_expression .
train
false
35,380
def behave(cmdline, cwd='.', **kwargs): assert isinstance(cmdline, six.string_types) return run(('behave ' + cmdline), cwd=cwd, **kwargs)
[ "def", "behave", "(", "cmdline", ",", "cwd", "=", "'.'", ",", "**", "kwargs", ")", ":", "assert", "isinstance", "(", "cmdline", ",", "six", ".", "string_types", ")", "return", "run", "(", "(", "'behave '", "+", "cmdline", ")", ",", "cwd", "=", "cwd",...
run behave tests .
train
true
35,381
def funcstr(leaves, expr): (result, scope) = print_python(leaves, expr) leaf_names = [print_python([leaf], leaf)[0] for leaf in leaves] return (('lambda %s: %s' % (', '.join(leaf_names), result)), scope)
[ "def", "funcstr", "(", "leaves", ",", "expr", ")", ":", "(", "result", ",", "scope", ")", "=", "print_python", "(", "leaves", ",", "expr", ")", "leaf_names", "=", "[", "print_python", "(", "[", "leaf", "]", ",", "leaf", ")", "[", "0", "]", "for", ...
lambda string for an expression .
train
false
35,382
def get_running(): ret = set() out = __salt__['cmd.run'](_systemctl_cmd('--full --no-legend --no-pager'), python_shell=False, ignore_retcode=True) for line in salt.utils.itertools.split(out, '\n'): try: comps = line.strip().split() fullname = comps[0] if (len(comps) > 3): active_state = comps[3] except ValueError as exc: log.error(exc) continue else: if (active_state != 'running'): continue try: (unit_name, unit_type) = fullname.rsplit('.', 1) except ValueError: continue if (unit_type in VALID_UNIT_TYPES): ret.add((unit_name if (unit_type == 'service') else fullname)) return sorted(ret)
[ "def", "get_running", "(", ")", ":", "ret", "=", "set", "(", ")", "out", "=", "__salt__", "[", "'cmd.run'", "]", "(", "_systemctl_cmd", "(", "'--full --no-legend --no-pager'", ")", ",", "python_shell", "=", "False", ",", "ignore_retcode", "=", "True", ")", ...
return a list of all running services .
train
true
35,384
def database_files_path(test_tmpdir, prefix='GALAXY'): environ_var = ('%s_TEST_DBPATH' % prefix) if (environ_var in os.environ): db_path = os.environ[environ_var] else: tempdir = tempfile.mkdtemp(dir=test_tmpdir) db_path = os.path.join(tempdir, 'database') return db_path
[ "def", "database_files_path", "(", "test_tmpdir", ",", "prefix", "=", "'GALAXY'", ")", ":", "environ_var", "=", "(", "'%s_TEST_DBPATH'", "%", "prefix", ")", "if", "(", "environ_var", "in", "os", ".", "environ", ")", ":", "db_path", "=", "os", ".", "environ...
create a mock database/ directory like in galaxy_root .
train
false
35,385
def _api_set_config_default(name, output, kwargs): keywords = kwargs.get('keyword', []) if (not isinstance(keywords, list)): keywords = [keywords] for keyword in keywords: item = config.get_config('misc', keyword) if item: item.set(item.default()) config.save_config() return report(output)
[ "def", "_api_set_config_default", "(", "name", ",", "output", ",", "kwargs", ")", ":", "keywords", "=", "kwargs", ".", "get", "(", "'keyword'", ",", "[", "]", ")", "if", "(", "not", "isinstance", "(", "keywords", ",", "list", ")", ")", ":", "keywords",...
api: reset requested config variables back to defaults .
train
false
35,388
def p_specifier_qualifier_list_1(t): pass
[ "def", "p_specifier_qualifier_list_1", "(", "t", ")", ":", "pass" ]
specifier_qualifier_list : type_specifier specifier_qualifier_list .
train
false
35,389
def tail(target, lines=None): if isinstance(target, str): with open(target) as target_file: for line in tail(target_file, lines): (yield line) return target.seek(0, 2) block_end_byte = target.tell() block_number = (-1) content = '' while (((lines is None) or (lines > 0)) and (block_end_byte > 0)): if ((block_end_byte - BLOCK_SIZE) > 0): target.seek((block_number * BLOCK_SIZE), 2) (content, completed_lines) = (target.read(BLOCK_SIZE) + content).split('\n', 1) else: target.seek(0, 0) completed_lines = (target.read(block_end_byte) + content) for line in reversed(completed_lines.splitlines()): if ((lines is None) or (lines > 0)): if (lines is not None): lines -= 1 (yield line) block_end_byte -= BLOCK_SIZE block_number -= 1
[ "def", "tail", "(", "target", ",", "lines", "=", "None", ")", ":", "if", "isinstance", "(", "target", ",", "str", ")", ":", "with", "open", "(", "target", ")", "as", "target_file", ":", "for", "line", "in", "tail", "(", "target_file", ",", "lines", ...
show and then tail the event log .
train
false
35,390
def wildcard_to_re(text): return ''.join([_wildcard_to_regex.get(ch, ch) for ch in text])
[ "def", "wildcard_to_re", "(", "text", ")", ":", "return", "''", ".", "join", "(", "[", "_wildcard_to_regex", ".", "get", "(", "ch", ",", "ch", ")", "for", "ch", "in", "text", "]", ")" ]
convert plain wildcard string to regex .
train
false
35,391
def projection_factory(projection, figure, rect, **kwargs): return get_projection_class(projection)(figure, rect, **kwargs)
[ "def", "projection_factory", "(", "projection", ",", "figure", ",", "rect", ",", "**", "kwargs", ")", ":", "return", "get_projection_class", "(", "projection", ")", "(", "figure", ",", "rect", ",", "**", "kwargs", ")" ]
get a new projection instance .
train
false
35,392
def test_logxml_check_isdir(testdir): result = testdir.runpytest('--junit-xml=.') result.stderr.fnmatch_lines(['*--junitxml must be a filename*'])
[ "def", "test_logxml_check_isdir", "(", "testdir", ")", ":", "result", "=", "testdir", ".", "runpytest", "(", "'--junit-xml=.'", ")", "result", ".", "stderr", ".", "fnmatch_lines", "(", "[", "'*--junitxml must be a filename*'", "]", ")" ]
give an error if --junit-xml is a directory .
train
false
35,394
def deleteSiteInfo(self, site): try: q = SiteDB.query.filter((SiteDB.sitename == site)).first() except Exception as e: Log.debug(self, '{0}'.format(e)) Log.error(self, 'Unable to query database') if (not q): Log.error(self, '{0} does not exist in database'.format(site)) try: db_session.delete(q) db_session.commit() except Exception as e: Log.debug(self, '{0}'.format(e)) Log.error(self, 'Unable to delete site from application database.')
[ "def", "deleteSiteInfo", "(", "self", ",", "site", ")", ":", "try", ":", "q", "=", "SiteDB", ".", "query", ".", "filter", "(", "(", "SiteDB", ".", "sitename", "==", "site", ")", ")", ".", "first", "(", ")", "except", "Exception", "as", "e", ":", ...
delete site record in database .
train
false
35,395
def is_valid_key(key): if (PY3 and isinstance(key, bytes)): raise TypeError('needs to be str not bytes') for c in key: if ((c < ' ') or (c > '}') or (c == '=')): return False else: return bool(key)
[ "def", "is_valid_key", "(", "key", ")", ":", "if", "(", "PY3", "and", "isinstance", "(", "key", ",", "bytes", ")", ")", ":", "raise", "TypeError", "(", "'needs to be str not bytes'", ")", "for", "c", "in", "key", ":", "if", "(", "(", "c", "<", "' '",...
return true if a string is a valid vorbis comment key .
train
true
35,396
def set_asynchronous_for_sqlite(sender, connection, **kwargs): if (connection.vendor == 'sqlite'): cursor = connection.cursor() cursor.execute('PRAGMA synchronous=OFF;')
[ "def", "set_asynchronous_for_sqlite", "(", "sender", ",", "connection", ",", "**", "kwargs", ")", ":", "if", "(", "connection", ".", "vendor", "==", "'sqlite'", ")", ":", "cursor", "=", "connection", ".", "cursor", "(", ")", "cursor", ".", "execute", "(", ...
make sqlite3 be asynchronous .
train
false