id_within_dataset
int64
1
55.5k
snippet
stringlengths
19
14.2k
tokens
listlengths
6
1.63k
nl
stringlengths
6
352
split_within_dataset
stringclasses
1 value
is_duplicated
bool
2 classes
52,804
def scan_video(path):
    """Scan a video file at *path* and return a populated Video object.

    Raises ValueError when the path does not exist or does not carry a
    recognized video extension.  File hashes are only computed for files
    larger than 10MB.
    """
    if not os.path.exists(path):
        raise ValueError('Path does not exist')
    if not path.endswith(VIDEO_EXTENSIONS):
        raise ValueError('%r is not a valid video extension' % os.path.splitext(path)[1])
    dirpath, filename = os.path.split(path)
    logger.info('Scanning video %r in %r', filename, dirpath)
    video = Video.fromguess(path, guessit(path))
    video.size = os.path.getsize(path)
    if video.size > 10485760:
        logger.debug('Size is %d', video.size)
        video.hashes['opensubtitles'] = hash_opensubtitles(path)
        video.hashes['shooter'] = hash_shooter(path)
        video.hashes['thesubdb'] = hash_thesubdb(path)
        video.hashes['napiprojekt'] = hash_napiprojekt(path)
        logger.debug('Computed hashes %r', video.hashes)
    else:
        logger.warning('Size is lower than 10MB: hashes not computed')
    return video
[ "def", "scan_video", "(", "path", ")", ":", "if", "(", "not", "os", ".", "path", ".", "exists", "(", "path", ")", ")", ":", "raise", "ValueError", "(", "'Path does not exist'", ")", "if", "(", "not", "path", ".", "endswith", "(", "VIDEO_EXTENSIONS", ")...
scan a video from a path .
train
true
52,805
def add_component(page, item_type, specific_type, is_advanced_problem=False):
    """Click one of the "add new component" buttons, then pick a template.

    When the component type offers multiple templates a sub-menu appears and
    *specific_type* is clicked in it; is_advanced_problem first switches to
    the Advanced problem tab.
    """
    button = page.q(css='.add-xblock-component .add-xblock-component-button[data-type={}]'.format(item_type))
    has_multiple_templates = button.filter(
        lambda el: 'multiple-templates' in el.get_attribute('class')).present
    button.click()
    if has_multiple_templates:
        submenu_css = '.new-component-{}'.format(item_type)
        page.wait_for_element_visibility(
            submenu_css, 'Wait for the templates sub-menu to appear')
        page.wait_for_element_invisibility(
            '.add-xblock-component .new-component',
            'Wait for the add component menu to disappear')
        if is_advanced_problem:
            page.q(css='.problem-type-tabs a').filter(text='Advanced').first.click()
            active_tab_css = '.problem-type-tabs li.ui-tabs-active a'
            page.wait_for(
                lambda: len(page.q(css=active_tab_css).filter(text='Advanced').execute()) > 0,
                'Waiting for the Advanced problem tab to be active')
        options = page.q(
            css='.new-component-{} ul.new-component-template li button span'.format(item_type))
        options.filter(text=specific_type).first.click()
    wait_for_notification(page)
    page.wait_for_ajax()
[ "def", "add_component", "(", "page", ",", "item_type", ",", "specific_type", ",", "is_advanced_problem", "=", "False", ")", ":", "btn", "=", "page", ".", "q", "(", "css", "=", "'.add-xblock-component .add-xblock-component-button[data-type={}]'", ".", "format", "(", ...
click one of the "add new component" buttons .
train
false
52,808
def _make_ctf_comp_coils(info, coils):
    """Get the correct compensator for CTF coils.

    Returns None when no compensation grade is set in *info*, otherwise the
    compensator matrix built by make_compensator.
    """
    logger.info('Setting up compensation data...')
    comp_num = get_current_comp(info)
    if comp_num is None or comp_num == 0:
        logger.info(' No compensation set. Nothing more to do.')
        return None
    # Count MEG channels; used only for the informational message below.
    n_comp_ch = sum(c['kind'] == FIFF.FIFFV_MEG_CH for c in info['chs'])
    logger.info(' %d out of %d channels have the compensation set.'
                % (n_comp_ch, len(coils)))
    compensator = make_compensator(info, 0, comp_num, True)
    logger.info(' Desired compensation data (%s) found.' % comp_num)
    logger.info(' All compensation channels found.')
    logger.info(' Preselector created.')
    logger.info(' Compensation data matrix created.')
    logger.info(' Postselector created.')
    return compensator
[ "def", "_make_ctf_comp_coils", "(", "info", ",", "coils", ")", ":", "logger", ".", "info", "(", "'Setting up compensation data...'", ")", "comp_num", "=", "get_current_comp", "(", "info", ")", "if", "(", "(", "comp_num", "is", "None", ")", "or", "(", "comp_n...
get the correct compensator for ctf coils .
train
false
52,809
def userdel(pwfile, user, runas=None, all_results=False):
    """Delete *user* from the htpasswd file at *pwfile*.

    Returns an error string when the file is missing; otherwise the full
    cmd.run_all dict (all_results=True) or the command output split into
    lines.
    """
    if not os.path.exists(pwfile):
        return 'Error: The specified htpasswd file does not exist'
    cmd = ['htpasswd', '-D', pwfile, user]
    if all_results:
        return __salt__['cmd.run_all'](cmd, runas=runas, python_shell=False)
    return __salt__['cmd.run'](cmd, runas=runas, python_shell=False).splitlines()
[ "def", "userdel", "(", "pwfile", ",", "user", ",", "runas", "=", "None", ",", "all_results", "=", "False", ")", ":", "if", "(", "not", "os", ".", "path", ".", "exists", "(", "pwfile", ")", ")", ":", "return", "'Error: The specified htpasswd file does not e...
delete a user from the specified htpasswd file .
train
true
52,810
def is_kl_connected(G, k, l, low_memory=False):
    """Return True iff G is locally (k, l)-connected.

    For every edge (u, v), at least ``l`` edge-disjoint u-v paths of length
    at most ``k`` hops must be found; paths are extracted greedily by
    repeated shortest-path search, removing each found path's edges.  With
    ``low_memory=True`` only the k-hop neighborhood subgraph of the edge is
    copied instead of the whole graph.

    Fixes: a list comprehension was used purely for its side effect when
    expanding the neighborhood frontier (building and discarding a list of
    Nones); replaced with a plain loop.  The ``graphOK`` flag was replaced
    by early returns.
    """
    for u, v in G.edges():
        if low_memory:
            # Restrict the search to the k-hop neighborhood of the edge.
            verts = set([u, v])
            for _ in range(k):
                # Expand the frontier one hop; iterate a copy since verts grows.
                for w in verts.copy():
                    verts.update(G.neighbors(w))
            G2 = G.subgraph(verts)
        else:
            G2 = copy.deepcopy(G)
        path = [u, v]
        cnt = 0
        accept = 0
        while path:
            cnt += 1
            if cnt >= l:
                accept = 1
                break
            # Remove the edges of the path just found so the next
            # shortest path is edge-disjoint from it.
            prev = u
            for w in path:
                if w != prev:
                    G2.remove_edge(prev, w)
                    prev = w
            try:
                path = nx.shortest_path(G2, u, v)
            except nx.NetworkXNoPath:
                path = False
        if accept == 0:
            return False
    return True
[ "def", "is_kl_connected", "(", "G", ",", "k", ",", "l", ",", "low_memory", "=", "False", ")", ":", "graphOK", "=", "True", "for", "edge", "in", "G", ".", "edges", "(", ")", ":", "(", "u", ",", "v", ")", "=", "edge", "if", "low_memory", ":", "ve...
returns true if and only if g is locally -connected .
train
false
52,811
@contextfunction
def knowledge_item_list(context, items, skip_group=False):
    """Render a list of knowledge items to HTML markup (template tag)."""
    request = context['request']
    if 'response_format' in context:
        response_format = context['response_format']
    else:
        response_format = 'html'
    rendered = render_to_string(
        'knowledge/tags/item_list',
        {'items': items, 'skip_group': skip_group},
        context_instance=RequestContext(request),
        response_format=response_format)
    return Markup(rendered)
[ "@", "contextfunction", "def", "knowledge_item_list", "(", "context", ",", "items", ",", "skip_group", "=", "False", ")", ":", "request", "=", "context", "[", "'request'", "]", "response_format", "=", "'html'", "if", "(", "'response_format'", "in", "context", ...
print a list of items .
train
false
52,812
def generateHubSequences(nCoinc=10, hubs=[2, 6], seqLength=[5, 6, 7], nSeq=100):
    """Generate a set of hub sequences.

    Each sequence samples (length - 1) distinct non-hub coincidence indices
    from range(nCoinc) and inserts a randomly chosen hub at the middle
    position, where length is drawn from seqLength.

    Fixes: ``range(nCoinc)`` has no ``.remove()`` on Python 3 — wrap in
    ``list``; Py2-only ``xrange`` replaced with ``range`` (equivalent on
    both).  NOTE: the mutable default arguments are read-only here, so they
    are safe, but callers should not rely on mutating them.
    """
    coincList = list(range(nCoinc))
    for hub in hubs:
        coincList.remove(hub)
    seqList = []
    for _ in range(nSeq):
        length = random.choice(seqLength) - 1
        seq = random.sample(coincList, length)
        # Place the hub in the middle of the sequence.
        seq.insert(length // 2, random.choice(hubs))
        seqList.append(seq)
    return seqList
[ "def", "generateHubSequences", "(", "nCoinc", "=", "10", ",", "hubs", "=", "[", "2", ",", "6", "]", ",", "seqLength", "=", "[", "5", ",", "6", ",", "7", "]", ",", "nSeq", "=", "100", ")", ":", "coincList", "=", "range", "(", "nCoinc", ")", "for...
generate a set of hub sequences .
train
true
52,813
def make_transparent_proxy(global_conf, force_host=None, force_scheme='http'):
    """Create a proxy that connects to a specific host.

    NOTE(review): global_conf is accepted but unused — presumably required
    by a paste-style app-factory signature; confirm against the registry.
    """
    return TransparentProxy(force_host=force_host, force_scheme=force_scheme)
[ "def", "make_transparent_proxy", "(", "global_conf", ",", "force_host", "=", "None", ",", "force_scheme", "=", "'http'", ")", ":", "return", "TransparentProxy", "(", "force_host", "=", "force_host", ",", "force_scheme", "=", "force_scheme", ")" ]
create a proxy that connects to a specific host .
train
false
52,814
def _get_configuration_from_db(shop):
    """Get global or shop-specific configuration from the database.

    Returns a dict mapping each ConfigurationItem key to its value for the
    given shop.  (Rewrote the manual append loop as a dict comprehension.)
    """
    return {
        conf_item.key: conf_item.value
        for conf_item in ConfigurationItem.objects.filter(shop=shop)
    }
[ "def", "_get_configuration_from_db", "(", "shop", ")", ":", "configuration", "=", "{", "}", "for", "conf_item", "in", "ConfigurationItem", ".", "objects", ".", "filter", "(", "shop", "=", "shop", ")", ":", "configuration", "[", "conf_item", ".", "key", "]", ...
get global or shop specific configuration from database .
train
false
52,817
def _get_initializers(initializers, fields):
    """Produce a per-field NN initialization dict.

    When *initializers* is a dict, only fields present in it get an entry;
    otherwise the single initializer is shared by every field.  (Hoisted the
    loop-invariant isinstance check out of the loop, which also makes the
    two modes explicit.)
    """
    result = {}
    if isinstance(initializers, dict):
        # Per-field mapping: fields absent from the mapping get no entry.
        for f in fields:
            if f in initializers:
                result[f] = _convert_to_initializer(initializers[f])
    else:
        # Single initializer applied to every field.
        for f in fields:
            result[f] = _convert_to_initializer(initializers)
    return result
[ "def", "_get_initializers", "(", "initializers", ",", "fields", ")", ":", "result", "=", "{", "}", "for", "f", "in", "fields", ":", "if", "isinstance", "(", "initializers", ",", "dict", ")", ":", "if", "(", "f", "in", "initializers", ")", ":", "result"...
produces a nn initialization dict .
train
false
52,818
@jwt_required()
def auth_jwt():
    """Record the JWT-authenticated identity on the request globals.

    The jwt_required decorator rejects the request when no valid JWT is
    present, so this body only runs for authenticated calls.
    """
    g.user = current_identity
[ "@", "jwt_required", "(", ")", "def", "auth_jwt", "(", ")", ":", "g", ".", "user", "=", "current_identity" ]
a helper function that throws jwterror if jwt is not set .
train
false
52,819
def dmp_subresultants(f, g, u, K):
    """Computes the subresultant PRS of two polynomials in K[X].

    Delegates to dmp_inner_subresultants and returns only the PRS
    (first element of its result).
    """
    result = dmp_inner_subresultants(f, g, u, K)
    return result[0]
[ "def", "dmp_subresultants", "(", "f", ",", "g", ",", "u", ",", "K", ")", ":", "return", "dmp_inner_subresultants", "(", "f", ",", "g", ",", "u", ",", "K", ")", "[", "0", "]" ]
computes subresultant prs of two polynomials in k[x] .
train
false
52,820
@singledispatch
def assert_student_view_valid_html(block, html):
    """Assert that the student_view HTML is correct for the supplied block.

    Base implementation performs no checks; per-block-type checks are
    registered via ``assert_student_view_valid_html.register``.
    """
    pass
[ "@", "singledispatch", "def", "assert_student_view_valid_html", "(", "block", ",", "html", ")", ":", "pass" ]
asserts that the html generated by the student_view view is correct for the supplied block .
train
false
52,821
@requires_csrf_token
def permission_denied(request, exception, template_name=ERROR_403_TEMPLATE_NAME):
    """Permission denied (HTTP 403) handler.

    Renders the 403 template with the exception text; when the default
    template is missing, falls back to a minimal inline response.  A missing
    *custom* template is re-raised so the misconfiguration is visible.
    """
    try:
        template = loader.get_template(template_name)
    except TemplateDoesNotExist:
        if template_name != ERROR_403_TEMPLATE_NAME:
            raise
        return http.HttpResponseForbidden('<h1>403 Forbidden</h1>', content_type='text/html')
    body = template.render(request=request, context={'exception': force_text(exception)})
    return http.HttpResponseForbidden(body)
[ "@", "requires_csrf_token", "def", "permission_denied", "(", "request", ",", "exception", ",", "template_name", "=", "ERROR_403_TEMPLATE_NAME", ")", ":", "try", ":", "template", "=", "loader", ".", "get_template", "(", "template_name", ")", "except", "TemplateDoesNo...
permission denied handler .
train
false
52,822
@task
@use_master
def update_supported_locales(ids, **kw):
    """Update all apps' supported locales based on the current version.

    Intended to run via command line; processes apps in chunks of 50 and
    logs per-app success or failure without aborting the batch.
    """
    for chunk in chunked(ids, 50):
        for webapp in Webapp.objects.filter(id__in=chunk):
            try:
                if webapp.update_supported_locales():
                    _log(webapp, u'Updated supported locales')
            except Exception:
                _log(webapp, u'Updating supported locales failed.', exc_info=True)
[ "@", "task", "@", "use_master", "def", "update_supported_locales", "(", "ids", ",", "**", "kw", ")", ":", "for", "chunk", "in", "chunked", "(", "ids", ",", "50", ")", ":", "for", "app", "in", "Webapp", ".", "objects", ".", "filter", "(", "id__in", "=...
task intended to run via command line to update all apps supported locales based on the current version .
train
false
52,823
def _parse_see_args(topic, data):
    """Parse the OwnTracks location parameters into (dev_id, see-kwargs)."""
    host_name, dev_id = parse_topic(topic, False)
    kwargs = {
        'dev_id': dev_id,
        'host_name': host_name,
        'gps': (data[WAYPOINT_LAT_KEY], data[WAYPOINT_LON_KEY]),
    }
    # Optional payload fields.
    if 'acc' in data:
        kwargs['gps_accuracy'] = data['acc']
    if 'batt' in data:
        kwargs['battery'] = data['batt']
    return dev_id, kwargs
[ "def", "_parse_see_args", "(", "topic", ",", "data", ")", ":", "(", "host_name", ",", "dev_id", ")", "=", "parse_topic", "(", "topic", ",", "False", ")", "kwargs", "=", "{", "'dev_id'", ":", "dev_id", ",", "'host_name'", ":", "host_name", ",", "'gps'", ...
parse the owntracks location parameters .
train
false
52,824
def lemmatize(word):
    """Use WordNet's morphy to find the base form of a verb.

    Returns the word unchanged when morphy finds no lemma.
    """
    base = nltk.corpus.wordnet.morphy(word, pos=nltk.corpus.wordnet.VERB)
    if base is None:
        return word
    return base
[ "def", "lemmatize", "(", "word", ")", ":", "lemma", "=", "nltk", ".", "corpus", ".", "wordnet", ".", "morphy", "(", "word", ",", "pos", "=", "nltk", ".", "corpus", ".", "wordnet", ".", "VERB", ")", "if", "(", "lemma", "is", "not", "None", ")", ":...
use morphy from wordnet to find the base form of verbs .
train
false
52,825
def unicode_dict(_dict):
    """Return a copy of *_dict* with keys and values coerced to unicode.

    (Rewrote the manual build loop as a dict comprehension.)
    """
    return {unicode_obj(k): unicode_obj(v) for k, v in iteritems(_dict)}
[ "def", "unicode_dict", "(", "_dict", ")", ":", "r", "=", "{", "}", "for", "(", "k", ",", "v", ")", "in", "iteritems", "(", "_dict", ")", ":", "r", "[", "unicode_obj", "(", "k", ")", "]", "=", "unicode_obj", "(", "v", ")", "return", "r" ]
make sure keys and values of dict is unicode .
train
true
52,827
def _step1(state):
    """Steps 1 and 2 in the Wikipedia page (linear assignment).

    Subtracts each row's minimum from the row, then stars zeros that have
    no starred zero in their row or column, and clears all covers.
    Returns the next step function.
    """
    # Row reduction: subtract the smallest element of each row.
    state.C -= state.C.min(axis=1)[:, np.newaxis]
    # Star each zero whose row and column are still uncovered.
    for i, j in zip(*np.where(state.C == 0)):
        if state.col_uncovered[j] and state.row_uncovered[i]:
            state.marked[i, j] = 1
            state.col_uncovered[j] = False
            state.row_uncovered[i] = False
    state._clear_covers()
    return _step3
[ "def", "_step1", "(", "state", ")", ":", "state", ".", "C", "-=", "state", ".", "C", ".", "min", "(", "axis", "=", "1", ")", "[", ":", ",", "np", ".", "newaxis", "]", "for", "(", "i", ",", "j", ")", "in", "zip", "(", "*", "np", ".", "wher...
steps 1 and 2 in the wikipedia page .
train
false
52,828
def set_default_proxy(proxy_type=None, addr=None, port=None, rdns=True, username=None, password=None):
    """Set a default proxy which all further socksocket objects will use."""
    # Credentials are stored as bytes (or None when absent).
    user_bytes = username.encode() if username else None
    pass_bytes = password.encode() if password else None
    socksocket.default_proxy = (proxy_type, addr, port, rdns, user_bytes, pass_bytes)
[ "def", "set_default_proxy", "(", "proxy_type", "=", "None", ",", "addr", "=", "None", ",", "port", "=", "None", ",", "rdns", "=", "True", ",", "username", "=", "None", ",", "password", "=", "None", ")", ":", "socksocket", ".", "default_proxy", "=", "("...
set_default_proxy sets a default proxy which all further socksocket objects will use .
train
true
52,831
def group_membership_status():
    """RESTful CRUD controller."""
    return s3_rest_controller()
[ "def", "group_membership_status", "(", ")", ":", "return", "s3_rest_controller", "(", ")" ]
restful crud controller .
train
false
52,832
def _sendMessageWait(msg):
    """A valid skypename has been matched; build the Alfred wait-item XML.

    *msg* is expected to look like 'skypename: message text'.
    """
    skypename = msg.partition(': ')[0]
    img, imgtype = _getAvatar(skypename)
    return PyFred('ch.xtin.skypingalfred.wait', False).addItem(
        'skypename_' + skypename,
        msg,
        'Message to: ' + skypename,
        _getLastMessageFormated(skypename),
        True,
        None,
        img,
        imgtype,
    ).toXML()
[ "def", "_sendMessageWait", "(", "msg", ")", ":", "m", "=", "msg", ".", "partition", "(", "': '", ")", "(", "img", ",", "imgtype", ")", "=", "_getAvatar", "(", "m", "[", "0", "]", ")", "return", "PyFred", "(", "'ch.xtin.skypingalfred.wait'", ",", "False...
a valid skypename has been matched .
train
false
52,834
def sm_backend_conf_get(context, sm_backend_conf_id):
    """Get a specific SM backend config (delegates to the DB implementation)."""
    return IMPL.sm_backend_conf_get(context, sm_backend_conf_id)
[ "def", "sm_backend_conf_get", "(", "context", ",", "sm_backend_conf_id", ")", ":", "return", "IMPL", ".", "sm_backend_conf_get", "(", "context", ",", "sm_backend_conf_id", ")" ]
get a specific sm backend config .
train
false
52,835
@pytest.fixture(scope='module')
def static_file_directory():
    """The 'static' directory next to this test module, to serve files from."""
    this_file = inspect.getfile(inspect.currentframe())
    this_dir = os.path.dirname(os.path.abspath(this_file))
    return os.path.join(this_dir, 'static')
[ "@", "pytest", ".", "fixture", "(", "scope", "=", "'module'", ")", "def", "static_file_directory", "(", ")", ":", "current_file", "=", "inspect", ".", "getfile", "(", "inspect", ".", "currentframe", "(", ")", ")", "current_directory", "=", "os", ".", "path...
the static directory to serve .
train
false
52,836
def get_mav_param(param, default=None):
    """Return an EEPROM parameter value, or *default* when unset."""
    return mpstate.mav_param.get(param, default)
[ "def", "get_mav_param", "(", "param", ",", "default", "=", "None", ")", ":", "return", "mpstate", ".", "mav_param", ".", "get", "(", "param", ",", "default", ")" ]
return a eeprom parameter value .
train
false
52,838
def cache_root(environ=None):
    """The root directory for zipline cache files."""
    return zipline_path(['cache'], environ=environ)
[ "def", "cache_root", "(", "environ", "=", "None", ")", ":", "return", "zipline_path", "(", "[", "'cache'", "]", ",", "environ", "=", "environ", ")" ]
the root directory for zipline cache files .
train
false
52,839
def test_doc_api_right_edge(en_tokenizer):
    """Test for a bug occurring from the unshift action (right_edge)."""
    text = u'I have proposed to myself, for the sake of such as live under the government of the Romans, to translate those books into the Greek tongue.'
    # Relative head offsets for each token.
    heads = [2, 1, 0, -1, -1, -3, 15, 1, -2, -1, 1, -3, -1, -1, 1, -2, -1, 1,
             -2, -7, 1, -19, 1, -2, -3, 2, 1, -3, -26]
    tokens = en_tokenizer(text)
    doc = get_doc(tokens.vocab, [t.text for t in tokens], heads=heads)
    assert doc[6].text == u'for'
    expected_subtree = [u'for', u'the', u'sake', u'of', u'such', u'as', u'live',
                        u'under', u'the', u'government', u'of', u'the',
                        u'Romans', u',']
    assert [w.text for w in doc[6].subtree] == expected_subtree
    assert doc[6].right_edge.text == u','
[ "def", "test_doc_api_right_edge", "(", "en_tokenizer", ")", ":", "text", "=", "u'I have proposed to myself, for the sake of such as live under the government of the Romans, to translate those books into the Greek tongue.'", "heads", "=", "[", "2", ",", "1", ",", "0", ",", "(", ...
test for bug occurring from unshift action .
train
false
52,841
def test_class_in_docstr():
    """Regression test for a problem with classes in docstrings."""
    src = '"\nclasses\n"'
    jedi.Script(src, 1, 0)._get_module()
    src_with_import = src + '\nimport os'
    assert jedi.Script(src_with_import, 4, 8).goto_assignments()
[ "def", "test_class_in_docstr", "(", ")", ":", "a", "=", "'\"\\nclasses\\n\"'", "jedi", ".", "Script", "(", "a", ",", "1", ",", "0", ")", ".", "_get_module", "(", ")", "b", "=", "(", "a", "+", "'\\nimport os'", ")", "assert", "jedi", ".", "Script", "(...
regression test for a problem with classes in docstrings .
train
false
52,843
def compress_directory(directory, filename):
    """Compress *directory* into a gzipped tar archive at *filename*."""
    arcname = path(directory).name
    with tarfile.open(filename, 'w:gz') as tar_file:
        tar_file.add(directory, arcname=arcname)
[ "def", "compress_directory", "(", "directory", ",", "filename", ")", ":", "mode", "=", "'w:gz'", "name", "=", "path", "(", "directory", ")", ".", "name", "with", "tarfile", ".", "open", "(", "filename", ",", "mode", ")", "as", "tar_file", ":", "tar_file"...
compress a directory into a tar .
train
false
52,844
def check_metadata_properties(metadata=None):
    """Checks that the volume metadata properties are valid.

    Raises InvalidVolumeMetadata for a blank key and
    InvalidVolumeMetadataSize for keys or values longer than 255 characters.

    (Replaced ``len(k) == 0`` with the truthiness idiom ``not k``, which is
    equivalent for strings and also rejects a None key cleanly instead of
    raising TypeError.)
    """
    if not metadata:
        metadata = {}
    for k, v in metadata.items():
        if not k:
            msg = _('Metadata property key blank.')
            LOG.debug(msg)
            raise exception.InvalidVolumeMetadata(reason=msg)
        if len(k) > 255:
            msg = _('Metadata property key %s greater than 255 characters.') % k
            LOG.debug(msg)
            raise exception.InvalidVolumeMetadataSize(reason=msg)
        if len(v) > 255:
            msg = _('Metadata property key %s value greater than 255 characters.') % k
            LOG.debug(msg)
            raise exception.InvalidVolumeMetadataSize(reason=msg)
[ "def", "check_metadata_properties", "(", "metadata", "=", "None", ")", ":", "if", "(", "not", "metadata", ")", ":", "metadata", "=", "{", "}", "for", "(", "k", ",", "v", ")", "in", "metadata", ".", "items", "(", ")", ":", "if", "(", "len", "(", "...
checks that the volume metadata properties are valid .
train
false
52,845
def _has_access_descriptor(user, action, descriptor, course_key=None):
    """Check whether *user* may perform *action* ('load'/'staff'/'instructor')
    on *descriptor*, dispatching to the matching checker."""

    def can_load():
        """Whether the user can load (view) this descriptor.

        NOTE: This does not check that the student is enrolled in the course
        that contains this module; views are expected to check the course so
        the enrollments table is not hit on every module load.
        """
        if not _has_group_access(descriptor, user, course_key):
            return ACCESS_DENIED
        if _has_staff_access_to_descriptor(user, descriptor, course_key):
            return ACCESS_GRANTED
        return (
            _visible_to_nonstaff_users(descriptor)
            and _can_access_descriptor_with_milestones(user, descriptor, course_key)
            and (
                _has_detached_class_tag(descriptor)
                or _can_access_descriptor_with_start_date(user, descriptor, course_key)
            )
        )

    checkers = {
        'load': can_load,
        'staff': lambda: _has_staff_access_to_descriptor(user, descriptor, course_key),
        'instructor': lambda: _has_instructor_access_to_descriptor(user, descriptor, course_key),
    }
    return _dispatch(checkers, action, user, descriptor)
[ "def", "_has_access_descriptor", "(", "user", ",", "action", ",", "descriptor", ",", "course_key", "=", "None", ")", ":", "def", "can_load", "(", ")", ":", "if", "(", "not", "_has_group_access", "(", "descriptor", ",", "user", ",", "course_key", ")", ")", ...
check if user has access to this descriptor .
train
false
52,847
@skip('silverlight', 'multiple_execute')
def test_pack_module_relative_collision():
    """When importing a package item the package should be updated with the child.

    Fix: the original finally-block unconditionally unlinked files whose path
    variables might never have been bound (and whose files were never
    created) when setup failed partway, masking the real error with a
    NameError/OSError.  Paths are now computed before the try (pure string
    joins) and cleanup only removes what actually exists.
    """
    mod_backup = dict(sys.modules)
    _f_dir = path_combine(testpath.public_testdir, 'test_dir')
    _f_init = path_combine(_f_dir, '__init__.py')
    _f_foo_dir = path_combine(_f_dir, 'foo')
    _f_foo_py = path_combine(_f_foo_dir, 'foo.py')
    _f_foo_init = path_combine(_f_foo_dir, '__init__.py')
    try:
        ensure_directory_present(_f_dir)
        ensure_directory_present(_f_foo_dir)
        write_to_file(_f_init, 'from foo import bar')
        write_to_file(_f_foo_py, 'bar = "BAR"')
        write_to_file(_f_foo_init, 'from foo import bar')
        import test_dir
        AreEqual(test_dir.bar, 'BAR')
    finally:
        sys.modules = mod_backup
        # Remove files before their directories; skip anything never created.
        for f in (_f_foo_py, _f_foo_init, _f_init):
            if os.path.exists(f):
                os.unlink(f)
        for d in (_f_foo_dir, _f_dir):
            if os.path.exists(d):
                os.rmdir(d)
[ "@", "skip", "(", "'silverlight'", ",", "'multiple_execute'", ")", "def", "test_pack_module_relative_collision", "(", ")", ":", "try", ":", "mod_backup", "=", "dict", "(", "sys", ".", "modules", ")", "_f_dir", "=", "path_combine", "(", "testpath", ".", "public...
when importing a package item the package should be updated with the child .
train
false
52,848
def _validate_prototype(key, prototype, protparents, visited):
    """Run validation on a prototype, recursing through its parent prototypes.

    Raises RuntimeError on infinite nesting, self-prototyping, or a missing
    parent prototype.  *visited* accumulates ids of prototypes already seen
    to detect cycles.

    Fix: two error paths used ``('%s ...' % key) or prototype`` — the
    formatted string is never falsy, so the intended fallback to the raw
    prototype when *key* is None was dead code.  They now interpolate
    ``(key or prototype)`` like the third error path already did.
    """
    assert isinstance(prototype, dict)
    if id(prototype) in visited:
        raise RuntimeError('%s has infinite nesting of prototypes.' % (key or prototype))
    visited.append(id(prototype))
    protstrings = prototype.get('prototype')
    if protstrings:
        for protstring in make_iter(protstrings):
            if key is not None and protstring == key:
                raise RuntimeError('%s tries to prototype itself.' % (key or prototype))
            protparent = protparents.get(protstring)
            if not protparent:
                raise RuntimeError("%s's prototype '%s' was not found."
                                   % ((key or prototype), protstring))
            _validate_prototype(protstring, protparent, protparents, visited)
[ "def", "_validate_prototype", "(", "key", ",", "prototype", ",", "protparents", ",", "visited", ")", ":", "assert", "isinstance", "(", "prototype", ",", "dict", ")", "if", "(", "id", "(", "prototype", ")", "in", "visited", ")", ":", "raise", "RuntimeError"...
run validation on a prototype .
train
false
52,849
def getNewRepository():
    """Get new repository."""
    return ExportRepository()
[ "def", "getNewRepository", "(", ")", ":", "return", "ExportRepository", "(", ")" ]
get new repository .
train
false
52,850
def full(shape, fill_value, dtype=None):
    """Returns a new array of the given shape and dtype, filled with fill_value."""
    out = empty(shape, dtype)
    out.fill(fill_value)
    return out
[ "def", "full", "(", "shape", ",", "fill_value", ",", "dtype", "=", "None", ")", ":", "a", "=", "empty", "(", "shape", ",", "dtype", ")", "a", ".", "fill", "(", "fill_value", ")", "return", "a" ]
returns a new array of given shape and dtype .
train
false
52,852
def splitArgs(argstr):
    """Split an argument string 'fn,arg,...,key=val,...' into (fn, args, kwargs).

    Positional values and keyword values are passed through makeNumeric.
    """
    parts = argstr.split(',')
    fn = parts[0]
    args = []
    kwargs = {}
    # Single pass: '='-parameters become kwargs, the rest positional args.
    for param in parts[1:]:
        if '=' in param:
            key, val = param.split('=', 1)
            kwargs[key] = makeNumeric(val)
        else:
            args.append(makeNumeric(param))
    return fn, args, kwargs
[ "def", "splitArgs", "(", "argstr", ")", ":", "split", "=", "argstr", ".", "split", "(", "','", ")", "fn", "=", "split", "[", "0", "]", "params", "=", "split", "[", "1", ":", "]", "args", "=", "[", "makeNumeric", "(", "s", ")", "for", "s", "in",...
split argument string into usable python arguments argstr: argument string with format fn .
train
false
52,853
def plot_features(im, locs, circle=False):
    """Show image with features.

    NOTE(review): rows of *locs* appear to be (x, y) or (x, y, radius) when
    circle=True — confirm against callers.
    """
    def draw_circle(center, radius):
        # Parametric circle sampled at 101 points.
        t = arange(0, 1.01, 0.01) * 2 * pi
        x = radius * cos(t) + center[0]
        y = radius * sin(t) + center[1]
        plot(x, y, 'b', linewidth=2)

    imshow(im)
    if circle:
        for p in locs:
            draw_circle(p[:2], p[2])
    else:
        plot(locs[:, 0], locs[:, 1], 'ob')
    axis('off')
[ "def", "plot_features", "(", "im", ",", "locs", ",", "circle", "=", "False", ")", ":", "def", "draw_circle", "(", "c", ",", "r", ")", ":", "t", "=", "(", "(", "arange", "(", "0", ",", "1.01", ",", "0.01", ")", "*", "2", ")", "*", "pi", ")", ...
show image with features .
train
false
52,854
def load_syntax():
    """Load all the syntax definition files (*.json) into settings.

    Populates settings.SYNTAX[name] with each parsed structure and maps each
    declared extension to its syntax name in settings.EXTENSIONS.

    Fix: ``structure.get('extension')`` was iterated without a default, so a
    syntax file lacking the 'extension' key (or with it set to null) crashed
    with ``TypeError: 'NoneType' object is not iterable``; it is now guarded.
    """
    files = os.listdir(resources.SYNTAX_FILES)
    for f in files:
        if not f.endswith('.json'):
            continue
        fname = os.path.join(resources.SYNTAX_FILES, f)
        structure = read_json(fname)
        if structure == {}:
            # Unparseable/empty syntax file: skip it.
            continue
        name = os.path.splitext(f)[0]
        settings.SYNTAX[name] = structure
        for ext in structure.get('extension') or ():
            if ext is not None:
                settings.EXTENSIONS[ext] = name
[ "def", "load_syntax", "(", ")", ":", "empty", "=", "dict", "(", ")", "files", "=", "os", ".", "listdir", "(", "resources", ".", "SYNTAX_FILES", ")", "for", "f", "in", "files", ":", "if", "(", "not", "f", ".", "endswith", "(", "'.json'", ")", ")", ...
load all the syntax files .
train
false
52,856
@task(queue='web')
def finish_build(version_pk, build_pk, hostname=None, html=False,
                 localmedia=False, search=False, pdf=False, epub=False):
    """Build finished: mark the version built, clear stale artifacts, move
    output files into place and fan out the follow-up tasks."""
    version = Version.objects.get(pk=version_pk)
    build = Build.objects.get(pk=build_pk)
    if html:
        version.active = True
        version.built = True
        version.save()
    # Drop artifacts for formats that were not produced in this build.
    if not pdf:
        clear_pdf_artifacts(version)
    if not epub:
        clear_epub_artifacts(version)
    move_files(version_pk=version_pk, hostname=hostname, html=html,
               localmedia=localmedia, search=search, pdf=pdf, epub=epub)
    broadcast(type='app', task=symlink_project, args=[version.project.pk])
    update_static_metadata.delay(version.project.pk)
    fileify.delay(version.pk, commit=build.commit)
    update_search.delay(version.pk, commit=build.commit)
[ "@", "task", "(", "queue", "=", "'web'", ")", "def", "finish_build", "(", "version_pk", ",", "build_pk", ",", "hostname", "=", "None", ",", "html", "=", "False", ",", "localmedia", "=", "False", ",", "search", "=", "False", ",", "pdf", "=", "False", ...
build finished .
train
false
52,857
def vfs_construct_path(base_path, *path_components):
    """Mimic os.path.join for '/'-separated virtual paths.

    Components are joined with single slashes; an absolute component
    (leading '/') resets the accumulated path, matching os.path.join.
    """
    result = base_path
    for part in path_components:
        if part.startswith('/'):
            # Absolute component discards everything accumulated so far.
            result = part
        elif result == '' or result.endswith('/'):
            result += part
        else:
            result = '%s/%s' % (result, part)
    return result
[ "def", "vfs_construct_path", "(", "base_path", ",", "*", "path_components", ")", ":", "path", "=", "base_path", "for", "component", "in", "path_components", ":", "if", "component", ".", "startswith", "(", "'/'", ")", ":", "path", "=", "component", "elif", "(...
mimics behavior of os .
train
false
52,858
def script_args(f):
    """Single decorator for adding the standard script magic arguments
    (--out, --err, --bg, --proc) to *f*."""
    arg_decorators = [
        magic_arguments.argument('--out', type=str, help='The variable in which to store stdout from the script.\n If the script is backgrounded, this will be the stdout *pipe*,\n instead of the stderr text itself.\n '),
        magic_arguments.argument('--err', type=str, help='The variable in which to store stderr from the script.\n If the script is backgrounded, this will be the stderr *pipe*,\n instead of the stderr text itself.\n '),
        magic_arguments.argument('--bg', action='store_true', help='Whether to run the script in the background.\n If given, the only way to see the output of the command is\n with --out/err.\n '),
        magic_arguments.argument('--proc', type=str, help='The variable in which to store Popen instance.\n This is used only when --bg option is given.\n '),
    ]
    # Apply in the same order as listing so argparse ordering is preserved.
    for decorate in arg_decorators:
        f = decorate(f)
    return f
[ "def", "script_args", "(", "f", ")", ":", "args", "=", "[", "magic_arguments", ".", "argument", "(", "'--out'", ",", "type", "=", "str", ",", "help", "=", "'The variable in which to store stdout from the script.\\n If the script is backgrounded, this will be the ...
single decorator for adding script args .
train
true
52,859
def intersect(valueA, valueB, lowerCase=False):
    """Returns the intersection of the array-ized values.

    Result order follows valueA; with lowerCase=True, string members are
    compared case-insensitively.
    """
    if not (valueA and valueB):
        return []
    listA = arrayizeValue(valueA)
    listB = arrayizeValue(valueB)
    if lowerCase:
        listA = [item.lower() if isinstance(item, basestring) else item for item in listA]
        listB = [item.lower() if isinstance(item, basestring) else item for item in listB]
    return [item for item in listA if item in listB]
[ "def", "intersect", "(", "valueA", ",", "valueB", ",", "lowerCase", "=", "False", ")", ":", "retVal", "=", "[", "]", "if", "(", "valueA", "and", "valueB", ")", ":", "valueA", "=", "arrayizeValue", "(", "valueA", ")", "valueB", "=", "arrayizeValue", "("...
returns intersection of the array-ized values .
train
false
52,860
def assertFocused(self, name):
    """Assert that the window with the specified name is currently focused."""
    actual = self.c.window.info()['name']
    assert actual == name, 'Got {0!r}, expected {1!r}'.format(actual, name)
[ "def", "assertFocused", "(", "self", ",", "name", ")", ":", "info", "=", "self", ".", "c", ".", "window", ".", "info", "(", ")", "assert", "(", "info", "[", "'name'", "]", "==", "name", ")", ",", "'Got {0!r}, expected {1!r}'", ".", "format", "(", "in...
asserts that window with specified name is currently focused .
train
false
52,861
def initialize_dbs(settings):
    """Initialize the internal DBs from the Pelican settings dict.

    Records the main site's settings/language/url, resets the subsite queue,
    and clears the per-run caches.
    """
    global _MAIN_SETTINGS, _MAIN_SITEURL, _MAIN_LANG, _SUBSITE_QUEUE
    _MAIN_SETTINGS = settings
    _MAIN_LANG = settings['DEFAULT_LANG']
    _MAIN_SITEURL = settings['SITEURL']
    # Copy so later mutation of the queue does not alter the settings dict.
    _SUBSITE_QUEUE = settings.get('I18N_SUBSITES', {}).copy()
    prepare_site_db_and_overrides()
    # Caches below are only valid for a single generation run.
    _SITES_RELPATH_DB.clear()
    _NATIVE_CONTENT_URL_DB.clear()
    _GENERATOR_DB.clear()
[ "def", "initialize_dbs", "(", "settings", ")", ":", "global", "_MAIN_SETTINGS", ",", "_MAIN_SITEURL", ",", "_MAIN_LANG", ",", "_SUBSITE_QUEUE", "_MAIN_SETTINGS", "=", "settings", "_MAIN_LANG", "=", "settings", "[", "'DEFAULT_LANG'", "]", "_MAIN_SITEURL", "=", "setti...
initialize internal dbs using the pelican settings dict this clears the dbs for e .
train
true
52,863
@ssl_required
def aaq_step5(request, product_key, category_key):
    """Step 5: show the full question form."""
    return aaq(request, product_key=product_key, category_key=category_key,
               showform=True, step=3)
[ "@", "ssl_required", "def", "aaq_step5", "(", "request", ",", "product_key", ",", "category_key", ")", ":", "return", "aaq", "(", "request", ",", "product_key", "=", "product_key", ",", "category_key", "=", "category_key", ",", "showform", "=", "True", ",", ...
step 5: show full question form .
train
false
52,865
def scan_default_sr(session):
    """Look up the system default SR, trigger a re-scan, and return its ref."""
    sr_ref = safe_find_sr(session)
    _scan_sr(session, sr_ref)
    return sr_ref
[ "def", "scan_default_sr", "(", "session", ")", ":", "sr_ref", "=", "safe_find_sr", "(", "session", ")", "_scan_sr", "(", "session", ",", "sr_ref", ")", "return", "sr_ref" ]
looks for the system default sr and triggers a re-scan .
train
false
52,867
def quota_usage_update_resource(context, old_res, new_res):
    """Update the resource field in quota_usages (delegates to the DB impl)."""
    return IMPL.quota_usage_update_resource(context, old_res, new_res)
[ "def", "quota_usage_update_resource", "(", "context", ",", "old_res", ",", "new_res", ")", ":", "return", "IMPL", ".", "quota_usage_update_resource", "(", "context", ",", "old_res", ",", "new_res", ")" ]
update resource field in quota_usages .
train
false
52,868
def metropolis_hastings_accept(energy_prev, energy_next, s_rng):
    """Perform a symbolic Metropolis-Hastings accept-reject move.

    Accepts when exp(energy_prev - energy_next) >= u for u ~ Uniform(0, 1),
    elementwise over the energy tensors.
    """
    ediff = energy_prev - energy_next
    accept_ratio = TT.exp(ediff)
    return (accept_ratio - s_rng.uniform(size=energy_prev.shape)) >= 0
[ "def", "metropolis_hastings_accept", "(", "energy_prev", ",", "energy_next", ",", "s_rng", ")", ":", "ediff", "=", "(", "energy_prev", "-", "energy_next", ")", "return", "(", "(", "TT", ".", "exp", "(", "ediff", ")", "-", "s_rng", ".", "uniform", "(", "s...
performs a metropolis-hastings accept-reject move .
train
false
52,869
@contextlib.contextmanager
def fix_import_path(args):
    """Prepend each argument's python path to sys.path for the context's
    duration, restoring the original sys.path on exit."""
    original = list(sys.path)
    additions = []
    for arg in args:
        arg_path = _get_python_path(arg)
        if arg_path not in additions:
            additions.append(arg_path)
    # In-place slice assignment so existing references to sys.path see it.
    sys.path[:] = additions + sys.path
    try:
        yield
    finally:
        sys.path[:] = original
[ "@", "contextlib", ".", "contextmanager", "def", "fix_import_path", "(", "args", ")", ":", "orig", "=", "list", "(", "sys", ".", "path", ")", "changes", "=", "[", "]", "for", "arg", "in", "args", ":", "path", "=", "_get_python_path", "(", "arg", ")", ...
prepare sys .
train
true
52,871
def safe_get_config_value(group, name):
    """Safely get an oslo config option from tempest.

    Falls back to the translated option name when *name* does not exist in
    the installed tempest version.
    """
    conf_group = getattr(CONF, group)
    try:
        return getattr(conf_group, name)
    except cfg.NoSuchOptError:
        return getattr(conf_group, config_opts_translator[name])
[ "def", "safe_get_config_value", "(", "group", ",", "name", ")", ":", "conf_group", "=", "getattr", "(", "CONF", ",", "group", ")", "try", ":", "return", "getattr", "(", "conf_group", ",", "name", ")", "except", "cfg", ".", "NoSuchOptError", ":", "return", ...
safely get oslo config opts from tempest .
train
false
52,873
def compare_partial_dicts(source, comparee): for (key, value) in six.iteritems(source): if ((key not in comparee) or (value != comparee[key])): return False return True
[ "def", "compare_partial_dicts", "(", "source", ",", "comparee", ")", ":", "for", "(", "key", ",", "value", ")", "in", "six", ".", "iteritems", "(", "source", ")", ":", "if", "(", "(", "key", "not", "in", "comparee", ")", "or", "(", "value", "!=", "...
compare dicts in a "partial" manner .
train
false
52,875
def md5_digest(instr): if six.PY3: b = salt.utils.to_bytes(instr) return hashlib.md5(b).hexdigest() return hashlib.md5(instr).hexdigest()
[ "def", "md5_digest", "(", "instr", ")", ":", "if", "six", ".", "PY3", ":", "b", "=", "salt", ".", "utils", ".", "to_bytes", "(", "instr", ")", "return", "hashlib", ".", "md5", "(", "b", ")", ".", "hexdigest", "(", ")", "return", "hashlib", ".", "...
generate an md5 hash of a given string .
train
false
52,876
def find_node(v, cls, ignore_clients=False): if ((v.owner is not None) and (ignore_clients or (len(v.clients) == 1))): if isinstance(v.owner.op, cls): return v.owner elif (isinstance(v.owner.op, GpuFromHost) and (v.owner.inputs[0].owner is not None) and (ignore_clients or (len(v.owner.inputs[0].clients) == 1)) and isinstance(v.owner.inputs[0].owner.op, HostFromGpu)): return find_node(v.owner.inputs[0].owner.inputs[0], cls) else: return None
[ "def", "find_node", "(", "v", ",", "cls", ",", "ignore_clients", "=", "False", ")", ":", "if", "(", "(", "v", ".", "owner", "is", "not", "None", ")", "and", "(", "ignore_clients", "or", "(", "len", "(", "v", ".", "clients", ")", "==", "1", ")", ...
find the node that has an op of of type cls in v .
train
false
52,877
@cli.command('transpose') @click.option('-r', '--rotate', callback=convert_rotation, help='Rotates the image (in degrees)') @click.option('-f', '--flip', callback=convert_flip, help='Flips the image [LR / TB]') @processor def transpose_cmd(images, rotate, flip): for image in images: if (rotate is not None): (mode, degrees) = rotate click.echo(('Rotate "%s" by %ddeg' % (image.filename, degrees))) image = copy_filename(image.transpose(mode), image) if (flip is not None): (mode, direction) = flip click.echo(('Flip "%s" %s' % (image.filename, direction))) image = copy_filename(image.transpose(mode), image) (yield image)
[ "@", "cli", ".", "command", "(", "'transpose'", ")", "@", "click", ".", "option", "(", "'-r'", ",", "'--rotate'", ",", "callback", "=", "convert_rotation", ",", "help", "=", "'Rotates the image (in degrees)'", ")", "@", "click", ".", "option", "(", "'-f'", ...
transposes an image by either rotating or flipping it .
train
false
52,878
def get_secure_random_string(size): value = os.urandom(size) value = binascii.hexlify(value) value = value.decode('utf-8')[:size] return value
[ "def", "get_secure_random_string", "(", "size", ")", ":", "value", "=", "os", ".", "urandom", "(", "size", ")", "value", "=", "binascii", ".", "hexlify", "(", "value", ")", "value", "=", "value", ".", "decode", "(", "'utf-8'", ")", "[", ":", "size", ...
return a string of size random bytes .
train
false
52,881
def get_size_price(driver_type, driver_name, size_id): pricing = get_pricing(driver_type=driver_type, driver_name=driver_name) try: price = float(pricing[size_id]) except KeyError: price = None return price
[ "def", "get_size_price", "(", "driver_type", ",", "driver_name", ",", "size_id", ")", ":", "pricing", "=", "get_pricing", "(", "driver_type", "=", "driver_type", ",", "driver_name", "=", "driver_name", ")", "try", ":", "price", "=", "float", "(", "pricing", ...
return price for the provided size .
train
false
52,885
def get_file_url(object): filename = inspect.getsourcefile(object) lines = inspect.getsourcelines(object) uri = (u'file://%s#L%d' % (filename, lines[1])) if is_git_repo(): info = nipype.get_info() shortfile = os.path.join(u'nipype', filename.split(u'nipype/')[(-1)]) uri = (u'http://github.com/nipy/nipype/tree/%s/%s#L%d' % (info[u'commit_hash'], shortfile, lines[1])) return uri
[ "def", "get_file_url", "(", "object", ")", ":", "filename", "=", "inspect", ".", "getsourcefile", "(", "object", ")", "lines", "=", "inspect", ".", "getsourcelines", "(", "object", ")", "uri", "=", "(", "u'file://%s#L%d'", "%", "(", "filename", ",", "lines...
returns local or remote url for an object .
train
false
52,886
def bugreport(app): import billiard import celery import kombu try: conn = app.connection() driver_v = u'{0}:{1}'.format(conn.transport.driver_name, conn.transport.driver_version()) transport = conn.transport_cls except Exception: transport = driver_v = u'' return BUGREPORT_INFO.format(system=_platform.system(), arch=u', '.join((x for x in _platform.architecture() if x)), py_i=pyimplementation(), celery_v=celery.VERSION_BANNER, kombu_v=kombu.__version__, billiard_v=billiard.__version__, py_v=_platform.python_version(), driver_v=driver_v, transport=transport, results=maybe_sanitize_url((app.conf.result_backend or u'disabled')), human_settings=app.conf.humanize(), loader=qualname(app.loader.__class__))
[ "def", "bugreport", "(", "app", ")", ":", "import", "billiard", "import", "celery", "import", "kombu", "try", ":", "conn", "=", "app", ".", "connection", "(", ")", "driver_v", "=", "u'{0}:{1}'", ".", "format", "(", "conn", ".", "transport", ".", "driver_...
return a string containing information useful in bug-reports .
train
false
52,887
def grouper(n, iterable, fillvalue=None): args = ([iter(iterable)] * n) return izip_longest(fillvalue=fillvalue, *args)
[ "def", "grouper", "(", "n", ",", "iterable", ",", "fillvalue", "=", "None", ")", ":", "args", "=", "(", "[", "iter", "(", "iterable", ")", "]", "*", "n", ")", "return", "izip_longest", "(", "fillvalue", "=", "fillvalue", ",", "*", "args", ")" ]
grouper --> abc def gxx .
train
true
52,889
@functools.wraps def process_request_body(fn): def wrapped(*args, **kwargs): if (cherrypy.request.process_request_body is not False): fn(*args, **kwargs) return wrapped
[ "@", "functools", ".", "wraps", "def", "process_request_body", "(", "fn", ")", ":", "def", "wrapped", "(", "*", "args", ",", "**", "kwargs", ")", ":", "if", "(", "cherrypy", ".", "request", ".", "process_request_body", "is", "not", "False", ")", ":", "...
a decorator to skip a processor function if process_request_body is false .
train
true
52,891
def cyclic_equals(seq1, seq2): seq2 = tuple(seq2) return any(((x == tuple(seq2)) for x in cycles(seq1)))
[ "def", "cyclic_equals", "(", "seq1", ",", "seq2", ")", ":", "seq2", "=", "tuple", "(", "seq2", ")", "return", "any", "(", "(", "(", "x", "==", "tuple", "(", "seq2", ")", ")", "for", "x", "in", "cycles", "(", "seq1", ")", ")", ")" ]
decide whether two sequences are equal up to cyclic permutations .
train
false
52,894
def set_rate_limit(limit_or_interval=1.0, new_requests=1): global limit_interval global limit_requests global do_rate_limit if isinstance(limit_or_interval, bool): do_rate_limit = limit_or_interval else: if (limit_or_interval <= 0.0): raise ValueError("limit_or_interval can't be less than 0") if (new_requests <= 0): raise ValueError("new_requests can't be less than 0") do_rate_limit = True limit_interval = limit_or_interval limit_requests = new_requests
[ "def", "set_rate_limit", "(", "limit_or_interval", "=", "1.0", ",", "new_requests", "=", "1", ")", ":", "global", "limit_interval", "global", "limit_requests", "global", "do_rate_limit", "if", "isinstance", "(", "limit_or_interval", ",", "bool", ")", ":", "do_rate...
sets the rate limiting behavior of the module .
train
false
52,895
def readwav(file): wav = _wave.open(file) rate = wav.getframerate() nchannels = wav.getnchannels() sampwidth = wav.getsampwidth() nframes = wav.getnframes() data = wav.readframes(nframes) wav.close() array = _wav2array(nchannels, sampwidth, data) return (rate, sampwidth, array)
[ "def", "readwav", "(", "file", ")", ":", "wav", "=", "_wave", ".", "open", "(", "file", ")", "rate", "=", "wav", ".", "getframerate", "(", ")", "nchannels", "=", "wav", ".", "getnchannels", "(", ")", "sampwidth", "=", "wav", ".", "getsampwidth", "(",...
read a wav file .
train
false
52,896
def default_iget(fields, default=None): myiget = operator.itemgetter(*fields) L = len(fields) def f(thing): try: ans = list(myiget(thing)) if (L < 2): ans = [ans] return ans except KeyError: return [thing.get(x, default) for x in fields] f.__doc__ = ('itemgetter with default %r for fields %r' % (default, fields)) f.__name__ = 'default_itemgetter' return f
[ "def", "default_iget", "(", "fields", ",", "default", "=", "None", ")", ":", "myiget", "=", "operator", ".", "itemgetter", "(", "*", "fields", ")", "L", "=", "len", "(", "fields", ")", "def", "f", "(", "thing", ")", ":", "try", ":", "ans", "=", "...
itemgetter with default handling .
train
false
52,897
@utils.arg('server', metavar='<server>', help=_('Name or ID of server.')) def do_diagnostics(cs, args): server = _find_server(cs, args.server) utils.print_dict(cs.servers.diagnostics(server)[1], wrap=80)
[ "@", "utils", ".", "arg", "(", "'server'", ",", "metavar", "=", "'<server>'", ",", "help", "=", "_", "(", "'Name or ID of server.'", ")", ")", "def", "do_diagnostics", "(", "cs", ",", "args", ")", ":", "server", "=", "_find_server", "(", "cs", ",", "ar...
retrieve server diagnostics .
train
false
52,898
def _start_download(dest, tab): loader = _Downloader(tab, dest) loader.run()
[ "def", "_start_download", "(", "dest", ",", "tab", ")", ":", "loader", "=", "_Downloader", "(", "tab", ",", "dest", ")", "loader", ".", "run", "(", ")" ]
start downloading the current page and all assets to an mhtml file .
train
false
52,900
def _get_base_alphabet(alphabet): a = alphabet while isinstance(a, AlphabetEncoder): a = a.alphabet assert isinstance(a, Alphabet), ('Invalid alphabet found, %s' % repr(a)) return a
[ "def", "_get_base_alphabet", "(", "alphabet", ")", ":", "a", "=", "alphabet", "while", "isinstance", "(", "a", ",", "AlphabetEncoder", ")", ":", "a", "=", "a", ".", "alphabet", "assert", "isinstance", "(", "a", ",", "Alphabet", ")", ",", "(", "'Invalid a...
returns the non-gapped non-stop-codon alphabet object .
train
false
52,901
def get_meth_func(obj): if PY2: return obj.im_func else: return obj.__func__
[ "def", "get_meth_func", "(", "obj", ")", ":", "if", "PY2", ":", "return", "obj", ".", "im_func", "else", ":", "return", "obj", ".", "__func__" ]
return method function object .
train
false
52,902
def waypoint(): return s3_rest_controller()
[ "def", "waypoint", "(", ")", ":", "return", "s3_rest_controller", "(", ")" ]
restful crud controller for gps waypoints .
train
false
52,904
def create_overlay_workflow(name='overlay'): overlay = pe.Workflow(name='overlay') overlaystats = pe.MapNode(interface=fsl.Overlay(), name='overlaystats', iterfield=['stat_image']) overlaystats.inputs.show_negative_stats = True overlaystats.inputs.auto_thresh_bg = True slicestats = pe.MapNode(interface=fsl.Slicer(), name='slicestats', iterfield=['in_file']) slicestats.inputs.all_axial = True slicestats.inputs.image_width = 512 overlay.connect(overlaystats, 'out_file', slicestats, 'in_file') return overlay
[ "def", "create_overlay_workflow", "(", "name", "=", "'overlay'", ")", ":", "overlay", "=", "pe", ".", "Workflow", "(", "name", "=", "'overlay'", ")", "overlaystats", "=", "pe", ".", "MapNode", "(", "interface", "=", "fsl", ".", "Overlay", "(", ")", ",", ...
setup overlay workflow .
train
false
52,906
def task_install_node_certificates(ca_cert, node_cert, node_key): return sequence([run('mkdir -p /etc/flocker'), run('chmod u=rwX,g=,o= /etc/flocker'), put(path='/etc/flocker/cluster.crt', content=ca_cert.getContent()), put(path='/etc/flocker/node.crt', content=node_cert.getContent()), put(path='/etc/flocker/node.key', content=node_key.getContent(), log_content_filter=_remove_private_key)])
[ "def", "task_install_node_certificates", "(", "ca_cert", ",", "node_cert", ",", "node_key", ")", ":", "return", "sequence", "(", "[", "run", "(", "'mkdir -p /etc/flocker'", ")", ",", "run", "(", "'chmod u=rwX,g=,o= /etc/flocker'", ")", ",", "put", "(", "path", "...
install certificates and private key required by a node .
train
false
52,907
def memory_size_from_info(shape, strides, itemsize): assert (len(shape) == len(strides)), '# dim mismatch' ndim = len(shape) (s, e) = mviewbuf.memoryview_get_extents_info(shape, strides, ndim, itemsize) return (e - s)
[ "def", "memory_size_from_info", "(", "shape", ",", "strides", ",", "itemsize", ")", ":", "assert", "(", "len", "(", "shape", ")", "==", "len", "(", "strides", ")", ")", ",", "'# dim mismatch'", "ndim", "=", "len", "(", "shape", ")", "(", "s", ",", "e...
get the byte size of a contiguous memory buffer given the shape .
train
false
52,908
def index_template_exists(name, hosts=None, profile=None): es = _get_instance(hosts, profile) try: if es.indices.exists_template(name=name): return True else: return False except elasticsearch.exceptions.NotFoundError: return None return None
[ "def", "index_template_exists", "(", "name", ",", "hosts", "=", "None", ",", "profile", "=", "None", ")", ":", "es", "=", "_get_instance", "(", "hosts", ",", "profile", ")", "try", ":", "if", "es", ".", "indices", ".", "exists_template", "(", "name", "...
return a boolean indicating whether given index template exists cli example:: salt myminion elasticsearch .
train
false
52,909
@with_setup(state.setup, state.teardown) def test_subunit_output_with_no_errors(): state.expect = [Includes({'id': 'one commented scenario: Do nothing', 'status': 'success', 'details': Keys('stdout', 'stderr', 'steps')})] runner = Runner(feature_name('commented_feature'), enable_subunit=True) runner.run()
[ "@", "with_setup", "(", "state", ".", "setup", ",", "state", ".", "teardown", ")", "def", "test_subunit_output_with_no_errors", "(", ")", ":", "state", ".", "expect", "=", "[", "Includes", "(", "{", "'id'", ":", "'one commented scenario: Do nothing'", ",", "'s...
test subunit output with no errors .
train
false
52,910
def logout_with_message(request, msg, redirect=True): logout(request) if redirect: response = http.HttpResponseRedirect(('%s?next=%s' % (settings.LOGOUT_URL, request.path))) else: response = http.HttpResponseRedirect(settings.LOGOUT_URL) add_logout_reason(request, response, msg) return response
[ "def", "logout_with_message", "(", "request", ",", "msg", ",", "redirect", "=", "True", ")", ":", "logout", "(", "request", ")", "if", "redirect", ":", "response", "=", "http", ".", "HttpResponseRedirect", "(", "(", "'%s?next=%s'", "%", "(", "settings", "....
send httpresponseredirect to logout_url .
train
true
52,912
def _configure_base(module, base, conf_file, disable_gpg_check, installroot='/'): conf = base.conf conf.debuglevel = 0 conf.gpgcheck = (not disable_gpg_check) conf.assumeyes = True conf.installroot = installroot if conf_file: if (not os.access(conf_file, os.R_OK)): module.fail_json(msg='cannot read configuration file', conf_file=conf_file) else: conf.config_file_path = conf_file conf.read()
[ "def", "_configure_base", "(", "module", ",", "base", ",", "conf_file", ",", "disable_gpg_check", ",", "installroot", "=", "'/'", ")", ":", "conf", "=", "base", ".", "conf", "conf", ".", "debuglevel", "=", "0", "conf", ".", "gpgcheck", "=", "(", "not", ...
configure the dnf base object .
train
false
52,913
def _ValidateVisiblePrintableAsciiNotReserved(value, name): for char in value: if (char not in _VISIBLE_PRINTABLE_ASCII): raise ValueError(('%r must be visible printable ASCII: %r' % (name, value))) if value.startswith('!'): raise ValueError(('%r must not start with "!": %r' % (name, value))) return value
[ "def", "_ValidateVisiblePrintableAsciiNotReserved", "(", "value", ",", "name", ")", ":", "for", "char", "in", "value", ":", "if", "(", "char", "not", "in", "_VISIBLE_PRINTABLE_ASCII", ")", ":", "raise", "ValueError", "(", "(", "'%r must be visible printable ASCII: %...
checks if value is a visible printable ascii string not starting with ! .
train
false
52,914
def make_constant(args): def conv(a): if (a is None): return a elif isinstance(a, slice): return slice(conv(a.start), conv(a.stop), conv(a.step)) elif isinstance(a, (integer_types, numpy.integer)): return scal.ScalarConstant(scal.int64, a) else: return a return tuple(map(conv, args))
[ "def", "make_constant", "(", "args", ")", ":", "def", "conv", "(", "a", ")", ":", "if", "(", "a", "is", "None", ")", ":", "return", "a", "elif", "isinstance", "(", "a", ",", "slice", ")", ":", "return", "slice", "(", "conv", "(", "a", ".", "sta...
convert python litterals to theano constants in subtensor arguments .
train
false
52,916
def get_certified_programs(student): certified_programs = [] for credential in get_user_credentials(student): if ('program_uuid' in credential['credential']): certified_programs.append(credential['credential']['program_uuid']) return certified_programs
[ "def", "get_certified_programs", "(", "student", ")", ":", "certified_programs", "=", "[", "]", "for", "credential", "in", "get_user_credentials", "(", "student", ")", ":", "if", "(", "'program_uuid'", "in", "credential", "[", "'credential'", "]", ")", ":", "c...
find the uuids of all the programs for which the student has already been awarded a certificate .
train
false
52,918
def scan_dovecot_line(date, line, collector): m = re.match('(imap|pop3)-login: Login: user=<(.*?)>, method=PLAIN, rip=(.*?),', line) if m: (prot, login, ip) = (m.group(1), m.group(2), m.group(3)) logins_key = ('%s-logins' % prot) if (ip != '127.0.0.1'): collector[logins_key].setdefault(login, {})[ip] = date collector['activity-by-hour'][logins_key][date.hour] += 1
[ "def", "scan_dovecot_line", "(", "date", ",", "line", ",", "collector", ")", ":", "m", "=", "re", ".", "match", "(", "'(imap|pop3)-login: Login: user=<(.*?)>, method=PLAIN, rip=(.*?),'", ",", "line", ")", "if", "m", ":", "(", "prot", ",", "login", ",", "ip", ...
scan a dovecot log line and extract interesting data .
train
false
52,919
def request_user_has_rule_trigger_permission(request, trigger): if (not cfg.CONF.rbac.enable): return True user_db = get_user_db_from_request(request=request) rules_resolver = resolvers.get_resolver_for_resource_type(ResourceType.RULE) has_trigger_permission = rules_resolver.user_has_trigger_permission(user_db=user_db, trigger=trigger) if has_trigger_permission: return True return False
[ "def", "request_user_has_rule_trigger_permission", "(", "request", ",", "trigger", ")", ":", "if", "(", "not", "cfg", ".", "CONF", ".", "rbac", ".", "enable", ")", ":", "return", "True", "user_db", "=", "get_user_db_from_request", "(", "request", "=", "request...
check that the currently logged-in has necessary permissions on the trigger used / referenced inside the rule .
train
false
52,921
def get_unicode_sys_argv(): from ctypes import POINTER, byref, cdll, c_int, windll from ctypes.wintypes import LPCWSTR, LPWSTR GetCommandLineW = cdll.kernel32.GetCommandLineW GetCommandLineW.argtypes = [] GetCommandLineW.restype = LPCWSTR CommandLineToArgvW = windll.shell32.CommandLineToArgvW CommandLineToArgvW.argtypes = [LPCWSTR, POINTER(c_int)] CommandLineToArgvW.restype = POINTER(LPWSTR) cmd = GetCommandLineW() argc = c_int(0) argv = CommandLineToArgvW(cmd, byref(argc)) if (argc.value > 0): start = (argc.value - len(sys.argv)) return [argv[i] for i in xrange(start, argc.value)]
[ "def", "get_unicode_sys_argv", "(", ")", ":", "from", "ctypes", "import", "POINTER", ",", "byref", ",", "cdll", ",", "c_int", ",", "windll", "from", "ctypes", ".", "wintypes", "import", "LPCWSTR", ",", "LPWSTR", "GetCommandLineW", "=", "cdll", ".", "kernel32...
uses shell32 .
train
false
52,922
def standby(): set_power_state('standby')
[ "def", "standby", "(", ")", ":", "set_power_state", "(", "'standby'", ")" ]
power-on suspend .
train
false
52,923
def push_context(ctx): _local.__dict__.setdefault('stack', []).append(ctx)
[ "def", "push_context", "(", "ctx", ")", ":", "_local", ".", "__dict__", ".", "setdefault", "(", "'stack'", ",", "[", "]", ")", ".", "append", "(", "ctx", ")" ]
pushes a new context to the current stack .
train
false
52,924
def prime_as_sum_of_two_squares(p): if (not ((p % 4) == 1)): return if ((p % 8) == 5): b = 2 else: b = 3 while (pow(b, ((p - 1) // 2), p) == 1): b = nextprime(b) b = pow(b, ((p - 1) // 4), p) a = p while ((b ** 2) > p): (a, b) = (b, (a % b)) return (int((a % b)), int(b))
[ "def", "prime_as_sum_of_two_squares", "(", "p", ")", ":", "if", "(", "not", "(", "(", "p", "%", "4", ")", "==", "1", ")", ")", ":", "return", "if", "(", "(", "p", "%", "8", ")", "==", "5", ")", ":", "b", "=", "2", "else", ":", "b", "=", "...
represent a prime p as a unique sum of two squares; this can only be done if the prime is congruent to 1 mod 4 .
train
false
52,925
@blueprint.route('/sources/<source>/projects') def list_projects_by_source(source): return _list_projects(source=source)
[ "@", "blueprint", ".", "route", "(", "'/sources/<source>/projects'", ")", "def", "list_projects_by_source", "(", "source", ")", ":", "return", "_list_projects", "(", "source", "=", "source", ")" ]
return a list project names for which the source is reporting data .
train
false
52,926
def p_stmt_compound(p): p[0] = [p[1]]
[ "def", "p_stmt_compound", "(", "p", ")", ":", "p", "[", "0", "]", "=", "[", "p", "[", "1", "]", "]" ]
stmt : compound_stmt .
train
false
52,928
def cross_entropy_reward_loss(logits, actions, rewards): cross_entropy = tf.nn.sparse_softmax_cross_entropy_with_logits(logits, actions) loss = tf.reduce_sum(tf.mul(cross_entropy, rewards)) return loss
[ "def", "cross_entropy_reward_loss", "(", "logits", ",", "actions", ",", "rewards", ")", ":", "cross_entropy", "=", "tf", ".", "nn", ".", "sparse_softmax_cross_entropy_with_logits", "(", "logits", ",", "actions", ")", "loss", "=", "tf", ".", "reduce_sum", "(", ...
calculate the loss for policy gradient network .
train
false
52,930
def safe_sum_p_log_p(a, base=None): flat = ravel(a) nz = take(flat, nonzero(flat)[0]) logs = log(nz) if base: logs /= log(base) return sum((nz * logs), 0)
[ "def", "safe_sum_p_log_p", "(", "a", ",", "base", "=", "None", ")", ":", "flat", "=", "ravel", "(", "a", ")", "nz", "=", "take", "(", "flat", ",", "nonzero", "(", "flat", ")", "[", "0", "]", ")", "logs", "=", "log", "(", "nz", ")", "if", "bas...
calculates p * log(p) safely for an array that may contain zeros .
train
false
52,932
def dmp_validate(f, K=None): levels = _rec_validate(f, f, 0, K) u = levels.pop() if (not levels): return (_rec_strip(f, u), u) else: raise ValueError('invalid data structure for a multivariate polynomial')
[ "def", "dmp_validate", "(", "f", ",", "K", "=", "None", ")", ":", "levels", "=", "_rec_validate", "(", "f", ",", "f", ",", "0", ",", "K", ")", "u", "=", "levels", ".", "pop", "(", ")", "if", "(", "not", "levels", ")", ":", "return", "(", "_re...
return the number of levels in f and recursively strip it .
train
false
52,933
def fake_text_id(size=6, chars=(string.ascii_lowercase + string.digits)): return ''.join((random.choice(chars) for x in range(size)))
[ "def", "fake_text_id", "(", "size", "=", "6", ",", "chars", "=", "(", "string", ".", "ascii_lowercase", "+", "string", ".", "digits", ")", ")", ":", "return", "''", ".", "join", "(", "(", "random", ".", "choice", "(", "chars", ")", "for", "x", "in"...
create a random text id .
train
true
52,934
def _invalidate_ports(dpid): if (dpid in _dirty_switches): return t = Timer(_coalesce_period, _check_ports, args=(dpid,)) _dirty_switches[dpid] = t
[ "def", "_invalidate_ports", "(", "dpid", ")", ":", "if", "(", "dpid", "in", "_dirty_switches", ")", ":", "return", "t", "=", "Timer", "(", "_coalesce_period", ",", "_check_ports", ",", "args", "=", "(", "dpid", ",", ")", ")", "_dirty_switches", "[", "dpi...
registers the fact that port info for dpid may be out of date when the spanning tree adjusts the port flags .
train
false
52,935
def _factorize_from_iterable(values): from pandas.indexes.category import CategoricalIndex if (not is_list_like(values)): raise TypeError('Input must be list-like') if is_categorical(values): if isinstance(values, (ABCCategoricalIndex, ABCSeries)): values = values._values categories = CategoricalIndex(values.categories, categories=values.categories, ordered=values.ordered) codes = values.codes else: cat = Categorical(values, ordered=True) categories = cat.categories codes = cat.codes return (codes, categories)
[ "def", "_factorize_from_iterable", "(", "values", ")", ":", "from", "pandas", ".", "indexes", ".", "category", "import", "CategoricalIndex", "if", "(", "not", "is_list_like", "(", "values", ")", ")", ":", "raise", "TypeError", "(", "'Input must be list-like'", "...
factorize an input values into categories and codes .
train
true
52,936
def write_test_data(output_dir): test_data = get_test_data() for (k, v) in test_data.items(): f = open(join(output_dir, k), 'w') f.write('\n'.join(v)) f.close()
[ "def", "write_test_data", "(", "output_dir", ")", ":", "test_data", "=", "get_test_data", "(", ")", "for", "(", "k", ",", "v", ")", "in", "test_data", ".", "items", "(", ")", ":", "f", "=", "open", "(", "join", "(", "output_dir", ",", "k", ")", ","...
write small test data set files to output_dir this function is convenient for interactive testing .
train
false
52,939
def filename_to_hex(filename): names = filename.rsplit(os.path.sep, 2)[(-2):] errmsg = ('Invalid object filename: %s' % filename) assert (len(names) == 2), errmsg (base, rest) = names assert ((len(base) == 2) and (len(rest) == 38)), errmsg hex = (base + rest).encode('ascii') hex_to_sha(hex) return hex
[ "def", "filename_to_hex", "(", "filename", ")", ":", "names", "=", "filename", ".", "rsplit", "(", "os", ".", "path", ".", "sep", ",", "2", ")", "[", "(", "-", "2", ")", ":", "]", "errmsg", "=", "(", "'Invalid object filename: %s'", "%", "filename", ...
takes an object filename and returns its corresponding hex sha .
train
false
52,941
def get_rsa_public_key(n, e): n = int(binascii.hexlify(jwt.utils.base64url_decode(bytes(n))), 16) e = int(binascii.hexlify(jwt.utils.base64url_decode(bytes(e))), 16) pub = RSAPublicNumbers(e, n).public_key(default_backend()) return pub.public_bytes(encoding=serialization.Encoding.PEM, format=serialization.PublicFormat.SubjectPublicKeyInfo)
[ "def", "get_rsa_public_key", "(", "n", ",", "e", ")", ":", "n", "=", "int", "(", "binascii", ".", "hexlify", "(", "jwt", ".", "utils", ".", "base64url_decode", "(", "bytes", "(", "n", ")", ")", ")", ",", "16", ")", "e", "=", "int", "(", "binascii...
retrieve an rsa public key based on a module and exponent as provided by the jwks format .
train
false
52,942
def build_cache_keys(document_locale, document_slug): path_hash = hashlib.md5((u'%s/%s' % (document_locale, document_slug)).encode('utf8')) base_key = ('kumascript:%s:%%s' % path_hash.hexdigest()) etag_key = (base_key % 'etag') modified_key = (base_key % 'modified') body_key = (base_key % 'body') errors_key = (base_key % 'errors') return (etag_key, modified_key, body_key, errors_key)
[ "def", "build_cache_keys", "(", "document_locale", ",", "document_slug", ")", ":", "path_hash", "=", "hashlib", ".", "md5", "(", "(", "u'%s/%s'", "%", "(", "document_locale", ",", "document_slug", ")", ")", ".", "encode", "(", "'utf8'", ")", ")", "base_key",...
build the cache keys used for kumascript .
train
false
52,944
def parse_numeric_escape(source, info, ch, in_set): if (in_set or (ch == '0')): return parse_octal_escape(source, info, [ch], in_set) digits = ch saved_pos = source.pos ch = source.get() if (ch in DIGITS): digits += ch saved_pos = source.pos ch = source.get() if (is_octal(digits) and (ch in OCT_DIGITS)): encoding = (info.flags & _ALL_ENCODINGS) if ((encoding == ASCII) or (encoding == LOCALE)): octal_mask = 255 else: octal_mask = 511 value = (int((digits + ch), 8) & octal_mask) return make_character(info, value) source.pos = saved_pos if info.is_open_group(digits): raise error('cannot refer to an open group', source.string, source.pos) return make_ref_group(info, digits, source.pos)
[ "def", "parse_numeric_escape", "(", "source", ",", "info", ",", "ch", ",", "in_set", ")", ":", "if", "(", "in_set", "or", "(", "ch", "==", "'0'", ")", ")", ":", "return", "parse_octal_escape", "(", "source", ",", "info", ",", "[", "ch", "]", ",", "...
parses a numeric escape sequence .
train
false
52,945
def check_resolve(host): try: dummy = socket.getaddrinfo(host, None) except: return False return True
[ "def", "check_resolve", "(", "host", ")", ":", "try", ":", "dummy", "=", "socket", ".", "getaddrinfo", "(", "host", ",", "None", ")", "except", ":", "return", "False", "return", "True" ]
return true if host resolves .
train
false
52,947
def rand(*shape): return backend.id_srand(np.prod(shape)).reshape(shape)
[ "def", "rand", "(", "*", "shape", ")", ":", "return", "backend", ".", "id_srand", "(", "np", ".", "prod", "(", "shape", ")", ")", ".", "reshape", "(", "shape", ")" ]
generate standard uniform pseudorandom numbers via a very efficient lagged fibonacci method .
train
false