id_within_dataset
int64
1
55.5k
snippet
stringlengths
19
14.2k
tokens
listlengths
6
1.63k
nl
stringlengths
6
352
split_within_dataset
stringclasses
1 value
is_duplicated
bool
2 classes
48,824
def MkExFileWidget(w): msg = Tix.Message(w, relief=Tix.FLAT, width=240, anchor=Tix.N, text='The Tix ExFileSelectBox widget is more user friendly than the Motif style FileSelectBox.') box = Tix.ExFileSelectBox(w, bd=2, relief=Tix.RAISED) msg.pack(side=Tix.TOP, expand=1, fill=Tix.BOTH, padx=3, pady=3) box.pack(side=T...
[ "def", "MkExFileWidget", "(", "w", ")", ":", "msg", "=", "Tix", ".", "Message", "(", "w", ",", "relief", "=", "Tix", ".", "FLAT", ",", "width", "=", "240", ",", "anchor", "=", "Tix", ".", "N", ",", "text", "=", "'The Tix ExFileSelectBox widget is more ...
the tixexfileselectbox widget is more user friendly than the motif style fileselectbox .
train
false
48,826
def extract_id_from_snapshot_name(snap_name):
    """Return a snapshot's id from its name on the backend, or None.

    The id is recovered by turning CONF.snapshot_name_template into a
    regex whose '%s' slot captures the uuid.
    """
    pattern = CONF.snapshot_name_template.replace('%s', '(?P<uuid>.+)')
    matched = re.match(pattern, snap_name)
    if matched:
        return matched.group('uuid')
    return None
[ "def", "extract_id_from_snapshot_name", "(", "snap_name", ")", ":", "regex", "=", "re", ".", "compile", "(", "CONF", ".", "snapshot_name_template", ".", "replace", "(", "'%s'", ",", "'(?P<uuid>.+)'", ")", ")", "match", "=", "regex", ".", "match", "(", "snap_...
return a snapshots id from its name on the backend .
train
false
48,828
def requireAnomalyModel(func): def _decorator(self, *args, **kwargs): if (not (self.getInferenceType() == InferenceType.TemporalAnomaly)): raise RuntimeError('Method required a TemporalAnomaly model.') if (self._getAnomalyClassifier() is None): raise RuntimeError('Model does not support this command. Model m...
[ "def", "requireAnomalyModel", "(", "func", ")", ":", "def", "_decorator", "(", "self", ",", "*", "args", ",", "**", "kwargs", ")", ":", "if", "(", "not", "(", "self", ".", "getInferenceType", "(", ")", "==", "InferenceType", ".", "TemporalAnomaly", ")", ...
decorator for functions that require anomaly models .
train
true
48,829
def header_decode(s):
    """Decode a string encoded with RFC 2045 MIME header "Q" encoding.

    Underscores map to spaces; '=XX' hex escapes are expanded through
    _unquote_match.
    """
    spaced = s.replace('_', ' ')
    return re.sub('=\\w{2}', _unquote_match, spaced)
[ "def", "header_decode", "(", "s", ")", ":", "s", "=", "s", ".", "replace", "(", "'_'", ",", "' '", ")", "return", "re", ".", "sub", "(", "'=\\\\w{2}'", ",", "_unquote_match", ",", "s", ")" ]
decode a string encoded with rfc 2045 mime header q encoding .
train
false
48,830
def gf_TC(f, K):
    """Return the trailing coefficient of ``f``.

    For the zero polynomial (empty coefficient list) the zero element of
    the ground domain ``K`` is returned instead.
    """
    return f[-1] if f else K.zero
[ "def", "gf_TC", "(", "f", ",", "K", ")", ":", "if", "(", "not", "f", ")", ":", "return", "K", ".", "zero", "else", ":", "return", "f", "[", "(", "-", "1", ")", "]" ]
return the trailing coefficient of f .
train
false
48,831
@contextlib.contextmanager
def current_ioloop(io_loop):
    """Context manager making io_loop the current Tornado IOLoop.

    The previously current loop is restored on exit, even if the body
    raises.
    """
    previous = tornado.ioloop.IOLoop.current()
    io_loop.make_current()
    try:
        yield
    finally:
        previous.make_current()
[ "@", "contextlib", ".", "contextmanager", "def", "current_ioloop", "(", "io_loop", ")", ":", "orig_loop", "=", "tornado", ".", "ioloop", ".", "IOLoop", ".", "current", "(", ")", "io_loop", ".", "make_current", "(", ")", "try", ":", "(", "yield", ")", "fi...
a context manager that will set the current ioloop to io_loop for the context .
train
true
48,832
def pytest_fixture_setup(fixturedef, request): kwargs = {} for argname in fixturedef.argnames: fixdef = request._get_active_fixturedef(argname) (result, arg_cache_key, exc) = fixdef.cached_result request._check_scope(argname, request.scope, fixdef.scope) kwargs[argname] = result fixturefunc = fixturedef.func...
[ "def", "pytest_fixture_setup", "(", "fixturedef", ",", "request", ")", ":", "kwargs", "=", "{", "}", "for", "argname", "in", "fixturedef", ".", "argnames", ":", "fixdef", "=", "request", ".", "_get_active_fixturedef", "(", "argname", ")", "(", "result", ",",...
execution of fixture setup .
train
false
48,836
def thisjustin(command=None, username=None): user = (username if username else 'Dave') if (command == 'open the pod bay doors'): print ("I'm sorry, %s. afraid I can't do that." % user) elif (command == 'sing a song'): print "Daisy, Daisy, give me your answer do. I'm half crazy all for the love of ...
[ "def", "thisjustin", "(", "command", "=", "None", ",", "username", "=", "None", ")", ":", "user", "=", "(", "username", "if", "username", "else", "'Dave'", ")", "if", "(", "command", "==", "'open the pod bay doors'", ")", ":", "print", "(", "\"I'm sorry, %...
responds to a few choice hal 9000 commands form 2001: a space odyssey .
train
false
48,837
def _check_broadcast_up_to(arr_from, shape_to, name): shape_from = arr_from.shape if (len(shape_to) >= len(shape_from)): for (t, f) in zip(shape_to[::(-1)], shape_from[::(-1)]): if ((f != 1) and (f != t)): break else: if ((arr_from.size != 1) and (arr_from.shape != shape_to)): arr_from = (np.ones(sh...
[ "def", "_check_broadcast_up_to", "(", "arr_from", ",", "shape_to", ",", "name", ")", ":", "shape_from", "=", "arr_from", ".", "shape", "if", "(", "len", "(", "shape_to", ")", ">=", "len", "(", "shape_from", ")", ")", ":", "for", "(", "t", ",", "f", "...
helper to check that arr_from broadcasts up to shape_to .
train
false
48,838
def format_allowed(allowed):
    """Format the allowed value so that it is GCE compatible.

    ``allowed`` is a ';'-separated string; each section is passed through
    format_allowed_section and the results returned as a list.
    """
    if ';' not in allowed:
        return [format_allowed_section(allowed)]
    return [format_allowed_section(section) for section in allowed.split(';')]
[ "def", "format_allowed", "(", "allowed", ")", ":", "return_value", "=", "[", "]", "if", "(", "allowed", ".", "count", "(", "';'", ")", "==", "0", ")", ":", "return", "[", "format_allowed_section", "(", "allowed", ")", "]", "else", ":", "sections", "=",...
format the allowed value so that it is gce compatible .
train
false
48,841
def _run_hook_from_repo_dir(repo_dir, hook_name, project_dir, context):
    """Run the named hook from inside the repo directory.

    If the hook fails, the generated project_dir is removed and the
    FailedHookException re-raised so generation stops.
    """
    with work_in(repo_dir):
        try:
            run_hook(hook_name, project_dir, context)
        except FailedHookException:
            rmtree(project_dir)
            logger.error(u"Stopping generation because {} hook script didn't exit successfully".format(hook_name))
            raise
[ "def", "_run_hook_from_repo_dir", "(", "repo_dir", ",", "hook_name", ",", "project_dir", ",", "context", ")", ":", "with", "work_in", "(", "repo_dir", ")", ":", "try", ":", "run_hook", "(", "hook_name", ",", "project_dir", ",", "context", ")", "except", "Fai...
run hook from repo directory .
train
true
48,842
def collect_unioned_set_field(block_structure, transformer, merged_field_name, filter_by): for block_key in block_structure.topological_traversal(): result_set = ({block_key} if filter_by(block_key) else set()) for parent in block_structure.get_parents(block_key): result_set |= block_structure.get_transformer_b...
[ "def", "collect_unioned_set_field", "(", "block_structure", ",", "transformer", ",", "merged_field_name", ",", "filter_by", ")", ":", "for", "block_key", "in", "block_structure", ".", "topological_traversal", "(", ")", ":", "result_set", "=", "(", "{", "block_key", ...
recursively union a set field on the block structure .
train
false
48,844
def copy_byte(src, dest):
    """Copy one unsigned byte from file src to file dest.

    Returns the byte's integer value.
    """
    raw = src.read(1)
    dest.write(raw)
    (value,) = unpack('B', raw)
    return value
[ "def", "copy_byte", "(", "src", ",", "dest", ")", ":", "byte", "=", "src", ".", "read", "(", "1", ")", "dest", ".", "write", "(", "byte", ")", "(", "val", ",", ")", "=", "unpack", "(", "'B'", ",", "byte", ")", "return", "val" ]
copy an unsigned byte between files .
train
false
48,846
def is_import(node):
    """Return True if the node is an import statement (plain or from-import)."""
    import_types = (syms.import_name, syms.import_from)
    return node.type in import_types
[ "def", "is_import", "(", "node", ")", ":", "return", "(", "node", ".", "type", "in", "(", "syms", ".", "import_name", ",", "syms", ".", "import_from", ")", ")" ]
returns true if the node is an import statement .
train
false
48,847
def _get_words_from_dataset(dataset):
    """Return the set of all words appearing in a dataset.

    dataset is an iterable of (words, label) pairs where words is either
    a raw string (tokenized here, punctuation excluded) or an
    already-tokenized sequence.
    """
    def tokenize(words):
        # NOTE: basestring implies this snippet targets Python 2.
        if isinstance(words, basestring):
            return word_tokenize(words, include_punc=False)
        return words

    return set(chain.from_iterable(tokenize(words) for (words, _) in dataset))
[ "def", "_get_words_from_dataset", "(", "dataset", ")", ":", "def", "tokenize", "(", "words", ")", ":", "if", "isinstance", "(", "words", ",", "basestring", ")", ":", "return", "word_tokenize", "(", "words", ",", "include_punc", "=", "False", ")", "else", "...
return a set of all words in a dataset .
train
true
48,848
def rm_handlers(app, handlers, key=None):
    """Remove multiple handlers from an application.

    handlers maps handler names to functions; each pair is removed via
    rm_handler.  (iteritems: this snippet targets Python 2.)
    """
    for (name, func) in handlers.iteritems():
        rm_handler(app, name, func, key=key)
[ "def", "rm_handlers", "(", "app", ",", "handlers", ",", "key", "=", "None", ")", ":", "for", "(", "handler_name", ",", "func", ")", "in", "handlers", ".", "iteritems", "(", ")", ":", "rm_handler", "(", "app", ",", "handler_name", ",", "func", ",", "k...
remove multiple handlers from an application .
train
false
48,850
def _emit_problem_submitted_event(kwargs): root_type = get_event_transaction_type() if (not root_type): root_id = get_event_transaction_id() if (not root_id): root_id = create_new_event_transaction_id() set_event_transaction_type(PROBLEM_SUBMITTED_EVENT_TYPE) tracker.emit(unicode(PROBLEM_SUBMITTED_EVENT_TY...
[ "def", "_emit_problem_submitted_event", "(", "kwargs", ")", ":", "root_type", "=", "get_event_transaction_type", "(", ")", "if", "(", "not", "root_type", ")", ":", "root_id", "=", "get_event_transaction_id", "(", ")", "if", "(", "not", "root_id", ")", ":", "ro...
emits a problem submitted event only if there is no current event transaction type .
train
false
48,851
def test_url_incorrect_case_no_index(script, data): result = script.pip('install', '--no-index', '-f', data.find_links, 'upper') egg_folder = ((script.site_packages / 'Upper-1.0-py%s.egg-info') % pyversion) assert (egg_folder not in result.files_created), str(result) egg_folder = ((script.site_packages / 'Upper-2.0...
[ "def", "test_url_incorrect_case_no_index", "(", "script", ",", "data", ")", ":", "result", "=", "script", ".", "pip", "(", "'install'", ",", "'--no-index'", ",", "'-f'", ",", "data", ".", "find_links", ",", "'upper'", ")", "egg_folder", "=", "(", "(", "scr...
same as test_url_req_case_mismatch_no_index .
train
false
48,852
def get_fg_bg(name):
    """Return the (foreground, background) colors to use for name."""
    special = requires_special_home_display(name)
    return (Color.HOME_FG, Color.HOME_BG) if special else (Color.PATH_FG, Color.PATH_BG)
[ "def", "get_fg_bg", "(", "name", ")", ":", "if", "requires_special_home_display", "(", "name", ")", ":", "return", "(", "Color", ".", "HOME_FG", ",", "Color", ".", "HOME_BG", ")", "return", "(", "Color", ".", "PATH_FG", ",", "Color", ".", "PATH_BG", ")" ...
returns the foreground and background color to use for the given name .
train
false
48,853
def _import_status(data, item, repo_name, repo_tag): status = item['status'] try: if ('Downloading from' in status): return elif all(((x in string.hexdigits) for x in status)): data['Image'] = '{0}:{1}'.format(repo_name, repo_tag) data['Id'] = status except (AttributeError, TypeError): pass
[ "def", "_import_status", "(", "data", ",", "item", ",", "repo_name", ",", "repo_tag", ")", ":", "status", "=", "item", "[", "'status'", "]", "try", ":", "if", "(", "'Downloading from'", "in", "status", ")", ":", "return", "elif", "all", "(", "(", "(", ...
process a status update from docker import .
train
true
48,855
def channelfromname(user): cached = userdata_cached(user) if cached: (user, channel_id) = cached else: qs = {'part': 'id,snippet', 'maxResults': 1, 'q': user, 'type': 'channel'} try: userinfo = pafy.call_gdata('search', qs)['items'] if (len(userinfo) > 0): snippet = userinfo[0].get('snippet', {}) ...
[ "def", "channelfromname", "(", "user", ")", ":", "cached", "=", "userdata_cached", "(", "user", ")", "if", "cached", ":", "(", "user", ",", "channel_id", ")", "=", "cached", "else", ":", "qs", "=", "{", "'part'", ":", "'id,snippet'", ",", "'maxResults'",...
query channel id from username .
train
false
48,856
def test_ncr_fit_single_class():
    """Check that fitting on a single class raises a UserWarning."""
    cleaner = NeighbourhoodCleaningRule(random_state=RND_SEED)
    y_one_class = np.zeros((X.shape[0],))
    assert_warns(UserWarning, cleaner.fit, X, y_one_class)
[ "def", "test_ncr_fit_single_class", "(", ")", ":", "ncr", "=", "NeighbourhoodCleaningRule", "(", "random_state", "=", "RND_SEED", ")", "y_single_class", "=", "np", ".", "zeros", "(", "(", "X", ".", "shape", "[", "0", "]", ",", ")", ")", "assert_warns", "("...
test either if an error when there is a single class .
train
false
48,857
def sqlite_version():
    """Return the version string of the SQLite library in use."""
    version = sqlite3.sqlite_version
    return version
[ "def", "sqlite_version", "(", ")", ":", "return", "sqlite3", ".", "sqlite_version" ]
return version of sqlite cli example: .
train
false
48,858
def draw_box(point1, point2, color=colors.lightgreen, border=None, colour=None, **kwargs): (x1, y1) = point1 (x2, y2) = point2 if (colour is not None): color = colour del colour (strokecolor, color) = _stroke_and_fill_colors(color, border) (x1, y1, x2, y2) = (min(x1, x2), min(y1, y2), max(x1, x2), max(y1, y2))...
[ "def", "draw_box", "(", "point1", ",", "point2", ",", "color", "=", "colors", ".", "lightgreen", ",", "border", "=", "None", ",", "colour", "=", "None", ",", "**", "kwargs", ")", ":", "(", "x1", ",", "y1", ")", "=", "point1", "(", "x2", ",", "y2"...
draw a box .
train
false
48,860
def _safely_castable_to_int(dt): int_size = np.dtype(int).itemsize safe = ((np.issubdtype(dt, int) and (dt.itemsize <= int_size)) or (np.issubdtype(dt, np.unsignedinteger) and (dt.itemsize < int_size))) return safe
[ "def", "_safely_castable_to_int", "(", "dt", ")", ":", "int_size", "=", "np", ".", "dtype", "(", "int", ")", ".", "itemsize", "safe", "=", "(", "(", "np", ".", "issubdtype", "(", "dt", ",", "int", ")", "and", "(", "dt", ".", "itemsize", "<=", "int_...
test whether the numpy data type dt can be safely cast to an int .
train
false
48,864
def _check_ori(pick_ori): if ((pick_ori is not None) and (pick_ori != 'normal')): raise RuntimeError(('pick_ori must be None or "normal", not %s' % pick_ori)) return pick_ori
[ "def", "_check_ori", "(", "pick_ori", ")", ":", "if", "(", "(", "pick_ori", "is", "not", "None", ")", "and", "(", "pick_ori", "!=", "'normal'", ")", ")", ":", "raise", "RuntimeError", "(", "(", "'pick_ori must be None or \"normal\", not %s'", "%", "pick_ori", ...
check pick_ori .
train
false
48,867
def get_override_for_ccx(ccx, block, name, default=None): overrides = _get_overrides_for_ccx(ccx) clean_ccx_key = _clean_ccx_key(block.location) block_overrides = overrides.get(clean_ccx_key, {}) block_overrides['course_edit_method'] = None if (name in block_overrides): try: return block.fields[name].from_jso...
[ "def", "get_override_for_ccx", "(", "ccx", ",", "block", ",", "name", ",", "default", "=", "None", ")", ":", "overrides", "=", "_get_overrides_for_ccx", "(", "ccx", ")", "clean_ccx_key", "=", "_clean_ccx_key", "(", "block", ".", "location", ")", "block_overrid...
gets the value of the overridden field for the ccx .
train
false
48,868
def instance_get_all_by_host_and_node(context, host, node, columns_to_join=None):
    """Get all instances belonging to a node.

    Thin delegation to the backing DB implementation (IMPL).
    """
    return IMPL.instance_get_all_by_host_and_node(
        context, host, node, columns_to_join=columns_to_join)
[ "def", "instance_get_all_by_host_and_node", "(", "context", ",", "host", ",", "node", ",", "columns_to_join", "=", "None", ")", ":", "return", "IMPL", ".", "instance_get_all_by_host_and_node", "(", "context", ",", "host", ",", "node", ",", "columns_to_join", "=", ...
get all instances belonging to a node .
train
false
48,870
def TestFlowHelper(flow_urn_or_cls_name, client_mock=None, client_id=None, check_flow_errors=True, token=None, notification_event=None, sync=True, **kwargs): if (client_id or client_mock): client_mock = MockClient(client_id, client_mock, token=token) worker_mock = MockWorker(check_flow_errors=check_flow_errors, tok...
[ "def", "TestFlowHelper", "(", "flow_urn_or_cls_name", ",", "client_mock", "=", "None", ",", "client_id", "=", "None", ",", "check_flow_errors", "=", "True", ",", "token", "=", "None", ",", "notification_event", "=", "None", ",", "sync", "=", "True", ",", "**...
build a full test harness: client - worker + start flow .
train
false
48,871
def isNumber(value):
    """Return True if value can be converted to a float.

    Accepts ints, floats and numeric strings; returns False for
    non-numeric input instead of raising.
    """
    try:
        float(value)
    except (TypeError, ValueError):
        # A bare except here would also swallow KeyboardInterrupt and
        # SystemExit; float() only raises these two for bad input.
        return False
    return True
[ "def", "isNumber", "(", "value", ")", ":", "try", ":", "float", "(", "value", ")", "except", ":", "return", "False", "else", ":", "return", "True" ]
returns true if the given value is a number-like object .
train
false
48,872
def mime_encode_header(line):
    """Code a single header line as quoted-printable.

    Every run matched by mime_header is rewritten as an RFC 2047
    encoded-word using CHARSET and mime_encode; surrounding text is kept
    verbatim.
    """
    encoded = ''
    pos = 0
    while 1:
        found = mime_header.search(line, pos)
        if found is None:
            break
        encoded = ('%s%s%s=?%s?Q?%s?=' % (encoded, line[pos:found.start(0)], found.group(1), CHARSET, mime_encode(found.group(2), 1)))
        pos = found.end(0)
    return encoded + line[pos:]
[ "def", "mime_encode_header", "(", "line", ")", ":", "newline", "=", "''", "pos", "=", "0", "while", "1", ":", "res", "=", "mime_header", ".", "search", "(", "line", ",", "pos", ")", "if", "(", "res", "is", "None", ")", ":", "break", "newline", "=",...
code a single header line as quoted-printable .
train
false
48,875
def get_vm_boot_spec(client_factory, device): config_spec = client_factory.create('ns0:VirtualMachineConfigSpec') boot_disk = client_factory.create('ns0:VirtualMachineBootOptionsBootableDiskDevice') boot_disk.deviceKey = device.key boot_options = client_factory.create('ns0:VirtualMachineBootOptions') boot_options....
[ "def", "get_vm_boot_spec", "(", "client_factory", ",", "device", ")", ":", "config_spec", "=", "client_factory", ".", "create", "(", "'ns0:VirtualMachineConfigSpec'", ")", "boot_disk", "=", "client_factory", ".", "create", "(", "'ns0:VirtualMachineBootOptionsBootableDiskD...
returns updated boot settings for the instance .
train
false
48,876
def get_cls_by_name(name, aliases={}, imp=None, package=None, **kwargs): if (imp is None): imp = importlib.import_module if (not isinstance(name, basestring)): return name name = (aliases.get(name) or name) (module_name, _, cls_name) = name.rpartition('.') if ((not module_name) and package): module_name = pa...
[ "def", "get_cls_by_name", "(", "name", ",", "aliases", "=", "{", "}", ",", "imp", "=", "None", ",", "package", "=", "None", ",", "**", "kwargs", ")", ":", "if", "(", "imp", "is", "None", ")", ":", "imp", "=", "importlib", ".", "import_module", "if"...
get class by name .
train
false
48,879
def pformat(temp, **fmt):
    """Partially format a template string.

    Fields not supplied in fmt are handled by _FormatDict rather than
    raising a KeyError.
    """
    mapping = _FormatDict(fmt)
    return Formatter().vformat(temp, (), mapping)
[ "def", "pformat", "(", "temp", ",", "**", "fmt", ")", ":", "formatter", "=", "Formatter", "(", ")", "mapping", "=", "_FormatDict", "(", "fmt", ")", "return", "formatter", ".", "vformat", "(", "temp", ",", "(", ")", ",", "mapping", ")" ]
partially format a template string .
train
false
48,880
def write_error(request, code, message):
    """Set the response status and write a JSON error body."""
    body = {'error': {'code': code, 'message': message}}
    request.set_status(code)
    request.write(json.dumps(body))
[ "def", "write_error", "(", "request", ",", "code", ",", "message", ")", ":", "error", "=", "{", "'error'", ":", "{", "'code'", ":", "code", ",", "'message'", ":", "message", "}", "}", "request", ".", "set_status", "(", "code", ")", "request", ".", "w...
sets the response headers and body for error messages .
train
false
48,882
def symbol_string(symbol):
    """Return a string describing a key symbol.

    Values below 2**32 are looked up in _key_names (falling back to the
    decimal value); larger values are reported as user keys.
    """
    if symbol >= (1 << 32):
        return 'user_key(%x)' % (symbol >> 32)
    return _key_names.get(symbol, str(symbol))
[ "def", "symbol_string", "(", "symbol", ")", ":", "if", "(", "symbol", "<", "(", "1", "<<", "32", ")", ")", ":", "return", "_key_names", ".", "get", "(", "symbol", ",", "str", "(", "symbol", ")", ")", "else", ":", "return", "(", "'user_key(%x)'", "%...
return a string describing a key symbol .
train
false
48,883
def iter_importers(fullname=''): if fullname.startswith('.'): raise ImportError('Relative module names not supported') if ('.' in fullname): pkg = '.'.join(fullname.split('.')[:(-1)]) if (pkg not in sys.modules): __import__(pkg) path = (getattr(sys.modules[pkg], '__path__', None) or []) else: for import...
[ "def", "iter_importers", "(", "fullname", "=", "''", ")", ":", "if", "fullname", ".", "startswith", "(", "'.'", ")", ":", "raise", "ImportError", "(", "'Relative module names not supported'", ")", "if", "(", "'.'", "in", "fullname", ")", ":", "pkg", "=", "...
yield pep 302 importers for the given module name if fullname contains a .
train
true
48,884
def is_aperiodic(G): if (not G.is_directed()): raise nx.NetworkXError('is_aperiodic not defined for undirected graphs') s = arbitrary_element(G) levels = {s: 0} this_level = [s] g = 0 l = 1 while this_level: next_level = [] for u in this_level: for v in G[u]: if (v in levels): g = gcd(g, ((leve...
[ "def", "is_aperiodic", "(", "G", ")", ":", "if", "(", "not", "G", ".", "is_directed", "(", ")", ")", ":", "raise", "nx", ".", "NetworkXError", "(", "'is_aperiodic not defined for undirected graphs'", ")", "s", "=", "arbitrary_element", "(", "G", ")", "levels...
return true if g is aperiodic .
train
false
48,885
def _get_critical_checks_snippet(request, unit): if (not unit.has_critical_checks()): return None can_review = check_user_permission(request.user, 'review', unit.store.parent) ctx = {'canreview': can_review, 'unit': unit} template = loader.get_template('editor/units/xhr_checks.html') return template.render(conte...
[ "def", "_get_critical_checks_snippet", "(", "request", ",", "unit", ")", ":", "if", "(", "not", "unit", ".", "has_critical_checks", "(", ")", ")", ":", "return", "None", "can_review", "=", "check_user_permission", "(", "request", ".", "user", ",", "'review'", ...
retrieves the critical checks snippet .
train
false
48,886
def delete_orphaned_document_files(): documents_path = os.path.join(settings.MEDIA_ROOT, 'documents') for filename in os.listdir(documents_path): fn = os.path.join(documents_path, filename) if (Document.objects.filter(doc_file__contains=filename).count() == 0): print ('Removing orphan document %s' % fn) try...
[ "def", "delete_orphaned_document_files", "(", ")", ":", "documents_path", "=", "os", ".", "path", ".", "join", "(", "settings", ".", "MEDIA_ROOT", ",", "'documents'", ")", "for", "filename", "in", "os", ".", "listdir", "(", "documents_path", ")", ":", "fn", ...
deletes orphaned files of deleted documents .
train
false
48,887
@treeio_login_required def widget_welcome(request, response_format='html'): trial = False if (getattr(settings, 'HARDTREE_SUBSCRIPTION_USER_LIMIT') == 3): trial = True customization = getattr(settings, 'HARDTREE_SUBSCRIPTION_CUSTOMIZATION', True) return render_to_response('core/widgets/welcome', {'trial': trial, ...
[ "@", "treeio_login_required", "def", "widget_welcome", "(", "request", ",", "response_format", "=", "'html'", ")", ":", "trial", "=", "False", "if", "(", "getattr", "(", "settings", ",", "'HARDTREE_SUBSCRIPTION_USER_LIMIT'", ")", "==", "3", ")", ":", "trial", ...
quick start widget .
train
false
48,888
def preDeploy(site):
    """Hook called prior to deploying the site.

    Default implementation does nothing; override to configure custom
    headers or other pre-deploy behavior.
    """
    return None
[ "def", "preDeploy", "(", "site", ")", ":", "pass" ]
called prior to deploying the site a good time to configure custom headers .
train
false
48,889
def make_createblockdevicedataset_mixin(profiled_api): class Mixin(CreateBlockDeviceDatasetImplementationMixin, TestCase, ): def setUp(self): super(Mixin, self).setUp() if profiled_api: self.api = fakeprofiledloopbackblockdeviceapi_for_test(self, allocation_unit=LOOPBACK_ALLOCATION_UNIT) else: self....
[ "def", "make_createblockdevicedataset_mixin", "(", "profiled_api", ")", ":", "class", "Mixin", "(", "CreateBlockDeviceDatasetImplementationMixin", ",", "TestCase", ",", ")", ":", "def", "setUp", "(", "self", ")", ":", "super", "(", "Mixin", ",", "self", ")", "."...
constructs a base class for tests that verify the implementation of createblockdevicedataset .
train
false
48,891
def auto_sign_file(file_, is_beta=False): addon = file_.version.addon if file_.is_experiment: amo.log(amo.LOG.EXPERIMENT_SIGNED, file_) sign_file(file_, settings.SIGNING_SERVER) elif is_beta: if file_.validation.passed_auto_validation: amo.log(amo.LOG.BETA_SIGNED_VALIDATION_PASSED, file_) else: amo.log...
[ "def", "auto_sign_file", "(", "file_", ",", "is_beta", "=", "False", ")", ":", "addon", "=", "file_", ".", "version", ".", "addon", "if", "file_", ".", "is_experiment", ":", "amo", ".", "log", "(", "amo", ".", "LOG", ".", "EXPERIMENT_SIGNED", ",", "fil...
if the file should be automatically reviewed and signed .
train
false
48,892
def as_tuple(x, N, t=None): try: X = tuple(x) except TypeError: X = ((x,) * N) if ((t is not None) and (not all((isinstance(v, t) for v in X)))): raise TypeError('expected a single value or an iterable of {0}, got {1} instead'.format(t.__name__, x)) if (len(X) != N): raise ValueError('expected a single valu...
[ "def", "as_tuple", "(", "x", ",", "N", ",", "t", "=", "None", ")", ":", "try", ":", "X", "=", "tuple", "(", "x", ")", "except", "TypeError", ":", "X", "=", "(", "(", "x", ",", ")", "*", "N", ")", "if", "(", "(", "t", "is", "not", "None", ...
coerce a value to a tuple of given length .
train
false
48,893
def log_exc(logger):
    """Log the current exception's type, value and traceback via logger.

    Must be called from inside an ``except`` block so sys.exc_info()
    has something to report.
    """
    (exc_type, exc_value, tb) = sys.exc_info()
    logger.info('Exception occured: %s' % exc_type)
    logger.info('Exception value: %s' % exc_value)
    # ' '.join replaces the Python-2-only string.join (removed in Py3);
    # string.join(seq) joined with a single space by default, so the
    # output is unchanged.
    logger.info('Exception Info:\n%s' % ' '.join(traceback.format_list(traceback.extract_tb(tb))))
[ "def", "log_exc", "(", "logger", ")", ":", "(", "t", ",", "v", ",", "tb", ")", "=", "sys", ".", "exc_info", "(", ")", "logger", ".", "info", "(", "(", "'Exception occured: %s'", "%", "t", ")", ")", "logger", ".", "info", "(", "(", "'Exception value...
log an exception .
train
false
48,895
def get_char_description(char):
    """Return a verbose description of a character.

    Known characters come from CHAR_NAMES; anything else gets a generic
    translated "Insert character" label.
    """
    try:
        return CHAR_NAMES[char]
    except KeyError:
        return _(u'Insert character {0}').format(char)
[ "def", "get_char_description", "(", "char", ")", ":", "if", "(", "char", "in", "CHAR_NAMES", ")", ":", "return", "CHAR_NAMES", "[", "char", "]", "else", ":", "return", "_", "(", "u'Insert character {0}'", ")", ".", "format", "(", "char", ")" ]
returns verbose description of a character .
train
false
48,898
def bin2float(min_, max_, nbits): def wrap(function): def wrapped_function(individual, *args, **kargs): nelem = (len(individual) / nbits) decoded = ([0] * nelem) for i in xrange(nelem): gene = int(''.join(map(str, individual[(i * nbits):((i * nbits) + nbits)])), 2) div = ((2 ** nbits) - 1) temp ...
[ "def", "bin2float", "(", "min_", ",", "max_", ",", "nbits", ")", ":", "def", "wrap", "(", "function", ")", ":", "def", "wrapped_function", "(", "individual", ",", "*", "args", ",", "**", "kargs", ")", ":", "nelem", "=", "(", "len", "(", "individual",...
convert a binary array into an array of float where each float is composed of *nbits* and is between *min_* and *max_* and return the result of the decorated function .
train
false
48,899
def _timedatectl():
    """Return the result dict of running timedatectl.

    Raises CommandExecutionError when the command exits non-zero.
    """
    result = __salt__['cmd.run_all'](['timedatectl'], python_shell=False)
    if result['retcode'] != 0:
        raise CommandExecutionError('timedatectl failed: {0}'.format(result['stderr']))
    return result
[ "def", "_timedatectl", "(", ")", ":", "ret", "=", "__salt__", "[", "'cmd.run_all'", "]", "(", "[", "'timedatectl'", "]", ",", "python_shell", "=", "False", ")", "if", "(", "ret", "[", "'retcode'", "]", "!=", "0", ")", ":", "msg", "=", "'timedatectl fai...
get the output of timedatectl .
train
true
48,900
def __invoke(trans, workflow, workflow_run_config, workflow_invocation=None, populate_state=False): if populate_state: modules.populate_module_and_state(trans, workflow, workflow_run_config.param_map, allow_tool_state_corrections=workflow_run_config.allow_tool_state_corrections) invoker = WorkflowInvoker(trans, wor...
[ "def", "__invoke", "(", "trans", ",", "workflow", ",", "workflow_run_config", ",", "workflow_invocation", "=", "None", ",", "populate_state", "=", "False", ")", ":", "if", "populate_state", ":", "modules", ".", "populate_module_and_state", "(", "trans", ",", "wo...
run the supplied workflow in the supplied target_history .
train
false
48,903
def set_vif_host_backend_vhostuser_config(conf, mode, path):
    """Populate conf with host backend details for a vhostuser socket.

    Sets the fixed net/socket types plus the caller-supplied mode and
    unix socket path on the interface config object.
    """
    conf.net_type = 'vhostuser'
    conf.vhostuser_type = 'unix'
    conf.vhostuser_path = path
    conf.vhostuser_mode = mode
[ "def", "set_vif_host_backend_vhostuser_config", "(", "conf", ",", "mode", ",", "path", ")", ":", "conf", ".", "net_type", "=", "'vhostuser'", "conf", ".", "vhostuser_type", "=", "'unix'", "conf", ".", "vhostuser_mode", "=", "mode", "conf", ".", "vhostuser_path",...
populate a libvirtconfigguestinterface instance with host backend details for vhostuser socket .
train
false
48,904
def parse_block_scalar_indent(TokenClass): def callback(lexer, match, context): text = match.group() if (context.block_scalar_indent is None): if (len(text) <= max(context.indent, 0)): context.stack.pop() context.stack.pop() return context.block_scalar_indent = len(text) elif (len(text) < conte...
[ "def", "parse_block_scalar_indent", "(", "TokenClass", ")", ":", "def", "callback", "(", "lexer", ",", "match", ",", "context", ")", ":", "text", "=", "match", ".", "group", "(", ")", "if", "(", "context", ".", "block_scalar_indent", "is", "None", ")", "...
process indentation spaces in a block scalar .
train
true
48,905
def filter_foreign_columns(base_table, columns):
    """Return the columns that belong to base_table."""
    def belongs(column):
        return column.table == base_table
    return filter_list(belongs, columns)
[ "def", "filter_foreign_columns", "(", "base_table", ",", "columns", ")", ":", "return", "filter_list", "(", "(", "lambda", "c", ":", "(", "c", ".", "table", "==", "base_table", ")", ")", ",", "columns", ")" ]
return list of columns that belong to passed table .
train
false
48,907
def stripList(listObj):
    """Convert a sequence of numbers into a space-separated string."""
    return ' '.join(map(str, listObj))
[ "def", "stripList", "(", "listObj", ")", ":", "return", "' '", ".", "join", "(", "(", "str", "(", "i", ")", "for", "i", "in", "listObj", ")", ")" ]
convert a list of numbers to a string of space-separated numbers .
train
false
48,908
def task_install_control_certificates(ca_cert, control_cert, control_key): return sequence([run('mkdir -p /etc/flocker'), run('chmod u=rwX,g=,o= /etc/flocker'), put(path='/etc/flocker/cluster.crt', content=ca_cert.getContent()), put(path='/etc/flocker/control-service.crt', content=control_cert.getContent()), put(path=...
[ "def", "task_install_control_certificates", "(", "ca_cert", ",", "control_cert", ",", "control_key", ")", ":", "return", "sequence", "(", "[", "run", "(", "'mkdir -p /etc/flocker'", ")", ",", "run", "(", "'chmod u=rwX,g=,o= /etc/flocker'", ")", ",", "put", "(", "p...
install certificates and private key required by the control service .
train
false
48,909
def get_all_primitive_params(key):
    """Return [key] followed by one sample of every primitive datatype.

    The ascii sample is blanked out when key == 1.
    """
    samples = ['' if (key == 1 and dt == 'ascii') else get_sample(dt)
               for dt in PRIMITIVE_DATATYPES]
    return [key] + samples
[ "def", "get_all_primitive_params", "(", "key", ")", ":", "params", "=", "[", "key", "]", "for", "datatype", "in", "PRIMITIVE_DATATYPES", ":", "if", "(", "(", "key", "==", "1", ")", "and", "(", "datatype", "==", "'ascii'", ")", ")", ":", "params", ".", ...
simple utility method used to give back a list of all possible primitive data sample types .
train
false
48,910
def propertyx(function): keys = ('fget', 'fset', 'fdel') func_locals = {'doc': function.__doc__} def probe_func(frame, event, arg): if (event == 'return'): locals = frame.f_locals func_locals.update(dict(((k, locals.get(k)) for k in keys))) sys.settrace(None) return probe_func sys.settrace(probe_func) ...
[ "def", "propertyx", "(", "function", ")", ":", "keys", "=", "(", "'fget'", ",", "'fset'", ",", "'fdel'", ")", "func_locals", "=", "{", "'doc'", ":", "function", ".", "__doc__", "}", "def", "probe_func", "(", "frame", ",", "event", ",", "arg", ")", ":...
decorator to easily create properties in classes .
train
true
48,911
def string_range(last):
    """Yield lowercase strings 'a', 'b', ... in length-then-lex order,
    stopping after (and including) last."""
    for width in range(1, len(last) + 1):
        for combo in product(string.ascii_lowercase, repeat=width):
            candidate = ''.join(combo)
            yield candidate
            if candidate == last:
                return
[ "def", "string_range", "(", "last", ")", ":", "for", "k", "in", "range", "(", "len", "(", "last", ")", ")", ":", "for", "x", "in", "product", "(", "string", ".", "ascii_lowercase", ",", "repeat", "=", "(", "k", "+", "1", ")", ")", ":", "result", ...
compute the range of string between "a" and last .
train
true
48,912
def _safe_copy_vdi(session, sr_ref, instance, vdi_to_copy_ref): with _dummy_vm(session, instance, vdi_to_copy_ref) as vm_ref: label = 'snapshot' with snapshot_attached_here(session, instance, vm_ref, label) as vdi_uuids: imported_vhds = session.call_plugin_serialized('workarounds', 'safe_copy_vdis', sr_path=get...
[ "def", "_safe_copy_vdi", "(", "session", ",", "sr_ref", ",", "instance", ",", "vdi_to_copy_ref", ")", ":", "with", "_dummy_vm", "(", "session", ",", "instance", ",", "vdi_to_copy_ref", ")", "as", "vm_ref", ":", "label", "=", "'snapshot'", "with", "snapshot_att...
copy a vdi and return the new vdis reference .
train
false
48,914
@loader_option() def defaultload(loadopt, attr): return loadopt.set_relationship_strategy(attr, None)
[ "@", "loader_option", "(", ")", "def", "defaultload", "(", "loadopt", ",", "attr", ")", ":", "return", "loadopt", ".", "set_relationship_strategy", "(", "attr", ",", "None", ")" ]
indicate an attribute should load using its default loader style .
train
false
48,915
def blacklist(*field_list): return Role(Role.blacklist, field_list)
[ "def", "blacklist", "(", "*", "field_list", ")", ":", "return", "Role", "(", "Role", ".", "blacklist", ",", "field_list", ")" ]
returns a function that operates as a blacklist for the provided list of fields .
train
false
48,916
def dirscan(): logging.info('Scheduled or manual watched folder scan') DirScanner.do.scan()
[ "def", "dirscan", "(", ")", ":", "logging", ".", "info", "(", "'Scheduled or manual watched folder scan'", ")", "DirScanner", ".", "do", ".", "scan", "(", ")" ]
wrapper required for scheduler .
train
false
48,917
def _get_xblock(usage_key, user): store = modulestore() with store.bulk_operations(usage_key.course_key): try: return store.get_item(usage_key, depth=None) except ItemNotFoundError: if (usage_key.category in CREATE_IF_NOT_FOUND): return store.create_item(user.id, usage_key.course_key, usage_key.block_ty...
[ "def", "_get_xblock", "(", "usage_key", ",", "user", ")", ":", "store", "=", "modulestore", "(", ")", "with", "store", ".", "bulk_operations", "(", "usage_key", ".", "course_key", ")", ":", "try", ":", "return", "store", ".", "get_item", "(", "usage_key", ...
returns the xblock for the specified usage key .
train
false
48,918
def diagsvd(s, M, N): part = diag(s) typ = part.dtype.char MorN = len(s) if (MorN == M): return r_[('-1', part, zeros((M, (N - M)), typ))] elif (MorN == N): return r_[(part, zeros(((M - N), N), typ))] else: raise ValueError('Length of s must be M or N.')
[ "def", "diagsvd", "(", "s", ",", "M", ",", "N", ")", ":", "part", "=", "diag", "(", "s", ")", "typ", "=", "part", ".", "dtype", ".", "char", "MorN", "=", "len", "(", "s", ")", "if", "(", "MorN", "==", "M", ")", ":", "return", "r_", "[", "...
construct the sigma matrix in svd from singular values and size m .
train
false
48,920
def align_texts(source_blocks, target_blocks, params=LanguageIndependent): if (len(source_blocks) != len(target_blocks)): raise ValueError('Source and target texts do not have the same number of blocks.') return [align_blocks(source_block, target_block, params) for (source_block, target_block) in zip(source_blocks,...
[ "def", "align_texts", "(", "source_blocks", ",", "target_blocks", ",", "params", "=", "LanguageIndependent", ")", ":", "if", "(", "len", "(", "source_blocks", ")", "!=", "len", "(", "target_blocks", ")", ")", ":", "raise", "ValueError", "(", "'Source and targe...
creates the sentence alignment of two texts .
train
true
48,921
def fastprint(text, show_prefix=False, end='', flush=True): return puts(text=text, show_prefix=show_prefix, end=end, flush=flush)
[ "def", "fastprint", "(", "text", ",", "show_prefix", "=", "False", ",", "end", "=", "''", ",", "flush", "=", "True", ")", ":", "return", "puts", "(", "text", "=", "text", ",", "show_prefix", "=", "show_prefix", ",", "end", "=", "end", ",", "flush", ...
print text immediately .
train
false
48,922
def _get_firstbday(wkday): first = 1 if (wkday == 5): first = 3 elif (wkday == 6): first = 2 return first
[ "def", "_get_firstbday", "(", "wkday", ")", ":", "first", "=", "1", "if", "(", "wkday", "==", "5", ")", ":", "first", "=", "3", "elif", "(", "wkday", "==", "6", ")", ":", "first", "=", "2", "return", "first" ]
wkday is the result of monthrange if its a saturday or sunday .
train
false
48,923
def _get_cpu_thread_policy_constraints(flavor, image_meta): (flavor_policy, image_policy) = _get_flavor_image_meta('cpu_thread_policy', flavor, image_meta) if (flavor_policy in [None, fields.CPUThreadAllocationPolicy.PREFER]): policy = (flavor_policy or image_policy) elif (image_policy and (image_policy != flavor_...
[ "def", "_get_cpu_thread_policy_constraints", "(", "flavor", ",", "image_meta", ")", ":", "(", "flavor_policy", ",", "image_policy", ")", "=", "_get_flavor_image_meta", "(", "'cpu_thread_policy'", ",", "flavor", ",", "image_meta", ")", "if", "(", "flavor_policy", "in...
validate and return the requested cpu thread policy .
train
false
48,924
def _get_xml_iter(xml_source): if (not hasattr(xml_source, 'read')): try: xml_source = xml_source.encode('utf-8') except (AttributeError, UnicodeDecodeError): pass return BytesIO(xml_source) else: try: xml_source.seek(0) except: pass return xml_source
[ "def", "_get_xml_iter", "(", "xml_source", ")", ":", "if", "(", "not", "hasattr", "(", "xml_source", ",", "'read'", ")", ")", ":", "try", ":", "xml_source", "=", "xml_source", ".", "encode", "(", "'utf-8'", ")", "except", "(", "AttributeError", ",", "Uni...
possible inputs: strings .
train
true
48,925
def create_baseline(tag='baseline', config='root'): return __salt__['snapper.create_snapshot'](config=config, snapshot_type='single', description='baseline snapshot', cleanup_algorithm='number', userdata={'baseline_tag': tag})
[ "def", "create_baseline", "(", "tag", "=", "'baseline'", ",", "config", "=", "'root'", ")", ":", "return", "__salt__", "[", "'snapper.create_snapshot'", "]", "(", "config", "=", "config", ",", "snapshot_type", "=", "'single'", ",", "description", "=", "'baseli...
creates a snapshot marked as baseline tag tag name for the baseline config configuration name .
train
true
48,926
@error.context_aware def lv_umount(vg_name, lv_name, mount_loc): error.context('Unmounting the logical volume', logging.info) try: utils.run(('umount /dev/%s/%s' % (vg_name, lv_name))) except error.CmdError as ex: logging.warning(ex) return False return True
[ "@", "error", ".", "context_aware", "def", "lv_umount", "(", "vg_name", ",", "lv_name", ",", "mount_loc", ")", ":", "error", ".", "context", "(", "'Unmounting the logical volume'", ",", "logging", ".", "info", ")", "try", ":", "utils", ".", "run", "(", "("...
unmount a logical volume from a mount location .
train
false
48,927
@_ConfigurableFilter(executable='YUI_COMPRESSOR_EXECUTABLE') def yui_compressor(infile, executable=None): yuicompressor = executable if (not yuicompressor): try: subprocess.call('yui-compressor', stdout=open(os.devnull, 'w'), stderr=open(os.devnull, 'w')) yuicompressor = 'yui-compressor' except Exception: ...
[ "@", "_ConfigurableFilter", "(", "executable", "=", "'YUI_COMPRESSOR_EXECUTABLE'", ")", "def", "yui_compressor", "(", "infile", ",", "executable", "=", "None", ")", ":", "yuicompressor", "=", "executable", "if", "(", "not", "yuicompressor", ")", ":", "try", ":",...
run yui compressor on a file .
train
false
48,929
def async_trigger(hass, config, action): if (CONF_AFTER in config): after = config.get(CONF_AFTER) (hours, minutes, seconds) = (after.hour, after.minute, after.second) else: hours = config.get(CONF_HOURS) minutes = config.get(CONF_MINUTES) seconds = config.get(CONF_SECONDS) @callback def time_automation_l...
[ "def", "async_trigger", "(", "hass", ",", "config", ",", "action", ")", ":", "if", "(", "CONF_AFTER", "in", "config", ")", ":", "after", "=", "config", ".", "get", "(", "CONF_AFTER", ")", "(", "hours", ",", "minutes", ",", "seconds", ")", "=", "(", ...
listen for state changes based on configuration .
train
false
48,930
def test_write_noheader_no_bookend(): out = StringIO() ascii.write(dat, out, Writer=ascii.FixedWidthNoHeader, bookend=False) assert_equal_splitlines(out.getvalue(), '1.2 | "hello" | 1 | a\n2.4 | \'s worlds | 2 | 2\n')
[ "def", "test_write_noheader_no_bookend", "(", ")", ":", "out", "=", "StringIO", "(", ")", "ascii", ".", "write", "(", "dat", ",", "out", ",", "Writer", "=", "ascii", ".", "FixedWidthNoHeader", ",", "bookend", "=", "False", ")", "assert_equal_splitlines", "("...
write a table as a fixed width table with no bookend .
train
false
48,931
def test_duplicate_keywords_2(): initialize() yamlfile = '{\n "model": !obj:pylearn2.models.rbm.GaussianBinaryRBM {\n\n "vis_space" : &vis_space !obj:pylearn2.space.Conv2DSpace {\n "shape" : [32,32],\n "num_channels" : 3\n },\n ...
[ "def", "test_duplicate_keywords_2", "(", ")", ":", "initialize", "(", ")", "yamlfile", "=", "'{\\n \"model\": !obj:pylearn2.models.rbm.GaussianBinaryRBM {\\n\\n \"vis_space\" : &vis_space !obj:pylearn2.space.Conv2DSpace {\\n \"shape\" : [32,32],\\n ...
tests whether duplicate keywords as independent parameters works fine .
train
false
48,932
def deprecate_thing_type(thingTypeName, undoDeprecate=False, region=None, key=None, keyid=None, profile=None): try: conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile) conn.deprecate_thing_type(thingTypeName=thingTypeName, undoDeprecate=undoDeprecate) deprecated = (True if (undoDeprecate is Fa...
[ "def", "deprecate_thing_type", "(", "thingTypeName", ",", "undoDeprecate", "=", "False", ",", "region", "=", "None", ",", "key", "=", "None", ",", "keyid", "=", "None", ",", "profile", "=", "None", ")", ":", "try", ":", "conn", "=", "_get_conn", "(", "...
given a thing type name .
train
false
48,933
def make_fortran_patterns(): kwstr = 'access action advance allocatable allocate apostrophe assign assignment associate asynchronous backspace bind blank blockdata call case character class close common complex contains continue cycle data deallocate decimal delim default dimension direct do dowhile double doublepreci...
[ "def", "make_fortran_patterns", "(", ")", ":", "kwstr", "=", "'access action advance allocatable allocate apostrophe assign assignment associate asynchronous backspace bind blank blockdata call case character class close common complex contains continue cycle data deallocate decimal delim default dime...
strongly inspired from idlelib .
train
false
48,934
def split_virtual_offset(virtual_offset): start = (virtual_offset >> 16) return (start, (virtual_offset ^ (start << 16)))
[ "def", "split_virtual_offset", "(", "virtual_offset", ")", ":", "start", "=", "(", "virtual_offset", ">>", "16", ")", "return", "(", "start", ",", "(", "virtual_offset", "^", "(", "start", "<<", "16", ")", ")", ")" ]
divides a 64-bit bgzf virtual offset into block start & within block offsets .
train
false
48,935
@app.route('/<username>') def user_timeline(username): profile_user = query_db('select * from user where username = ?', [username], one=True) if (profile_user is None): abort(404) followed = False if g.user: followed = (query_db('select 1 from follower where\n follower.who_id = ? and follower.whom_id...
[ "@", "app", ".", "route", "(", "'/<username>'", ")", "def", "user_timeline", "(", "username", ")", ":", "profile_user", "=", "query_db", "(", "'select * from user where username = ?'", ",", "[", "username", "]", ",", "one", "=", "True", ")", "if", "(", "prof...
displays a users tweets .
train
false
48,936
def is_ipv4_addr(ip): parts = ip.split('.') try: return ((len(parts) == 4) and all(((0 <= int(part) < 256) for part in parts))) except: return False
[ "def", "is_ipv4_addr", "(", "ip", ")", ":", "parts", "=", "ip", ".", "split", "(", "'.'", ")", "try", ":", "return", "(", "(", "len", "(", "parts", ")", "==", "4", ")", "and", "all", "(", "(", "(", "0", "<=", "int", "(", "part", ")", "<", "...
simple way to check if ipv4 address .
train
false
48,937
def list_plugins(): plugin_list = os.listdir(PLUGINDIR) ret = [] for plugin in plugin_list: stat_f = os.path.join(PLUGINDIR, plugin) execute_bit = (stat.S_IXUSR & os.stat(stat_f)[stat.ST_MODE]) if execute_bit: ret.append(plugin) return ret
[ "def", "list_plugins", "(", ")", ":", "plugin_list", "=", "os", ".", "listdir", "(", "PLUGINDIR", ")", "ret", "=", "[", "]", "for", "plugin", "in", "plugin_list", ":", "stat_f", "=", "os", ".", "path", ".", "join", "(", "PLUGINDIR", ",", "plugin", ")...
list all the nagios plugins cli example: .
train
true
48,938
def zone_exists(zone, region=None, key=None, keyid=None, profile=None, retry_on_rate_limit=True, rate_limit_retries=5): if (region is None): region = 'universal' conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile) while (rate_limit_retries > 0): try: return bool(conn.get_zone(zone)) excep...
[ "def", "zone_exists", "(", "zone", ",", "region", "=", "None", ",", "key", "=", "None", ",", "keyid", "=", "None", ",", "profile", "=", "None", ",", "retry_on_rate_limit", "=", "True", ",", "rate_limit_retries", "=", "5", ")", ":", "if", "(", "region",...
check for the existence of a route53 hosted zone .
train
false
48,939
def _bem_explain_surface(id_): _rev_dict = dict(((val, key) for (key, val) in _surf_dict.items())) return _rev_dict[id_]
[ "def", "_bem_explain_surface", "(", "id_", ")", ":", "_rev_dict", "=", "dict", "(", "(", "(", "val", ",", "key", ")", "for", "(", "key", ",", "val", ")", "in", "_surf_dict", ".", "items", "(", ")", ")", ")", "return", "_rev_dict", "[", "id_", "]" ]
return a string corresponding to the given surface id .
train
false
48,940
def nativejoin(base, path): return url2pathname(pathjoin(base, path))
[ "def", "nativejoin", "(", "base", ",", "path", ")", ":", "return", "url2pathname", "(", "pathjoin", "(", "base", ",", "path", ")", ")" ]
joins two paths - returning a native file path .
train
false
48,941
def all_pairs_dijkstra_path_length(G, cutoff=None, weight='weight'): length = single_source_dijkstra_path_length for n in G: (yield (n, dict(length(G, n, cutoff=cutoff, weight=weight))))
[ "def", "all_pairs_dijkstra_path_length", "(", "G", ",", "cutoff", "=", "None", ",", "weight", "=", "'weight'", ")", ":", "length", "=", "single_source_dijkstra_path_length", "for", "n", "in", "G", ":", "(", "yield", "(", "n", ",", "dict", "(", "length", "(...
compute shortest path lengths between all nodes in a weighted graph .
train
false
48,942
@frappe.whitelist() def get_contact_number(contact_name, value, key): number = frappe.db.sql((u'select mobile_no, phone from tabContact where name=%s and %s=%s' % (u'%s', frappe.db.escape(key), u'%s')), (contact_name, value)) return ((number and (number[0][0] or number[0][1])) or u'')
[ "@", "frappe", ".", "whitelist", "(", ")", "def", "get_contact_number", "(", "contact_name", ",", "value", ",", "key", ")", ":", "number", "=", "frappe", ".", "db", ".", "sql", "(", "(", "u'select mobile_no, phone from tabContact where name=%s and %s=%s'", "%", ...
returns mobile number of the contact .
train
false
48,943
def _define_nrt_decref(module, atomic_decr): fn_decref = module.get_or_insert_function(incref_decref_ty, name='NRT_decref') calldtor = module.add_function(ir.FunctionType(ir.VoidType(), [_pointer_type]), name='NRT_MemInfo_call_dtor') builder = ir.IRBuilder(fn_decref.append_basic_block()) [ptr] = fn_decref.args is_...
[ "def", "_define_nrt_decref", "(", "module", ",", "atomic_decr", ")", ":", "fn_decref", "=", "module", ".", "get_or_insert_function", "(", "incref_decref_ty", ",", "name", "=", "'NRT_decref'", ")", "calldtor", "=", "module", ".", "add_function", "(", "ir", ".", ...
implement nrt_decref in the module .
train
false
48,944
def GetActiveView(): try: (childFrame, bIsMaximised) = win32ui.GetMainFrame().MDIGetActive() return childFrame.GetActiveView() except win32ui.error: return None
[ "def", "GetActiveView", "(", ")", ":", "try", ":", "(", "childFrame", ",", "bIsMaximised", ")", "=", "win32ui", ".", "GetMainFrame", "(", ")", ".", "MDIGetActive", "(", ")", "return", "childFrame", ".", "GetActiveView", "(", ")", "except", "win32ui", ".", ...
gets the edit control with the focus .
train
false
48,947
def get_folder(fileName): return os.path.dirname(fileName)
[ "def", "get_folder", "(", "fileName", ")", ":", "return", "os", ".", "path", ".", "dirname", "(", "fileName", ")" ]
get the name of the folder containing the file or folder received .
train
false
48,948
def _gen_explain_command(coll, spec, projection, skip, limit, batch_size, options, read_concern): cmd = _gen_find_command(coll, spec, projection, skip, limit, batch_size, options) if read_concern.level: return SON([('explain', cmd), ('readConcern', read_concern.document)]) return SON([('explain', cmd)])
[ "def", "_gen_explain_command", "(", "coll", ",", "spec", ",", "projection", ",", "skip", ",", "limit", ",", "batch_size", ",", "options", ",", "read_concern", ")", ":", "cmd", "=", "_gen_find_command", "(", "coll", ",", "spec", ",", "projection", ",", "ski...
generate an explain command document .
train
true
48,949
@pytest.fixture(scope='module') def remote_resource(cloud_config): remote_uri = 'http://storage.googleapis.com/{}/'.format(cloud_config.storage_bucket) return (lambda path, tmpdir: fetch_gcs_resource((remote_uri + path.strip('/')), tmpdir))
[ "@", "pytest", ".", "fixture", "(", "scope", "=", "'module'", ")", "def", "remote_resource", "(", "cloud_config", ")", ":", "remote_uri", "=", "'http://storage.googleapis.com/{}/'", ".", "format", "(", "cloud_config", ".", "storage_bucket", ")", "return", "(", "...
provides a function that downloads the given resource from cloud storage .
train
false
48,950
def resetVector(x1, x2): size = len(x1) for i in range(size): x2[i] = x1[i]
[ "def", "resetVector", "(", "x1", ",", "x2", ")", ":", "size", "=", "len", "(", "x1", ")", "for", "i", "in", "range", "(", "size", ")", ":", "x2", "[", "i", "]", "=", "x1", "[", "i", "]" ]
copies the contents of vector x1 into vector x2 .
train
true
48,951
def fix_multiple_files(filenames, options, output=None): filenames = find_files(filenames, options.recursive, options.exclude) if (options.jobs > 1): import multiprocessing pool = multiprocessing.Pool(options.jobs) pool.map(_fix_file, [(name, options) for name in filenames]) else: for name in filenames: _...
[ "def", "fix_multiple_files", "(", "filenames", ",", "options", ",", "output", "=", "None", ")", ":", "filenames", "=", "find_files", "(", "filenames", ",", "options", ".", "recursive", ",", "options", ".", "exclude", ")", "if", "(", "options", ".", "jobs",...
fix list of files .
train
true
48,952
def getTemplatesPath(subName=''): return getJoinedPath(getFabmetheusUtilitiesPath('templates'), subName)
[ "def", "getTemplatesPath", "(", "subName", "=", "''", ")", ":", "return", "getJoinedPath", "(", "getFabmetheusUtilitiesPath", "(", "'templates'", ")", ",", "subName", ")" ]
get the templates directory path .
train
false
48,954
def modifiers_string(modifiers): mod_names = [] if (modifiers & MOD_SHIFT): mod_names.append('MOD_SHIFT') if (modifiers & MOD_CTRL): mod_names.append('MOD_CTRL') if (modifiers & MOD_ALT): mod_names.append('MOD_ALT') if (modifiers & MOD_CAPSLOCK): mod_names.append('MOD_CAPSLOCK') if (modifiers & MOD_NUMLOC...
[ "def", "modifiers_string", "(", "modifiers", ")", ":", "mod_names", "=", "[", "]", "if", "(", "modifiers", "&", "MOD_SHIFT", ")", ":", "mod_names", ".", "append", "(", "'MOD_SHIFT'", ")", "if", "(", "modifiers", "&", "MOD_CTRL", ")", ":", "mod_names", "....
return a string describing a set of modifiers .
train
true
48,955
def _handle_Integral(expr, func, order, hint): global y x = func.args[0] f = func.func if (hint == '1st_exact'): sol = expr.doit().subs(y, f(x)) del y elif (hint == '1st_exact_Integral'): sol = Eq(Subs(expr.lhs, y, f(x)), expr.rhs) del y elif (hint == 'nth_linear_constant_coeff_homogeneous'): sol = expr...
[ "def", "_handle_Integral", "(", "expr", ",", "func", ",", "order", ",", "hint", ")", ":", "global", "y", "x", "=", "func", ".", "args", "[", "0", "]", "f", "=", "func", ".", "func", "if", "(", "hint", "==", "'1st_exact'", ")", ":", "sol", "=", ...
converts a solution with integrals in it into an actual solution .
train
false
48,956
def _remove_deactivated(contexts): stack_contexts = tuple([h for h in contexts[0] if h.active]) head = contexts[1] while ((head is not None) and (not head.active)): head = head.old_contexts[1] ctx = head while (ctx is not None): parent = ctx.old_contexts[1] while (parent is not None): if parent.active: ...
[ "def", "_remove_deactivated", "(", "contexts", ")", ":", "stack_contexts", "=", "tuple", "(", "[", "h", "for", "h", "in", "contexts", "[", "0", "]", "if", "h", ".", "active", "]", ")", "head", "=", "contexts", "[", "1", "]", "while", "(", "(", "hea...
remove deactivated handlers from the chain .
train
true
48,957
def migrate_data_dirs(): check_data_writable() vcs = data_dir('vcs') if (os.path.exists(appsettings.GIT_ROOT) and (not os.path.exists(vcs))): shutil.move(appsettings.GIT_ROOT, vcs) whoosh = data_dir('whoosh') if (os.path.exists(appsettings.WHOOSH_INDEX) and (not os.path.exists(whoosh))): shutil.move(appsetting...
[ "def", "migrate_data_dirs", "(", ")", ":", "check_data_writable", "(", ")", "vcs", "=", "data_dir", "(", "'vcs'", ")", "if", "(", "os", ".", "path", ".", "exists", "(", "appsettings", ".", "GIT_ROOT", ")", "and", "(", "not", "os", ".", "path", ".", "...
migrate data directory from old locations to new consolidated data directory .
train
false
48,960
def terminal_len(value): if isinstance(value, bytes): value = value.decode('utf8', 'ignore') return len(value)
[ "def", "terminal_len", "(", "value", ")", ":", "if", "isinstance", "(", "value", ",", "bytes", ")", ":", "value", "=", "value", ".", "decode", "(", "'utf8'", ",", "'ignore'", ")", "return", "len", "(", "value", ")" ]
returns the length of the string it would be when displayed .
train
false
48,961
def libvlc_media_discoverer_localized_name(p_mdis): f = (_Cfunctions.get('libvlc_media_discoverer_localized_name', None) or _Cfunction('libvlc_media_discoverer_localized_name', ((1,),), string_result, ctypes.c_void_p, MediaDiscoverer)) return f(p_mdis)
[ "def", "libvlc_media_discoverer_localized_name", "(", "p_mdis", ")", ":", "f", "=", "(", "_Cfunctions", ".", "get", "(", "'libvlc_media_discoverer_localized_name'", ",", "None", ")", "or", "_Cfunction", "(", "'libvlc_media_discoverer_localized_name'", ",", "(", "(", "...
get media service discover object its localized name .
train
true