id_within_dataset
int64
1
55.5k
snippet
stringlengths
19
14.2k
tokens
listlengths
6
1.63k
nl
stringlengths
6
352
split_within_dataset
stringclasses
1 value
is_duplicated
bool
2 classes
41,868
def send_mail_to_admin(email_subject, email_body): app_id = app_identity_services.get_application_id() body = ('(Sent from %s)\n\n%s' % (app_id, email_body)) email_services.send_mail(feconf.SYSTEM_EMAIL_ADDRESS, feconf.ADMIN_EMAIL_ADDRESS, email_subject, body, body.replace('\n', '<br/>'), bcc_admin=False)
[ "def", "send_mail_to_admin", "(", "email_subject", ",", "email_body", ")", ":", "app_id", "=", "app_identity_services", ".", "get_application_id", "(", ")", "body", "=", "(", "'(Sent from %s)\\n\\n%s'", "%", "(", "app_id", ",", "email_body", ")", ")", "email_servi...
send an email to the admin email address .
train
false
41,869
@verbose def apply_forward_raw(fwd, stc, info, start=None, stop=None, verbose=None): for ch_name in fwd['sol']['row_names']: if (ch_name not in info['ch_names']): raise ValueError(('Channel %s of forward operator not present in info.' % ch_name)) (data, times) = _apply_forward(fwd, stc, start, stop) sfreq = (1.0 / stc.tstep) info = _fill_measurement_info(info, fwd, sfreq) info['projs'] = [] raw = RawArray(data, info) raw.preload = True raw._first_samps = np.array([int(np.round((times[0] * sfreq)))]) raw._last_samps = np.array([((raw.first_samp + raw._data.shape[1]) - 1)]) raw._projector = None raw._update_times() return raw
[ "@", "verbose", "def", "apply_forward_raw", "(", "fwd", ",", "stc", ",", "info", ",", "start", "=", "None", ",", "stop", "=", "None", ",", "verbose", "=", "None", ")", ":", "for", "ch_name", "in", "fwd", "[", "'sol'", "]", "[", "'row_names'", "]", ...
project source space currents to sensor space using a forward operator .
train
false
41,871
def format_currency(number, currency, format=None): return get_i18n().format_currency(number, currency, format)
[ "def", "format_currency", "(", "number", ",", "currency", ",", "format", "=", "None", ")", ":", "return", "get_i18n", "(", ")", ".", "format_currency", "(", "number", ",", "currency", ",", "format", ")" ]
see :meth:i18n .
train
false
41,872
def addClassAdvisor(callback, depth=2): frame = sys._getframe(depth) (kind, module, caller_locals, caller_globals) = getFrameInfo(frame) previousMetaclass = caller_locals.get('__metaclass__') if __python3: defaultMetaclass = caller_globals.get('__metaclass__', type) else: defaultMetaclass = caller_globals.get('__metaclass__', ClassType) def advise(name, bases, cdict): if ('__metaclass__' in cdict): del cdict['__metaclass__'] if (previousMetaclass is None): if bases: meta = determineMetaclass(bases) else: meta = defaultMetaclass elif isClassAdvisor(previousMetaclass): meta = previousMetaclass else: meta = determineMetaclass(bases, previousMetaclass) newClass = meta(name, bases, cdict) return callback(newClass) advise.previousMetaclass = previousMetaclass advise.callback = callback caller_locals['__metaclass__'] = advise
[ "def", "addClassAdvisor", "(", "callback", ",", "depth", "=", "2", ")", ":", "frame", "=", "sys", ".", "_getframe", "(", "depth", ")", "(", "kind", ",", "module", ",", "caller_locals", ",", "caller_globals", ")", "=", "getFrameInfo", "(", "frame", ")", ...
set up callback to be passed the containing class upon creation this function is designed to be called by an "advising" function executed in a class suite .
train
false
41,873
def RequiresCryptography(fn): def CryptographyCheckingWrapper(*args, **kwargs): if (not X509): raise Asn1Error('%s requires cryptography, which is not available', fn) return fn(*args, **kwargs) return CryptographyCheckingWrapper
[ "def", "RequiresCryptography", "(", "fn", ")", ":", "def", "CryptographyCheckingWrapper", "(", "*", "args", ",", "**", "kwargs", ")", ":", "if", "(", "not", "X509", ")", ":", "raise", "Asn1Error", "(", "'%s requires cryptography, which is not available'", ",", "...
decorator to support limited functionality if cryptography is missing .
train
false
41,874
def print_plugins(): pluginlist = list(livestreamer.get_plugins().keys()) pluginlist_formatted = ', '.join(sorted(pluginlist)) if console.json: console.msg_json(pluginlist) else: console.msg('Loaded plugins: {0}', pluginlist_formatted)
[ "def", "print_plugins", "(", ")", ":", "pluginlist", "=", "list", "(", "livestreamer", ".", "get_plugins", "(", ")", ".", "keys", "(", ")", ")", "pluginlist_formatted", "=", "', '", ".", "join", "(", "sorted", "(", "pluginlist", ")", ")", "if", "console"...
outputs a list of all plugins livestreamer has loaded .
train
true
41,875
def get_eip_address_info(addresses=None, allocation_ids=None, region=None, key=None, keyid=None, profile=None): if (type(addresses) == type('string')): addresses = [addresses] if (type(allocation_ids) == type('string')): allocation_ids = [allocation_ids] ret = _get_all_eip_addresses(addresses=addresses, allocation_ids=allocation_ids, region=region, key=key, keyid=keyid, profile=profile) interesting = ['allocation_id', 'association_id', 'domain', 'instance_id', 'network_interface_id', 'network_interface_owner_id', 'public_ip', 'private_ip_address'] return [dict([(x, getattr(address, x)) for x in interesting]) for address in ret]
[ "def", "get_eip_address_info", "(", "addresses", "=", "None", ",", "allocation_ids", "=", "None", ",", "region", "=", "None", ",", "key", "=", "None", ",", "keyid", "=", "None", ",", "profile", "=", "None", ")", ":", "if", "(", "type", "(", "addresses"...
get interesting info about some .
train
true
41,876
def get_auto_conf(agentConfig, check_name): from config import PathNotFound, get_auto_confd_path try: auto_confd_path = get_auto_confd_path() except PathNotFound: log.error("Couldn't find the check auto-configuration folder, no auto configuration will be used.") return None auto_conf_path = os.path.join(auto_confd_path, ('%s.yaml' % check_name)) if (not os.path.exists(auto_conf_path)): log.error(("Couldn't find any auto configuration file for the %s check." % check_name)) return None try: auto_conf = check_yaml(auto_conf_path) except Exception as e: log.error(('Enable to load the auto-config, yaml file.Auto-config will not work for this check.\n%s' % str(e))) return None return auto_conf
[ "def", "get_auto_conf", "(", "agentConfig", ",", "check_name", ")", ":", "from", "config", "import", "PathNotFound", ",", "get_auto_confd_path", "try", ":", "auto_confd_path", "=", "get_auto_confd_path", "(", ")", "except", "PathNotFound", ":", "log", ".", "error"...
return the yaml auto_config dict for a check name .
train
false
41,878
def _get_table_names(metadata, patterns): if patterns: table_names = set() for pattern in patterns: if (('.' in pattern) or ('/' in pattern)): (_, filename) = os.path.split(pattern) (table_name, _) = os.path.splitext(filename) pattern = table_name table_names.update(fnmatch.filter(metadata.tables.keys(), pattern)) else: table_names = metadata.tables.keys() return list(table_names)
[ "def", "_get_table_names", "(", "metadata", ",", "patterns", ")", ":", "if", "patterns", ":", "table_names", "=", "set", "(", ")", "for", "pattern", "in", "patterns", ":", "if", "(", "(", "'.'", "in", "pattern", ")", "or", "(", "'/'", "in", "pattern", ...
returns a list of table names from the given metadata .
train
false
41,879
def init_interactive_shell(d): readline.set_completer(RainbowCompleter(d).complete) readline.parse_and_bind('set skip-completed-text on') if ('libedit' in readline.__doc__): readline.parse_and_bind('bind ^I rl_complete') else: readline.parse_and_bind('tab: complete')
[ "def", "init_interactive_shell", "(", "d", ")", ":", "readline", ".", "set_completer", "(", "RainbowCompleter", "(", "d", ")", ".", "complete", ")", "readline", ".", "parse_and_bind", "(", "'set skip-completed-text on'", ")", "if", "(", "'libedit'", "in", "readl...
init the rainbow shell .
train
false
41,880
def engines(opts, functions, runners, proxy=None): pack = {'__salt__': functions, '__runners__': runners, '__proxy__': proxy} return LazyLoader(_module_dirs(opts, 'engines'), opts, tag='engines', pack=pack)
[ "def", "engines", "(", "opts", ",", "functions", ",", "runners", ",", "proxy", "=", "None", ")", ":", "pack", "=", "{", "'__salt__'", ":", "functions", ",", "'__runners__'", ":", "runners", ",", "'__proxy__'", ":", "proxy", "}", "return", "LazyLoader", "...
return the master services plugins .
train
true
41,882
def any(*validators): def validate_any(fields): errors = {} for validator in validators: validation_errors = validator(fields) if (not validation_errors): return errors.update(validation_errors) return errors validate_any.__doc__ = ' or '.join((validator.__doc__ for validator in validators)) return validate_any
[ "def", "any", "(", "*", "validators", ")", ":", "def", "validate_any", "(", "fields", ")", ":", "errors", "=", "{", "}", "for", "validator", "in", "validators", ":", "validation_errors", "=", "validator", "(", "fields", ")", "if", "(", "not", "validation...
if any of the specified validators pass the validation succeeds .
train
true
41,883
def rollback(): connection._rollback() set_clean()
[ "def", "rollback", "(", ")", ":", "connection", ".", "_rollback", "(", ")", "set_clean", "(", ")" ]
rollbacks the configuration .
train
false
41,884
def load_from_directory(list_name): data = {} dir_path = os.path.join(SECURITY_LISTS_DIR, list_name) for kd_name in listdir(dir_path): kd = datetime.strptime(kd_name, DATE_FORMAT).replace(tzinfo=pytz.utc) data[kd] = {} kd_path = os.path.join(dir_path, kd_name) for ld_name in listdir(kd_path): ld = datetime.strptime(ld_name, DATE_FORMAT).replace(tzinfo=pytz.utc) data[kd][ld] = {} ld_path = os.path.join(kd_path, ld_name) for fname in listdir(ld_path): fpath = os.path.join(ld_path, fname) with open(fpath) as f: symbols = f.read().splitlines() data[kd][ld][fname] = symbols return data
[ "def", "load_from_directory", "(", "list_name", ")", ":", "data", "=", "{", "}", "dir_path", "=", "os", ".", "path", ".", "join", "(", "SECURITY_LISTS_DIR", ",", "list_name", ")", "for", "kd_name", "in", "listdir", "(", "dir_path", ")", ":", "kd", "=", ...
to resolve the symbol in the leveraged_etf list .
train
true
41,885
def get_discovery_srid36s(): srs = Subreddit._by_name(g.live_config['discovery_srs']) return [sr._id36 for sr in srs.itervalues()]
[ "def", "get_discovery_srid36s", "(", ")", ":", "srs", "=", "Subreddit", ".", "_by_name", "(", "g", ".", "live_config", "[", "'discovery_srs'", "]", ")", "return", "[", "sr", ".", "_id36", "for", "sr", "in", "srs", ".", "itervalues", "(", ")", "]" ]
get list of srs that help people discover other srs .
train
false
41,887
def _find_file_meta(metadata, bucket_name, saltenv, path): env_meta = (metadata[saltenv] if (saltenv in metadata) else {}) bucket_meta = (env_meta[bucket_name] if (bucket_name in env_meta) else {}) files_meta = list(list(filter((lambda k: ('Key' in k)), bucket_meta))) for item_meta in files_meta: if (('Key' in item_meta) and (item_meta['Key'] == path)): try: item_meta['ETag'] = item_meta['ETag'].strip('"') except KeyError: pass return item_meta
[ "def", "_find_file_meta", "(", "metadata", ",", "bucket_name", ",", "saltenv", ",", "path", ")", ":", "env_meta", "=", "(", "metadata", "[", "saltenv", "]", "if", "(", "saltenv", "in", "metadata", ")", "else", "{", "}", ")", "bucket_meta", "=", "(", "e...
looks for a files metadata in the s3 bucket cache file .
train
true
41,888
def list_opts(): ext_plugins = plugins.TempestTestPluginManager() opt_list = [(g, o) for (g, o) in _opts] opt_list.extend(ext_plugins.get_plugin_options_list()) return opt_list
[ "def", "list_opts", "(", ")", ":", "ext_plugins", "=", "plugins", ".", "TempestTestPluginManager", "(", ")", "opt_list", "=", "[", "(", "g", ",", "o", ")", "for", "(", "g", ",", "o", ")", "in", "_opts", "]", "opt_list", ".", "extend", "(", "ext_plugi...
return a list of oslo .
train
false
41,889
def add_multi_constructor(tag_prefix, multi_constructor, Loader=Loader): Loader.add_multi_constructor(tag_prefix, multi_constructor)
[ "def", "add_multi_constructor", "(", "tag_prefix", ",", "multi_constructor", ",", "Loader", "=", "Loader", ")", ":", "Loader", ".", "add_multi_constructor", "(", "tag_prefix", ",", "multi_constructor", ")" ]
add a multi-constructor for the given tag prefix .
train
false
41,891
def unprotected_ids(doc, options): identifiedElements = findElementsWithId(doc.documentElement) if (not (options.protect_ids_noninkscape or options.protect_ids_list or options.protect_ids_prefix)): return identifiedElements if options.protect_ids_list: protect_ids_list = options.protect_ids_list.split(',') if options.protect_ids_prefix: protect_ids_prefixes = options.protect_ids_prefix.split(',') for id in identifiedElements.keys(): protected = False if (options.protect_ids_noninkscape and (not id[(-1)].isdigit())): protected = True if (options.protect_ids_list and (id in protect_ids_list)): protected = True if options.protect_ids_prefix: for prefix in protect_ids_prefixes: if id.startswith(prefix): protected = True if protected: del identifiedElements[id] return identifiedElements
[ "def", "unprotected_ids", "(", "doc", ",", "options", ")", ":", "identifiedElements", "=", "findElementsWithId", "(", "doc", ".", "documentElement", ")", "if", "(", "not", "(", "options", ".", "protect_ids_noninkscape", "or", "options", ".", "protect_ids_list", ...
returns a list of unprotected ids within the document doc .
train
true
41,893
def getNewRepository(): return ExportRepository()
[ "def", "getNewRepository", "(", ")", ":", "return", "ExportRepository", "(", ")" ]
get new repository .
train
false
41,894
def for_each_ref_basename(refs, git=git): out = git.for_each_ref(refs, format=u'%(refname)')[STDOUT] output = out.splitlines() non_heads = [x for x in output if (not x.endswith(u'/HEAD'))] return list(map((lambda x: x[(len(refs) + 1):]), non_heads))
[ "def", "for_each_ref_basename", "(", "refs", ",", "git", "=", "git", ")", ":", "out", "=", "git", ".", "for_each_ref", "(", "refs", ",", "format", "=", "u'%(refname)'", ")", "[", "STDOUT", "]", "output", "=", "out", ".", "splitlines", "(", ")", "non_he...
return refs starting with refs .
train
false
41,895
def zero_node(name): ret = {'name': name, 'changes': {}, 'result': None, 'comment': ''} if __opts__['test']: ret['comment'] = 'Zeroing local node statistics' return ret __salt__['trafficserver.zero_node']() ret['result'] = True ret['comment'] = 'Zeroed local node statistics' return ret
[ "def", "zero_node", "(", "name", ")", ":", "ret", "=", "{", "'name'", ":", "name", ",", "'changes'", ":", "{", "}", ",", "'result'", ":", "None", ",", "'comment'", ":", "''", "}", "if", "__opts__", "[", "'test'", "]", ":", "ret", "[", "'comment'", ...
reset performance statistics to zero on the local node .
train
true
41,896
def dup_ext_factor(f, K): (n, lc) = (dup_degree(f), dup_LC(f, K)) f = dup_monic(f, K) if (n <= 0): return (lc, []) if (n == 1): return (lc, [(f, 1)]) (f, F) = (dup_sqf_part(f, K), f) (s, g, r) = dup_sqf_norm(f, K) factors = dup_factor_list_include(r, K.dom) if (len(factors) == 1): return (lc, [(f, (n // dup_degree(f)))]) H = (s * K.unit) for (i, (factor, _)) in enumerate(factors): h = dup_convert(factor, K.dom, K) (h, _, g) = dup_inner_gcd(h, g, K) h = dup_shift(h, H, K) factors[i] = h factors = dup_trial_division(F, factors, K) return (lc, factors)
[ "def", "dup_ext_factor", "(", "f", ",", "K", ")", ":", "(", "n", ",", "lc", ")", "=", "(", "dup_degree", "(", "f", ")", ",", "dup_LC", "(", "f", ",", "K", ")", ")", "f", "=", "dup_monic", "(", "f", ",", "K", ")", "if", "(", "n", "<=", "0"...
factor univariate polynomials over algebraic number fields .
train
false
41,897
def expm1(x): return Expm1()(x)
[ "def", "expm1", "(", "x", ")", ":", "return", "Expm1", "(", ")", "(", "x", ")" ]
elementwise exponential minus one function .
train
false
41,898
def sprint_timeline(bugs, sprint): timeline = [] history = get_history(bugs, sprint) for (when, bug, who, removed, added) in history: reason = 'NO COMMENT' for comment in bug.get('comments', []): if ((comment['time'] == when) and (comment['creator'] == who)): reason = comment['text'] break timeline.append((when, bug['id'], who, removed, added, reason)) timeline.sort(key=(lambda item: item[0])) for mem in timeline: print ('%s: %s: %s' % (mem[0], mem[1], mem[2])) print (' %s -> %s' % ((mem[3] if mem[3] else 'unassigned'), mem[4])) print wrap(mem[5]) print ''
[ "def", "sprint_timeline", "(", "bugs", ",", "sprint", ")", ":", "timeline", "=", "[", "]", "history", "=", "get_history", "(", "bugs", ",", "sprint", ")", "for", "(", "when", ",", "bug", ",", "who", ",", "removed", ",", "added", ")", "in", "history",...
print timeline block .
train
false
41,899
def _calculate_from_transitions(trans_probs): transitions = dict() for (from_state, to_state) in trans_probs: try: transitions[from_state].append(to_state) except KeyError: transitions[from_state] = [to_state] return transitions
[ "def", "_calculate_from_transitions", "(", "trans_probs", ")", ":", "transitions", "=", "dict", "(", ")", "for", "(", "from_state", ",", "to_state", ")", "in", "trans_probs", ":", "try", ":", "transitions", "[", "from_state", "]", ".", "append", "(", "to_sta...
calculate which from transitions are allowed for each state this looks through all of the trans_probs .
train
false
41,900
def libvlc_audio_filter_list_get(p_instance): f = (_Cfunctions.get('libvlc_audio_filter_list_get', None) or _Cfunction('libvlc_audio_filter_list_get', ((1,),), None, ctypes.POINTER(ModuleDescription), Instance)) return f(p_instance)
[ "def", "libvlc_audio_filter_list_get", "(", "p_instance", ")", ":", "f", "=", "(", "_Cfunctions", ".", "get", "(", "'libvlc_audio_filter_list_get'", ",", "None", ")", "or", "_Cfunction", "(", "'libvlc_audio_filter_list_get'", ",", "(", "(", "1", ",", ")", ",", ...
returns a list of audio filters that are available .
train
true
41,901
@frappe.whitelist() def get_stock_balance(item_code, warehouse, posting_date=None, posting_time=None, with_valuation_rate=False): from erpnext.stock.stock_ledger import get_previous_sle if (not posting_date): posting_date = nowdate() if (not posting_time): posting_time = nowtime() last_entry = get_previous_sle({u'item_code': item_code, u'warehouse': warehouse, u'posting_date': posting_date, u'posting_time': posting_time}) if with_valuation_rate: return ((last_entry.qty_after_transaction, last_entry.valuation_rate) if last_entry else (0.0, 0.0)) else: return (last_entry.qty_after_transaction or 0.0)
[ "@", "frappe", ".", "whitelist", "(", ")", "def", "get_stock_balance", "(", "item_code", ",", "warehouse", ",", "posting_date", "=", "None", ",", "posting_time", "=", "None", ",", "with_valuation_rate", "=", "False", ")", ":", "from", "erpnext", ".", "stock"...
returns stock balance quantity at given warehouse on given posting date or current date .
train
false
41,902
def loop_lifting(func_ir, typingctx, targetctx, flags, locals): blocks = func_ir.blocks.copy() cfg = compute_cfg_from_blocks(blocks) loopinfos = _loop_lift_get_candidate_infos(cfg, blocks, func_ir.variable_lifetime.livemap) loops = [] for loopinfo in loopinfos: lifted = _loop_lift_modify_blocks(func_ir, loopinfo, blocks, typingctx, targetctx, flags, locals) loops.append(lifted) main = func_ir.derive(blocks=blocks) return (main, loops)
[ "def", "loop_lifting", "(", "func_ir", ",", "typingctx", ",", "targetctx", ",", "flags", ",", "locals", ")", ":", "blocks", "=", "func_ir", ".", "blocks", ".", "copy", "(", ")", "cfg", "=", "compute_cfg_from_blocks", "(", "blocks", ")", "loopinfos", "=", ...
loop lifting transformation .
train
false
41,903
def make_field_set_iterable(headers, fields, mdata): return product(*[make_field_value_list(headers, f, mdata) for f in fields])
[ "def", "make_field_set_iterable", "(", "headers", ",", "fields", ",", "mdata", ")", ":", "return", "product", "(", "*", "[", "make_field_value_list", "(", "headers", ",", "f", ",", "mdata", ")", "for", "f", "in", "fields", "]", ")" ]
return product of lists of unique values in order of the passed fields .
train
false
41,905
def test_summarize_clusters(): clu = (np.random.random([1, 20484]), [(np.array([0]), np.array([0, 2, 4]))], np.array([0.02, 0.1]), np.array([12, (-14), 30])) stc_sum = summarize_clusters_stc(clu) assert_true((stc_sum.data.shape[1] == 2)) clu[2][0] = 0.3 assert_raises(RuntimeError, summarize_clusters_stc, clu)
[ "def", "test_summarize_clusters", "(", ")", ":", "clu", "=", "(", "np", ".", "random", ".", "random", "(", "[", "1", ",", "20484", "]", ")", ",", "[", "(", "np", ".", "array", "(", "[", "0", "]", ")", ",", "np", ".", "array", "(", "[", "0", ...
test cluster summary stcs .
train
false
41,907
def sh_chebyu(n, monic=False): base = sh_jacobi(n, 2.0, 1.5, monic=monic) if monic: return base factor = (4 ** n) base._scale(factor) return base
[ "def", "sh_chebyu", "(", "n", ",", "monic", "=", "False", ")", ":", "base", "=", "sh_jacobi", "(", "n", ",", "2.0", ",", "1.5", ",", "monic", "=", "monic", ")", "if", "monic", ":", "return", "base", "factor", "=", "(", "4", "**", "n", ")", "bas...
shifted chebyshev polynomial of the second kind .
train
false
41,908
def upgrade(refresh=True): ret = {'changes': {}, 'result': True, 'comment': ''} old = list_pkgs() if salt.utils.is_true(refresh): refresh_db() result = _call_brew('brew upgrade', failhard=False) __context__.pop('pkg.list_pkgs', None) new = list_pkgs() ret = salt.utils.compare_dicts(old, new) if (result['retcode'] != 0): raise CommandExecutionError('Problem encountered upgrading packages', info={'changes': ret, 'result': result}) return ret
[ "def", "upgrade", "(", "refresh", "=", "True", ")", ":", "ret", "=", "{", "'changes'", ":", "{", "}", ",", "'result'", ":", "True", ",", "'comment'", ":", "''", "}", "old", "=", "list_pkgs", "(", ")", "if", "salt", ".", "utils", ".", "is_true", "...
upgrade all packages to the latest possible version .
train
true
41,909
@with_open_mode('rb') @with_sizes('medium') def read_seek_bytewise(f): f.seek(0) while f.read(1): f.seek(1, 1)
[ "@", "with_open_mode", "(", "'rb'", ")", "@", "with_sizes", "(", "'medium'", ")", "def", "read_seek_bytewise", "(", "f", ")", ":", "f", ".", "seek", "(", "0", ")", "while", "f", ".", "read", "(", "1", ")", ":", "f", ".", "seek", "(", "1", ",", ...
alternate read & seek one unit .
train
false
41,911
def remove_credit_requirement_status(username, course_key, req_namespace, req_name): req_to_remove = CreditRequirement.get_course_requirements(course_key, namespace=req_namespace, name=req_name) if (not req_to_remove): log.error(u'Could not remove credit requirement in course "%s" with namespace "%s" and name "%s" because the requirement does not exist. ', unicode(course_key), req_namespace, req_name) return CreditRequirementStatus.remove_requirement_status(username, req_to_remove)
[ "def", "remove_credit_requirement_status", "(", "username", ",", "course_key", ",", "req_namespace", ",", "req_name", ")", ":", "req_to_remove", "=", "CreditRequirement", ".", "get_course_requirements", "(", "course_key", ",", "namespace", "=", "req_namespace", ",", "...
remove the users requirement status .
train
false
41,912
def _retcode_quiet(cmd, cwd=None, stdin=None, runas=None, shell=DEFAULT_SHELL, python_shell=False, env=None, clean_env=False, template=None, umask=None, output_loglevel='quiet', log_callback=None, timeout=None, reset_system_locale=True, ignore_retcode=False, saltenv='base', use_vt=False, password=None, **kwargs): return retcode(cmd, cwd=cwd, stdin=stdin, runas=runas, shell=shell, python_shell=python_shell, env=env, clean_env=clean_env, template=template, umask=umask, output_loglevel=output_loglevel, log_callback=log_callback, timeout=timeout, reset_system_locale=reset_system_locale, ignore_retcode=ignore_retcode, saltenv=saltenv, use_vt=use_vt, password=password, **kwargs)
[ "def", "_retcode_quiet", "(", "cmd", ",", "cwd", "=", "None", ",", "stdin", "=", "None", ",", "runas", "=", "None", ",", "shell", "=", "DEFAULT_SHELL", ",", "python_shell", "=", "False", ",", "env", "=", "None", ",", "clean_env", "=", "False", ",", "...
helper for running commands quietly for minion startup .
train
false
41,913
def remote_exception(exc, tb): if (type(exc) in exceptions): typ = exceptions[type(exc)] return typ(exc, tb) else: try: typ = type(exc.__class__.__name__, (RemoteException, type(exc)), {'exception_type': type(exc)}) exceptions[type(exc)] = typ return typ(exc, tb) except TypeError: return exc
[ "def", "remote_exception", "(", "exc", ",", "tb", ")", ":", "if", "(", "type", "(", "exc", ")", "in", "exceptions", ")", ":", "typ", "=", "exceptions", "[", "type", "(", "exc", ")", "]", "return", "typ", "(", "exc", ",", "tb", ")", "else", ":", ...
metaclass that wraps exception type in remoteexception .
train
true
41,914
def table_add_column(table, name, col_type, session, default=None): if isinstance(table, basestring): table = table_schema(table, session) if (name in table_columns(table, session)): return if (not isinstance(col_type, TypeEngine)): col_type = col_type() type_string = session.bind.engine.dialect.type_compiler.process(col_type) statement = (u'ALTER TABLE %s ADD %s %s' % (table.name, name, type_string)) session.execute(statement) if (default is not None): table = table_schema(table.name, session) if (not isinstance(default, (ColumnDefault, Sequence))): default = ColumnDefault(default) default._set_parent(getattr(table.c, name)) statement = table.update().values({name: default.execute(bind=session.bind)}) session.execute(statement)
[ "def", "table_add_column", "(", "table", ",", "name", ",", "col_type", ",", "session", ",", "default", "=", "None", ")", ":", "if", "isinstance", "(", "table", ",", "basestring", ")", ":", "table", "=", "table_schema", "(", "table", ",", "session", ")", ...
adds a column to a table .
train
false
41,916
def _get_role_arn(name, region=None, key=None, keyid=None, profile=None): if name.startswith('arn:aws:iam:'): return name account_id = __salt__['boto_iam.get_account_id'](region=region, key=key, keyid=keyid, profile=profile) return 'arn:aws:iam::{0}:role/{1}'.format(account_id, name)
[ "def", "_get_role_arn", "(", "name", ",", "region", "=", "None", ",", "key", "=", "None", ",", "keyid", "=", "None", ",", "profile", "=", "None", ")", ":", "if", "name", ".", "startswith", "(", "'arn:aws:iam:'", ")", ":", "return", "name", "account_id"...
helper function to get an arn if name does not look like an arn .
train
true
41,917
def parse_if_int(s): try: return int(s) except ValueError: return s
[ "def", "parse_if_int", "(", "s", ")", ":", "try", ":", "return", "int", "(", "s", ")", "except", "ValueError", ":", "return", "s" ]
try to parse a string to an int .
train
false
41,918
def _read_lay(fname): with open(fname) as f: box = None (names, pos, ids) = ([], [], []) for line in f: splits = line.split() if (len(splits) == 7): (cid, x, y, dx, dy, chkind, nb) = splits name = ((chkind + ' ') + nb) else: (cid, x, y, dx, dy, name) = splits pos.append(np.array([x, y, dx, dy], dtype=np.float)) names.append(name) ids.append(int(cid)) pos = np.array(pos) return (box, pos, names, ids)
[ "def", "_read_lay", "(", "fname", ")", ":", "with", "open", "(", "fname", ")", "as", "f", ":", "box", "=", "None", "(", "names", ",", "pos", ",", "ids", ")", "=", "(", "[", "]", ",", "[", "]", ",", "[", "]", ")", "for", "line", "in", "f", ...
aux function .
train
false
41,919
def single_source_dijkstra_path(G, source, cutoff=None, weight='weight'): return multi_source_dijkstra_path(G, {source}, cutoff=cutoff, weight=weight)
[ "def", "single_source_dijkstra_path", "(", "G", ",", "source", ",", "cutoff", "=", "None", ",", "weight", "=", "'weight'", ")", ":", "return", "multi_source_dijkstra_path", "(", "G", ",", "{", "source", "}", ",", "cutoff", "=", "cutoff", ",", "weight", "="...
find shortest weighted paths in g from a source node .
train
false
41,920
@nodes_or_number(0) def empty_graph(n=0, create_using=None): if (create_using is None): G = nx.Graph() else: G = create_using G.clear() (n_name, nodes) = n G.name = ('empty_graph(%s)' % (n_name,)) G.add_nodes_from(nodes) return G
[ "@", "nodes_or_number", "(", "0", ")", "def", "empty_graph", "(", "n", "=", "0", ",", "create_using", "=", "None", ")", ":", "if", "(", "create_using", "is", "None", ")", ":", "G", "=", "nx", ".", "Graph", "(", ")", "else", ":", "G", "=", "create...
return the empty graph with n nodes and zero edges .
train
false
41,921
def _smallest_size_at_least(height, width, smallest_side): smallest_side = tf.convert_to_tensor(smallest_side, dtype=tf.int32) height = tf.to_float(height) width = tf.to_float(width) smallest_side = tf.to_float(smallest_side) scale = tf.cond(tf.greater(height, width), (lambda : (smallest_side / width)), (lambda : (smallest_side / height))) new_height = tf.to_int32((height * scale)) new_width = tf.to_int32((width * scale)) return (new_height, new_width)
[ "def", "_smallest_size_at_least", "(", "height", ",", "width", ",", "smallest_side", ")", ":", "smallest_side", "=", "tf", ".", "convert_to_tensor", "(", "smallest_side", ",", "dtype", "=", "tf", ".", "int32", ")", "height", "=", "tf", ".", "to_float", "(", ...
computes new shape with the smallest side equal to smallest_side .
train
true
41,922
def _disconnect_volume(*args, **kwargs): pass
[ "def", "_disconnect_volume", "(", "*", "args", ",", "**", "kwargs", ")", ":", "pass" ]
return without doing anything .
train
false
41,923
def test_ident(i): return ('a' + i).isidentifier()
[ "def", "test_ident", "(", "i", ")", ":", "return", "(", "'a'", "+", "i", ")", ".", "isidentifier", "(", ")" ]
is the unicode string valid in a python 3 identifer .
train
false
41,924
def non_token_view_using_request_processor(request): context = RequestContext(request, processors=[csrf]) template = Template('') return HttpResponse(template.render(context))
[ "def", "non_token_view_using_request_processor", "(", "request", ")", ":", "context", "=", "RequestContext", "(", "request", ",", "processors", "=", "[", "csrf", "]", ")", "template", "=", "Template", "(", "''", ")", "return", "HttpResponse", "(", "template", ...
a view that doesnt use the token .
train
false
41,925
@pytest.fixture def template_name(): return 'cookiedozer_load'
[ "@", "pytest", ".", "fixture", "def", "template_name", "(", ")", ":", "return", "'cookiedozer_load'" ]
fixture to return a valid template_name .
train
false
41,926
def _GetFilesRecursively(path, excl_regexps=['#.*', '\\..+', '.*~$', '.*\\.pyc$', '.*_test.py$', '.*_pkg.py$']): entries = os.listdir(path) dirs = [e for e in entries if (os.path.isdir(os.path.join(path, e)) and _FileMatches(e, excl_regexps))] files = [os.path.join(path, e) for e in entries if (os.path.isfile(os.path.join(path, e)) and _FileMatches(e, excl_regexps))] for d in dirs: files += _GetFilesRecursively(os.path.join(path, d), excl_regexps) return files
[ "def", "_GetFilesRecursively", "(", "path", ",", "excl_regexps", "=", "[", "'#.*'", ",", "'\\\\..+'", ",", "'.*~$'", ",", "'.*\\\\.pyc$'", ",", "'.*_test.py$'", ",", "'.*_pkg.py$'", "]", ")", ":", "entries", "=", "os", ".", "listdir", "(", "path", ")", "di...
recursively walks the source directory and locates matching files .
train
false
41,928
def rst2md(text): pandoc = subprocess.Popen(['pandoc', '--from=rst', '--to=markdown', '--no-wrap'], stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE) (stdout, _) = pandoc.communicate(text.encode('utf-8')) md = stdout.decode('utf-8').strip() return re.sub('^- ', '- ', md, flags=re.M)
[ "def", "rst2md", "(", "text", ")", ":", "pandoc", "=", "subprocess", ".", "Popen", "(", "[", "'pandoc'", ",", "'--from=rst'", ",", "'--to=markdown'", ",", "'--no-wrap'", "]", ",", "stdin", "=", "subprocess", ".", "PIPE", ",", "stdout", "=", "subprocess", ...
use pandoc to convert text from rest to markdown .
train
false
41,929
def which(program): (filepath, filename) = os.path.split(program) os_path = os.environ['PATH'].split(os.pathsep) if (sys.platform == 'win32'): try: prog_files = os.environ['PROGRAMFILES'] except KeyError: prog_files = 'C:\\Program Files' likely_dirs = (['', prog_files, os.path.join(prog_files, 'paml41'), os.path.join(prog_files, 'paml43'), os.path.join(prog_files, 'paml44'), os.path.join(prog_files, 'paml45')] + sys.path) os_path.extend(likely_dirs) for path in os.environ['PATH'].split(os.pathsep): exe_file = os.path.join(path, program) if is_exe(exe_file): return exe_file return None
[ "def", "which", "(", "program", ")", ":", "(", "filepath", ",", "filename", ")", "=", "os", ".", "path", ".", "split", "(", "program", ")", "os_path", "=", "os", ".", "environ", "[", "'PATH'", "]", ".", "split", "(", "os", ".", "pathsep", ")", "i...
search path for executable files with the given name .
train
false
41,930
def create_dummy_vm(name, service_instance, vm_folder, resource_pool, datastore): vm_name = ('MARVEL-' + name) datastore_path = ((('[' + datastore) + '] ') + vm_name) vmx_file = vim.vm.FileInfo(logDirectory=None, snapshotDirectory=None, suspendDirectory=None, vmPathName=datastore_path) config = vim.vm.ConfigSpec(name=vm_name, memoryMB=128, numCPUs=1, files=vmx_file, guestId='dosGuest', version='vmx-07') print 'Creating VM {}...'.format(vm_name) task = vm_folder.CreateVM_Task(config=config, pool=resource_pool) tasks.wait_for_tasks(service_instance, [task])
[ "def", "create_dummy_vm", "(", "name", ",", "service_instance", ",", "vm_folder", ",", "resource_pool", ",", "datastore", ")", ":", "vm_name", "=", "(", "'MARVEL-'", "+", "name", ")", "datastore_path", "=", "(", "(", "(", "'['", "+", "datastore", ")", "+",...
creates a dummy virtualmachine with 1 vcpu .
train
false
41,931
def insert_system_path(opts, paths): if isinstance(paths, str): paths = [paths] for path in paths: path_options = {'path': path, 'root_dir': opts['root_dir']} prepend_root_dir(path_options, path_options) if (os.path.isdir(path_options['path']) and (path_options['path'] not in sys.path)): sys.path.insert(0, path_options['path'])
[ "def", "insert_system_path", "(", "opts", ",", "paths", ")", ":", "if", "isinstance", "(", "paths", ",", "str", ")", ":", "paths", "=", "[", "paths", "]", "for", "path", "in", "paths", ":", "path_options", "=", "{", "'path'", ":", "path", ",", "'root...
inserts path into python path taking into consideration root_dir option .
train
false
41,932
def available_languages(): global _LANGUAGE_HANDLER if (not _LANGUAGE_HANDLER): try: _LANGUAGE_HANDLER = LanguageHandler.objects.get(db_key='language_handler') except LanguageHandler.DoesNotExist: if (not _LANGUAGE_HANDLER): from evennia import create_script _LANGUAGE_HANDLER = create_script(LanguageHandler) return list(_LANGUAGE_HANDLER.attributes.get('language_storage', {}))
[ "def", "available_languages", "(", ")", ":", "global", "_LANGUAGE_HANDLER", "if", "(", "not", "_LANGUAGE_HANDLER", ")", ":", "try", ":", "_LANGUAGE_HANDLER", "=", "LanguageHandler", ".", "objects", ".", "get", "(", "db_key", "=", "'language_handler'", ")", "exce...
returns all available language keys .
train
false
41,933
@click.group() @click.option('--config', help='Path to configuration file. Default: ~/.curator/curator.yml', type=click.Path(exists=True), default=settings.config_file()) @click.pass_context def repo_mgr_cli(ctx, config): ctx.obj = {} ctx.obj['client_args'] = process_config(config) logger = logging.getLogger(__name__) logger.debug('Client and logging options validated.')
[ "@", "click", ".", "group", "(", ")", "@", "click", ".", "option", "(", "'--config'", ",", "help", "=", "'Path to configuration file. Default: ~/.curator/curator.yml'", ",", "type", "=", "click", ".", "Path", "(", "exists", "=", "True", ")", ",", "default", ...
repository manager for elasticsearch curator .
train
false
41,934
def init(mpstate): return SerialModule(mpstate)
[ "def", "init", "(", "mpstate", ")", ":", "return", "SerialModule", "(", "mpstate", ")" ]
initialise module .
train
false
41,935
def has_website_permission(doc, ptype, user, verbose=False): contact_name = frappe.db.get_value(u'Contact', {u'email_id': frappe.session.user}) if contact_name: contact = frappe.get_doc(u'Contact', contact_name) return contact.has_common_link(doc) lead_name = frappe.db.get_value(u'Lead', {u'email_id': frappe.session.user}) if lead_name: return doc.has_link(u'Lead', lead_name) return False
[ "def", "has_website_permission", "(", "doc", ",", "ptype", ",", "user", ",", "verbose", "=", "False", ")", ":", "contact_name", "=", "frappe", ".", "db", ".", "get_value", "(", "u'Contact'", ",", "{", "u'email_id'", ":", "frappe", ".", "session", ".", "u...
returns true if there is a related lead or contact related to this document .
train
false
41,937
@np.deprecate(message='scipy.special.sph_jn is deprecated in scipy 0.18.0. Use scipy.special.spherical_jn instead. Note that the new function has a different signature.') def sph_jn(n, z): if (not (isscalar(n) and isscalar(z))): raise ValueError('arguments must be scalars.') if ((n != floor(n)) or (n < 0)): raise ValueError('n must be a non-negative integer.') if (n < 1): n1 = 1 else: n1 = n if iscomplex(z): (nm, jn, jnp, yn, ynp) = specfun.csphjy(n1, z) else: (nm, jn, jnp) = specfun.sphj(n1, z) return (jn[:(n + 1)], jnp[:(n + 1)])
[ "@", "np", ".", "deprecate", "(", "message", "=", "'scipy.special.sph_jn is deprecated in scipy 0.18.0. Use scipy.special.spherical_jn instead. Note that the new function has a different signature.'", ")", "def", "sph_jn", "(", "n", ",", "z", ")", ":", "if", "(", "not", "(", ...
compute spherical bessel function jn(z) and derivative .
train
false
41,939
def test_meta_path_before_builtins(): class MyException(Exception, ): pass class K: def find_module(self, name, path): if (name == 'time'): return self return None def load_module(self, name): raise MyException if ('time' in sys.modules): del sys.modules['time'] loader = K() sys.meta_path.append(loader) try: import time AssertUnreachable() except MyException: pass sys.meta_path.remove(loader) import time
[ "def", "test_meta_path_before_builtins", "(", ")", ":", "class", "MyException", "(", "Exception", ",", ")", ":", "pass", "class", "K", ":", "def", "find_module", "(", "self", ",", "name", ",", "path", ")", ":", "if", "(", "name", "==", "'time'", ")", "...
the meta path should be consulted before builtins are loaded .
train
false
41,940
def test_slices_overlap_wrong_mode(): with pytest.raises(ValueError) as e: overlap_slices((5,), (3,), (0,), mode=u'full') assert (u'Mode can be only' in str(e.value))
[ "def", "test_slices_overlap_wrong_mode", "(", ")", ":", "with", "pytest", ".", "raises", "(", "ValueError", ")", "as", "e", ":", "overlap_slices", "(", "(", "5", ",", ")", ",", "(", "3", ",", ")", ",", "(", "0", ",", ")", ",", "mode", "=", "u'full'...
call overlap_slices with non-existing mode .
train
false
41,941
def make_name(variable, anon='anonymous_variable'): if (hasattr(variable, 'name') and (variable.name is not None)): return variable.name return anon
[ "def", "make_name", "(", "variable", ",", "anon", "=", "'anonymous_variable'", ")", ":", "if", "(", "hasattr", "(", "variable", ",", "'name'", ")", "and", "(", "variable", ".", "name", "is", "not", "None", ")", ")", ":", "return", "variable", ".", "nam...
if variable has a name .
train
false
41,942
def organisation_needs_item(): s3.prep = (lambda r: ((r.method == 'options') and (r.representation == 's3json'))) return s3_rest_controller()
[ "def", "organisation_needs_item", "(", ")", ":", "s3", ".", "prep", "=", "(", "lambda", "r", ":", "(", "(", "r", ".", "method", "==", "'options'", ")", "and", "(", "r", ".", "representation", "==", "'s3json'", ")", ")", ")", "return", "s3_rest_controll...
restful controller for option lookups .
train
false
41,943
def plugins_cmd(config, plugins): logger.debug('Expected interfaces: %s', config.ifaces) ifaces = ([] if (config.ifaces is None) else config.ifaces) filtered = plugins.visible().ifaces(ifaces) logger.debug('Filtered plugins: %r', filtered) if ((not config.init) and (not config.prepare)): print(str(filtered)) return filtered.init(config) verified = filtered.verify(ifaces) logger.debug('Verified plugins: %r', verified) if (not config.prepare): print(str(verified)) return verified.prepare() available = verified.available() logger.debug('Prepared plugins: %s', available) print(str(available))
[ "def", "plugins_cmd", "(", "config", ",", "plugins", ")", ":", "logger", ".", "debug", "(", "'Expected interfaces: %s'", ",", "config", ".", "ifaces", ")", "ifaces", "=", "(", "[", "]", "if", "(", "config", ".", "ifaces", "is", "None", ")", "else", "co...
list server software plugins .
train
false
41,944
def assert_trade_protocol(event): assert_datasource_protocol(event) assert (event.type == DATASOURCE_TYPE.TRADE) assert isinstance(event.price, numbers.Real) assert isinstance(event.volume, numbers.Integral) assert isinstance(event.dt, datetime)
[ "def", "assert_trade_protocol", "(", "event", ")", ":", "assert_datasource_protocol", "(", "event", ")", "assert", "(", "event", ".", "type", "==", "DATASOURCE_TYPE", ".", "TRADE", ")", "assert", "isinstance", "(", "event", ".", "price", ",", "numbers", ".", ...
assert that an event meets the protocol for datasource trade outputs .
train
true
41,945
def transact(): ctx.db.commit() ctx.db_transaction = True
[ "def", "transact", "(", ")", ":", "ctx", ".", "db", ".", "commit", "(", ")", "ctx", ".", "db_transaction", "=", "True" ]
start a transaction .
train
false
41,946
def generate_page_toc(soup): found_depth_counts = collections.defaultdict(int) for tag in soup.find_all(_heading_re): if (tag.get(u'id') or tag.get(u'name')): found_depth_counts[hdepth(tag)] += 1 depth_list = [i for i in range(100) if (1 < found_depth_counts[i])] depth_list = depth_list[:4] toc = [] for tag in soup.find_all(_heading_re): depth = hdepth(tag) if (depth in depth_list): toc.append(dict(depth=(depth_list.index(depth) + 1), link=(tag.get(u'id') or tag.get(u'name')), text=tag.text)) return toc
[ "def", "generate_page_toc", "(", "soup", ")", ":", "found_depth_counts", "=", "collections", ".", "defaultdict", "(", "int", ")", "for", "tag", "in", "soup", ".", "find_all", "(", "_heading_re", ")", ":", "if", "(", "tag", ".", "get", "(", "u'id'", ")", ...
return page-level toc template data for soup .
train
true
41,947
@jit(nopython=True, cache=True) def best_response_2p(payoff_matrix, opponent_mixed_action, tol=1e-08): (n, m) = payoff_matrix.shape payoff_max = (- np.inf) payoff_vector = np.zeros(n) for a in range(n): for b in range(m): payoff_vector[a] += (payoff_matrix[(a, b)] * opponent_mixed_action[b]) if (payoff_vector[a] > payoff_max): payoff_max = payoff_vector[a] for a in range(n): if (payoff_vector[a] >= (payoff_max - tol)): return a
[ "@", "jit", "(", "nopython", "=", "True", ",", "cache", "=", "True", ")", "def", "best_response_2p", "(", "payoff_matrix", ",", "opponent_mixed_action", ",", "tol", "=", "1e-08", ")", ":", "(", "n", ",", "m", ")", "=", "payoff_matrix", ".", "shape", "p...
numba-optimized version of player .
train
false
41,948
def make_test_environ_builder(app, path='/', base_url=None, *args, **kwargs): http_host = app.config.get('SERVER_NAME') app_root = app.config.get('APPLICATION_ROOT') if (base_url is None): url = url_parse(path) base_url = ('http://%s/' % (url.netloc or http_host or 'localhost')) if app_root: base_url += app_root.lstrip('/') if url.netloc: path = url.path if url.query: path += ('?' + url.query) return EnvironBuilder(path, base_url, *args, **kwargs)
[ "def", "make_test_environ_builder", "(", "app", ",", "path", "=", "'/'", ",", "base_url", "=", "None", ",", "*", "args", ",", "**", "kwargs", ")", ":", "http_host", "=", "app", ".", "config", ".", "get", "(", "'SERVER_NAME'", ")", "app_root", "=", "app...
creates a new test builder with some application defaults thrown in .
train
false
41,950
def translation_set_language(request, select_language): select_language = translation_allowed_language(select_language) if translation.check_for_language(select_language): fallback = False else: select_language = django_settings.LANGUAGES[0][0] fallback = True translation.activate(select_language) request.LANGUAGE_CODE = translation.get_language() if hasattr(request, u'session'): if (select_language != request.session.get(LANGUAGE_SESSION_KEY)): request.session[LANGUAGE_SESSION_KEY] = select_language elif ((request.method == u'GET') and (not fallback)): response = HttpResponseRedirect(request.get_full_path()) response.set_cookie(str(LANGUAGE_COOKIE_NAME), select_language) return response
[ "def", "translation_set_language", "(", "request", ",", "select_language", ")", ":", "select_language", "=", "translation_allowed_language", "(", "select_language", ")", "if", "translation", ".", "check_for_language", "(", "select_language", ")", ":", "fallback", "=", ...
set and activate a language .
train
false
41,952
def _get_indices_Pow(expr): (base, exp) = expr.as_base_exp() (binds, bsyms) = get_indices(base) (einds, esyms) = get_indices(exp) inds = (binds | einds) symmetries = {} return (inds, symmetries)
[ "def", "_get_indices_Pow", "(", "expr", ")", ":", "(", "base", ",", "exp", ")", "=", "expr", ".", "as_base_exp", "(", ")", "(", "binds", ",", "bsyms", ")", "=", "get_indices", "(", "base", ")", "(", "einds", ",", "esyms", ")", "=", "get_indices", "...
determine outer indices of a power or an exponential .
train
false
41,953
def _ssh_run_with_recursion(ssh_bin, address, ec2_key_pair_file, keyfile, cmd_args): if ('!' in address): if (keyfile is None): raise ValueError('SSH key file path cannot be None') (host1, host2) = address.split('!') more_args = ['ssh', '-i', keyfile, '-o', 'StrictHostKeyChecking=no', '-o', 'UserKnownHostsFile=/dev/null', ('hadoop@%s' % (host2,))] return _ssh_run(ssh_bin, host1, ec2_key_pair_file, (more_args + list(cmd_args))) else: return _ssh_run(ssh_bin, address, ec2_key_pair_file, cmd_args)
[ "def", "_ssh_run_with_recursion", "(", "ssh_bin", ",", "address", ",", "ec2_key_pair_file", ",", "keyfile", ",", "cmd_args", ")", ":", "if", "(", "'!'", "in", "address", ")", ":", "if", "(", "keyfile", "is", "None", ")", ":", "raise", "ValueError", "(", ...
some files exist on the master and can be accessed directly via ssh .
train
false
41,955
def _decode_ack(packet, packet_buff, offset): if (len(packet_buff) != (offset + 2)): raise InvalidPacketException((u'ACK packet has invalid size (!=%s): %s' % ((offset + 2), hexlify(packet_buff)))) (block_number,) = struct.unpack_from('!H', packet_buff, offset) packet['block_number'] = block_number return packet
[ "def", "_decode_ack", "(", "packet", ",", "packet_buff", ",", "offset", ")", ":", "if", "(", "len", "(", "packet_buff", ")", "!=", "(", "offset", "+", "2", ")", ")", ":", "raise", "InvalidPacketException", "(", "(", "u'ACK packet has invalid size (!=%s): %s'",...
decodes a ack packet .
train
false
41,956
def log_err(errmsg): try: errmsg = str(errmsg) except Exception as e: errmsg = str(e) for line in errmsg.splitlines(): log.msg(('[EE] %s' % line))
[ "def", "log_err", "(", "errmsg", ")", ":", "try", ":", "errmsg", "=", "str", "(", "errmsg", ")", "except", "Exception", "as", "e", ":", "errmsg", "=", "str", "(", "e", ")", "for", "line", "in", "errmsg", ".", "splitlines", "(", ")", ":", "log", "...
prints/logs an error message to the server log .
train
false
41,957
def get_language_from_path(path, supported=None): if (supported is None): from django.conf import settings supported = dict(settings.LANGUAGES) regex_match = language_code_prefix_re.match(path) if regex_match: lang_code = regex_match.group(1) if ((lang_code in supported) and check_for_language(lang_code)): return lang_code
[ "def", "get_language_from_path", "(", "path", ",", "supported", "=", "None", ")", ":", "if", "(", "supported", "is", "None", ")", ":", "from", "django", ".", "conf", "import", "settings", "supported", "=", "dict", "(", "settings", ".", "LANGUAGES", ")", ...
returns the language-code if there is a valid language-code found in the path .
train
false
41,958
def test_X21(): p = symbols('p', positive=True) n = symbols('n', positive=True, integer=True) s = fourier_series(x, (x, (- p), p)) assert (s.an.formula == 0) assert (s.bn.formula.subs(s.bn.variables[0], 0) == 0) assert (s.bn.formula.subs(s.bn.variables[0], n) == ((((((-2) * p) / pi) * ((-1) ** n)) / n) * sin((((n * pi) * x) / p))))
[ "def", "test_X21", "(", ")", ":", "p", "=", "symbols", "(", "'p'", ",", "positive", "=", "True", ")", "n", "=", "symbols", "(", "'n'", ",", "positive", "=", "True", ",", "integer", "=", "True", ")", "s", "=", "fourier_series", "(", "x", ",", "(",...
test whether fourier_series of x periodical on the [-p .
train
false
41,960
def _execute_command(cmd, at_time=None): if at_time: cmd = "echo '{0}' | at {1}".format(cmd, _cmd_quote(at_time)) return (not bool(__salt__['cmd.retcode'](cmd, python_shell=True)))
[ "def", "_execute_command", "(", "cmd", ",", "at_time", "=", "None", ")", ":", "if", "at_time", ":", "cmd", "=", "\"echo '{0}' | at {1}\"", ".", "format", "(", "cmd", ",", "_cmd_quote", "(", "at_time", ")", ")", "return", "(", "not", "bool", "(", "__salt_...
helper function to execute the command .
train
true
41,961
def cycle_logfile(logfile): logfile_old = (logfile + '.old') if os.path.exists(logfile): if os.path.exists(logfile_old): os.remove(logfile_old) os.rename(logfile, logfile_old)
[ "def", "cycle_logfile", "(", "logfile", ")", ":", "logfile_old", "=", "(", "logfile", "+", "'.old'", ")", "if", "os", ".", "path", ".", "exists", "(", "logfile", ")", ":", "if", "os", ".", "path", ".", "exists", "(", "logfile_old", ")", ":", "os", ...
rotate the old log files to <filename> .
train
false
41,962
def generate_prototypes_from_files(files): all_prototypes = [] for f in files: src_text = f.read() prototypes = generate_prototypes_from_src(src_text) all_prototypes += prototypes if (u'void setup()' in all_prototypes): all_prototypes.remove(u'void setup()') if (u'void loop()' in all_prototypes): all_prototypes.remove(u'void loop()') return all_prototypes
[ "def", "generate_prototypes_from_files", "(", "files", ")", ":", "all_prototypes", "=", "[", "]", "for", "f", "in", "files", ":", "src_text", "=", "f", ".", "read", "(", ")", "prototypes", "=", "generate_prototypes_from_src", "(", "src_text", ")", "all_prototy...
generate prototypes for all functions of a given file list .
train
false
41,963
def _attempt_to_acquire_lock(s3_fs, lock_uri, sync_wait_time, job_key, mins_to_expiration=None): key = _lock_acquire_step_1(s3_fs, lock_uri, job_key, mins_to_expiration) if (key is None): return False time.sleep(sync_wait_time) return _lock_acquire_step_2(key, job_key)
[ "def", "_attempt_to_acquire_lock", "(", "s3_fs", ",", "lock_uri", ",", "sync_wait_time", ",", "job_key", ",", "mins_to_expiration", "=", "None", ")", ":", "key", "=", "_lock_acquire_step_1", "(", "s3_fs", ",", "lock_uri", ",", "job_key", ",", "mins_to_expiration",...
returns true if this session successfully took ownership of the lock specified by lock_uri .
train
false
41,964
def _CheckNumber(value, name, should_be_finite=False): if (not isinstance(value, (int, long, float))): raise TypeError(('%s must be a int, long or float, got %s' % (name, value.__class__.__name__))) if (should_be_finite and (not _IsFinite(value))): raise ValueError(('%s must be a finite value (got %f)' % (name, value))) return value
[ "def", "_CheckNumber", "(", "value", ",", "name", ",", "should_be_finite", "=", "False", ")", ":", "if", "(", "not", "isinstance", "(", "value", ",", "(", "int", ",", "long", ",", "float", ")", ")", ")", ":", "raise", "TypeError", "(", "(", "'%s must...
checks whether value is a number .
train
false
41,965
def refresh_modules(async=True): try: if async: ret = __salt__['event.fire']({}, 'module_refresh') else: eventer = salt.utils.event.get_event('minion', opts=__opts__, listen=True) ret = __salt__['event.fire']({'notify': True}, 'module_refresh') log.trace('refresh_modules waiting for module refresh to complete') eventer.get_event(tag='/salt/minion/minion_mod_complete', wait=30) except KeyError: log.error('Event module not available. Module refresh failed.') ret = False return ret
[ "def", "refresh_modules", "(", "async", "=", "True", ")", ":", "try", ":", "if", "async", ":", "ret", "=", "__salt__", "[", "'event.fire'", "]", "(", "{", "}", ",", "'module_refresh'", ")", "else", ":", "eventer", "=", "salt", ".", "utils", ".", "eve...
signal the minion to refresh the module and grain data the default is to refresh module asynchronously .
train
false
41,966
def ne_chunk_sents(tagged_sentences, binary=False): if binary: chunker_pickle = _BINARY_NE_CHUNKER else: chunker_pickle = _MULTICLASS_NE_CHUNKER chunker = load(chunker_pickle) return chunker.parse_sents(tagged_sentences)
[ "def", "ne_chunk_sents", "(", "tagged_sentences", ",", "binary", "=", "False", ")", ":", "if", "binary", ":", "chunker_pickle", "=", "_BINARY_NE_CHUNKER", "else", ":", "chunker_pickle", "=", "_MULTICLASS_NE_CHUNKER", "chunker", "=", "load", "(", "chunker_pickle", ...
use nltks currently recommended named entity chunker to chunk the given list of tagged sentences .
train
false
41,967
def clear_cluster(): if _TRAFFICCTL: cmd = _traffic_ctl('metric', 'clear', '--cluster') else: cmd = _traffic_line('-C') log.debug('Running: %s', cmd) return _subprocess(cmd)
[ "def", "clear_cluster", "(", ")", ":", "if", "_TRAFFICCTL", ":", "cmd", "=", "_traffic_ctl", "(", "'metric'", ",", "'clear'", ",", "'--cluster'", ")", "else", ":", "cmd", "=", "_traffic_line", "(", "'-C'", ")", "log", ".", "debug", "(", "'Running: %s'", ...
clears accumulated statistics on all nodes in the cluster .
train
false
41,970
def checkout(cwd, remote, target=None, user=None, username=None, password=None, *opts): opts += (remote,) if target: opts += (target,) return _run_svn('checkout', cwd, user, username, password, opts)
[ "def", "checkout", "(", "cwd", ",", "remote", ",", "target", "=", "None", ",", "user", "=", "None", ",", "username", "=", "None", ",", "password", "=", "None", ",", "*", "opts", ")", ":", "opts", "+=", "(", "remote", ",", ")", "if", "target", ":"...
interface to git-checkout(1)_ cwd the path to the git checkout opts any additional options to add to the command line .
train
true
41,971
def _get_label_flip(labels, label_vertidx, src): from .label import label_sign_flip label_flip = list() for (label, vertidx) in zip(labels, label_vertidx): if (label.hemi == 'both'): raise ValueError('BiHemiLabel not supported when using sign-flip') if (vertidx is not None): flip = label_sign_flip(label, src)[:, None] else: flip = None label_flip.append(flip) return label_flip
[ "def", "_get_label_flip", "(", "labels", ",", "label_vertidx", ",", "src", ")", ":", "from", ".", "label", "import", "label_sign_flip", "label_flip", "=", "list", "(", ")", "for", "(", "label", ",", "vertidx", ")", "in", "zip", "(", "labels", ",", "label...
get sign-flip for labels .
train
false
41,972
def is_keras_tensor(x): if hasattr(x, '_keras_shape'): return True else: return False
[ "def", "is_keras_tensor", "(", "x", ")", ":", "if", "hasattr", "(", "x", ",", "'_keras_shape'", ")", ":", "return", "True", "else", ":", "return", "False" ]
returns whether x is a keras tensor .
train
false
41,973
def lookup(path, parent=None, user=None, exists=None): url = build_url(RESOURCE, route='lookup') params = make_params(path=path, parent=parent, user=user, exists=exists) return request('get', url, params=params)
[ "def", "lookup", "(", "path", ",", "parent", "=", "None", ",", "user", "=", "None", ",", "exists", "=", "None", ")", ":", "url", "=", "build_url", "(", "RESOURCE", ",", "route", "=", "'lookup'", ")", "params", "=", "make_params", "(", "path", "=", ...
retrieve a plot file from plotly without needing a fid .
train
false
41,974
def s3_truncate(text, length=48, nice=True): text = s3_unicode(text) if (len(text) > length): if nice: return ('%s...' % text[:length].rsplit(' ', 1)[0][:(length - 3)]) else: return ('%s...' % text[:(length - 3)]) else: return text
[ "def", "s3_truncate", "(", "text", ",", "length", "=", "48", ",", "nice", "=", "True", ")", ":", "text", "=", "s3_unicode", "(", "text", ")", "if", "(", "len", "(", "text", ")", ">", "length", ")", ":", "if", "nice", ":", "return", "(", "'%s...'"...
nice truncating of text .
train
false
41,975
def mkquickgenesis(initial_alloc={}, db=None): assert (db is not None) return blocks.genesis(env(db), start_alloc=initial_alloc, difficulty=1)
[ "def", "mkquickgenesis", "(", "initial_alloc", "=", "{", "}", ",", "db", "=", "None", ")", ":", "assert", "(", "db", "is", "not", "None", ")", "return", "blocks", ".", "genesis", "(", "env", "(", "db", ")", ",", "start_alloc", "=", "initial_alloc", "...
set initial_difficulty to a value that is quickly minable .
train
false
41,976
def deflate_and_base64_encode(string_val): return base64.b64encode(zlib.compress(string_val)[2:(-4)])
[ "def", "deflate_and_base64_encode", "(", "string_val", ")", ":", "return", "base64", ".", "b64encode", "(", "zlib", ".", "compress", "(", "string_val", ")", "[", "2", ":", "(", "-", "4", ")", "]", ")" ]
deflates and the base64 encodes a string .
train
false
41,977
def p_container_type(p): p[0] = p[1]
[ "def", "p_container_type", "(", "p", ")", ":", "p", "[", "0", "]", "=", "p", "[", "1", "]" ]
container_type : map_type | list_type | set_type .
train
false
41,978
def group_add_asset(group, asset_id=None, asset_ip=None): if asset_id: asset = get_object(Asset, id=asset_id) else: asset = get_object(Asset, ip=asset_ip) if asset: group.asset_set.add(asset)
[ "def", "group_add_asset", "(", "group", ",", "asset_id", "=", "None", ",", "asset_ip", "=", "None", ")", ":", "if", "asset_id", ":", "asset", "=", "get_object", "(", "Asset", ",", "id", "=", "asset_id", ")", "else", ":", "asset", "=", "get_object", "("...
asset group add a asset .
train
false
41,979
def setglobal(name, value): (global_dict, local_dict) = get_twill_glocals() global_dict[name] = value
[ "def", "setglobal", "(", "name", ",", "value", ")", ":", "(", "global_dict", ",", "local_dict", ")", "=", "get_twill_glocals", "(", ")", "global_dict", "[", "name", "]", "=", "value" ]
setglobal <name> <value> sets the variable <name> to the value <value> in the global namespace .
train
false
41,980
def test_json_camelcase(): test_data = {'under_score': 'values_can', 'be_converted': [{'to_camelcase': 'value'}, 'wont_be_convert']} output = hug.output_format.json_camelcase(test_data).decode('utf8') assert ('underScore' in output) assert ('values_can' in output) assert ('beConverted' in output) assert ('toCamelcase' in output) assert ('value' in output) assert ('wont_be_convert' in output)
[ "def", "test_json_camelcase", "(", ")", ":", "test_data", "=", "{", "'under_score'", ":", "'values_can'", ",", "'be_converted'", ":", "[", "{", "'to_camelcase'", ":", "'value'", "}", ",", "'wont_be_convert'", "]", "}", "output", "=", "hug", ".", "output_format...
ensure that its possible to output a hug api method as camelcased json .
train
false
41,981
def set_environment(env): settings.environment = env
[ "def", "set_environment", "(", "env", ")", ":", "settings", ".", "environment", "=", "env" ]
change your configuration environment .
train
false
41,982
def akaike_info_criterion(log_likelihood, n_params, n_samples): if ((n_samples / float(n_params)) >= 40.0): aic = (2.0 * (n_params - log_likelihood)) else: aic = ((2.0 * (n_params - log_likelihood)) + (((2.0 * n_params) * (n_params + 1.0)) / ((n_samples - n_params) - 1.0))) return aic
[ "def", "akaike_info_criterion", "(", "log_likelihood", ",", "n_params", ",", "n_samples", ")", ":", "if", "(", "(", "n_samples", "/", "float", "(", "n_params", ")", ")", ">=", "40.0", ")", ":", "aic", "=", "(", "2.0", "*", "(", "n_params", "-", "log_li...
computes the akaike information criterion .
train
false
41,983
def maxRstat(Z, R, i): Z = np.asarray(Z, order='c') R = np.asarray(R, order='c') is_valid_linkage(Z, throw=True, name='Z') is_valid_im(R, throw=True, name='R') if (type(i) is not int): raise TypeError('The third argument must be an integer.') if ((i < 0) or (i > 3)): raise ValueError('i must be an integer between 0 and 3 inclusive.') if (Z.shape[0] != R.shape[0]): raise ValueError('The inconsistency matrix and linkage matrix each have a different number of rows.') n = (Z.shape[0] + 1) MR = np.zeros(((n - 1),)) [Z, R] = _copy_arrays_if_base_present([Z, R]) _hierarchy.get_max_Rfield_for_each_cluster(Z, R, MR, int(n), i) return MR
[ "def", "maxRstat", "(", "Z", ",", "R", ",", "i", ")", ":", "Z", "=", "np", ".", "asarray", "(", "Z", ",", "order", "=", "'c'", ")", "R", "=", "np", ".", "asarray", "(", "R", ",", "order", "=", "'c'", ")", "is_valid_linkage", "(", "Z", ",", ...
returns the maximum statistic for each non-singleton cluster and its descendents .
train
false
41,984
def _num_conv_units(conv_layer): assert isinstance(conv_layer, (MaxoutConvC01B, ConvElemwise)) weights = conv_layer.get_params()[0].get_value() if isinstance(conv_layer, MaxoutConvC01B): return weights.shape[(-1)] elif isinstance(conv_layer, ConvElemwise): return weights.shape[0]
[ "def", "_num_conv_units", "(", "conv_layer", ")", ":", "assert", "isinstance", "(", "conv_layer", ",", "(", "MaxoutConvC01B", ",", "ConvElemwise", ")", ")", "weights", "=", "conv_layer", ".", "get_params", "(", ")", "[", "0", "]", ".", "get_value", "(", ")...
returns a conv layers number of output channels .
train
false
41,986
def _FormatServiceHealthReport(report): assert (report.get('status') == 'ALERT') message = '' sub_messages = [] alerts = report.get('alerts') if (len(alerts) > 1): message += ('(%d Alerts):' % len(alerts)) for a in alerts: sub_message = (' ' + a.get('description', a.get('name'))) if a.get('cluster', False): sub_message += '(Cluster)' else: sub_message += ('(%d machines)' % a.get('count')) sub_messages.append(sub_message) return (message + ','.join(sub_messages))
[ "def", "_FormatServiceHealthReport", "(", "report", ")", ":", "assert", "(", "report", ".", "get", "(", "'status'", ")", "==", "'ALERT'", ")", "message", "=", "''", "sub_messages", "=", "[", "]", "alerts", "=", "report", ".", "get", "(", "'alerts'", ")",...
create a formatted message from a service health report .
train
false
41,987
def process_static_urls(text, replacement_function, data_dir=None): def wrap_part_extraction(match): '\n Unwraps a match group for the captures specified in _url_replace_regex\n and forward them on as function arguments\n ' original = match.group(0) prefix = match.group('prefix') quote = match.group('quote') rest = match.group('rest') full_url = (prefix + rest) starts_with_static_url = full_url.startswith(unicode(settings.STATIC_URL)) starts_with_prefix = full_url.startswith(XBLOCK_STATIC_RESOURCE_PREFIX) contains_prefix = (XBLOCK_STATIC_RESOURCE_PREFIX in full_url) if (starts_with_prefix or (starts_with_static_url and contains_prefix)): return original return replacement_function(original, prefix, quote, rest) return re.sub(_url_replace_regex(u'(?:{static_url}|/static/)(?!{data_dir})'.format(static_url=settings.STATIC_URL, data_dir=data_dir)), wrap_part_extraction, text)
[ "def", "process_static_urls", "(", "text", ",", "replacement_function", ",", "data_dir", "=", "None", ")", ":", "def", "wrap_part_extraction", "(", "match", ")", ":", "original", "=", "match", ".", "group", "(", "0", ")", "prefix", "=", "match", ".", "grou...
run an arbitrary replacement function on any urls matching the static file directory .
train
false