id_within_dataset
int64
1
55.5k
snippet
stringlengths
19
14.2k
tokens
listlengths
6
1.63k
nl
stringlengths
6
352
split_within_dataset
stringclasses
1 value
is_duplicated
bool
2 classes
46,111
@login_required @require_POST def save_zen(request, project, subproject, lang): translation = get_translation(request, project, subproject, lang) user_locked = translation.is_user_locked(request.user) form = TranslationForm(translation, None, request.POST) if (not can_translate(request.user, translation)): messages.error(request, _(u"You don't have privileges to save translations!")) elif (not form.is_valid()): messages.error(request, _(u'Failed to save translation!')) elif (not user_locked): unit = form.cleaned_data[u'unit'] perform_translation(unit, form, request) return render(request, u'zen-response.html', {})
[ "@", "login_required", "@", "require_POST", "def", "save_zen", "(", "request", ",", "project", ",", "subproject", ",", "lang", ")", ":", "translation", "=", "get_translation", "(", "request", ",", "project", ",", "subproject", ",", "lang", ")", "user_locked", ...
save handler for zen mode .
train
false
46,112
def is_text_string(obj): if PY2: return isinstance(obj, basestring) else: return isinstance(obj, str)
[ "def", "is_text_string", "(", "obj", ")", ":", "if", "PY2", ":", "return", "isinstance", "(", "obj", ",", "basestring", ")", "else", ":", "return", "isinstance", "(", "obj", ",", "str", ")" ]
return true if obj is a text string .
train
false
46,113
def is_single_index(slc): if (slc.step is None): step = 1 else: step = slc.step return ((slc.start is not None) and (slc.stop is not None) and ((slc.start + step) >= slc.stop))
[ "def", "is_single_index", "(", "slc", ")", ":", "if", "(", "slc", ".", "step", "is", "None", ")", ":", "step", "=", "1", "else", ":", "step", "=", "slc", ".", "step", "return", "(", "(", "slc", ".", "start", "is", "not", "None", ")", "and", "("...
is the slice equivalent to a single index? .
train
false
46,114
def FormatThousands(value): if isinstance(value, float): value = ('%.2f' % value) else: value = str(value) if ('.' in value): (head, tail) = value.split('.', 1) tail = ('.' + tail) elif ('e' in value): (head, tail) = value.split('e', 1) tail = ('e' + tail) else: head = value tail = '' sign = '' if head.startswith('-'): sign = '-' head = head[1:] while (len(head) > 3): tail = ((',' + head[(-3):]) + tail) head = head[:(-3)] return ((sign + head) + tail)
[ "def", "FormatThousands", "(", "value", ")", ":", "if", "isinstance", "(", "value", ",", "float", ")", ":", "value", "=", "(", "'%.2f'", "%", "value", ")", "else", ":", "value", "=", "str", "(", "value", ")", "if", "(", "'.'", "in", "value", ")", ...
format a numerical value .
train
false
46,115
def list_resource_records_command(args): records = list_resource_records(args.project_id, args.name) for record in records: print record
[ "def", "list_resource_records_command", "(", "args", ")", ":", "records", "=", "list_resource_records", "(", "args", ".", "project_id", ",", "args", ".", "name", ")", "for", "record", "in", "records", ":", "print", "record" ]
list all resource records for a zone .
train
false
46,116
def register_vcs(vcs): if vcs.is_supported(): key = vcs.name.lower() VCS_REGISTRY[key] = vcs VCS_CHOICES.append((key, vcs.name)) return vcs
[ "def", "register_vcs", "(", "vcs", ")", ":", "if", "vcs", ".", "is_supported", "(", ")", ":", "key", "=", "vcs", ".", "name", ".", "lower", "(", ")", "VCS_REGISTRY", "[", "key", "]", "=", "vcs", "VCS_CHOICES", ".", "append", "(", "(", "key", ",", ...
registers vcs if its supported .
train
false
46,117
def test_oss_fit(): oss = OneSidedSelection(random_state=RND_SEED) oss.fit(X, Y) assert_equal(oss.min_c_, 0) assert_equal(oss.maj_c_, 1) assert_equal(oss.stats_c_[0], 6) assert_equal(oss.stats_c_[1], 9)
[ "def", "test_oss_fit", "(", ")", ":", "oss", "=", "OneSidedSelection", "(", "random_state", "=", "RND_SEED", ")", "oss", ".", "fit", "(", "X", ",", "Y", ")", "assert_equal", "(", "oss", ".", "min_c_", ",", "0", ")", "assert_equal", "(", "oss", ".", "...
test the fitting method .
train
false
46,119
def loadInstanceFromDatastore(klass, key, codec=None): if (not issubclass(klass, (db.Model, db.Expando))): raise TypeError(('expected db.Model/db.Expando class, got %s' % (klass,))) if (not isinstance(key, basestring)): raise TypeError('string expected for key, got %s', (repr(key),)) key = str(key) if (codec is None): return klass.get(key) gae_objects = getGAEObjects(codec.context) try: return gae_objects.getClassKey(klass, key) except KeyError: pass obj = klass.get(key) gae_objects.addClassKey(klass, key, obj) return obj
[ "def", "loadInstanceFromDatastore", "(", "klass", ",", "key", ",", "codec", "=", "None", ")", ":", "if", "(", "not", "issubclass", "(", "klass", ",", "(", "db", ".", "Model", ",", "db", ".", "Expando", ")", ")", ")", ":", "raise", "TypeError", "(", ...
attempt to load an instance from the datastore .
train
false
46,120
def screenshot(): config_id = (request.args(0) or 1) size = get_vars.get('size') if (size == 'Letter'): height = 2550 width = 3300 elif (size == 'A4'): height = 2480 width = 3508 elif (size == 'A3'): height = 3508 width = 4962 elif (size == 'A2'): height = 4962 width = 7017 elif (size == 'A1'): height = 7017 width = 9933 elif (size == 'A0'): height = 9933 width = 14061 else: height = get_vars.get('height') try: height = int(height) except (ValueError, TypeError): height = 2480 width = get_vars.get('width') try: width = int(width) except (ValueError, TypeError): width = 3508 filename = gis.get_screenshot(config_id, height=height, width=width) if filename: redirect(URL(c='static', f='cache', args=['jpg', filename])) else: raise HTTP(500, 'Screenshot not taken')
[ "def", "screenshot", "(", ")", ":", "config_id", "=", "(", "request", ".", "args", "(", "0", ")", "or", "1", ")", "size", "=", "get_vars", ".", "get", "(", "'size'", ")", "if", "(", "size", "==", "'Letter'", ")", ":", "height", "=", "2550", "widt...
take a screenshot of a map .
train
false
46,121
def get_pydoc_text(module): doc = pydoc.TextDoc() loc = (doc.getdocloc(pydoc_mod) or '') if loc: loc = (('\nMODULE DOCS\n ' + loc) + '\n') output = doc.docmodule(module) patt = re.compile('\x08.') output = patt.sub('', output) return (output.strip(), loc)
[ "def", "get_pydoc_text", "(", "module", ")", ":", "doc", "=", "pydoc", ".", "TextDoc", "(", ")", "loc", "=", "(", "doc", ".", "getdocloc", "(", "pydoc_mod", ")", "or", "''", ")", "if", "loc", ":", "loc", "=", "(", "(", "'\\nMODULE DOCS\\n '", "+",...
returns pydoc generated output as text .
train
false
46,123
def describe_method(method): method_info = method.remote descriptor = MethodDescriptor() descriptor.name = method_info.method.func_name descriptor.request_type = method_info.request_type.definition_name() descriptor.response_type = method_info.response_type.definition_name() return descriptor
[ "def", "describe_method", "(", "method", ")", ":", "method_info", "=", "method", ".", "remote", "descriptor", "=", "MethodDescriptor", "(", ")", "descriptor", ".", "name", "=", "method_info", ".", "method", ".", "func_name", "descriptor", ".", "request_type", ...
build descriptor for service method .
train
false
46,124
def is_informational(status): return ((100 <= status) and (status <= 199))
[ "def", "is_informational", "(", "status", ")", ":", "return", "(", "(", "100", "<=", "status", ")", "and", "(", "status", "<=", "199", ")", ")" ]
check if http status code is informational .
train
false
46,125
def _magnetic_dipole_objective(x, B, B2, coils, scale, method): if (method == 'forward'): fwd = _magnetic_dipole_field_vec(x[np.newaxis, :], coils) else: from .preprocessing.maxwell import _sss_basis fwd = _sss_basis(dict(origin=x, int_order=1, ext_order=0), coils).T fwd = np.dot(fwd, scale.T) one = np.dot(linalg.svd(fwd, full_matrices=False)[2][:3], B) one *= one Bm2 = one.sum() return (B2 - Bm2)
[ "def", "_magnetic_dipole_objective", "(", "x", ",", "B", ",", "B2", ",", "coils", ",", "scale", ",", "method", ")", ":", "if", "(", "method", "==", "'forward'", ")", ":", "fwd", "=", "_magnetic_dipole_field_vec", "(", "x", "[", "np", ".", "newaxis", ",...
project data onto right eigenvectors of whitened forward .
train
false
46,126
def get_hardcoded_cert_keystore(files): try: print '[INFO] Getting Hardcoded Certificates/Keystores' dat = '' certz = '' key_store = '' for file_name in files: ext = file_name.split('.')[(-1)] if re.search('cer|pem|cert|crt|pub|key|pfx|p12', ext): certz += (escape(file_name) + '</br>') if re.search('jks|bks', ext): key_store += (escape(file_name) + '</br>') if (len(certz) > 1): dat += (('<tr><td>Certificate/Key Files Hardcoded inside the App.</td><td>' + certz) + '</td><tr>') if (len(key_store) > 1): dat += (('<tr><td>Hardcoded Keystore Found.</td><td>' + key_store) + '</td><tr>') return dat except: PrintException('[ERROR] Getting Hardcoded Certificates/Keystores')
[ "def", "get_hardcoded_cert_keystore", "(", "files", ")", ":", "try", ":", "print", "'[INFO] Getting Hardcoded Certificates/Keystores'", "dat", "=", "''", "certz", "=", "''", "key_store", "=", "''", "for", "file_name", "in", "files", ":", "ext", "=", "file_name", ...
returns the hardcoded certificate keystore .
train
false
46,127
def _save_coverage(cov, result, rootdir, testing_path): from ..utils.console import color_print if (result != 0): return try: cov.get_data() except AttributeError: cov._harvest_data() lines = cov.data.lines else: lines = cov.data._lines for key in list(lines.keys()): new_path = os.path.relpath(os.path.realpath(key), os.path.realpath(testing_path)) new_path = os.path.abspath(os.path.join(rootdir, new_path)) lines[new_path] = lines.pop(key) color_print(u'Saving coverage data in .coverage...', u'green') cov.save() color_print(u'Saving HTML coverage report in htmlcov...', u'green') cov.html_report(directory=os.path.join(rootdir, u'htmlcov'))
[ "def", "_save_coverage", "(", "cov", ",", "result", ",", "rootdir", ",", "testing_path", ")", ":", "from", ".", ".", "utils", ".", "console", "import", "color_print", "if", "(", "result", "!=", "0", ")", ":", "return", "try", ":", "cov", ".", "get_data...
this method is called after the tests have been run in coverage mode to cleanup and then save the coverage data and report .
train
false
46,128
def peak_snr(img1, img2): if (img1.ndim == 3): (img1, img2) = (rgb2gray(img1.copy()), rgb2gray(img2.copy())) img1 = skimage.img_as_float(img1) img2 = skimage.img_as_float(img2) mse = ((1.0 / img1.size) * np.square((img1 - img2)).sum()) (_, max_) = dtype_range[img1.dtype.type] return (20 * np.log((max_ / mse)))
[ "def", "peak_snr", "(", "img1", ",", "img2", ")", ":", "if", "(", "img1", ".", "ndim", "==", "3", ")", ":", "(", "img1", ",", "img2", ")", "=", "(", "rgb2gray", "(", "img1", ".", "copy", "(", ")", ")", ",", "rgb2gray", "(", "img2", ".", "copy...
peak signal to noise ratio of two images parameters img1 : array-like img2 : array-like returns peak_snr : float peak signal to noise ratio .
train
false
46,130
def lower_case_hostname(url): parsed = moves.urllib.parse.urlparse(url) replaced = parsed._replace(netloc=parsed.netloc.lower()) return moves.urllib.parse.urlunparse(replaced)
[ "def", "lower_case_hostname", "(", "url", ")", ":", "parsed", "=", "moves", ".", "urllib", ".", "parse", ".", "urlparse", "(", "url", ")", "replaced", "=", "parsed", ".", "_replace", "(", "netloc", "=", "parsed", ".", "netloc", ".", "lower", "(", ")", ...
change the urls hostname to lowercase .
train
false
46,132
def plaintext2html(text, container_tag=False): text = cgi.escape(ustr(text)) text = text.replace('\n', '<br/>') text = text.replace('\r', '<br/>') text = html_keep_url(text) idx = 0 final = '<p>' br_tags = re.compile('(([<]\\s*[bB][rR]\\s*\\/?[>]\\s*){2,})') for item in re.finditer(br_tags, text): final += (text[idx:item.start()] + '</p><p>') idx = item.end() final += (text[idx:] + '</p>') if container_tag: final = ('<%s>%s</%s>' % (container_tag, final, container_tag)) return ustr(final)
[ "def", "plaintext2html", "(", "text", ",", "container_tag", "=", "False", ")", ":", "text", "=", "cgi", ".", "escape", "(", "ustr", "(", "text", ")", ")", "text", "=", "text", ".", "replace", "(", "'\\n'", ",", "'<br/>'", ")", "text", "=", "text", ...
convert plaintext into html .
train
false
46,133
def UpdateResources(dstpath, data, type_, names=None, languages=None): res = GetResources(dstpath, [type_], names, languages) if ((not (type_ in res)) and (type_ != '*')): res[type_] = {} if names: for name in names: if ((not (name in res[type_])) and (name != '*')): res[type_][name] = [] if languages: for language in languages: if ((not (language in res[type_][name])) and (language != '*')): res[type_][name].append(language) hdst = win32api.BeginUpdateResource(dstpath, 0) for type_ in res: for name in res[type_]: for language in res[type_][name]: logger.info('Updating resource type %s name %s language %s', type_, name, language) win32api.UpdateResource(hdst, type_, name, data.encode('UTF-8'), language) win32api.EndUpdateResource(hdst, 0)
[ "def", "UpdateResources", "(", "dstpath", ",", "data", ",", "type_", ",", "names", "=", "None", ",", "languages", "=", "None", ")", ":", "res", "=", "GetResources", "(", "dstpath", ",", "[", "type_", "]", ",", "names", ",", "languages", ")", "if", "(...
update or add resource data in dll/exe file dstpath .
train
true
46,134
def actions_get(context, instance_uuid): actions = model_query(context, models.InstanceAction).filter_by(instance_uuid=instance_uuid).order_by(desc('created_at')).all() return actions
[ "def", "actions_get", "(", "context", ",", "instance_uuid", ")", ":", "actions", "=", "model_query", "(", "context", ",", "models", ".", "InstanceAction", ")", ".", "filter_by", "(", "instance_uuid", "=", "instance_uuid", ")", ".", "order_by", "(", "desc", "...
get all instance actions for the provided instance .
train
false
46,135
def update_vpnservice(vpnservice, desc, profile=None): conn = _auth(profile) return conn.update_vpnservice(vpnservice, desc)
[ "def", "update_vpnservice", "(", "vpnservice", ",", "desc", ",", "profile", "=", "None", ")", ":", "conn", "=", "_auth", "(", "profile", ")", "return", "conn", ".", "update_vpnservice", "(", "vpnservice", ",", "desc", ")" ]
updates a vpn service cli example: .
train
true
46,136
def get_context_from_an_zip(app_dic, man_data_dic, man_an_dic, code_an_dic): try: context = {'title': 'Static Analysis', 'name': app_dic['app_name'], 'size': app_dic['size'], 'md5': app_dic['md5'], 'sha1': app_dic['sha1'], 'sha256': app_dic['sha256'], 'packagename': man_data_dic['packagename'], 'mainactivity': man_data_dic['mainactivity'], 'targetsdk': man_data_dic['target_sdk'], 'maxsdk': man_data_dic['max_sdk'], 'minsdk': man_data_dic['min_sdk'], 'androvername': man_data_dic['androvername'], 'androver': man_data_dic['androver'], 'manifest': man_an_dic['manifest_anal'], 'permissions': man_an_dic['permissons'], 'files': app_dic['files'], 'certz': app_dic['certz'], 'activities': man_data_dic['activities'], 'receivers': man_data_dic['receivers'], 'providers': man_data_dic['providers'], 'services': man_data_dic['services'], 'libraries': man_data_dic['libraries'], 'browsable_activities': man_an_dic['browsable_activities'], 'act_count': man_an_dic['cnt_act'], 'prov_count': man_an_dic['cnt_pro'], 'serv_count': man_an_dic['cnt_ser'], 'bro_count': man_an_dic['cnt_bro'], 'native': code_an_dic['native'], 'dynamic': code_an_dic['dynamic'], 'reflection': code_an_dic['reflect'], 'crypto': code_an_dic['crypto'], 'obfus': code_an_dic['obfus'], 'api': code_an_dic['api'], 'dang': code_an_dic['dang'], 'urls': code_an_dic['urls'], 'domains': code_an_dic['domains'], 'emails': code_an_dic['emails'], 'mani': app_dic['mani'], 'e_act': man_an_dic['exported_cnt']['act'], 'e_ser': man_an_dic['exported_cnt']['ser'], 'e_bro': man_an_dic['exported_cnt']['bro'], 'e_cnt': man_an_dic['exported_cnt']['cnt']} return context except: PrintException('[ERROR] Rendering to Template')
[ "def", "get_context_from_an_zip", "(", "app_dic", ",", "man_data_dic", ",", "man_an_dic", ",", "code_an_dic", ")", ":", "try", ":", "context", "=", "{", "'title'", ":", "'Static Analysis'", ",", "'name'", ":", "app_dic", "[", "'app_name'", "]", ",", "'size'", ...
get the context for the website .
train
false
46,137
def autoremove(jail=None, chroot=None, root=None, dryrun=False): opts = '' if dryrun: opts += 'n' else: opts += 'y' cmd = _pkg(jail, chroot, root) cmd.append('autoremove') if opts: cmd.append(('-' + opts)) return __salt__['cmd.run'](cmd, output_loglevel='trace', python_shell=False)
[ "def", "autoremove", "(", "jail", "=", "None", ",", "chroot", "=", "None", ",", "root", "=", "None", ",", "dryrun", "=", "False", ")", ":", "opts", "=", "''", "if", "dryrun", ":", "opts", "+=", "'n'", "else", ":", "opts", "+=", "'y'", "cmd", "=",...
delete packages which were automatically installed as dependencies and are not required anymore .
train
true
46,140
def _get_xmlsec_cryptobackend(path=None, search_paths=None, debug=False): if (path is None): path = get_xmlsec_binary(paths=search_paths) return CryptoBackendXmlSec1(path, debug=debug)
[ "def", "_get_xmlsec_cryptobackend", "(", "path", "=", "None", ",", "search_paths", "=", "None", ",", "debug", "=", "False", ")", ":", "if", "(", "path", "is", "None", ")", ":", "path", "=", "get_xmlsec_binary", "(", "paths", "=", "search_paths", ")", "re...
initialize a cryptobackendxmlsec1 crypto backend .
train
false
46,142
def _set_spine_position(spine, position): axis = spine.axis if (axis is not None): cla = axis.cla axis.cla = axis.reset_ticks spine.set_position(position) if (axis is not None): axis.cla = cla
[ "def", "_set_spine_position", "(", "spine", ",", "position", ")", ":", "axis", "=", "spine", ".", "axis", "if", "(", "axis", "is", "not", "None", ")", ":", "cla", "=", "axis", ".", "cla", "axis", ".", "cla", "=", "axis", ".", "reset_ticks", "spine", ...
set the spines position without resetting an associated axis .
train
false
46,143
def _csf_to_list(option): result = [] line = get_option(option) if line: csv = line.split('=')[1].replace(' ', '').replace('"', '') result = csv.split(',') return result
[ "def", "_csf_to_list", "(", "option", ")", ":", "result", "=", "[", "]", "line", "=", "get_option", "(", "option", ")", "if", "line", ":", "csv", "=", "line", ".", "split", "(", "'='", ")", "[", "1", "]", ".", "replace", "(", "' '", ",", "''", ...
extract comma-separated values from a csf .
train
true
46,144
def _api_queue_rating(output, value, kwargs): vote_map = {'up': Rating.VOTE_UP, 'down': Rating.VOTE_DOWN} flag_map = {'spam': Rating.FLAG_SPAM, 'encrypted': Rating.FLAG_ENCRYPTED, 'expired': Rating.FLAG_EXPIRED, 'other': Rating.FLAG_OTHER, 'comment': Rating.FLAG_COMMENT} content_type = kwargs.get('type') setting = kwargs.get('setting') if value: try: video = audio = vote = flag = None if ((content_type == 'video') and (setting != '-')): video = setting if ((content_type == 'audio') and (setting != '-')): audio = setting if (content_type == 'vote'): vote = vote_map[setting] if (content_type == 'flag'): flag = flag_map[setting] if cfg.rating_enable(): Rating.do.update_user_rating(value, video, audio, vote, flag, kwargs.get('detail')) return report(output) except: return report(output, _MSG_BAD_SERVER_PARMS) else: return report(output, _MSG_NO_VALUE)
[ "def", "_api_queue_rating", "(", "output", ",", "value", ",", "kwargs", ")", ":", "vote_map", "=", "{", "'up'", ":", "Rating", ".", "VOTE_UP", ",", "'down'", ":", "Rating", ".", "VOTE_DOWN", "}", "flag_map", "=", "{", "'spam'", ":", "Rating", ".", "FLA...
api: accepts output .
train
false
46,145
def get_console_output(name=None, location=None, instance_id=None, call=None, kwargs=None): if (call != 'action'): raise SaltCloudSystemExit('The get_console_output action must be called with -a or --action.') if (location is None): location = get_location() if (not instance_id): instance_id = _get_node(name)['instanceId'] if (kwargs is None): kwargs = {} if (instance_id is None): if ('instance_id' in kwargs): instance_id = kwargs['instance_id'] del kwargs['instance_id'] params = {'Action': 'GetConsoleOutput', 'InstanceId': instance_id} ret = {} data = aws.query(params, return_root=True, location=location, provider=get_provider(), opts=__opts__, sigver='4') for item in data: if (next(six.iterkeys(item)) == 'output'): ret['output_decoded'] = binascii.a2b_base64(next(six.itervalues(item))) else: ret[next(six.iterkeys(item))] = next(six.itervalues(item)) return ret
[ "def", "get_console_output", "(", "name", "=", "None", ",", "location", "=", "None", ",", "instance_id", "=", "None", ",", "call", "=", "None", ",", "kwargs", "=", "None", ")", ":", "if", "(", "call", "!=", "'action'", ")", ":", "raise", "SaltCloudSyst...
show the console output from the instance .
train
true
46,146
def batch_matmul(a, b, transa=False, transb=False): return BatchMatMul(transa=transa, transb=transb)(a, b)
[ "def", "batch_matmul", "(", "a", ",", "b", ",", "transa", "=", "False", ",", "transb", "=", "False", ")", ":", "return", "BatchMatMul", "(", "transa", "=", "transa", ",", "transb", "=", "transb", ")", "(", "a", ",", "b", ")" ]
computes the batch matrix multiplications of two sets of arrays .
train
false
46,147
def bringToFront(title): if (sys.platform != 'win32'): return import ctypes find_window = ctypes.windll.user32.FindWindowA set_foreground_window = ctypes.windll.user32.SetForegroundWindow hwnd = find_window(None, title) if (hwnd == 0): raise Exception(('There is no window titled: "%s"' % title)) set_foreground_window(hwnd)
[ "def", "bringToFront", "(", "title", ")", ":", "if", "(", "sys", ".", "platform", "!=", "'win32'", ")", ":", "return", "import", "ctypes", "find_window", "=", "ctypes", ".", "windll", ".", "user32", ".", "FindWindowA", "set_foreground_window", "=", "ctypes",...
bring a top-level window with a given title to the front on windows .
train
false
46,150
def pillar_format(ret, keys, value): if (value is None): return ret pillar_value = yaml.load(value) keyvalue = keys.pop() pil = {keyvalue: pillar_value} keys.reverse() for k in keys: pil = {k: pil} return dict_merge(ret, pil)
[ "def", "pillar_format", "(", "ret", ",", "keys", ",", "value", ")", ":", "if", "(", "value", "is", "None", ")", ":", "return", "ret", "pillar_value", "=", "yaml", ".", "load", "(", "value", ")", "keyvalue", "=", "keys", ".", "pop", "(", ")", "pil",...
perform data formatting to be used as pillar data and merge it with the current pillar data .
train
false
46,151
def find_wheels(projects, search_dirs): wheels = [] for project in projects: for dirname in search_dirs: files = glob.glob(os.path.join(dirname, (project + '-*.whl'))) if files: wheels.append(os.path.abspath(files[0])) break else: logger.fatal(('Cannot find a wheel for %s' % (project,))) return wheels
[ "def", "find_wheels", "(", "projects", ",", "search_dirs", ")", ":", "wheels", "=", "[", "]", "for", "project", "in", "projects", ":", "for", "dirname", "in", "search_dirs", ":", "files", "=", "glob", ".", "glob", "(", "os", ".", "path", ".", "join", ...
find wheels from which we can import projects .
train
true
46,154
def scenario_rheader(r, tabs=[]): rheader = None if (r.representation == 'html'): if (r.name == 'scenario'): T = current.T settings = current.deployment_settings tabs = [(T('Scenario Details'), None)] if settings.has_module('hrm'): tabs.append((T('Human Resources'), 'human_resource')) if settings.has_module('asset'): tabs.append((T('Assets'), 'asset')) tabs.append((T('Organizations'), 'organisation')) tabs.append((T('Facilities'), 'site')) if settings.has_module('project'): tabs.append((T('Tasks'), 'task')) tabs.append((T('Map Profile'), 'config')) rheader_tabs = s3_rheader_tabs(r, tabs) record = r.record if record: rheader = DIV(TABLE(TR(TH(('%s: ' % T('Name'))), record.name), TR(TH(('%s: ' % T('Comments'))), record.comments)), rheader_tabs) return rheader
[ "def", "scenario_rheader", "(", "r", ",", "tabs", "=", "[", "]", ")", ":", "rheader", "=", "None", "if", "(", "r", ".", "representation", "==", "'html'", ")", ":", "if", "(", "r", ".", "name", "==", "'scenario'", ")", ":", "T", "=", "current", "....
resource headers for component views .
train
false
46,155
def _parse_image_multilogs_string(ret): (image_logs, infos) = ([], None) if (ret and ret.strip().startswith('{') and ret.strip().endswith('}')): pushd = 0 buf = '' for char in ret: buf += char if (char == '{'): pushd += 1 if (char == '}'): pushd -= 1 if (pushd == 0): try: buf = json.loads(buf) except Exception: pass else: image_logs.append(buf) buf = '' image_logs.reverse() valid_states = ['Download complete', 'Already exists'] for ilog in image_logs: if isinstance(ilog, dict): if ((ilog.get('status') in valid_states) and ilog.get('id')): infos = _get_image_infos(ilog['id']) break return (image_logs, infos)
[ "def", "_parse_image_multilogs_string", "(", "ret", ")", ":", "(", "image_logs", ",", "infos", ")", "=", "(", "[", "]", ",", "None", ")", "if", "(", "ret", "and", "ret", ".", "strip", "(", ")", ".", "startswith", "(", "'{'", ")", "and", "ret", ".",...
parse image log strings into grokable data .
train
false
46,156
@log def reset_logpath(logpath): global logger if (not logpath): return if os.path.isfile(def_logpath): if (os.path.getsize(def_logpath) == 0): os.remove(def_logpath) handlers = logger.handlers for handler in handlers: logger.removeHandler(handler) handler = logging.FileHandler(logpath) handler.setFormatter(formatter) logger.addHandler(handler)
[ "@", "log", "def", "reset_logpath", "(", "logpath", ")", ":", "global", "logger", "if", "(", "not", "logpath", ")", ":", "return", "if", "os", ".", "path", ".", "isfile", "(", "def_logpath", ")", ":", "if", "(", "os", ".", "path", ".", "getsize", "...
reset logpath to path from command line .
train
false
46,157
def create_and_copy(test, fixture): pool = fixture(test) service = service_for_pool(test, pool) volume = service.get(MY_VOLUME) pool2 = fixture(test) service2 = service_for_pool(test, pool2) volume2 = Volume(node_id=service.node_id, name=MY_VOLUME, service=service2) d = pool.create(volume) def created_filesystem(filesystem): path = filesystem.get_path() path.child('file').setContent('some bytes') path.child('directory').makedirs() copying = copy(volume, volume2) copying.addCallback((lambda ignored: CopyVolumes(from_volume=volume, to_volume=volume2))) return copying d.addCallback(created_filesystem) return d
[ "def", "create_and_copy", "(", "test", ",", "fixture", ")", ":", "pool", "=", "fixture", "(", "test", ")", "service", "=", "service_for_pool", "(", "test", ",", "pool", ")", "volume", "=", "service", ".", "get", "(", "MY_VOLUME", ")", "pool2", "=", "fi...
create a volumes filesystem on one pool .
train
false
46,158
def do_pdp_descriptor(conf, cert): pdp = md.PDPDescriptor() pdp.protocol_support_enumeration = samlp.NAMESPACE endps = conf.getattr('endpoints', 'pdp') if endps: for (endpoint, instlist) in do_endpoints(endps, ENDPOINTS['pdp']).items(): setattr(pdp, endpoint, instlist) _do_nameid_format(pdp, conf, 'pdp') if cert: pdp.key_descriptor = do_key_descriptor(cert) return pdp
[ "def", "do_pdp_descriptor", "(", "conf", ",", "cert", ")", ":", "pdp", "=", "md", ".", "PDPDescriptor", "(", ")", "pdp", ".", "protocol_support_enumeration", "=", "samlp", ".", "NAMESPACE", "endps", "=", "conf", ".", "getattr", "(", "'endpoints'", ",", "'p...
create a policy decision point descriptor .
train
true
46,161
def sort_and_count(a): if (len(a) == 1): return (a, 0) (b, x) = sort_and_count(a[:(len(a) / 2)]) (c, y) = sort_and_count(a[(len(a) / 2):]) (d, z) = merge_and_count_inv(b, c) return (d, ((x + y) + z))
[ "def", "sort_and_count", "(", "a", ")", ":", "if", "(", "len", "(", "a", ")", "==", "1", ")", ":", "return", "(", "a", ",", "0", ")", "(", "b", ",", "x", ")", "=", "sort_and_count", "(", "a", "[", ":", "(", "len", "(", "a", ")", "/", "2",...
counts the number of inversions in an array and returns the count and the sorted array in o time .
train
false
46,162
def importanceMixing(oldpoints, oldpdf, newpdf, newdistr, forcedRefresh=0.01): reuseindices = [] batch = len(oldpoints) for (i, sample) in enumerate(oldpoints): r = uniform(0, 1) if (r < (((1 - forcedRefresh) * newpdf(sample)) / oldpdf(sample))): reuseindices.append(i) if ((batch - len(reuseindices)) <= max(1, (batch * forcedRefresh))): break newpoints = [] while ((len(reuseindices) + len(newpoints)) < batch): r = uniform(0, 1) sample = newdistr() if (r < forcedRefresh): newpoints.append(sample) elif (r < (1 - (oldpdf(sample) / newpdf(sample)))): newpoints.append(sample) return (reuseindices, newpoints)
[ "def", "importanceMixing", "(", "oldpoints", ",", "oldpdf", ",", "newpdf", ",", "newdistr", ",", "forcedRefresh", "=", "0.01", ")", ":", "reuseindices", "=", "[", "]", "batch", "=", "len", "(", "oldpoints", ")", "for", "(", "i", ",", "sample", ")", "in...
implements importance mixing .
train
false
46,164
def _serialize_discussion_entities(request, context, discussion_entities, requested_fields, discussion_entity_type): results = [] usernames = [] include_profile_image = _include_profile_image(requested_fields) for entity in discussion_entities: if (discussion_entity_type == DiscussionEntity.thread): serialized_entity = ThreadSerializer(entity, context=context).data elif (discussion_entity_type == DiscussionEntity.comment): serialized_entity = CommentSerializer(entity, context=context).data results.append(serialized_entity) if include_profile_image: if (serialized_entity['author'] and (serialized_entity['author'] not in usernames)): usernames.append(serialized_entity['author']) if (('endorsed' in serialized_entity) and serialized_entity['endorsed'] and ('endorsed_by' in serialized_entity) and serialized_entity['endorsed_by'] and (serialized_entity['endorsed_by'] not in usernames)): usernames.append(serialized_entity['endorsed_by']) results = _add_additional_response_fields(request, results, usernames, discussion_entity_type, include_profile_image) return results
[ "def", "_serialize_discussion_entities", "(", "request", ",", "context", ",", "discussion_entities", ",", "requested_fields", ",", "discussion_entity_type", ")", ":", "results", "=", "[", "]", "usernames", "=", "[", "]", "include_profile_image", "=", "_include_profile...
it serializes discussion entity and add additional data if requested .
train
false
46,165
def classdot2func(path): func = ('CLASS_L' + path.replace('.', '_').replace('$', '_')) return func
[ "def", "classdot2func", "(", "path", ")", ":", "func", "=", "(", "'CLASS_L'", "+", "path", ".", "replace", "(", "'.'", ",", "'_'", ")", ".", "replace", "(", "'$'", ",", "'_'", ")", ")", "return", "func" ]
convert a path such as android .
train
false
46,167
def get_egg_info(prefix, all_pkgs=False): installed_pkgs = linked_data(prefix) sp_dir = get_site_packages_dir(installed_pkgs) if (sp_dir is None): return set() conda_files = set() for info in itervalues(installed_pkgs): conda_files.update(info.get(u'files', [])) res = set() for path in get_egg_info_files(join(prefix, sp_dir)): f = rel_path(prefix, path) if (all_pkgs or (f not in conda_files)): try: dist = parse_egg_info(path) except UnicodeDecodeError: dist = None if dist: res.add(Dist(dist)) return res
[ "def", "get_egg_info", "(", "prefix", ",", "all_pkgs", "=", "False", ")", ":", "installed_pkgs", "=", "linked_data", "(", "prefix", ")", "sp_dir", "=", "get_site_packages_dir", "(", "installed_pkgs", ")", "if", "(", "sp_dir", "is", "None", ")", ":", "return"...
return a set of canonical names of all python packages .
train
false
46,168
@cache(request.env.path_info, time_expire=5, cache_model=cache.disk) def cache_controller_on_disk(): t = time.ctime() return dict(time=t, link=A('click to reload', _href=URL(r=request)))
[ "@", "cache", "(", "request", ".", "env", ".", "path_info", ",", "time_expire", "=", "5", ",", "cache_model", "=", "cache", ".", "disk", ")", "def", "cache_controller_on_disk", "(", ")", ":", "t", "=", "time", ".", "ctime", "(", ")", "return", "dict", ...
cache the output of the controller on disk .
train
false
46,170
def get_headers_from_environ(environ): headers = wsgiref.headers.Headers([]) for (header, value) in environ.iteritems(): if header.startswith('HTTP_'): headers[header[5:].replace('_', '-')] = value if ('CONTENT_TYPE' in environ): headers['CONTENT-TYPE'] = environ['CONTENT_TYPE'] return headers
[ "def", "get_headers_from_environ", "(", "environ", ")", ":", "headers", "=", "wsgiref", ".", "headers", ".", "Headers", "(", "[", "]", ")", "for", "(", "header", ",", "value", ")", "in", "environ", ".", "iteritems", "(", ")", ":", "if", "header", ".", ...
get a wsgiref .
train
true
46,171
def _trace_symbanded(a, b, lower=0): if lower: t = _zero_triband((a * b), lower=1) return (t[0].sum() + (2 * t[1:].sum())) else: t = _zero_triband((a * b), lower=0) return (t[(-1)].sum() + (2 * t[:(-1)].sum()))
[ "def", "_trace_symbanded", "(", "a", ",", "b", ",", "lower", "=", "0", ")", ":", "if", "lower", ":", "t", "=", "_zero_triband", "(", "(", "a", "*", "b", ")", ",", "lower", "=", "1", ")", "return", "(", "t", "[", "0", "]", ".", "sum", "(", "...
compute the trace for two upper or banded real symmetric matrices stored either in either upper or lower form .
train
false
46,173
def get_sme_classified(): df = fd.get_stock_basics() df.reset_index(level=0, inplace=True) df = df[ct.FOR_CLASSIFY_B_COLS] df = df.ix[(df.code.str[0:3] == '002')] df = df.sort('code').reset_index(drop=True) return df
[ "def", "get_sme_classified", "(", ")", ":", "df", "=", "fd", ".", "get_stock_basics", "(", ")", "df", ".", "reset_index", "(", "level", "=", "0", ",", "inplace", "=", "True", ")", "df", "=", "df", "[", "ct", ".", "FOR_CLASSIFY_B_COLS", "]", "df", "="...
return dataframe code :股票代码 name :股票名称 .
train
false
46,174
def thread_last(val, *forms): def evalform_back(val, form): if callable(form): return form(val) if isinstance(form, tuple): (func, args) = (form[0], form[1:]) args = (args + (val,)) return func(*args) return reduce(evalform_back, forms, val)
[ "def", "thread_last", "(", "val", ",", "*", "forms", ")", ":", "def", "evalform_back", "(", "val", ",", "form", ")", ":", "if", "callable", "(", "form", ")", ":", "return", "form", "(", "val", ")", "if", "isinstance", "(", "form", ",", "tuple", ")"...
thread value through a sequence of functions/forms .
train
false
46,175
def get_python_module_names(file_list, file_suffix='.py'): module_names = [m[:m.rfind(file_suffix)] for m in file_list if m.endswith(file_suffix)] return module_names
[ "def", "get_python_module_names", "(", "file_list", ",", "file_suffix", "=", "'.py'", ")", ":", "module_names", "=", "[", "m", "[", ":", "m", ".", "rfind", "(", "file_suffix", ")", "]", "for", "m", "in", "file_list", "if", "m", ".", "endswith", "(", "f...
return a list of module names from a filename list .
train
false
46,176
def startup_modpython(req=None): if (cherrypy.engine.state == cherrypy._cpengine.STOPPED): if req: if ('nullreq' in req.get_options()): cherrypy.engine.request_class = NullRequest cherrypy.engine.response_class = NullResponse ab_opt = req.get_options().get('ab', '') if ab_opt: global AB_PATH AB_PATH = ab_opt cherrypy.engine.start() if (cherrypy.engine.state == cherrypy._cpengine.STARTING): cherrypy.engine.wait() return 0
[ "def", "startup_modpython", "(", "req", "=", "None", ")", ":", "if", "(", "cherrypy", ".", "engine", ".", "state", "==", "cherrypy", ".", "_cpengine", ".", "STOPPED", ")", ":", "if", "req", ":", "if", "(", "'nullreq'", "in", "req", ".", "get_options", ...
start the cherrypy app server in serverless mode .
train
false
46,177
def _pool_hash_and_name(bootstrap_actions): for bootstrap_action in bootstrap_actions: if (bootstrap_action.name == 'master'): args = [arg.value for arg in bootstrap_action.args] if ((len(args) == 2) and args[0].startswith('pool-')): return (args[0][5:], args[1]) return (None, None)
[ "def", "_pool_hash_and_name", "(", "bootstrap_actions", ")", ":", "for", "bootstrap_action", "in", "bootstrap_actions", ":", "if", "(", "bootstrap_action", ".", "name", "==", "'master'", ")", ":", "args", "=", "[", "arg", ".", "value", "for", "arg", "in", "b...
return the hash and pool name for the given cluster .
train
false
46,178
def embeddings_with_init(vocab_size, embedding_dim, name): return tf.get_variable(name=name, shape=[vocab_size, embedding_dim], initializer=tf.random_normal_initializer(stddev=math.sqrt((1.0 / embedding_dim))))
[ "def", "embeddings_with_init", "(", "vocab_size", ",", "embedding_dim", ",", "name", ")", ":", "return", "tf", ".", "get_variable", "(", "name", "=", "name", ",", "shape", "=", "[", "vocab_size", ",", "embedding_dim", "]", ",", "initializer", "=", "tf", "....
creates and initializes the embedding tensors .
train
false
46,180
def new_datetime(d): kw = [d.year, d.month, d.day] if isinstance(d, real_datetime): kw.extend([d.hour, d.minute, d.second, d.microsecond, d.tzinfo]) return datetime(*kw)
[ "def", "new_datetime", "(", "d", ")", ":", "kw", "=", "[", "d", ".", "year", ",", "d", ".", "month", ",", "d", ".", "day", "]", "if", "isinstance", "(", "d", ",", "real_datetime", ")", ":", "kw", ".", "extend", "(", "[", "d", ".", "hour", ","...
generate a safe datetime from a datetime .
train
true
46,182
def port_remove(br, port, if_exists=True): param_if_exists = _param_if_exists(if_exists) if (port and (not br)): cmd = 'ovs-vsctl {1}del-port {0}'.format(port, param_if_exists) else: cmd = 'ovs-vsctl {2}del-port {0} {1}'.format(br, port, param_if_exists) result = __salt__['cmd.run_all'](cmd) retcode = result['retcode'] return _retcode_to_bool(retcode)
[ "def", "port_remove", "(", "br", ",", "port", ",", "if_exists", "=", "True", ")", ":", "param_if_exists", "=", "_param_if_exists", "(", "if_exists", ")", "if", "(", "port", "and", "(", "not", "br", ")", ")", ":", "cmd", "=", "'ovs-vsctl {1}del-port {0}'", ...
deletes port .
train
true
46,185
def execute_file(filename, **kw): if (filename == '-'): inp = sys.stdin else: inp = open(filename) kw['source'] = filename _execute_script(inp, **kw)
[ "def", "execute_file", "(", "filename", ",", "**", "kw", ")", ":", "if", "(", "filename", "==", "'-'", ")", ":", "inp", "=", "sys", ".", "stdin", "else", ":", "inp", "=", "open", "(", "filename", ")", "kw", "[", "'source'", "]", "=", "filename", ...
execute commands from a file .
train
false
46,186
def mvsk2mnc(args): (mc, mc2, skew, kurt) = args mnc = mc mnc2 = (mc2 + (mc * mc)) mc3 = (skew * (mc2 ** 1.5)) mnc3 = ((mc3 + ((3 * mc) * mc2)) + (mc ** 3)) mc4 = ((kurt + 3.0) * (mc2 ** 2.0)) mnc4 = (((mc4 + ((4 * mc) * mc3)) + (((6 * mc) * mc) * mc2)) + (mc ** 4)) return (mnc, mnc2, mnc3, mnc4)
[ "def", "mvsk2mnc", "(", "args", ")", ":", "(", "mc", ",", "mc2", ",", "skew", ",", "kurt", ")", "=", "args", "mnc", "=", "mc", "mnc2", "=", "(", "mc2", "+", "(", "mc", "*", "mc", ")", ")", "mc3", "=", "(", "skew", "*", "(", "mc2", "**", "...
convert mean .
train
false
46,188
def get_pywin32_module_file_attribute(module_name): statement = "\n from PyInstaller.utils.win32 import winutils\n module = winutils.import_pywin32_module('%s')\n print(module.__file__)\n " return exec_statement((statement % module_name))
[ "def", "get_pywin32_module_file_attribute", "(", "module_name", ")", ":", "statement", "=", "\"\\n from PyInstaller.utils.win32 import winutils\\n module = winutils.import_pywin32_module('%s')\\n print(module.__file__)\\n \"", "return", "exec_statement", "(", "(", "...
get the absolute path of the pywin32 dll specific to the pywin32 module with the passed name .
train
false
46,190
def is_feature_enabled(course): return (settings.FEATURES.get('ENABLE_TEAMS', False) and course.teams_enabled)
[ "def", "is_feature_enabled", "(", "course", ")", ":", "return", "(", "settings", ".", "FEATURES", ".", "get", "(", "'ENABLE_TEAMS'", ",", "False", ")", "and", "course", ".", "teams_enabled", ")" ]
returns whether the feature flag to enable microsite has been set .
train
false
46,192
def genrange(*a): (start, stop, step) = handleargs(a) value = start while (value < stop): (yield value) value += step
[ "def", "genrange", "(", "*", "a", ")", ":", "(", "start", ",", "stop", ",", "step", ")", "=", "handleargs", "(", "a", ")", "value", "=", "start", "while", "(", "value", "<", "stop", ")", ":", "(", "yield", "value", ")", "value", "+=", "step" ]
function to implement range as a generator .
train
false
46,193
def twoform_to_matrix(expr): if ((covariant_order(expr) != 2) or contravariant_order(expr)): raise ValueError('The input expression is not a two-form.') coord_sys = expr.atoms(CoordSystem) if (len(coord_sys) != 1): raise ValueError('The input expression concerns more than one coordinate systems, hence there is no unambiguous way to choose a coordinate system for the matrix.') coord_sys = coord_sys.pop() vectors = coord_sys.base_vectors() expr = expr.expand() matrix_content = [[expr.rcall(v1, v2) for v1 in vectors] for v2 in vectors] return Matrix(matrix_content)
[ "def", "twoform_to_matrix", "(", "expr", ")", ":", "if", "(", "(", "covariant_order", "(", "expr", ")", "!=", "2", ")", "or", "contravariant_order", "(", "expr", ")", ")", ":", "raise", "ValueError", "(", "'The input expression is not a two-form.'", ")", "coor...
return the matrix representing the twoform .
train
false
46,194
def calendar_by_periods(request, calendar_slug, periods=None, template_name='schedule/calendar_by_period.html'): calendar = get_object_or_404(Calendar, slug=calendar_slug) date = coerce_date_dict(request.GET) if date: try: date = datetime.datetime(**date) except ValueError: raise Http404 else: date = datetime.datetime.now() event_list = GET_EVENTS_FUNC(request, calendar) period_objects = dict([(period.__name__.lower(), period(event_list, date)) for period in periods]) return render_to_response(template_name, {'date': date, 'periods': period_objects, 'calendar': calendar, 'weekday_names': weekday_names, 'here': quote(request.get_full_path())}, context_instance=RequestContext(request))
[ "def", "calendar_by_periods", "(", "request", ",", "calendar_slug", ",", "periods", "=", "None", ",", "template_name", "=", "'schedule/calendar_by_period.html'", ")", ":", "calendar", "=", "get_object_or_404", "(", "Calendar", ",", "slug", "=", "calendar_slug", ")",...
this view is for getting a calendar .
train
false
46,196
def with_metaclass(meta, base=object): return meta('NewBase', (base,), {})
[ "def", "with_metaclass", "(", "meta", ",", "base", "=", "object", ")", ":", "return", "meta", "(", "'NewBase'", ",", "(", "base", ",", ")", ",", "{", "}", ")" ]
class decorator to set metaclass .
train
false
46,197
@app.route('/<username>/unfollow') def unfollow_user(username): if (not g.user): abort(401) whom_id = get_user_id(username) if (whom_id is None): abort(404) db = get_db() db.execute('delete from follower where who_id=? and whom_id=?', [session['user_id'], whom_id]) db.commit() flash(('You are no longer following "%s"' % username)) return redirect(url_for('user_timeline', username=username))
[ "@", "app", ".", "route", "(", "'/<username>/unfollow'", ")", "def", "unfollow_user", "(", "username", ")", ":", "if", "(", "not", "g", ".", "user", ")", ":", "abort", "(", "401", ")", "whom_id", "=", "get_user_id", "(", "username", ")", "if", "(", "...
removes the current user as follower of the given user .
train
false
46,199
@bdd.then(bdd.parsers.re('the (?P<category>error|message|warning) "(?P<message>.*)" should be shown')) def expect_message(quteproc, httpbin, category, message): category_to_loglevel = {'message': logging.INFO, 'error': logging.ERROR, 'warning': logging.WARNING} message = message.replace('(port)', str(httpbin.port)) quteproc.mark_expected(category='message', loglevel=category_to_loglevel[category], message=message)
[ "@", "bdd", ".", "then", "(", "bdd", ".", "parsers", ".", "re", "(", "'the (?P<category>error|message|warning) \"(?P<message>.*)\" should be shown'", ")", ")", "def", "expect_message", "(", "quteproc", ",", "httpbin", ",", "category", ",", "message", ")", ":", "ca...
expect the given message in the qutebrowser log .
train
false
46,200
def test_monitorGetGammaAndGrid(): newGrid = numpy.array([[0, 150, 2.0], [0, 30, 2.0], [0, 110, 2.0], [0, 10, 2.0]]) mon = monitors.Monitor('test') mon.setGammaGrid(newGrid) mon.setGamma([3, 3, 3]) win = visual.Window([100, 100], monitor=mon, autoLog=False) assert numpy.alltrue((win.gamma == numpy.array([2.0, 2.0, 2.0])))
[ "def", "test_monitorGetGammaAndGrid", "(", ")", ":", "newGrid", "=", "numpy", ".", "array", "(", "[", "[", "0", ",", "150", ",", "2.0", "]", ",", "[", "0", ",", "30", ",", "2.0", "]", ",", "[", "0", ",", "110", ",", "2.0", "]", ",", "[", "0",...
test what happens if gamma and gammagrid are both present .
train
false
46,201
def _LSAGuessPayloadClass(p, **kargs): cls = conf.raw_layer if (len(p) >= 4): typ = struct.unpack('!B', p[3])[0] clsname = _OSPF_LSclasses.get(typ, 'Raw') cls = globals()[clsname] return cls(p, **kargs)
[ "def", "_LSAGuessPayloadClass", "(", "p", ",", "**", "kargs", ")", ":", "cls", "=", "conf", ".", "raw_layer", "if", "(", "len", "(", "p", ")", ">=", "4", ")", ":", "typ", "=", "struct", ".", "unpack", "(", "'!B'", ",", "p", "[", "3", "]", ")", ...
guess the correct lsa class for a given payload .
train
true
46,202
def unlink_paths_older_than(filepaths, mtime): for fpath in filepaths: try: if (os.path.getmtime(fpath) < mtime): os.unlink(fpath) except OSError: pass
[ "def", "unlink_paths_older_than", "(", "filepaths", ",", "mtime", ")", ":", "for", "fpath", "in", "filepaths", ":", "try", ":", "if", "(", "os", ".", "path", ".", "getmtime", "(", "fpath", ")", "<", "mtime", ")", ":", "os", ".", "unlink", "(", "fpath...
remove any files from the given list that that were last modified before mtime .
train
false
46,203
def load_doctype_module(doctype, module=None, prefix=u'', suffix=u''): if (not module): module = get_doctype_module(doctype) app = get_module_app(module) key = (app, doctype, prefix, suffix) module_name = get_module_name(doctype, module, prefix, suffix) try: if (key not in doctype_python_modules): doctype_python_modules[key] = frappe.get_module(module_name) except ImportError: raise ImportError, u'Module import failed for {0} ({1})'.format(doctype, module_name) return doctype_python_modules[key]
[ "def", "load_doctype_module", "(", "doctype", ",", "module", "=", "None", ",", "prefix", "=", "u''", ",", "suffix", "=", "u''", ")", ":", "if", "(", "not", "module", ")", ":", "module", "=", "get_doctype_module", "(", "doctype", ")", "app", "=", "get_m...
returns the module object for given doctype .
train
false
46,204
@socketio.on('disconnect', namespace='/jobs') def on_disconnect_jobs(): pass
[ "@", "socketio", ".", "on", "(", "'disconnect'", ",", "namespace", "=", "'/jobs'", ")", "def", "on_disconnect_jobs", "(", ")", ":", "pass" ]
somebody disconnected from a jobs page .
train
false
46,206
def reload_(name): term(name)
[ "def", "reload_", "(", "name", ")", ":", "term", "(", "name", ")" ]
restart the named service .
train
false
46,207
def wordrelationships(relationshiplist): relationships = etree.fromstring('<Relationships xmlns="http://schemas.openxmlformats.org/package/2006/relationships"></Relationships>') count = 0 for relationship in relationshiplist: rel_elm = makeelement('Relationship', nsprefix=None, attributes={'Id': ('rId' + str((count + 1))), 'Type': relationship[0], 'Target': relationship[1]}) relationships.append(rel_elm) count += 1 return relationships
[ "def", "wordrelationships", "(", "relationshiplist", ")", ":", "relationships", "=", "etree", ".", "fromstring", "(", "'<Relationships xmlns=\"http://schemas.openxmlformats.org/package/2006/relationships\"></Relationships>'", ")", "count", "=", "0", "for", "relationship", "in",...
generate a word relationships file .
train
true
46,212
def is_simple_path(G, nodes): if (len(nodes) == 0): return False if (len(nodes) == 1): return (nodes[0] in G) return ((len(set(nodes)) == len(nodes)) and all(((v in G[u]) for (u, v) in pairwise(nodes))))
[ "def", "is_simple_path", "(", "G", ",", "nodes", ")", ":", "if", "(", "len", "(", "nodes", ")", "==", "0", ")", ":", "return", "False", "if", "(", "len", "(", "nodes", ")", "==", "1", ")", ":", "return", "(", "nodes", "[", "0", "]", "in", "G"...
returns true if and only if the given nodes form a simple path in g .
train
false
46,213
@functools.lru_cache() def xontrib_metadata(): with open(xontribs_json(), 'r') as f: md = json.load(f) return md
[ "@", "functools", ".", "lru_cache", "(", ")", "def", "xontrib_metadata", "(", ")", ":", "with", "open", "(", "xontribs_json", "(", ")", ",", "'r'", ")", "as", "f", ":", "md", "=", "json", ".", "load", "(", "f", ")", "return", "md" ]
loads and returns the xontribs .
train
false
46,217
def get_k_fold_splits(cv): for (k, (train, test)) in enumerate(cv): valid_k = (k + 1) if (valid_k == len(cv)): valid_k = 0 valid = cv[valid_k][1] train = np.setdiff1d(train, valid) (yield (train, valid, test))
[ "def", "get_k_fold_splits", "(", "cv", ")", ":", "for", "(", "k", ",", "(", "train", ",", "test", ")", ")", "in", "enumerate", "(", "cv", ")", ":", "valid_k", "=", "(", "k", "+", "1", ")", "if", "(", "valid_k", "==", "len", "(", "cv", ")", ")...
use the test set from the next fold as the validation set for this fold .
train
false
46,218
def test_keyword_type_construction(): class x(object, ): def __new__(cls, *args, **kwargs): return object.__new__(cls) def __init__(self, *args, **kwargs): for (x, y) in kwargs.iteritems(): setattr(self, x, y) return object.__init__(self) obj = type.__call__(x, *(), **{'abc': 2}) AreEqual(obj.abc, 2) obj = x.__call__(*(), **{'abc': 3}) AreEqual(obj.abc, 3)
[ "def", "test_keyword_type_construction", "(", ")", ":", "class", "x", "(", "object", ",", ")", ":", "def", "__new__", "(", "cls", ",", "*", "args", ",", "**", "kwargs", ")", ":", "return", "object", ".", "__new__", "(", "cls", ")", "def", "__init__", ...
using type .
train
false
46,219
def list_ignored(): cmd = ['softwareupdate', '--list', '--ignore'] out = salt.utils.mac_utils.execute_return_result(cmd) rexp = re.compile('(?m)^ ["]?([^,|\\s].*[^"|\\n|,])[,|"]?') return rexp.findall(out)
[ "def", "list_ignored", "(", ")", ":", "cmd", "=", "[", "'softwareupdate'", ",", "'--list'", ",", "'--ignore'", "]", "out", "=", "salt", ".", "utils", ".", "mac_utils", ".", "execute_return_result", "(", "cmd", ")", "rexp", "=", "re", ".", "compile", "(",...
list all updates that have been ignored .
train
true
46,222
def set_pidlockfile_scenario(testcase, scenario_name, clear_tracker=True): testcase.scenario = testcase.pidlockfile_scenarios[scenario_name] setup_lockfile_method_mocks(testcase, testcase.scenario, 'lockfile.LinkFileLock') testcase.pidlockfile_args = dict(path=testcase.scenario['path']) testcase.test_instance = pidlockfile.PIDLockFile(**testcase.pidlockfile_args) if clear_tracker: testcase.mock_tracker.clear()
[ "def", "set_pidlockfile_scenario", "(", "testcase", ",", "scenario_name", ",", "clear_tracker", "=", "True", ")", ":", "testcase", ".", "scenario", "=", "testcase", ".", "pidlockfile_scenarios", "[", "scenario_name", "]", "setup_lockfile_method_mocks", "(", "testcase"...
set up the test case to the specified scenario .
train
false
46,224
def _date_time_match(cron, **kwargs): return all([((kwargs.get(x) is None) or (cron[x] == str(kwargs[x])) or ((str(kwargs[x]).lower() == 'random') and (cron[x] != '*'))) for x in ('minute', 'hour', 'daymonth', 'month', 'dayweek')])
[ "def", "_date_time_match", "(", "cron", ",", "**", "kwargs", ")", ":", "return", "all", "(", "[", "(", "(", "kwargs", ".", "get", "(", "x", ")", "is", "None", ")", "or", "(", "cron", "[", "x", "]", "==", "str", "(", "kwargs", "[", "x", "]", "...
returns true if the minute .
train
false
46,226
def get_all_phrases(): phrases = [] modules = brain.Brain.get_modules() for module in modules: phrases.extend(get_phrases_from_module(module)) return sorted(list(set(phrases)))
[ "def", "get_all_phrases", "(", ")", ":", "phrases", "=", "[", "]", "modules", "=", "brain", ".", "Brain", ".", "get_modules", "(", ")", "for", "module", "in", "modules", ":", "phrases", ".", "extend", "(", "get_phrases_from_module", "(", "module", ")", "...
gets phrases from all modules .
train
false
46,228
def get_safe_part(key): version = key[0] assert version c_link_key = key[1] for key_element in c_link_key[1:]: if (isinstance(key_element, string_types) and key_element.startswith('md5:')): md5 = key_element[4:] break return (key[0] + (md5,))
[ "def", "get_safe_part", "(", "key", ")", ":", "version", "=", "key", "[", "0", "]", "assert", "version", "c_link_key", "=", "key", "[", "1", "]", "for", "key_element", "in", "c_link_key", "[", "1", ":", "]", ":", "if", "(", "isinstance", "(", "key_el...
return a tuple containing a subset of key .
train
false
46,230
def add_event_source(event_source, lambda_arn, target_function, boto_session, dry=False): (event_source_obj, ctx, funk) = get_event_source(event_source, lambda_arn, target_function, boto_session, dry=False) if (not dry): if (not event_source_obj.status(funk)): event_source_obj.add(funk) if event_source_obj.status(funk): return 'successful' else: return 'failed' else: return 'exists' return 'dryrun'
[ "def", "add_event_source", "(", "event_source", ",", "lambda_arn", ",", "target_function", ",", "boto_session", ",", "dry", "=", "False", ")", ":", "(", "event_source_obj", ",", "ctx", ",", "funk", ")", "=", "get_event_source", "(", "event_source", ",", "lambd...
given an event_source dictionary .
train
true
46,231
def test_get_config_with_defaults(): conf = config.get_config('tests/test-config/valid-partial-config.yaml') default_cookiecutters_dir = os.path.expanduser('~/.cookiecutters/') default_replay_dir = os.path.expanduser('~/.cookiecutter_replay/') expected_conf = {'cookiecutters_dir': default_cookiecutters_dir, 'replay_dir': default_replay_dir, 'default_context': {'full_name': 'Firstname Lastname', 'email': 'firstname.lastname@gmail.com', 'github_username': 'example'}, 'abbreviations': {'gh': 'https://github.com/{0}.git', 'bb': 'https://bitbucket.org/{0}'}} assert (conf == expected_conf)
[ "def", "test_get_config_with_defaults", "(", ")", ":", "conf", "=", "config", ".", "get_config", "(", "'tests/test-config/valid-partial-config.yaml'", ")", "default_cookiecutters_dir", "=", "os", ".", "path", ".", "expanduser", "(", "'~/.cookiecutters/'", ")", "default_...
a config file that overrides 1 of 3 defaults .
train
false
46,232
def _refresh_mine_cache(wrapped): @functools.wraps(wrapped) def wrapper(*args, **kwargs): '\n refresh salt mine on exit.\n ' returned = wrapped(*args, **salt.utils.clean_kwargs(**kwargs)) __salt__['mine.send']('dockerng.ps', verbose=True, all=True, host=True) return returned return wrapper
[ "def", "_refresh_mine_cache", "(", "wrapped", ")", ":", "@", "functools", ".", "wraps", "(", "wrapped", ")", "def", "wrapper", "(", "*", "args", ",", "**", "kwargs", ")", ":", "returned", "=", "wrapped", "(", "*", "args", ",", "**", "salt", ".", "uti...
decorator to trigger a refresh of salt mine data .
train
false
46,233
def correct_barcode(barcode, barcode_to_sample_id, correction_fn): try: sample_id = barcode_to_sample_id[barcode] except KeyError: sample_id = None if ((sample_id is not None) or (correction_fn is None) or ('N' in barcode)): return (0, barcode, False, sample_id) else: (corrected_barcode, num_errors) = correction_fn(barcode) try: sample_id = barcode_to_sample_id[corrected_barcode] except KeyError: sample_id = None return (num_errors, corrected_barcode, True, sample_id)
[ "def", "correct_barcode", "(", "barcode", ",", "barcode_to_sample_id", ",", "correction_fn", ")", ":", "try", ":", "sample_id", "=", "barcode_to_sample_id", "[", "barcode", "]", "except", "KeyError", ":", "sample_id", "=", "None", "if", "(", "(", "sample_id", ...
correct barcode given barcode .
train
false
46,234
def check_is_admin(roles): init() action = 'context_is_admin' match_list = (('rule:%s' % action),) target = {'project_id': ''} credentials = {'roles': roles} return policy.enforce(match_list, target, credentials)
[ "def", "check_is_admin", "(", "roles", ")", ":", "init", "(", ")", "action", "=", "'context_is_admin'", "match_list", "=", "(", "(", "'rule:%s'", "%", "action", ")", ",", ")", "target", "=", "{", "'project_id'", ":", "''", "}", "credentials", "=", "{", ...
whether or not roles contains admin role according to policy setting .
train
false
46,235
@api_view(['GET']) def locales_api_view(request): locales = {} for lang in settings.SUMO_LANGUAGES: if (lang == 'xx'): continue locale = {'name': LOCALES[lang].english, 'localized_name': LOCALES[lang].native, 'aaq_enabled': QuestionLocale.objects.filter(locale=lang).exists()} locales[lang] = locale return Response(locales)
[ "@", "api_view", "(", "[", "'GET'", "]", ")", "def", "locales_api_view", "(", "request", ")", ":", "locales", "=", "{", "}", "for", "lang", "in", "settings", ".", "SUMO_LANGUAGES", ":", "if", "(", "lang", "==", "'xx'", ")", ":", "continue", "locale", ...
api endpoint listing all supported locales .
train
false
46,236
def exceptionFromStreamError(element): error = _parseError(element, NS_XMPP_STREAMS) exception = StreamError(error['condition'], error['text'], error['textLang'], error['appCondition']) return exception
[ "def", "exceptionFromStreamError", "(", "element", ")", ":", "error", "=", "_parseError", "(", "element", ",", "NS_XMPP_STREAMS", ")", "exception", "=", "StreamError", "(", "error", "[", "'condition'", "]", ",", "error", "[", "'text'", "]", ",", "error", "["...
build an exception object from a stream error .
train
false
46,237
def get_module_complexity(module_path, threshold=7): with open(module_path, 'rU') as mod: code = mod.read() return get_code_complexity(code, threshold, filename=module_path)
[ "def", "get_module_complexity", "(", "module_path", ",", "threshold", "=", "7", ")", ":", "with", "open", "(", "module_path", ",", "'rU'", ")", "as", "mod", ":", "code", "=", "mod", ".", "read", "(", ")", "return", "get_code_complexity", "(", "code", ","...
returns the complexity of a module .
train
false
46,238
def unordlist(cs): return ''.join((chr(c) for c in cs))
[ "def", "unordlist", "(", "cs", ")", ":", "return", "''", ".", "join", "(", "(", "chr", "(", "c", ")", "for", "c", "in", "cs", ")", ")" ]
unordlist -> str takes a list of ascii values and returns the corresponding string .
train
false
46,239
@contextfunction def get_listed_products(context, n_products, ordering=None, filter_dict=None, orderable_only=True): request = context['request'] customer = request.customer shop = request.shop if (not filter_dict): filter_dict = {} products_qs = Product.objects.listed(shop=shop, customer=customer, language=get_language()).filter(**filter_dict) if ordering: products_qs = products_qs.order_by(ordering) if orderable_only: suppliers = Supplier.objects.all() products = [] for product in products_qs[:(n_products * 4)]: if (len(products) == n_products): break shop_product = product.get_shop_instance(shop, allow_cache=True) for supplier in suppliers: if shop_product.is_orderable(supplier, customer, shop_product.minimum_purchase_quantity): products.append(product) break return products products = products_qs[:n_products] return products
[ "@", "contextfunction", "def", "get_listed_products", "(", "context", ",", "n_products", ",", "ordering", "=", "None", ",", "filter_dict", "=", "None", ",", "orderable_only", "=", "True", ")", ":", "request", "=", "context", "[", "'request'", "]", "customer", ...
returns all products marked as listed that are determined to be visible based on the current context .
train
false
46,241
def validate_post_plugin_load(): if (('dhcp_agents_per_network' in cfg.CONF) and (cfg.CONF.dhcp_agents_per_network <= 0)): msg = (_("dhcp_agents_per_network must be >= 1. '%s' is invalid.") % cfg.CONF.dhcp_agents_per_network) return msg
[ "def", "validate_post_plugin_load", "(", ")", ":", "if", "(", "(", "'dhcp_agents_per_network'", "in", "cfg", ".", "CONF", ")", "and", "(", "cfg", ".", "CONF", ".", "dhcp_agents_per_network", "<=", "0", ")", ")", ":", "msg", "=", "(", "_", "(", "\"dhcp_ag...
checks if the configuration variables are valid .
train
false
46,244
def do_reverse(value): if isinstance(value, basestring): return value[::(-1)] try: return reversed(value) except TypeError: try: rv = list(value) rv.reverse() return rv except TypeError: raise FilterArgumentError('argument must be iterable')
[ "def", "do_reverse", "(", "value", ")", ":", "if", "isinstance", "(", "value", ",", "basestring", ")", ":", "return", "value", "[", ":", ":", "(", "-", "1", ")", "]", "try", ":", "return", "reversed", "(", "value", ")", "except", "TypeError", ":", ...
reverse the object or return an iterator the iterates over it the other way round .
train
true
46,245
def get_lapack_funcs(names, arrays=(), dtype=None): return _get_funcs(names, arrays, dtype, 'LAPACK', _flapack, _clapack, 'flapack', 'clapack', _lapack_alias)
[ "def", "get_lapack_funcs", "(", "names", ",", "arrays", "=", "(", ")", ",", "dtype", "=", "None", ")", ":", "return", "_get_funcs", "(", "names", ",", "arrays", ",", "dtype", ",", "'LAPACK'", ",", "_flapack", ",", "_clapack", ",", "'flapack'", ",", "'c...
return available lapack function objects from names .
train
false
46,246
def _imghdr_what_wrapper(data): return (imghdr.what(None, h=data) or _wider_test_jpeg(data))
[ "def", "_imghdr_what_wrapper", "(", "data", ")", ":", "return", "(", "imghdr", ".", "what", "(", "None", ",", "h", "=", "data", ")", "or", "_wider_test_jpeg", "(", "data", ")", ")" ]
a wrapper around imghdr .
train
false
46,247
def bind_cache_grant(app, provider, current_user, config_prefix='OAUTH2'): cache = Cache(app, config_prefix) @provider.grantsetter def create_grant(client_id, code, request, *args, **kwargs): 'Sets the grant token with the configured cache system' grant = Grant(cache, client_id=client_id, code=code['code'], redirect_uri=request.redirect_uri, scopes=request.scopes, user=current_user()) log.debug(('Set Grant Token with key %s' % grant.key)) cache.set(grant.key, dict(grant)) @provider.grantgetter def get(client_id, code): 'Gets the grant token with the configured cache system' grant = Grant(cache, client_id=client_id, code=code) ret = cache.get(grant.key) if (not ret): log.debug(('Grant Token not found with key %s' % grant.key)) return None log.debug(('Grant Token found with key %s' % grant.key)) for (k, v) in ret.items(): setattr(grant, k, v) return grant
[ "def", "bind_cache_grant", "(", "app", ",", "provider", ",", "current_user", ",", "config_prefix", "=", "'OAUTH2'", ")", ":", "cache", "=", "Cache", "(", "app", ",", "config_prefix", ")", "@", "provider", ".", "grantsetter", "def", "create_grant", "(", "clie...
configures an :class:oauth2provider instance to use various caching systems to get and set the grant token .
train
true
46,248
def drop_continuous_query(database, name, **client_args): client = _client(**client_args) query = 'DROP CONTINUOUS QUERY {0} ON {1}'.format(name, database) client.query(query) return True
[ "def", "drop_continuous_query", "(", "database", ",", "name", ",", "**", "client_args", ")", ":", "client", "=", "_client", "(", "**", "client_args", ")", "query", "=", "'DROP CONTINUOUS QUERY {0} ON {1}'", ".", "format", "(", "name", ",", "database", ")", "cl...
drop a continuous query .
train
true
46,249
def _to_epoch_time(date): if hasattr(date, 'timestamp'): return int(date.timestamp()) else: epoch = datetime.fromtimestamp(0) delta = (date - epoch) return int(delta.total_seconds())
[ "def", "_to_epoch_time", "(", "date", ")", ":", "if", "hasattr", "(", "date", ",", "'timestamp'", ")", ":", "return", "int", "(", "date", ".", "timestamp", "(", ")", ")", "else", ":", "epoch", "=", "datetime", ".", "fromtimestamp", "(", "0", ")", "de...
convert a datetime object to an integer number of seconds since the unix epoch .
train
false
46,250
def load_folder_list(path=''):
    """Return the full paths of the immediate subdirectories of *path*.

    Entries that are not directories (files, links to files) are skipped.
    Order follows ``os.listdir``, i.e. it is filesystem-dependent.
    """
    folders = []
    for entry in os.listdir(path):
        candidate = os.path.join(path, entry)
        if os.path.isdir(candidate):
            folders.append(candidate)
    return folders
[ "def", "load_folder_list", "(", "path", "=", "''", ")", ":", "return", "[", "os", ".", "path", ".", "join", "(", "path", ",", "o", ")", "for", "o", "in", "os", ".", "listdir", "(", "path", ")", "if", "os", ".", "path", ".", "isdir", "(", "os", ...
return a list of subfolder paths contained in a given folder path .
train
true
46,252
def _paren_groups(source): count = 0 start = 0 result = [] for pos in xrange(len(source)): if (source[pos] == '('): if (count == 0): start = pos count += 1 if (source[pos] == ')'): count -= 1 if (count == 0): result.append(source[start:(pos + 1)]) return result
[ "def", "_paren_groups", "(", "source", ")", ":", "count", "=", "0", "start", "=", "0", "result", "=", "[", "]", "for", "pos", "in", "xrange", "(", "len", "(", "source", ")", ")", ":", "if", "(", "source", "[", "pos", "]", "==", "'('", ")", ":",...
split a string into parenthesized groups .
train
false
46,253
def get_certs(context=_DEFAULT_CONTEXT, store=_DEFAULT_STORE):
    """Return the certificates available in the given certificate store.

    Runs a PowerShell ``Get-ChildItem`` query against
    ``Cert:\\<context>\\<store>`` and maps each certificate's thumbprint to
    a dict of its lower-cased properties plus a ``dnsnames`` list extracted
    from the raw ``DnsNameList`` entries.
    """
    store_path = 'Cert:\\{0}\\{1}'.format(context, store)
    _validate_cert_path(name=store_path)
    # The two fragments are concatenated with no separator, exactly as the
    # original joined its command list.
    ps_cmd = ("Get-ChildItem -Path '{0}' | Select-Object".format(store_path)
              + ' DnsNameList, SerialNumber, Subject, Thumbprint, Version')
    certificates = dict()
    for item in _cmd_run(cmd=ps_cmd, as_json=True):
        # Copy every property except the raw DnsNameList, lower-casing keys.
        info = {key.lower(): item[key] for key in item if key != 'DnsNameList'}
        info['dnsnames'] = [name['Unicode'] for name in item['DnsNameList']]
        certificates[item['Thumbprint']] = info
    return certificates
[ "def", "get_certs", "(", "context", "=", "_DEFAULT_CONTEXT", ",", "store", "=", "_DEFAULT_STORE", ")", ":", "ret", "=", "dict", "(", ")", "cmd", "=", "list", "(", ")", "blacklist_keys", "=", "[", "'DnsNameList'", "]", "store_path", "=", "'Cert:\\\\{0}\\\\{1}...
get the available certificates in the given store .
train
false
46,254
def flt(s, precision=None):
    """Coerce *s* to a float, returning 0 when conversion fails.

    Comma thousands-separators are stripped from string input first.  When
    *precision* is given, the result is passed through ``rounded`` (defined
    elsewhere in this module).

    Fix: the original tested ``isinstance(s, basestring)``, which is
    Python-2-only and raised NameError on Python 3 *outside* the ``try``,
    defeating the best-effort fallback; ``str`` is used instead.
    NOTE(review): on Python 2 this narrows the check to byte strings —
    confirm no callers pass ``unicode`` if Python 2 support still matters.
    """
    if isinstance(s, str):
        s = s.replace(u',', u'')
    try:
        num = float(s)
        if precision is not None:
            num = rounded(num, precision)
    except Exception:
        # Unparseable input deliberately collapses to 0 (best-effort coercion).
        num = 0
    return num
[ "def", "flt", "(", "s", ",", "precision", "=", "None", ")", ":", "if", "isinstance", "(", "s", ",", "basestring", ")", ":", "s", "=", "s", ".", "replace", "(", "u','", ",", "u''", ")", "try", ":", "num", "=", "float", "(", "s", ")", "if", "("...
convert to float .
train
false