id_within_dataset
int64
1
55.5k
snippet
stringlengths
19
14.2k
tokens
listlengths
6
1.63k
nl
stringlengths
6
352
split_within_dataset
stringclasses
1 value
is_duplicated
bool
2 classes
11,526
def vb_create_machine(name=None):
    """Create and register a new VirtualBox machine with OS type 'Other'.

    TODO (per upstream note): expose more parameters to customize machine
    creation.

    Returns:
        The new IMachine converted to a plain attribute dict.
    """
    vbox = vb_get_box()
    log.info('Create virtualbox machine %s ', name)
    groups = None
    os_type_id = 'Other'
    # None settings-file and None flags: let VirtualBox pick defaults.
    new_machine = vbox.createMachine(None, name, groups, os_type_id, None)
    vbox.registerMachine(new_machine)
    log.info('Finished creating %s', name)
    return vb_xpcom_to_attribute_dict(new_machine, 'IMachine')
[ "def", "vb_create_machine", "(", "name", "=", "None", ")", ":", "vbox", "=", "vb_get_box", "(", ")", "log", ".", "info", "(", "'Create virtualbox machine %s '", ",", "name", ")", "groups", "=", "None", "os_type_id", "=", "'Other'", "new_machine", "=", "vbox"...
creates a machine on the virtualbox hypervisor todo pass more params to customize machine creation .
train
true
11,527
def cart2pol(x, y, units='deg'):
    """Convert Cartesian coordinates to polar.

    Returns (theta, radius); theta is in degrees when units is 'deg' or
    'degs', radians otherwise.
    """
    radius = numpy.hypot(x, y)
    theta = numpy.arctan2(y, x)
    if units == 'deg' or units == 'degs':
        theta = theta * 180 / numpy.pi
    return theta, radius
[ "def", "cart2pol", "(", "x", ",", "y", ",", "units", "=", "'deg'", ")", ":", "radius", "=", "numpy", ".", "hypot", "(", "x", ",", "y", ")", "theta", "=", "numpy", ".", "arctan2", "(", "y", ",", "x", ")", "if", "(", "units", "in", "(", "'deg'"...
convert from cartesian to polar coordinates .
train
false
11,531
def CreateSortKeyPrefix(timestamp, randomness=True, reverse=False):
    """Return a sort-key prefix that sorts by timestamp.

    Python 2 code (long literals).  Packs the 32-bit timestamp plus 16
    random tie-breaker bits big-endian and B64Hex-encodes them so that
    lexicographic order matches numeric order.  reverse=True inverts the
    timestamp so newer entries sort first.
    """
    # Timestamp must fit in 32 bits.
    assert (timestamp < (1L << 32)), timestamp
    if reverse:
        timestamp = (((1L << 32) - int(timestamp)) - 1)
    if randomness:
        random_bits = (random.getrandbits(16) & 65535)
    else:
        random_bits = 0
    # '>IH': big-endian uint32 timestamp + uint16 random tie-breaker.
    return base64hex.B64HexEncode(struct.pack('>IH', int(timestamp), random_bits))
[ "def", "CreateSortKeyPrefix", "(", "timestamp", ",", "randomness", "=", "True", ",", "reverse", "=", "False", ")", ":", "assert", "(", "timestamp", "<", "(", "1", "L", "<<", "32", ")", ")", ",", "timestamp", "if", "reverse", ":", "timestamp", "=", "(",...
returns a sort key which will sort by timestamp .
train
false
11,532
def yaml_formatter(table_name, table_dict): special_table_list = ['name', 'OS/2', 'TTFA'] if (table_name in special_table_list): if (table_name == 'name'): return name_yaml_formatter(table_dict) elif (table_name == 'OS/2'): return os2_yaml_formatter(table_dict) elif (table_name == 'TTFA'): return ttfa_yaml_formatter(table_dict) else: table_string = (table_name.strip() + ': {\n') for field in table_dict.keys(): table_string = (((((table_string + (' ' * 4)) + field) + ': ') + str(table_dict[field])) + ',\n') table_string += '}\n\n' return table_string
[ "def", "yaml_formatter", "(", "table_name", ",", "table_dict", ")", ":", "special_table_list", "=", "[", "'name'", ",", "'OS/2'", ",", "'TTFA'", "]", "if", "(", "table_name", "in", "special_table_list", ")", ":", "if", "(", "table_name", "==", "'name'", ")",...
creates a yaml formatted string for opentype table font reports .
train
false
11,533
def create_rdf_parser_without_externals(target, store):
    """Create an RDF/XML parser that does not expand external entities.

    Disabling general and parameter external entities hardens the parser
    against XXE-style attacks on untrusted input.
    """
    parser = _rdfxml_create_parser(target, store)
    parser.setFeature(feature_external_ges, 0)
    parser.setFeature(feature_external_pes, 0)
    return parser
[ "def", "create_rdf_parser_without_externals", "(", "target", ",", "store", ")", ":", "parser", "=", "_rdfxml_create_parser", "(", "target", ",", "store", ")", "parser", ".", "setFeature", "(", "feature_external_ges", ",", "0", ")", "parser", ".", "setFeature", "...
create an rdf parser that does not support general entity expansion .
train
false
11,534
def RestoreFromIndexValue(index_value, data_type):
    """Restore an index value to the correct datastore type.

    Python 2 code (uses ``unicode``).  Validates that index_value has the
    raw type expected for data_type, decodes UTF-8 byte strings where the
    property meaning allows it, then applies the registered conversion.

    Raises:
        datastore_errors.BadValueError: unsupported type or failed conversion.
    """
    raw_type = _PROPERTY_TYPE_TO_INDEX_VALUE_TYPE.get(data_type)
    if (raw_type is None):
        raise datastore_errors.BadValueError(('Unsupported data type (%r)' % data_type))
    if (index_value is None):
        return index_value
    if (not isinstance(index_value, raw_type)):
        # NOTE(review): message has a typo ('converstion') and the %r args
        # read as swapped (expected vs got) -- left as-is to preserve behavior.
        raise datastore_errors.BadValueError(('Unsupported converstion. Expected %r got %r' % (type(index_value), raw_type)))
    meaning = _PROPERTY_MEANINGS.get(data_type)
    if (isinstance(index_value, str) and (meaning not in _NON_UTF8_MEANINGS)):
        index_value = unicode(index_value, 'utf-8')
    conv = _PROPERTY_CONVERSIONS.get(meaning)
    if (not conv):
        return index_value
    try:
        value = conv(index_value)
    except (KeyError, ValueError, IndexError, TypeError, AttributeError) as msg:
        raise datastore_errors.BadValueError(('Error converting value: %r\nException was: %s' % (index_value, msg)))
    return value
[ "def", "RestoreFromIndexValue", "(", "index_value", ",", "data_type", ")", ":", "raw_type", "=", "_PROPERTY_TYPE_TO_INDEX_VALUE_TYPE", ".", "get", "(", "data_type", ")", "if", "(", "raw_type", "is", "None", ")", ":", "raise", "datastore_errors", ".", "BadValueErro...
restores a index value to the correct datastore type .
train
false
11,536
def conda_installed_files(prefix, exclude_self_build=False):
    """Return the set of files which have been installed into *prefix*.

    Args:
        prefix: environment prefix to inspect.
        exclude_self_build: skip packages whose metadata carries a
            'file_hash' entry.
    """
    res = set()
    for (dist, meta) in iteritems(linked_data(prefix)):
        if (exclude_self_build and (u'file_hash' in meta)):
            continue
        res.update(set(meta.get(u'files', ())))
    return res
[ "def", "conda_installed_files", "(", "prefix", ",", "exclude_self_build", "=", "False", ")", ":", "res", "=", "set", "(", ")", "for", "(", "dist", ",", "meta", ")", "in", "iteritems", "(", "linked_data", "(", "prefix", ")", ")", ":", "if", "(", "exclud...
return the set of files which have been installed into a given prefix .
train
false
11,537
def forward_migrate_group_curators(apps, schema_editor):
    """Data migration (forwards): copy each Group's ForeignKey ``curator``
    into the M2M ``curators`` field."""
    Group = apps.get_model(u'groups', u'Group')
    for group in Group.objects.all():
        if group.curator:
            group.curators.add(group.curator)
            group.save()
[ "def", "forward_migrate_group_curators", "(", "apps", ",", "schema_editor", ")", ":", "Group", "=", "apps", ".", "get_model", "(", "u'groups'", ",", "u'Group'", ")", "for", "group", "in", "Group", ".", "objects", ".", "all", "(", ")", ":", "if", "group", ...
migrate forwards all the data for the foreignkey curator to the m2m field curators .
train
false
11,539
@image_comparison(baseline_images=[u'test_eventplot_problem_kwargs'], extensions=[u'png'], remove_text=True)
def test_eventplot_problem_kwargs():
    """Singular LineCollection kwargs (color/linewidth/linestyle) passed
    alongside their plural forms must raise IgnoredKeywordWarning rather
    than silently overriding the plural versions."""
    np.random.seed(0)
    data1 = np.random.random([20]).tolist()
    data2 = np.random.random([10]).tolist()
    data = [data1, data2]
    fig = plt.figure()
    axobj = fig.add_subplot(111)
    with warnings.catch_warnings(record=True) as w:
        warnings.simplefilter(u'always')
        colls = axobj.eventplot(data, colors=[u'r', u'b'], color=[u'c', u'm'], linewidths=[2, 1], linewidth=[1, 2], linestyles=[u'solid', u'dashed'], linestyle=[u'dashdot', u'dotted'])
        # One warning per ignored singular kwarg (color, linewidth, linestyle).
        assert (len(w) == 3)
        assert all((issubclass(wi.category, IgnoredKeywordWarning) for wi in w))
[ "@", "image_comparison", "(", "baseline_images", "=", "[", "u'test_eventplot_problem_kwargs'", "]", ",", "extensions", "=", "[", "u'png'", "]", ",", "remove_text", "=", "True", ")", "def", "test_eventplot_problem_kwargs", "(", ")", ":", "np", ".", "random", ".",...
test that singular versions of linecollection props raise an ignoredkeywordwarning rather than overriding the plural versions .
train
false
11,540
def outcome():
    """RESTful controller for outcomes (delegates to the S3 REST controller)."""
    return s3_rest_controller()
[ "def", "outcome", "(", ")", ":", "return", "s3_rest_controller", "(", ")" ]
restful controller for outcomes .
train
false
11,541
def test_list_partitions():
    """List disk partitions, printing each device name and its hex type."""
    from fabtools.disk import partitions
    # Shadows the imported name on purpose: call it once, keep the dict.
    partitions = partitions()
    for (pname, ptype) in partitions.items():
        puts(('%s is %s' % (pname, hex(ptype))))
[ "def", "test_list_partitions", "(", ")", ":", "from", "fabtools", ".", "disk", "import", "partitions", "partitions", "=", "partitions", "(", ")", "for", "(", "pname", ",", "ptype", ")", "in", "partitions", ".", "items", "(", ")", ":", "puts", "(", "(", ...
list disk partitions .
train
false
11,543
def getNewRepository():
    """Return a new ExportRepository instance."""
    return ExportRepository()
[ "def", "getNewRepository", "(", ")", ":", "return", "ExportRepository", "(", ")" ]
get new repository .
train
false
11,544
def decode_factory(text):
    """Recursively convert any byte strings inside *text* to unicode.

    Lists and dicts are rebuilt with converted values (dict keys are left
    untouched); any other type is returned unchanged.
    """
    if isinstance(text, str):
        return unicoder(text)
    if isinstance(text, list):
        return [decode_factory(item) for item in text]
    if isinstance(text, dict):
        return {key: decode_factory(text[key]) for key in text}
    return text
[ "def", "decode_factory", "(", "text", ")", ":", "if", "isinstance", "(", "text", ",", "str", ")", ":", "return", "unicoder", "(", "text", ")", "elif", "isinstance", "(", "text", ",", "list", ")", ":", "new_text", "=", "[", "]", "for", "t", "in", "t...
recursively looks through the supplied argument and converts and text to unicode .
train
false
11,546
def encode_id(config_id_secret, obj_id):
    """Utility method to encode ids.

    Python 2 code (str.encode('hex')).  The id is left-padded with '!' to a
    multiple of 8 bytes (Blowfish's block size), encrypted, and returned as
    a hex string.
    """
    id_cipher = Blowfish.new(config_id_secret)
    s = str(obj_id)
    # Pad to a multiple of 8; a full extra block is added when already aligned.
    s = (('!' * (8 - (len(s) % 8))) + s)
    return id_cipher.encrypt(s).encode('hex')
[ "def", "encode_id", "(", "config_id_secret", ",", "obj_id", ")", ":", "id_cipher", "=", "Blowfish", ".", "new", "(", "config_id_secret", ")", "s", "=", "str", "(", "obj_id", ")", "s", "=", "(", "(", "'!'", "*", "(", "8", "-", "(", "len", "(", "s", ...
utility method to encode ids .
train
false
11,547
def WriteMacros(out, eclipse_langs, defines):
    """Write the Macros section of a CDT settings export (XML) file.

    Python 2 code (dict.iterkeys).  Each define becomes one <macro> entry,
    repeated per language, with XML-escaped names and values.
    """
    out.write(' <section name="org.eclipse.cdt.internal.ui.wizards.settingswizards.Macros">\n')
    out.write(' <language name="holder for library settings"></language>\n')
    for lang in eclipse_langs:
        out.write((' <language name="%s">\n' % lang))
        # Sorted for deterministic output.
        for key in sorted(defines.iterkeys()):
            out.write((' <macro><name>%s</name><value>%s</value></macro>\n' % (escape(key), escape(defines[key]))))
        out.write(' </language>\n')
    out.write(' </section>\n')
[ "def", "WriteMacros", "(", "out", ",", "eclipse_langs", ",", "defines", ")", ":", "out", ".", "write", "(", "' <section name=\"org.eclipse.cdt.internal.ui.wizards.settingswizards.Macros\">\\n'", ")", "out", ".", "write", "(", "' <language name=\"holder for library setting...
write the macros section of a cdt settings export file .
train
false
11,548
def draw_landmarks(im, landmarks, color=(0, 255, 0)):
    """Draw each landmark as a filled radius-1 circle on *im* (in place).

    Returns the (mutated) image for call-chaining convenience.
    """
    for point in landmarks:
        cv2.circle(im, point, radius=1, thickness=-1, color=color)
    return im
[ "def", "draw_landmarks", "(", "im", ",", "landmarks", ",", "color", "=", "(", "0", ",", "255", ",", "0", ")", ")", ":", "for", "landmark", "in", "landmarks", ":", "cv2", ".", "circle", "(", "im", ",", "landmark", ",", "color", "=", "color", ",", ...
landmarks = [ .
train
false
11,549
def languages():
    """List all the currently supported language prefixes."""
    print_available_languages()
[ "def", "languages", "(", ")", ":", "print_available_languages", "(", ")" ]
list all the currently supported language prefixes .
train
false
11,550
@dual_decorator
def empty_arg_shortcircuit(return_code=u'[]', position=1):
    """Decorator factory: short-circuit when a positional arg is empty.

    When len(args[position]) == 0, the literal encoded in *return_code* is
    returned immediately (via ast.literal_eval, so only Python literals --
    not arbitrary code -- can result); otherwise the wrapped function runs
    normally.
    """
    @decorator
    def wrapper(function, *args, **kw):
        if (len(args[position]) == 0):
            return ast.literal_eval(return_code)
        else:
            return function(*args, **kw)
    return wrapper
[ "@", "dual_decorator", "def", "empty_arg_shortcircuit", "(", "return_code", "=", "u'[]'", ",", "position", "=", "1", ")", ":", "@", "decorator", "def", "wrapper", "(", "function", ",", "*", "args", ",", "**", "kw", ")", ":", "if", "(", "len", "(", "arg...
decorate a function to shortcircuit and return something immediately if the length of a positional arg is 0 .
train
false
11,552
def clear_all_regions():
    """Clear all BracketHighlighter regions in every view of every window,
    then reset each view's cached bracket locations."""
    for window in sublime.windows():
        for view in window.views():
            for region_key in view.settings().get('bracket_highlighter.regions', []):
                view.erase_regions(region_key)
            # Reset the location cache once per view.
            view.settings().set('bracket_highlighter.locations', {'open': {}, 'close': {}, 'unmatched': {}, 'icon': {}})
[ "def", "clear_all_regions", "(", ")", ":", "for", "window", "in", "sublime", ".", "windows", "(", ")", ":", "for", "view", "in", "window", ".", "views", "(", ")", ":", "for", "region_key", "in", "view", ".", "settings", "(", ")", ".", "get", "(", "...
clear all regions .
train
false
11,553
def create_class_from_xml_string(target_class, xml_string):
    """Create an instance of *target_class* from an XML string by parsing
    it into an ElementTree element and delegating to
    create_class_from_element_tree."""
    tree = ElementTree.fromstring(xml_string)
    return create_class_from_element_tree(target_class, tree)
[ "def", "create_class_from_xml_string", "(", "target_class", ",", "xml_string", ")", ":", "tree", "=", "ElementTree", ".", "fromstring", "(", "xml_string", ")", "return", "create_class_from_element_tree", "(", "target_class", ",", "tree", ")" ]
creates an instance of the target class from a string .
train
false
11,554
def fromarray(obj, mode=None):
    """Create an image memory from an object exporting the array interface.

    When *mode* is None it is inferred from the array's typestr via
    _fromarray_typemap; the array's dimensionality is validated against
    the mode before the raw buffer is handed to frombuffer().

    Raises:
        TypeError: the array's data type cannot be mapped to a mode.
        ValueError: the array has too many dimensions for the mode.
    """
    arr = obj.__array_interface__
    shape = arr['shape']
    ndim = len(shape)
    try:
        strides = arr['strides']
    except KeyError:
        strides = None
    if (mode is None):
        try:
            # Map (band-count placeholder, typestr) -> (mode, rawmode).
            typekey = (((1, 1) + shape[2:]), arr['typestr'])
            (mode, rawmode) = _fromarray_typemap[typekey]
        except KeyError:
            raise TypeError('Cannot handle this data type')
    else:
        rawmode = mode
    # Maximum dimensionality allowed for the chosen mode.
    if (mode in ['1', 'L', 'I', 'P', 'F']):
        ndmax = 2
    elif (mode == 'RGB'):
        ndmax = 3
    else:
        ndmax = 4
    if (ndim > ndmax):
        raise ValueError(('Too many dimensions: %d > %d.' % (ndim, ndmax)))
    # PIL size is (width, height), i.e. (cols, rows).
    size = (shape[1], shape[0])
    if (strides is not None):
        # Strided (non-contiguous) data: serialize through the object.
        if hasattr(obj, 'tobytes'):
            obj = obj.tobytes()
        else:
            obj = obj.tostring()
    return frombuffer(mode, size, obj, 'raw', rawmode, 0, 1)
[ "def", "fromarray", "(", "obj", ",", "mode", "=", "None", ")", ":", "arr", "=", "obj", ".", "__array_interface__", "shape", "=", "arr", "[", "'shape'", "]", "ndim", "=", "len", "(", "shape", ")", "try", ":", "strides", "=", "arr", "[", "'strides'", ...
creates an image memory from an object exporting the array interface .
train
false
11,556
def husl_palette(n_colors=6, h=(15 / 360.0), s=0.9, l=0.65):
    """Get a set of evenly spaced colors in HUSL hue space.

    Args:
        n_colors: number of colors in the palette.
        h: first hue, as a fraction of the circle.
        s: saturation, 0-1.
        l: lightness, 0-1.

    Returns:
        list of RGB values from husl.husl_to_rgb.
    """
    # n_colors evenly spaced hues; drop the endpoint (== start hue).
    hues = np.linspace(0, 1, (n_colors + 1))[:(-1)]
    hues += h
    hues %= 1
    # husl expects hue on 0-359 and saturation/lightness on 0-99.
    hues *= 359
    s *= 99
    l *= 99
    palette = [husl.husl_to_rgb(h_i, s, l) for h_i in hues]
    return palette
[ "def", "husl_palette", "(", "n_colors", "=", "6", ",", "h", "=", "(", "15", "/", "360.0", ")", ",", "s", "=", "0.9", ",", "l", "=", "0.65", ")", ":", "hues", "=", "np", ".", "linspace", "(", "0", ",", "1", ",", "(", "n_colors", "+", "1", ")...
get a set of evenly spaced colors in husl hue space .
train
true
11,558
def _add_patch_info(s):
    """Add patch information to a source space dict *s* (in place).

    s['nearest'] maps each surface point to its nearest source vertex.
    For every such vertex this collects the sorted indices of the points
    in its patch (s['pinfo']) and, for each vertex in s['vertno'], the
    index of the patch it belongs to (s['patch_inds']).  When 'nearest'
    is None both fields are set to None and nothing else happens.
    """
    nearest = s['nearest']
    if (nearest is None):
        s['pinfo'] = None
        s['patch_inds'] = None
        return
    logger.info(' Computing patch statistics...')
    indn = np.argsort(nearest)
    nearest_sorted = nearest[indn]
    # Boundaries between runs of equal 'nearest' values in sorted order.
    steps = (np.where((nearest_sorted[1:] != nearest_sorted[:(-1)]))[0] + 1)
    starti = np.r_[([0], steps)]
    stopi = np.r_[(steps, [len(nearest)])]
    pinfo = list()
    for (start, stop) in zip(starti, stopi):
        pinfo.append(np.sort(indn[start:stop]))
    s['pinfo'] = pinfo
    # One representative vertex per patch, used for the searchsorted lookup.
    patch_verts = nearest_sorted[(steps - 1)]
    s['patch_inds'] = np.searchsorted(patch_verts, s['vertno'])
    logger.info(' Patch information added...')
[ "def", "_add_patch_info", "(", "s", ")", ":", "nearest", "=", "s", "[", "'nearest'", "]", "if", "(", "nearest", "is", "None", ")", ":", "s", "[", "'pinfo'", "]", "=", "None", "s", "[", "'patch_inds'", "]", "=", "None", "return", "logger", ".", "inf...
patch information in a source space .
train
false
11,559
def dsa_view(redirect_name=None):
    """Decorate django-social-auth views.

    The wrapped view receives the resolved backend instance in place of
    the backend name.  When *redirect_name* is given it is reversed with
    the backend name to build the redirect URI; otherwise the current
    request path is used.

    Raises:
        WrongBackend: when the backend name cannot be resolved.
    """
    def dec(func):
        @wraps(func)
        def wrapper(request, backend, *args, **kwargs):
            if redirect_name:
                redirect = reverse(redirect_name, args=(backend,))
            else:
                redirect = request.path
            request.social_auth_backend = get_backend(backend, request, redirect)
            if (request.social_auth_backend is None):
                raise WrongBackend(backend)
            return func(request, request.social_auth_backend, *args, **kwargs)
        return wrapper
    return dec
[ "def", "dsa_view", "(", "redirect_name", "=", "None", ")", ":", "def", "dec", "(", "func", ")", ":", "@", "wraps", "(", "func", ")", "def", "wrapper", "(", "request", ",", "backend", ",", "*", "args", ",", "**", "kwargs", ")", ":", "if", "redirect_...
decorate djangos-social-auth views .
train
false
11,560
@api_versions.wraps('2.22')
@utils.arg('server', metavar='<server>', help=_('Name or ID of server.'))
@utils.arg('migration', metavar='<migration>', help=_('ID of migration.'))
def do_live_migration_force_complete(cs, args):
    """Force an on-going live migration to complete."""
    server = _find_server(cs, args.server)
    cs.server_migrations.live_migrate_force_complete(server, args.migration)
[ "@", "api_versions", ".", "wraps", "(", "'2.22'", ")", "@", "utils", ".", "arg", "(", "'server'", ",", "metavar", "=", "'<server>'", ",", "help", "=", "_", "(", "'Name or ID of server.'", ")", ")", "@", "utils", ".", "arg", "(", "'migration'", ",", "me...
force on-going live migration to complete .
train
false
11,561
def push_notification_enabled():
    """Return whether the push notification feature is enabled."""
    return PushNotificationConfig.is_enabled()
[ "def", "push_notification_enabled", "(", ")", ":", "return", "PushNotificationConfig", ".", "is_enabled", "(", ")" ]
returns whether the push notification feature is enabled .
train
false
11,562
def pass_app(fn):
    """Mark *fn* so the application instance is passed to it as a parameter."""
    setattr(fn, '_pass_app', True)
    return fn
[ "def", "pass_app", "(", "fn", ")", ":", "fn", ".", "_pass_app", "=", "True", "return", "fn" ]
pass the application instance as parameter to the method .
train
false
11,563
def test_port():
    """Test the cv.port validator: rejects non-numeric strings, None,
    negatives, 0 and out-of-range values; accepts valid TCP/UDP ports."""
    schema = vol.Schema(cv.port)
    for value in ('invalid', None, (-1), 0, 80000, '81000'):
        with pytest.raises(vol.MultipleInvalid):
            schema(value)
    for value in ('1000', 21, 24574):
        schema(value)
[ "def", "test_port", "(", ")", ":", "schema", "=", "vol", ".", "Schema", "(", "cv", ".", "port", ")", "for", "value", "in", "(", "'invalid'", ",", "None", ",", "(", "-", "1", ")", ",", "0", ",", "80000", ",", "'81000'", ")", ":", "with", "pytest...
test tcp/udp network port .
train
false
11,565
def validate_namespace(value, exception=BadValueError):
    """Raise *exception* unless *value* is a valid namespace string.

    Python 2 code (basestring).  The value must be a string matching
    _NAMESPACE_RE (pattern _NAMESPACE_PATTERN).
    """
    if (not isinstance(value, basestring)):
        raise exception(('value should be a string; received %r (a %s):' % (value, type(value))))
    if (not _NAMESPACE_RE.match(value)):
        raise exception(('value "%s" does not match regex "%s"' % (value, _NAMESPACE_PATTERN)))
[ "def", "validate_namespace", "(", "value", ",", "exception", "=", "BadValueError", ")", ":", "if", "(", "not", "isinstance", "(", "value", ",", "basestring", ")", ")", ":", "raise", "exception", "(", "(", "'value should be a string; received %r (a %s):'", "%", "...
raises an exception if value is not a valid namespace string .
train
false
11,566
def _log2_floor_filter(value): return int(math.log(value, 2))
[ "def", "_log2_floor_filter", "(", "value", ")", ":", "return", "int", "(", "math", ".", "log", "(", "value", ",", "2", ")", ")" ]
returns the logarithm base 2 of the given value .
train
false
11,567
def decode_methods_request(offset, data):
    """Try to decode a SOCKS method request from *data* at *offset*.

    Returns (new_offset, MethodRequest) on success, or the unchanged
    (offset, None) when the buffer is too short or the version byte is
    not SOCKS_VERSION.
    """
    if ((len(data) - offset) < 2):
        return (offset, None)
    (version, number_of_methods) = struct.unpack_from('!BB', data, offset)
    if (not (version == SOCKS_VERSION)):
        return (offset, None)
    offset += 2
    methods = set([])
    # NOTE(review): the method bytes themselves are not length-checked; a
    # truncated buffer would raise struct.error here -- verify callers.
    for i in range(number_of_methods):
        (method,) = struct.unpack_from('!B', data, offset)
        methods.add(method)
        offset += 1
    return (offset, MethodRequest(version, methods))
[ "def", "decode_methods_request", "(", "offset", ",", "data", ")", ":", "if", "(", "(", "len", "(", "data", ")", "-", "offset", ")", "<", "2", ")", ":", "return", "(", "offset", ",", "None", ")", "(", "version", ",", "number_of_methods", ")", "=", "...
try to decodes a method request .
train
false
11,568
def dummy_nested(d1, d2, method='full'):
    """Dummy variables for a factor nested within another factor.

    Marked unfinished/incomplete upstream (mainly copy-paste from
    dummy_product).  method='full' simply returns d2; 'drop-last' and
    'drop-first' drop redundant columns of d2 to avoid collinearity.

    Raises:
        ValueError: for an unrecognized method.
    """
    if (method == 'full'):
        return d2
    (start1, end1) = dummy_limits(d1)
    (start2, end2) = dummy_limits(d2)
    first = np.in1d(start2, start1)
    last = np.in1d(end2, end1)
    equal = (first == last)
    # Columns of d2 to drop for each scheme.
    col_dropf = ((~ first) * (~ equal))
    col_dropl = ((~ last) * (~ equal))
    if (method == 'drop-last'):
        # NOTE(review): d12rl is computed but never used -- dead code
        # retained from the copy-paste origin.
        d12rl = dummy_product(d1[:, :(-1)], d2[:, :(-1)])
        dd = np.column_stack((np.ones(d1.shape[0], int), d1[:, :(-1)], d2[:, col_dropl]))
    elif (method == 'drop-first'):
        # NOTE(review): d12r likewise unused.
        d12r = dummy_product(d1[:, 1:], d2[:, 1:])
        dd = np.column_stack((np.ones(d1.shape[0], int), d1[:, 1:], d2[:, col_dropf]))
    else:
        raise ValueError('method not recognized')
    return (dd, col_dropf, col_dropl)
[ "def", "dummy_nested", "(", "d1", ",", "d2", ",", "method", "=", "'full'", ")", ":", "if", "(", "method", "==", "'full'", ")", ":", "return", "d2", "(", "start1", ",", "end1", ")", "=", "dummy_limits", "(", "d1", ")", "(", "start2", ",", "end2", ...
unfinished and incomplete mainly copy past dummy_product dummy variable from product of two dummy variables parameters d1 .
train
false
11,570
def skip_deprecated(app, what, name, obj, skip, options):
    """Sphinx autodoc-skip-member hook: also skip any member whose
    docstring carries a '.. deprecated::' note."""
    if skip:
        return skip
    doc = obj.__doc__
    return doc and '.. deprecated::' in doc
[ "def", "skip_deprecated", "(", "app", ",", "what", ",", "name", ",", "obj", ",", "skip", ",", "options", ")", ":", "doc", "=", "obj", ".", "__doc__", "return", "(", "skip", "or", "(", "doc", "and", "(", "'.. deprecated::'", "in", "doc", ")", ")", "...
all attributes containing a deprecated note shouldnt be documented anymore .
train
false
11,571
@profiler.trace
def subnetpool_create(request, name, prefixes, **kwargs):
    """Create a Neutron subnetpool.

    tenant_id defaults to the request user's project when absent from
    kwargs; all remaining kwargs are merged into the request body.

    Returns:
        SubnetPool wrapping the created subnetpool.
    """
    LOG.debug(('subnetpool_create(): name=%s, prefixes=%s, kwargs=%s' % (name, prefixes, kwargs)))
    body = {'subnetpool': {'name': name, 'prefixes': prefixes}}
    if ('tenant_id' not in kwargs):
        kwargs['tenant_id'] = request.user.project_id
    body['subnetpool'].update(kwargs)
    subnetpool = neutronclient(request).create_subnetpool(body=body).get('subnetpool')
    return SubnetPool(subnetpool)
[ "@", "profiler", ".", "trace", "def", "subnetpool_create", "(", "request", ",", "name", ",", "prefixes", ",", "**", "kwargs", ")", ":", "LOG", ".", "debug", "(", "(", "'subnetpool_create(): name=%s, prefixes=%s, kwargs=%s'", "%", "(", "name", ",", "prefixes", ...
create a subnetpool .
train
true
11,572
def mage_hsv_tuple_to_rgb(hsv):
    """Convert an HSV tuple on the MAGE scale (h: 0-360, s/v: 0-100) to an
    RGB tuple on the 0-255 scale."""
    normalized = (hsv[0] / 360.0, hsv[1] / 100.0, hsv[2] / 100.0)
    rgb = hsv_to_rgb(*normalized)
    return tuple(int(channel * 255) for channel in rgb)
[ "def", "mage_hsv_tuple_to_rgb", "(", "hsv", ")", ":", "hsv_0_to_1", "=", "(", "(", "hsv", "[", "0", "]", "/", "360.0", ")", ",", "(", "hsv", "[", "1", "]", "/", "100.0", ")", ",", "(", "hsv", "[", "2", "]", "/", "100.0", ")", ")", "rgb", "=",...
converts hsv tuple on mage scale to rgb on 0-255 scale .
train
false
11,574
def convert_to_list_dict(lst, label):
    """Convert a value or list of values into a list of single-key dicts.

    Returns None for falsy input.  A scalar is treated as a one-element
    list; each element x becomes {label: x}.
    """
    if not lst:
        return None
    items = lst if isinstance(lst, list) else [lst]
    return [{label: item} for item in items]
[ "def", "convert_to_list_dict", "(", "lst", ",", "label", ")", ":", "if", "(", "not", "lst", ")", ":", "return", "None", "if", "(", "not", "isinstance", "(", "lst", ",", "list", ")", ")", ":", "lst", "=", "[", "lst", "]", "return", "[", "{", "labe...
convert a value or list into a list of dicts .
train
false
11,575
def _date_bin_set_datetime(new_date):
    """Set the system date/time using the ``date`` binary.

    An aware datetime is converted to UTC (and ``-u`` is passed).  The
    non-POSIX format (with seconds) is tried first; on failure the
    strictly POSIX-compliant format is used, which can only set the time
    to the minute.

    Raises:
        CommandExecutionError: when both invocations fail.
    """
    cmd = ['date']
    if (new_date.utcoffset() is not None):
        # Normalize to UTC and tell `date` we're passing UTC.
        new_date = (new_date - new_date.utcoffset())
        new_date = new_date.replace(tzinfo=_FixedOffset(0))
        cmd.append('-u')
    # MMDDhhmmYYYY.ss -- non-POSIX, second precision.
    non_posix = '{1:02}{2:02}{3:02}{4:02}{0:04}.{5:02}'.format(*new_date.timetuple())
    non_posix_cmd = (cmd + [non_posix])
    ret_non_posix = __salt__['cmd.run_all'](non_posix_cmd, python_shell=False)
    if (ret_non_posix['retcode'] != 0):
        # Fall back to the POSIX format (minute precision only).
        posix = ' {1:02}{2:02}{3:02}{4:02}{0:04}'.format(*new_date.timetuple())
        posix_cmd = (cmd + [posix])
        ret_posix = __salt__['cmd.run_all'](posix_cmd, python_shell=False)
        if (ret_posix['retcode'] != 0):
            # Report the original (non-POSIX) error.
            msg = 'date failed: {0}'.format(ret_non_posix['stderr'])
            raise CommandExecutionError(msg)
    return True
[ "def", "_date_bin_set_datetime", "(", "new_date", ")", ":", "cmd", "=", "[", "'date'", "]", "if", "(", "new_date", ".", "utcoffset", "(", ")", "is", "not", "None", ")", ":", "new_date", "=", "(", "new_date", "-", "new_date", ".", "utcoffset", "(", ")",...
set the system date/time using the date command note using a strictly posix-compliant date binary we can only set the date up to the minute .
train
true
11,577
def unicode_repr(obj):
    """repr() helper for classes fixed with @python_2_unicode_compatible.

    On Python 3 this is plain repr().  On Python 2, objects may supply
    their own unicode_repr() method, and unicode strings have the leading
    'u' prefix stripped so reprs match across versions.
    """
    if PY3:
        return repr(obj)
    if hasattr(obj, 'unicode_repr'):
        return obj.unicode_repr()
    if isinstance(obj, unicode):
        # Drop the u'' prefix: repr(u'x') == "u'x'" on Python 2.
        return repr(obj)[1:]
    return repr(obj)
[ "def", "unicode_repr", "(", "obj", ")", ":", "if", "PY3", ":", "return", "repr", "(", "obj", ")", "if", "hasattr", "(", "obj", ",", "'unicode_repr'", ")", ":", "return", "obj", ".", "unicode_repr", "(", ")", "if", "isinstance", "(", "obj", ",", "unic...
for classes that was fixed with @python_2_unicode_compatible unicode_repr returns obj .
train
false
11,579
def build_random_tree(size):
    """Create a randomly constructed, fairly balanced binary tree.

    Nodes are valued 0..size-1; each new node is inserted at the end of a
    random left/right walk from the root.
    """
    root = TreeNode(0)
    for value in range(1, size):
        node = root
        while True:
            if random.choice(['L', 'R']) == 'L':
                if node.left:
                    node = node.left
                else:
                    node.left = TreeNode(value)
                    break
            else:
                if node.right:
                    node = node.right
                else:
                    node.right = TreeNode(value)
                    break
    return root
[ "def", "build_random_tree", "(", "size", ")", ":", "root", "=", "TreeNode", "(", "0", ")", "for", "i", "in", "range", "(", "1", ",", "size", ")", ":", "cursor", "=", "root", "while", "True", ":", "choice", "=", "random", ".", "choice", "(", "[", ...
create a randomly constructred tree that is fairly balanced .
train
false
11,580
def document_create(index, doc_type, body=None, id=None, hosts=None, profile=None):
    """Create a document in a specified index.

    CLI example:: salt myminion elasticsearch.document_create ...

    Returns:
        True on success, or None when the index is not found.
    """
    es = _get_instance(hosts, profile)
    try:
        # The response is not needed; success is reported as a bare True.
        es.index(index=index, doc_type=doc_type, body=body, id=id)
        return True
    except elasticsearch.exceptions.NotFoundError:
        return None
    # (The original had an unreachable trailing `return None` after the
    # try/except -- both paths already return -- plus an unused `result`
    # local; both removed.)
[ "def", "document_create", "(", "index", ",", "doc_type", ",", "body", "=", "None", ",", "id", "=", "None", ",", "hosts", "=", "None", ",", "profile", "=", "None", ")", ":", "es", "=", "_get_instance", "(", "hosts", ",", "profile", ")", "try", ":", ...
create a document in a specified index cli example:: salt myminion elasticsearch .
train
false
11,581
def _extract_yarn_counters(counters_record):
    """Convert the Avro-JSON counter structure to {group: {counter: amount}}.

    Each level is type-checked defensively; malformed records or entries
    are silently skipped rather than raising.  Amounts for a repeated
    counter name within a group are summed.
    """
    if (not isinstance(counters_record, dict)):
        return {}
    group_records = counters_record.get('groups')
    if (not isinstance(group_records, list)):
        return {}
    counters = {}
    for group_record in group_records:
        if (not isinstance(group_record, dict)):
            continue
        group = group_record.get('displayName')
        if (not isinstance(group, string_types)):
            continue
        counter_records = group_record.get('counts')
        if (not isinstance(counter_records, list)):
            continue
        for counter_record in counter_records:
            # NOTE(review): counter_record itself is not checked to be a
            # dict -- a non-dict element would raise AttributeError here.
            counter = counter_record.get('displayName')
            if (not isinstance(counter, string_types)):
                continue
            amount = counter_record.get('value')
            if (not isinstance(amount, integer_types)):
                continue
            counters.setdefault(group, {})
            counters[group].setdefault(counter, 0)
            counters[group][counter] += amount
    return counters
[ "def", "_extract_yarn_counters", "(", "counters_record", ")", ":", "if", "(", "not", "isinstance", "(", "counters_record", ",", "dict", ")", ")", ":", "return", "{", "}", "group_records", "=", "counters_record", ".", "get", "(", "'groups'", ")", "if", "(", ...
convert avro-json counter data structure to our group -> counter -> amount format .
train
false
11,583
def unpolarify(eq, subs={}, exponents_only=False):
    """Remove polar markers from *eq* where the projection from the
    Riemann surface of the logarithm to the complex line permits.

    Iterates _unpolarify to a fixed point, then substitutes the trivial
    polar values exp_polar(0) -> 1 and polar_lift(0) -> 0.  When *subs*
    is non-empty the substitution is applied first and the result
    re-unpolarified.

    NOTE(review): the mutable default subs={} is only compared and passed,
    never mutated, so it is safe here.
    """
    if isinstance(eq, bool):
        return eq
    eq = sympify(eq)
    if (subs != {}):
        return unpolarify(eq.subs(subs))
    changed = True
    pause = False
    if exponents_only:
        pause = True
    while changed:
        changed = False
        res = _unpolarify(eq, exponents_only, pause)
        if (res != eq):
            changed = True
            eq = res
        if isinstance(res, bool):
            return res
    return res.subs({exp_polar(0): 1, polar_lift(0): 0})
[ "def", "unpolarify", "(", "eq", ",", "subs", "=", "{", "}", ",", "exponents_only", "=", "False", ")", ":", "if", "isinstance", "(", "eq", ",", "bool", ")", ":", "return", "eq", "eq", "=", "sympify", "(", "eq", ")", "if", "(", "subs", "!=", "{", ...
if p denotes the projection from the riemann surface of the logarithm to the complex line .
train
false
11,584
@pytest.mark.parametrize('parallel', [True, False])
def test_strip_line_trailing_whitespace(parallel, read_basic):
    """Readers that strip whitespace from lines should ignore trailing
    whitespace after the last data value of each row."""
    # A genuinely short row must still be an error...
    text = 'a b c\n1 2 \n3 4 5'
    with pytest.raises(CParserError) as e:
        ascii.read(StringIO(text), format='fast_basic', guess=False)
    assert ('not enough columns found in line 1' in str(e))
    # ...but trailing spaces/tabs after a complete row must be ignored.
    text = 'a b c\n 1 2 3 DCTB \n 4 5 6 '
    table = read_basic(text, parallel=parallel)
    expected = Table([[1, 4], [2, 5], [3, 6]], names=('a', 'b', 'c'))
    assert_table_equal(table, expected)
[ "@", "pytest", ".", "mark", ".", "parametrize", "(", "'parallel'", ",", "[", "True", ",", "False", "]", ")", "def", "test_strip_line_trailing_whitespace", "(", "parallel", ",", "read_basic", ")", ":", "text", "=", "'a b c\\n1 2 \\n3 4 5'", "with", "pytest", "....
readers that strip whitespace from lines should ignore trailing whitespace after the last data value of each row .
train
false
11,586
def _parse_state_value(state, user):
    """Parse the value of the OAuth 'state' parameter.

    *state* has the form '<uri>:<xsrf_token>'.  Returns the uri when the
    token validates for this user and uri, else None.
    """
    (uri, token) = state.rsplit(':', 1)
    if xsrfutil.validate_token(xsrf_secret_key(), token, user.user_id(), action_id=uri):
        return uri
    else:
        return None
[ "def", "_parse_state_value", "(", "state", ",", "user", ")", ":", "(", "uri", ",", "token", ")", "=", "state", ".", "rsplit", "(", "':'", ",", "1", ")", "if", "xsrfutil", ".", "validate_token", "(", "xsrf_secret_key", "(", ")", ",", "token", ",", "us...
parse the value of the state parameter .
train
true
11,588
@runs_once
def code_prep():
    """Generate the local code tarball and return the hg revision.

    Refuses to run when the working copy has pending changes (revision
    ends with '+').
    """
    rev = hg_revision()
    assert (not rev.endswith('+')), 'Client has pending changes, cannot install.'
    fprint(('Preparing local code tarball (rev %s)' % rev))
    filename = ('viewfinder.%s.tar.gz' % rev)
    # Record the exact revision inside the tarball.
    local('hg identify -i > hg_revision.txt')
    local(('tar czf %s --exclude "*.o" --exclude "*~" --exclude "*.pyc" __init__.py scripts/ marketing/ backend/ resources/ secrets/viewfinder.co hg_revision.txt' % filename))
    return rev
[ "@", "runs_once", "def", "code_prep", "(", ")", ":", "rev", "=", "hg_revision", "(", ")", "assert", "(", "not", "rev", ".", "endswith", "(", "'+'", ")", ")", ",", "'Client has pending changes, cannot install.'", "fprint", "(", "(", "'Preparing local code tarball...
generate the code tarball and return the hg revision .
train
false
11,589
def fcontext_get_policy(name, filetype=None, sel_type=None, sel_user=None, sel_level=None):
    """Return the current SELinux fcontext policy entry for *name* as a dict.

    Greps ``semanage fcontext -l`` for a line matching the filespec and
    any filetype/user/type/level filters supplied; unspecified filters
    match anything.

    Returns:
        dict with 'filespec', 'filetype' and the parsed context fields,
        or None when no entry matches.
    """
    if filetype:
        _validate_filetype(filetype)
    # Columns in `semanage fcontext -l` output are separated by 2+ spaces.
    re_spacer = '[ ]{2,}'
    cmd_kwargs = {'spacer': re_spacer, 'filespec': re.escape(name), 'sel_user': (sel_user or '[^:]+'), 'sel_role': '[^:]+', 'sel_type': (sel_type or '[^:]+'), 'sel_level': (sel_level or '[^:]+')}
    cmd_kwargs['filetype'] = ('[[:alpha:] ]+' if (filetype is None) else filetype_id_to_string(filetype))
    cmd = ('semanage fcontext -l | egrep ' + "'^{filespec}{spacer}{filetype}{spacer}{sel_user}:{sel_role}:{sel_type}:{sel_level}$'".format(**cmd_kwargs))
    current_entry_text = __salt__['cmd.shell'](cmd)
    if (current_entry_text == ''):
        return None
    ret = {}
    current_entry_list = re.split(re_spacer, current_entry_text)
    ret['filespec'] = current_entry_list[0]
    ret['filetype'] = current_entry_list[1]
    ret.update(_context_string_to_dict(current_entry_list[2]))
    return ret
[ "def", "fcontext_get_policy", "(", "name", ",", "filetype", "=", "None", ",", "sel_type", "=", "None", ",", "sel_user", "=", "None", ",", "sel_level", "=", "None", ")", ":", "if", "filetype", ":", "_validate_filetype", "(", "filetype", ")", "re_spacer", "=...
returns the current entry in the selinux policy list as a dictionary .
train
false
11,590
def normalize_aws_facts(metadata, facts):
    """Normalize AWS instance metadata into the provided facts dict.

    Args:
        metadata: AWS provider metadata.
        facts: facts dict to update.

    Returns:
        dict: facts with normalized network/zone information added.
    """
    # Interfaces ordered by their device number.
    for interface in sorted(metadata['network']['interfaces']['macs'].values(), key=(lambda x: x['device-number'])):
        int_info = dict()
        var_map = {'ips': 'local-ipv4s', 'public_ips': 'public-ipv4s'}
        for (ips_var, int_var) in iteritems(var_map):
            ips = interface.get(int_var)
            # Metadata gives a bare string for a single IP; normalize to a list.
            if isinstance(ips, string_types):
                int_info[ips_var] = [ips]
            else:
                int_info[ips_var] = ips
        if ('vpc-id' in interface):
            int_info['network_type'] = 'vpc'
        else:
            int_info['network_type'] = 'classic'
        if (int_info['network_type'] == 'vpc'):
            int_info['network_id'] = interface['subnet-id']
        else:
            int_info['network_id'] = None
        facts['network']['interfaces'].append(int_info)
    facts['zone'] = metadata['placement']['availability-zone']
    facts['network']['ip'] = metadata.get('local-ipv4')
    facts['network']['public_ip'] = metadata.get('public-ipv4')
    facts['network']['hostname'] = metadata.get('local-hostname')
    facts['network']['public_hostname'] = metadata.get('public-hostname')
    return facts
[ "def", "normalize_aws_facts", "(", "metadata", ",", "facts", ")", ":", "for", "interface", "in", "sorted", "(", "metadata", "[", "'network'", "]", "[", "'interfaces'", "]", "[", "'macs'", "]", ".", "values", "(", ")", ",", "key", "=", "(", "lambda", "x...
normalize aws facts args: metadata : provider metadata facts : facts to update returns: dict: the result of adding the normalized metadata to the provided facts dict .
train
false
11,592
def unenroll_email(course_id, student_email, email_students=False, email_params=None, language=None): previous_state = EmailEnrollmentState(course_id, student_email) if previous_state.enrollment: CourseEnrollment.unenroll_by_email(student_email, course_id) if email_students: email_params['message'] = 'enrolled_unenroll' email_params['email_address'] = student_email email_params['full_name'] = previous_state.full_name send_mail_to_student(student_email, email_params, language=language) if previous_state.allowed: CourseEnrollmentAllowed.objects.get(course_id=course_id, email=student_email).delete() if email_students: email_params['message'] = 'allowed_unenroll' email_params['email_address'] = student_email send_mail_to_student(student_email, email_params, language=language) after_state = EmailEnrollmentState(course_id, student_email) return (previous_state, after_state)
[ "def", "unenroll_email", "(", "course_id", ",", "student_email", ",", "email_students", "=", "False", ",", "email_params", "=", "None", ",", "language", "=", "None", ")", ":", "previous_state", "=", "EmailEnrollmentState", "(", "course_id", ",", "student_email", ...
unenroll a student by email .
train
false
11,594
def load_abort_and_exit_bindings(): registry = Registry() handle = registry.add_binding @handle(Keys.ControlC) def _(event): u' Abort when Control-C has been pressed. ' event.cli.abort() @Condition def ctrl_d_condition(cli): u' Ctrl-D binding is only active when the default buffer is selected\n and empty. ' return ((cli.current_buffer_name == DEFAULT_BUFFER) and (not cli.current_buffer.text)) handle(Keys.ControlD, filter=ctrl_d_condition)(get_by_name(u'end-of-file')) return registry
[ "def", "load_abort_and_exit_bindings", "(", ")", ":", "registry", "=", "Registry", "(", ")", "handle", "=", "registry", ".", "add_binding", "@", "handle", "(", "Keys", ".", "ControlC", ")", "def", "_", "(", "event", ")", ":", "event", ".", "cli", ".", ...
basic bindings for abort and exit .
train
true
11,595
def delete_tagging(Bucket, region=None, key=None, keyid=None, profile=None): try: conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile) conn.delete_bucket_tagging(Bucket=Bucket) return {'deleted': True, 'name': Bucket} except ClientError as e: return {'deleted': False, 'error': __utils__['boto3.get_error'](e)}
[ "def", "delete_tagging", "(", "Bucket", ",", "region", "=", "None", ",", "key", "=", "None", ",", "keyid", "=", "None", ",", "profile", "=", "None", ")", ":", "try", ":", "conn", "=", "_get_conn", "(", "region", "=", "region", ",", "key", "=", "key...
delete the tags from the given bucket returns {deleted: true} if tags were deleted and returns {deleted: false} if tags were not deleted .
train
true
11,596
def _api_warnings(name, output, kwargs): if (name == 'clear'): return report(output, keyword='warnings', data=sabnzbd.GUIHANDLER.clear()) elif (name == 'show'): return report(output, keyword='warnings', data=sabnzbd.GUIHANDLER.content()) elif name: return report(output, _MSG_NOT_IMPLEMENTED) return report(output, keyword='warnings', data=sabnzbd.GUIHANDLER.content())
[ "def", "_api_warnings", "(", "name", ",", "output", ",", "kwargs", ")", ":", "if", "(", "name", "==", "'clear'", ")", ":", "return", "report", "(", "output", ",", "keyword", "=", "'warnings'", ",", "data", "=", "sabnzbd", ".", "GUIHANDLER", ".", "clear...
api: accepts name .
train
false
11,597
def download_config(): if (not os.path.exists(CONFIG_PATH)): os.makedirs(CONFIG_PATH) if os.path.exists((CONFIG_PATH + CONFIG_FILE)): os.remove((CONFIG_PATH + CONFIG_FILE)) conf_file_local = open((CONFIG_PATH + CONFIG_FILE), 'wb') print '[*] Downloading config file..' conf_file = urlrequest.urlopen(CONFIG_URL) print '[*] Saving to File {}'.format(CONFIG_FILE) conf_file_local.write(bytes(conf_file.read())) conf_file_local.close()
[ "def", "download_config", "(", ")", ":", "if", "(", "not", "os", ".", "path", ".", "exists", "(", "CONFIG_PATH", ")", ")", ":", "os", ".", "makedirs", "(", "CONFIG_PATH", ")", "if", "os", ".", "path", ".", "exists", "(", "(", "CONFIG_PATH", "+", "C...
download initial config file .
train
false
11,599
def test_bootstrap(): a_ones = np.ones(10) n_boot = 5 out1 = algo.bootstrap(a_ones, n_boot=n_boot) assert_array_equal(out1, np.ones(n_boot)) out2 = algo.bootstrap(a_ones, n_boot=n_boot, func=np.median) assert_array_equal(out2, np.ones(n_boot))
[ "def", "test_bootstrap", "(", ")", ":", "a_ones", "=", "np", ".", "ones", "(", "10", ")", "n_boot", "=", "5", "out1", "=", "algo", ".", "bootstrap", "(", "a_ones", ",", "n_boot", "=", "n_boot", ")", "assert_array_equal", "(", "out1", ",", "np", ".", ...
test of bootstrapping of epochs .
train
false
11,600
def render_to_string_test(template_name, django_context): from django.test import signals signals.template_rendered.send(sender=None, template=template_name, context=django_context) return render_to_string_normal(template_name, django_context)
[ "def", "render_to_string_test", "(", "template_name", ",", "django_context", ")", ":", "from", "django", ".", "test", "import", "signals", "signals", ".", "template_rendered", ".", "send", "(", "sender", "=", "None", ",", "template", "=", "template_name", ",", ...
render a template to string , also firing the template_rendered signal so tests can inspect the template and context .
train
false
11,602
def descape_entity(m, defs=htmlentitydefs.entitydefs): try: return defs[m.group(1)] except KeyError: return m.group(0)
[ "def", "descape_entity", "(", "m", ",", "defs", "=", "htmlentitydefs", ".", "entitydefs", ")", ":", "try", ":", "return", "defs", "[", "m", ".", "group", "(", "1", ")", "]", "except", "KeyError", ":", "return", "m", ".", "group", "(", "0", ")" ]
translate one entity to its iso latin value .
train
false
11,605
def _intercept_idx(design_info): from patsy.desc import INTERCEPT from numpy import array return array([(INTERCEPT == i) for i in design_info.terms])
[ "def", "_intercept_idx", "(", "design_info", ")", ":", "from", "patsy", ".", "desc", "import", "INTERCEPT", "from", "numpy", "import", "array", "return", "array", "(", "[", "(", "INTERCEPT", "==", "i", ")", "for", "i", "in", "design_info", ".", "terms", ...
returns boolean array index indicating which column holds the intercept .
train
false
11,606
def get_boutiques_output(name, interface, tool_inputs, verbose=False): output = {} output[u'name'] = name.replace(u'_', u' ').capitalize() output[u'id'] = name output[u'type'] = u'File' output[u'path-template'] = u'' output[u'optional'] = True output_value = interface._list_outputs()[name] if ((output_value != u'') and isinstance(output_value, str)): for input in tool_inputs: if (not input[u'tempvalue']): continue input_value = input[u'tempvalue'] if (input[u'type'] == u'File'): input_value = os.path.splitext(os.path.basename(input_value))[0] if (str(input_value) in output_value): output_value = os.path.basename(output_value.replace(input_value, input[u'command-line-key'])) output[u'path-template'] = os.path.basename(output_value) return output
[ "def", "get_boutiques_output", "(", "name", ",", "interface", ",", "tool_inputs", ",", "verbose", "=", "False", ")", ":", "output", "=", "{", "}", "output", "[", "u'name'", "]", "=", "name", ".", "replace", "(", "u'_'", ",", "u' '", ")", ".", "capitali...
returns a dictionary containing the boutiques output corresponding to a nipype output .
train
false
11,607
def json_qs_parser(body): try: return json.loads(body) except: pass try: return ElementTree.fromstring(body) except: pass return dict(parse.parse_qsl(body))
[ "def", "json_qs_parser", "(", "body", ")", ":", "try", ":", "return", "json", ".", "loads", "(", "body", ")", "except", ":", "pass", "try", ":", "return", "ElementTree", ".", "fromstring", "(", "body", ")", "except", ":", "pass", "return", "dict", "(",...
parses response body from json .
train
false
11,608
def get_ec2_creds(module): (region, ec2_url, boto_params) = get_aws_connection_info(module) return (ec2_url, boto_params['aws_access_key_id'], boto_params['aws_secret_access_key'], region)
[ "def", "get_ec2_creds", "(", "module", ")", ":", "(", "region", ",", "ec2_url", ",", "boto_params", ")", "=", "get_aws_connection_info", "(", "module", ")", "return", "(", "ec2_url", ",", "boto_params", "[", "'aws_access_key_id'", "]", ",", "boto_params", "[",...
for compatibility mode with old modules that dont/cant yet use ec2_connect method .
train
false
11,609
def autosummary_table_visit_html(self, node): try: tbody = node[0][0][(-1)] for row in tbody: col1_entry = row[0] par = col1_entry[0] for (j, subnode) in enumerate(list(par)): if isinstance(subnode, nodes.Text): new_text = text_type(subnode.astext()) new_text = new_text.replace(u' ', u'\xa0') par[j] = nodes.Text(new_text) except IndexError: pass
[ "def", "autosummary_table_visit_html", "(", "self", ",", "node", ")", ":", "try", ":", "tbody", "=", "node", "[", "0", "]", "[", "0", "]", "[", "(", "-", "1", ")", "]", "for", "row", "in", "tbody", ":", "col1_entry", "=", "row", "[", "0", "]", ...
make the first column of the table non-breaking .
train
false
11,610
def format_metadata_url(api_key, page_number): query_params = [('per_page', '100'), ('sort_by', 'id'), ('page', str(page_number)), ('database_code', 'WIKI')] if (api_key is not None): query_params = ([('api_key', api_key)] + query_params) return ('https://www.quandl.com/api/v3/datasets.csv?' + urlencode(query_params))
[ "def", "format_metadata_url", "(", "api_key", ",", "page_number", ")", ":", "query_params", "=", "[", "(", "'per_page'", ",", "'100'", ")", ",", "(", "'sort_by'", ",", "'id'", ")", ",", "(", "'page'", ",", "str", "(", "page_number", ")", ")", ",", "(",...
build the query url for the quandl wiki metadata .
train
false
11,614
def verify_plaintext(request, client_secret=None, resource_owner_secret=None): signature = sign_plaintext(client_secret, resource_owner_secret) return safe_string_equals(signature, request.signature)
[ "def", "verify_plaintext", "(", "request", ",", "client_secret", "=", "None", ",", "resource_owner_secret", "=", "None", ")", ":", "signature", "=", "sign_plaintext", "(", "client_secret", ",", "resource_owner_secret", ")", "return", "safe_string_equals", "(", "sign...
verify a plaintext signature .
train
false
11,616
def subPre(sub, a, b): sub.calledSubPre = (sub.calledSubPre + 1)
[ "def", "subPre", "(", "sub", ",", "a", ",", "b", ")", ":", "sub", ".", "calledSubPre", "=", "(", "sub", ".", "calledSubPre", "+", "1", ")" ]
a pre-hook for the subclass .
train
false
11,617
@lower_builtin('is not', types.Any, types.Any) def generic_is_not(context, builder, sig, args): is_impl = context.get_function('is', sig) return builder.not_(is_impl(builder, args))
[ "@", "lower_builtin", "(", "'is not'", ",", "types", ".", "Any", ",", "types", ".", "Any", ")", "def", "generic_is_not", "(", "context", ",", "builder", ",", "sig", ",", "args", ")", ":", "is_impl", "=", "context", ".", "get_function", "(", "'is'", ","...
implement x is not y as not .
train
false
11,619
def append_use_flags(atom, uses=None, overwrite=False): if (not uses): uses = portage.dep.dep_getusedeps(atom) if (len(uses) == 0): return atom = atom[:atom.rfind('[')] append_to_package_conf('use', atom=atom, flags=uses, overwrite=overwrite)
[ "def", "append_use_flags", "(", "atom", ",", "uses", "=", "None", ",", "overwrite", "=", "False", ")", ":", "if", "(", "not", "uses", ")", ":", "uses", "=", "portage", ".", "dep", ".", "dep_getusedeps", "(", "atom", ")", "if", "(", "len", "(", "use...
append a list of use flags for a given package or depend atom .
train
true
11,620
def FindQualifiedTargets(target, qualified_list): return [t for t in qualified_list if (ParseQualifiedTarget(t)[1] == target)]
[ "def", "FindQualifiedTargets", "(", "target", ",", "qualified_list", ")", ":", "return", "[", "t", "for", "t", "in", "qualified_list", "if", "(", "ParseQualifiedTarget", "(", "t", ")", "[", "1", "]", "==", "target", ")", "]" ]
given a list of qualified targets , return the ones whose target name matches the given target .
train
false
11,622
def create_rel_file_str(ana, entry): tex_root = ana.tex_root() file_name = entry.file_name (file_dir, file_base) = os.path.split(file_name) (root_dir, _) = os.path.split(tex_root) if (file_dir == root_dir): show_path = file_base else: show_path = os.path.relpath(file_dir, root_dir) show_path = os.path.join(show_path, file_base) if (sublime.platform() == 'windows'): show_path = show_path.replace('\\', '/') line = line_nr(ana, entry) return '{show_path}:{line}'.format(**locals())
[ "def", "create_rel_file_str", "(", "ana", ",", "entry", ")", ":", "tex_root", "=", "ana", ".", "tex_root", "(", ")", "file_name", "=", "entry", ".", "file_name", "(", "file_dir", ",", "file_base", ")", "=", "os", ".", "path", ".", "split", "(", "file_n...
create a nice string "rel_path/to/file .
train
false
11,625
def maybe_declare(entity, channel=None, retry=False, **retry_policy): is_bound = entity.is_bound orig = entity if (not is_bound): assert channel entity = entity.bind(channel) if (channel is None): assert is_bound channel = entity.channel declared = ident = None if (channel.connection and entity.can_cache_declaration): declared = channel.connection.client.declared_entities ident = hash(entity) if (ident in declared): return False if retry: return _imaybe_declare(entity, declared, ident, channel, orig, **retry_policy) return _maybe_declare(entity, declared, ident, channel, orig)
[ "def", "maybe_declare", "(", "entity", ",", "channel", "=", "None", ",", "retry", "=", "False", ",", "**", "retry_policy", ")", ":", "is_bound", "=", "entity", ".", "is_bound", "orig", "=", "entity", "if", "(", "not", "is_bound", ")", ":", "assert", "c...
declare entity .
train
false
11,626
def display_figure_with_iterm2(fig): print display_image_bytes(_get_buffer(fig, format='png', dpi=fig.dpi).read())
[ "def", "display_figure_with_iterm2", "(", "fig", ")", ":", "print", "display_image_bytes", "(", "_get_buffer", "(", "fig", ",", "format", "=", "'png'", ",", "dpi", "=", "fig", ".", "dpi", ")", ".", "read", "(", ")", ")" ]
displays a matplotlib figure using iterm2 inline-image escape sequence .
train
false
11,629
@pytest.mark.django_db def test_plugin_image_id_field(): image = File.objects.create() image_id = ImageIDField() assert (image_id.clean('1') == 1) with pytest.raises(ValidationError): image_id.clean('something malicious')
[ "@", "pytest", ".", "mark", ".", "django_db", "def", "test_plugin_image_id_field", "(", ")", ":", "image", "=", "File", ".", "objects", ".", "create", "(", ")", "image_id", "=", "ImageIDField", "(", ")", "assert", "(", "image_id", ".", "clean", "(", "'1'...
test that imageidfield only accepts id values .
train
false
11,630
def avatar_for_email(email, size=80, skip_cache=False): if (email == u''): email = u'noreply@weblate.org' mail_hash = hashlib.md5(email.lower().encode(u'utf-8')).hexdigest() cache_key = u'-'.join((u'avatar', mail_hash, str(size))) cache = caches[u'default'] url = cache.get(cache_key) if ((url is not None) and (not skip_cache)): return url if HAS_LIBRAVATAR: url = libravatar.libravatar_url(email=email, https=True, default=appsettings.AVATAR_DEFAULT_IMAGE, size=size) else: url = u'{0}avatar/{1}?d={2}&s={3}'.format(appsettings.AVATAR_URL_PREFIX, mail_hash, quote(appsettings.AVATAR_DEFAULT_IMAGE), str(size)) cache.set(cache_key, url) return url
[ "def", "avatar_for_email", "(", "email", ",", "size", "=", "80", ",", "skip_cache", "=", "False", ")", ":", "if", "(", "email", "==", "u''", ")", ":", "email", "=", "u'noreply@weblate.org'", "mail_hash", "=", "hashlib", ".", "md5", "(", "email", ".", "...
generates url for avatar .
train
false
11,631
@_np.deprecate(message='scipy.constants.C2K is deprecated in scipy 0.18.0. Use scipy.constants.convert_temperature instead. Note that the new function has a different signature.') def C2K(C): return (_np.asanyarray(C) + zero_Celsius)
[ "@", "_np", ".", "deprecate", "(", "message", "=", "'scipy.constants.C2K is deprecated in scipy 0.18.0. Use scipy.constants.convert_temperature instead. Note that the new function has a different signature.'", ")", "def", "C2K", "(", "C", ")", ":", "return", "(", "_np", ".", "a...
convert celsius to kelvin parameters c : array_like celsius temperature(s) to be converted .
train
false
11,633
def force_off(name): ret = {} client = salt.client.get_local_client(__opts__['conf_file']) data = vm_info(name, quiet=True) if (not data): print('Failed to find VM {0} to destroy'.format(name)) return 'fail' host = next(six.iterkeys(data)) if (data[host][name]['state'] == 'shutdown'): print('VM {0} is already shutdown'.format(name)) return 'bad state' try: cmd_ret = client.cmd_iter(host, 'virt.destroy', [name], timeout=600) except SaltClientError as client_error: return 'Virtual machine {0} could not be forced off: {1}'.format(name, client_error) for comp in cmd_ret: ret.update(comp) __jid_event__.fire_event({'message': 'Powered off VM {0}'.format(name)}, 'progress') return 'good'
[ "def", "force_off", "(", "name", ")", ":", "ret", "=", "{", "}", "client", "=", "salt", ".", "client", ".", "get_local_client", "(", "__opts__", "[", "'conf_file'", "]", ")", "data", "=", "vm_info", "(", "name", ",", "quiet", "=", "True", ")", "if", ...
force power down the named virtual machine .
train
true
11,636
def encode_labels(labels, nclass=5): Y = np.zeros((len(labels), nclass)).astype('float32') for (j, y) in enumerate(labels): for i in range(nclass): if ((i + 1) == (np.floor(y) + 1)): Y[(j, i)] = (y - np.floor(y)) if ((i + 1) == np.floor(y)): Y[(j, i)] = ((np.floor(y) - y) + 1) return Y
[ "def", "encode_labels", "(", "labels", ",", "nclass", "=", "5", ")", ":", "Y", "=", "np", ".", "zeros", "(", "(", "len", "(", "labels", ")", ",", "nclass", ")", ")", ".", "astype", "(", "'float32'", ")", "for", "(", "j", ",", "y", ")", "in", ...
label encoding from tree lstm paper .
train
false
11,637
def call_iternext(context, builder, iterator_type, val): itemty = iterator_type.yield_type pair_type = types.Pair(itemty, types.boolean) iternext_sig = typing.signature(pair_type, iterator_type) iternext_impl = context.get_function('iternext', iternext_sig) val = iternext_impl(builder, (val,)) pairobj = context.make_helper(builder, pair_type, val) return _IternextResult(context, builder, pairobj)
[ "def", "call_iternext", "(", "context", ",", "builder", ",", "iterator_type", ",", "val", ")", ":", "itemty", "=", "iterator_type", ".", "yield_type", "pair_type", "=", "types", ".", "Pair", "(", "itemty", ",", "types", ".", "boolean", ")", "iternext_sig", ...
call the iternext() implementation for the given *iterator_type* of value *val* .
train
false
11,639
def XMLescape(txt): return txt.replace('&', '&amp;').replace('<', '&lt;').replace('>', '&gt;').replace('"', '&quot;').replace(u'\x0c', '').replace(u'\x1b', '')
[ "def", "XMLescape", "(", "txt", ")", ":", "return", "txt", ".", "replace", "(", "'&'", ",", "'&amp;'", ")", ".", "replace", "(", "'<'", ",", "'&lt;'", ")", ".", "replace", "(", "'>'", ",", "'&gt;'", ")", ".", "replace", "(", "'\"'", ",", "'&quot;'"...
returns provided string with symbols & < > " replaced by their respective xml entities .
train
false
11,640
def remove_datacenter(module, profitbricks): name = module.params.get('name') changed = False if uuid_match.match(name): _remove_datacenter(module, profitbricks, name) changed = True else: datacenters = profitbricks.list_datacenters() for d in datacenters['items']: vdc = profitbricks.get_datacenter(d['id']) if (name == vdc['properties']['name']): name = d['id'] _remove_datacenter(module, profitbricks, name) changed = True return changed
[ "def", "remove_datacenter", "(", "module", ",", "profitbricks", ")", ":", "name", "=", "module", ".", "params", ".", "get", "(", "'name'", ")", "changed", "=", "False", "if", "uuid_match", ".", "match", "(", "name", ")", ":", "_remove_datacenter", "(", "...
removes a datacenter .
train
false
11,641
def edit_team(name, description=None, privacy=None, permission=None, profile='github'): team = get_team(name, profile=profile) if (not team): log.error('Team {0} does not exist'.format(name)) return False try: client = _get_client(profile) organization = client.get_organization(_get_config_value(profile, 'org_name')) team = organization.get_team(team['id']) parameters = {} if (name is not None): parameters['name'] = name if ('description' is not None): parameters['description'] = description if ('privacy' is not None): parameters['privacy'] = privacy if (permission is not None): parameters['permission'] = permission team._requester.requestJsonAndCheck('PATCH', team.url, input=parameters) return True except UnknownObjectException as e: log.exception('Resource not found: {0}'.format(team['id'])) return False
[ "def", "edit_team", "(", "name", ",", "description", "=", "None", ",", "privacy", "=", "None", ",", "permission", "=", "None", ",", "profile", "=", "'github'", ")", ":", "team", "=", "get_team", "(", "name", ",", "profile", "=", "profile", ")", "if", ...
updates an existing github team .
train
true
11,642
def net2deeper_experiment(): train_data = (train_x, train_y) validation_data = (validation_x, validation_y) print('\nExperiment of Net2DeeperNet ...') print('\nbuilding teacher model ...') (teacher_model, _) = make_teacher_model(train_data, validation_data, nb_epoch=3) print('\nbuilding deeper student model by random init ...') make_deeper_student_model(teacher_model, train_data, validation_data, 'random-init', nb_epoch=3) print('\nbuilding deeper student model by net2deeper ...') make_deeper_student_model(teacher_model, train_data, validation_data, 'net2deeper', nb_epoch=3)
[ "def", "net2deeper_experiment", "(", ")", ":", "train_data", "=", "(", "train_x", ",", "train_y", ")", "validation_data", "=", "(", "validation_x", ",", "validation_y", ")", "print", "(", "'\\nExperiment of Net2DeeperNet ...'", ")", "print", "(", "'\\nbuilding teach...
benchmark performances of (1) a teacher model , (2) a deeper student model with random init , and (3) a deeper student model initialized via net2deeper .
train
false
11,644
def html_quote(v, encoding=None): encoding = (encoding or default_encoding) if (v is None): return '' elif isinstance(v, six.binary_type): return cgi.escape(v, 1) elif isinstance(v, six.text_type): if six.PY3: return cgi.escape(v, 1) else: return cgi.escape(v.encode(encoding), 1) elif six.PY3: return cgi.escape(six.text_type(v), 1) else: return cgi.escape(six.text_type(v).encode(encoding), 1)
[ "def", "html_quote", "(", "v", ",", "encoding", "=", "None", ")", ":", "encoding", "=", "(", "encoding", "or", "default_encoding", ")", "if", "(", "v", "is", "None", ")", ":", "return", "''", "elif", "isinstance", "(", "v", ",", "six", ".", "binary_t...
escape html characters .
train
false
11,647
def print_form(n, f, OUT): if f.name: print >>OUT, ('\nForm name=%s (#%d)' % (f.name, (n + 1))) else: print >>OUT, ('\nForm #%d' % ((n + 1),)) if f.controls: print >>OUT, '## ## __Name__________________ __Type___ __ID________ __Value__________________' submit_indices = {} n = 1 for c in f.controls: if c.is_of_kind('clickable'): submit_indices[c] = n n += 1 clickies = [c for c in f.controls if c.is_of_kind('clickable')] nonclickies = [c for c in f.controls if (c not in clickies)] for (n, field) in enumerate(f.controls): if hasattr(field, 'items'): items = [i.name for i in field.items] value_displayed = ('%s of %s' % (field.value, items)) else: value_displayed = ('%s' % (field.value,)) if field.is_of_kind('clickable'): submit_index = ('%-2s' % (submit_indices[field],)) else: submit_index = ' ' strings = (('%-2s' % ((n + 1),)), submit_index, ('%-24s %-9s' % (trunc(str(field.name), 24), trunc(field.type, 9))), ('%-12s' % (trunc((field.id or '(None)'), 12),)), trunc(value_displayed, 40)) for s in strings: print >>OUT, s, print >>OUT, '' print ''
[ "def", "print_form", "(", "n", ",", "f", ",", "OUT", ")", ":", "if", "f", ".", "name", ":", "print", ">>", "OUT", ",", "(", "'\\nForm name=%s (#%d)'", "%", "(", "f", ".", "name", ",", "(", "n", "+", "1", ")", ")", ")", "else", ":", "print", "...
dump the contents of a form as html .
train
false
11,650
def production_url(service_name): project_id = os.environ.get('GAE_LONG_APP_ID') project_url = '{}.appspot.com'.format(project_id) if (service_name == 'default'): return 'https://{}'.format(project_url) else: return 'https://{}-dot-{}'.format(service_name, project_url)
[ "def", "production_url", "(", "service_name", ")", ":", "project_id", "=", "os", ".", "environ", ".", "get", "(", "'GAE_LONG_APP_ID'", ")", "project_url", "=", "'{}.appspot.com'", ".", "format", "(", "project_id", ")", "if", "(", "service_name", "==", "'defaul...
generates url for a service when deployed to app engine .
train
false
11,651
def test_enn_fit(): enn = EditedNearestNeighbours(random_state=RND_SEED) enn.fit(X, Y) assert_equal(enn.min_c_, 0) assert_equal(enn.maj_c_, 2) assert_equal(enn.stats_c_[0], 2) assert_equal(enn.stats_c_[1], 6) assert_equal(enn.stats_c_[2], 12)
[ "def", "test_enn_fit", "(", ")", ":", "enn", "=", "EditedNearestNeighbours", "(", "random_state", "=", "RND_SEED", ")", "enn", ".", "fit", "(", "X", ",", "Y", ")", "assert_equal", "(", "enn", ".", "min_c_", ",", "0", ")", "assert_equal", "(", "enn", "....
test the fitting method .
train
false
11,652
def get_user_groups(name, sid=False): if (name == 'SYSTEM'): groups = [name] else: groups = win32net.NetUserGetLocalGroups(None, name) if (not sid): return groups ret_groups = set() for group in groups: ret_groups.add(get_sid_from_name(group)) return ret_groups
[ "def", "get_user_groups", "(", "name", ",", "sid", "=", "False", ")", ":", "if", "(", "name", "==", "'SYSTEM'", ")", ":", "groups", "=", "[", "name", "]", "else", ":", "groups", "=", "win32net", ".", "NetUserGetLocalGroups", "(", "None", ",", "name", ...
get the groups to which a user belongs args: name : the user name to query sid : true will return a list of sids .
train
true
11,653
def asColumnMatrix(X): if (len(X) == 0): return np.array([]) total = 1 for i in range(0, np.ndim(X[0])): total = (total * X[0].shape[i]) mat = np.empty([total, 0], dtype=X[0].dtype) for col in X: mat = np.append(mat, col.reshape((-1), 1), axis=1) return np.asmatrix(mat)
[ "def", "asColumnMatrix", "(", "X", ")", ":", "if", "(", "len", "(", "X", ")", "==", "0", ")", ":", "return", "np", ".", "array", "(", "[", "]", ")", "total", "=", "1", "for", "i", "in", "range", "(", "0", ",", "np", ".", "ndim", "(", "X", ...
creates a column-matrix from multi-dimensional data items in list l .
train
false
11,654
def get_cip(jd1, jd2): rpnb = erfa.pnm06a(jd1, jd2) (x, y) = erfa.bpn2xy(rpnb) s = erfa.s06(jd1, jd2, x, y) return (x, y, s)
[ "def", "get_cip", "(", "jd1", ",", "jd2", ")", ":", "rpnb", "=", "erfa", ".", "pnm06a", "(", "jd1", ",", "jd2", ")", "(", "x", ",", "y", ")", "=", "erfa", ".", "bpn2xy", "(", "rpnb", ")", "s", "=", "erfa", ".", "s06", "(", "jd1", ",", "jd2"...
find the x , y coordinates of the cip and the cio locator , s .
train
false
11,655
def disconnectMsToNet(Facility_presence=0, UserUser_presence=0, SsVersionIndicator_presence=0): a = TpPd(pd=3) b = MessageType(mesType=37) c = Cause() packet = ((a / b) / c) if (Facility_presence is 1): d = FacilityHdr(ieiF=28, eightBitF=0) packet = (packet / d) if (UserUser_presence is 1): e = UserUserHdr(ieiUU=126, eightBitUU=0) packet = (packet / e) if (SsVersionIndicator_presence is 1): f = SsVersionIndicatorHdr(ieiSVI=127, eightBitSVI=0) packet = (packet / f) return packet
[ "def", "disconnectMsToNet", "(", "Facility_presence", "=", "0", ",", "UserUser_presence", "=", "0", ",", "SsVersionIndicator_presence", "=", "0", ")", ":", "a", "=", "TpPd", "(", "pd", "=", "3", ")", "b", "=", "MessageType", "(", "mesType", "=", "37", ")...
disconnect section 9 .
train
true
11,657
@contextfunction def account_notification_count(context): request = context['request'] user = None if request.user.username: try: user = request.user.profile except Exception: pass notifications = 0 account = None if user: modules = user.get_perspective().get_modules() account = modules.filter(name='treeio.account') if account: notifications = user.notification_set.count() response_format = 'html' if ('response_format' in context): response_format = context['response_format'] return Markup(render_to_string('account/tags/notification_count', {'account': account, 'notifications': notifications}, response_format=response_format))
[ "@", "contextfunction", "def", "account_notification_count", "(", "context", ")", ":", "request", "=", "context", "[", "'request'", "]", "user", "=", "None", "if", "request", ".", "user", ".", "username", ":", "try", ":", "user", "=", "request", ".", "user...
account notification count .
train
false
11,658
def _is_list(arg): if isinstance(arg, dict): return False if isinstance(arg, str): return False return (((not _has_method(arg, 'strip')) and _has_method(arg, '__getitem__')) or _has_method(arg, '__iter__'))
[ "def", "_is_list", "(", "arg", ")", ":", "if", "isinstance", "(", "arg", ",", "dict", ")", ":", "return", "False", "if", "isinstance", "(", "arg", ",", "str", ")", ":", "return", "False", "return", "(", "(", "(", "not", "_has_method", "(", "arg", "...
checks if arg is list-like .
train
true
11,660
def is_error(message): return (message.get('level') == _ERROR_TOKEN)
[ "def", "is_error", "(", "message", ")", ":", "return", "(", "message", ".", "get", "(", "'level'", ")", "==", "_ERROR_TOKEN", ")" ]
check if the query response is an error .
train
false
11,661
@cleanup def test_invisible_Line_rendering(): N = (10 ** 7) x = np.linspace(0, 1, N) y = np.random.normal(size=N) fig = plt.figure() ax = plt.subplot(111) l = mlines.Line2D(x, y) l.set_visible(False) t_no_line = min(repeat(fig.canvas.draw, number=1, repeat=3)) ax.add_line(l) t_unvisible_line = min(repeat(fig.canvas.draw, number=1, repeat=3)) slowdown_factor = (t_unvisible_line / t_no_line) slowdown_threshold = 2 assert (slowdown_factor < slowdown_threshold)
[ "@", "cleanup", "def", "test_invisible_Line_rendering", "(", ")", ":", "N", "=", "(", "10", "**", "7", ")", "x", "=", "np", ".", "linspace", "(", "0", ",", "1", ",", "N", ")", "y", "=", "np", ".", "random", ".", "normal", "(", "size", "=", "N",...
github issue #1256 identified a bug in line rendering ; check that an invisible line2d adds only negligible time to figure drawing .
train
false
11,662
def proxyEndpoint(wrappedEndpoint): return _WrapperServerEndpoint(wrappedEndpoint, HAProxyWrappingFactory)
[ "def", "proxyEndpoint", "(", "wrappedEndpoint", ")", ":", "return", "_WrapperServerEndpoint", "(", "wrappedEndpoint", ",", "HAProxyWrappingFactory", ")" ]
wrap an endpoint with proxy protocol support .
train
false
11,663
def _initialize_models(obj, tokenizer, pos_tagger, np_extractor, analyzer, parser, classifier): obj.tokenizer = _validated_param(tokenizer, u'tokenizer', base_class=(BaseTokenizer, nltk.tokenize.api.TokenizerI), default=BaseBlob.tokenizer, base_class_name=u'BaseTokenizer') obj.np_extractor = _validated_param(np_extractor, u'np_extractor', base_class=BaseNPExtractor, default=BaseBlob.np_extractor) obj.pos_tagger = _validated_param(pos_tagger, u'pos_tagger', BaseTagger, BaseBlob.pos_tagger) obj.analyzer = _validated_param(analyzer, u'analyzer', BaseSentimentAnalyzer, BaseBlob.analyzer) obj.parser = _validated_param(parser, u'parser', BaseParser, BaseBlob.parser) obj.classifier = classifier
[ "def", "_initialize_models", "(", "obj", ",", "tokenizer", ",", "pos_tagger", ",", "np_extractor", ",", "analyzer", ",", "parser", ",", "classifier", ")", ":", "obj", ".", "tokenizer", "=", "_validated_param", "(", "tokenizer", ",", "u'tokenizer'", ",", "base_...
common initialization between baseblob and blobber classes .
train
false
11,665
def _select_for(fd, readable, writable, error, timeout): (rset, wset, xset) = ([], [], []) if readable: rset = [fd] if writable: wset = [fd] if error: xset = [fd] if (timeout is None): (rcount, wcount, xcount) = select.select(rset, wset, xset) else: (rcount, wcount, xcount) = select.select(rset, wset, xset, timeout) return bool((rcount or wcount or xcount))
[ "def", "_select_for", "(", "fd", ",", "readable", ",", "writable", ",", "error", ",", "timeout", ")", ":", "(", "rset", ",", "wset", ",", "xset", ")", "=", "(", "[", "]", ",", "[", "]", ",", "[", "]", ")", "if", "readable", ":", "rset", "=", ...
select polling backend .
train
true
11,666
def _eval_cond(cond): if isinstance(cond, bool): return cond return _condsimp(cond.doit())
[ "def", "_eval_cond", "(", "cond", ")", ":", "if", "isinstance", "(", "cond", ",", "bool", ")", ":", "return", "cond", "return", "_condsimp", "(", "cond", ".", "doit", "(", ")", ")" ]
re-evaluate the conditions .
train
false
11,668
def output(): return s3_rest_controller()
[ "def", "output", "(", ")", ":", "return", "s3_rest_controller", "(", ")" ]
output -> html string either return the result of a function or a sparse htmlized error message and a message in the server log .
train
false
11,669
def fund_holdings(year, quarter, retry_count=3, pause=0.001): (start, end) = rv.QUARTS_DIC[str(quarter)] if (quarter == 1): start = (start % str((year - 1))) end = (end % year) else: (start, end) = ((start % year), (end % year)) ct._write_head() (df, pages) = _holding_cotent(start, end, 0, retry_count, pause) for idx in range(1, pages): df = df.append(_holding_cotent(start, end, idx, retry_count, pause), ignore_index=True) return df
[ "def", "fund_holdings", "(", "year", ",", "quarter", ",", "retry_count", "=", "3", ",", "pause", "=", "0.001", ")", ":", "(", "start", ",", "end", ")", "=", "rv", ".", "QUARTS_DIC", "[", "str", "(", "quarter", ")", "]", "if", "(", "quarter", "==", ...
parameters year:年份e .
train
false
11,670
def prctile_rank(x, p): if (not cbook.iterable(p)): p = np.arange((100.0 / p), 100.0, (100.0 / p)) else: p = np.asarray(p) if ((p.max() <= 1) or (p.min() < 0) or (p.max() > 100)): raise ValueError(u'percentiles should be in range 0..100, not 0..1') ptiles = prctile(x, p) return np.searchsorted(ptiles, x)
[ "def", "prctile_rank", "(", "x", ",", "p", ")", ":", "if", "(", "not", "cbook", ".", "iterable", "(", "p", ")", ")", ":", "p", "=", "np", ".", "arange", "(", "(", "100.0", "/", "p", ")", ",", "100.0", ",", "(", "100.0", "/", "p", ")", ")", ...
return the rank for each element in *x* .
train
false
11,671
def _get_min_grade_requirement(course_key): course = modulestore().get_course(course_key, depth=0) try: return [{'namespace': 'grade', 'name': 'grade', 'display_name': 'Minimum Grade', 'criteria': {'min_grade': course.minimum_grade_credit}}] except AttributeError: LOGGER.error('The course %s does not has minimum_grade_credit attribute', unicode(course.id)) else: return []
[ "def", "_get_min_grade_requirement", "(", "course_key", ")", ":", "course", "=", "modulestore", "(", ")", ".", "get_course", "(", "course_key", ",", "depth", "=", "0", ")", "try", ":", "return", "[", "{", "'namespace'", ":", "'grade'", ",", "'name'", ":", ...
get list of minimum_grade_credit requirement for the given course .
train
false
11,672
def get_stored_cert_serials(store): cmd = 'certutil.exe -store {0}'.format(store) out = __salt__['cmd.run'](cmd) matches = re.findall('Serial Number: (.*)\\r', out) return matches
[ "def", "get_stored_cert_serials", "(", "store", ")", ":", "cmd", "=", "'certutil.exe -store {0}'", ".", "format", "(", "store", ")", "out", "=", "__salt__", "[", "'cmd.run'", "]", "(", "cmd", ")", "matches", "=", "re", ".", "findall", "(", "'Serial Number: (...
get all of the certificate serials in the specified store store the store to get all the certificate serials from cli example: .
train
true