id_within_dataset
int64
1
55.5k
snippet
stringlengths
19
14.2k
tokens
listlengths
6
1.63k
nl
stringlengths
6
352
split_within_dataset
stringclasses
1 value
is_duplicated
bool
2 classes
36,363
@allow_unvouched @never_cache def confirm_delete(request): return render(request, 'phonebook/confirm_delete.html')
[ "@", "allow_unvouched", "@", "never_cache", "def", "confirm_delete", "(", "request", ")", ":", "return", "render", "(", "request", ",", "'phonebook/confirm_delete.html'", ")" ]
display a confirmation page asking the user if they want to leave .
train
false
36,364
def test_unnamed(): t = usertypes.Timer() assert (not t.objectName()) assert (t._name == 'unnamed') assert (repr(t) == "<qutebrowser.utils.usertypes.Timer name='unnamed'>")
[ "def", "test_unnamed", "(", ")", ":", "t", "=", "usertypes", ".", "Timer", "(", ")", "assert", "(", "not", "t", ".", "objectName", "(", ")", ")", "assert", "(", "t", ".", "_name", "==", "'unnamed'", ")", "assert", "(", "repr", "(", "t", ")", "=="...
make sure an unnamed timer is named correctly .
train
false
36,365
def getSegmentFromPath(path, pathIndex): if (len(path) < 2): return None begin = path[(-1)] end = path[(-2)] forwardEndpoint = getEndpointFromPath(path, pathIndex) reversePath = path[:] reversePath.reverse() reverseEndpoint = getEndpointFromPath(reversePath, pathIndex) return (forwardEndpoint, reverseEndpoint)
[ "def", "getSegmentFromPath", "(", "path", ",", "pathIndex", ")", ":", "if", "(", "len", "(", "path", ")", "<", "2", ")", ":", "return", "None", "begin", "=", "path", "[", "(", "-", "1", ")", "]", "end", "=", "path", "[", "(", "-", "2", ")", "...
get endpoint segment from a path .
train
false
36,369
def headers_to_object_info(headers, status_int=HTTP_OK): (headers, meta, sysmeta) = _prep_headers_to_info(headers, 'object') transient_sysmeta = {} for (key, val) in headers.iteritems(): if is_object_transient_sysmeta(key): key = strip_object_transient_sysmeta_prefix(key.lower()) transient_sysmeta[key] = val info = {'status': status_int, 'length': headers.get('content-length'), 'type': headers.get('content-type'), 'etag': headers.get('etag'), 'meta': meta, 'sysmeta': sysmeta, 'transient_sysmeta': transient_sysmeta} return info
[ "def", "headers_to_object_info", "(", "headers", ",", "status_int", "=", "HTTP_OK", ")", ":", "(", "headers", ",", "meta", ",", "sysmeta", ")", "=", "_prep_headers_to_info", "(", "headers", ",", "'object'", ")", "transient_sysmeta", "=", "{", "}", "for", "("...
construct a cacheable dict of object info based on response headers .
train
false
36,370
def getAlongWayHexadecimalColor(beginBrightness, colorWidth, difference, endColorTuple, wayLength): alongWay = 1.0 if (wayLength != 0.0): alongWay = (0.4 + (0.6 * min(1.0, abs((float(difference) / float(wayLength)))))) hexadecimalColor = '#' oneMinusAlongWay = (1.0 - alongWay) for primaryIndex in xrange(3): hexadecimalColor += getAlongWayHexadecimalPrimary(beginBrightness, oneMinusAlongWay, colorWidth, endColorTuple[primaryIndex], alongWay) return hexadecimalColor
[ "def", "getAlongWayHexadecimalColor", "(", "beginBrightness", ",", "colorWidth", ",", "difference", ",", "endColorTuple", ",", "wayLength", ")", ":", "alongWay", "=", "1.0", "if", "(", "wayLength", "!=", "0.0", ")", ":", "alongWay", "=", "(", "0.4", "+", "("...
get a color along the way from begin brightness to the end color .
train
false
36,371
def func_to_time(x): y = ((5.0 * np.asarray(x)) - 10) time.sleep(y) return y
[ "def", "func_to_time", "(", "x", ")", ":", "y", "=", "(", "(", "5.0", "*", "np", ".", "asarray", "(", "x", ")", ")", "-", "10", ")", "time", ".", "sleep", "(", "y", ")", "return", "y" ]
this sleeps for y seconds for use with timing tests .
train
false
36,372
def rule_create(request, **kwargs): body = {'firewall_rule': kwargs} rule = neutronclient(request).create_firewall_rule(body).get('firewall_rule') return Rule(rule)
[ "def", "rule_create", "(", "request", ",", "**", "kwargs", ")", ":", "body", "=", "{", "'firewall_rule'", ":", "kwargs", "}", "rule", "=", "neutronclient", "(", "request", ")", ".", "create_firewall_rule", "(", "body", ")", ".", "get", "(", "'firewall_rule...
create a firewall rule .
train
false
36,373
def _get_instances_with_cached_ips(orig_func, *args, **kwargs): instances = orig_func(*args, **kwargs) context = args[0] fake_device = objects.PciDevice.get_by_dev_addr(context, 1, 'a') def _info_cache_for(instance): info_cache = dict(test_instance_info_cache.fake_info_cache, network_info=_get_fake_cache(), instance_uuid=instance['uuid']) if isinstance(instance, obj_base.NovaObject): _info_cache = objects.InstanceInfoCache(context) objects.InstanceInfoCache._from_db_object(context, _info_cache, info_cache) info_cache = _info_cache instance['info_cache'] = info_cache if isinstance(instances, (list, obj_base.ObjectListBase)): for instance in instances: _info_cache_for(instance) fake_device.claim(instance.uuid) fake_device.allocate(instance) else: _info_cache_for(instances) fake_device.claim(instances.uuid) fake_device.allocate(instances) return instances
[ "def", "_get_instances_with_cached_ips", "(", "orig_func", ",", "*", "args", ",", "**", "kwargs", ")", ":", "instances", "=", "orig_func", "(", "*", "args", ",", "**", "kwargs", ")", "context", "=", "args", "[", "0", "]", "fake_device", "=", "objects", "...
kludge the cache into instance(s) without having to create db entries .
train
false
36,374
def find_spec(name, package=None): fullname = (resolve_name(name, package) if name.startswith('.') else name) if (fullname not in sys.modules): parent_name = fullname.rpartition('.')[0] if parent_name: parent = __import__(parent_name, fromlist=['__path__']) return _find_spec(fullname, parent.__path__) else: return _find_spec(fullname, None) else: module = sys.modules[fullname] if (module is None): return None try: spec = module.__spec__ except AttributeError: raise ValueError('{}.__spec__ is not set'.format(name)) else: if (spec is None): raise ValueError('{}.__spec__ is None'.format(name)) return spec
[ "def", "find_spec", "(", "name", ",", "package", "=", "None", ")", ":", "fullname", "=", "(", "resolve_name", "(", "name", ",", "package", ")", "if", "name", ".", "startswith", "(", "'.'", ")", "else", "name", ")", "if", "(", "fullname", "not", "in",...
return the spec for the specified module .
train
true
36,375
def libvlc_audio_equalizer_new_from_preset(u_index): f = (_Cfunctions.get('libvlc_audio_equalizer_new_from_preset', None) or _Cfunction('libvlc_audio_equalizer_new_from_preset', ((1,),), None, ctypes.c_void_p, ctypes.c_uint)) return f(u_index)
[ "def", "libvlc_audio_equalizer_new_from_preset", "(", "u_index", ")", ":", "f", "=", "(", "_Cfunctions", ".", "get", "(", "'libvlc_audio_equalizer_new_from_preset'", ",", "None", ")", "or", "_Cfunction", "(", "'libvlc_audio_equalizer_new_from_preset'", ",", "(", "(", ...
create a new equalizer .
train
true
36,376
@register.assignment_tag def shipping_charge_discount(method, basket): return method.discount(basket)
[ "@", "register", ".", "assignment_tag", "def", "shipping_charge_discount", "(", "method", ",", "basket", ")", ":", "return", "method", ".", "discount", "(", "basket", ")" ]
template tag for calculating the shipping discount for a given shipping method and basket .
train
false
36,378
def new_endpoint_ref_with_region(service_id, region, interface='public', **kwargs): ref = new_endpoint_ref(service_id, interface, region=region, region_id='invalid', **kwargs) del ref['region_id'] return ref
[ "def", "new_endpoint_ref_with_region", "(", "service_id", ",", "region", ",", "interface", "=", "'public'", ",", "**", "kwargs", ")", ":", "ref", "=", "new_endpoint_ref", "(", "service_id", ",", "interface", ",", "region", "=", "region", ",", "region_id", "=",...
define an endpoint_ref having a pre-3 .
train
false
36,379
def EntityGroupName(entity): element = entity.path().element(0) if element.has_id(): return str(element.id()) elif element.has_name(): return element.name() else: return 'None'
[ "def", "EntityGroupName", "(", "entity", ")", ":", "element", "=", "entity", ".", "path", "(", ")", ".", "element", "(", "0", ")", "if", "element", ".", "has_id", "(", ")", ":", "return", "str", "(", "element", ".", "id", "(", ")", ")", "elif", "...
given entity primary key as reference proto .
train
false
36,380
def _fix_dot_imports(not_consumed): names = {} for (name, stmts) in six.iteritems(not_consumed): if any(((isinstance(stmt, astroid.AssName) and isinstance(stmt.ass_type(), astroid.AugAssign)) for stmt in stmts)): continue for stmt in stmts: if (not isinstance(stmt, (astroid.From, astroid.Import))): continue for imports in stmt.names: second_name = None if (imports[0] == '*'): second_name = name elif ((imports[0].find('.') > (-1)) or (name in imports)): second_name = imports[0] if (second_name and (second_name not in names)): names[second_name] = stmt return sorted(names.items(), key=(lambda a: a[1].fromlineno))
[ "def", "_fix_dot_imports", "(", "not_consumed", ")", ":", "names", "=", "{", "}", "for", "(", "name", ",", "stmts", ")", "in", "six", ".", "iteritems", "(", "not_consumed", ")", ":", "if", "any", "(", "(", "(", "isinstance", "(", "stmt", ",", "astroi...
try to fix imports with multiple dots .
train
false
36,381
def inet_aton(text): parts = text.split('.') if (len(parts) != 4): raise dns.exception.SyntaxError for part in parts: if (not part.isdigit()): raise dns.exception.SyntaxError if ((len(part) > 1) and (part[0] == '0')): raise dns.exception.SyntaxError try: bytes = [int(part) for part in parts] return struct.pack('BBBB', *bytes) except: raise dns.exception.SyntaxError
[ "def", "inet_aton", "(", "text", ")", ":", "parts", "=", "text", ".", "split", "(", "'.'", ")", "if", "(", "len", "(", "parts", ")", "!=", "4", ")", ":", "raise", "dns", ".", "exception", ".", "SyntaxError", "for", "part", "in", "parts", ":", "if...
inet_aton -> packed 32-bit ip representation convert an ip address in string format to the 32-bit packed binary format used in low-level network functions .
train
false
36,382
def make_bpd_error(s_code, s_message): class NewBPDError(BPDError, ): code = s_code message = s_message cmd_name = '' index = 0 def __init__(self): pass return NewBPDError
[ "def", "make_bpd_error", "(", "s_code", ",", "s_message", ")", ":", "class", "NewBPDError", "(", "BPDError", ",", ")", ":", "code", "=", "s_code", "message", "=", "s_message", "cmd_name", "=", "''", "index", "=", "0", "def", "__init__", "(", "self", ")",...
create a bpderror subclass for a static code and message .
train
false
36,383
def addPegOutput(bevel, endZ, outputs, radiusArealized, sides, start, topOverBottom): height = abs((start.z - endZ)) bevelStartRatio = max((1.0 - (bevel / height)), 0.5) oneMinusBevelStartRatio = (1.0 - bevelStartRatio) trunkEndZ = ((bevelStartRatio * endZ) + (oneMinusBevelStartRatio * start.z)) trunkTopOverBottom = ((bevelStartRatio * topOverBottom) + oneMinusBevelStartRatio) cylinder.addCylinderOutputByEndStart(trunkEndZ, radiusArealized, outputs, sides, start, trunkTopOverBottom) capRadius = (radiusArealized * trunkTopOverBottom) capStart = ((bevelStartRatio * Vector3(start.x, start.y, endZ)) + (oneMinusBevelStartRatio * start)) radiusMaximum = max(radiusArealized.real, radiusArealized.imag) endRadiusMaximum = ((radiusMaximum * topOverBottom) - bevel) trunkRadiusMaximum = (radiusMaximum * trunkTopOverBottom) capTopOverBottom = (endRadiusMaximum / trunkRadiusMaximum) cylinder.addCylinderOutputByEndStart(endZ, capRadius, outputs, sides, capStart, capTopOverBottom)
[ "def", "addPegOutput", "(", "bevel", ",", "endZ", ",", "outputs", ",", "radiusArealized", ",", "sides", ",", "start", ",", "topOverBottom", ")", ":", "height", "=", "abs", "(", "(", "start", ".", "z", "-", "endZ", ")", ")", "bevelStartRatio", "=", "max...
add beveled cylinder to outputs given bevel .
train
false
36,384
def get_display_time_zone(time_zone_name): time_zone = timezone(time_zone_name) tz_abbr = get_time_zone_abbr(time_zone) tz_offset = get_time_zone_offset(time_zone) return '{name} ({abbr}, UTC{offset})'.format(name=time_zone, abbr=tz_abbr, offset=tz_offset).replace('_', ' ')
[ "def", "get_display_time_zone", "(", "time_zone_name", ")", ":", "time_zone", "=", "timezone", "(", "time_zone_name", ")", "tz_abbr", "=", "get_time_zone_abbr", "(", "time_zone", ")", "tz_offset", "=", "get_time_zone_offset", "(", "time_zone", ")", "return", "'{name...
returns a formatted display time zone (e .
train
false
36,385
def project_to_sphere(points, center, radius): lengths = scipy.spatial.distance.cdist(points, np.array([center])) return ((((points - center) / lengths) * radius) + center)
[ "def", "project_to_sphere", "(", "points", ",", "center", ",", "radius", ")", ":", "lengths", "=", "scipy", ".", "spatial", ".", "distance", ".", "cdist", "(", "points", ",", "np", ".", "array", "(", "[", "center", "]", ")", ")", "return", "(", "(", ...
projects the elements of points onto the sphere defined by center and radius .
train
false
36,386
def bpm(max_strokes): t0 = None dt = [] for i in range(max_strokes): s = input() if (s == ''): t1 = time.time() if t0: dt.append((t1 - t0)) t0 = t1 else: break ave = (sum([((1.0 / dti) * 60) for dti in dt]) / len(dt)) return ave
[ "def", "bpm", "(", "max_strokes", ")", ":", "t0", "=", "None", "dt", "=", "[", "]", "for", "i", "in", "range", "(", "max_strokes", ")", ":", "s", "=", "input", "(", ")", "if", "(", "s", "==", "''", ")", ":", "t1", "=", "time", ".", "time", ...
returns average bpm listening to enter keystrokes .
train
false
36,388
def to_cudandarray(x): if (not isinstance(x, pycuda.gpuarray.GPUArray)): raise ValueError('We can transfer only pycuda.gpuarray.GPUArray to CudaNdarray') elif (x.dtype != 'float32'): raise ValueError('CudaNdarray support only float32') else: strides = [1] for i in x.shape[::(-1)][:(-1)]: strides.append((strides[(-1)] * i)) strides = tuple(strides[::(-1)]) ptr = int(x.gpudata) z = cuda.from_gpu_pointer(ptr, x.shape, strides, x) return z
[ "def", "to_cudandarray", "(", "x", ")", ":", "if", "(", "not", "isinstance", "(", "x", ",", "pycuda", ".", "gpuarray", ".", "GPUArray", ")", ")", ":", "raise", "ValueError", "(", "'We can transfer only pycuda.gpuarray.GPUArray to CudaNdarray'", ")", "elif", "(",...
take a pycuda .
train
false
36,389
def tst_render_template(template, context): return '<div>{0}</div>'.format(saxutils.escape(repr(context)))
[ "def", "tst_render_template", "(", "template", ",", "context", ")", ":", "return", "'<div>{0}</div>'", ".", "format", "(", "saxutils", ".", "escape", "(", "repr", "(", "context", ")", ")", ")" ]
a test version of render to template .
train
false
36,390
@login_required def invitation_create(request, template_name='invitations/invitation_form.html', success_template_name='invitations/invitation_success.html'): try: allotment = request.user.invitation_allotment if (allotment.amount == 0): return invitation_error(request) except InvitationAllotment.DoesNotExist: return invitation_error(request) if (request.method == 'POST'): form = InvitationForm(request.POST) if form.is_valid(): invitation = form.save(commit=False) invitation.token = Invitation.objects.create_token(invitation.email) invitation.from_user = request.user invitation.save() allotment.decrement(1) send_invitation_email(invitation) return render(request, success_template_name) else: form = InvitationForm() return render(request, template_name, {'form': form})
[ "@", "login_required", "def", "invitation_create", "(", "request", ",", "template_name", "=", "'invitations/invitation_form.html'", ",", "success_template_name", "=", "'invitations/invitation_success.html'", ")", ":", "try", ":", "allotment", "=", "request", ".", "user", ...
returns a form for a user to send an invitation .
train
false
36,392
def make_net_dict(net_id, net_name, ports): res = {const.NET_ID: net_id, const.NET_NAME: net_name} res[const.NET_PORTS] = ports return res
[ "def", "make_net_dict", "(", "net_id", ",", "net_name", ",", "ports", ")", ":", "res", "=", "{", "const", ".", "NET_ID", ":", "net_id", ",", "const", ".", "NET_NAME", ":", "net_name", "}", "res", "[", "const", ".", "NET_PORTS", "]", "=", "ports", "re...
helper funciton .
train
false
36,395
@curry def delayed(obj, name=None, pure=False, nout=None, traverse=True): if isinstance(obj, Delayed): return obj if (isinstance(obj, base.Base) or traverse): (task, dasks) = to_task_dasks(obj) else: task = quote(obj) dasks = [] if (task is obj): if (not ((nout is None) or ((type(nout) is int) and (nout >= 0)))): raise ValueError(('nout must be None or a positive integer, got %s' % nout)) if (not name): try: prefix = obj.__name__ except AttributeError: prefix = type(obj).__name__ token = tokenize(obj, nout, pure=pure) name = ('%s-%s' % (prefix, token)) return DelayedLeaf(obj, name, pure=pure, nout=nout) else: if (not name): name = ('%s-%s' % (type(obj).__name__, tokenize(task, pure=pure))) dasks.append({name: task}) return Delayed(name, dasks)
[ "@", "curry", "def", "delayed", "(", "obj", ",", "name", "=", "None", ",", "pure", "=", "False", ",", "nout", "=", "None", ",", "traverse", "=", "True", ")", ":", "if", "isinstance", "(", "obj", ",", "Delayed", ")", ":", "return", "obj", "if", "(...
decorator used to capture the arguments of a function .
train
false
36,396
def getPathsByIntersectedLoop(begin, end, loop): closestBeginDistanceIndex = euclidean.getClosestDistanceIndexToLine(begin, loop) closestEndDistanceIndex = euclidean.getClosestDistanceIndexToLine(end, loop) beginIndex = ((closestBeginDistanceIndex.index + 1) % len(loop)) endIndex = ((closestEndDistanceIndex.index + 1) % len(loop)) closestBegin = euclidean.getClosestPointOnSegment(loop[closestBeginDistanceIndex.index], loop[beginIndex], begin) closestEnd = euclidean.getClosestPointOnSegment(loop[closestEndDistanceIndex.index], loop[endIndex], end) clockwisePath = [closestBegin] widdershinsPath = [closestBegin] if (closestBeginDistanceIndex.index != closestEndDistanceIndex.index): widdershinsPath += euclidean.getAroundLoop(beginIndex, endIndex, loop) clockwisePath += euclidean.getAroundLoop(endIndex, beginIndex, loop)[::(-1)] clockwisePath.append(closestEnd) widdershinsPath.append(closestEnd) return [clockwisePath, widdershinsPath]
[ "def", "getPathsByIntersectedLoop", "(", "begin", ",", "end", ",", "loop", ")", ":", "closestBeginDistanceIndex", "=", "euclidean", ".", "getClosestDistanceIndexToLine", "(", "begin", ",", "loop", ")", "closestEndDistanceIndex", "=", "euclidean", ".", "getClosestDista...
get both paths along the loop from the point nearest to the begin to the point nearest to the end .
train
false
36,397
def prepare_site_db_and_overrides(): _SITE_DB.clear() _SITE_DB[_MAIN_LANG] = _MAIN_SITEURL main_siteurl = ('/' if (_MAIN_SITEURL == '') else _MAIN_SITEURL) for (lang, overrides) in _SUBSITE_QUEUE.items(): if ('SITEURL' not in overrides): overrides['SITEURL'] = posixpath.join(main_siteurl, lang) _SITE_DB[lang] = overrides['SITEURL'] if ('OUTPUT_PATH' not in overrides): overrides['OUTPUT_PATH'] = os.path.join(_MAIN_SETTINGS['OUTPUT_PATH'], lang) if ('CACHE_PATH' not in overrides): overrides['CACHE_PATH'] = os.path.join(_MAIN_SETTINGS['CACHE_PATH'], lang) if ('STATIC_PATHS' not in overrides): overrides['STATIC_PATHS'] = [] if (('THEME' not in overrides) and ('THEME_STATIC_DIR' not in overrides) and ('THEME_STATIC_PATHS' not in overrides)): relpath = relpath_to_site(lang, _MAIN_LANG) overrides['THEME_STATIC_DIR'] = posixpath.join(relpath, _MAIN_SETTINGS['THEME_STATIC_DIR']) overrides['THEME_STATIC_PATHS'] = [] overrides['DEFAULT_LANG'] = lang
[ "def", "prepare_site_db_and_overrides", "(", ")", ":", "_SITE_DB", ".", "clear", "(", ")", "_SITE_DB", "[", "_MAIN_LANG", "]", "=", "_MAIN_SITEURL", "main_siteurl", "=", "(", "'/'", "if", "(", "_MAIN_SITEURL", "==", "''", ")", "else", "_MAIN_SITEURL", ")", "...
prepare overrides and create _site_db _site_db .
train
true
36,400
@docstring.dedent_interpd def xscale(*args, **kwargs): gca().set_xscale(*args, **kwargs)
[ "@", "docstring", ".", "dedent_interpd", "def", "xscale", "(", "*", "args", ",", "**", "kwargs", ")", ":", "gca", "(", ")", ".", "set_xscale", "(", "*", "args", ",", "**", "kwargs", ")" ]
set the scaling of the *x*-axis .
train
false
36,401
def collect_all_bears_from_sections(sections, log_printer): local_bears = {} global_bears = {} for section in sections: bear_dirs = sections[section].bear_dirs() (local_bears[section], global_bears[section]) = collect_bears(bear_dirs, ['**'], [BEAR_KIND.LOCAL, BEAR_KIND.GLOBAL], log_printer, warn_if_unused_glob=False) return (local_bears, global_bears)
[ "def", "collect_all_bears_from_sections", "(", "sections", ",", "log_printer", ")", ":", "local_bears", "=", "{", "}", "global_bears", "=", "{", "}", "for", "section", "in", "sections", ":", "bear_dirs", "=", "sections", "[", "section", "]", ".", "bear_dirs", ...
collect all kinds of bears from bear directories given in the sections .
train
false
36,402
def scroll(clip, h=None, w=None, x_speed=0, y_speed=0, x_start=0, y_start=0, apply_to='mask'): if (h is None): h = clip.h if (w is None): w = clip.w xmax = ((clip.w - w) - 1) ymax = ((clip.h - h) - 1) def f(gf, t): x = max(0, min(xmax, (x_start + np.round((x_speed * t))))) y = max(0, min(ymax, (y_start + np.round((y_speed * t))))) return gf(t)[y:(y + h), x:(x + w)] return clip.fl(f, apply_to=apply_to)
[ "def", "scroll", "(", "clip", ",", "h", "=", "None", ",", "w", "=", "None", ",", "x_speed", "=", "0", ",", "y_speed", "=", "0", ",", "x_start", "=", "0", ",", "y_start", "=", "0", ",", "apply_to", "=", "'mask'", ")", ":", "if", "(", "h", "is"...
scrolls horizontally or vertically a clip .
train
false
36,403
def limited_join(sep, items, max_chars=30, overflow_marker='...'): full_str = sep.join(items) if (len(full_str) < max_chars): return full_str n_chars = 0 n_items = 0 for (j, item) in enumerate(items): n_chars += (len(item) + len(sep)) if (n_chars < (max_chars - len(overflow_marker))): n_items += 1 else: break return sep.join((list(items[:n_items]) + [overflow_marker]))
[ "def", "limited_join", "(", "sep", ",", "items", ",", "max_chars", "=", "30", ",", "overflow_marker", "=", "'...'", ")", ":", "full_str", "=", "sep", ".", "join", "(", "items", ")", "if", "(", "len", "(", "full_str", ")", "<", "max_chars", ")", ":", ...
join a number of strings to one .
train
true
36,404
def lnsum0(lx): lx = numpy.asarray(lx) base = lx.max() x = numpy.exp((lx - base)) ssum = x.sum(0) normalized = nsum0(lx) result = numpy.log(normalized) conventional = numpy.log((numpy.exp(lx).sum(0) / numpy.exp(lx).sum())) assert similar(result, conventional) return result
[ "def", "lnsum0", "(", "lx", ")", ":", "lx", "=", "numpy", ".", "asarray", "(", "lx", ")", "base", "=", "lx", ".", "max", "(", ")", "x", "=", "numpy", ".", "exp", "(", "(", "lx", "-", "base", ")", ")", "ssum", "=", "x", ".", "sum", "(", "0...
accepts log-values as input .
train
true
36,405
def initialize_collector(cls, name=None, configfile=None, handlers=[]): log = logging.getLogger('diamond') collector = None try: collector = cls(name=name, configfile=configfile, handlers=handlers) except Exception: log.error('Failed to initialize Collector: %s. %s', cls.__name__, traceback.format_exc()) return collector
[ "def", "initialize_collector", "(", "cls", ",", "name", "=", "None", ",", "configfile", "=", "None", ",", "handlers", "=", "[", "]", ")", ":", "log", "=", "logging", ".", "getLogger", "(", "'diamond'", ")", "collector", "=", "None", "try", ":", "collec...
initialize collector .
train
false
36,406
def post_save_user(instance, raw, created, **kwargs): from cms.utils.permissions import get_current_user creator = get_current_user() if ((not creator) or (not created) or creator.is_anonymous()): return page_user = PageUser(user_ptr_id=instance.pk, created_by=creator) page_user.__dict__.update(instance.__dict__) page_user.save()
[ "def", "post_save_user", "(", "instance", ",", "raw", ",", "created", ",", "**", "kwargs", ")", ":", "from", "cms", ".", "utils", ".", "permissions", "import", "get_current_user", "creator", "=", "get_current_user", "(", ")", "if", "(", "(", "not", "creato...
signal called when new user is created .
train
false
36,409
def dup_zz_factor(f, K): (cont, g) = dup_primitive(f, K) n = dup_degree(g) if (dup_LC(g, K) < 0): (cont, g) = ((- cont), dup_neg(g, K)) if (n <= 0): return (cont, []) elif (n == 1): return (cont, [(g, 1)]) if query('USE_IRREDUCIBLE_IN_FACTOR'): if dup_zz_irreducible_p(g, K): return (cont, [(g, 1)]) g = dup_sqf_part(g, K) H = None if query('USE_CYCLOTOMIC_FACTOR'): H = dup_zz_cyclotomic_factor(g, K) if (H is None): H = dup_zz_zassenhaus(g, K) factors = dup_trial_division(f, H, K) return (cont, factors)
[ "def", "dup_zz_factor", "(", "f", ",", "K", ")", ":", "(", "cont", ",", "g", ")", "=", "dup_primitive", "(", "f", ",", "K", ")", "n", "=", "dup_degree", "(", "g", ")", "if", "(", "dup_LC", "(", "g", ",", "K", ")", "<", "0", ")", ":", "(", ...
factor polynomials in z[x] .
train
false
36,410
def project_root(parser, projdir=os.getcwd()): previousdir = curdir = osp.abspath(projdir) testercls = PyTester conf_file_path = osp.join(curdir, CONF_FILE) if osp.isfile(conf_file_path): testercls = load_pytest_conf(conf_file_path, parser) while (this_is_a_testdir(curdir) or osp.isfile(osp.join(curdir, '__init__.py'))): newdir = osp.normpath(osp.join(curdir, os.pardir)) if (newdir == curdir): break previousdir = curdir curdir = newdir conf_file_path = osp.join(curdir, CONF_FILE) if osp.isfile(conf_file_path): testercls = load_pytest_conf(conf_file_path, parser) return (previousdir, testercls)
[ "def", "project_root", "(", "parser", ",", "projdir", "=", "os", ".", "getcwd", "(", ")", ")", ":", "previousdir", "=", "curdir", "=", "osp", ".", "abspath", "(", "projdir", ")", "testercls", "=", "PyTester", "conf_file_path", "=", "osp", ".", "join", ...
try to find projects root and add it to sys .
train
false
36,411
def preserve_plugin_listeners(): if (not ImportAddedPlugin.listeners): ImportAddedPlugin.listeners = _listeners
[ "def", "preserve_plugin_listeners", "(", ")", ":", "if", "(", "not", "ImportAddedPlugin", ".", "listeners", ")", ":", "ImportAddedPlugin", ".", "listeners", "=", "_listeners" ]
preserve the initial plugin listeners as they would otherwise be deleted after the first setup / tear down cycle .
train
false
36,412
def css(state, text, i, formats, user_data): pat = cdata_close_pats[u'style'] m = pat.search(text, i) if (m is None): css_text = text[i:] else: css_text = text[i:m.start()] ans = [] css_user_data = user_data.css_user_data = (user_data.css_user_data or CSSUserData()) state.sub_parser_state = css_user_data.state = (state.sub_parser_state or CSSState()) for (j, num, fmt) in run_loop(css_user_data, css_state_map, formats[u'css_sub_formats'], css_text): ans.append((num, fmt)) if (m is not None): state.sub_parser_state = None state.parse = IN_CLOSING_TAG add_tag_data(user_data, TagStart(m.start(), u'', u'style', True, True)) ans.extend([(2, formats[u'end_tag']), ((len(m.group()) - 2), formats[u'tag_name'])]) return ans
[ "def", "css", "(", "state", ",", "text", ",", "i", ",", "formats", ",", "user_data", ")", ":", "pat", "=", "cdata_close_pats", "[", "u'style'", "]", "m", "=", "pat", ".", "search", "(", "text", ",", "i", ")", "if", "(", "m", "is", "None", ")", ...
inside a <style> tag .
train
false
36,416
def _get_rar_version(xfile): with XFile(xfile) as fd: buf = fd.read(len(RAR5_ID)) if buf.startswith(RAR_ID): return 3 elif buf.startswith(RAR5_ID): return 5 return 0
[ "def", "_get_rar_version", "(", "xfile", ")", ":", "with", "XFile", "(", "xfile", ")", "as", "fd", ":", "buf", "=", "fd", ".", "read", "(", "len", "(", "RAR5_ID", ")", ")", "if", "buf", ".", "startswith", "(", "RAR_ID", ")", ":", "return", "3", "...
check quickly whether file is rar archive .
train
true
36,417
def nzf_cmp_date(nzf1, nzf2): return nzf_cmp_name(nzf1, nzf2, name=False)
[ "def", "nzf_cmp_date", "(", "nzf1", ",", "nzf2", ")", ":", "return", "nzf_cmp_name", "(", "nzf1", ",", "nzf2", ",", "name", "=", "False", ")" ]
compare files based on date .
train
false
36,419
def virtual_interface_update(context, address, values): return IMPL.virtual_interface_update(context, address, values)
[ "def", "virtual_interface_update", "(", "context", ",", "address", ",", "values", ")", ":", "return", "IMPL", ".", "virtual_interface_update", "(", "context", ",", "address", ",", "values", ")" ]
create a virtual interface record in the database .
train
false
36,420
def prompt_for_mc_auth(): print(u'These tests will never delete or modify your music.\n\nIf the tests fail, you *might* end up with a test song/playlist in your library, though.\n') mclient = Mobileclient() valid_mc_auth = False while (not valid_mc_auth): print() email = input(u'Email: ') passwd = getpass() try: android_id = os.environ[u'GM_AA_D_ID'] except KeyError: android_id = input(u"Device ID ('mac' for FROM_MAC_ADDRESS): ") if (android_id == u'mac'): android_id = Mobileclient.FROM_MAC_ADDRESS if (not android_id): print(u'a device id must be provided') sys.exit(1) valid_mc_auth = mclient.login(email, passwd, android_id) return (email, passwd, android_id)
[ "def", "prompt_for_mc_auth", "(", ")", ":", "print", "(", "u'These tests will never delete or modify your music.\\n\\nIf the tests fail, you *might* end up with a test song/playlist in your library, though.\\n'", ")", "mclient", "=", "Mobileclient", "(", ")", "valid_mc_auth", "=", "F...
return a valid tuple by continually prompting the user .
train
false
36,421
@permission_required([('Websites', 'ReadAbuse')]) def queue_abuse_websites(request): queues_helper = ReviewersQueuesHelper(request) sites = queues_helper.get_abuse_queue_websites() page = paginate(request, sites, per_page=20) abuse_formset = WebsiteAbuseViewFormSet((request.POST or None), queryset=page.object_list, request=request) if abuse_formset.is_valid(): abuse_formset.save() return redirect(reverse('reviewers.websites.queue_abuse')) return render(request, 'reviewers/queue.html', context(request, abuse_formset=abuse_formset, tab='abusewebsites', page=page))
[ "@", "permission_required", "(", "[", "(", "'Websites'", ",", "'ReadAbuse'", ")", "]", ")", "def", "queue_abuse_websites", "(", "request", ")", ":", "queues_helper", "=", "ReviewersQueuesHelper", "(", "request", ")", "sites", "=", "queues_helper", ".", "get_abus...
queue for reviewing abuse reports for websites .
train
false
36,422
def get_total_citations(soup): results = soup.find('div', attrs={'id': 'gs_ab_md'}).contents[0] matches = re.search('About\\s(\\d+)\\s', results) if matches: hits = matches.groups()[0] return hits
[ "def", "get_total_citations", "(", "soup", ")", ":", "results", "=", "soup", ".", "find", "(", "'div'", ",", "attrs", "=", "{", "'id'", ":", "'gs_ab_md'", "}", ")", ".", "contents", "[", "0", "]", "matches", "=", "re", ".", "search", "(", "'About\\\\...
get total citations .
train
false
36,425
def make_column_label(command_name, description, default): for (pattern, label) in description_map: if fnmatch.fnmatch(description, pattern): return label return default
[ "def", "make_column_label", "(", "command_name", ",", "description", ",", "default", ")", ":", "for", "(", "pattern", ",", "label", ")", "in", "description_map", ":", "if", "fnmatch", ".", "fnmatch", "(", "description", ",", "pattern", ")", ":", "return", ...
try to work out a reasonable column name from parameter description .
train
true
36,426
def list_storage_accounts(call=None, kwargs=None): global storconn if (not storconn): storconn = get_conn(StorageManagementClient) if (kwargs is None): kwargs = {} ret = {} for acct in pages_to_list(storconn.storage_accounts.list()): ret[acct.name] = object_to_dict(acct) return ret
[ "def", "list_storage_accounts", "(", "call", "=", "None", ",", "kwargs", "=", "None", ")", ":", "global", "storconn", "if", "(", "not", "storconn", ")", ":", "storconn", "=", "get_conn", "(", "StorageManagementClient", ")", "if", "(", "kwargs", "is", "None...
list storage accounts .
train
false
36,427
@real_memoize def is_openbsd(): return sys.platform.startswith('openbsd')
[ "@", "real_memoize", "def", "is_openbsd", "(", ")", ":", "return", "sys", ".", "platform", ".", "startswith", "(", "'openbsd'", ")" ]
simple function to return if host is openbsd or not .
train
false
36,428
def SelectFromList(prompt, items): vim_items = [prompt] vim_items.extend([u'{0}: {1}'.format((i + 1), item) for (i, item) in enumerate(items)]) vim.eval(u'inputsave()') try: selected = (GetIntValue(((u'inputlist( ' + json.dumps(vim_items)) + u' )')) - 1) except KeyboardInterrupt: selected = (-1) finally: vim.eval(u'inputrestore()') if ((selected < 0) or (selected >= len(items))): raise RuntimeError(NO_SELECTION_MADE_MSG) return selected
[ "def", "SelectFromList", "(", "prompt", ",", "items", ")", ":", "vim_items", "=", "[", "prompt", "]", "vim_items", ".", "extend", "(", "[", "u'{0}: {1}'", ".", "format", "(", "(", "i", "+", "1", ")", ",", "item", ")", "for", "(", "i", ",", "item", ...
ask the user to select an item from the list |items| .
train
false
36,429
def SetComputerInfo(): info = win32net.NetWkstaGetInfo(None, 502)
[ "def", "SetComputerInfo", "(", ")", ":", "info", "=", "win32net", ".", "NetWkstaGetInfo", "(", "None", ",", "502", ")" ]
doesnt actually change anything .
train
false
36,430
def get_child_environment(parent_env, child_params, item_to_remove=None, child_resource_name=None): def is_flat_params(env_or_param): if (env_or_param is None): return False for sect in env_fmt.SECTIONS: if (sect in env_or_param): return False return True child_env = parent_env.user_env_as_dict() child_env[env_fmt.PARAMETERS] = {} flat_params = is_flat_params(child_params) new_env = Environment() if (flat_params and (child_params is not None)): child_env[env_fmt.PARAMETERS] = child_params new_env.load(child_env) if ((not flat_params) and (child_params is not None)): new_env.load(child_params) if (item_to_remove is not None): new_env.registry.remove_item(item_to_remove) if child_resource_name: new_env.registry.remove_resources_except(child_resource_name) return new_env
[ "def", "get_child_environment", "(", "parent_env", ",", "child_params", ",", "item_to_remove", "=", "None", ",", "child_resource_name", "=", "None", ")", ":", "def", "is_flat_params", "(", "env_or_param", ")", ":", "if", "(", "env_or_param", "is", "None", ")", ...
build a child environment using the parent environment and params .
train
false
36,431
def unpack_tarfile(filename, extract_dir, progress_filter=default_filter): try: tarobj = tarfile.open(filename) except tarfile.TarError: raise UnrecognizedFormat(('%s is not a compressed or uncompressed tar file' % (filename,))) try: tarobj.chown = (lambda *args: None) for member in tarobj: name = member.name if ((not name.startswith('/')) and ('..' not in name.split('/'))): prelim_dst = os.path.join(extract_dir, *name.split('/')) while ((member is not None) and (member.islnk() or member.issym())): linkpath = member.linkname if member.issym(): linkpath = posixpath.join(posixpath.dirname(member.name), linkpath) linkpath = posixpath.normpath(linkpath) member = tarobj._getmember(linkpath) if ((member is not None) and (member.isfile() or member.isdir())): final_dst = progress_filter(name, prelim_dst) if final_dst: if final_dst.endswith(os.sep): final_dst = final_dst[:(-1)] try: tarobj._extract_member(member, final_dst) except tarfile.ExtractError: pass return True finally: tarobj.close()
[ "def", "unpack_tarfile", "(", "filename", ",", "extract_dir", ",", "progress_filter", "=", "default_filter", ")", ":", "try", ":", "tarobj", "=", "tarfile", ".", "open", "(", "filename", ")", "except", "tarfile", ".", "TarError", ":", "raise", "UnrecognizedFor...
unpack tar/tar .
train
true
36,432
def get_py_filename(name, force_win32=None): name = os.path.expanduser(name) if (force_win32 is not None): warn("The 'force_win32' argument to 'get_py_filename' is deprecated since IPython 5.0 and should not be used anymore", DeprecationWarning, stacklevel=2) if ((not os.path.isfile(name)) and (not name.endswith('.py'))): name += '.py' if os.path.isfile(name): return name else: raise IOError(('File `%r` not found.' % name))
[ "def", "get_py_filename", "(", "name", ",", "force_win32", "=", "None", ")", ":", "name", "=", "os", ".", "path", ".", "expanduser", "(", "name", ")", "if", "(", "force_win32", "is", "not", "None", ")", ":", "warn", "(", "\"The 'force_win32' argument to 'g...
return a valid python filename in the current directory .
train
false
36,433
def write_t4b(t4bfile, coverdata=None): from PIL import Image if (coverdata is not None): coverdata = BytesIO(coverdata) cover = Image.open(coverdata).convert(u'L') cover.thumbnail((96, 144), Image.ANTIALIAS) t4bcover = Image.new(u'L', (96, 144), u'white') (x, y) = cover.size t4bcover.paste(cover, (((96 - x) // 2), ((144 - y) // 2))) pxs = t4bcover.getdata() t4bfile.write('t4bp') data = (((16 * reduce_color(pxs[i])) + reduce_color(pxs[(i + 1)])) for i in xrange(0, len(pxs), 2)) t4bfile.write(bytes(bytearray(data))) else: t4bfile.write(DEFAULT_T4B_DATA)
[ "def", "write_t4b", "(", "t4bfile", ",", "coverdata", "=", "None", ")", ":", "from", "PIL", "import", "Image", "if", "(", "coverdata", "is", "not", "None", ")", ":", "coverdata", "=", "BytesIO", "(", "coverdata", ")", "cover", "=", "Image", ".", "open"...
t4bfile is a file handle ready to write binary data to disk .
train
false
36,434
def runs_last(func): def Wrapper(): calls = func.num_host_calls if (calls >= (len(env.hosts) - 1)): return func() else: func.num_host_calls = (calls + 1) return None setattr(func, 'num_host_calls', 0) return Wrapper
[ "def", "runs_last", "(", "func", ")", ":", "def", "Wrapper", "(", ")", ":", "calls", "=", "func", ".", "num_host_calls", "if", "(", "calls", ">=", "(", "len", "(", "env", ".", "hosts", ")", "-", "1", ")", ")", ":", "return", "func", "(", ")", "...
decorator to run a function only on the last invocation .
train
false
36,435
def checkForSerialEvents(read_time, rx_data, parser_state, **kwargs): print2err('checkForSerialEvents called: ', (read_time, rx_data, parser_state, kwargs)) parser_state['last_time'] = read_time serial_events = [] if (rx_data == 'TEST'): serial_events.append({'data': rx_data}) return serial_events
[ "def", "checkForSerialEvents", "(", "read_time", ",", "rx_data", ",", "parser_state", ",", "**", "kwargs", ")", ":", "print2err", "(", "'checkForSerialEvents called: '", ",", "(", "read_time", ",", "rx_data", ",", "parser_state", ",", "kwargs", ")", ")", "parser...
must have the following signature: evt_list = somecustomparsername where: read_time: the time when the serial device read() returned with the new rx_data .
train
false
36,436
@login_required def upgrade_account(request, acc_type): user = request.user bill = None try: bill = user.bill except: bill = Bill(user=user) bill.upgrade_type = acc_type bill.save() tn = bill.pk if (acc_type == 'bronze'): url = create_partner_trade_by_buyer(tn, u'ikindle\u6742\u5fd7\u8ba2\u9605(4\u4efd)', u'\u8ba2\u9605\u6742\u5fd7\u5230\u4f60\u7684Kindle\uff0c 2.99x6\u4e2a\u6708', '0.01') return HttpResponseRedirect(url) elif (acc_type == 'silver'): url = create_partner_trade_by_buyer(tn, u'ikindle\u6742\u5fd7\u8ba2\u9605(6\u4efd)', u'\u8ba2\u9605\u6742\u5fd7\u5230\u4f60\u7684Kindle\uff0c3.99x6\u4e2a\u6708', '0.01') return HttpResponseRedirect(url) elif (acc_type == 'gold'): url = create_partner_trade_by_buyer(tn, u'ikindle\u6742\u5fd7\u8ba2\u9605(\u65e0\u9650\u5236)', u'\u8ba2\u9605\u6742\u5fd7\u5230\u4f60\u7684Kindle\uff0c5.99x6\u4e2a\u6708', '0.01') return HttpResponseRedirect(url) else: return HttpResponseRedirect(reverse('payment_error'))
[ "@", "login_required", "def", "upgrade_account", "(", "request", ",", "acc_type", ")", ":", "user", "=", "request", ".", "user", "bill", "=", "None", "try", ":", "bill", "=", "user", ".", "bill", "except", ":", "bill", "=", "Bill", "(", "user", "=", ...
request for upgrade account to acc_type .
train
false
36,437
def create_index(): for (k, v) in INDEX.items(): for (key, kwargs) in v.items(): client[DATABASE_NAME][k].ensure_index((list(key) if (type(key) == types.TupleType) else key), **kwargs)
[ "def", "create_index", "(", ")", ":", "for", "(", "k", ",", "v", ")", "in", "INDEX", ".", "items", "(", ")", ":", "for", "(", "key", ",", "kwargs", ")", "in", "v", ".", "items", "(", ")", ":", "client", "[", "DATABASE_NAME", "]", "[", "k", "]...
create an index for a single column .
train
false
36,438
def remove_outliers(seqs, num_stds, fraction_seqs_for_stats=0.95): aln = Alignment.from_fasta_records(parse_fasta(seqs), DNA) consensus_seq = aln.majority_consensus() dists_to_consensus = [s.distance(consensus_seq) for s in aln] average_distance = mean(dists_to_consensus) std_distance = std(dists_to_consensus) dist_cutoff = (average_distance + (num_stds * std_distance)) seqs_to_keep = [] for (seq_id, dist_to_consensus) in izip(aln.ids(), dists_to_consensus): if (dist_to_consensus <= dist_cutoff): seqs_to_keep.append(seq_id) filtered_aln = aln.subalignment(seqs_to_keep=seqs_to_keep) return filtered_aln
[ "def", "remove_outliers", "(", "seqs", ",", "num_stds", ",", "fraction_seqs_for_stats", "=", "0.95", ")", ":", "aln", "=", "Alignment", ".", "from_fasta_records", "(", "parse_fasta", "(", "seqs", ")", ",", "DNA", ")", "consensus_seq", "=", "aln", ".", "major...
remove sequences very different from the majority consensus given aligned sequences .
train
false
36,439
def get_unused_port_and_socket(): s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) s.bind(('localhost', 0)) (addr, port) = s.getsockname() return (port, s)
[ "def", "get_unused_port_and_socket", "(", ")", ":", "s", "=", "socket", ".", "socket", "(", "socket", ".", "AF_INET", ",", "socket", ".", "SOCK_STREAM", ")", "s", ".", "bind", "(", "(", "'localhost'", ",", "0", ")", ")", "(", "addr", ",", "port", ")"...
returns an unused port on localhost and the open socket from which it was created .
train
false
36,440
def convert_bytes(bytes): if (bytes is None): bytes = 0 bytes = float(bytes) if (bytes >= 1099511627776): terabytes = (bytes / 1099511627776) size = ('%.2fTB' % terabytes) elif (bytes >= 1073741824): gigabytes = (bytes / 1073741824) size = ('%.2fGB' % gigabytes) elif (bytes >= 1048576): megabytes = (bytes / 1048576) size = ('%.2fMB' % megabytes) elif (bytes >= 1024): kilobytes = (bytes / 1024) size = ('%.2fKB' % kilobytes) else: size = ('%.2fb' % bytes) return size
[ "def", "convert_bytes", "(", "bytes", ")", ":", "if", "(", "bytes", "is", "None", ")", ":", "bytes", "=", "0", "bytes", "=", "float", "(", "bytes", ")", "if", "(", "bytes", ">=", "1099511627776", ")", ":", "terabytes", "=", "(", "bytes", "/", "1099...
returns given bytes as prettified string .
train
true
36,441
def getLoopConvex(polygonComplex): if (len(polygonComplex) < 4): return polygonComplex leftPointIndex = getLeftPointIndex(polygonComplex) around = (polygonComplex[(leftPointIndex + 1):] + polygonComplex[:(leftPointIndex + 1)]) lastAddedIndex = (-1) lastPoint = around[(-1)] aroundLengthMinusOne = (len(around) - 1) polygonConvex = [] segment = complex(0.0, (-1.0)) while (lastAddedIndex < aroundLengthMinusOne): lastAddedIndex = getLoopConvexAddedIndex(around, lastAddedIndex, lastPoint, segment) segment = getNormalized((around[lastAddedIndex] - lastPoint)) lastPoint = around[lastAddedIndex] polygonConvex.append(lastPoint) return polygonConvex
[ "def", "getLoopConvex", "(", "polygonComplex", ")", ":", "if", "(", "len", "(", "polygonComplex", ")", "<", "4", ")", ":", "return", "polygonComplex", "leftPointIndex", "=", "getLeftPointIndex", "(", "polygonComplex", ")", "around", "=", "(", "polygonComplex", ...
get convex hull of a complex polygon using gift wrap algorithm .
train
false
36,444
def vm_attach(name, kwargs=None, call=None): if (call != 'action'): raise SaltCloudSystemExit('The vm_attach action must be called with -a or --action.') if (kwargs is None): kwargs = {} path = kwargs.get('path', None) data = kwargs.get('data', None) if data: if path: log.warning("Both the 'data' and 'path' arguments were provided. 'data' will take precedence.") elif path: data = salt.utils.fopen(path, mode='r').read() else: raise SaltCloudSystemExit("The vm_attach function requires either 'data' or a file 'path' to be provided.") (server, user, password) = _get_xml_rpc() auth = ':'.join([user, password]) vm_id = int(get_vm_id(kwargs={'name': name})) response = server.one.vm.attach(auth, vm_id, data) ret = {'action': 'vm.attach', 'attached': response[0], 'vm_id': response[1], 'error_code': response[2]} return ret
[ "def", "vm_attach", "(", "name", ",", "kwargs", "=", "None", ",", "call", "=", "None", ")", ":", "if", "(", "call", "!=", "'action'", ")", ":", "raise", "SaltCloudSystemExit", "(", "'The vm_attach action must be called with -a or --action.'", ")", "if", "(", "...
attaches a new disk to the given virtual machine .
train
true
36,445
def NowUTCToISO8601(): return TimestampUTCToISO8601(time.time())
[ "def", "NowUTCToISO8601", "(", ")", ":", "return", "TimestampUTCToISO8601", "(", "time", ".", "time", "(", ")", ")" ]
return the current date in iso 8601 format: yyyy-mm-dd .
train
false
36,446
def _format_unicode_password(pwd): return '"{0}"'.format(pwd).encode('utf-16-le')
[ "def", "_format_unicode_password", "(", "pwd", ")", ":", "return", "'\"{0}\"'", ".", "format", "(", "pwd", ")", ".", "encode", "(", "'utf-16-le'", ")" ]
formats a string per microsoft ad password specifications .
train
false
36,447
def require_certificate_permission(func): @wraps(func) def inner(request, *args, **kwargs): if has_access(request.user, 'certificates', 'global'): return func(request, *args, **kwargs) else: return HttpResponseForbidden() return inner
[ "def", "require_certificate_permission", "(", "func", ")", ":", "@", "wraps", "(", "func", ")", "def", "inner", "(", "request", ",", "*", "args", ",", "**", "kwargs", ")", ":", "if", "has_access", "(", "request", ".", "user", ",", "'certificates'", ",", ...
view decorator that requires permission to view and regenerate certificates .
train
false
36,450
def prepare_request_uri_query(oauth_params, uri): (sch, net, path, par, query, fra) = urlparse(uri) query = urlencode(_append_params(oauth_params, (extract_params(query) or []))) return urlunparse((sch, net, path, par, query, fra))
[ "def", "prepare_request_uri_query", "(", "oauth_params", ",", "uri", ")", ":", "(", "sch", ",", "net", ",", "path", ",", "par", ",", "query", ",", "fra", ")", "=", "urlparse", "(", "uri", ")", "query", "=", "urlencode", "(", "_append_params", "(", "oau...
prepare the request uri query .
train
true
36,451
def count_changes(change): if isinstance(change, ChangeSet): result = 0 for child in change.changes: result += count_changes(child) return result return 1
[ "def", "count_changes", "(", "change", ")", ":", "if", "isinstance", "(", "change", ",", "ChangeSet", ")", ":", "result", "=", "0", "for", "child", "in", "change", ".", "changes", ":", "result", "+=", "count_changes", "(", "child", ")", "return", "result...
counts the number of basic changes a change will make .
train
true
36,452
def model_to_dict(instance, fields=None, exclude=None): from django.db.models.fields.related import ManyToManyField opts = instance._meta data = {} for f in (opts.fields + opts.many_to_many): if (not f.editable): continue if (fields and (not (f.name in fields))): continue if (exclude and (f.name in exclude)): continue if isinstance(f, ManyToManyField): if (instance.pk is None): data[f.name] = [] else: data[f.name] = list(f.value_from_object(instance).values_list(u'pk', flat=True)) else: data[f.name] = f.value_from_object(instance) return data
[ "def", "model_to_dict", "(", "instance", ",", "fields", "=", "None", ",", "exclude", "=", "None", ")", ":", "from", "django", ".", "db", ".", "models", ".", "fields", ".", "related", "import", "ManyToManyField", "opts", "=", "instance", ".", "_meta", "da...
returns a dict containing the data in instance suitable for passing as a forms initial keyword argument .
train
false
36,453
def render_markup(text): return Markup(markdown.render(text))
[ "def", "render_markup", "(", "text", ")", ":", "return", "Markup", "(", "markdown", ".", "render", "(", "text", ")", ")" ]
renders the given text as markdown .
train
false
36,454
def register_email_hook(signal, handler): assert (signal in (review_request_published, review_request_closed, review_published, reply_published)), (u'Invalid signal %r' % signal) _hooks[signal].add(handler)
[ "def", "register_email_hook", "(", "signal", ",", "handler", ")", ":", "assert", "(", "signal", "in", "(", "review_request_published", ",", "review_request_closed", ",", "review_published", ",", "reply_published", ")", ")", ",", "(", "u'Invalid signal %r'", "%", "...
register an e-mail hook .
train
false
36,455
@bp.route('/') def topics(): page = force_int(request.args.get('page', 1), 0) if (not page): return abort(404) paginator = Topic.query.order_by(Topic.updated.desc()).paginate(page) paginator.items = fill_topics(paginator.items) return render_template('topic/topics.html', paginator=paginator, endpoint='topic.topics')
[ "@", "bp", ".", "route", "(", "'/'", ")", "def", "topics", "(", ")", ":", "page", "=", "force_int", "(", "request", ".", "args", ".", "get", "(", "'page'", ",", "1", ")", ",", "0", ")", "if", "(", "not", "page", ")", ":", "return", "abort", "...
the topics list page .
train
false
36,458
def single_pre_save_handler(sender, instance, **kwargs): instance._ct_inventory = None
[ "def", "single_pre_save_handler", "(", "sender", ",", "instance", ",", "**", "kwargs", ")", ":", "instance", ".", "_ct_inventory", "=", "None" ]
clobber the _ct_inventory attribute of this object .
train
false
36,460
def sanitizeSceneName(name, ezrss=False): if (not ezrss): bad_chars = u",:()'!?\u2019" else: bad_chars = u",()'?\u2019" for x in bad_chars: name = name.replace(x, '') name = name.replace('- ', '.').replace(' ', '.').replace('&', 'and').replace('/', '.') name = re.sub('\\.\\.*', '.', name) if name.endswith('.'): name = name[:(-1)] return name
[ "def", "sanitizeSceneName", "(", "name", ",", "ezrss", "=", "False", ")", ":", "if", "(", "not", "ezrss", ")", ":", "bad_chars", "=", "u\",:()'!?\\u2019\"", "else", ":", "bad_chars", "=", "u\",()'?\\u2019\"", "for", "x", "in", "bad_chars", ":", "name", "="...
takes a show name and returns the "scenified" version of it .
train
false
36,461
def iconvergence_agent_tests_factory(fixture): class IConvergenceAgentTests(TestCase, ): '\n Tests for ``IConvergenceAgent``.\n ' def test_connected(self): '\n ``IConvergenceAgent.connected()`` takes an AMP instance.\n ' agent = fixture(self) agent.connected(connected_amp_protocol()) def test_disconnected(self): '\n ``IConvergenceAgent.disconnected()`` can be called after\n ``IConvergenceAgent.connected()``.\n ' agent = fixture(self) agent.connected(connected_amp_protocol()) agent.disconnected() def test_reconnected(self): '\n ``IConvergenceAgent.connected()`` can be called after\n ``IConvergenceAgent.disconnected()``.\n ' agent = fixture(self) agent.connected(connected_amp_protocol()) agent.disconnected() agent.connected(connected_amp_protocol()) def test_cluster_updated(self): '\n ``IConvergenceAgent.cluster_updated()`` takes two ``Deployment``\n instances.\n ' agent = fixture(self) agent.connected(connected_amp_protocol()) agent.cluster_updated(Deployment(nodes=frozenset()), DeploymentState(nodes=[])) def test_interface(self): '\n The object provides ``IConvergenceAgent``.\n ' agent = fixture(self) self.assertTrue(verifyObject(IConvergenceAgent, agent)) return IConvergenceAgentTests
[ "def", "iconvergence_agent_tests_factory", "(", "fixture", ")", ":", "class", "IConvergenceAgentTests", "(", "TestCase", ",", ")", ":", "def", "test_connected", "(", "self", ")", ":", "agent", "=", "fixture", "(", "self", ")", "agent", ".", "connected", "(", ...
create tests that verify basic iconvergenceagent compliance .
train
false
36,462
def cachedir_index_add(minion_id, profile, driver, provider, base=None): base = init_cachedir(base) index_file = os.path.join(base, 'index.p') lock_file(index_file) if os.path.exists(index_file): with salt.utils.fopen(index_file, 'r') as fh_: index = msgpack.load(fh_) else: index = {} prov_comps = provider.split(':') index.update({minion_id: {'id': minion_id, 'profile': profile, 'driver': driver, 'provider': prov_comps[0]}}) with salt.utils.fopen(index_file, 'w') as fh_: msgpack.dump(index, fh_) unlock_file(index_file)
[ "def", "cachedir_index_add", "(", "minion_id", ",", "profile", ",", "driver", ",", "provider", ",", "base", "=", "None", ")", ":", "base", "=", "init_cachedir", "(", "base", ")", "index_file", "=", "os", ".", "path", ".", "join", "(", "base", ",", "'in...
add an entry to the cachedir index .
train
false
36,463
def unique_file(path, chmod=511, mode='w'): (path, tail) = os.path.split(path) return _unique_file(path, filename_pat=(lambda count: ('%04d_%s' % (count, tail))), count=0, chmod=chmod, mode=mode)
[ "def", "unique_file", "(", "path", ",", "chmod", "=", "511", ",", "mode", "=", "'w'", ")", ":", "(", "path", ",", "tail", ")", "=", "os", ".", "path", ".", "split", "(", "path", ")", "return", "_unique_file", "(", "path", ",", "filename_pat", "=", ...
safely finds a unique file .
train
false
36,465
def correlation_matrix(data): (_, num_columns) = shape(data) def matrix_entry(i, j): return correlation(get_column(data, i), get_column(data, j)) return make_matrix(num_columns, num_columns, matrix_entry)
[ "def", "correlation_matrix", "(", "data", ")", ":", "(", "_", ",", "num_columns", ")", "=", "shape", "(", "data", ")", "def", "matrix_entry", "(", "i", ",", "j", ")", ":", "return", "correlation", "(", "get_column", "(", "data", ",", "i", ")", ",", ...
returns the num_columns x num_columns matrix whose th entry is the correlation between columns i and j of data .
train
false
36,466
def _SendFileToServer(pool, fullpath, subpath, basename, rebalance): fp = FileCopyWrapper(rebalance, subpath, basename, fullpath) try: headers = {'Content-Length': 0} res = pool.urlopen('POST', '/rebalance/copy-file', headers=headers, body=fp) if (res.status != constants.RESPONSE_OK): return False except urllib3.exceptions.MaxRetryError: logging.warning('Failed to send file %s', fullpath) return False finally: fp.close() return True
[ "def", "_SendFileToServer", "(", "pool", ",", "fullpath", ",", "subpath", ",", "basename", ",", "rebalance", ")", ":", "fp", "=", "FileCopyWrapper", "(", "rebalance", ",", "subpath", ",", "basename", ",", "fullpath", ")", "try", ":", "headers", "=", "{", ...
sends a specific data store file to the server .
train
false
36,467
def _root_task(worker): return worker._add_task_history[0][0]
[ "def", "_root_task", "(", "worker", ")", ":", "return", "worker", ".", "_add_task_history", "[", "0", "]", "[", "0", "]" ]
return the first task scheduled by the worker .
train
false
36,468
def getNewRepository(): return ExportRepository()
[ "def", "getNewRepository", "(", ")", ":", "return", "ExportRepository", "(", ")" ]
get the repository constructor .
train
false
36,469
def update_symlinks(config, unused_plugins): cert_manager.update_live_symlinks(config)
[ "def", "update_symlinks", "(", "config", ",", "unused_plugins", ")", ":", "cert_manager", ".", "update_live_symlinks", "(", "config", ")" ]
update the certificate file family symlinks use the information in the config file to make symlinks point to the correct archive directory .
train
false
36,470
def test_setup_headshape(): for hs in hs_fnames: (dig, t) = _process_bti_headshape(hs) expected = set(['kind', 'ident', 'r']) found = set(reduce((lambda x, y: (list(x) + list(y))), [d.keys() for d in dig])) assert_true((not (expected - found)))
[ "def", "test_setup_headshape", "(", ")", ":", "for", "hs", "in", "hs_fnames", ":", "(", "dig", ",", "t", ")", "=", "_process_bti_headshape", "(", "hs", ")", "expected", "=", "set", "(", "[", "'kind'", ",", "'ident'", ",", "'r'", "]", ")", "found", "=...
test reading bti headshape .
train
false
36,472
def load_kvm(): kvm_arch = get_kvm_arch() def load_module(mod='kvm'): return utils.system(('modprobe %s' % mod)) loaded = load_module() if (not loaded): loaded = load_module(mod=kvm_arch) return loaded
[ "def", "load_kvm", "(", ")", ":", "kvm_arch", "=", "get_kvm_arch", "(", ")", "def", "load_module", "(", "mod", "=", "'kvm'", ")", ":", "return", "utils", ".", "system", "(", "(", "'modprobe %s'", "%", "mod", ")", ")", "loaded", "=", "load_module", "(",...
loads the appropriate kvm kernel modules depending on the current cpu architecture :returns: 0 on success or 1 on failure :rtype: int .
train
false
36,473
def encrypt_int(message, ekey, n): if (type(message) is types.IntType): message = long(message) if (not (type(message) is types.LongType)): raise TypeError('You must pass a long or int') if ((message < 0) or (message > n)): raise OverflowError('The message is too long') safebit = (bit_size(n) - 2) message += (1 << safebit) return pow(message, ekey, n)
[ "def", "encrypt_int", "(", "message", ",", "ekey", ",", "n", ")", ":", "if", "(", "type", "(", "message", ")", "is", "types", ".", "IntType", ")", ":", "message", "=", "long", "(", "message", ")", "if", "(", "not", "(", "type", "(", "message", ")...
encrypts a message using encryption key ekey .
train
false
36,474
def random_mpd(distmat, n, iters): means = [] indices = arange(distmat.shape[0]) for i in range(iters): shuffle(indices) means.append(mpd(reduce_mtx(distmat, indices[:n]))) return (mean(means), std(means))
[ "def", "random_mpd", "(", "distmat", ",", "n", ",", "iters", ")", ":", "means", "=", "[", "]", "indices", "=", "arange", "(", "distmat", ".", "shape", "[", "0", "]", ")", "for", "i", "in", "range", "(", "iters", ")", ":", "shuffle", "(", "indices...
calc mean .
train
false
36,475
def set_namespace(namespace): if (namespace is None): os.environ.pop(_ENV_CURRENT_NAMESPACE, None) else: validate_namespace(namespace) os.environ[_ENV_CURRENT_NAMESPACE] = namespace
[ "def", "set_namespace", "(", "namespace", ")", ":", "if", "(", "namespace", "is", "None", ")", ":", "os", ".", "environ", ".", "pop", "(", "_ENV_CURRENT_NAMESPACE", ",", "None", ")", "else", ":", "validate_namespace", "(", "namespace", ")", "os", ".", "e...
set the default namespace for the current http request .
train
false
36,476
@core_helper def list_dict_filter(list_, search_field, output_field, value): for item in list_: if (item.get(search_field) == value): return item.get(output_field, value) return value
[ "@", "core_helper", "def", "list_dict_filter", "(", "list_", ",", "search_field", ",", "output_field", ",", "value", ")", ":", "for", "item", "in", "list_", ":", "if", "(", "item", ".", "get", "(", "search_field", ")", "==", "value", ")", ":", "return", ...
takes a list of dicts and returns the value of a given key if the item has a matching value for a supplied key .
train
false
36,477
def get_access_token(scopes, service_account_id=None): memcache_key = (_MEMCACHE_KEY_PREFIX + str(scopes)) if service_account_id: memcache_key += (',%d' % service_account_id) memcache_value = memcache.get(memcache_key, namespace=_MEMCACHE_NAMESPACE) if memcache_value: (access_token, expires_at) = memcache_value else: (access_token, expires_at) = get_access_token_uncached(scopes, service_account_id=service_account_id) memcache.add(memcache_key, (access_token, expires_at), (expires_at - 300), namespace=_MEMCACHE_NAMESPACE) return (access_token, expires_at)
[ "def", "get_access_token", "(", "scopes", ",", "service_account_id", "=", "None", ")", ":", "memcache_key", "=", "(", "_MEMCACHE_KEY_PREFIX", "+", "str", "(", "scopes", ")", ")", "if", "service_account_id", ":", "memcache_key", "+=", "(", "',%d'", "%", "servic...
get the access token using the provided credentials arguments: username : a string containing the username to log in password : a string containing the password for the username returns: str: authentication token .
train
false
36,478
def depend_check(deps_name, *args): missing = [] for dependency in args: try: __import__(dependency) except ImportError: missing.append(dependency) print(('-' * 80)) if missing: print((red('You are missing the following %s dependencies:') % deps_name)) for dep in missing: name = pkg_info_dict.get(dep, dep) print(' * ', name) print() else: print((blue('All %s dependencies installed! You are good to go!\n') % deps_name))
[ "def", "depend_check", "(", "deps_name", ",", "*", "args", ")", ":", "missing", "=", "[", "]", "for", "dependency", "in", "args", ":", "try", ":", "__import__", "(", "dependency", ")", "except", "ImportError", ":", "missing", ".", "append", "(", "depende...
make sure a given dependency is installed .
train
false
36,479
def rewrite_local_config(mobsf_home): CONFIG['MobSF']['subdir_tools'] = (mobsf_home + '\\StaticAnalyzer\\tools\\windows\\') CONFIG['MobSF']['dir'] = mobsf_home with open(os.path.join(CONFIG_PATH, CONFIG_FILE), 'w') as configfile: CONFIG.write(configfile)
[ "def", "rewrite_local_config", "(", "mobsf_home", ")", ":", "CONFIG", "[", "'MobSF'", "]", "[", "'subdir_tools'", "]", "=", "(", "mobsf_home", "+", "'\\\\StaticAnalyzer\\\\tools\\\\windows\\\\'", ")", "CONFIG", "[", "'MobSF'", "]", "[", "'dir'", "]", "=", "mobsf...
for local installation some config-vars need to be rewritten .
train
false
36,480
def _screen_docrevise(docstr): import re if (docstr is None): return None screenname = _CFG['examplescreen'] newdocstr = docstr.replace(('%s.' % screenname), '') parexp = re.compile((' \\(.+ %s\\):' % screenname)) newdocstr = parexp.sub(':', newdocstr) return newdocstr
[ "def", "_screen_docrevise", "(", "docstr", ")", ":", "import", "re", "if", "(", "docstr", "is", "None", ")", ":", "return", "None", "screenname", "=", "_CFG", "[", "'examplescreen'", "]", "newdocstr", "=", "docstr", ".", "replace", "(", "(", "'%s.'", "%"...
to reduce docstrings from turtlescreen class for functions .
train
false
36,482
def is_bar(bar_containers, **props): for container in bar_containers: if (props['mplobj'] in container): return True return False
[ "def", "is_bar", "(", "bar_containers", ",", "**", "props", ")", ":", "for", "container", "in", "bar_containers", ":", "if", "(", "props", "[", "'mplobj'", "]", "in", "container", ")", ":", "return", "True", "return", "False" ]
a test to decide whether a path is a bar from a vertical bar chart .
train
false
36,483
def _find_process_name(port_number): for connection in psutil.net_connections(): if (connection.laddr[1] == port_number): return psutil.Process(connection.pid).name() return None
[ "def", "_find_process_name", "(", "port_number", ")", ":", "for", "connection", "in", "psutil", ".", "net_connections", "(", ")", ":", "if", "(", "connection", ".", "laddr", "[", "1", "]", "==", "port_number", ")", ":", "return", "psutil", ".", "Process", ...
get the name of the process using the given port number .
train
false
36,484
def _contextkey(jail=None, chroot=None, root=None, prefix='pkg.list_pkgs'): if jail: return (str(prefix) + '.jail_{0}'.format(jail)) elif chroot: return (str(prefix) + '.chroot_{0}'.format(chroot)) elif root: return (str(prefix) + '.root_{0}'.format(root)) return prefix
[ "def", "_contextkey", "(", "jail", "=", "None", ",", "chroot", "=", "None", ",", "root", "=", "None", ",", "prefix", "=", "'pkg.list_pkgs'", ")", ":", "if", "jail", ":", "return", "(", "str", "(", "prefix", ")", "+", "'.jail_{0}'", ".", "format", "("...
as this module is designed to manipulate packages in jails and chroots .
train
false
36,485
def docker_build_publish(parse, xml_parent, data):
    """yaml: docker-build-publish

    Emit the XML configuration for the Jenkins Docker Build Publish
    plugin builder, mapping the YAML keys onto the plugin's element
    names (all fields required, per convert_mapping_to_xml).
    """
    builder = XML.SubElement(
        xml_parent, 'com.cloudbees.dockerpublish.DockerBuilder')
    builder.set('plugin', 'docker-build-publish')
    mapping = [
        ('repo-name', 'repoName', None),
        ('repo-tag', 'repoTag', ''),
        ('no-cache', 'noCache', False),
        ('no-force-pull', 'noForcePull', False),
        ('skip-build', 'skipBuild', False),
        ('skip-decorate', 'skipDecorate', False),
        ('skip-tag-latest', 'skipTagLatest', False),
        ('skip-push', 'skipPush', False),
        ('file-path', 'dockerfilePath', ''),
        ('build-context', 'buildContext', ''),
    ]
    convert_mapping_to_xml(builder, data, mapping, fail_required=True)
[ "def", "docker_build_publish", "(", "parse", ",", "xml_parent", ",", "data", ")", ":", "db", "=", "XML", ".", "SubElement", "(", "xml_parent", ",", "'com.cloudbees.dockerpublish.DockerBuilder'", ")", "db", ".", "set", "(", "'plugin'", ",", "'docker-build-publish'"...
yaml: docker-build-publish requires the jenkins :jenkins-wiki:`docker build publish plugin <docker+build+publish+plugin>` .
train
false
36,486
def autoimport():
    """Insert an import for the word under the cursor.

    Looks the word up in rope's autoimport cache (building the cache on
    first use). When several modules export the name, the user chooses
    one. Returns True on success, False when there is no word under the
    cursor or no matching module.
    """
    word = env.var('a:word')
    if not word:
        env.error('Should be word under cursor.')
        return False
    with RopeContext() as ctx:
        if not ctx.importer.names:
            # First run in this project: build the autoimport cache.
            ctx.generate_autoimport_cache()
        candidates = ctx.importer.get_modules(word)
        if not candidates:
            env.message('Global name %s not found.' % word)
            return False
        if len(candidates) == 1:
            chosen = candidates[0]
        else:
            chosen = env.user_input_choices('Which module to import:', *candidates)
        _insert_import(word, chosen, ctx)
        return True
[ "def", "autoimport", "(", ")", ":", "word", "=", "env", ".", "var", "(", "'a:word'", ")", "if", "(", "not", "word", ")", ":", "env", ".", "error", "(", "'Should be word under cursor.'", ")", "return", "False", "with", "RopeContext", "(", ")", "as", "ct...
autoimport modules .
train
false
36,488
def _CreateCommentsFromPrefix(comment_prefix, comment_lineno, comment_column, standalone=False):
    """Create pytree COMMENT nodes from a whitespace/comment prefix string.

    Splits *comment_prefix* on newlines and groups consecutive '#' lines
    into blocks; each block becomes one COMMENT leaf whose value joins the
    block's lines. With standalone=True each leaf is wrapped in a
    simple_stmt node so it can stand on its own in the tree.

    Args:
      comment_prefix: prefix text (comments and blank lines) to convert.
      comment_lineno: line number where the prefix starts; used to derive
        each block's context line number.
      comment_column: column recorded in every created leaf's context.
      standalone: wrap each comment leaf in a simple_stmt node when True.

    Returns:
      A list of pytree nodes (leaves, or simple_stmt nodes when
      standalone), one per contiguous comment block.
    """
    comments = []
    lines = comment_prefix.split('\n')
    index = 0
    while (index < len(lines)):
        comment_block = []
        # Collect one maximal run of lines that are comments (after
        # stripping leading whitespace).
        while ((index < len(lines)) and lines[index].lstrip().startswith('#')):
            comment_block.append(lines[index])
            index += 1
        if comment_block:
            # Line number of the block's last collected line relative to
            # where the prefix started.
            new_lineno = ((comment_lineno + index) - 1)
            # Normalize the block edges: drop leading indent on the first
            # line and any trailing newline on the last.
            comment_block[0] = comment_block[0].lstrip()
            comment_block[(-1)] = comment_block[(-1)].rstrip('\n')
            comment_leaf = pytree.Leaf(type=token.COMMENT, value='\n'.join(comment_block), context=('', (new_lineno, comment_column)))
            comment_node = (comment_leaf if (not standalone) else pytree.Node(pygram.python_symbols.simple_stmt, [comment_leaf]))
            comments.append(comment_node)
        # Skip blank lines separating comment blocks.
        while ((index < len(lines)) and (not lines[index].lstrip())):
            index += 1
    return comments
[ "def", "_CreateCommentsFromPrefix", "(", "comment_prefix", ",", "comment_lineno", ",", "comment_column", ",", "standalone", "=", "False", ")", ":", "comments", "=", "[", "]", "lines", "=", "comment_prefix", ".", "split", "(", "'\\n'", ")", "index", "=", "0", ...
create pytree nodes to represent the given comment prefix .
train
false
36,489
def print_top_words_per_topic(data, top_n=10, do_print=True):
    """Return (and optionally print) the top-n words of each topic.

    Given a pyLDAvis-style data dict with 'topic_term_dists' (one weight
    row per topic) and 'vocab' (term strings), ranks each topic's terms
    by weight and keeps the *top_n* highest. Vocabulary entries are
    stripped and inner spaces replaced with underscores so each word is
    a single token.

    Args:
      data: mapping with 'topic_term_dists' and 'vocab' entries.
      top_n: number of top words to keep per topic.
      do_print: print a "Top words in topic i ..." line per topic.

    Returns:
      A list (one entry per topic) of lists of the top words.
    """
    # Fix: the original accumulated into an unused `msgs` list and built
    # the joined message string even when do_print was False.
    lists = []
    for j, topic_to_word in enumerate(data['topic_term_dists']):
        # argsort ascending, then reverse for highest-weight-first.
        top = np.argsort(topic_to_word)[::-1][:top_n]
        top_words = [data['vocab'][i].strip().replace(' ', '_') for i in top]
        if do_print:
            print('Top words in topic %i %s' % (j, ' '.join(top_words)))
        lists.append(top_words)
    return lists
[ "def", "print_top_words_per_topic", "(", "data", ",", "top_n", "=", "10", ",", "do_print", "=", "True", ")", ":", "msgs", "=", "[", "]", "lists", "=", "[", "]", "for", "(", "j", ",", "topic_to_word", ")", "in", "enumerate", "(", "data", "[", "'topic_...
given a pyldavis data array .
train
false
36,491
def create_binding(site, hostheader='', ipaddress='*', port=80, protocol='http', sslflags=0):
    """Create an IIS binding for *site*.

    Validates the protocol and SSL flags, short-circuits when an
    identical binding already exists, otherwise issues a New-WebBinding
    PowerShell command and re-checks the site's bindings.

    Returns True when the binding exists afterwards, False on failure.
    Raises SaltInvocationError for an invalid protocol or sslflags value.
    """
    protocol = str(protocol).lower()
    sslflags = int(sslflags)
    name = _get_binding_info(hostheader, ipaddress, port)

    if protocol not in _VALID_PROTOCOLS:
        raise SaltInvocationError(
            "Invalid protocol '{0}' specified. Valid formats: {1}".format(
                protocol, _VALID_PROTOCOLS))
    if sslflags not in _VALID_SSL_FLAGS:
        raise SaltInvocationError(
            "Invalid sslflags '{0}' specified. Valid sslflags range: {1}..{2}".format(
                sslflags, _VALID_SSL_FLAGS[0], _VALID_SSL_FLAGS[-1]))

    if name in list_bindings(site):
        _LOG.debug('Binding already present: %s', name)
        return True

    # Assemble the PowerShell invocation piecewise (note the leading
    # spaces on the continuation fragments).
    command = "New-WebBinding -Name '{0}' -HostHeader '{1}'".format(site, hostheader)
    command += " -IpAddress '{0}' -Port '{1}'".format(ipaddress, port)
    command += " -Protocol '{0}' -SslFlags {1}".format(protocol, sslflags)
    result = _srvmgr(command)

    if result['retcode'] == 0 and name in list_bindings(site):
        _LOG.debug('Binding created successfully: %s', name)
        return True
    _LOG.error('Unable to create binding: %s', name)
    return False
[ "def", "create_binding", "(", "site", ",", "hostheader", "=", "''", ",", "ipaddress", "=", "'*'", ",", "port", "=", "80", ",", "protocol", "=", "'http'", ",", "sslflags", "=", "0", ")", ":", "pscmd", "=", "list", "(", ")", "protocol", "=", "str", "...
create an iis binding .
train
false
36,492
def dev_from_pcapname(pcap_name):
    """Return the libdnet/scapy device name for a given pypcap device name.

    Thin delegation to the module-level IFACES interface table.
    """
    return IFACES.dev_from_pcapname(pcap_name)
[ "def", "dev_from_pcapname", "(", "pcap_name", ")", ":", "return", "IFACES", ".", "dev_from_pcapname", "(", "pcap_name", ")" ]
return libdnet/scapy device name for given pypcap device name .
train
false