id_within_dataset
int64
1
55.5k
snippet
stringlengths
19
14.2k
tokens
listlengths
6
1.63k
nl
stringlengths
6
352
split_within_dataset
stringclasses
1 value
is_duplicated
bool
2 classes
10,164
def progressive_widening_search(G, source, value, condition, initial_width=1):
    """Progressive-widening beam search to find a node satisfying *condition*."""
    if condition(source):
        return source
    # Doubling the beam width each round guarantees the search eventually
    # degenerates into a full breadth-first search of the whole graph.
    max_rounds = math.ceil(math.log(len(G), 2))
    for round_num in range(max_rounds):
        beam_width = initial_width * (2 ** round_num)
        for _, candidate in nx.bfs_beam_edges(G, source, value, beam_width):
            if condition(candidate):
                return candidate
    raise nx.NodeNotFound('no node satisfied the termination condition')
[ "def", "progressive_widening_search", "(", "G", ",", "source", ",", "value", ",", "condition", ",", "initial_width", "=", "1", ")", ":", "if", "condition", "(", "source", ")", ":", "return", "source", "log_m", "=", "math", ".", "ceil", "(", "math", ".", ...
progressive widening beam search to find a node .
train
false
10,165
def _adjust_coefficients(op1, op2): adjust = 0 while (op2.int > op1.int): op1.int *= 10 op1.exp -= 1 adjust += 1 while (op1.int >= (10 * op2.int)): op2.int *= 10 op2.exp -= 1 adjust -= 1 return (op1, op2, adjust)
[ "def", "_adjust_coefficients", "(", "op1", ",", "op2", ")", ":", "adjust", "=", "0", "while", "(", "op2", ".", "int", ">", "op1", ".", "int", ")", ":", "op1", ".", "int", "*=", "10", "op1", ".", "exp", "-=", "1", "adjust", "+=", "1", "while", "...
adjust op1 .
train
false
10,166
def butter(N, Wn, btype='low', analog=False, output='ba'):
    """Butterworth digital and analog filter design.

    Thin wrapper that delegates to ``iirfilter`` with ``ftype='butter'``.
    """
    return iirfilter(N, Wn, btype=btype, analog=analog,
                     output=output, ftype='butter')
[ "def", "butter", "(", "N", ",", "Wn", ",", "btype", "=", "'low'", ",", "analog", "=", "False", ",", "output", "=", "'ba'", ")", ":", "return", "iirfilter", "(", "N", ",", "Wn", ",", "btype", "=", "btype", ",", "analog", "=", "analog", ",", "outpu...
butterworth digital and analog filter design .
train
false
10,168
def processClubAttendance(f, clubs):
    """Process one club's attendance section from *f*.

    Creates the Club entry when missing, updates it otherwise.
    Returns True on success, False when the file is exhausted.
    """
    try:
        line = f.next()
        # Skip blank (all-comma) separator rows between club sections.
        while line == ',,,,,,,,,,,,,,,,,,,\n':
            line = f.next()
        name = line.split(',')[0]
        if name not in clubs:
            clubs[name] = Club(name)
        clubs[name].processAttendance(f)
        return True
    except StopIteration:
        return False
[ "def", "processClubAttendance", "(", "f", ",", "clubs", ")", ":", "try", ":", "line", "=", "f", ".", "next", "(", ")", "while", "(", "line", "==", "',,,,,,,,,,,,,,,,,,,\\n'", ")", ":", "line", "=", "f", ".", "next", "(", ")", "name", "=", "line", "...
process the attendance data of one club if the club already exists in the list update its data .
train
true
10,169
def safe_referrer(request, default):
    """Return the HTTP referrer when it is a safe URL, else resolve *default*.

    Falsy defaults ('' / None) are returned unresolved.
    """
    referrer = request.META.get('HTTP_REFERER')
    if referrer and is_safe_url(referrer, request.get_host()):
        return referrer
    return resolve_url(default) if default else default
[ "def", "safe_referrer", "(", "request", ",", "default", ")", ":", "referrer", "=", "request", ".", "META", ".", "get", "(", "'HTTP_REFERER'", ")", "if", "(", "referrer", "and", "is_safe_url", "(", "referrer", ",", "request", ".", "get_host", "(", ")", ")...
takes the request and a default url .
train
false
10,170
def kelvin_zeros(nt):
    """Compute nt zeros of all eight Kelvin functions."""
    if (not isscalar(nt)) or (floor(nt) != nt) or (nt <= 0):
        raise ValueError('nt must be positive integer scalar.')
    # klvnzo kinds 1..8 cover ber, bei, ker, kei and their derivatives.
    return tuple(specfun.klvnzo(nt, kind) for kind in range(1, 9))
[ "def", "kelvin_zeros", "(", "nt", ")", ":", "if", "(", "(", "not", "isscalar", "(", "nt", ")", ")", "or", "(", "floor", "(", "nt", ")", "!=", "nt", ")", "or", "(", "nt", "<=", "0", ")", ")", ":", "raise", "ValueError", "(", "'nt must be positive ...
compute nt zeros of all kelvin functions .
train
false
10,172
def getHorizontallyBoundedPath(horizontalBegin, horizontalEnd, path):
    """Get horizontally bounded path.

    Each point is clipped with knowledge of its neighbors (None at the ends).
    """
    boundedPath = []
    lastIndex = len(path) - 1
    for pointIndex, point in enumerate(path):
        begin = path[pointIndex - 1] if pointIndex > 0 else None
        end = path[pointIndex + 1] if pointIndex < lastIndex else None
        addHorizontallyBoundedPoint(
            begin, point, end, horizontalBegin, horizontalEnd, boundedPath)
    return boundedPath
[ "def", "getHorizontallyBoundedPath", "(", "horizontalBegin", ",", "horizontalEnd", ",", "path", ")", ":", "horizontallyBoundedPath", "=", "[", "]", "for", "(", "pointIndex", ",", "point", ")", "in", "enumerate", "(", "path", ")", ":", "begin", "=", "None", "...
get horizontally bounded path .
train
false
10,178
def clear_user_history(user, domain='all'):
    """Delete every redis key recorded for *user* in *domain*.

    Only used by test code now.
    """
    for history_key in redis_key(user, domain):
        client.delete(history_key)
[ "def", "clear_user_history", "(", "user", ",", "domain", "=", "'all'", ")", ":", "for", "key", "in", "redis_key", "(", "user", ",", "domain", ")", ":", "client", ".", "delete", "(", "key", ")" ]
this is only used by test code now .
train
false
10,180
def endpoint(request):
    """Respond to low-level OpenID protocol messages."""
    server = getServer(request)
    query = util.normalDict(request.GET or request.POST)
    try:
        openid_request = server.decodeRequest(query)
    except ProtocolError as why:
        return direct_to_template(
            request, 'server/endpoint.html', {'error': str(why)})
    if openid_request is None:
        # Plain GET of the endpoint: show the informational page.
        return direct_to_template(request, 'server/endpoint.html', {})
    if openid_request.mode in ['checkid_immediate', 'checkid_setup']:
        return handleCheckIDRequest(request, openid_request)
    openid_response = server.handleRequest(openid_request)
    return displayResponse(request, openid_response)
[ "def", "endpoint", "(", "request", ")", ":", "s", "=", "getServer", "(", "request", ")", "query", "=", "util", ".", "normalDict", "(", "(", "request", ".", "GET", "or", "request", ".", "POST", ")", ")", "try", ":", "openid_request", "=", "s", ".", ...
respond to low-level openid protocol messages .
train
true
10,181
def has_missing(data):
    """Return True when *data* contains any NaN (missing) entries."""
    # NaN propagates through the sum, so a single reduction checks everything.
    return np.isnan(np.sum(data))
[ "def", "has_missing", "(", "data", ")", ":", "return", "np", ".", "isnan", "(", "np", ".", "sum", "(", "data", ")", ")" ]
returns true if data contains missing entries .
train
false
10,182
def build_identifiers(identifiers, parent, params=None, raw_response=None):
    """Build [(identifier_name, value), ...] based on each identifier's source."""
    results = []
    for identifier in identifiers:
        source = identifier.source
        if source == 'response':
            value = jmespath.search(identifier.path, raw_response)
        elif source == 'requestParameter':
            value = jmespath.search(identifier.path, params)
        elif source == 'identifier':
            value = getattr(parent, xform_name(identifier.name))
        elif source == 'data':
            value = get_data_member(parent, identifier.path)
        elif source == 'input':
            # Input identifiers are supplied by the caller, not extracted here.
            continue
        else:
            raise NotImplementedError(
                'Unsupported source type: {0}'.format(source))
        results.append((xform_name(identifier.target), value))
    return results
[ "def", "build_identifiers", "(", "identifiers", ",", "parent", ",", "params", "=", "None", ",", "raw_response", "=", "None", ")", ":", "results", "=", "[", "]", "for", "identifier", "in", "identifiers", ":", "source", "=", "identifier", ".", "source", "tar...
builds a mapping of identifier names to values based on the identifier source location .
train
false
10,183
def worker_cmd(a_device, mp_queue, cmd='show arp'):
    """Run *cmd* on a device; queue {identifier: (success, output_or_error)}."""
    identifier = '{ip}:{port}'.format(**a_device)
    result = {}
    try:
        connection = ConnectHandler(**a_device)
        result[identifier] = (True, connection.send_command(cmd))
    except (NetMikoTimeoutException, NetMikoAuthenticationException) as err:
        result[identifier] = (False, err)
    mp_queue.put(result)
[ "def", "worker_cmd", "(", "a_device", ",", "mp_queue", ",", "cmd", "=", "'show arp'", ")", ":", "identifier", "=", "'{ip}:{port}'", ".", "format", "(", "**", "a_device", ")", "return_data", "=", "{", "}", "try", ":", "net_connect", "=", "ConnectHandler", "...
return a dictionary where the key is the device identifier and the value is a (success, output_or_error) tuple .
train
false
10,184
def archive_provider_for(node, user):
    """Return the archive-provider addon configured for *node*.

    *user* is accepted for interface compatibility but unused here.
    """
    return node.get_addon(settings.ARCHIVE_PROVIDER)
[ "def", "archive_provider_for", "(", "node", ",", "user", ")", ":", "return", "node", ".", "get_addon", "(", "settings", ".", "ARCHIVE_PROVIDER", ")" ]
a generic function to get the archive provider for some node .
train
false
10,185
def group_id_or_name_exists(reference, context):
    """Validate that *reference* names an existing group.

    Raises Invalid when no group matches the name or id; otherwise echoes
    the reference back.
    """
    group = context['model'].Group.get(reference)
    if not group:
        raise Invalid(_('That group name or ID does not exist.'))
    return reference
[ "def", "group_id_or_name_exists", "(", "reference", ",", "context", ")", ":", "model", "=", "context", "[", "'model'", "]", "result", "=", "model", ".", "Group", ".", "get", "(", "reference", ")", "if", "(", "not", "result", ")", ":", "raise", "Invalid",...
raises invalid if a group identified by the name or id cannot be found .
train
false
10,186
def test_gridsearch():
    """Check GridSearchCV support for a soft-voting classifier."""
    estimators = [
        ('lr', LogisticRegression(random_state=1)),
        ('rf', RandomForestClassifier(random_state=1)),
        ('gnb', GaussianNB()),
    ]
    eclf = VotingClassifier(estimators=estimators, voting='soft')
    params = {
        'lr__C': [1.0, 100.0],
        'voting': ['soft', 'hard'],
        'weights': [[0.5, 0.5, 0.5], [1.0, 0.5, 0.5]],
    }
    grid = GridSearchCV(estimator=eclf, param_grid=params, cv=5)
    grid.fit(iris.data, iris.target)
[ "def", "test_gridsearch", "(", ")", ":", "clf1", "=", "LogisticRegression", "(", "random_state", "=", "1", ")", "clf2", "=", "RandomForestClassifier", "(", "random_state", "=", "1", ")", "clf3", "=", "GaussianNB", "(", ")", "eclf", "=", "VotingClassifier", "...
check gridsearch support .
train
false
10,187
def fcontext_policy_is_applied(name, recursive=False):
    """Return restorecon's dry-run output for *name*.

    An empty string means the SELinux policy for the filespec is applied.
    """
    cmd = 'restorecon -n -v '
    if recursive:
        cmd += '-R '
    cmd += re.escape(name)
    return __salt__['cmd.run_all'](cmd).get('stdout')
[ "def", "fcontext_policy_is_applied", "(", "name", ",", "recursive", "=", "False", ")", ":", "cmd", "=", "'restorecon -n -v '", "if", "recursive", ":", "cmd", "+=", "'-R '", "cmd", "+=", "re", ".", "escape", "(", "name", ")", "return", "__salt__", "[", "'cm...
returns an empty string if the selinux policy for a given filespec is applied .
train
true
10,188
def _get_indices_Mul(expr, return_dummies=False):
    """Determine the outer indices of a Mul object.

    Combines the indices and symmetry info of each factor; repeated indices
    become dummies (summed over) and are removed from the outer set.
    """
    per_factor = list(map(get_indices, expr.args))
    inds, syms = list(zip(*per_factor))
    inds = list(map(list, inds))
    inds = list(reduce(lambda acc, nxt: acc + nxt, inds))
    inds, dummies = _remove_repeated(inds)
    # Merge symmetry dictionaries multiplicatively on shared keys.
    symmetry = {}
    for sym in syms:
        for pair in sym:
            if pair in symmetry:
                symmetry[pair] *= sym[pair]
            else:
                symmetry[pair] = sym[pair]
    if return_dummies:
        return inds, symmetry, dummies
    return inds, symmetry
[ "def", "_get_indices_Mul", "(", "expr", ",", "return_dummies", "=", "False", ")", ":", "inds", "=", "list", "(", "map", "(", "get_indices", ",", "expr", ".", "args", ")", ")", "(", "inds", ",", "syms", ")", "=", "list", "(", "zip", "(", "*", "inds"...
determine the outer indices of a mul object .
train
false
10,189
def max_uuid_from_time(timestamp):
    """Generate the maximum TimeUUID for a given timestamp."""
    # 140185576636287 == 0x7f7f7f7f7f7f (max node),
    # 16255 == 0x3f7f (max clock_seq) — the largest valid field values.
    return uuid_from_time(timestamp, 140185576636287, 16255)
[ "def", "max_uuid_from_time", "(", "timestamp", ")", ":", "return", "uuid_from_time", "(", "timestamp", ",", "140185576636287", ",", "16255", ")" ]
generates the maximum timeuuid for a given timestamp .
train
false
10,193
def coro(gen):
    """Decorator marking a generator function as a coroutine.

    The wrapper instantiates the generator and advances it to the first
    yield so that callers can immediately .send() into it.
    """
    @wraps(gen)
    def primed(*args, **kwargs):
        coroutine = gen(*args, **kwargs)
        next(coroutine)
        return coroutine
    return primed
[ "def", "coro", "(", "gen", ")", ":", "@", "wraps", "(", "gen", ")", "def", "wind_up", "(", "*", "args", ",", "**", "kwargs", ")", ":", "it", "=", "gen", "(", "*", "args", ",", "**", "kwargs", ")", "next", "(", "it", ")", "return", "it", "retu...
decorator to mark generator as co-routine .
train
false
10,194
def _geqv(a, b):
    """Return whether two generic classes are equivalent (share one origin)."""
    assert isinstance(a, GenericMeta) and isinstance(b, GenericMeta)
    return _gorg(a) is _gorg(b)
[ "def", "_geqv", "(", "a", ",", "b", ")", ":", "assert", "(", "isinstance", "(", "a", ",", "GenericMeta", ")", "and", "isinstance", "(", "b", ",", "GenericMeta", ")", ")", "return", "(", "_gorg", "(", "a", ")", "is", "_gorg", "(", "b", ")", ")" ]
return whether two generic classes are equivalent .
train
false
10,196
def _get_string(path, base=win32con.HKEY_CLASSES_ROOT):
    """Read a registry string value at *path*, or None when the lookup fails."""
    try:
        return win32api.RegQueryValue(base, path)
    except win32api.error:
        return None
[ "def", "_get_string", "(", "path", ",", "base", "=", "win32con", ".", "HKEY_CLASSES_ROOT", ")", ":", "try", ":", "return", "win32api", ".", "RegQueryValue", "(", "base", ",", "path", ")", "except", "win32api", ".", "error", ":", "return", "None" ]
read a string value from the windows registry, returning none when the lookup fails .
train
false
10,197
def map_viewing_client():
    """Map Viewing Client controller."""
    print_mode = bool(get_vars.get('print', None))
    if print_mode:
        # Print layout: collapse chrome and disable interactive controls.
        collapsed, mouse_position, toolbar, zoomcontrol = True, False, False, False
    else:
        collapsed, mouse_position, toolbar, zoomcontrol = False, None, True, None
    map = define_map(window=True,
                     toolbar=toolbar,
                     collapsed=collapsed,
                     closable=False,
                     maximizable=False,
                     mouse_position=mouse_position,
                     print_mode=print_mode,
                     save=settings.get_gis_save(),
                     zoomcontrol=zoomcontrol)
    response.title = T('Map Viewing Client')
    return dict(map=map)
[ "def", "map_viewing_client", "(", ")", ":", "print_mode", "=", "get_vars", ".", "get", "(", "'print'", ",", "None", ")", "if", "print_mode", ":", "collapsed", "=", "True", "mouse_position", "=", "False", "print_mode", "=", "True", "toolbar", "=", "False", ...
map viewing client .
train
false
10,199
def get_current_device():
    """Get the device associated with the current thread's context."""
    return current_context().device
[ "def", "get_current_device", "(", ")", ":", "return", "current_context", "(", ")", ".", "device" ]
get current device associated with the current thread .
train
false
10,200
def can_change_page(request):
    """Check whether request.user may change the current page.

    Falls back to a site-wide permission check when no current page is set.
    """
    from cms.utils import page_permissions
    user = request.user
    page = request.current_page
    if page:
        return page_permissions.user_can_change_page(user, page)
    site = Site.objects.get_current(request)
    return page_permissions.user_can_change_all_pages(user, site)
[ "def", "can_change_page", "(", "request", ")", ":", "from", "cms", ".", "utils", "import", "page_permissions", "user", "=", "request", ".", "user", "current_page", "=", "request", ".", "current_page", "if", "current_page", ":", "return", "page_permissions", ".",...
check whether a user has the permission to change the page .
train
false
10,202
@handle_response_format
@treeio_login_required
@module_admin_required()
def page_delete(request, page_id, response_format='html'):
    """Static page delete view with confirm/cancel handling."""
    page = get_object_or_404(Page, pk=page_id)
    if request.POST:
        if 'delete' in request.POST:
            page.delete()
            return HttpResponseRedirect(reverse('core_admin_index_pages'))
        if 'cancel' in request.POST:
            return HttpResponseRedirect(
                reverse('core_admin_page_view', args=[page.id]))
    return render_to_response('core/administration/page_delete',
                              {'page': page},
                              context_instance=RequestContext(request),
                              response_format=response_format)
[ "@", "handle_response_format", "@", "treeio_login_required", "@", "module_admin_required", "(", ")", "def", "page_delete", "(", "request", ",", "page_id", ",", "response_format", "=", "'html'", ")", ":", "page", "=", "get_object_or_404", "(", "Page", ",", "pk", ...
static page delete .
train
false
10,204
def mean_quadratic_weighted_kappa(kappas, weights=None):
    """Mean of quadratic weighted kappas via Fisher's r-to-z transform.

    Each kappa is z-transformed, a (weight-scaled) mean is taken in z-space,
    and the mean is transformed back to the kappa scale.
    """
    kappas = np.asarray(kappas, dtype=float)
    if weights is None:
        weights = np.ones(np.shape(kappas))
    else:
        weights = weights / np.mean(weights)
    # Clip away +/-1 so the z-transform stays finite.
    kappas = np.clip(kappas, -0.999, 0.999)
    z = 0.5 * np.log((1 + kappas) / (1 - kappas)) * weights
    z = np.mean(z)
    return (np.exp(2 * z) - 1) / (np.exp(2 * z) + 1)
[ "def", "mean_quadratic_weighted_kappa", "(", "kappas", ",", "weights", "=", "None", ")", ":", "kappas", "=", "np", ".", "array", "(", "kappas", ",", "dtype", "=", "float", ")", "if", "(", "weights", "is", "None", ")", ":", "weights", "=", "np", ".", ...
calculates the mean of the quadratic weighted kappas after applying fishers r-to-z transform .
train
false
10,205
def prepare_token_revocation_request(url, token, token_type_hint=u'access_token',
                                     callback=None, body=u'', **kwargs):
    """Prepare a token revocation request.

    With *callback* set (JSONP style) all parameters go into the URL;
    otherwise they are form-encoded into the body.
    """
    if not is_secure_transport(url):
        raise InsecureTransportError()
    params = [(u'token', token)]
    if token_type_hint:
        params.append((u'token_type_hint', token_type_hint))
    for key in kwargs:
        if kwargs[key]:
            params.append((unicode_type(key), kwargs[key]))
    headers = {u'Content-Type': u'application/x-www-form-urlencoded'}
    if callback:
        params.append((u'callback', callback))
        return add_params_to_uri(url, params), headers, body
    return url, headers, add_params_to_qs(body, params)
[ "def", "prepare_token_revocation_request", "(", "url", ",", "token", ",", "token_type_hint", "=", "u'access_token'", ",", "callback", "=", "None", ",", "body", "=", "u''", ",", "**", "kwargs", ")", ":", "if", "(", "not", "is_secure_transport", "(", "url", ")...
prepare a token revocation request .
train
false
10,207
def _strip_quotes(str_q): if ((str_q[0] == str_q[(-1)]) and str_q.startswith(("'", '"'))): return str_q[1:(-1)] return str_q
[ "def", "_strip_quotes", "(", "str_q", ")", ":", "if", "(", "(", "str_q", "[", "0", "]", "==", "str_q", "[", "(", "-", "1", ")", "]", ")", "and", "str_q", ".", "startswith", "(", "(", "\"'\"", ",", "'\"'", ")", ")", ")", ":", "return", "str_q", ...
helper function to strip off the surrounding ' or " quotes of a string .
train
false
10,210
def lorenz(x, y, z, s=10, r=28, b=2.667):
    """Return the Lorenz-system derivatives (x_dot, y_dot, z_dot) at (x, y, z).

    s, r, b are the usual sigma/rho/beta parameters.
    """
    dx = s * (y - x)
    dy = r * x - y - x * z
    dz = x * y - b * z
    return dx, dy, dz
[ "def", "lorenz", "(", "x", ",", "y", ",", "z", ",", "s", "=", "10", ",", "r", "=", "28", ",", "b", "=", "2.667", ")", ":", "x_dot", "=", "(", "s", "*", "(", "y", "-", "x", ")", ")", "y_dot", "=", "(", "(", "(", "r", "*", "x", ")", "...
given a point (x, y, z), compute the lorenz-system derivatives .
train
false
10,211
def get_related_rand_names(prefixes, max_length=None):
    """Return the prefixes with the same random characters appended.

    Raises ValueError when *max_length* cannot accommodate the longest
    prefix plus at least one random character.
    """
    if max_length:
        suffix_len = max_length - max(len(prefix) for prefix in prefixes)
        if suffix_len <= 0:
            raise ValueError(_("'max_length' must be longer than all prefixes"))
    else:
        suffix_len = 8
    suffix = helpers.get_random_string(suffix_len)
    return [prefix + suffix for prefix in prefixes]
[ "def", "get_related_rand_names", "(", "prefixes", ",", "max_length", "=", "None", ")", ":", "if", "max_length", ":", "length", "=", "(", "max_length", "-", "max", "(", "(", "len", "(", "p", ")", "for", "p", "in", "prefixes", ")", ")", ")", "if", "(",...
returns a list of the prefixes with the same random characters appended .
train
false
10,212
@task @needs('pavelib.prereqs.install_python_prereqs') @cmdopts([('system=', 's', 'System to act on')]) @timed def run_pep8(options): (count, violations_list) = _get_pep8_violations() violations_list = ''.join(violations_list) violations_count_str = 'Number of pep8 violations: {count}'.format(count=count) print violations_count_str print violations_list with open((Env.METRICS_DIR / 'pep8'), 'w') as f: f.write((violations_count_str + '\n\n')) f.write(violations_list) if count: failure_string = ('Too many pep8 violations. ' + violations_count_str) failure_string += '\n\nViolations:\n{violations_list}'.format(violations_list=violations_list) raise BuildFailure(failure_string)
[ "@", "task", "@", "needs", "(", "'pavelib.prereqs.install_python_prereqs'", ")", "@", "cmdopts", "(", "[", "(", "'system='", ",", "'s'", ",", "'System to act on'", ")", "]", ")", "@", "timed", "def", "run_pep8", "(", "options", ")", ":", "(", "count", ",",...
run pep8 on system code .
train
false
10,213
def patch_builtins():
    """Patch the builtins module for greenlet safety on interpreters before 3.3."""
    if sys.version_info[:2] < (3, 3):
        patch_module('builtins')
[ "def", "patch_builtins", "(", ")", ":", "if", "(", "sys", ".", "version_info", "[", ":", "2", "]", "<", "(", "3", ",", "3", ")", ")", ":", "patch_module", "(", "'builtins'", ")" ]
make the builtin __import__ function greenlet safe under python 2 .
train
false
10,215
def buildCyclicNetwork(recurrent):
    """Build a cyclic network with 4 modules.

    :key recurrent: make the closing d->a connection recurrent.
    """
    network_cls = RecurrentNetwork if recurrent else FeedForwardNetwork
    net = network_cls('cyc')
    a = LinearLayer(1, name='a')
    b = LinearLayer(2, name='b')
    c = LinearLayer(3, name='c')
    d = LinearLayer(4, name='d')
    net.addInputModule(a)
    net.addModule(b)
    net.addModule(d)
    net.addOutputModule(c)
    # Forward chain a -> b -> c -> d; the d -> a edge closes the cycle.
    for src, dst in ((a, b), (b, c), (c, d)):
        net.addConnection(FullConnection(src, dst))
    if recurrent:
        net.addRecurrentConnection(FullConnection(d, a))
    else:
        net.addConnection(FullConnection(d, a))
    net.sortModules()
    return net
[ "def", "buildCyclicNetwork", "(", "recurrent", ")", ":", "Network", "=", "(", "RecurrentNetwork", "if", "recurrent", "else", "FeedForwardNetwork", ")", "N", "=", "Network", "(", "'cyc'", ")", "a", "=", "LinearLayer", "(", "1", ",", "name", "=", "'a'", ")",...
build a cyclic network with 4 modules :key recurrent: make one of the connections recurrent .
train
false
10,216
def atleast_2dcol(x):
    """Convert array_like to 2-D: 1-D becomes a column, 0-D becomes (1, 1).

    2-D input is returned unchanged; more than 2 dimensions raises
    ValueError.  The original guard (``x.ndim > 0``) was unreachable-correct:
    after the 1-D and 0-D branches it rejected every valid 2-D array.
    """
    x = np.asarray(x)
    if x.ndim == 1:
        x = x[:, None]
    elif x.ndim == 0:
        x = np.atleast_2d(x)
    elif x.ndim > 2:
        raise ValueError('too many dimensions')
    return x
[ "def", "atleast_2dcol", "(", "x", ")", ":", "x", "=", "np", ".", "asarray", "(", "x", ")", "if", "(", "x", ".", "ndim", "==", "1", ")", ":", "x", "=", "x", "[", ":", ",", "None", "]", "elif", "(", "x", ".", "ndim", "==", "0", ")", ":", ...
convert array_like to 2d from 1d or 0d not tested because not used .
train
false
10,219
def _delete_inventory_from_provider(conn, rp, to_delete):
    """Delete inventory rows for provider *rp* and the classes in *to_delete*.

    Raises InventoryInUse when any of those resource classes still have
    allocations against the provider.  Returns the number of rows removed.
    """
    alloc_query = sa.select(
        [_ALLOC_TBL.c.resource_class_id.label('resource_class')]
    ).where(
        sa.and_(_ALLOC_TBL.c.resource_provider_id == rp.id,
                _ALLOC_TBL.c.resource_class_id.in_(to_delete))
    ).group_by(_ALLOC_TBL.c.resource_class_id)
    allocations = conn.execute(alloc_query).fetchall()
    if allocations:
        resource_classes = ', '.join(
            [_RC_CACHE.string_from_id(alloc[0]) for alloc in allocations])
        raise exception.InventoryInUse(resource_classes=resource_classes,
                                       resource_provider=rp.uuid)
    del_stmt = _INV_TBL.delete().where(
        sa.and_(_INV_TBL.c.resource_provider_id == rp.id,
                _INV_TBL.c.resource_class_id.in_(to_delete)))
    return conn.execute(del_stmt).rowcount
[ "def", "_delete_inventory_from_provider", "(", "conn", ",", "rp", ",", "to_delete", ")", ":", "allocation_query", "=", "sa", ".", "select", "(", "[", "_ALLOC_TBL", ".", "c", ".", "resource_class_id", ".", "label", "(", "'resource_class'", ")", "]", ")", ".",...
deletes any inventory records from the supplied provider and set() of resource class identifiers .
train
false
10,220
def add_gateway_router(router, ext_network, profile=None):
    """Add an external network gateway to the specified router."""
    conn = _auth(profile)
    return conn.add_gateway_router(router, ext_network)
[ "def", "add_gateway_router", "(", "router", ",", "ext_network", ",", "profile", "=", "None", ")", ":", "conn", "=", "_auth", "(", "profile", ")", "return", "conn", ".", "add_gateway_router", "(", "router", ",", "ext_network", ")" ]
adds an external network gateway to the specified router cli example: .
train
true
10,222
def key_list(items=None):
    """Convert a list of dicts into a dict keyed on each item's 'name'.

    Items lacking a 'name' are skipped; a missing 'id' defaults to the name.
    Non-list or empty input yields an empty dict.
    """
    keyed = {}
    if items and isinstance(items, list):
        for item in items:
            if 'name' not in item:
                continue
            item.setdefault('id', item['name'])
            keyed[item['name']] = item
    return keyed
[ "def", "key_list", "(", "items", "=", "None", ")", ":", "if", "(", "items", "is", "None", ")", ":", "items", "=", "[", "]", "ret", "=", "{", "}", "if", "(", "items", "and", "isinstance", "(", "items", ",", "list", ")", ")", ":", "for", "item", ...
convert list to dictionary using the key as the identifier .
train
true
10,223
@pytest.mark.network
def test_uninstall_namespace_package(script):
    """Uninstall a namespace-package distribution without clobbering the namespace."""
    result = script.pip('install', 'pd.requires==0.0.3', expect_error=True)
    assert join(script.site_packages, 'pd') in result.files_created, \
        sorted(result.files_created.keys())
    result2 = script.pip('uninstall', 'pd.find', '-y', expect_error=True)
    # The shared 'pd' namespace directory must survive the uninstall...
    assert join(script.site_packages, 'pd') not in result2.files_deleted, \
        sorted(result2.files_deleted.keys())
    # ...while the distribution's own subpackage is removed.
    assert join(script.site_packages, 'pd', 'find') in result2.files_deleted, \
        sorted(result2.files_deleted.keys())
[ "@", "pytest", ".", "mark", ".", "network", "def", "test_uninstall_namespace_package", "(", "script", ")", ":", "result", "=", "script", ".", "pip", "(", "'install'", ",", "'pd.requires==0.0.3'", ",", "expect_error", "=", "True", ")", "assert", "(", "join", ...
uninstall a distribution with a namespace package without clobbering the namespace and everything in it .
train
false
10,224
def complete_modules(text):
    """Complete MAVProxy module names that are not already loaded."""
    import MAVProxy.modules, pkgutil
    loaded = set(complete_loadedmodules(''))
    completions = []
    for entry in pkgutil.iter_modules(MAVProxy.modules.__path__):
        mod_name = entry[1]
        if not mod_name.startswith('mavproxy_'):
            continue
        # Strip the 'mavproxy_' prefix to get the user-facing module name.
        short_name = mod_name[9:]
        if short_name not in loaded:
            completions.append(short_name)
    return completions
[ "def", "complete_modules", "(", "text", ")", ":", "import", "MAVProxy", ".", "modules", ",", "pkgutil", "modlist", "=", "[", "x", "[", "1", "]", "for", "x", "in", "pkgutil", ".", "iter_modules", "(", "MAVProxy", ".", "modules", ".", "__path__", ")", "]...
complete mavproxy module names .
train
true
10,225
def rev_elements(getter, action): for element in getter()[::(-1)]: action(element) print line
[ "def", "rev_elements", "(", "getter", ",", "action", ")", ":", "for", "element", "in", "getter", "(", ")", "[", ":", ":", "(", "-", "1", ")", "]", ":", "action", "(", "element", ")", "print", "line" ]
template skeleton that iterates items in reverse order .
train
false
10,226
def negaterow(row, K):
    """Negate a row element-wise.

    *K* (the coefficient domain) is accepted for interface compatibility
    but is not consulted here.
    """
    return [-element for element in row]
[ "def", "negaterow", "(", "row", ",", "K", ")", ":", "return", "[", "(", "-", "element", ")", "for", "element", "in", "row", "]" ]
negates a row element-wise .
train
false
10,227
def migrate_docker_facts(facts):
    """Apply migrations moving legacy docker_* facts under facts['docker']."""
    params = {
        'common': ('additional_registries', 'insecure_registries',
                   'blocked_registries', 'options'),
        'node': ('log_driver', 'log_options'),
    }
    docker_facts = facts.setdefault('docker', {})
    for role in params.keys():
        if role not in facts:
            continue
        for param in params[role]:
            legacy_key = 'docker_' + param
            if legacy_key in facts[role]:
                docker_facts[param] = facts[role].pop(legacy_key)
    # portal_net moved to the hosted registry network setting.
    if 'node' in facts and 'portal_net' in facts['node']:
        docker_facts['hosted_registry_network'] = facts['node'].pop('portal_net')
    # Normalize log_options from a comma-separated string to a list.
    if 'log_options' in docker_facts and isinstance(
            docker_facts['log_options'], string_types):
        docker_facts['log_options'] = docker_facts['log_options'].split(',')
    return facts
[ "def", "migrate_docker_facts", "(", "facts", ")", ":", "params", "=", "{", "'common'", ":", "(", "'additional_registries'", ",", "'insecure_registries'", ",", "'blocked_registries'", ",", "'options'", ")", ",", "'node'", ":", "(", "'log_driver'", ",", "'log_option...
apply migrations for docker facts .
train
false
10,228
@contextfilter
def do_selectattr(*args, **kwargs):
    """Filter a sequence of objects by applying a test to the object or an
    attribute, selecting only the ones for which the test succeeds."""
    return select_or_reject(args, kwargs, lambda item: item, True)
[ "@", "contextfilter", "def", "do_selectattr", "(", "*", "args", ",", "**", "kwargs", ")", ":", "return", "select_or_reject", "(", "args", ",", "kwargs", ",", "(", "lambda", "x", ":", "x", ")", ",", "True", ")" ]
filters a sequence of objects by appying a test to either the object or the attribute and only selecting the ones with the test succeeding .
train
false
10,229
def dup_trunc(f, p, K):
    """Reduce a K[x] polynomial modulo a constant p in K."""
    if K.is_ZZ:
        # Integers use a symmetric (balanced) representation in (-p/2, p/2].
        g = []
        for c in f:
            c = c % p
            g.append(c - p if c > p // 2 else c)
    else:
        g = [c % p for c in f]
    return dup_strip(g)
[ "def", "dup_trunc", "(", "f", ",", "p", ",", "K", ")", ":", "if", "K", ".", "is_ZZ", ":", "g", "=", "[", "]", "for", "c", "in", "f", ":", "c", "=", "(", "c", "%", "p", ")", "if", "(", "c", ">", "(", "p", "//", "2", ")", ")", ":", "g...
reduce a k[x] polynomial modulo a constant p in k .
train
false
10,230
@memoize
def from_style(key):
    """Return (and cache, via @memoize) the standard icon for *key*."""
    app_style = QtGui.QApplication.instance().style()
    return app_style.standardIcon(key)
[ "@", "memoize", "def", "from_style", "(", "key", ")", ":", "style", "=", "QtGui", ".", "QApplication", ".", "instance", "(", ")", ".", "style", "(", ")", "return", "style", ".", "standardIcon", "(", "key", ")" ]
maintain a cache of standard icons and return cache entries .
train
false
10,231
def _polyder(p, m): if (m == 0): result = p else: n = len(p) if (n <= m): result = np.zeros_like(p[:1, ...]) else: dp = p[:(- m)].copy() for k in range(m): rng = np.arange(((n - k) - 1), ((m - k) - 1), (-1)) dp *= rng.reshape((((n - m),) + ((1,) * (p.ndim - 1)))) result = dp return result
[ "def", "_polyder", "(", "p", ",", "m", ")", ":", "if", "(", "m", "==", "0", ")", ":", "result", "=", "p", "else", ":", "n", "=", "len", "(", "p", ")", "if", "(", "n", "<=", "m", ")", ":", "result", "=", "np", ".", "zeros_like", "(", "p", ...
differentiate polynomials represented with coefficients .
train
false
10,232
@mock_streams('stdout')
@with_patched_input(p)
def test_prompt_appends_space():
    """prompt() appends a single space when no default is given."""
    text = 'This is my prompt'
    prompt(text)
    eq_(sys.stdout.getvalue(), text + ' ')
[ "@", "mock_streams", "(", "'stdout'", ")", "@", "with_patched_input", "(", "p", ")", "def", "test_prompt_appends_space", "(", ")", ":", "s", "=", "'This is my prompt'", "prompt", "(", "s", ")", "eq_", "(", "sys", ".", "stdout", ".", "getvalue", "(", ")", ...
prompt() appends a single space when no default is given .
train
false
10,233
def test_complex():
    """Ensure we tokenize complex numbers properly (bare 'j' stays a symbol)."""
    entry = tokenize('(1j)')[0][0]
    assert entry == HyComplex('1.0j')
    entry = tokenize('(j)')[0][0]
    assert entry == HySymbol('j')
[ "def", "test_complex", "(", ")", ":", "entry", "=", "tokenize", "(", "'(1j)'", ")", "[", "0", "]", "[", "0", "]", "assert", "(", "entry", "==", "HyComplex", "(", "'1.0j'", ")", ")", "entry", "=", "tokenize", "(", "'(j)'", ")", "[", "0", "]", "[",...
ensure we tokenize complex numbers properly .
train
false
10,234
@cleanup
def test__EventCollection__get_linestyle():
    """Check that the default linestyle matches the input linestyle."""
    _, coll, _ = generate_EventCollection_plot()
    assert_equal(coll.get_linestyle(), [(None, None)])
[ "@", "cleanup", "def", "test__EventCollection__get_linestyle", "(", ")", ":", "(", "_", ",", "coll", ",", "_", ")", "=", "generate_EventCollection_plot", "(", ")", "assert_equal", "(", "coll", ".", "get_linestyle", "(", ")", ",", "[", "(", "None", ",", "No...
check to make sure the default linestyle matches the input linestyle .
train
false
10,235
def compute_key(gr, n, nodes_explored):
    """Minimum edge weight from node *n* into *nodes_explored* in graph *gr*.

    Returns float('inf') when no neighbor of *n* is in the explored set.
    Rewritten with min(..., default=...) — the original shadowed the
    builtin ``min`` with a local of the same name.
    """
    return min(
        (gr.get_edge_weight((n, v)) for v in gr.neighbors(n)
         if v in nodes_explored),
        default=float('inf'))
[ "def", "compute_key", "(", "gr", ",", "n", ",", "nodes_explored", ")", ":", "min", "=", "float", "(", "'inf'", ")", "for", "v", "in", "gr", ".", "neighbors", "(", "n", ")", ":", "if", "(", "v", "in", "nodes_explored", ")", ":", "w", "=", "gr", ...
computes minimum key for node n from a set of nodes_explored in graph gr .
train
false
10,236
def _extract_expressions(node):
    """Find expressions in calls to _TRANSIENT_FUNCTION and yield them.

    Each extracted expression is spliced into the parent node in place of
    the wrapper call (whether referenced directly or inside a list/tuple
    field) before being yielded.
    """
    is_transient_call = (isinstance(node, nodes.CallFunc)
                         and isinstance(node.func, nodes.Name)
                         and node.func.name == _TRANSIENT_FUNCTION)
    if is_transient_call:
        real_expr = node.args[0]
        real_expr.parent = node.parent
        for name in node.parent._astroid_fields:
            child = getattr(node.parent, name)
            if isinstance(child, (list, tuple)):
                for idx, compound_child in enumerate(child):
                    if compound_child is node:
                        child[idx] = real_expr
            elif child is node:
                setattr(node.parent, name, real_expr)
        yield real_expr
    else:
        for child in node.get_children():
            for result in _extract_expressions(child):
                yield result
[ "def", "_extract_expressions", "(", "node", ")", ":", "if", "(", "isinstance", "(", "node", ",", "nodes", ".", "CallFunc", ")", "and", "isinstance", "(", "node", ".", "func", ",", "nodes", ".", "Name", ")", "and", "(", "node", ".", "func", ".", "name...
find expressions in a call to _transient_function and extract them .
train
true
10,237
def encode_fvwi(val, flags, flag_size=4):
    """Encode *val* and the low *flag_size* bits of *flags* as a FVWI."""
    ans = val << flag_size
    # OR in each flag bit individually (only the low flag_size bits survive).
    for bit in xrange(flag_size):
        ans |= flags & (1 << bit)
    return encint(ans)
[ "def", "encode_fvwi", "(", "val", ",", "flags", ",", "flag_size", "=", "4", ")", ":", "ans", "=", "(", "val", "<<", "flag_size", ")", "for", "i", "in", "xrange", "(", "flag_size", ")", ":", "ans", "|=", "(", "flags", "&", "(", "1", "<<", "i", "...
encode the value val and the flag_size bits from flags as a fvwi .
train
false
10,240
def has_hidden(layer):
    """Whether *layer* carries a trainable initial hidden state."""
    return hasattr(layer, 'initial_hidden_state')
[ "def", "has_hidden", "(", "layer", ")", ":", "return", "hasattr", "(", "layer", ",", "'initial_hidden_state'", ")" ]
whether a layer has a trainable initial hidden state .
train
false
10,242
def test_weighted_pool(): rng = numpy.random.RandomState(220) for ds in [3]: for batch in [2]: for ch in [2]: data = rng.uniform(size=(batch, ch, ds, ds)).astype('float32') x = theano.tensor.tensor4() w_max = weighted_max_pool_bc01(x, (ds, ds), (ds, ds), (ds, ds)) f = theano.function([x], w_max) op_val = numpy.asarray(f(data)) norm = (data / data.sum(3).sum(2)[:, :, numpy.newaxis, numpy.newaxis]) py_val = (data * norm).sum(3).sum(2)[:, :, numpy.newaxis, numpy.newaxis] assert numpy.allclose(op_val, py_val)
[ "def", "test_weighted_pool", "(", ")", ":", "rng", "=", "numpy", ".", "random", ".", "RandomState", "(", "220", ")", "for", "ds", "in", "[", "3", "]", ":", "for", "batch", "in", "[", "2", "]", ":", "for", "ch", "in", "[", "2", "]", ":", "data",...
test weighted pooling theano implementation against numpy implementation .
train
false
10,244
def head_container(url, token, container, http_conn=None, headers=None, service_token=None): if http_conn: (parsed, conn) = http_conn else: (parsed, conn) = http_connection(url) path = ('%s/%s' % (parsed.path, quote(container))) method = 'HEAD' req_headers = {'X-Auth-Token': token} if service_token: req_headers['X-Service-Token'] = service_token if headers: req_headers.update(headers) conn.request(method, path, '', req_headers) resp = conn.getresponse() body = resp.read() http_log((('%s%s' % (url.replace(parsed.path, ''), path)), method), {'headers': req_headers}, resp, body) if ((resp.status < 200) or (resp.status >= 300)): raise ClientException.from_response(resp, 'Container HEAD failed', body) resp_headers = resp_header_dict(resp) return resp_headers
[ "def", "head_container", "(", "url", ",", "token", ",", "container", ",", "http_conn", "=", "None", ",", "headers", "=", "None", ",", "service_token", "=", "None", ")", ":", "if", "http_conn", ":", "(", "parsed", ",", "conn", ")", "=", "http_conn", "el...
get container stats .
train
false
10,245
def urlencode_path(s): from urllib import quote safe = ((os.sep + (os.altsep or '')) + ':') return quote(s, safe=safe)
[ "def", "urlencode_path", "(", "s", ")", ":", "from", "urllib", "import", "quote", "safe", "=", "(", "(", "os", ".", "sep", "+", "(", "os", ".", "altsep", "or", "''", ")", ")", "+", "':'", ")", "return", "quote", "(", "s", ",", "safe", "=", "saf...
url-encode the given path string .
train
false
10,247
def get_instance_identity(version='latest', url='http://169.254.169.254', timeout=None, num_retries=5): iid = {} base_url = _build_instance_metadata_url(url, version, 'dynamic/instance-identity/') try: data = retry_url(base_url, num_retries=num_retries, timeout=timeout) fields = data.split('\n') for field in fields: val = retry_url((((base_url + '/') + field) + '/'), num_retries=num_retries, timeout=timeout) if (val[0] == '{'): val = json.loads(val) if field: iid[field] = val return iid except urllib.error.URLError: return None
[ "def", "get_instance_identity", "(", "version", "=", "'latest'", ",", "url", "=", "'http://169.254.169.254'", ",", "timeout", "=", "None", ",", "num_retries", "=", "5", ")", ":", "iid", "=", "{", "}", "base_url", "=", "_build_instance_metadata_url", "(", "url"...
returns the instance identity as a nested python dictionary .
train
false
10,248
def get_fs_home(): return _get_root_home('FREESURFER_HOME', 'freesurfer', _fs_home_problem)
[ "def", "get_fs_home", "(", ")", ":", "return", "_get_root_home", "(", "'FREESURFER_HOME'", ",", "'freesurfer'", ",", "_fs_home_problem", ")" ]
get the freesurfer_home directory .
train
false
10,250
def _constrain_sv_less_than_one_python(unconstrained, order=None, k_endog=None): from scipy import linalg constrained = [] if (order is None): order = len(unconstrained) if (k_endog is None): k_endog = unconstrained[0].shape[0] eye = np.eye(k_endog) for i in range(order): A = unconstrained[i] (B, lower) = linalg.cho_factor((eye + np.dot(A, A.T)), lower=True) constrained.append(linalg.solve_triangular(B, A, lower=lower)) return constrained
[ "def", "_constrain_sv_less_than_one_python", "(", "unconstrained", ",", "order", "=", "None", ",", "k_endog", "=", "None", ")", ":", "from", "scipy", "import", "linalg", "constrained", "=", "[", "]", "if", "(", "order", "is", "None", ")", ":", "order", "="...
transform arbitrary matrices to matrices with singular values less than one .
train
false
10,252
def get_taxes_of_effective_rules(taxing_context, tax_rules): matching_rules = (tax_rule for tax_rule in tax_rules if tax_rule.matches(taxing_context)) grouped_by_override = groupby(matching_rules, attrgetter('override_group')) highest_override_group = first(grouped_by_override, (None, []))[1] grouped_rules = groupby(highest_override_group, attrgetter('priority')) tax_groups = [[rule.tax for rule in rules] for (_, rules) in grouped_rules] return tax_groups
[ "def", "get_taxes_of_effective_rules", "(", "taxing_context", ",", "tax_rules", ")", ":", "matching_rules", "=", "(", "tax_rule", "for", "tax_rule", "in", "tax_rules", "if", "tax_rule", ".", "matches", "(", "taxing_context", ")", ")", "grouped_by_override", "=", "...
get taxes grouped by priority from effective tax rules .
train
false
10,253
def get_datetime_utc_now(): dt = datetime.datetime.utcnow() dt = add_utc_tz(dt) return dt
[ "def", "get_datetime_utc_now", "(", ")", ":", "dt", "=", "datetime", ".", "datetime", ".", "utcnow", "(", ")", "dt", "=", "add_utc_tz", "(", "dt", ")", "return", "dt" ]
retrieve datetime object for current time with included utc timezone info .
train
false
10,254
def findlabels(code): labels = [] n = len(code) i = 0 while (i < n): c = code[i] op = ord(c) i = (i + 1) if (op >= HAVE_ARGUMENT): oparg = (ord(code[i]) + (ord(code[(i + 1)]) * 256)) i = (i + 2) label = (-1) if (op in hasjrel): label = (i + oparg) elif (op in hasjabs): label = oparg if (label >= 0): if (label not in labels): labels.append(label) return labels
[ "def", "findlabels", "(", "code", ")", ":", "labels", "=", "[", "]", "n", "=", "len", "(", "code", ")", "i", "=", "0", "while", "(", "i", "<", "n", ")", ":", "c", "=", "code", "[", "i", "]", "op", "=", "ord", "(", "c", ")", "i", "=", "(...
detect all offsets in a byte code which are jump targets .
train
false
10,255
def _is_master(): return (__salt__['config.option']('solr.type') == 'master')
[ "def", "_is_master", "(", ")", ":", "return", "(", "__salt__", "[", "'config.option'", "]", "(", "'solr.type'", ")", "==", "'master'", ")" ]
private method simple method to determine if the minion is configured as master or slave return: boolean:: true if __opts__[solr .
train
false
10,259
def _render_permission_denied(request, template_name=u'reviews/review_request_permission_denied.html'): response = render_to_response(template_name, RequestContext(request)) response.status_code = 403 return response
[ "def", "_render_permission_denied", "(", "request", ",", "template_name", "=", "u'reviews/review_request_permission_denied.html'", ")", ":", "response", "=", "render_to_response", "(", "template_name", ",", "RequestContext", "(", "request", ")", ")", "response", ".", "s...
renders a permission denied error for this review request .
train
false
10,260
def _report_unknown_attribute(name): logging.error('unknown Cookie attribute: %r', name)
[ "def", "_report_unknown_attribute", "(", "name", ")", ":", "logging", ".", "error", "(", "'unknown Cookie attribute: %r'", ",", "name", ")" ]
how this module logs an unknown attribute when exception suppressed .
train
false
10,261
def create_from_template(zone, template): ret = {'status': True} res = __salt__['cmd.run_all']('zonecfg -z {zone} create -t {tmpl} -F'.format(zone=zone, tmpl=template)) ret['status'] = (res['retcode'] == 0) ret['message'] = (res['stdout'] if ret['status'] else res['stderr']) ret['message'] = ret['message'].replace('zonecfg: ', '') if (ret['message'] == ''): del ret['message'] return ret
[ "def", "create_from_template", "(", "zone", ",", "template", ")", ":", "ret", "=", "{", "'status'", ":", "True", "}", "res", "=", "__salt__", "[", "'cmd.run_all'", "]", "(", "'zonecfg -z {zone} create -t {tmpl} -F'", ".", "format", "(", "zone", "=", "zone", ...
create an in-memory configuration from a template for the specified zone .
train
false
10,262
@requires_duration @apply_to_mask @apply_to_audio def time_mirror(self): return self.fl_time((lambda t: (self.duration - t)), keep_duration=True)
[ "@", "requires_duration", "@", "apply_to_mask", "@", "apply_to_audio", "def", "time_mirror", "(", "self", ")", ":", "return", "self", ".", "fl_time", "(", "(", "lambda", "t", ":", "(", "self", ".", "duration", "-", "t", ")", ")", ",", "keep_duration", "=...
returns a clip that plays the current clip backwards .
train
false
10,264
@core_helper def build_nav_icon(menu_item, title, **kw): return _make_menu_item(menu_item, title, **kw)
[ "@", "core_helper", "def", "build_nav_icon", "(", "menu_item", ",", "title", ",", "**", "kw", ")", ":", "return", "_make_menu_item", "(", "menu_item", ",", "title", ",", "**", "kw", ")" ]
build a navigation item used for example in user/read_base .
train
false
10,265
def PQa(P_0, Q_0, D): A_i_2 = B_i_1 = 0 A_i_1 = B_i_2 = 1 G_i_2 = (- P_0) G_i_1 = Q_0 P_i = P_0 Q_i = Q_0 while 1: a_i = floor(((P_i + sqrt(D)) / Q_i)) A_i = ((a_i * A_i_1) + A_i_2) B_i = ((a_i * B_i_1) + B_i_2) G_i = ((a_i * G_i_1) + G_i_2) (yield (P_i, Q_i, a_i, A_i, B_i, G_i)) (A_i_1, A_i_2) = (A_i, A_i_1) (B_i_1, B_i_2) = (B_i, B_i_1) (G_i_1, G_i_2) = (G_i, G_i_1) P_i = ((a_i * Q_i) - P_i) Q_i = ((D - (P_i ** 2)) / Q_i)
[ "def", "PQa", "(", "P_0", ",", "Q_0", ",", "D", ")", ":", "A_i_2", "=", "B_i_1", "=", "0", "A_i_1", "=", "B_i_2", "=", "1", "G_i_2", "=", "(", "-", "P_0", ")", "G_i_1", "=", "Q_0", "P_i", "=", "P_0", "Q_i", "=", "Q_0", "while", "1", ":", "a...
returns useful information needed to solve the pell equation .
train
false
10,266
def _ensure_datetimelike_to_i8(other): if (lib.isscalar(other) and isnull(other)): other = tslib.iNaT elif isinstance(other, ABCIndexClass): if (getattr(other, 'tz', None) is not None): other = other.tz_localize(None).asi8 else: other = other.asi8 else: try: other = np.array(other, copy=False).view('i8') except TypeError: other = Index(other).asi8 return other
[ "def", "_ensure_datetimelike_to_i8", "(", "other", ")", ":", "if", "(", "lib", ".", "isscalar", "(", "other", ")", "and", "isnull", "(", "other", ")", ")", ":", "other", "=", "tslib", ".", "iNaT", "elif", "isinstance", "(", "other", ",", "ABCIndexClass",...
helper for coercing an input scalar or array to i8 .
train
false
10,268
def split_string_after(str_, n): for start in range(0, len(str_), n): (yield str_[start:(start + n)])
[ "def", "split_string_after", "(", "str_", ",", "n", ")", ":", "for", "start", "in", "range", "(", "0", ",", "len", "(", "str_", ")", ",", "n", ")", ":", "(", "yield", "str_", "[", "start", ":", "(", "start", "+", "n", ")", "]", ")" ]
yield chunks of length n from the given string .
train
false
10,269
def batch(iterable, batch_size=BATCH_SIZE): b = [] for i in iterable: b.append(i) if (len(b) == batch_size): (yield tuple(b)) b = [] if b: (yield tuple(b))
[ "def", "batch", "(", "iterable", ",", "batch_size", "=", "BATCH_SIZE", ")", ":", "b", "=", "[", "]", "for", "i", "in", "iterable", ":", "b", ".", "append", "(", "i", ")", "if", "(", "len", "(", "b", ")", "==", "batch_size", ")", ":", "(", "yiel...
yaml: batch execute a batch command .
train
false
10,270
def _metric_alarm_to_dict(alarm): d = odict.OrderedDict() fields = ['name', 'metric', 'namespace', 'statistic', 'comparison', 'threshold', 'period', 'evaluation_periods', 'unit', 'description', 'dimensions', 'alarm_actions', 'insufficient_data_actions', 'ok_actions'] for f in fields: if hasattr(alarm, f): d[f] = getattr(alarm, f) return d
[ "def", "_metric_alarm_to_dict", "(", "alarm", ")", ":", "d", "=", "odict", ".", "OrderedDict", "(", ")", "fields", "=", "[", "'name'", ",", "'metric'", ",", "'namespace'", ",", "'statistic'", ",", "'comparison'", ",", "'threshold'", ",", "'period'", ",", "...
convert a boto .
train
true
10,271
def promotion_history(cls, count_column, codename, start, stop): time_points = get_time_points('hour', start, stop) q = Session.query(cls.date, sum(count_column)).filter((cls.interval == 'hour')).filter((cls.codename == codename)).filter(cls.date.in_(time_points)).group_by(cls.date).order_by(cls.date) return [(r[0], (r[1],)) for r in q.all()]
[ "def", "promotion_history", "(", "cls", ",", "count_column", ",", "codename", ",", "start", ",", "stop", ")", ":", "time_points", "=", "get_time_points", "(", "'hour'", ",", "start", ",", "stop", ")", "q", "=", "Session", ".", "query", "(", "cls", ".", ...
get hourly traffic for a self-serve promotion .
train
false
10,273
def write_string(fid, kind, data): str_data = data.encode('utf-8') data_size = len(str_data) my_dtype = '>a' if (data_size > 0): _write(fid, str_data, kind, data_size, FIFF.FIFFT_STRING, my_dtype)
[ "def", "write_string", "(", "fid", ",", "kind", ",", "data", ")", ":", "str_data", "=", "data", ".", "encode", "(", "'utf-8'", ")", "data_size", "=", "len", "(", "str_data", ")", "my_dtype", "=", "'>a'", "if", "(", "data_size", ">", "0", ")", ":", ...
write a string tag .
train
false
10,275
def _resolve_asspart(parts, asspath, context): asspath = asspath[:] index = asspath.pop(0) for part in parts: if hasattr(part, 'getitem'): try: assigned = part.getitem(index, context) except (TypeError, IndexError): return if (not asspath): (yield assigned) elif (assigned is YES): return else: try: for infered in _resolve_asspart(assigned.infer(context), asspath, context): (yield infered) except InferenceError: return
[ "def", "_resolve_asspart", "(", "parts", ",", "asspath", ",", "context", ")", ":", "asspath", "=", "asspath", "[", ":", "]", "index", "=", "asspath", ".", "pop", "(", "0", ")", "for", "part", "in", "parts", ":", "if", "hasattr", "(", "part", ",", "...
recursive function to resolve multiple assignments .
train
false
10,276
def _annotation_ascii(sent): if ((sent._type == u'fulltext_sentence') or ((u'annotationSet' in sent) and (len(sent.annotationSet) > 2))): return _annotation_ascii_frames(sent) else: return _annotation_ascii_FEs(sent)
[ "def", "_annotation_ascii", "(", "sent", ")", ":", "if", "(", "(", "sent", ".", "_type", "==", "u'fulltext_sentence'", ")", "or", "(", "(", "u'annotationSet'", "in", "sent", ")", "and", "(", "len", "(", "sent", ".", "annotationSet", ")", ">", "2", ")",...
given a sentence or fe annotation set .
train
false
10,277
def sm_backend_conf_delete(context, sm_backend_conf_id): return IMPL.sm_backend_conf_delete(context, sm_backend_conf_id)
[ "def", "sm_backend_conf_delete", "(", "context", ",", "sm_backend_conf_id", ")", ":", "return", "IMPL", ".", "sm_backend_conf_delete", "(", "context", ",", "sm_backend_conf_id", ")" ]
delete a sm backend config .
train
false
10,278
def admin_required(reviewers=False): def decorator(f): @login_required @functools.wraps(f) def wrapper(request, *args, **kw): admin = (action_allowed(request, 'Admin', '%') or action_allowed(request, 'AdminTools', 'View')) if (reviewers is True): admin = (admin or action_allowed(request, 'ReviewerAdminTools', 'View')) if admin: return f(request, *args, **kw) raise PermissionDenied return wrapper if callable(reviewers): return decorator(reviewers) else: return decorator
[ "def", "admin_required", "(", "reviewers", "=", "False", ")", ":", "def", "decorator", "(", "f", ")", ":", "@", "login_required", "@", "functools", ".", "wraps", "(", "f", ")", "def", "wrapper", "(", "request", ",", "*", "args", ",", "**", "kw", ")",...
requires app engine admin credentials .
train
false
10,279
def test_represent_zgate(): circuit = (ZGate(0) * Qubit('00')) answer = represent(circuit, nqubits=2) assert (Matrix([1, 0, 0, 0]) == answer)
[ "def", "test_represent_zgate", "(", ")", ":", "circuit", "=", "(", "ZGate", "(", "0", ")", "*", "Qubit", "(", "'00'", ")", ")", "answer", "=", "represent", "(", "circuit", ",", "nqubits", "=", "2", ")", "assert", "(", "Matrix", "(", "[", "1", ",", ...
test the representation of the z gate .
train
false
10,280
def import_helper(mod_name): try: sys.stdout = STDOUT_FAKE __import__(mod_name) finally: sys.stdout = STDOUT_BAK
[ "def", "import_helper", "(", "mod_name", ")", ":", "try", ":", "sys", ".", "stdout", "=", "STDOUT_FAKE", "__import__", "(", "mod_name", ")", "finally", ":", "sys", ".", "stdout", "=", "STDOUT_BAK" ]
helper function used to temporarily override stdout before importing a module .
train
false
10,281
def _arg_raw(dvi, delta): return delta
[ "def", "_arg_raw", "(", "dvi", ",", "delta", ")", ":", "return", "delta" ]
return *delta* without reading anything more from the dvi file .
train
false
10,283
def sync_type(ks_name, type_model, connection=None): if (not _allow_schema_modification()): return if (not issubclass(type_model, UserType)): raise CQLEngineException('Types must be derived from base UserType.') _sync_type(ks_name, type_model, connection=connection)
[ "def", "sync_type", "(", "ks_name", ",", "type_model", ",", "connection", "=", "None", ")", ":", "if", "(", "not", "_allow_schema_modification", "(", ")", ")", ":", "return", "if", "(", "not", "issubclass", "(", "type_model", ",", "UserType", ")", ")", "...
inspects the type_model and creates / updates the corresponding type .
train
true
10,284
def line_to_tuple(line): kv = line.split(',')[0].split(':') (k, v) = (kv[0].strip(' "'), kv[1].strip(' "')) return (k, v)
[ "def", "line_to_tuple", "(", "line", ")", ":", "kv", "=", "line", ".", "split", "(", "','", ")", "[", "0", "]", ".", "split", "(", "':'", ")", "(", "k", ",", "v", ")", "=", "(", "kv", "[", "0", "]", ".", "strip", "(", "' \"'", ")", ",", "...
convert a single line of the .
train
false
10,285
def INT(value): if (value is None): raise ValueError(u'None is not a valid integer') return int(value)
[ "def", "INT", "(", "value", ")", ":", "if", "(", "value", "is", "None", ")", ":", "raise", "ValueError", "(", "u'None is not a valid integer'", ")", "return", "int", "(", "value", ")" ]
converts a value that matches [+-]?d+ into and integer .
train
false
10,286
def lowlevelErrorToString(errorcode): try: (name, advice) = ERROR_TO_STRING_DICT[str(errorcode)] except KeyError: name = 'UNKNOWN_ERROR' advice = ('Unrecognized error code (%s)' % errorcode) if (advice is not ''): msg = ('%s (%s)\n%s' % (name, errorcode, advice)) else: msg = ('%s (%s)' % (name, errorcode)) return msg
[ "def", "lowlevelErrorToString", "(", "errorcode", ")", ":", "try", ":", "(", "name", ",", "advice", ")", "=", "ERROR_TO_STRING_DICT", "[", "str", "(", "errorcode", ")", "]", "except", "KeyError", ":", "name", "=", "'UNKNOWN_ERROR'", "advice", "=", "(", "'U...
converts a low-level errorcode into a string .
train
false
10,287
def get_random_alphanumeric(length=10): return str(uuid.uuid4()).replace('-', '')[:length]
[ "def", "get_random_alphanumeric", "(", "length", "=", "10", ")", ":", "return", "str", "(", "uuid", ".", "uuid4", "(", ")", ")", ".", "replace", "(", "'-'", ",", "''", ")", "[", ":", "length", "]" ]
generate a random alphanumeric string of the specified length .
train
false
10,289
def mobile_basic_assess(): if (not auth.is_logged_in()): redirect(URL(c='default', f='index')) assess_tables() impact_tables() custom_assess_fields = (('assess', 'location_id', 'auto'), ('impact', 1), ('impact', 2), ('impact', 3), ('impact', 4), ('impact', 5), ('impact', 6), ('impact', 7), ('assess', 'comments')) (form, form_accepted, assess_id) = custom_assess(custom_assess_fields) if form_accepted: form = FORM(H1(settings.get_system_name_short()), H2(T('Short Assessment')), P(T('Assessment Reported')), A(T('Report Another Assessment...'), _href=URL(r=request)), _class='mobile') return dict(form=form)
[ "def", "mobile_basic_assess", "(", ")", ":", "if", "(", "not", "auth", ".", "is_logged_in", "(", ")", ")", ":", "redirect", "(", "URL", "(", "c", "=", "'default'", ",", "f", "=", "'index'", ")", ")", "assess_tables", "(", ")", "impact_tables", "(", "...
custom page to hide the complexity of the assessments/impacts/summary model: mobile device version .
train
false
10,291
def summary(): assess_tables() return s3_rest_controller()
[ "def", "summary", "(", ")", ":", "assess_tables", "(", ")", "return", "s3_rest_controller", "(", ")" ]
given an importtask .
train
false
10,292
def validate_timezone(zone): if (zone is None): return None if (not pytz): if (zone.upper() != u'UTC'): raise ValueError(u'Only UTC available, since pytz is not installed.') else: return zone zone = u'/'.join(reversed(zone.split(u', '))).replace(u' ', u'_') if (len(zone) <= 4): zone = zone.upper() else: zone = zone.title() if (zone in pytz.all_timezones): return zone else: raise ValueError(u'Invalid time zone.')
[ "def", "validate_timezone", "(", "zone", ")", ":", "if", "(", "zone", "is", "None", ")", ":", "return", "None", "if", "(", "not", "pytz", ")", ":", "if", "(", "zone", ".", "upper", "(", ")", "!=", "u'UTC'", ")", ":", "raise", "ValueError", "(", "...
return an ietf timezone from the given ietf zone or common abbreviation .
train
false
10,293
def inputfield(course, problem_type, choice=None, input_num=1): section_loc = section_location(course) ptype = problem_type.replace(' ', '_') if (problem_type in ('radio_text', 'checkbox_text')): selector_template = 'input#{}_2_{input}' else: selector_template = 'input#input_{}_2_{input}' sel = selector_template.format(section_loc.course_key.make_usage_key('problem', ptype).html_id(), input=input_num) if (choice is not None): base = ('_choice_' if (problem_type == 'multiple choice') else '_') sel = ((sel + base) + str(choice)) assert world.is_css_present(sel) return sel
[ "def", "inputfield", "(", "course", ",", "problem_type", ",", "choice", "=", "None", ",", "input_num", "=", "1", ")", ":", "section_loc", "=", "section_location", "(", "course", ")", "ptype", "=", "problem_type", ".", "replace", "(", "' '", ",", "'_'", "...
return the css selector for problem_type .
train
false
10,295
def make_path_regular(p): c = p.codes if (c is None): c = np.empty(p.vertices.shape[:1], u'i') c.fill(Path.LINETO) c[0] = Path.MOVETO return Path(p.vertices, c) else: return p
[ "def", "make_path_regular", "(", "p", ")", ":", "c", "=", "p", ".", "codes", "if", "(", "c", "is", "None", ")", ":", "c", "=", "np", ".", "empty", "(", "p", ".", "vertices", ".", "shape", "[", ":", "1", "]", ",", "u'i'", ")", "c", ".", "fil...
fill in the codes if none .
train
false
10,300
def rowcol_to_cell(row, col, row_abs=False, col_abs=False): assert (0 <= row < MAX_ROW) assert (0 <= col < MAX_COL) d = (col // 26) m = (col % 26) chr1 = '' if row_abs: row_abs = '$' else: row_abs = '' if col_abs: col_abs = '$' else: col_abs = '' if (d > 0): chr1 = chr(((ord('A') + d) - 1)) chr2 = chr((ord('A') + m)) return ((((col_abs + chr1) + chr2) + row_abs) + str((row + 1)))
[ "def", "rowcol_to_cell", "(", "row", ",", "col", ",", "row_abs", "=", "False", ",", "col_abs", "=", "False", ")", ":", "assert", "(", "0", "<=", "row", "<", "MAX_ROW", ")", "assert", "(", "0", "<=", "col", "<", "MAX_COL", ")", "d", "=", "(", "col...
convert numeric row/col notation to an excel cell reference string in a1 notation .
train
false
10,301
def load_feature_profile(request): if hasattr(request, 'feature_profile'): return profile = None if (request.GET.get('dev') in ('firefoxos', 'android')): sig = request.GET.get('pro') if sig: try: profile = FeatureProfile.from_signature(sig) except ValueError: pass request.feature_profile = profile
[ "def", "load_feature_profile", "(", "request", ")", ":", "if", "hasattr", "(", "request", ",", "'feature_profile'", ")", ":", "return", "profile", "=", "None", "if", "(", "request", ".", "GET", ".", "get", "(", "'dev'", ")", "in", "(", "'firefoxos'", ","...
adds a feature_profile on the request object if one is present and the dev parameter is either firefoxos or android .
train
false
10,302
def _process_running(process_name): process_running = False for p in psutil.process_iter(): for args in p.cmdline: if ((os.path.basename(args) == process_name) and p.is_running): process_running = True return process_running
[ "def", "_process_running", "(", "process_name", ")", ":", "process_running", "=", "False", "for", "p", "in", "psutil", ".", "process_iter", "(", ")", ":", "for", "args", "in", "p", ".", "cmdline", ":", "if", "(", "(", "os", ".", "path", ".", "basename"...
return whether a given process name is running .
train
false
10,303
def ignore(item): filename = os.path.basename(item.path) (name, ext) = os.path.splitext(filename) dirs = os.path.split(item.path) if (not item.isdir()): if item.path.startswith('./test/results/'): return None if (item.path.startswith('./docsite/') and filename.endswith('_module.rst')): return None if (name in IGNORE_FILES): return None if (ext in IGNORE_EXTENSIONS): return None if any(((d in IGNORE_DIRS) for d in dirs)): return None return item
[ "def", "ignore", "(", "item", ")", ":", "filename", "=", "os", ".", "path", ".", "basename", "(", "item", ".", "path", ")", "(", "name", ",", "ext", ")", "=", "os", ".", "path", ".", "splitext", "(", "filename", ")", "dirs", "=", "os", ".", "pa...
ignore a specific program update .
train
false
10,304
@shared_task(bind=True) def collect_ids(self, res, i): return (res, (self.request.root_id, self.request.parent_id, i))
[ "@", "shared_task", "(", "bind", "=", "True", ")", "def", "collect_ids", "(", "self", ",", "res", ",", "i", ")", ":", "return", "(", "res", ",", "(", "self", ".", "request", ".", "root_id", ",", "self", ".", "request", ".", "parent_id", ",", "i", ...
used as a callback in a chain or group where the previous tasks are :task:ids: returns a tuple of:: (previous_result .
train
false
10,306
@contextlib.contextmanager def skip_cache(): old = getattr(_locals, 'skip_cache', False) _locals.skip_cache = True try: (yield) finally: _locals.skip_cache = old
[ "@", "contextlib", ".", "contextmanager", "def", "skip_cache", "(", ")", ":", "old", "=", "getattr", "(", "_locals", ",", "'skip_cache'", ",", "False", ")", "_locals", ".", "skip_cache", "=", "True", "try", ":", "(", "yield", ")", "finally", ":", "_local...
within this context .
train
false
10,308
def get_code_object(obj): return getattr(obj, '__code__', getattr(obj, 'func_code', None))
[ "def", "get_code_object", "(", "obj", ")", ":", "return", "getattr", "(", "obj", ",", "'__code__'", ",", "getattr", "(", "obj", ",", "'func_code'", ",", "None", ")", ")" ]
get the code-object for a module .
train
false
10,309
def set_chassis_datacenter(location, host=None, admin_username=None, admin_password=None): return set_general('cfgLocation', 'cfgLocationDatacenter', location, host=host, admin_username=admin_username, admin_password=admin_password)
[ "def", "set_chassis_datacenter", "(", "location", ",", "host", "=", "None", ",", "admin_username", "=", "None", ",", "admin_password", "=", "None", ")", ":", "return", "set_general", "(", "'cfgLocation'", ",", "'cfgLocationDatacenter'", ",", "location", ",", "ho...
set the location of the chassis .
train
true