id_within_dataset
int64
1
55.5k
snippet
stringlengths
19
14.2k
tokens
listlengths
6
1.63k
nl
stringlengths
6
352
split_within_dataset
stringclasses
1 value
is_duplicated
bool
2 classes
22,556
def listps(num, order):
    """Return `ps` output lines for all processes/threads.

    num   -- keep only the top *num* process rows (0 keeps everything).
    order -- ps column name to sort by, descending.
    A title line is inserted at the front of the returned list.
    """
    cmd = 'ps max -o pid,ppid,pcpu,pmem,rss,vsz,lwp,nlwp,lstart,cmd'
    cmd += (' --sort -%s' % order)
    title = 'Process/Threads details:'
    ps_info = utils.system_output(cmd, verbose=False).splitlines()
    if (num != 0):
        (cnt, idx) = (0, 0)
        title = ('Top %d process/threads details, ' % num)
        title += ('order by %s:' % order)
        for line in ps_info:
            idx += 1
            # Data rows start with whitespace then the numeric pid;
            # only those count toward the requested top-N.
            if re.match('^\\s+\\d', line):
                cnt += 1
                if (cnt > num):
                    break
        ps_info = ps_info[:idx]
    ps_info.insert(0, title)
    return ps_info
[ "def", "listps", "(", "num", ",", "order", ")", ":", "cmd", "=", "'ps max -o pid,ppid,pcpu,pmem,rss,vsz,lwp,nlwp,lstart,cmd'", "cmd", "+=", "(", "' --sort -%s'", "%", "order", ")", "title", "=", "'Process/Threads details:'", "ps_info", "=", "utils", ".", "system_out...
select top num process order by $order from system .
train
false
22,557
def B_0123(x, der=0):
    """Quadratic B-spline on knots 0,1,2,3: value (der=0) or second
    derivative (der=2).

    Note: the region tests are strict, so points exactly at the interior
    knots (x == 1 or x == 2) match no piece and np.piecewise yields 0.
    """
    x = np.atleast_1d(x)
    regions = [x < 1, (x > 1) & (x < 2), x > 2]
    if der == 0:
        piece_funcs = [
            lambda t: (t * t) / 2.0,
            lambda t: (3.0 / 4) - ((t - (3.0 / 2)) ** 2),
            lambda t: ((3.0 - t) ** 2) / 2,
        ]
    elif der == 2:
        piece_funcs = [lambda t: 1.0, lambda t: (-2.0), lambda t: 1.0]
    else:
        raise ValueError('never be here: der=%s' % der)
    return np.piecewise(x, regions, piece_funcs)
[ "def", "B_0123", "(", "x", ",", "der", "=", "0", ")", ":", "x", "=", "np", ".", "atleast_1d", "(", "x", ")", "conds", "=", "[", "(", "x", "<", "1", ")", ",", "(", "(", "x", ">", "1", ")", "&", "(", "x", "<", "2", ")", ")", ",", "(", ...
a quadratic b-spline function b .
train
false
22,558
@require_context
def pinned_array(shape, dtype=float, strides=None, order='C'):
    """Allocate an np.ndarray backed by pinned (page-locked) host memory.

    shape/strides/dtype/order -- as for np.ndarray.
    Returns an ndarray whose buffer was allocated with memhostalloc.

    Fix: the default dtype was ``np.float``, an alias of builtin float
    that was deprecated in NumPy 1.20 and removed in 1.24; builtin
    ``float`` is the exact drop-in replacement.
    """
    (shape, strides, dtype) = _prepare_shape_strides_dtype(shape, strides, dtype, order)
    bytesize = driver.memory_size_from_info(shape, strides, dtype.itemsize)
    buffer = current_context().memhostalloc(bytesize)
    return np.ndarray(shape=shape, strides=strides, dtype=dtype, order=order, buffer=buffer)
[ "@", "require_context", "def", "pinned_array", "(", "shape", ",", "dtype", "=", "np", ".", "float", ",", "strides", "=", "None", ",", "order", "=", "'C'", ")", ":", "(", "shape", ",", "strides", ",", "dtype", ")", "=", "_prepare_shape_strides_dtype", "("...
allocate a pinned (page-locked) np.ndarray .
train
false
22,559
def getRepositoryText(repository):
    """Serialize all of *repository*'s preferences and return the text."""
    writer = getRepositoryWriter(repository.title.lower())
    for preference in repository.preferences:
        preference.writeToRepositoryWriter(writer)
    return writer.getvalue()
[ "def", "getRepositoryText", "(", "repository", ")", ":", "repositoryWriter", "=", "getRepositoryWriter", "(", "repository", ".", "title", ".", "lower", "(", ")", ")", "for", "setting", "in", "repository", ".", "preferences", ":", "setting", ".", "writeToReposito...
get the text representation of the repository .
train
false
22,563
def given(expr, condition=None, **kwargs):
    """Condition a random expression on *condition*.

    Builds the conditional probability space and returns *expr* rewritten
    over it; returns *expr* unchanged when the condition carries no
    randomness or is independent of it.
    """
    # Nothing random in the condition, or it's independent of expr:
    # conditioning can't change anything.
    if ((not random_symbols(condition)) or pspace_independent(expr, condition)):
        return expr
    if isinstance(condition, RandomSymbol):
        condition = Eq(condition, condition.symbol)
    condsymbols = random_symbols(condition)
    if (isinstance(condition, Equality) and (len(condsymbols) == 1) and (not isinstance(pspace(expr).domain, ConditionalDomain))):
        # Shortcut: a simple equality on one random symbol -- substitute
        # each solution of the equation directly.
        rv = tuple(condsymbols)[0]
        results = solveset(condition, rv)
        if (isinstance(results, Intersection) and (S.Reals in results.args)):
            # Unwrap "Reals ∩ S" into the finite solution set S.
            results = list(results.args[1])
        return sum((expr.subs(rv, res) for res in results))
    # General case: build the joint space, condition it, and swap the
    # old random symbols for their conditioned counterparts.
    fullspace = pspace(Tuple(expr, condition))
    space = fullspace.conditional_space(condition, **kwargs)
    swapdict = rs_swap(fullspace.values, space.values)
    expr = expr.xreplace(swapdict)
    return expr
[ "def", "given", "(", "expr", ",", "condition", "=", "None", ",", "**", "kwargs", ")", ":", "if", "(", "(", "not", "random_symbols", "(", "condition", ")", ")", "or", "pspace_independent", "(", "expr", ",", "condition", ")", ")", ":", "return", "expr", ...
conditional random expression from a random expression and a condition on that expression creates a new probability space from the condition and returns the same expression on that conditional probability space .
train
false
22,564
@register.function
@jinja2.contextfunction
def queue_tabnav_themes(context):
    """Build the theme review-queue tabs visible to the current user.

    Each tab is a (url_name, tab_id, label) tuple.
    """
    tabs = []
    if acl.action_allowed(context['request'], 'Personas', 'Review'):
        tabs.append(('editors.themes.list', 'pending_themes', _('Pending')))
    if acl.action_allowed(context['request'], 'SeniorPersonasTools', 'View'):
        # Senior reviewers additionally see flagged and re-review queues.
        tabs.append(('editors.themes.list_flagged', 'flagged_themes', _('Flagged')))
        tabs.append(('editors.themes.list_rereview', 'rereview_themes', _('Updates')))
    return tabs
[ "@", "register", ".", "function", "@", "jinja2", ".", "contextfunction", "def", "queue_tabnav_themes", "(", "context", ")", ":", "tabs", "=", "[", "]", "if", "acl", ".", "action_allowed", "(", "context", "[", "'request'", "]", ",", "'Personas'", ",", "'Rev...
similar to queue_tabnav .
train
false
22,565
def hdmi_force_hotplug_config_set(kodi_setting, all_settings):
    """Compute the hdmi_force_hotplug config value.

    Returns '1' when either the Kodi setting itself or hdmi_edid_file is
    enabled (edid needs force_hotplug), else asks for line removal.
    """
    edid_enabled = all_settings.get('hdmi_edid_file', None) == 'true'
    if (kodi_setting == 'true') or edid_enabled:
        return '1'
    return 'remove_this_line'
[ "def", "hdmi_force_hotplug_config_set", "(", "kodi_setting", ",", "all_settings", ")", ":", "if", "(", "kodi_setting", "==", "'true'", ")", ":", "return", "'1'", "elif", "(", "all_settings", ".", "get", "(", "'hdmi_edid_file'", ",", "None", ")", "==", "'true'"...
hdmi_edid_file needs hdmi_force_hotplug but hdmi_force_hotplug doesn't need hdmi_edid_file .
train
false
22,566
def get_zonecode():
    """Stub: timezone-code retrieval is unsupported here; always False."""
    return False
[ "def", "get_zonecode", "(", ")", ":", "return", "False" ]
get current timezone cli example: .
train
false
22,567
def repeat_call_proxy_for(interface, provider):
    """Build an *interface* proxy whose calls are routed through a
    _RepeatProxy wrapping *provider* (which repeats each call)."""
    return proxyForInterface(interface, originalAttribute='_original')(_RepeatProxy(_provider=provider))
[ "def", "repeat_call_proxy_for", "(", "interface", ",", "provider", ")", ":", "return", "proxyForInterface", "(", "interface", ",", "originalAttribute", "=", "'_original'", ")", "(", "_RepeatProxy", "(", "_provider", "=", "provider", ")", ")" ]
constructs an implementation of interface that calls the corresponding method on implementation twice for every call to a method .
train
false
22,568
def set_inactdays(name, days):
    """Stub: password-inactivity days cannot be set on this platform;
    always returns False."""
    return False
[ "def", "set_inactdays", "(", "name", ",", "days", ")", ":", "return", "False" ]
set the number of days of inactivity after a password has expired before the account is locked .
train
false
22,569
def get_quote(code, data, name):
    """Pair *name* with the entry for *code* in *data*, falling back to
    the u'ALL' entry when the code is absent."""
    value = data[code] if (code in data) else data[u'ALL']
    return (name, value)
[ "def", "get_quote", "(", "code", ",", "data", ",", "name", ")", ":", "if", "(", "code", "in", "data", ")", ":", "return", "(", "name", ",", "data", "[", "code", "]", ")", "return", "(", "name", ",", "data", "[", "u'ALL'", "]", ")" ]
returns special char for quote .
train
false
22,570
@app.route('/account/<subscription_id>/resourcegroups/<resource_group_name>/storageaccounts/<account_name>')
@auth.require_login
def storageaccount_view(subscription_id, resource_group_name, account_name):
    """Render the details page for one Azure storage account."""
    creds = _get_credentials()
    model = models.get_storage_account_details(subscription_id, creds, resource_group_name, account_name)
    return render_template('storageaccount.html', title=account_name, year=datetime.now().year, subscription_id=subscription_id, resource_group_name=resource_group_name, model=model)
[ "@", "app", ".", "route", "(", "'/account/<subscription_id>/resourcegroups/<resource_group_name>/storageaccounts/<account_name>'", ")", "@", "auth", ".", "require_login", "def", "storageaccount_view", "(", "subscription_id", ",", "resource_group_name", ",", "account_name", ")",...
renders the storage account details .
train
false
22,573
def add_volume_bricks(name, bricks):
    """Salt state: add brick(s) to an existing, started Gluster volume.

    name   -- volume name.
    bricks -- list of brick paths to add.
    Returns the usual state dict (name/changes/comment/result).
    """
    ret = {'name': name, 'changes': {}, 'comment': '', 'result': False}
    volinfo = __salt__['glusterfs.info']()
    if (name not in volinfo):
        ret['comment'] = 'Volume {0} does not exist'.format(name)
        return ret
    # Per the comment below, status != 1 is treated as "not started".
    if (int(volinfo[name]['status']) != 1):
        ret['comment'] = 'Volume {0} is not started'.format(name)
        return ret
    current_bricks = [brick['path'] for brick in volinfo[name]['bricks'].values()]
    if (not (set(bricks) - set(current_bricks))):
        # Every requested brick is already present: succeed, no changes.
        ret['result'] = True
        ret['comment'] = 'Bricks already added in volume {0}'.format(name)
        return ret
    bricks_added = __salt__['glusterfs.add_volume_bricks'](name, bricks)
    if bricks_added:
        ret['result'] = True
        ret['comment'] = 'Bricks successfully added to volume {0}'.format(name)
        # Re-query so 'changes' reflects the post-add brick list.
        new_bricks = [brick['path'] for brick in __salt__['glusterfs.info']()[name]['bricks'].values()]
        ret['changes'] = {'new': new_bricks, 'old': current_bricks}
        return ret
    ret['comment'] = 'Adding bricks to volume {0} failed'.format(name)
    return ret
[ "def", "add_volume_bricks", "(", "name", ",", "bricks", ")", ":", "ret", "=", "{", "'name'", ":", "name", ",", "'changes'", ":", "{", "}", ",", "'comment'", ":", "''", ",", "'result'", ":", "False", "}", "volinfo", "=", "__salt__", "[", "'glusterfs.inf...
add brick(s) to an existing volume name volume name bricks list of bricks to add to the volume .
train
true
22,574
@docstring.dedent_interpd
def mark_inset(parent_axes, inset_axes, loc1, loc2, **kwargs):
    """Draw a box on *parent_axes* marking the area shown by *inset_axes*
    and connect corners *loc1*/*loc2* of the two.

    Returns (box_patch, connector1, connector2).
    """
    # The inset's data limits expressed in the parent's data coordinates.
    rect = TransformedBbox(inset_axes.viewLim, parent_axes.transData)
    pp = BboxPatch(rect, fill=False, **kwargs)
    parent_axes.add_patch(pp)
    p1 = BboxConnector(inset_axes.bbox, rect, loc1=loc1, **kwargs)
    inset_axes.add_patch(p1)
    # Connectors run outside the inset axes, so clipping must be off.
    p1.set_clip_on(False)
    p2 = BboxConnector(inset_axes.bbox, rect, loc1=loc2, **kwargs)
    inset_axes.add_patch(p2)
    p2.set_clip_on(False)
    return (pp, p1, p2)
[ "@", "docstring", ".", "dedent_interpd", "def", "mark_inset", "(", "parent_axes", ",", "inset_axes", ",", "loc1", ",", "loc2", ",", "**", "kwargs", ")", ":", "rect", "=", "TransformedBbox", "(", "inset_axes", ".", "viewLim", ",", "parent_axes", ".", "transDa...
draw a box to mark the location of an area represented by an inset axes .
train
false
22,576
def isDefaultHandler():
    """Report whether SIGCHLD is still handled by the default SIG_DFL."""
    current_handler = signal.getsignal(signal.SIGCHLD)
    return current_handler == signal.SIG_DFL
[ "def", "isDefaultHandler", "(", ")", ":", "return", "(", "signal", ".", "getsignal", "(", "signal", ".", "SIGCHLD", ")", "==", "signal", ".", "SIG_DFL", ")" ]
determine whether the i{sigchld} handler is the default or not .
train
false
22,577
def net_recv_object(sock, timeout=60):
    """Receive a pickled Python object from *sock* within *timeout* s.

    Wire format: a 10-byte ASCII length header, then the pickle payload.
    Raises error.NetCommunicationError on timeout or malformed data.
    """
    try:
        endtime = (time.time() + timeout)
        data = ''
        d_len = int(sock.recv(10))
        while (len(data) < d_len):
            # Shrink the per-recv timeout so the overall deadline holds;
            # a past deadline makes settimeout raise ValueError, which is
            # converted to NetCommunicationError below.
            sock.settimeout((endtime - time.time()))
            data += sock.recv((d_len - len(data)))
        data = pickle.loads(data)
        return data
    except (socket.timeout, ValueError) as e:
        raise error.NetCommunicationError('Failed to receive python object over the network.')
[ "def", "net_recv_object", "(", "sock", ",", "timeout", "=", "60", ")", ":", "try", ":", "endtime", "=", "(", "time", ".", "time", "(", ")", "+", "timeout", ")", "data", "=", "''", "d_len", "=", "int", "(", "sock", ".", "recv", "(", "10", ")", "...
receive python object over network .
train
false
22,580
@require_GET
@ensure_csrf_cookie
@login_required
@ensure_valid_course_key
def import_status_handler(request, course_key_string, filename=None):
    """Return {'ImportStatus': n} for an in-progress course import.

    Status is read from the session, keyed by course key + filename;
    any missing key yields status 0 ("no status recorded").
    """
    course_key = CourseKey.from_string(course_key_string)
    if (not has_course_author_access(request.user, course_key)):
        raise PermissionDenied()
    try:
        session_status = request.session['import_status']
        status = session_status[(course_key_string + filename)]
    except KeyError:
        status = 0
    return JsonResponse({'ImportStatus': status})
[ "@", "require_GET", "@", "ensure_csrf_cookie", "@", "login_required", "@", "ensure_valid_course_key", "def", "import_status_handler", "(", "request", ",", "course_key_string", ",", "filename", "=", "None", ")", ":", "course_key", "=", "CourseKey", ".", "from_string", ...
returns an integer corresponding to the status of a file import .
train
false
22,581
def Lagrangian(frame, *body):
    """Lagrangian (kinetic minus potential energy) of a multibody system.

    frame -- ReferenceFrame in which kinetic energy is computed.
    body  -- Particle and/or RigidBody instances.
    Raises TypeError for invalid frame or body types.
    """
    if (not isinstance(frame, ReferenceFrame)):
        raise TypeError('Please supply a valid ReferenceFrame')
    for e in body:
        if (not isinstance(e, (RigidBody, Particle))):
            raise TypeError('*body must have only Particle or RigidBody')
    return (kinetic_energy(frame, *body) - potential_energy(*body))
[ "def", "Lagrangian", "(", "frame", ",", "*", "body", ")", ":", "if", "(", "not", "isinstance", "(", "frame", ",", "ReferenceFrame", ")", ")", ":", "raise", "TypeError", "(", "'Please supply a valid ReferenceFrame'", ")", "for", "e", "in", "body", ":", "if"...
lagrangian of a multibody system .
train
false
22,582
def _root_broyden2_doc(): pass
[ "def", "_root_broyden2_doc", "(", ")", ":", "pass" ]
options nit : int .
train
false
22,583
def _collect_post_update_commands(base_mapper, uowtransaction, table, states_to_update, post_update_cols):
    """Collect parameter sets for the UPDATE statements of a post_update
    flush against *table*.

    Returns a list of (state, state_dict, params, mapper, connection)
    tuples; states with no changed post-update column are skipped.
    """
    update = []
    for (state, state_dict, mapper, connection) in states_to_update:
        if (table not in mapper._pks_by_table):
            continue
        pks = mapper._pks_by_table[table]
        params = {}
        hasdata = False
        for col in mapper._cols_by_table[table]:
            if (col in pks):
                # Primary keys are keyed by col._label so the WHERE
                # parameters don't collide with SET parameters.
                params[col._label] = mapper._get_state_attr_by_column(state, state_dict, col)
            elif (col in post_update_cols):
                prop = mapper._columntoproperty[col]
                history = attributes.get_state_history(state, prop.key, attributes.PASSIVE_NO_INITIALIZE)
                if history.added:
                    # Only the newest pending value is written.
                    value = history.added[0]
                    params[col.key] = value
                    hasdata = True
        if hasdata:
            # Emit an UPDATE only when some post-update column changed.
            update.append((state, state_dict, params, mapper, connection))
    return update
[ "def", "_collect_post_update_commands", "(", "base_mapper", ",", "uowtransaction", ",", "table", ",", "states_to_update", ",", "post_update_cols", ")", ":", "update", "=", "[", "]", "for", "(", "state", ",", "state_dict", ",", "mapper", ",", "connection", ")", ...
identify sets of values to use in update statements for a list of states within a post_update operation .
train
false
22,584
def ar2full(ar):
    """Turn a reduced AR lag-polynomial array into a full right-hand-side
    lag polynomial by prepending an identity lead term and negating."""
    nlags, nvar, nvarex = ar.shape
    lead_term = np.eye(nvar, nvarex)[None, :, :]
    return np.concatenate((lead_term, -ar), axis=0)
[ "def", "ar2full", "(", "ar", ")", ":", "(", "nlags", ",", "nvar", ",", "nvarex", ")", "=", "ar", ".", "shape", "return", "np", ".", "r_", "[", "(", "np", ".", "eye", "(", "nvar", ",", "nvarex", ")", "[", "None", ",", ":", ",", ":", "]", ","...
make reduced lagpolynomial into a right side lagpoly array .
train
false
22,585
def sdm_to_dict(f):
    """Build a plain dict from a distributed polynomial *f* (an iterable
    of (monomial, coefficient) pairs)."""
    result = {}
    result.update(f)
    return result
[ "def", "sdm_to_dict", "(", "f", ")", ":", "return", "dict", "(", "f", ")" ]
make a dictionary from a distributed polynomial .
train
false
22,586
def addPathIndexFirstSegment(gridPixel, pathIndexTable, pixelTable, segmentFirstPixel):
    """Scan down the pixel column from gridPixel toward the first
    segment's pixel; stop as soon as a path index is recorded."""
    for yStep in xrange(gridPixel[1], (segmentFirstPixel[1] - 1), (-1)):
        if getKeyIsInPixelTableAddValue((gridPixel[0], yStep), pathIndexTable, pixelTable):
            # First hit found; nothing more to add.
            return
[ "def", "addPathIndexFirstSegment", "(", "gridPixel", ",", "pathIndexTable", ",", "pixelTable", ",", "segmentFirstPixel", ")", ":", "for", "yStep", "in", "xrange", "(", "gridPixel", "[", "1", "]", ",", "(", "segmentFirstPixel", "[", "1", "]", "-", "1", ")", ...
add the path index of the closest segment found toward the second segment .
train
false
22,587
@dispatch(sa.sql.elements.Label)
def get_inner_columns(label):
    """Unwrap a SQLAlchemy Label into its single lowered column,
    re-labeled with the original name (turns ScalarSelects into real
    columns)."""
    name = label.name
    inner_columns = get_inner_columns(label.element)
    # A Label can only ever wrap exactly one column.
    assert (len(inner_columns) == 1)
    return [lower_column(c).label(name) for c in inner_columns]
[ "@", "dispatch", "(", "sa", ".", "sql", ".", "elements", ".", "Label", ")", "def", "get_inner_columns", "(", "label", ")", ":", "name", "=", "label", ".", "name", "inner_columns", "=", "get_inner_columns", "(", "label", ".", "element", ")", "assert", "("...
notes this should only ever return a list of length 1 this is because we need to turn scalarselects into an actual column .
train
false
22,588
def get_own_memusage_linux():
    """Resident memory of this process in bytes (VmRSS from /proc);
    np.nan when the field cannot be parsed."""
    status_path = '/proc/%d/status' % os.getpid()
    with open(status_path, 'r') as handle:
        status_text = handle.read()
    match = re.search('VmRSS:\\s*(\\d+)\\s*kB', status_text, (re.S | re.I))
    if match is None:
        return np.nan
    # Note: kB is converted with a factor of 1000, as in the original.
    return float(match.group(1)) * 1000.0
[ "def", "get_own_memusage_linux", "(", ")", ":", "with", "open", "(", "(", "'/proc/%d/status'", "%", "os", ".", "getpid", "(", ")", ")", ",", "'r'", ")", "as", "f", ":", "procdata", "=", "f", ".", "read", "(", ")", "m", "=", "re", ".", "search", "...
return the memory usage of the current process .
train
false
22,589
@click.command(u'destroy-all-sessions')
@click.option(u'--reason')
@pass_context
def destroy_all_sessions(context, reason=None):
    """CLI command: clear the sessions of all users on every selected
    site, committing per site."""
    import frappe.sessions
    for site in context.sites:
        try:
            frappe.init(site=site)
            frappe.connect()
            frappe.sessions.clear_all_sessions(reason)
            frappe.db.commit()
        finally:
            # Always tear down the frappe context, even on failure.
            frappe.destroy()
[ "@", "click", ".", "command", "(", "u'destroy-all-sessions'", ")", "@", "click", ".", "option", "(", "u'--reason'", ")", "@", "pass_context", "def", "destroy_all_sessions", "(", "context", ",", "reason", "=", "None", ")", ":", "import", "frappe", ".", "sessi...
clear sessions of all users .
train
false
22,590
@not_implemented_for('directed')
def to_nested_tuple(T, root, canonical_form=False):
    """Return a nested-tuple representation of tree *T* rooted at *root*.

    With canonical_form=True, children are sorted so isomorphic rooted
    trees produce identical tuples.
    """

    def _make_tuple(T, root, _parent):
        """Recursively compute the nested tuple representation of the
        given rooted tree.

        ``_parent`` is the parent node of ``root`` in the supertree in
        which ``T`` is a subtree, or ``None`` if ``root`` is the root of
        the supertree. This argument is used to determine which
        neighbors of ``root`` are children and which is the parent.
        """
        children = (set(T[root]) - {_parent})
        if (len(children) == 0):
            return ()
        nested = (_make_tuple(T, v, root) for v in children)
        if canonical_form:
            nested = sorted(nested)
        return tuple(nested)

    if (not nx.is_tree(T)):
        raise nx.NotATree('provided graph is not a tree')
    if (root not in T):
        raise nx.NodeNotFound('Graph {} contains no node {}'.format(T, root))
    return _make_tuple(T, root, None)
[ "@", "not_implemented_for", "(", "'directed'", ")", "def", "to_nested_tuple", "(", "T", ",", "root", ",", "canonical_form", "=", "False", ")", ":", "def", "_make_tuple", "(", "T", ",", "root", ",", "_parent", ")", ":", "children", "=", "(", "set", "(", ...
returns a nested tuple representation of the given tree .
train
false
22,593
def iter_docstring_info(dir_name):
    """Yield file_info dicts for example screenshots in *dir_name*,
    augmented with parsed docstring info; entries with errors are
    logged and skipped."""
    for file_info in iter_filename_info(dir_name):
        if ('error' in file_info):
            Logger.error(file_info['error'])
            continue
        source = slash(examples_dir, file_info['dir'], ((file_info['file'] + '.') + file_info['ext']))
        if (not os.path.exists(source)):
            Logger.error('Screen shot references source code that does not exist: %s', source)
            continue
        with open(source) as f:
            text = f.read()
        docstring_info = parse_docstring_info(text)
        if ('error' in docstring_info):
            Logger.error(((docstring_info['error'] + ' File: ') + source))
            continue
        else:
            # Merge the docstring fields into the filename info.
            file_info.update(docstring_info)
            (yield file_info)
[ "def", "iter_docstring_info", "(", "dir_name", ")", ":", "for", "file_info", "in", "iter_filename_info", "(", "dir_name", ")", ":", "if", "(", "'error'", "in", "file_info", ")", ":", "Logger", ".", "error", "(", "file_info", "[", "'error'", "]", ")", "cont...
iterate over screenshots in directory .
train
false
22,594
def _OpenFileInSplitIfNeeded(filepath):
    """Ensure *filepath* is visible in a window.

    Returns (buffer_number, opened_new_split); raises RuntimeError when
    the file cannot be opened at all.
    """
    buffer_num = GetBufferNumberForFilename(filepath, False)
    if BufferIsVisible(buffer_num):
        # Already on screen; caller need not close anything afterwards.
        return (buffer_num, False)
    OpenFilename(filepath, {u'focus': True, u'fix': True, u'size': GetIntValue(u'&previewheight')})
    # Re-query: opening the file assigns it a buffer number.
    buffer_num = GetBufferNumberForFilename(filepath, False)
    if (not BufferIsVisible(buffer_num)):
        raise RuntimeError(u'Unable to open file: {0}\nFixIt/Refactor operation aborted prior to completion. Your files have not been fully updated. Please use undo commands to revert the applied changes.'.format(filepath))
    return (buffer_num, True)
[ "def", "_OpenFileInSplitIfNeeded", "(", "filepath", ")", ":", "buffer_num", "=", "GetBufferNumberForFilename", "(", "filepath", ",", "False", ")", "if", "BufferIsVisible", "(", "buffer_num", ")", ":", "return", "(", "buffer_num", ",", "False", ")", "OpenFilename",...
ensure that the supplied filepath is open in a visible window .
train
false
22,595
def hmason():
    # Python 2 `exec` statement: runs a randomly chosen "def ...:" line
    # scraped from socode_source -- deliberate randomness / joke code.
    # NOTE(review): exec on scraped source is unsafe by design here.
    exec random.choice(re.findall('def (.*):', socode_source))
[ "def", "hmason", "(", ")", ":", "exec", "random", ".", "choice", "(", "re", ".", "findall", "(", "'def (.*):'", ",", "socode_source", ")", ")" ]
introduce randomness .
train
false
22,596
def is_weight(w):
    """Test whether *w* is a valid weight vector: every entry is a
    non-negative integer or Fraction, and the entries sum to 1."""
    if isinstance(w, np.ndarray):
        w = w.tolist()
    entries_ok = all(
        (entry >= 0) and isinstance(entry, (numbers.Integral, Fraction))
        for entry in w
    )
    return entries_ok and (sum(w) == 1)
[ "def", "is_weight", "(", "w", ")", ":", "if", "isinstance", "(", "w", ",", "np", ".", "ndarray", ")", ":", "w", "=", "w", ".", "tolist", "(", ")", "valid_elems", "=", "all", "(", "(", "(", "(", "v", ">=", "0", ")", "and", "isinstance", "(", "...
test if w is a valid weight vector .
train
false
22,597
def gen_arg_addrspace_md(fn):
    """Build kernel_arg_addr_space LLVM metadata for SPIR kernel *fn*:
    pointer arguments get the global address space, all others the
    private one."""
    mod = fn.module
    # fn is a pointer to the function; pointee is the function type.
    fnty = fn.type.pointee
    codes = []
    for a in fnty.args:
        if cgutils.is_pointer(a):
            codes.append(SPIR_GLOBAL_ADDRSPACE)
        else:
            codes.append(SPIR_PRIVATE_ADDRSPACE)
    consts = [lc.Constant.int(lc.Type.int(), x) for x in codes]
    name = lc.MetaDataString.get(mod, 'kernel_arg_addr_space')
    return lc.MetaData.get(mod, ([name] + consts))
[ "def", "gen_arg_addrspace_md", "(", "fn", ")", ":", "mod", "=", "fn", ".", "module", "fnty", "=", "fn", ".", "type", ".", "pointee", "codes", "=", "[", "]", "for", "a", "in", "fnty", ".", "args", ":", "if", "cgutils", ".", "is_pointer", "(", "a", ...
generate kernel_arg_addr_space metadata .
train
false
22,598
def get_placeholder_conf(setting, placeholder, template=None, default=None):
    """Look up *setting* for *placeholder* in CMS_PLACEHOLDER_CONF.

    Keys are tried most-specific first: "template placeholder",
    placeholder, template, then the None catch-all.  An 'inherit' entry
    ("template placeholder" or just a placeholder name) redirects the
    lookup recursively.  Returns *default* when nothing matches.
    """
    if placeholder:
        keys = []
        placeholder_conf = get_cms_setting('PLACEHOLDER_CONF')
        if template:
            keys.append((u'%s %s' % (template, placeholder)))
        keys.append(placeholder)
        if template:
            keys.append(template)
        keys.append(None)
        for key in keys:
            for (conf_key, conf) in placeholder_conf.items():
                # Compare as text so lazy/bytes keys still match.
                if (force_text(conf_key) == force_text(key)):
                    if (not conf):
                        continue
                    value = conf.get(setting)
                    if (value is not None):
                        return value
                    inherit = conf.get('inherit')
                    if inherit:
                        # "template placeholder" or bare placeholder form.
                        if (' ' in inherit):
                            inherit = inherit.split(' ')
                        else:
                            inherit = (None, inherit)
                        value = get_placeholder_conf(setting, inherit[1], inherit[0], default)
                        if (value is not None):
                            return value
    return default
[ "def", "get_placeholder_conf", "(", "setting", ",", "placeholder", ",", "template", "=", "None", ",", "default", "=", "None", ")", ":", "if", "placeholder", ":", "keys", "=", "[", "]", "placeholder_conf", "=", "get_cms_setting", "(", "'PLACEHOLDER_CONF'", ")",...
returns the placeholder configuration for a given setting .
train
false
22,599
def _filter_attenuation(h, freq, gain): from scipy.signal import freqd (_, filt_resp) = freqd(h.ravel(), worN=(np.pi * freq)) filt_resp = np.abs(filt_resp) filt_resp[np.where((gain == 1))] = 0 idx = np.argmax(filt_resp) att_db = ((-20) * np.log10(np.maximum(filt_resp[idx], 1e-20))) att_freq = freq[idx] return (att_db, att_freq)
[ "def", "_filter_attenuation", "(", "h", ",", "freq", ",", "gain", ")", ":", "from", "scipy", ".", "signal", "import", "freqd", "(", "_", ",", "filt_resp", ")", "=", "freqd", "(", "h", ".", "ravel", "(", ")", ",", "worN", "=", "(", "np", ".", "pi"...
compute minimum attenuation at stop frequency .
train
false
22,600
@handle_response_format
@treeio_login_required
def service_record_delete(request, service_record_id, response_format='html'):
    """ServiceRecord delete page: confirm, trash, or hard-delete a
    record; requires write permission on the record."""
    service_record = get_object_or_404(ItemServicing, pk=service_record_id)
    if (not request.user.profile.has_permission(service_record, mode='w')):
        return user_denied(request, message="You don't have write access to this ServiceRecord")
    if request.POST:
        if ('delete' in request.POST):
            if ('trash' in request.POST):
                # Soft delete: mark as trashed instead of removing the row.
                service_record.trash = True
                service_record.save()
            else:
                service_record.delete()
            return HttpResponseRedirect(reverse('infrastructure_settings_view'))
        elif ('cancel' in request.POST):
            return HttpResponseRedirect(reverse('infrastructure_service_record_view', args=[service_record.id]))
    # GET (or POST without an action): render the confirmation page.
    context = _get_default_context(request)
    context.update({'service_record': service_record})
    return render_to_response('infrastructure/service_record_delete', context, context_instance=RequestContext(request), response_format=response_format)
[ "@", "handle_response_format", "@", "treeio_login_required", "def", "service_record_delete", "(", "request", ",", "service_record_id", ",", "response_format", "=", "'html'", ")", ":", "service_record", "=", "get_object_or_404", "(", "ItemServicing", ",", "pk", "=", "s...
servicerecord delete page .
train
false
22,601
def GenerateAuthSubUrl(next, scope, secure=False, session=True, request_url='https://www.google.com/accounts/AuthSubRequest', domain='default'):
    """Build the AuthSub login URL that will redirect back to *next*
    with a token for *scope*."""
    # AuthSub expects 1/0 flags rather than Python booleans.
    secure_flag = 1 if secure else 0
    session_flag = 1 if session else 0
    request_params = urllib.urlencode({'next': next, 'scope': scope, 'secure': secure_flag, 'session': session_flag, 'hd': domain})
    separator = '&' if ('?' in request_url) else '?'
    return ('%s%s%s' % (request_url, separator, request_params))
[ "def", "GenerateAuthSubUrl", "(", "next", ",", "scope", ",", "secure", "=", "False", ",", "session", "=", "True", ",", "request_url", "=", "'https://www.google.com/accounts/AuthSubRequest'", ",", "domain", "=", "'default'", ")", ":", "if", "secure", ":", "secure...
generate a url at which the user will login and be redirected back .
train
false
22,602
def __remove_null_logging_handler():
    """Detach the temporary NullHandler from the root logger once real
    logging is configured; idempotent (no-op on repeat calls)."""
    global LOGGING_NULL_HANDLER
    if (LOGGING_NULL_HANDLER is None):
        # Already removed by an earlier call.
        return
    root_logger = logging.getLogger()
    for handler in root_logger.handlers:
        # Identity check: only our exact placeholder handler is removed.
        if (handler is LOGGING_NULL_HANDLER):
            root_logger.removeHandler(LOGGING_NULL_HANDLER)
            LOGGING_NULL_HANDLER = None
            break
[ "def", "__remove_null_logging_handler", "(", ")", ":", "global", "LOGGING_NULL_HANDLER", "if", "(", "LOGGING_NULL_HANDLER", "is", "None", ")", ":", "return", "root_logger", "=", "logging", ".", "getLogger", "(", ")", "for", "handler", "in", "root_logger", ".", "...
this function will run once the temporary logging has been configured .
train
true
22,603
def has_potential_command_injection(user_input):
    """Heuristically detect shell command injection in *user_input*.

    Tokenizes with shlex and returns True (printing a warning) if any
    token is a shell metacharacter; False otherwise.

    Fixes/improvements: print is called as a function so the code also
    runs on Python 3, and the metacharacters live in a frozenset for
    O(1) membership instead of a per-token list scan.
    """
    # Note: default shlex emits punctuation one char at a time, so the
    # multi-char entries ('||', '&&', ...) are kept only for parity with
    # the original list -- the single-char forms are what actually match.
    dangerous = frozenset([';', '|', '||', '&', '&&', '>', '>>', '<', '<<', '^', '$', '`', '(', '{'])
    s = shlex.shlex(user_input)
    token = s.get_token()
    while (token != ''):
        if (token in dangerous):
            print('Detected potential command injection')
            return True
        token = s.get_token()
    return False
[ "def", "has_potential_command_injection", "(", "user_input", ")", ":", "s", "=", "shlex", ".", "shlex", "(", "user_input", ")", "token", "=", "s", ".", "get_token", "(", ")", "while", "(", "token", "!=", "''", ")", ":", "if", "(", "token", "in", "[", ...
in order to detect potential command injection methods .
train
false
22,604
def umount_volume(mnt_base):
    """Unmount the Quobyte volume at *mnt_base*; failures are logged,
    never raised (best-effort cleanup)."""
    try:
        utils.execute('umount.quobyte', mnt_base)
    except processutils.ProcessExecutionError as exc:
        if ('Device or resource busy' in six.text_type(exc)):
            # Busy mount is expected while instances still use it.
            LOG.error(_LE('The Quobyte volume at %s is still in use.'), mnt_base)
        else:
            LOG.exception(_LE("Couldn't unmount the Quobyte Volume at %s"), mnt_base)
[ "def", "umount_volume", "(", "mnt_base", ")", ":", "try", ":", "utils", ".", "execute", "(", "'umount.quobyte'", ",", "mnt_base", ")", "except", "processutils", ".", "ProcessExecutionError", "as", "exc", ":", "if", "(", "'Device or resource busy'", "in", "six", ...
wraps execute calls for unmouting a quobyte volume .
train
false
22,605
def fast_could_be_isomorphic(G1, G2):
    """Quick necessary condition for isomorphism: compare sorted
    (degree, triangle-count) sequences.

    False means the graphs are definitely not isomorphic; True is
    inconclusive.
    """
    if (G1.order() != G2.order()):
        return False
    d1 = G1.degree()
    t1 = nx.triangles(G1)
    # NOTE(review): iterating degree() as (node, degree) pairs assumes
    # the NetworkX 2.x DegreeView API -- confirm against the pinned
    # networkx version.
    props1 = [[d, t1[v]] for (v, d) in d1]
    props1.sort()
    d2 = G2.degree()
    t2 = nx.triangles(G2)
    props2 = [[d, t2[v]] for (v, d) in d2]
    props2.sort()
    if (props1 != props2):
        return False
    return True
[ "def", "fast_could_be_isomorphic", "(", "G1", ",", "G2", ")", ":", "if", "(", "G1", ".", "order", "(", ")", "!=", "G2", ".", "order", "(", ")", ")", ":", "return", "False", "d1", "=", "G1", ".", "degree", "(", ")", "t1", "=", "nx", ".", "triang...
returns false if graphs are definitely not isomorphic .
train
false
22,606
def formatUIDListResponse(msgs, getUidl):
    """Yield the lines of a complete POP3 UIDL response: +OK line, one
    line per message, then the terminating '.' line."""
    (yield successResponse(''))
    for ele in formatUIDListLines(msgs, getUidl):
        (yield ele)
    (yield '.\r\n')
[ "def", "formatUIDListResponse", "(", "msgs", ",", "getUidl", ")", ":", "(", "yield", "successResponse", "(", "''", ")", ")", "for", "ele", "in", "formatUIDListLines", "(", "msgs", ",", "getUidl", ")", ":", "(", "yield", "ele", ")", "(", "yield", "'.\\r\\...
format a list of message sizes appropriately for a complete uidl response .
train
false
22,607
def Disconnect(si):
    """Log out of service instance *si* and clear the cached instance."""
    __Logout(si)
    SetSi(None)
[ "def", "Disconnect", "(", "si", ")", ":", "__Logout", "(", "si", ")", "SetSi", "(", "None", ")" ]
disconnect service instance .
train
false
22,608
def getFileOrDirectoryTypes(fileName, fileTypes, wasCancelled):
    """Return the files to operate on: the directory's files of the
    given types when the directory setting is on, else just [fileName];
    empty when the dialog was cancelled."""
    if isEmptyOrCancelled(fileName, wasCancelled):
        return []
    if isDirectorySetting():
        return archive.getFilesWithFileTypesWithoutWords(fileTypes, [], fileName)
    return [fileName]
[ "def", "getFileOrDirectoryTypes", "(", "fileName", ",", "fileTypes", ",", "wasCancelled", ")", ":", "if", "isEmptyOrCancelled", "(", "fileName", ",", "wasCancelled", ")", ":", "return", "[", "]", "if", "isDirectorySetting", "(", ")", ":", "return", "archive", ...
get the gcode files in the directory the file is in if directory setting is true .
train
false
22,609
def parse_dict_querystring(environ):
    """Parse QUERY_STRING into a MultiDict, caching the parsed result in
    *environ* keyed on the exact source string."""
    source = environ.get('QUERY_STRING', '')
    if (not source):
        return MultiDict()
    if ('paste.parsed_dict_querystring' in environ):
        (parsed, check_source) = environ['paste.parsed_dict_querystring']
        # Reuse the cache only while the query string is unchanged.
        if (check_source == source):
            return parsed
    parsed = cgi.parse_qsl(source, keep_blank_values=True, strict_parsing=False)
    multi = MultiDict(parsed)
    environ['paste.parsed_dict_querystring'] = (multi, source)
    return multi
[ "def", "parse_dict_querystring", "(", "environ", ")", ":", "source", "=", "environ", ".", "get", "(", "'QUERY_STRING'", ",", "''", ")", "if", "(", "not", "source", ")", ":", "return", "MultiDict", "(", ")", "if", "(", "'paste.parsed_dict_querystring'", "in",...
parses a query string like parse_querystring .
train
false
22,610
def action_all(options):
    """Print a terminal table listing every regexp list (id and name)."""
    lists = get_regexp_lists()
    header = [u'#', u'List Name']
    table_data = [header]
    for regexp_list in lists:
        table_data.append([regexp_list.id, regexp_list.name])
    table = TerminalTable(options.table_type, table_data)
    try:
        console(table.output)
    except TerminalTableError as e:
        console((u'ERROR: %s' % str(e)))
[ "def", "action_all", "(", "options", ")", ":", "lists", "=", "get_regexp_lists", "(", ")", "header", "=", "[", "u'#'", ",", "u'List Name'", "]", "table_data", "=", "[", "header", "]", "for", "regexp_list", "in", "lists", ":", "table_data", ".", "append", ...
show all regexp lists .
train
false
22,611
def _sort_map_entries(category_map, sort_alpha):
    """Recursively build each category map's 'children' list: entries and
    subcategories ordered by their sort_key (mutates *category_map*)."""
    things = []
    for (title, entry) in category_map['entries'].items():
        if ((entry['sort_key'] is None) and sort_alpha):
            # No explicit key: fall back to alphabetical order by title.
            entry['sort_key'] = title
        things.append((title, entry, TYPE_ENTRY))
    for (title, category) in category_map['subcategories'].items():
        things.append((title, category, TYPE_SUBCATEGORY))
        # Recurse so nested subcategories get their children ordered too.
        _sort_map_entries(category_map['subcategories'][title], sort_alpha)
    category_map['children'] = [(x[0], x[2]) for x in sorted(things, key=(lambda x: x[1]['sort_key']))]
[ "def", "_sort_map_entries", "(", "category_map", ",", "sort_alpha", ")", ":", "things", "=", "[", "]", "for", "(", "title", ",", "entry", ")", "in", "category_map", "[", "'entries'", "]", ".", "items", "(", ")", ":", "if", "(", "(", "entry", "[", "'s...
internal helper method to list category entries according to the provided sort order .
train
false
22,612
def corner(sankey):
    """Append a 90-degree corner link to *sankey*, chained onto the
    previously added diagram."""
    prior = len(sankey.diagrams)
    sankey.add(flows=[1, (-1)], orientations=[0, 1], patchlabel=str(prior), facecolor='k', prior=(prior - 1), connect=(1, 0), alpha=0.5)
[ "def", "corner", "(", "sankey", ")", ":", "prior", "=", "len", "(", "sankey", ".", "diagrams", ")", "sankey", ".", "add", "(", "flows", "=", "[", "1", ",", "(", "-", "1", ")", "]", ",", "orientations", "=", "[", "0", ",", "1", "]", ",", "patc...
generate a corner link .
train
false
22,613
def _prefix_number_with_country_calling_code(country_code, num_format, formatted_number): if (num_format == PhoneNumberFormat.E164): return ((_PLUS_SIGN + unicod(country_code)) + formatted_number) elif (num_format == PhoneNumberFormat.INTERNATIONAL): return (((_PLUS_SIGN + unicod(country_code)) + U_SPACE) + formatted_number) elif (num_format == PhoneNumberFormat.RFC3966): return ((((_RFC3966_PREFIX + _PLUS_SIGN) + unicod(country_code)) + U_DASH) + formatted_number) else: return formatted_number
[ "def", "_prefix_number_with_country_calling_code", "(", "country_code", ",", "num_format", ",", "formatted_number", ")", ":", "if", "(", "num_format", "==", "PhoneNumberFormat", ".", "E164", ")", ":", "return", "(", "(", "_PLUS_SIGN", "+", "unicod", "(", "country_...
a helper function that is used by format_number and format_by_pattern .
train
true
22,615
def dist_to_mat(cord, vec, matidxlist, mcmap, invert=False, limit=False): assert isinstance(mcmap, mcInterface.SaveFile) block = mcmap.block curcord = [(i + 0.5) for i in cord] iterations = 0 on_map = True while on_map: x = int(curcord[0]) y = int(curcord[1]) z = int(curcord[2]) return_dict = block(x, y, z) if (return_dict is None): break else: block_value = return_dict['B'] if ((block_value in matidxlist) and (invert == False)): break elif ((block_value not in matidxlist) and invert): break else: curcord = [(curcord[i] + vec[i]) for i in range(3)] iterations += 1 if (limit and (iterations > limit)): break return iterations
[ "def", "dist_to_mat", "(", "cord", ",", "vec", ",", "matidxlist", ",", "mcmap", ",", "invert", "=", "False", ",", "limit", "=", "False", ")", ":", "assert", "isinstance", "(", "mcmap", ",", "mcInterface", ".", "SaveFile", ")", "block", "=", "mcmap", "....
travel from cord along vec and return how far it was to a point of matidx . the distance is returned in number of iterations .
train
false
22,618
def _serializer(obj): import datetime if isinstance(obj, datetime.datetime): if (obj.utcoffset() is not None): obj = (obj - obj.utcoffset()) return obj.__str__() return obj
[ "def", "_serializer", "(", "obj", ")", ":", "import", "datetime", "if", "isinstance", "(", "obj", ",", "datetime", ".", "datetime", ")", ":", "if", "(", "obj", ".", "utcoffset", "(", ")", "is", "not", "None", ")", ":", "obj", "=", "(", "obj", "-", ...
helper function to serialize some objects for prettier return .
train
true
22,621
def colored(text, color=None, on_color=None, attrs=None): if (os.getenv('ANSI_COLORS_DISABLED') is None): fmt_str = '\x1b[%dm%s' if (color is not None): text = (fmt_str % (COLORS[color], text)) if (on_color is not None): text = (fmt_str % (HIGHLIGHTS[on_color], text)) if (attrs is not None): for attr in attrs: text = (fmt_str % (ATTRIBUTES[attr], text)) text += RESET return text
[ "def", "colored", "(", "text", ",", "color", "=", "None", ",", "on_color", "=", "None", ",", "attrs", "=", "None", ")", ":", "if", "(", "os", ".", "getenv", "(", "'ANSI_COLORS_DISABLED'", ")", "is", "None", ")", ":", "fmt_str", "=", "'\\x1b[%dm%s'", ...
colorize text .
train
true
22,622
def _convertTime(title, fromPTDFtoWEB=1, _emptyString=u''): try: if fromPTDFtoWEB: from_format = _ptdf_format to_format = _web_format else: from_format = (u'Episode dated %s' % _web_format) to_format = _ptdf_format t = strptime(title, from_format) title = strftime(to_format, t) if fromPTDFtoWEB: if (title[0] == '0'): title = title[1:] title = (u'Episode dated %s' % title) except ValueError: pass if isinstance(_emptyString, str): try: title = str(title) except UnicodeDecodeError: pass return title
[ "def", "_convertTime", "(", "title", ",", "fromPTDFtoWEB", "=", "1", ",", "_emptyString", "=", "u''", ")", ":", "try", ":", "if", "fromPTDFtoWEB", ":", "from_format", "=", "_ptdf_format", "to_format", "=", "_web_format", "else", ":", "from_format", "=", "(",...
convert a time expressed in the plain text data files .
train
false
22,623
def _testPermissions(uid, gid, spath, mode='r'): if (mode == 'r'): usr = stat.S_IRUSR grp = stat.S_IRGRP oth = stat.S_IROTH amode = os.R_OK elif (mode == 'w'): usr = stat.S_IWUSR grp = stat.S_IWGRP oth = stat.S_IWOTH amode = os.W_OK else: raise ValueError(("Invalid mode %r: must specify 'r' or 'w'" % (mode,))) access = False if os.path.exists(spath): if (uid == 0): access = True else: s = os.stat(spath) if ((usr & s.st_mode) and (uid == s.st_uid)): access = True elif ((grp & s.st_mode) and (gid in _getgroups(uid))): access = True elif (oth & s.st_mode): access = True if access: if (not os.access(spath, amode)): access = False log.msg(('Filesystem grants permission to UID %d but it is inaccessible to me running as UID %d' % (uid, os.getuid()))) return access
[ "def", "_testPermissions", "(", "uid", ",", "gid", ",", "spath", ",", "mode", "=", "'r'", ")", ":", "if", "(", "mode", "==", "'r'", ")", ":", "usr", "=", "stat", ".", "S_IRUSR", "grp", "=", "stat", ".", "S_IRGRP", "oth", "=", "stat", ".", "S_IROT...
checks to see if uid has proper permissions to access path with mode @type uid: c{int} .
train
false
22,625
def callLater_weakly(delay, func, *args, **kwargs): warnings.warn('deprecated', DeprecationWarning) def cleanup(weakmeth, thedeadweakref): if weakmeth.callId.active(): weakmeth.callId.cancel() weakmeth = WeakMethod(func, callback=cleanup) weakmeth.callId = reactor.callLater(delay, weakmeth, *args, **kwargs) return weakmeth
[ "def", "callLater_weakly", "(", "delay", ",", "func", ",", "*", "args", ",", "**", "kwargs", ")", ":", "warnings", ".", "warn", "(", "'deprecated'", ",", "DeprecationWarning", ")", "def", "cleanup", "(", "weakmeth", ",", "thedeadweakref", ")", ":", "if", ...
call func later .
train
false
22,626
def is_solenoidal(field): if (not isinstance(field, Vector)): raise TypeError('field should be a Vector') if (field == Vector.zero): return True coord_sys = list(field.separate())[0] return (divergence(field, coord_sys).simplify() == S(0))
[ "def", "is_solenoidal", "(", "field", ")", ":", "if", "(", "not", "isinstance", "(", "field", ",", "Vector", ")", ")", ":", "raise", "TypeError", "(", "'field should be a Vector'", ")", "if", "(", "field", "==", "Vector", ".", "zero", ")", ":", "return",...
checks if a field is solenoidal .
train
false
22,627
def serializable(arg=None, **kwargs): if isinstance(arg, FunctionType): decorator = True func = arg serialized_type = BaseType elif ((arg is None) or isinstance(arg, (BaseType, TypeMeta))): decorator = False serialized_type = (arg or kwargs.pop(u'type', BaseType)) else: raise TypeError(u"The argument to 'serializable' must be a function or a type.") if isinstance(serialized_type, BaseType): serialized_type._set_export_level(kwargs.pop(u'export_level', None), kwargs.pop(u'serialize_when_none', None)) for (name, value) in kwargs.items(): setattr(serialized_type, name, value) else: serialized_type = serialized_type(**kwargs) if decorator: return Serializable(func, serialized_type) else: return partial(Serializable, type=serialized_type)
[ "def", "serializable", "(", "arg", "=", "None", ",", "**", "kwargs", ")", ":", "if", "isinstance", "(", "arg", ",", "FunctionType", ")", ":", "decorator", "=", "True", "func", "=", "arg", "serialized_type", "=", "BaseType", "elif", "(", "(", "arg", "is...
a serializable is a way to define dynamic serializable fields that are derived from other fields .
train
false
22,628
def greet(greetee): return green((u'Hello, %s!' % greetee))
[ "def", "greet", "(", "greetee", ")", ":", "return", "green", "(", "(", "u'Hello, %s!'", "%", "greetee", ")", ")" ]
logs a friendly greeting to person given as argument .
train
false
22,629
def cronite_datetime_to_timestamp(self, d): if (d.tzinfo is not None): d = (d.replace(tzinfo=None) - d.utcoffset()) return total_seconds((d - datetime.datetime(1970, 1, 1)))
[ "def", "cronite_datetime_to_timestamp", "(", "self", ",", "d", ")", ":", "if", "(", "d", ".", "tzinfo", "is", "not", "None", ")", ":", "d", "=", "(", "d", ".", "replace", "(", "tzinfo", "=", "None", ")", "-", "d", ".", "utcoffset", "(", ")", ")",...
converts a datetime object d into a unix timestamp .
train
false
22,632
def parse_machine(machine, user='root', password='', port=22, profile=''): if ('@' in machine): (user, machine) = machine.split('@', 1) if (':' in user): (user, password) = user.split(':', 1) if (':' in machine): (machine, port) = machine.split(':', 1) try: port = int(port) except ValueError: (port, profile) = port.split('#', 1) port = int(port) if ('#' in machine): (machine, profile) = machine.split('#', 1) if ((not machine) or (not user)): raise ValueError return (machine, user, password, port, profile)
[ "def", "parse_machine", "(", "machine", ",", "user", "=", "'root'", ",", "password", "=", "''", ",", "port", "=", "22", ",", "profile", "=", "''", ")", ":", "if", "(", "'@'", "in", "machine", ")", ":", "(", "user", ",", "machine", ")", "=", "mach...
parse the machine string user:pass@host#profile:port and return it separately .
train
false
22,633
def rebuild_all_dbs(Session): db_read_url_parts = cli.parse_db_config('ckan.datastore.write_url') db_ckan_url_parts = cli.parse_db_config('sqlalchemy.url') same_db = (db_read_url_parts['db_name'] == db_ckan_url_parts['db_name']) if same_db: model.repo.tables_created_and_initialised = False clear_db(Session) model.repo.rebuild_db()
[ "def", "rebuild_all_dbs", "(", "Session", ")", ":", "db_read_url_parts", "=", "cli", ".", "parse_db_config", "(", "'ckan.datastore.write_url'", ")", "db_ckan_url_parts", "=", "cli", ".", "parse_db_config", "(", "'sqlalchemy.url'", ")", "same_db", "=", "(", "db_read_...
if the tests are running on the same db .
train
false
22,634
def convert_cidr(cidr): ret = {'network': None, 'netmask': None} cidr = calc_net(cidr) network_info = salt.ext.ipaddress.ip_network(cidr) ret['network'] = str(network_info.network_address) ret['netmask'] = str(network_info.netmask) return ret
[ "def", "convert_cidr", "(", "cidr", ")", ":", "ret", "=", "{", "'network'", ":", "None", ",", "'netmask'", ":", "None", "}", "cidr", "=", "calc_net", "(", "cidr", ")", "network_info", "=", "salt", ".", "ext", ".", "ipaddress", ".", "ip_network", "(", ...
returns the network and subnet mask of a cidr addr .
train
false
22,636
def run_commentstree(qname='commentstree_q', limit=400): @g.stats.amqp_processor(qname) def _run_commentstree(msgs, chan): comments = Comment._by_fullname([msg.body for msg in msgs], data=True, return_dict=False) print ('Processing %r' % (comments,)) if comments: add_comments(comments) if (qname == 'commentstree_fastlane_q'): limit = max(1000, limit) amqp.handle_items(qname, _run_commentstree, limit=limit)
[ "def", "run_commentstree", "(", "qname", "=", "'commentstree_q'", ",", "limit", "=", "400", ")", ":", "@", "g", ".", "stats", ".", "amqp_processor", "(", "qname", ")", "def", "_run_commentstree", "(", "msgs", ",", "chan", ")", ":", "comments", "=", "Comm...
add new incoming comments to their respective comments trees .
train
false
22,637
def fragcascade(attr, seq_type, doc=''): assert (seq_type in ('hit', 'query')) attr_name = ('_%s_%s' % (seq_type, attr)) def getter(self): return getattr(self, attr_name) def setter(self, value): setattr(self, attr_name, value) seq = getattr(self, seq_type) if (seq is not None): setattr(seq, attr, value) return property(fget=getter, fset=setter, doc=doc)
[ "def", "fragcascade", "(", "attr", ",", "seq_type", ",", "doc", "=", "''", ")", ":", "assert", "(", "seq_type", "in", "(", "'hit'", ",", "'query'", ")", ")", "attr_name", "=", "(", "'_%s_%s'", "%", "(", "seq_type", ",", "attr", ")", ")", "def", "ge...
returns a getter property with cascading setter .
train
false
22,639
def createCertificate(req, issuerCertKey, serial, validityPeriod, digest='sha256'): (issuerCert, issuerKey) = issuerCertKey (notBefore, notAfter) = validityPeriod cert = crypto.X509() cert.set_serial_number(serial) cert.gmtime_adj_notBefore(notBefore) cert.gmtime_adj_notAfter(notAfter) cert.set_issuer(issuerCert.get_subject()) cert.set_subject(req.get_subject()) cert.set_pubkey(req.get_pubkey()) cert.sign(issuerKey, digest) return cert
[ "def", "createCertificate", "(", "req", ",", "issuerCertKey", ",", "serial", ",", "validityPeriod", ",", "digest", "=", "'sha256'", ")", ":", "(", "issuerCert", ",", "issuerKey", ")", "=", "issuerCertKey", "(", "notBefore", ",", "notAfter", ")", "=", "validi...
generate a certificate given a certificate request .
train
true
22,640
def populate(): print print 'Creating roles...' create_roles() print 'Creating services...' create_services() print 'Creating permissions...' create_permissions() print 'Creating custom system roles...' create_custom_sys_roles() print 'Creating admin panel permissions...' create_panel_permissions() print 'Creating user permissions...' create_user_permissions() print 'Creating admin message settings...' create_admin_message_settings() db.session.commit()
[ "def", "populate", "(", ")", ":", "print", "print", "'Creating roles...'", "create_roles", "(", ")", "print", "'Creating services...'", "create_services", "(", ")", "print", "'Creating permissions...'", "create_permissions", "(", ")", "print", "'Creating custom system rol...
populate table with n records .
train
false
22,641
@remote_data def test_invalid_location_download(): from ..data import download_file with pytest.raises(urllib.error.URLError): download_file(u'http://astropy.org/nonexistentfile')
[ "@", "remote_data", "def", "test_invalid_location_download", "(", ")", ":", "from", ".", ".", "data", "import", "download_file", "with", "pytest", ".", "raises", "(", "urllib", ".", "error", ".", "URLError", ")", ":", "download_file", "(", "u'http://astropy.org/...
checks that download_file gives a urlerror and not an attributeerror .
train
false
22,642
def _find_bad_optimizations2(order, reasons, r_vals): checked_variables = set() def check_variable_norec(new_r): '\n Verify that `r` has the same value as the results it replaces.\n\n ' for (reason, r, old_graph_str, new_graph_str) in reasons[new_r]: new_r_val = r_vals[new_r] r_val = r_vals[r] if ((r.type != new_r.type) or (not r.type.values_eq_approx(r_val, new_r_val))): raise BadOptimization(old_r=r, new_r=new_r, old_r_val=r_val, new_r_val=new_r_val, reason=reason, old_graph=old_graph_str, new_graph=new_graph_str) def check_variable(r): if (r in checked_variables): return checked_variables.add(r) list_of_vars = [old_r for (reason, old_r, olds, news) in reasons[r]] if (None is not r.owner): list_of_vars += r.owner.inputs for var_that_could_make_r_look_bad in list_of_vars: check_variable(var_that_could_make_r_look_bad) check_variable_norec(r) for (i, node) in enumerate(order): for new_r in node.outputs: check_variable(new_r)
[ "def", "_find_bad_optimizations2", "(", "order", ",", "reasons", ",", "r_vals", ")", ":", "checked_variables", "=", "set", "(", ")", "def", "check_variable_norec", "(", "new_r", ")", ":", "for", "(", "reason", ",", "r", ",", "old_graph_str", ",", "new_graph_...
use a simple algorithm to find broken optimizations .
train
false
22,643
def read_plain_byte_array_fixed(file_obj, fixed_length): return file_obj.read(fixed_length)
[ "def", "read_plain_byte_array_fixed", "(", "file_obj", ",", "fixed_length", ")", ":", "return", "file_obj", ".", "read", "(", "fixed_length", ")" ]
read a byte array of the given fixed_length .
train
false
22,646
def is_attr_protected(attrname): return ((attrname[0] == '_') and (not (attrname == '_')) and (not (attrname.startswith('__') and attrname.endswith('__'))))
[ "def", "is_attr_protected", "(", "attrname", ")", ":", "return", "(", "(", "attrname", "[", "0", "]", "==", "'_'", ")", "and", "(", "not", "(", "attrname", "==", "'_'", ")", ")", "and", "(", "not", "(", "attrname", ".", "startswith", "(", "'__'", "...
return true if attribute name is protected .
train
false
22,647
def quiet_shutdown(socket_instance): try: socket_instance.shutdown(socket.SHUT_RDWR) except Exception: logger.debug(u'Exception while shutting down a socket', exc_info=True)
[ "def", "quiet_shutdown", "(", "socket_instance", ")", ":", "try", ":", "socket_instance", ".", "shutdown", "(", "socket", ".", "SHUT_RDWR", ")", "except", "Exception", ":", "logger", ".", "debug", "(", "u'Exception while shutting down a socket'", ",", "exc_info", ...
quietly shuts down a socket without throwing an exception .
train
false
22,650
def random_state_data(n, random_state=None): import numpy as np if (not isinstance(random_state, np.random.RandomState)): random_state = np.random.RandomState(random_state) maxuint32 = np.iinfo(np.uint32).max return [(random_state.rand(624) * maxuint32).astype('uint32') for i in range(n)]
[ "def", "random_state_data", "(", "n", ",", "random_state", "=", "None", ")", ":", "import", "numpy", "as", "np", "if", "(", "not", "isinstance", "(", "random_state", ",", "np", ".", "random", ".", "RandomState", ")", ")", ":", "random_state", "=", "np", ...
return a list of arrays that can initialize np.random.RandomState .
train
false
22,652
def _header_property(header): def getter(self): return self.headers.get(header, None) def setter(self, value): self.headers[header] = value return property(getter, setter, doc=('Retrieve and set the %s header' % header))
[ "def", "_header_property", "(", "header", ")", ":", "def", "getter", "(", "self", ")", ":", "return", "self", ".", "headers", ".", "get", "(", "header", ",", "None", ")", "def", "setter", "(", "self", ",", "value", ")", ":", "self", ".", "headers", ...
set and retrieve the value of self .
train
false
22,653
def setup_db_env(): global sa_logger, _IDLE_TIMEOUT, _MAX_RETRIES, _RETRY_INTERVAL, _CONNECTION _IDLE_TIMEOUT = CONF.sql_idle_timeout _MAX_RETRIES = CONF.sql_max_retries _RETRY_INTERVAL = CONF.sql_retry_interval _CONNECTION = CONF.sql_connection sa_logger = logging.getLogger('sqlalchemy.engine') if CONF.debug: sa_logger.setLevel(logging.DEBUG)
[ "def", "setup_db_env", "(", ")", ":", "global", "sa_logger", ",", "_IDLE_TIMEOUT", ",", "_MAX_RETRIES", ",", "_RETRY_INTERVAL", ",", "_CONNECTION", "_IDLE_TIMEOUT", "=", "CONF", ".", "sql_idle_timeout", "_MAX_RETRIES", "=", "CONF", ".", "sql_max_retries", "_RETRY_IN...
setup configuration for database .
train
false
22,655
@transaction.non_atomic_requests @ensure_csrf_cookie @cache_control(no_cache=True, no_store=True, must_revalidate=True) @require_global_staff @require_POST def start_certificate_regeneration(request, course_id): course_key = CourseKey.from_string(course_id) certificates_statuses = request.POST.getlist('certificate_statuses', []) if (not certificates_statuses): return JsonResponse({'message': _('Please select one or more certificate statuses that require certificate regeneration.')}, status=400) allowed_statuses = [CertificateStatuses.downloadable, CertificateStatuses.error, CertificateStatuses.notpassing, CertificateStatuses.audit_passing, CertificateStatuses.audit_notpassing] if (not set(certificates_statuses).issubset(allowed_statuses)): return JsonResponse({'message': _('Please select certificate statuses from the list only.')}, status=400) try: lms.djangoapps.instructor_task.api.regenerate_certificates(request, course_key, certificates_statuses) except AlreadyRunningError as error: return JsonResponse({'message': error.message}, status=400) response_payload = {'message': _('Certificate regeneration task has been started. You can view the status of the generation task in the "Pending Tasks" section.'), 'success': True} return JsonResponse(response_payload)
[ "@", "transaction", ".", "non_atomic_requests", "@", "ensure_csrf_cookie", "@", "cache_control", "(", "no_cache", "=", "True", ",", "no_store", "=", "True", ",", "must_revalidate", "=", "True", ")", "@", "require_global_staff", "@", "require_POST", "def", "start_c...
start regenerating certificates for students whose certificate statuses lie with in certificate_statuses entry in post data .
train
false
22,657
def addEdgePair(edgePairTable, edges, faceEdgeIndex, remainingEdgeIndex, remainingEdgeTable): if (faceEdgeIndex == remainingEdgeIndex): return if (not (faceEdgeIndex in remainingEdgeTable)): return edgePair = EdgePair().getFromIndexesEdges([remainingEdgeIndex, faceEdgeIndex], edges) edgePairTable[str(edgePair)] = edgePair
[ "def", "addEdgePair", "(", "edgePairTable", ",", "edges", ",", "faceEdgeIndex", ",", "remainingEdgeIndex", ",", "remainingEdgeTable", ")", ":", "if", "(", "faceEdgeIndex", "==", "remainingEdgeIndex", ")", ":", "return", "if", "(", "not", "(", "faceEdgeIndex", "i...
add edge pair to the edge pair table .
train
false
22,658
@register.filter def css_class(field): return field.field.widget.__class__.__name__.lower()
[ "@", "register", ".", "filter", "def", "css_class", "(", "field", ")", ":", "return", "field", ".", "field", ".", "widget", ".", "__class__", ".", "__name__", ".", "lower", "(", ")" ]
returns widgets class name in lowercase .
train
false
22,660
def seq_allequal(seq1, seq2): if (seq1 is None): return (seq2 is None) if (seq2 is None): return False if (len(seq1) != len(seq2)): return False return npy.alltrue(npy.equal(seq1, seq2))
[ "def", "seq_allequal", "(", "seq1", ",", "seq2", ")", ":", "if", "(", "seq1", "is", "None", ")", ":", "return", "(", "seq2", "is", "None", ")", "if", "(", "seq2", "is", "None", ")", ":", "return", "False", "if", "(", "len", "(", "seq1", ")", "!...
seq1 and seq2 are either none or sequences or numerix arrays return true if both are none or both are seqs with identical elements .
train
false
22,661
def convert_repr_number(number): sep = '' dec = '' part_one = '0' part_two = '' for digit in number: if digit.isdigit(): if (sep == ''): part_one += digit else: part_two += digit elif ((digit == '-') and (part_one == '0')): part_one = '-0' elif ((sep == '') and (sep != digit)): sep = digit elif (dec == ''): dec = digit part_two += '.' else: return False if (dec == ''): return float(('%s.%s' % (part_one, part_two))) else: return float(('%s%s' % (part_one, part_two)))
[ "def", "convert_repr_number", "(", "number", ")", ":", "sep", "=", "''", "dec", "=", "''", "part_one", "=", "'0'", "part_two", "=", "''", "for", "digit", "in", "number", ":", "if", "digit", ".", "isdigit", "(", ")", ":", "if", "(", "sep", "==", "''...
helper function to convert a string representation back to a number .
train
false
22,662
def EntityListKind(keylist): kinds = map(EntityKind, keylist) unique_kinds = set(kinds) numkinds = len(unique_kinds) if (numkinds > 1): return 'Multi' elif (numkinds == 1): return unique_kinds.pop() else: return 'None'
[ "def", "EntityListKind", "(", "keylist", ")", ":", "kinds", "=", "map", "(", "EntityKind", ",", "keylist", ")", "unique_kinds", "=", "set", "(", "kinds", ")", "numkinds", "=", "len", "(", "unique_kinds", ")", "if", "(", "numkinds", ">", "1", ")", ":", ...
given list of entity keys .
train
false
22,663
def shell_quote(text): return u"\\'".join(((u"'%s'" % p) for p in text.split(u"'")))
[ "def", "shell_quote", "(", "text", ")", ":", "return", "u\"\\\\'\"", ".", "join", "(", "(", "(", "u\"'%s'\"", "%", "p", ")", "for", "p", "in", "text", ".", "split", "(", "u\"'\"", ")", ")", ")" ]
quote text for inserting into a shell .
train
false
22,665
def create_reg_numbers(flow_params): _replace_register(flow_params, ovsfw_consts.REG_PORT, 'reg_port') _replace_register(flow_params, ovsfw_consts.REG_NET, 'reg_net')
[ "def", "create_reg_numbers", "(", "flow_params", ")", ":", "_replace_register", "(", "flow_params", ",", "ovsfw_consts", ".", "REG_PORT", ",", "'reg_port'", ")", "_replace_register", "(", "flow_params", ",", "ovsfw_consts", ".", "REG_NET", ",", "'reg_net'", ")" ]
replace reg_ values with defined register numbers .
train
false
22,666
def test_protocol0_regression(): brick = Linear(5, 10) brick.allocate() buf = BytesIO() dump(brick, buf, parameters=list(brick.parameters), protocol=0) try: load(buf) except TypeError: assert False
[ "def", "test_protocol0_regression", "(", ")", ":", "brick", "=", "Linear", "(", "5", ",", "10", ")", "brick", ".", "allocate", "(", ")", "buf", "=", "BytesIO", "(", ")", "dump", "(", "brick", ",", "buf", ",", "parameters", "=", "list", "(", "brick", ...
check for a regression where protocol 0 dumps fail on load .
train
false
22,668
def test_exit_on_collection_error(testdir): testdir.makepyfile(**COLLECTION_ERROR_PY_FILES) res = testdir.runpytest() assert (res.ret == 2) res.stdout.fnmatch_lines(['collected 2 items / 2 errors', '*ERROR collecting test_02_import_error.py*', '*No module named *asdfa*', '*ERROR collecting test_03_import_error.py*', '*No module named *asdfa*'])
[ "def", "test_exit_on_collection_error", "(", "testdir", ")", ":", "testdir", ".", "makepyfile", "(", "**", "COLLECTION_ERROR_PY_FILES", ")", "res", "=", "testdir", ".", "runpytest", "(", ")", "assert", "(", "res", ".", "ret", "==", "2", ")", "res", ".", "s...
verify that all collection errors are collected and no tests executed .
train
false
22,670
def _transform_result(typ, result): if issubclass(typ, bytes): return tostring(result, encoding='utf-8') elif issubclass(typ, unicode): return tostring(result, encoding='unicode') else: return result
[ "def", "_transform_result", "(", "typ", ",", "result", ")", ":", "if", "issubclass", "(", "typ", ",", "bytes", ")", ":", "return", "tostring", "(", "result", ",", "encoding", "=", "'utf-8'", ")", "elif", "issubclass", "(", "typ", ",", "unicode", ")", "...
convert the result back into the input type .
train
true
22,673
def output_list_inventory(json_output): print json.dumps(json_output)
[ "def", "output_list_inventory", "(", "json_output", ")", ":", "print", "json", ".", "dumps", "(", "json_output", ")" ]
output the --list data structure as json .
train
false
22,674
def ValidatePropertyString(name, value): ValidateStringLength(name, value, max_len=_MAX_STRING_LENGTH)
[ "def", "ValidatePropertyString", "(", "name", ",", "value", ")", ":", "ValidateStringLength", "(", "name", ",", "value", ",", "max_len", "=", "_MAX_STRING_LENGTH", ")" ]
validates the length of an indexed string property .
train
false
22,675
def get_vmvalue_from_procfs(vmkey='VmSize', pid=None): if (pid is None): pid = os.getpid() try: t = open(('/proc/%d/status' % pid)) except IOError: raise RuntimeError('/proc filesystem not supported') if (sys.platform == 'sunos5'): return _vmvalue_solaris(vmkey, pid) else: v = t.read() t.close() i = v.index((vmkey + ':')) v = v[i:].split(None, 3) if (len(v) < 3): return 0 return (int(v[1]) * _vmvalue_scale[v[2]])
[ "def", "get_vmvalue_from_procfs", "(", "vmkey", "=", "'VmSize'", ",", "pid", "=", "None", ")", ":", "if", "(", "pid", "is", "None", ")", ":", "pid", "=", "os", ".", "getpid", "(", ")", "try", ":", "t", "=", "open", "(", "(", "'/proc/%d/status'", "%...
return virtual memory value for the given pid using the /proc filesystem .
train
false
22,677
def TextAfterCursor(): return ToUnicode(vim.current.line[CurrentColumn():])
[ "def", "TextAfterCursor", "(", ")", ":", "return", "ToUnicode", "(", "vim", ".", "current", ".", "line", "[", "CurrentColumn", "(", ")", ":", "]", ")" ]
returns the text after currentcolumn .
train
false
22,678
def register_models(engine): models = (Artifact, ArtifactTag, ArtifactProperty, ArtifactBlob, ArtifactBlobLocation, ArtifactDependency) for model in models: model.metadata.create_all(engine)
[ "def", "register_models", "(", "engine", ")", ":", "models", "=", "(", "Artifact", ",", "ArtifactTag", ",", "ArtifactProperty", ",", "ArtifactBlob", ",", "ArtifactBlobLocation", ",", "ArtifactDependency", ")", "for", "model", "in", "models", ":", "model", ".", ...
register models and create metadata .
train
false
22,679
@require_http_methods(['GET']) @login_required def user_verification_status(request): course_id = request.GET.get('course_id', None) if (course_id is None): return HttpResponseBadRequest() course_key = CourseLocator.from_string(course_id) (enrollment_mode, __) = CourseEnrollment.enrollment_mode_for_user(request.user, course_key) is_verification_required = (enrollment_mode in CourseMode.VERIFIED_MODES) return JsonResponse({'is_verification_required': is_verification_required})
[ "@", "require_http_methods", "(", "[", "'GET'", "]", ")", "@", "login_required", "def", "user_verification_status", "(", "request", ")", ":", "course_id", "=", "request", ".", "GET", ".", "get", "(", "'course_id'", ",", "None", ")", "if", "(", "course_id", ...
check for user verification status .
train
false
22,680
def cp_cmp(c, d): zero = Polyn(c[2]).ring.zero c0 = lbp(c[0], zero, Num(c[2])) d0 = lbp(d[0], zero, Num(d[2])) r = lbp_cmp(c0, d0) if (r == (-1)): return (-1) if (r == 0): c1 = lbp(c[3], zero, Num(c[5])) d1 = lbp(d[3], zero, Num(d[5])) r = lbp_cmp(c1, d1) if (r == (-1)): return (-1) return 1
[ "def", "cp_cmp", "(", "c", ",", "d", ")", ":", "zero", "=", "Polyn", "(", "c", "[", "2", "]", ")", ".", "ring", ".", "zero", "c0", "=", "lbp", "(", "c", "[", "0", "]", ",", "zero", ",", "Num", "(", "c", "[", "2", "]", ")", ")", "d0", ...
compare two critical pairs c and d .
train
false
22,681
def lookup_ssh_host_config(hostname, config): return config.lookup(hostname)
[ "def", "lookup_ssh_host_config", "(", "hostname", ",", "config", ")", ":", "return", "config", ".", "lookup", "(", "hostname", ")" ]
provided only as a backward-compatible wrapper around config.lookup .
train
false
22,682
@register_canonicalize @register_specialize @gof.local_optimizer([T.Elemwise]) def local_func_inv(node): inv_pairs = ((basic.Deg2Rad, basic.Rad2Deg), (basic.Cosh, basic.ArcCosh), (basic.Tanh, basic.ArcTanh), (basic.Sinh, basic.ArcSinh), (basic.Conj, basic.Conj), (basic.Neg, basic.Neg), (basic.Inv, basic.Inv)) x = node.inputs[0] if (not isinstance(node.op, T.Elemwise)): return if ((not x.owner) or (not isinstance(x.owner.op, T.Elemwise))): return prev_op = x.owner.op.scalar_op node_op = node.op.scalar_op for inv_pair in inv_pairs: if is_inverse_pair(node_op, prev_op, inv_pair): return x.owner.inputs return
[ "@", "register_canonicalize", "@", "register_specialize", "@", "gof", ".", "local_optimizer", "(", "[", "T", ".", "Elemwise", "]", ")", "def", "local_func_inv", "(", "node", ")", ":", "inv_pairs", "=", "(", "(", "basic", ".", "Deg2Rad", ",", "basic", ".", ...
check for two consecutive operations that are functional inverses and remove them from the function graph .
train
false
22,683
def create_egg(name, resources): egg = imp.new_module(name) egg.__loader__ = MockLoader() egg._resources = resources sys.modules[name] = egg
[ "def", "create_egg", "(", "name", ",", "resources", ")", ":", "egg", "=", "imp", ".", "new_module", "(", "name", ")", "egg", ".", "__loader__", "=", "MockLoader", "(", ")", "egg", ".", "_resources", "=", "resources", "sys", ".", "modules", "[", "name",...
creates a mock egg with a list of resources .
train
false
22,684
def map_qos_spec(qos_spec, volume): if (qos_spec is None): return None qos_spec = map_dict_to_lower(qos_spec) spec = dict(policy_name=get_qos_policy_group_name(volume), max_throughput=None) if ('maxiops' in qos_spec): spec['max_throughput'] = ('%siops' % qos_spec['maxiops']) elif ('maxiopspergib' in qos_spec): spec['max_throughput'] = ('%siops' % six.text_type((int(qos_spec['maxiopspergib']) * int(volume['size'])))) elif ('maxbps' in qos_spec): spec['max_throughput'] = ('%sB/s' % qos_spec['maxbps']) elif ('maxbpspergib' in qos_spec): spec['max_throughput'] = ('%sB/s' % six.text_type((int(qos_spec['maxbpspergib']) * int(volume['size'])))) return spec
[ "def", "map_qos_spec", "(", "qos_spec", ",", "volume", ")", ":", "if", "(", "qos_spec", "is", "None", ")", ":", "return", "None", "qos_spec", "=", "map_dict_to_lower", "(", "qos_spec", ")", "spec", "=", "dict", "(", "policy_name", "=", "get_qos_policy_group_...
map cinder qos spec to limit/throughput-value as used in client api .
train
false
22,686
def breadth_first(tree, children=iter, maxdepth=(-1)):
    """Yield the nodes of *tree* in breadth-first (level) order.

    *children* maps a node to an iterable of its children; a node for which
    it raises TypeError is treated as a leaf.  Nodes at *maxdepth* are not
    expanded further (-1 means unlimited depth).
    """
    pending = deque()
    pending.append((tree, 0))
    while pending:
        node, level = pending.popleft()
        yield node
        if level == maxdepth:
            continue
        try:
            # TypeError from children() or from iterating its result marks
            # this node as a leaf.
            for kid in children(node):
                pending.append((kid, level + 1))
        except TypeError:
            pass
[ "def", "breadth_first", "(", "tree", ",", "children", "=", "iter", ",", "maxdepth", "=", "(", "-", "1", ")", ")", ":", "queue", "=", "deque", "(", "[", "(", "tree", ",", "0", ")", "]", ")", "while", "queue", ":", "(", "node", ",", "depth", ")",...
traverse the nodes of a tree in breadth-first order .
train
false
22,687
@deprecated_network
def do_dns_domains(cs, args):
    """Print a list of available DNS domains."""
    _print_domain_list(cs.dns_domains.domains())
[ "@", "deprecated_network", "def", "do_dns_domains", "(", "cs", ",", "args", ")", ":", "domains", "=", "cs", ".", "dns_domains", ".", "domains", "(", ")", "_print_domain_list", "(", "domains", ")" ]
print a list of available dns domains .
train
false
22,690
def test_no_data_with_empty_serie(Chart):
    """A chart whose only serie is empty must show the 'No data' overlay."""
    graph = Chart()
    graph.add('Serie', [])
    rendered = graph.render_pyquery()
    assert rendered('.text-overlay text').text() == 'No data'
[ "def", "test_no_data_with_empty_serie", "(", "Chart", ")", ":", "chart", "=", "Chart", "(", ")", "chart", ".", "add", "(", "'Serie'", ",", "[", "]", ")", "q", "=", "chart", ".", "render_pyquery", "(", ")", "assert", "(", "q", "(", "'.text-overlay text'",...
test that the 'No data' overlay is shown for an empty serie .
train
false
22,692
def vary_on_headers(*headers):
    """View decorator that adds *headers* to the Vary header of the response.

    Usage::

        @vary_on_headers('Cookie', 'Accept-language')
        def my_view(request): ...
    """
    def decorator(view_func):
        @wraps(view_func, assigned=available_attrs(view_func))
        def _wrapped(*args, **kwargs):
            response = view_func(*args, **kwargs)
            # Merge the declared headers into the response's Vary header.
            patch_vary_headers(response, headers)
            return response
        return _wrapped
    return decorator
[ "def", "vary_on_headers", "(", "*", "headers", ")", ":", "def", "decorator", "(", "func", ")", ":", "def", "inner_func", "(", "*", "args", ",", "**", "kwargs", ")", ":", "response", "=", "func", "(", "*", "args", ",", "**", "kwargs", ")", "patch_vary...
a view decorator that adds the specified headers to the vary header of the response .
train
false
22,693
def _get_application_url():
    """Return the uptime application URL from pillar.

    Raises CommandExecutionError when the ``uptime:application_url`` pillar
    key is missing, since the URL is required for every API call.
    """
    pillar_key = 'uptime:application_url'
    application_url = __salt__['pillar.get'](pillar_key)
    if application_url is None:
        log.error('Could not load {0} pillar'.format(pillar_key))
        msg = '{0} pillar is required for authentication'
        raise CommandExecutionError(msg.format(pillar_key))
    return application_url
[ "def", "_get_application_url", "(", ")", ":", "application_url", "=", "__salt__", "[", "'pillar.get'", "]", "(", "'uptime:application_url'", ")", "if", "(", "application_url", "is", "None", ")", ":", "log", ".", "error", "(", "'Could not load {0} pillar'", ".", ...
helper function to get application url from pillar .
train
false