id_within_dataset int64 1 55.5k | snippet stringlengths 19 14.2k | tokens listlengths 6 1.63k | nl stringlengths 6 352 | split_within_dataset stringclasses 1
value | is_duplicated bool 2
classes |
|---|---|---|---|---|---|
24,856 | def set_assignment_type(user_group, assignment_type):
course_cohort = user_group.cohort
if (is_last_random_cohort(user_group) and (course_cohort.assignment_type != assignment_type)):
raise ValueError(_('There must be one cohort to which students can automatically be assigned.'))
course_cohort.assignment_type = assignment_type
course_cohort.save()
| [
"def",
"set_assignment_type",
"(",
"user_group",
",",
"assignment_type",
")",
":",
"course_cohort",
"=",
"user_group",
".",
"cohort",
"if",
"(",
"is_last_random_cohort",
"(",
"user_group",
")",
"and",
"(",
"course_cohort",
".",
"assignment_type",
"!=",
"assignment_t... | set assignment type for cohort . | train | false |
24,857 | def latest_version(*names, **kwargs):
kwargs.pop('refresh', True)
pkgs = list_pkgs()
ret = {}
for name in names:
ret[name] = ''
cmd = 'pkg_info -q -I {0}'.format(' '.join(names))
out = __salt__['cmd.run_stdout'](cmd, python_shell=False, output_loglevel='trace')
for line in out.splitlines():
try:
(pkgname, pkgver, flavor) = __PKG_RE.match(line).groups()
except AttributeError:
continue
pkgname += ('--{0}'.format(flavor) if flavor else '')
cur = pkgs.get(pkgname, '')
if ((not cur) or salt.utils.compare_versions(ver1=cur, oper='<', ver2=pkgver)):
ret[pkgname] = pkgver
if (len(names) == 1):
return ret[names[0]]
return ret
| [
"def",
"latest_version",
"(",
"*",
"names",
",",
"**",
"kwargs",
")",
":",
"kwargs",
".",
"pop",
"(",
"'refresh'",
",",
"True",
")",
"pkgs",
"=",
"list_pkgs",
"(",
")",
"ret",
"=",
"{",
"}",
"for",
"name",
"in",
"names",
":",
"ret",
"[",
"name",
... | return the latest version of the named package available for upgrade or installation . | train | true |
24,858 | def ranks_from_sequence(seq):
return ((k, i) for (i, k) in enumerate(seq))
| [
"def",
"ranks_from_sequence",
"(",
"seq",
")",
":",
"return",
"(",
"(",
"k",
",",
"i",
")",
"for",
"(",
"i",
",",
"k",
")",
"in",
"enumerate",
"(",
"seq",
")",
")"
] | given a sequence . | train | false |
24,859 | def git_push():
app = get_app()
if (not have_git):
session.flash = GIT_MISSING
redirect(URL('site'))
form = SQLFORM.factory(Field('changelog', requires=IS_NOT_EMPTY()))
form.element('input[type=submit]')['_value'] = T('Push')
form.add_button(T('Cancel'), URL('site'))
form.process()
if form.accepted:
try:
repo = git.Repo(os.path.join(apath(r=request), app))
index = repo.index
index.add([((apath(r=request) + app) + '/*')])
new_commit = index.commit(form.vars.changelog)
origin = repo.remotes.origin
origin.push()
session.flash = T('Git repo updated with latest application changes.')
redirect(URL('site'))
except git.UnmergedEntriesError:
session.flash = T('Push failed, there are unmerged entries in the cache. Resolve merge issues manually and try again.')
redirect(URL('site'))
return dict(app=app, form=form)
| [
"def",
"git_push",
"(",
")",
":",
"app",
"=",
"get_app",
"(",
")",
"if",
"(",
"not",
"have_git",
")",
":",
"session",
".",
"flash",
"=",
"GIT_MISSING",
"redirect",
"(",
"URL",
"(",
"'site'",
")",
")",
"form",
"=",
"SQLFORM",
".",
"factory",
"(",
"F... | git push handler . | train | false |
24,860 | @contextfunction
def help_link(context, link=''):
request = context['request']
response_format = 'html'
if ('response_format' in context):
response_format = context['response_format']
if (not link):
url = request.path
match = re.match('/(?P<url>\\w+)(/)?.*', url)
if match:
link = (match.group('url') + '/')
link = (getattr(settings, 'HARDTREE_HELP_LINK_PREFIX', '/help/') + link)
return Markup(render_to_string('core/tags/help_link_block', {'link': link}, context_instance=RequestContext(request), response_format=response_format))
| [
"@",
"contextfunction",
"def",
"help_link",
"(",
"context",
",",
"link",
"=",
"''",
")",
":",
"request",
"=",
"context",
"[",
"'request'",
"]",
"response_format",
"=",
"'html'",
"if",
"(",
"'response_format'",
"in",
"context",
")",
":",
"response_format",
"=... | block with objects permissions . | train | false |
24,861 | def pbkdf2_hex(data, salt, iterations=1000, keylen=24, hashfunc=None):
return pbkdf2_bin(data, salt, iterations, keylen, hashfunc).encode('hex')
| [
"def",
"pbkdf2_hex",
"(",
"data",
",",
"salt",
",",
"iterations",
"=",
"1000",
",",
"keylen",
"=",
"24",
",",
"hashfunc",
"=",
"None",
")",
":",
"return",
"pbkdf2_bin",
"(",
"data",
",",
"salt",
",",
"iterations",
",",
"keylen",
",",
"hashfunc",
")",
... | like :func:pbkdf2_bin . | train | false |
24,863 | def book_info(td):
title = td.find('div', 'thumbheader').a.text
by_author = td.find('div', 'AuthorName').text
authors = [x.strip() for x in re.sub('^By ', '', by_author).split(',')]
isbn_link = td.find('div', 'thumbheader').a.get('href')
isbn = re.match('/product/(.*)\\.do', isbn_link).groups()[0]
date = td.find('span', 'directorydate').text.strip()
return {'title': title, 'authors': authors, 'isbn': isbn, 'date': date}
| [
"def",
"book_info",
"(",
"td",
")",
":",
"title",
"=",
"td",
".",
"find",
"(",
"'div'",
",",
"'thumbheader'",
")",
".",
"a",
".",
"text",
"by_author",
"=",
"td",
".",
"find",
"(",
"'div'",
",",
"'AuthorName'",
")",
".",
"text",
"authors",
"=",
"[",... | given a beautifulsoup <td> tag representing a book . | train | false |
24,864 | def fix_s3_host(request, signature_version, region_name, default_endpoint_url='s3.amazonaws.com', **kwargs):
if ((signature_version is not botocore.UNSIGNED) and ('s3v4' in signature_version)):
return
elif (not _allowed_region(region_name)):
return
try:
switch_to_virtual_host_style(request, signature_version, default_endpoint_url)
except InvalidDNSNameError as e:
bucket_name = e.kwargs['bucket_name']
logger.debug('Not changing URI, bucket is not DNS compatible: %s', bucket_name)
| [
"def",
"fix_s3_host",
"(",
"request",
",",
"signature_version",
",",
"region_name",
",",
"default_endpoint_url",
"=",
"'s3.amazonaws.com'",
",",
"**",
"kwargs",
")",
":",
"if",
"(",
"(",
"signature_version",
"is",
"not",
"botocore",
".",
"UNSIGNED",
")",
"and",
... | this handler looks at s3 requests just before they are signed . | train | false |
24,867 | @command('(da|dv)upl\\s+(.*)')
def down_user_pls(dltype, user):
user_pls(user)
for i in g.ytpls:
down_plist(dltype, i.get('link'))
return
| [
"@",
"command",
"(",
"'(da|dv)upl\\\\s+(.*)'",
")",
"def",
"down_user_pls",
"(",
"dltype",
",",
"user",
")",
":",
"user_pls",
"(",
"user",
")",
"for",
"i",
"in",
"g",
".",
"ytpls",
":",
"down_plist",
"(",
"dltype",
",",
"i",
".",
"get",
"(",
"'link'",
... | download all user playlists . | train | false |
24,872 | def as_flattened_list(iterable):
return [e for i in iterable for e in i]
| [
"def",
"as_flattened_list",
"(",
"iterable",
")",
":",
"return",
"[",
"e",
"for",
"i",
"in",
"iterable",
"for",
"e",
"in",
"i",
"]"
] | return an iterable with one level flattened . | train | false |
24,873 | def hasExplorer():
if ((sys.platform == 'win32') or (sys.platform == 'cygwin') or (sys.platform == 'darwin')):
return True
if (sys.platform == 'linux2'):
if os.path.isfile('/usr/bin/xdg-open'):
return True
return False
| [
"def",
"hasExplorer",
"(",
")",
":",
"if",
"(",
"(",
"sys",
".",
"platform",
"==",
"'win32'",
")",
"or",
"(",
"sys",
".",
"platform",
"==",
"'cygwin'",
")",
"or",
"(",
"sys",
".",
"platform",
"==",
"'darwin'",
")",
")",
":",
"return",
"True",
"if",... | check if we have support for opening file dialog windows . | train | false |
24,874 | def test_even(value):
return ((value % 2) == 0)
| [
"def",
"test_even",
"(",
"value",
")",
":",
"return",
"(",
"(",
"value",
"%",
"2",
")",
"==",
"0",
")"
] | return true if the variable is even . | train | false |
24,876 | def rot_axis2(theta):
ct = cos(theta)
st = sin(theta)
lil = ((ct, 0, (- st)), (0, 1, 0), (st, 0, ct))
return Matrix(lil)
| [
"def",
"rot_axis2",
"(",
"theta",
")",
":",
"ct",
"=",
"cos",
"(",
"theta",
")",
"st",
"=",
"sin",
"(",
"theta",
")",
"lil",
"=",
"(",
"(",
"ct",
",",
"0",
",",
"(",
"-",
"st",
")",
")",
",",
"(",
"0",
",",
"1",
",",
"0",
")",
",",
"(",... | returns a rotation matrix for a rotation of theta about the 2-axis . | train | false |
24,877 | def _create_modulename(cdef_sources, source, sys_version):
key = '\x00'.join([sys_version[:3], source, cdef_sources])
key = key.encode('utf-8')
k1 = hex((binascii.crc32(key[0::2]) & 4294967295))
k1 = k1.lstrip('0x').rstrip('L')
k2 = hex((binascii.crc32(key[1::2]) & 4294967295))
k2 = k2.lstrip('0').rstrip('L')
return '_Cryptography_cffi_{0}{1}'.format(k1, k2)
| [
"def",
"_create_modulename",
"(",
"cdef_sources",
",",
"source",
",",
"sys_version",
")",
":",
"key",
"=",
"'\\x00'",
".",
"join",
"(",
"[",
"sys_version",
"[",
":",
"3",
"]",
",",
"source",
",",
"cdef_sources",
"]",
")",
"key",
"=",
"key",
".",
"encod... | cffi creates a modulename internally that incorporates the cffi version . | train | true |
24,878 | def convert_installed():
return (_get_convert_command() is not None)
| [
"def",
"convert_installed",
"(",
")",
":",
"return",
"(",
"_get_convert_command",
"(",
")",
"is",
"not",
"None",
")"
] | return whether imagemagick/convert is available in the path . | train | false |
24,880 | def path_identity(path):
return path
| [
"def",
"path_identity",
"(",
"path",
")",
":",
"return",
"path"
] | used as a dummy path converter where no conversion necessary . | train | false |
24,881 | def find_root(resource):
for location in lineage(resource):
if (location.__parent__ is None):
resource = location
break
return resource
| [
"def",
"find_root",
"(",
"resource",
")",
":",
"for",
"location",
"in",
"lineage",
"(",
"resource",
")",
":",
"if",
"(",
"location",
".",
"__parent__",
"is",
"None",
")",
":",
"resource",
"=",
"location",
"break",
"return",
"resource"
] | find the top level namespace . | train | false |
24,883 | def dc2dict(exml):
vals = {}
regions = []
keywords = []
mdata = CswRecord(exml)
identifier = mdata.identifier
vals['language'] = mdata.language
vals['spatial_representation_type'] = mdata.type
keywords = mdata.subjects
regions = [mdata.spatial]
vals['temporal_extent_start'] = mdata.temporal
vals['temporal_extent_end'] = mdata.temporal
vals['constraints_other'] = mdata.license
vals['date'] = sniff_date(mdata.modified)
vals['title'] = mdata.title
vals['abstract'] = mdata.abstract
return [identifier, vals, regions, keywords]
| [
"def",
"dc2dict",
"(",
"exml",
")",
":",
"vals",
"=",
"{",
"}",
"regions",
"=",
"[",
"]",
"keywords",
"=",
"[",
"]",
"mdata",
"=",
"CswRecord",
"(",
"exml",
")",
"identifier",
"=",
"mdata",
".",
"identifier",
"vals",
"[",
"'language'",
"]",
"=",
"m... | generate dict of properties from csw:record . | train | false |
24,884 | @require_context
def volume_attachment_get(context, attachment_id):
return _attachment_get(context, attachment_id)
| [
"@",
"require_context",
"def",
"volume_attachment_get",
"(",
"context",
",",
"attachment_id",
")",
":",
"return",
"_attachment_get",
"(",
"context",
",",
"attachment_id",
")"
] | fetch the specified attachment record . | train | false |
24,885 | def find_between(a, b, string):
p = ('%s(.*?)%s' % (a, b))
p = re.compile(p, (re.DOTALL | re.I))
return [m for m in p.findall(string)]
| [
"def",
"find_between",
"(",
"a",
",",
"b",
",",
"string",
")",
":",
"p",
"=",
"(",
"'%s(.*?)%s'",
"%",
"(",
"a",
",",
"b",
")",
")",
"p",
"=",
"re",
".",
"compile",
"(",
"p",
",",
"(",
"re",
".",
"DOTALL",
"|",
"re",
".",
"I",
")",
")",
"... | returns a list of substrings between a and b in the given string . | train | false |
24,886 | def GCD(x, y):
x = abs(x)
y = abs(y)
while (x > 0):
(x, y) = ((y % x), x)
return y
| [
"def",
"GCD",
"(",
"x",
",",
"y",
")",
":",
"x",
"=",
"abs",
"(",
"x",
")",
"y",
"=",
"abs",
"(",
"y",
")",
"while",
"(",
"x",
">",
"0",
")",
":",
"(",
"x",
",",
"y",
")",
"=",
"(",
"(",
"y",
"%",
"x",
")",
",",
"x",
")",
"return",
... | gcd: long return the gcd of x and y . | train | false |
24,887 | def save_business_segment(shop, business_segment):
configuration.set(shop, SAMPLE_BUSINESS_SEGMENT_KEY, business_segment)
| [
"def",
"save_business_segment",
"(",
"shop",
",",
"business_segment",
")",
":",
"configuration",
".",
"set",
"(",
"shop",
",",
"SAMPLE_BUSINESS_SEGMENT_KEY",
",",
"business_segment",
")"
] | save the business segment identifier . | train | false |
24,888 | def _newInstance(cls, state):
instance = _createBlank(cls)
def defaultSetter(state):
instance.__dict__ = state
setter = getattr(instance, '__setstate__', defaultSetter)
setter(state)
return instance
| [
"def",
"_newInstance",
"(",
"cls",
",",
"state",
")",
":",
"instance",
"=",
"_createBlank",
"(",
"cls",
")",
"def",
"defaultSetter",
"(",
"state",
")",
":",
"instance",
".",
"__dict__",
"=",
"state",
"setter",
"=",
"getattr",
"(",
"instance",
",",
"'__se... | make a new instance of a class without calling its __init__ method . | train | false |
24,893 | def _setupPath(environ):
if ('TRIAL_PYTHONPATH' in environ):
sys.path[:] = environ['TRIAL_PYTHONPATH'].split(os.pathsep)
| [
"def",
"_setupPath",
"(",
"environ",
")",
":",
"if",
"(",
"'TRIAL_PYTHONPATH'",
"in",
"environ",
")",
":",
"sys",
".",
"path",
"[",
":",
"]",
"=",
"environ",
"[",
"'TRIAL_PYTHONPATH'",
"]",
".",
"split",
"(",
"os",
".",
"pathsep",
")"
] | override c{sys . | train | false |
24,894 | def format_hostname(hostname):
if (has_ipv6 and (re.match(u'\\d+.\\d+.\\d+.\\d+', hostname) is not None)):
hostname = (u'::ffff:%s' % hostname)
return hostname
| [
"def",
"format_hostname",
"(",
"hostname",
")",
":",
"if",
"(",
"has_ipv6",
"and",
"(",
"re",
".",
"match",
"(",
"u'\\\\d+.\\\\d+.\\\\d+.\\\\d+'",
",",
"hostname",
")",
"is",
"not",
"None",
")",
")",
":",
"hostname",
"=",
"(",
"u'::ffff:%s'",
"%",
"hostnam... | format hostname for display . | train | false |
24,897 | def execl(file, *args):
execv(file, args)
| [
"def",
"execl",
"(",
"file",
",",
"*",
"args",
")",
":",
"execv",
"(",
"file",
",",
"args",
")"
] | execl execute the executable file with argument list args . | train | false |
24,898 | def read_console_output_character(x=0, y=0, fd=1, buf=None, bufsize=1024, raw=False):
hcon = STDHANDLES[fd]
if (buf is None):
if raw:
buf = ctypes.c_char_p((' ' * bufsize))
else:
buf = ctypes.c_wchar_p((' ' * bufsize))
coord = COORD(x, y)
n = DWORD()
if raw:
ReadConsoleOutputCharacterA(hcon, buf, bufsize, coord, byref(n))
else:
ReadConsoleOutputCharacterW(hcon, buf, bufsize, coord, byref(n))
return buf.value[:n.value]
| [
"def",
"read_console_output_character",
"(",
"x",
"=",
"0",
",",
"y",
"=",
"0",
",",
"fd",
"=",
"1",
",",
"buf",
"=",
"None",
",",
"bufsize",
"=",
"1024",
",",
"raw",
"=",
"False",
")",
":",
"hcon",
"=",
"STDHANDLES",
"[",
"fd",
"]",
"if",
"(",
... | reads chracters from the console buffer . | train | false |
24,899 | def check_whitelist_blacklist(value, whitelist=None, blacklist=None):
if (blacklist is not None):
if (not hasattr(blacklist, '__iter__')):
blacklist = [blacklist]
try:
for expr in blacklist:
if expr_match(value, expr):
return False
except TypeError:
log.error('Non-iterable blacklist {0}'.format(blacklist))
if whitelist:
if (not hasattr(whitelist, '__iter__')):
whitelist = [whitelist]
try:
for expr in whitelist:
if expr_match(value, expr):
return True
except TypeError:
log.error('Non-iterable whitelist {0}'.format(whitelist))
else:
return True
return False
| [
"def",
"check_whitelist_blacklist",
"(",
"value",
",",
"whitelist",
"=",
"None",
",",
"blacklist",
"=",
"None",
")",
":",
"if",
"(",
"blacklist",
"is",
"not",
"None",
")",
":",
"if",
"(",
"not",
"hasattr",
"(",
"blacklist",
",",
"'__iter__'",
")",
")",
... | check a whitelist and/or blacklist to see if the value matches it . | train | true |
24,900 | def _arg_olen1(dvi, delta):
return dvi._arg((delta + 1), (delta == 3))
| [
"def",
"_arg_olen1",
"(",
"dvi",
",",
"delta",
")",
":",
"return",
"dvi",
".",
"_arg",
"(",
"(",
"delta",
"+",
"1",
")",
",",
"(",
"delta",
"==",
"3",
")",
")"
] | optionally signed . | train | false |
24,902 | @pytest.mark.django_db
def test_delete_purge_user(en_tutorial_po_member_updated, member, evil_member):
_test_user_purging(en_tutorial_po_member_updated, member, evil_member, (lambda m: m.delete(purge=True)))
| [
"@",
"pytest",
".",
"mark",
".",
"django_db",
"def",
"test_delete_purge_user",
"(",
"en_tutorial_po_member_updated",
",",
"member",
",",
"evil_member",
")",
":",
"_test_user_purging",
"(",
"en_tutorial_po_member_updated",
",",
"member",
",",
"evil_member",
",",
"(",
... | test purging user using user . | train | false |
24,904 | def make_nonnegative(X, min_value=0):
min_ = safe_min(X)
if (min_ < min_value):
if issparse(X):
raise ValueError('Cannot make the data matrix nonnegative because it is sparse. Adding a value to every entry would make it no longer sparse.')
X = (X + (min_value - min_))
return X
| [
"def",
"make_nonnegative",
"(",
"X",
",",
"min_value",
"=",
"0",
")",
":",
"min_",
"=",
"safe_min",
"(",
"X",
")",
"if",
"(",
"min_",
"<",
"min_value",
")",
":",
"if",
"issparse",
"(",
"X",
")",
":",
"raise",
"ValueError",
"(",
"'Cannot make the data m... | ensure x . | train | false |
24,905 | def host(ipaddress, hostnames, use_sudo=False):
res = run_as_root(('cat /etc/hosts | egrep "^%(ipaddress)s"' % locals()))
if res.succeeded:
m = re.match(('^%(ipaddress)s (.*)' % locals()), res)
if m:
toadd = list()
hostnames = hostnames.split(' ')
inthehosts = m.group(1).split(' ')
for h in hostnames:
if (h not in inthehosts):
toadd.append(h)
if (len(toadd) > 0):
print ('ADD: %s' % toadd)
print res
hostline = ('%s %s' % (res, ' '.join(toadd)))
with hide('stdout', 'warnings'):
sed('/etc/hosts', res, hostline, use_sudo=use_sudo)
else:
hostline = ('%s %s' % (res, hostnames))
append('/etc/hosts', hostline, use_sudo=use_sudo)
| [
"def",
"host",
"(",
"ipaddress",
",",
"hostnames",
",",
"use_sudo",
"=",
"False",
")",
":",
"res",
"=",
"run_as_root",
"(",
"(",
"'cat /etc/hosts | egrep \"^%(ipaddress)s\"'",
"%",
"locals",
"(",
")",
")",
")",
"if",
"res",
".",
"succeeded",
":",
"m",
"=",... | add a ipadress and hostname(s) in /etc/hosts file example:: from fabtools import require require . | train | false |
24,906 | @handle_response_format
@treeio_login_required
def file_edit(request, file_id, response_format='html'):
file = get_object_or_404(File, pk=file_id)
if (not request.user.profile.has_permission(file, mode='w')):
return user_denied(request, message="You don't have access to this File")
if request.POST:
if ('cancel' not in request.POST):
form = FileForm(request.user.profile, None, request.POST, request.FILES, instance=file)
if form.is_valid():
file = form.save()
return HttpResponseRedirect(reverse('documents_file_view', args=[file.id]))
else:
return HttpResponseRedirect(reverse('documents_file_view', args=[file.id]))
else:
form = FileForm(request.user.profile, None, instance=file)
context = _get_default_context(request)
context.update({'form': form, 'file': file})
return render_to_response('documents/file_edit', context, context_instance=RequestContext(request), response_format=response_format)
| [
"@",
"handle_response_format",
"@",
"treeio_login_required",
"def",
"file_edit",
"(",
"request",
",",
"file_id",
",",
"response_format",
"=",
"'html'",
")",
":",
"file",
"=",
"get_object_or_404",
"(",
"File",
",",
"pk",
"=",
"file_id",
")",
"if",
"(",
"not",
... | file edit page . | train | false |
24,907 | def hashDBWrite(key, value, serialize=False):
_ = ('%s%s%s' % ((conf.url or ('%s%s' % (conf.hostname, conf.port))), key, HASHDB_MILESTONE_VALUE))
conf.hashDB.write(_, value, serialize)
| [
"def",
"hashDBWrite",
"(",
"key",
",",
"value",
",",
"serialize",
"=",
"False",
")",
":",
"_",
"=",
"(",
"'%s%s%s'",
"%",
"(",
"(",
"conf",
".",
"url",
"or",
"(",
"'%s%s'",
"%",
"(",
"conf",
".",
"hostname",
",",
"conf",
".",
"port",
")",
")",
... | helper function for writing session data to hashdb . | train | false |
24,908 | def easeInOutQuad(t, b, c, d):
t /= (d / 2)
if (t < 1):
return ((((c / 2) * t) * t) + b)
t -= 1
return ((((- c) / 2) * ((t * (t - 2)) - 1)) + b)
| [
"def",
"easeInOutQuad",
"(",
"t",
",",
"b",
",",
"c",
",",
"d",
")",
":",
"t",
"/=",
"(",
"d",
"/",
"2",
")",
"if",
"(",
"t",
"<",
"1",
")",
":",
"return",
"(",
"(",
"(",
"(",
"c",
"/",
"2",
")",
"*",
"t",
")",
"*",
"t",
")",
"+",
"... | robert penner easing function examples at: URL t = current time in frames or whatever unit b = beginning/start value c = change in value d = duration . | train | false |
24,911 | def inactive_user_view(request):
return redirect(request.GET.get('next', 'dashboard'))
| [
"def",
"inactive_user_view",
"(",
"request",
")",
":",
"return",
"redirect",
"(",
"request",
".",
"GET",
".",
"get",
"(",
"'next'",
",",
"'dashboard'",
")",
")"
] | a newly or recently registered user has completed the social auth pipeline . | train | false |
24,912 | def _get_line_index(line_or_func, lines):
if hasattr(line_or_func, '__call__'):
return line_or_func(lines)
elif line_or_func:
if (line_or_func >= 0):
return line_or_func
else:
n_lines = sum((1 for line in lines))
return (n_lines + line_or_func)
else:
return line_or_func
| [
"def",
"_get_line_index",
"(",
"line_or_func",
",",
"lines",
")",
":",
"if",
"hasattr",
"(",
"line_or_func",
",",
"'__call__'",
")",
":",
"return",
"line_or_func",
"(",
"lines",
")",
"elif",
"line_or_func",
":",
"if",
"(",
"line_or_func",
">=",
"0",
")",
"... | return the appropriate line index . | train | false |
24,915 | @pytest.mark.django_db
def test_admin_not_logged_in(client):
response = client.get(ADMIN_URL)
assert (response.status_code == 403)
| [
"@",
"pytest",
".",
"mark",
".",
"django_db",
"def",
"test_admin_not_logged_in",
"(",
"client",
")",
":",
"response",
"=",
"client",
".",
"get",
"(",
"ADMIN_URL",
")",
"assert",
"(",
"response",
".",
"status_code",
"==",
"403",
")"
] | checks logged-out users cannot access the admin site . | train | false |
24,917 | def _create_glance_client(context, netloc, use_ssl, version=None):
if (version is None):
version = CONF.glance_api_version
params = {}
if use_ssl:
scheme = 'https'
params['insecure'] = CONF.glance_api_insecure
params['ssl_compression'] = CONF.glance_api_ssl_compression
params['cacert'] = CONF.glance_ca_certificates_file
else:
scheme = 'http'
if (CONF.auth_strategy == 'keystone'):
params['token'] = context.auth_token
if (CONF.glance_request_timeout is not None):
params['timeout'] = CONF.glance_request_timeout
endpoint = ('%s://%s' % (scheme, netloc))
return glanceclient.Client(str(version), endpoint, **params)
| [
"def",
"_create_glance_client",
"(",
"context",
",",
"netloc",
",",
"use_ssl",
",",
"version",
"=",
"None",
")",
":",
"if",
"(",
"version",
"is",
"None",
")",
":",
"version",
"=",
"CONF",
".",
"glance_api_version",
"params",
"=",
"{",
"}",
"if",
"use_ssl... | instantiate a new glanceclient . | train | false |
24,920 | def test_oss_with_wrong_object():
knn = 'rnd'
oss = OneSidedSelection(random_state=RND_SEED, n_neighbors=knn)
assert_raises(ValueError, oss.fit_sample, X, Y)
| [
"def",
"test_oss_with_wrong_object",
"(",
")",
":",
"knn",
"=",
"'rnd'",
"oss",
"=",
"OneSidedSelection",
"(",
"random_state",
"=",
"RND_SEED",
",",
"n_neighbors",
"=",
"knn",
")",
"assert_raises",
"(",
"ValueError",
",",
"oss",
".",
"fit_sample",
",",
"X",
... | test if an error is raised while passing a wrong object . | train | false |
24,922 | def _rstrip_inplace(array, chars=None):
for item in np.nditer(array, flags=['zerosize_ok'], op_flags=['readwrite']):
item[...] = item.item().rstrip(chars)
| [
"def",
"_rstrip_inplace",
"(",
"array",
",",
"chars",
"=",
"None",
")",
":",
"for",
"item",
"in",
"np",
".",
"nditer",
"(",
"array",
",",
"flags",
"=",
"[",
"'zerosize_ok'",
"]",
",",
"op_flags",
"=",
"[",
"'readwrite'",
"]",
")",
":",
"item",
"[",
... | performs an in-place rstrip operation on string arrays . | train | false |
24,923 | def convert_png_binary_to_data_url(content):
if (imghdr.what(None, content) == 'png'):
return ('data:image/png;base64,%s' % urllib.quote(content.encode('base64')))
else:
raise Exception('The given string does not represent a PNG image.')
| [
"def",
"convert_png_binary_to_data_url",
"(",
"content",
")",
":",
"if",
"(",
"imghdr",
".",
"what",
"(",
"None",
",",
"content",
")",
"==",
"'png'",
")",
":",
"return",
"(",
"'data:image/png;base64,%s'",
"%",
"urllib",
".",
"quote",
"(",
"content",
".",
"... | converts a png image string to a data url . | train | false |
24,924 | def join_lines(lines_enum):
primary_line_number = None
new_line = []
for (line_number, line) in lines_enum:
if ((not line.endswith('\\')) or COMMENT_RE.match(line)):
if COMMENT_RE.match(line):
line = (' ' + line)
if new_line:
new_line.append(line)
(yield (primary_line_number, ''.join(new_line)))
new_line = []
else:
(yield (line_number, line))
else:
if (not new_line):
primary_line_number = line_number
new_line.append(line.strip('\\'))
if new_line:
(yield (primary_line_number, ''.join(new_line)))
| [
"def",
"join_lines",
"(",
"lines_enum",
")",
":",
"primary_line_number",
"=",
"None",
"new_line",
"=",
"[",
"]",
"for",
"(",
"line_number",
",",
"line",
")",
"in",
"lines_enum",
":",
"if",
"(",
"(",
"not",
"line",
".",
"endswith",
"(",
"'\\\\'",
")",
"... | joins a line ending in with the previous line . | train | true |
24,925 | def circcorrcoef(alpha, beta, axis=None, weights_alpha=None, weights_beta=None):
if (np.size(alpha, axis) != np.size(beta, axis)):
raise ValueError(u'alpha and beta must be arrays of the same size')
mu_a = circmean(alpha, axis, weights_alpha)
mu_b = circmean(beta, axis, weights_beta)
sin_a = np.sin((alpha - mu_a))
sin_b = np.sin((beta - mu_b))
rho = (np.sum((sin_a * sin_b)) / np.sqrt((np.sum((sin_a * sin_a)) * np.sum((sin_b * sin_b)))))
return rho
| [
"def",
"circcorrcoef",
"(",
"alpha",
",",
"beta",
",",
"axis",
"=",
"None",
",",
"weights_alpha",
"=",
"None",
",",
"weights_beta",
"=",
"None",
")",
":",
"if",
"(",
"np",
".",
"size",
"(",
"alpha",
",",
"axis",
")",
"!=",
"np",
".",
"size",
"(",
... | computes the circular correlation coefficient between two array of circular data . | train | false |
24,926 | def solve3DTransform(points1, points2):
import numpy.linalg
pts = []
for inp in (points1, points2):
if isinstance(inp, np.ndarray):
A = np.empty((4, 4), dtype=float)
A[:, :3] = inp[:, :3]
A[:, 3] = 1.0
else:
A = np.array([[inp[i].x(), inp[i].y(), inp[i].z(), 1] for i in range(4)])
pts.append(A)
matrix = np.zeros((4, 4))
for i in range(3):
matrix[i] = numpy.linalg.solve(pts[0], pts[1][:, i])
return matrix
| [
"def",
"solve3DTransform",
"(",
"points1",
",",
"points2",
")",
":",
"import",
"numpy",
".",
"linalg",
"pts",
"=",
"[",
"]",
"for",
"inp",
"in",
"(",
"points1",
",",
"points2",
")",
":",
"if",
"isinstance",
"(",
"inp",
",",
"np",
".",
"ndarray",
")",... | find a 3d transformation matrix that maps points1 onto points2 . | train | false |
24,929 | def get_exchange(conn):
ex = copy(event_exchange)
if (conn.transport.driver_type == u'redis'):
ex.type = u'fanout'
return ex
| [
"def",
"get_exchange",
"(",
"conn",
")",
":",
"ex",
"=",
"copy",
"(",
"event_exchange",
")",
"if",
"(",
"conn",
".",
"transport",
".",
"driver_type",
"==",
"u'redis'",
")",
":",
"ex",
".",
"type",
"=",
"u'fanout'",
"return",
"ex"
] | get exchange used for sending events . | train | false |
24,930 | def draw_rectangle(im, rect, color=(255, 0, 0)):
cv2.rectangle(im, rect[0:2], rect[2:4], color=color, thickness=1)
return im
| [
"def",
"draw_rectangle",
"(",
"im",
",",
"rect",
",",
"color",
"=",
"(",
"255",
",",
"0",
",",
"0",
")",
")",
":",
"cv2",
".",
"rectangle",
"(",
"im",
",",
"rect",
"[",
"0",
":",
"2",
"]",
",",
"rect",
"[",
"2",
":",
"4",
"]",
",",
"color",... | rect = . | train | false |
24,931 | def parse_results(fields, data):
master = []
for record in data['records']:
row = ([None] * len(fields))
for (obj, value) in record.iteritems():
if (not isinstance(value, (dict, list, tuple))):
if (obj in fields):
row[fields.index(obj)] = ensure_utf(value)
elif (isinstance(value, dict) and (obj != 'attributes')):
path = obj
_traverse_results(value, fields, row, path)
master.append(row)
return master
| [
"def",
"parse_results",
"(",
"fields",
",",
"data",
")",
":",
"master",
"=",
"[",
"]",
"for",
"record",
"in",
"data",
"[",
"'records'",
"]",
":",
"row",
"=",
"(",
"[",
"None",
"]",
"*",
"len",
"(",
"fields",
")",
")",
"for",
"(",
"obj",
",",
"v... | traverses ordered dictionary . | train | true |
24,932 | def test_extract_array_1d():
assert np.all((extract_array(np.arange(4), 3, ((-1),), fill_value=(-99)) == np.array([(-99), (-99), 0])))
assert np.all((extract_array(np.arange(4), 3, (-1), fill_value=(-99)) == np.array([(-99), (-99), 0])))
| [
"def",
"test_extract_array_1d",
"(",
")",
":",
"assert",
"np",
".",
"all",
"(",
"(",
"extract_array",
"(",
"np",
".",
"arange",
"(",
"4",
")",
",",
"3",
",",
"(",
"(",
"-",
"1",
")",
",",
")",
",",
"fill_value",
"=",
"(",
"-",
"99",
")",
")",
... | in 1d . | train | false |
24,934 | @pytest.mark.network
def test_finder_detects_latest_already_satisfied_pypi_links():
req = InstallRequirement.from_line('initools', None)
latest_version = '0.3.1'
satisfied_by = Mock(location='/path', parsed_version=parse_version(latest_version), version=latest_version)
req.satisfied_by = satisfied_by
finder = PackageFinder([], ['http://pypi.python.org/simple'], session=PipSession())
with pytest.raises(BestVersionAlreadyInstalled):
finder.find_requirement(req, True)
| [
"@",
"pytest",
".",
"mark",
".",
"network",
"def",
"test_finder_detects_latest_already_satisfied_pypi_links",
"(",
")",
":",
"req",
"=",
"InstallRequirement",
".",
"from_line",
"(",
"'initools'",
",",
"None",
")",
"latest_version",
"=",
"'0.3.1'",
"satisfied_by",
"=... | test packagefinder detects latest already satisfied using pypi links . | train | false |
24,935 | def get_inspector():
global _INSPECTOR
if _INSPECTOR:
return _INSPECTOR
else:
bind = op.get_bind()
_INSPECTOR = sa.engine.reflection.Inspector.from_engine(bind)
return _INSPECTOR
| [
"def",
"get_inspector",
"(",
")",
":",
"global",
"_INSPECTOR",
"if",
"_INSPECTOR",
":",
"return",
"_INSPECTOR",
"else",
":",
"bind",
"=",
"op",
".",
"get_bind",
"(",
")",
"_INSPECTOR",
"=",
"sa",
".",
"engine",
".",
"reflection",
".",
"Inspector",
".",
"... | reuse inspector . | train | true |
24,936 | @testing.requires_testing_data
@requires_mne
def test_snr():
tempdir = _TempDir()
inv = read_inverse_operator(fname_inv)
evoked = read_evokeds(fname_evoked, baseline=(None, 0))[0]
snr = estimate_snr(evoked, inv)[0]
orig_dir = os.getcwd()
os.chdir(tempdir)
try:
cmd = ['mne_compute_mne', '--inv', fname_inv, '--meas', fname_evoked, '--snronly', '--bmin', '-200', '--bmax', '0']
run_subprocess(cmd)
except Exception:
pass
finally:
os.chdir(orig_dir)
snr_c = np.loadtxt(op.join(tempdir, 'SNR'))[:, 1]
assert_allclose(snr, snr_c, atol=0.01, rtol=0.01)
| [
"@",
"testing",
".",
"requires_testing_data",
"@",
"requires_mne",
"def",
"test_snr",
"(",
")",
":",
"tempdir",
"=",
"_TempDir",
"(",
")",
"inv",
"=",
"read_inverse_operator",
"(",
"fname_inv",
")",
"evoked",
"=",
"read_evokeds",
"(",
"fname_evoked",
",",
"bas... | test snr calculation . | train | false |
24,937 | def ifndefs():
lines = ['#define _PYZMQ_UNDEFINED (-9999)']
for name in all_names:
if (not no_prefix(name)):
name = ('ZMQ_%s' % name)
lines.append(ifndef_t.format(name))
return dict(ZMQ_IFNDEFS='\n'.join(lines))
| [
"def",
"ifndefs",
"(",
")",
":",
"lines",
"=",
"[",
"'#define _PYZMQ_UNDEFINED (-9999)'",
"]",
"for",
"name",
"in",
"all_names",
":",
"if",
"(",
"not",
"no_prefix",
"(",
"name",
")",
")",
":",
"name",
"=",
"(",
"'ZMQ_%s'",
"%",
"name",
")",
"lines",
".... | generate #ifndef zmq_const block for zmq_constants . | train | true |
24,938 | def external_program_check(to_check=frozenset([PSQL_BIN, LZOP_BIN, PV_BIN])):
could_not_run = []
error_msgs = []
def psql_err_handler(popen):
assert (popen.returncode != 0)
error_msgs.append(textwrap.fill('Could not get a connection to the database: note that superuser access is required'))
raise EnvironmentError('INTERNAL: Had problems running psql from external_program_check')
with open(os.devnull, 'wb') as nullf:
for program in to_check:
try:
if (program is PSQL_BIN):
psql_csv_run('SELECT 1', error_handler=psql_err_handler)
else:
if (program is PV_BIN):
extra_args = ['--quiet']
else:
extra_args = []
proc = popen_sp(([program] + extra_args), stdout=nullf, stderr=nullf, stdin=subprocess.PIPE)
proc.stdin.close()
proc.wait()
except EnvironmentError:
could_not_run.append(program)
if could_not_run:
error_msgs.append(('Could not run the following programs, are they installed? ' + ', '.join(could_not_run)))
if error_msgs:
raise UserException('could not run one or more external programs WAL-E depends upon', '\n'.join(error_msgs))
return None
| [
"def",
"external_program_check",
"(",
"to_check",
"=",
"frozenset",
"(",
"[",
"PSQL_BIN",
",",
"LZOP_BIN",
",",
"PV_BIN",
"]",
")",
")",
":",
"could_not_run",
"=",
"[",
"]",
"error_msgs",
"=",
"[",
"]",
"def",
"psql_err_handler",
"(",
"popen",
")",
":",
... | validates the existence and basic working-ness of other programs implemented because it is easy to get confusing error output when one does not install a dependency because of the fork-worker model that is both necessary for throughput and makes more obscure the cause of failures . | train | true |
24,939 | def requires_files(files):
def check_files_precondition():
for file in files:
if (not os.path.exists(extras.get_extras_path(file))):
return (False, ('required file %s not found' % file))
return (True, '')
def wrapper(cls):
cls.check_precondition = _build_precondition_join_fn(cls, check_files_precondition)
return cls
return wrapper
| [
"def",
"requires_files",
"(",
"files",
")",
":",
"def",
"check_files_precondition",
"(",
")",
":",
"for",
"file",
"in",
"files",
":",
"if",
"(",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"extras",
".",
"get_extras_path",
"(",
"file",
")",
")",
")",... | decorator for creating a handler that requies files be present in the extras dir in order to run files: required files to be present for the handler to be available . | train | false |
24,940 | @then(u'we see record inserted')
def step_see_record_inserted(context):
_expect_exact(context, u'INSERT 0 1', timeout=2)
| [
"@",
"then",
"(",
"u'we see record inserted'",
")",
"def",
"step_see_record_inserted",
"(",
"context",
")",
":",
"_expect_exact",
"(",
"context",
",",
"u'INSERT 0 1'",
",",
"timeout",
"=",
"2",
")"
] | wait to see insert output . | train | false |
24,941 | def test_sample_regular_wrong_svm():
kind = 'svm'
nn_k = NearestNeighbors(n_neighbors=6)
svm = 'rnd'
smote = SMOTE(random_state=RND_SEED, kind=kind, k_neighbors=nn_k, svm_estimator=svm)
assert_raises(ValueError, smote.fit_sample, X, Y)
| [
"def",
"test_sample_regular_wrong_svm",
"(",
")",
":",
"kind",
"=",
"'svm'",
"nn_k",
"=",
"NearestNeighbors",
"(",
"n_neighbors",
"=",
"6",
")",
"svm",
"=",
"'rnd'",
"smote",
"=",
"SMOTE",
"(",
"random_state",
"=",
"RND_SEED",
",",
"kind",
"=",
"kind",
","... | test sample function with regular smote with a nn object . | train | false |
24,942 | def combine_kit_markers():
_check_mayavi_version()
from ._backend import _check_backend
_check_backend()
from ._marker_gui import CombineMarkersFrame
gui = CombineMarkersFrame()
gui.configure_traits()
return gui
| [
"def",
"combine_kit_markers",
"(",
")",
":",
"_check_mayavi_version",
"(",
")",
"from",
".",
"_backend",
"import",
"_check_backend",
"_check_backend",
"(",
")",
"from",
".",
"_marker_gui",
"import",
"CombineMarkersFrame",
"gui",
"=",
"CombineMarkersFrame",
"(",
")",... | create a new kit marker file by interpolating two marker files . | train | false |
24,943 | def _SetUpLoadedBuffer(command, filename, fix, position, watch):
if (command == u'split'):
vim.current.window.options[u'winfixheight'] = fix
if (command == u'vsplit'):
vim.current.window.options[u'winfixwidth'] = fix
if watch:
vim.current.buffer.options[u'autoread'] = True
vim.command(u"exec 'au BufEnter <buffer> :silent! checktime {0}'".format(filename))
if (position == u'end'):
vim.command(u'silent! normal G zz')
| [
"def",
"_SetUpLoadedBuffer",
"(",
"command",
",",
"filename",
",",
"fix",
",",
"position",
",",
"watch",
")",
":",
"if",
"(",
"command",
"==",
"u'split'",
")",
":",
"vim",
".",
"current",
".",
"window",
".",
"options",
"[",
"u'winfixheight'",
"]",
"=",
... | after opening a buffer . | train | false |
24,944 | def checkResponse(auth_map, password, method='GET', encrypt=None, **kwargs):
checker = AUTH_RESPONSES[auth_map['auth_scheme']]
return checker(auth_map, password, method=method, encrypt=encrypt, **kwargs)
| [
"def",
"checkResponse",
"(",
"auth_map",
",",
"password",
",",
"method",
"=",
"'GET'",
",",
"encrypt",
"=",
"None",
",",
"**",
"kwargs",
")",
":",
"checker",
"=",
"AUTH_RESPONSES",
"[",
"auth_map",
"[",
"'auth_scheme'",
"]",
"]",
"return",
"checker",
"(",
... | checkresponse compares the auth_map with the password and optionally other arguments that each implementation might need . | train | false |
24,945 | def get_module_from_file(category, field, fallback_module_name=None):
module_name = get_module_name(category, field, fallback_module_name)
rc = MODULE_CACHE.get(module_name, None)
if (rc is None):
raise CX((_('Failed to load module for %s/%s') % (category, field)))
return rc
| [
"def",
"get_module_from_file",
"(",
"category",
",",
"field",
",",
"fallback_module_name",
"=",
"None",
")",
":",
"module_name",
"=",
"get_module_name",
"(",
"category",
",",
"field",
",",
"fallback_module_name",
")",
"rc",
"=",
"MODULE_CACHE",
".",
"get",
"(",
... | get python module . | train | false |
24,948 | def _average_gradients(tower_grads):
average_grads = []
for grad_and_vars in zip(*tower_grads):
grads = []
for (g, _) in grad_and_vars:
expanded_g = tf.expand_dims(g, 0)
grads.append(expanded_g)
grad = tf.concat(0, grads)
grad = tf.reduce_mean(grad, 0)
v = grad_and_vars[0][1]
grad_and_var = (grad, v)
average_grads.append(grad_and_var)
return average_grads
| [
"def",
"_average_gradients",
"(",
"tower_grads",
")",
":",
"average_grads",
"=",
"[",
"]",
"for",
"grad_and_vars",
"in",
"zip",
"(",
"*",
"tower_grads",
")",
":",
"grads",
"=",
"[",
"]",
"for",
"(",
"g",
",",
"_",
")",
"in",
"grad_and_vars",
":",
"expa... | calculate the average gradient for each shared variable across all towers . | train | true |
24,949 | def precision(key):
_check_obsolete(key)
return (physical_constants[key][2] / physical_constants[key][0])
| [
"def",
"precision",
"(",
"key",
")",
":",
"_check_obsolete",
"(",
"key",
")",
"return",
"(",
"physical_constants",
"[",
"key",
"]",
"[",
"2",
"]",
"/",
"physical_constants",
"[",
"key",
"]",
"[",
"0",
"]",
")"
] | given a set of reference values and a set of test values . | train | false |
24,950 | def zipadd(func, zip_file, name):
with tempfile.NamedTemporaryFile('wb', delete=False) as temp_file:
func(temp_file)
temp_file.close()
zip_file.write(temp_file.name, arcname=name)
if os.path.isfile(temp_file.name):
os.remove(temp_file.name)
| [
"def",
"zipadd",
"(",
"func",
",",
"zip_file",
",",
"name",
")",
":",
"with",
"tempfile",
".",
"NamedTemporaryFile",
"(",
"'wb'",
",",
"delete",
"=",
"False",
")",
"as",
"temp_file",
":",
"func",
"(",
"temp_file",
")",
"temp_file",
".",
"close",
"(",
"... | calls a function with a file object . | train | false |
24,951 | def _NewFieldFromPb(pb):
name = _DecodeUTF8(pb.name())
val_type = pb.value().type()
value = _DecodeValue(_GetValue(pb.value()), val_type)
lang = None
if pb.value().has_language():
lang = _DecodeUTF8(pb.value().language())
if (val_type == document_pb.FieldValue.TEXT):
return TextField(name, value, lang)
elif (val_type == document_pb.FieldValue.HTML):
return HtmlField(name, value, lang)
elif (val_type == document_pb.FieldValue.ATOM):
return AtomField(name, value, lang)
elif (val_type == document_pb.FieldValue.UNTOKENIZED_PREFIX):
return UntokenizedPrefixField(name, value, lang)
elif (val_type == document_pb.FieldValue.TOKENIZED_PREFIX):
return TokenizedPrefixField(name, value, lang)
elif (val_type == document_pb.FieldValue.DATE):
return DateField(name, value)
elif (val_type == document_pb.FieldValue.NUMBER):
return NumberField(name, value)
elif (val_type == document_pb.FieldValue.GEO):
return GeoField(name, value)
return InvalidRequest(('Unknown field value type %d' % val_type))
| [
"def",
"_NewFieldFromPb",
"(",
"pb",
")",
":",
"name",
"=",
"_DecodeUTF8",
"(",
"pb",
".",
"name",
"(",
")",
")",
"val_type",
"=",
"pb",
".",
"value",
"(",
")",
".",
"type",
"(",
")",
"value",
"=",
"_DecodeValue",
"(",
"_GetValue",
"(",
"pb",
".",
... | constructs a field from a document_pb . | train | false |
24,953 | def convert_uuid_to_es_fmt(uuid_str):
uuid_base32 = encode_hex_to_base32(uuid.UUID(six.text_type(uuid_str)).hex)
es_label = uuid_base32.strip('=')
if six.PY3:
es_label = es_label.decode('ascii')
return es_label
| [
"def",
"convert_uuid_to_es_fmt",
"(",
"uuid_str",
")",
":",
"uuid_base32",
"=",
"encode_hex_to_base32",
"(",
"uuid",
".",
"UUID",
"(",
"six",
".",
"text_type",
"(",
"uuid_str",
")",
")",
".",
"hex",
")",
"es_label",
"=",
"uuid_base32",
".",
"strip",
"(",
"... | converts uuid to e-series compatible name format . | train | false |
24,954 | def test_create_default_groups(database):
assert (Group.query.count() == 0)
create_default_groups()
assert (Group.query.count() == len(group_fixture))
for (key, attributes) in group_fixture.items():
group = Group.query.filter_by(name=key).first()
for (attribute, value) in attributes.items():
assert (getattr(group, attribute) == value)
| [
"def",
"test_create_default_groups",
"(",
"database",
")",
":",
"assert",
"(",
"Group",
".",
"query",
".",
"count",
"(",
")",
"==",
"0",
")",
"create_default_groups",
"(",
")",
"assert",
"(",
"Group",
".",
"query",
".",
"count",
"(",
")",
"==",
"len",
... | test that the default groups are created correctly . | train | false |
24,955 | @then(u'we see pgcli prompt')
def step_see_prompt(context):
_expect_exact(context, u'{0}> '.format(context.conf[u'dbname']), timeout=5)
| [
"@",
"then",
"(",
"u'we see pgcli prompt'",
")",
"def",
"step_see_prompt",
"(",
"context",
")",
":",
"_expect_exact",
"(",
"context",
",",
"u'{0}> '",
".",
"format",
"(",
"context",
".",
"conf",
"[",
"u'dbname'",
"]",
")",
",",
"timeout",
"=",
"5",
")"
] | wait to see the prompt . | train | false |
24,956 | def get_using_network_time():
ret = salt.utils.mac_utils.execute_return_result('systemsetup -getusingnetworktime')
return (salt.utils.mac_utils.validate_enabled(salt.utils.mac_utils.parse_return(ret)) == 'on')
| [
"def",
"get_using_network_time",
"(",
")",
":",
"ret",
"=",
"salt",
".",
"utils",
".",
"mac_utils",
".",
"execute_return_result",
"(",
"'systemsetup -getusingnetworktime'",
")",
"return",
"(",
"salt",
".",
"utils",
".",
"mac_utils",
".",
"validate_enabled",
"(",
... | display whether network time is on or off :return: true if network time is on . | train | true |
24,957 | def _MovedAndRenamed(tool, msvs_settings_name, msbuild_tool_name, msbuild_settings_name, setting_type):
def _Translate(value, msbuild_settings):
tool_settings = msbuild_settings.setdefault(msbuild_tool_name, {})
tool_settings[msbuild_settings_name] = setting_type.ConvertToMSBuild(value)
_msvs_validators[tool.msvs_name][msvs_settings_name] = setting_type.ValidateMSVS
validator = setting_type.ValidateMSBuild
_msbuild_validators[msbuild_tool_name][msbuild_settings_name] = validator
_msvs_to_msbuild_converters[tool.msvs_name][msvs_settings_name] = _Translate
| [
"def",
"_MovedAndRenamed",
"(",
"tool",
",",
"msvs_settings_name",
",",
"msbuild_tool_name",
",",
"msbuild_settings_name",
",",
"setting_type",
")",
":",
"def",
"_Translate",
"(",
"value",
",",
"msbuild_settings",
")",
":",
"tool_settings",
"=",
"msbuild_settings",
... | defines a setting that may have moved to a new section . | train | false |
24,958 | @hook.on_start
def load_cache(db):
global ignore_cache
ignore_cache = []
for row in db.execute(table.select()):
conn = row['connection']
chan = row['channel']
mask = row['mask']
ignore_cache.append((conn, chan, mask))
| [
"@",
"hook",
".",
"on_start",
"def",
"load_cache",
"(",
"db",
")",
":",
"global",
"ignore_cache",
"ignore_cache",
"=",
"[",
"]",
"for",
"row",
"in",
"db",
".",
"execute",
"(",
"table",
".",
"select",
"(",
")",
")",
":",
"conn",
"=",
"row",
"[",
"'c... | return unpickled object . | train | false |
24,959 | def retval_section(retval_type, types):
return serialize_type(retval_type, 'this->retval', types)
| [
"def",
"retval_section",
"(",
"retval_type",
",",
"types",
")",
":",
"return",
"serialize_type",
"(",
"retval_type",
",",
"'this->retval'",
",",
"types",
")"
] | returns a serialization stetement for a return value of the given type . | train | false |
24,960 | def startpings(host, targetips):
targetips = ' '.join(targetips)
cmd = ((('while true; do for ip in %s; do ' % targetips) + (' echo -n %s "->" $ip ' % host.IP())) + ' `ping -c1 -w 1 $ip | grep packets` ; sleep 1; done; done &')
info(('*** Host %s (%s) will be pinging ips: %s\n' % (host.name, host.IP(), targetips)))
host.cmd(cmd)
| [
"def",
"startpings",
"(",
"host",
",",
"targetips",
")",
":",
"targetips",
"=",
"' '",
".",
"join",
"(",
"targetips",
")",
"cmd",
"=",
"(",
"(",
"(",
"'while true; do for ip in %s; do '",
"%",
"targetips",
")",
"+",
"(",
"' echo -n %s \"->\" $ip '",
"%",
"... | tell host to repeatedly ping targets . | train | false |
24,961 | def getWidenedLoops(loop, loopList, outsetLoop, radius):
intersectingWithinLoops = getIntersectingWithinLoops(loop, loopList, outsetLoop)
if (len(intersectingWithinLoops) < 1):
return [loop]
loopsUnified = boolean_solid.getLoopsUnion(radius, [[loop], intersectingWithinLoops])
if (len(loopsUnified) < 1):
return [loop]
return loopsUnified
| [
"def",
"getWidenedLoops",
"(",
"loop",
",",
"loopList",
",",
"outsetLoop",
",",
"radius",
")",
":",
"intersectingWithinLoops",
"=",
"getIntersectingWithinLoops",
"(",
"loop",
",",
"loopList",
",",
"outsetLoop",
")",
"if",
"(",
"len",
"(",
"intersectingWithinLoops"... | get the widened loop . | train | false |
24,963 | def validate_form_on_submit(form):
return (is_form_submitted() and form.validate())
| [
"def",
"validate_form_on_submit",
"(",
"form",
")",
":",
"return",
"(",
"is_form_submitted",
"(",
")",
"and",
"form",
".",
"validate",
"(",
")",
")"
] | if current method is put or post . | train | false |
24,964 | def truth_table(expr, variables, input=True):
variables = [sympify(v) for v in variables]
expr = sympify(expr)
if ((not isinstance(expr, BooleanFunction)) and (not is_literal(expr))):
return
table = product([0, 1], repeat=len(variables))
for term in table:
term = list(term)
value = expr.xreplace(dict(zip(variables, term)))
if input:
(yield (term, value))
else:
(yield value)
| [
"def",
"truth_table",
"(",
"expr",
",",
"variables",
",",
"input",
"=",
"True",
")",
":",
"variables",
"=",
"[",
"sympify",
"(",
"v",
")",
"for",
"v",
"in",
"variables",
"]",
"expr",
"=",
"sympify",
"(",
"expr",
")",
"if",
"(",
"(",
"not",
"isinsta... | return a generator of all possible configurations of the input variables . | train | false |
24,965 | def parsepdf(path, *args, **kwargs):
return PDF(path, *args, **kwargs).content
| [
"def",
"parsepdf",
"(",
"path",
",",
"*",
"args",
",",
"**",
"kwargs",
")",
":",
"return",
"PDF",
"(",
"path",
",",
"*",
"args",
",",
"**",
"kwargs",
")",
".",
"content"
] | returns the content as a unicode string from the given . | train | false |
24,967 | def getToothProfileRack(derivation):
addendumSide = (derivation.quarterWavelength - (derivation.addendum * derivation.tanPressure))
addendumComplex = complex(addendumSide, derivation.addendum)
dedendumSide = (derivation.quarterWavelength + (derivation.dedendum * derivation.tanPressure))
dedendumComplex = complex(dedendumSide, (- derivation.dedendum))
toothProfile = [dedendumComplex]
if (derivation.rootBevel > 0.0):
mirrorPoint = complex((derivation.wavelength - dedendumSide), (- derivation.dedendum))
toothProfile = getBevelPath(addendumComplex, derivation.rootBevel, dedendumComplex, mirrorPoint)
if (derivation.tipBevel > 0.0):
mirrorPoint = complex((- addendumComplex.real), addendumComplex.imag)
bevelPath = getBevelPath(dedendumComplex, derivation.tipBevel, addendumComplex, mirrorPoint)
bevelPath.reverse()
toothProfile += bevelPath
else:
toothProfile.append(addendumComplex)
return euclidean.getMirrorPath(getWidthMultipliedPath(toothProfile, derivation.toothWidthMultiplier))
| [
"def",
"getToothProfileRack",
"(",
"derivation",
")",
":",
"addendumSide",
"=",
"(",
"derivation",
".",
"quarterWavelength",
"-",
"(",
"derivation",
".",
"addendum",
"*",
"derivation",
".",
"tanPressure",
")",
")",
"addendumComplex",
"=",
"complex",
"(",
"addend... | get profile for one rack tooth . | train | false |
24,968 | def update_exploration(committer_id, exploration_id, change_list, commit_message, is_suggestion=False):
is_public = rights_manager.is_exploration_public(exploration_id)
if (is_public and (not commit_message)):
raise ValueError('Exploration is public so expected a commit message but received none.')
if (is_suggestion and ((not commit_message) or (not commit_message.startswith(feconf.COMMIT_MESSAGE_ACCEPTED_SUGGESTION_PREFIX)))):
raise ValueError('Invalid commit message for suggestion.')
if ((not is_suggestion) and commit_message and commit_message.startswith(feconf.COMMIT_MESSAGE_ACCEPTED_SUGGESTION_PREFIX)):
raise ValueError(("Commit messages for non-suggestions may not start with '%s'" % feconf.COMMIT_MESSAGE_ACCEPTED_SUGGESTION_PREFIX))
exploration = apply_change_list(exploration_id, change_list)
_save_exploration(committer_id, exploration, commit_message, change_list)
discard_draft(exploration_id, committer_id)
update_exploration_summary(exploration.id, committer_id)
user_services.add_edited_exploration_id(committer_id, exploration.id)
user_services.record_user_edited_an_exploration(committer_id)
if (not rights_manager.is_exploration_private(exploration.id)):
user_services.update_first_contribution_msec_if_not_set(committer_id, utils.get_current_time_in_millisecs())
| [
"def",
"update_exploration",
"(",
"committer_id",
",",
"exploration_id",
",",
"change_list",
",",
"commit_message",
",",
"is_suggestion",
"=",
"False",
")",
":",
"is_public",
"=",
"rights_manager",
".",
"is_exploration_public",
"(",
"exploration_id",
")",
"if",
"(",... | update an exploration . | train | false |
24,969 | @pytest.fixture
def tmpconfig(request):
SUBFOLDER = tempfile.mkdtemp()
CONF = UserConfig('spyder-test', defaults=DEFAULTS, version=CONF_VERSION, subfolder=SUBFOLDER, raw_mode=True)
def fin():
'\n Fixture finalizer to delete the temporary CONF element.\n '
shutil.rmtree(SUBFOLDER)
request.addfinalizer(fin)
return CONF
| [
"@",
"pytest",
".",
"fixture",
"def",
"tmpconfig",
"(",
"request",
")",
":",
"SUBFOLDER",
"=",
"tempfile",
".",
"mkdtemp",
"(",
")",
"CONF",
"=",
"UserConfig",
"(",
"'spyder-test'",
",",
"defaults",
"=",
"DEFAULTS",
",",
"version",
"=",
"CONF_VERSION",
","... | fixtures that returns a temporary conf element . | train | true |
24,970 | def addFacesGivenVertexes(triangleMesh, vertexIndexTable, vertexes):
for vertexIndex in xrange(0, len(vertexes), 3):
triangleMesh.faces.append(getFaceGivenLines(triangleMesh, vertexIndex, vertexIndexTable, vertexes))
| [
"def",
"addFacesGivenVertexes",
"(",
"triangleMesh",
",",
"vertexIndexTable",
",",
"vertexes",
")",
":",
"for",
"vertexIndex",
"in",
"xrange",
"(",
"0",
",",
"len",
"(",
"vertexes",
")",
",",
"3",
")",
":",
"triangleMesh",
".",
"faces",
".",
"append",
"(",... | add faces given stl text . | train | false |
24,972 | def handle_conversation(sock, address):
try:
while True:
handle_request(sock)
except EOFError:
print 'Client socket to {} has closed'.format(address)
except Exception as e:
print 'Client {} error: {}'.format(address, e)
finally:
sock.close()
| [
"def",
"handle_conversation",
"(",
"sock",
",",
"address",
")",
":",
"try",
":",
"while",
"True",
":",
"handle_request",
"(",
"sock",
")",
"except",
"EOFError",
":",
"print",
"'Client socket to {} has closed'",
".",
"format",
"(",
"address",
")",
"except",
"Ex... | converse with a client over sock until they are done talking . | train | false |
24,974 | def test_ipython_display_formatter():
f = get_ipython().display_formatter
catcher = []
class SelfDisplaying(object, ):
def _ipython_display_(self):
catcher.append(self)
class NotSelfDisplaying(object, ):
def __repr__(self):
return 'NotSelfDisplaying'
def _ipython_display_(self):
raise NotImplementedError
yes = SelfDisplaying()
no = NotSelfDisplaying()
(d, md) = f.format(no)
nt.assert_equal(d, {'text/plain': repr(no)})
nt.assert_equal(md, {})
nt.assert_equal(catcher, [])
(d, md) = f.format(yes)
nt.assert_equal(d, {})
nt.assert_equal(md, {})
nt.assert_equal(catcher, [yes])
| [
"def",
"test_ipython_display_formatter",
"(",
")",
":",
"f",
"=",
"get_ipython",
"(",
")",
".",
"display_formatter",
"catcher",
"=",
"[",
"]",
"class",
"SelfDisplaying",
"(",
"object",
",",
")",
":",
"def",
"_ipython_display_",
"(",
"self",
")",
":",
"catche... | objects with _ipython_display_ defined bypass other formatters . | train | false |
24,975 | def shuffle_slice(x, start, stop=None):
if (stop is None):
stop = len(x)
for i in reversed(xrange((start + 1), stop)):
j = random.randint(start, i)
(x[i], x[j]) = (x[j], x[i])
| [
"def",
"shuffle_slice",
"(",
"x",
",",
"start",
",",
"stop",
"=",
"None",
")",
":",
"if",
"(",
"stop",
"is",
"None",
")",
":",
"stop",
"=",
"len",
"(",
"x",
")",
"for",
"i",
"in",
"reversed",
"(",
"xrange",
"(",
"(",
"start",
"+",
"1",
")",
"... | given a list . | train | false |
24,976 | def show_security_group(call=None, kwargs=None):
global netconn
if (not netconn):
netconn = get_conn(NetworkManagementClient)
if (kwargs is None):
kwargs = {}
if (kwargs.get('resource_group') is None):
kwargs['resource_group'] = config.get_cloud_config_value('resource_group', {}, __opts__, search_global=True)
group = netconn.network_security_groups.get(resource_group_name=kwargs['resource_group'], network_security_group_name=kwargs['security_group'])
group_dict = make_safe(group)
def_rules = {}
for rule in group.default_security_rules:
def_rules[rule.name] = make_safe(rule)
group_dict['default_security_rules'] = def_rules
sec_rules = {}
for rule in group.security_rules:
sec_rules[rule.name] = make_safe(rule)
group_dict['security_rules'] = sec_rules
return group_dict
| [
"def",
"show_security_group",
"(",
"call",
"=",
"None",
",",
"kwargs",
"=",
"None",
")",
":",
"global",
"netconn",
"if",
"(",
"not",
"netconn",
")",
":",
"netconn",
"=",
"get_conn",
"(",
"NetworkManagementClient",
")",
"if",
"(",
"kwargs",
"is",
"None",
... | create a network security_group . | train | false |
24,977 | def get_unknown():
if (PyObject._unknown is None):
PyObject._unknown = PyObject(get_base_type('Unknown'))
return PyObject._unknown
| [
"def",
"get_unknown",
"(",
")",
":",
"if",
"(",
"PyObject",
".",
"_unknown",
"is",
"None",
")",
":",
"PyObject",
".",
"_unknown",
"=",
"PyObject",
"(",
"get_base_type",
"(",
"'Unknown'",
")",
")",
"return",
"PyObject",
".",
"_unknown"
] | return a pyobject whose type is unknown note that two unknown objects are equal . | train | false |
24,980 | def tools_rpcclient():
rpc_url = CONFIG['rpc']['url']
mobsf_subdir_tools = CONFIG['MobSF']['tools']
rpc_file_path = CONFIG['rpc']['file']
rpc_local_file = open((mobsf_subdir_tools + rpc_file_path), 'wb')
print '[*] Downloading rpc_server..'
rpc_file = urlrequest.urlopen(rpc_url)
print '[*] Saving to File {}'.format(rpc_file_path)
rpc_local_file.write(bytes(rpc_file.read()))
rpc_local_file.close()
| [
"def",
"tools_rpcclient",
"(",
")",
":",
"rpc_url",
"=",
"CONFIG",
"[",
"'rpc'",
"]",
"[",
"'url'",
"]",
"mobsf_subdir_tools",
"=",
"CONFIG",
"[",
"'MobSF'",
"]",
"[",
"'tools'",
"]",
"rpc_file_path",
"=",
"CONFIG",
"[",
"'rpc'",
"]",
"[",
"'file'",
"]",... | download and install rpc-server for mobsf . | train | false |
24,981 | def _get_modules(request):
perspective = request.user.profile.get_perspective()
modules = perspective.modules.filter(display=True).order_by('title')
if (not modules):
modules = Module.objects.filter(display=True).order_by('title')
active = None
for module in modules:
module.type = 'minor'
try:
import_name = ((module.name + '.') + settings.HARDTREE_MODULE_IDENTIFIER)
hmodule = __import__(import_name, fromlist=[str(module.name)])
urls = hmodule.URL_PATTERNS
for regexp in urls:
if re.match(regexp, request.path):
active = module
module.type = hmodule.PROPERTIES['type']
except ImportError:
pass
except AttributeError:
pass
except KeyError:
pass
return (modules, active)
| [
"def",
"_get_modules",
"(",
"request",
")",
":",
"perspective",
"=",
"request",
".",
"user",
".",
"profile",
".",
"get_perspective",
"(",
")",
"modules",
"=",
"perspective",
".",
"modules",
".",
"filter",
"(",
"display",
"=",
"True",
")",
".",
"order_by",
... | returns set of current modules and active module . | train | false |
24,982 | @then(u'the command output should contain "{text}" {count:d} times')
def step_command_output_should_contain_text_multiple_times(context, text, count):
assert (count >= 0)
expected_text = text
if (('{__WORKDIR__}' in expected_text) or ('{__CWD__}' in expected_text)):
expected_text = textutil.template_substitute(text, __WORKDIR__=posixpath_normpath(context.workdir), __CWD__=posixpath_normpath(os.getcwd()))
actual_output = context.command_result.output
with on_assert_failed_print_details(actual_output, expected_text):
textutil.assert_normtext_should_contain_multiple_times(actual_output, expected_text, count)
| [
"@",
"then",
"(",
"u'the command output should contain \"{text}\" {count:d} times'",
")",
"def",
"step_command_output_should_contain_text_multiple_times",
"(",
"context",
",",
"text",
",",
"count",
")",
":",
"assert",
"(",
"count",
">=",
"0",
")",
"expected_text",
"=",
... | example: then the command output should contain "text" 3 times . | train | false |
24,983 | def to_dict(sequences, key_function=None):
if (key_function is None):
key_function = (lambda rec: rec.id)
d = dict()
for record in sequences:
key = key_function(record)
if (key in d):
raise ValueError(("Duplicate key '%s'" % key))
d[key] = record
return d
| [
"def",
"to_dict",
"(",
"sequences",
",",
"key_function",
"=",
"None",
")",
":",
"if",
"(",
"key_function",
"is",
"None",
")",
":",
"key_function",
"=",
"(",
"lambda",
"rec",
":",
"rec",
".",
"id",
")",
"d",
"=",
"dict",
"(",
")",
"for",
"record",
"... | given something that closely resembles a dictionary . | train | false |
24,984 | def validate_start_action(action):
valid_actions = ['start', 'restart']
if (action.lower() not in valid_actions):
raise UnknownAction(action)
| [
"def",
"validate_start_action",
"(",
"action",
")",
":",
"valid_actions",
"=",
"[",
"'start'",
",",
"'restart'",
"]",
"if",
"(",
"action",
".",
"lower",
"(",
")",
"not",
"in",
"valid_actions",
")",
":",
"raise",
"UnknownAction",
"(",
"action",
")"
] | validates if the action given is a valid one - currently only start and restart are supported . | train | false |
24,985 | def _encode_uuid(name, value, dummy, opts):
uuid_representation = opts.uuid_representation
if (uuid_representation == OLD_UUID_SUBTYPE):
return ((('\x05' + name) + '\x10\x00\x00\x00\x03') + value.bytes)
elif (uuid_representation == JAVA_LEGACY):
from_uuid = value.bytes
data = (from_uuid[0:8][::(-1)] + from_uuid[8:16][::(-1)])
return ((('\x05' + name) + '\x10\x00\x00\x00\x03') + data)
elif (uuid_representation == CSHARP_LEGACY):
return ((('\x05' + name) + '\x10\x00\x00\x00\x03') + value.bytes_le)
else:
return ((('\x05' + name) + '\x10\x00\x00\x00\x04') + value.bytes)
| [
"def",
"_encode_uuid",
"(",
"name",
",",
"value",
",",
"dummy",
",",
"opts",
")",
":",
"uuid_representation",
"=",
"opts",
".",
"uuid_representation",
"if",
"(",
"uuid_representation",
"==",
"OLD_UUID_SUBTYPE",
")",
":",
"return",
"(",
"(",
"(",
"'\\x05'",
"... | encode uuid . | train | true |
24,986 | @verbose
def _mixed_norm_solver_cd(M, G, alpha, lipschitz_constant, maxit=10000, tol=1e-08, verbose=None, init=None, n_orient=1):
from sklearn.linear_model.coordinate_descent import MultiTaskLasso
(n_sensors, n_times) = M.shape
(n_sensors, n_sources) = G.shape
if (init is not None):
init = init.T
clf = MultiTaskLasso(alpha=(alpha / len(M)), tol=tol, normalize=False, fit_intercept=False, max_iter=maxit, warm_start=True)
clf.coef_ = init
clf.fit(G, M)
X = clf.coef_.T
active_set = np.any(X, axis=1)
X = X[active_set]
(gap, pobj, dobj, _) = dgap_l21(M, G, X, active_set, alpha, n_orient)
return (X, active_set, pobj)
| [
"@",
"verbose",
"def",
"_mixed_norm_solver_cd",
"(",
"M",
",",
"G",
",",
"alpha",
",",
"lipschitz_constant",
",",
"maxit",
"=",
"10000",
",",
"tol",
"=",
"1e-08",
",",
"verbose",
"=",
"None",
",",
"init",
"=",
"None",
",",
"n_orient",
"=",
"1",
")",
... | solve l21 inverse problem with coordinate descent . | train | false |
24,988 | def get_cache():
return requests.Session().cache
| [
"def",
"get_cache",
"(",
")",
":",
"return",
"requests",
".",
"Session",
"(",
")",
".",
"cache"
] | return a cache instance based on option . | train | false |
24,992 | def get_template_path_with_theme(relative_path):
relative_path = os.path.normpath(relative_path)
theme = get_current_theme()
if (not theme):
return relative_path
template_name = re.sub('^/+', '', relative_path)
template_path = (theme.template_path / template_name)
absolute_path = ((theme.path / 'templates') / template_name)
if absolute_path.exists():
return str(template_path)
else:
return relative_path
| [
"def",
"get_template_path_with_theme",
"(",
"relative_path",
")",
":",
"relative_path",
"=",
"os",
".",
"path",
".",
"normpath",
"(",
"relative_path",
")",
"theme",
"=",
"get_current_theme",
"(",
")",
"if",
"(",
"not",
"theme",
")",
":",
"return",
"relative_pa... | returns template path in current sites theme if it finds one there otherwise returns same path . | train | false |
24,993 | def DumpRoot():
path = ('LDAP://%srootDSE' % server)
rootdse = ADsGetObject(path)
for item in rootdse.Get('SupportedLDAPVersion'):
print ('%s supports ldap version %s' % (path, item))
attributes = ['CurrentTime', 'defaultNamingContext']
for attr in attributes:
val = rootdse.Get(attr)
print (' %s=%s' % (attr, val))
| [
"def",
"DumpRoot",
"(",
")",
":",
"path",
"=",
"(",
"'LDAP://%srootDSE'",
"%",
"server",
")",
"rootdse",
"=",
"ADsGetObject",
"(",
"path",
")",
"for",
"item",
"in",
"rootdse",
".",
"Get",
"(",
"'SupportedLDAPVersion'",
")",
":",
"print",
"(",
"'%s supports... | dumps the root dse . | train | false |
24,995 | @cache_permission
def can_change_dictionary(user, project):
return check_permission(user, project, 'trans.change_dictionary')
| [
"@",
"cache_permission",
"def",
"can_change_dictionary",
"(",
"user",
",",
"project",
")",
":",
"return",
"check_permission",
"(",
"user",
",",
"project",
",",
"'trans.change_dictionary'",
")"
] | checks whether user can change dictionary for given project . | train | false |
24,996 | def instart(cls, name, display_name=None, stay_alive=True):
cls._svc_name_ = name
cls._svc_display_name_ = (display_name or name)
try:
module_path = modules[cls.__module__].__file__
except AttributeError:
from sys import executable
module_path = executable
module_file = splitext(abspath(module_path))[0]
cls._svc_reg_class_ = '{0}.{1}'.format(module_file, cls.__name__)
if stay_alive:
win32api.SetConsoleCtrlHandler((lambda x: True), True)
try:
win32serviceutil.InstallService(cls._svc_reg_class_, cls._svc_name_, cls._svc_display_name_, startType=win32service.SERVICE_AUTO_START)
print('Install ok')
win32serviceutil.StartService(cls._svc_name_)
print('Start ok')
except Exception as err:
print(str(err))
| [
"def",
"instart",
"(",
"cls",
",",
"name",
",",
"display_name",
"=",
"None",
",",
"stay_alive",
"=",
"True",
")",
":",
"cls",
".",
"_svc_name_",
"=",
"name",
"cls",
".",
"_svc_display_name_",
"=",
"(",
"display_name",
"or",
"name",
")",
"try",
":",
"mo... | install and start a service cls : the class that implement the service name : service name display_name : the name displayed in the service manager stay_alive : service will stop on logout if false . | train | false |
24,997 | @js_defined('window.jQuery')
def disable_jquery_animations(page):
page.browser.execute_script('jQuery.fx.off = true;')
| [
"@",
"js_defined",
"(",
"'window.jQuery'",
")",
"def",
"disable_jquery_animations",
"(",
"page",
")",
":",
"page",
".",
"browser",
".",
"execute_script",
"(",
"'jQuery.fx.off = true;'",
")"
] | disable jquery animations on the page . | train | false |
24,998 | def jacobi_witness(x, n):
j = (jacobi(x, n) % n)
f = pow(x, (n >> 1), n)
if (j == f):
return False
return True
| [
"def",
"jacobi_witness",
"(",
"x",
",",
"n",
")",
":",
"j",
"=",
"(",
"jacobi",
"(",
"x",
",",
"n",
")",
"%",
"n",
")",
"f",
"=",
"pow",
"(",
"x",
",",
"(",
"n",
">>",
"1",
")",
",",
"n",
")",
"if",
"(",
"j",
"==",
"f",
")",
":",
"ret... | returns false if n is an euler pseudo-prime with base x . | train | false |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.