id_within_dataset int64 1 55.5k | snippet stringlengths 19 14.2k | tokens listlengths 6 1.63k | nl stringlengths 6 352 | split_within_dataset stringclasses 1 value | is_duplicated bool 2 classes |
|---|---|---|---|---|---|
50,485 | def isdevice(dev):
try:
os.stat(dev)
return str(dev)
except OSError:
raise vol.Invalid('No device found!')
| [
"def",
"isdevice",
"(",
"dev",
")",
":",
"try",
":",
"os",
".",
"stat",
"(",
"dev",
")",
"return",
"str",
"(",
"dev",
")",
"except",
"OSError",
":",
"raise",
"vol",
".",
"Invalid",
"(",
"'No device found!'",
")"
] | check if dev a real device . | train | false |
50,486 | def get_global(key):
global _global_data
if (_global_data is None):
dirname = os.path.join(os.path.dirname(__file__))
filename = os.path.join(dirname, 'global.dat')
if (not os.path.isfile(filename)):
_raise_no_data_error()
with open(filename, 'rb') as fileobj:
_global_data = pickle.load(fileobj)
return _global_data.get(key, {})
| [
"def",
"get_global",
"(",
"key",
")",
":",
"global",
"_global_data",
"if",
"(",
"_global_data",
"is",
"None",
")",
":",
"dirname",
"=",
"os",
".",
"path",
".",
"join",
"(",
"os",
".",
"path",
".",
"dirname",
"(",
"__file__",
")",
")",
"filename",
"="... | return the dictionary for the given key in the global data . | train | false |
50,487 | def csrf_view_exempt(view_func):
warnings.warn('csrf_view_exempt is deprecated. Use csrf_exempt instead.', PendingDeprecationWarning)
return csrf_exempt(view_func)
| [
"def",
"csrf_view_exempt",
"(",
"view_func",
")",
":",
"warnings",
".",
"warn",
"(",
"'csrf_view_exempt is deprecated. Use csrf_exempt instead.'",
",",
"PendingDeprecationWarning",
")",
"return",
"csrf_exempt",
"(",
"view_func",
")"
] | marks a view function as being exempt from csrf view protection . | train | false |
50,489 | def get_member_names(group):
return [r.name for r in get_members(group)]
| [
"def",
"get_member_names",
"(",
"group",
")",
":",
"return",
"[",
"r",
".",
"name",
"for",
"r",
"in",
"get_members",
"(",
"group",
")",
"]"
] | get a list of resource names of the resources in the specified group . | train | false |
50,493 | def systemInformationType6():
a = L2PseudoLength(l2pLength=11)
b = TpPd(pd=6)
c = MessageType(mesType=30)
d = CellIdentity()
e = LocalAreaId()
f = CellOptionsBCCH()
g = NccPermitted()
h = Si6RestOctets()
packet = ((((((a / b) / c) / d) / e) / f) / g)
return packet
| [
"def",
"systemInformationType6",
"(",
")",
":",
"a",
"=",
"L2PseudoLength",
"(",
"l2pLength",
"=",
"11",
")",
"b",
"=",
"TpPd",
"(",
"pd",
"=",
"6",
")",
"c",
"=",
"MessageType",
"(",
"mesType",
"=",
"30",
")",
"d",
"=",
"CellIdentity",
"(",
")",
"... | system information type 6 section 9 . | train | true |
50,494 | def test_clone_should_raise_if_vcs_not_installed(mocker, clone_dir):
mocker.patch('cookiecutter.vcs.is_vcs_installed', autospec=True, return_value=False)
repo_url = 'https://github.com/pytest-dev/cookiecutter-pytest-plugin.git'
with pytest.raises(exceptions.VCSNotInstalled):
vcs.clone(repo_url, clone_to_dir=clone_dir)
| [
"def",
"test_clone_should_raise_if_vcs_not_installed",
"(",
"mocker",
",",
"clone_dir",
")",
":",
"mocker",
".",
"patch",
"(",
"'cookiecutter.vcs.is_vcs_installed'",
",",
"autospec",
"=",
"True",
",",
"return_value",
"=",
"False",
")",
"repo_url",
"=",
"'https://githu... | in clone() . | train | false |
50,496 | def change_lists_to_sets(iterable):
if isinstance(iterable, dict):
for key in iterable:
if isinstance(iterable[key], (list, tuple)):
try:
iterable[key] = set(iterable[key])
except TypeError:
pass
elif getattr(iterable[key], '__iter__', False):
change_lists_to_sets(iterable[key])
elif isinstance(iterable, (list, tuple)):
for item in iterable:
if isinstance(item, (list, tuple)):
iterable.pop(item)
iterable.append(set(item))
elif getattr(item, '__iter__', False):
change_lists_to_sets(item)
else:
raise NotImplementedError
| [
"def",
"change_lists_to_sets",
"(",
"iterable",
")",
":",
"if",
"isinstance",
"(",
"iterable",
",",
"dict",
")",
":",
"for",
"key",
"in",
"iterable",
":",
"if",
"isinstance",
"(",
"iterable",
"[",
"key",
"]",
",",
"(",
"list",
",",
"tuple",
")",
")",
... | convert any lists or tuples in iterable into sets . | train | false |
50,497 | def eliminate_implications(expr):
return to_nnf(expr)
| [
"def",
"eliminate_implications",
"(",
"expr",
")",
":",
"return",
"to_nnf",
"(",
"expr",
")"
] | change >> . | train | false |
50,498 | def wheel():
client = salt.wheel.Wheel(__opts__)
ret = client.get_docs()
return ret
| [
"def",
"wheel",
"(",
")",
":",
"client",
"=",
"salt",
".",
"wheel",
".",
"Wheel",
"(",
"__opts__",
")",
"ret",
"=",
"client",
".",
"get_docs",
"(",
")",
"return",
"ret"
] | return all inline documentation for wheel modules cli example: . | train | true |
50,500 | def collect_members(module_to_name):
members = {}
for (module, module_name) in module_to_name.items():
all_names = getattr(module, '__all__', None)
for (name, member) in inspect.getmembers(module):
if ((inspect.isfunction(member) or inspect.isclass(member)) and (not _always_drop_symbol_re.match(name)) and ((all_names is None) or (name in all_names))):
fullname = ('%s.%s' % (module_name, name))
if (name in members):
(other_fullname, other_member) = members[name]
if (member is not other_member):
raise RuntimeError(('Short name collision between %s and %s' % (fullname, other_fullname)))
if (len(fullname) == len(other_fullname)):
raise RuntimeError(("Can't decide whether to use %s or %s for %s: both full names have length %d" % (fullname, other_fullname, name, len(fullname))))
if (len(fullname) > len(other_fullname)):
continue
members[name] = (fullname, member)
return members
| [
"def",
"collect_members",
"(",
"module_to_name",
")",
":",
"members",
"=",
"{",
"}",
"for",
"(",
"module",
",",
"module_name",
")",
"in",
"module_to_name",
".",
"items",
"(",
")",
":",
"all_names",
"=",
"getattr",
"(",
"module",
",",
"'__all__'",
",",
"N... | collect all symbols from a list of modules . | train | true |
50,502 | def TR2(rv):
def f(rv):
if (rv.func is tan):
a = rv.args[0]
return (sin(a) / cos(a))
elif (rv.func is cot):
a = rv.args[0]
return (cos(a) / sin(a))
return rv
return bottom_up(rv, f)
| [
"def",
"TR2",
"(",
"rv",
")",
":",
"def",
"f",
"(",
"rv",
")",
":",
"if",
"(",
"rv",
".",
"func",
"is",
"tan",
")",
":",
"a",
"=",
"rv",
".",
"args",
"[",
"0",
"]",
"return",
"(",
"sin",
"(",
"a",
")",
"/",
"cos",
"(",
"a",
")",
")",
... | replace tan and cot with sin/cos and cos/sin examples . | train | false |
50,503 | def is_string_like(obj):
if isinstance(obj, (str, unicode)):
return True
if ma.isMaskedArray(obj):
if ((obj.ndim == 0) and (obj.dtype.kind in 'SU')):
return True
else:
return False
try:
(obj + '')
except (TypeError, ValueError):
return False
return True
| [
"def",
"is_string_like",
"(",
"obj",
")",
":",
"if",
"isinstance",
"(",
"obj",
",",
"(",
"str",
",",
"unicode",
")",
")",
":",
"return",
"True",
"if",
"ma",
".",
"isMaskedArray",
"(",
"obj",
")",
":",
"if",
"(",
"(",
"obj",
".",
"ndim",
"==",
"0"... | return true if *obj* looks like a string . | train | false |
50,504 | def check_headers(request):
return HttpResponse(('HTTP_X_ARG_CHECK: %s' % request.META.get('HTTP_X_ARG_CHECK', 'Undefined')))
| [
"def",
"check_headers",
"(",
"request",
")",
":",
"return",
"HttpResponse",
"(",
"(",
"'HTTP_X_ARG_CHECK: %s'",
"%",
"request",
".",
"META",
".",
"get",
"(",
"'HTTP_X_ARG_CHECK'",
",",
"'Undefined'",
")",
")",
")"
] | a view that responds with value of the x-arg-check header . | train | false |
50,505 | def isPackage(module):
if (not isinstance(module, types.ModuleType)):
return False
basename = os.path.splitext(os.path.basename(module.__file__))[0]
return (basename == '__init__')
| [
"def",
"isPackage",
"(",
"module",
")",
":",
"if",
"(",
"not",
"isinstance",
"(",
"module",
",",
"types",
".",
"ModuleType",
")",
")",
":",
"return",
"False",
"basename",
"=",
"os",
".",
"path",
".",
"splitext",
"(",
"os",
".",
"path",
".",
"basename... | given an object return true if the object looks like a package . | train | false |
50,506 | def strip_self(f):
def wrapper(self, *args, **kwargs):
return f(*args, **kwargs)
return wrapper
| [
"def",
"strip_self",
"(",
"f",
")",
":",
"def",
"wrapper",
"(",
"self",
",",
"*",
"args",
",",
"**",
"kwargs",
")",
":",
"return",
"f",
"(",
"*",
"args",
",",
"**",
"kwargs",
")",
"return",
"wrapper"
] | wrapper to attach module level functions to base class . | train | false |
50,508 | def invalidate_table_constraints(func):
def _cache_clear(self, table, *args, **opts):
db_name = self._get_setting('NAME')
if (db_name in self._constraint_cache):
del self._constraint_cache[db_name]
if (db_name in self._reverse_cache):
del self._reverse_cache[db_name]
if (db_name in self._constraint_references):
del self._constraint_references[db_name]
return func(self, table, *args, **opts)
return _cache_clear
| [
"def",
"invalidate_table_constraints",
"(",
"func",
")",
":",
"def",
"_cache_clear",
"(",
"self",
",",
"table",
",",
"*",
"args",
",",
"**",
"opts",
")",
":",
"db_name",
"=",
"self",
".",
"_get_setting",
"(",
"'NAME'",
")",
"if",
"(",
"db_name",
"in",
... | for mysql we grab all table constraints simultaneously . | train | false |
50,509 | def ulp(x):
x = abs(float(x))
if (math.isnan(x) or math.isinf(x)):
return x
n = struct.unpack('<q', struct.pack('<d', x))[0]
x_next = struct.unpack('<d', struct.pack('<q', (n + 1)))[0]
if math.isinf(x_next):
x_prev = struct.unpack('<d', struct.pack('<q', (n - 1)))[0]
return (x - x_prev)
else:
return (x_next - x)
| [
"def",
"ulp",
"(",
"x",
")",
":",
"x",
"=",
"abs",
"(",
"float",
"(",
"x",
")",
")",
"if",
"(",
"math",
".",
"isnan",
"(",
"x",
")",
"or",
"math",
".",
"isinf",
"(",
"x",
")",
")",
":",
"return",
"x",
"n",
"=",
"struct",
".",
"unpack",
"(... | return the value of the least significant bit of a float x . | train | false |
50,510 | def _retry_on_port_collision(reason, add, cleanup):
reason.trap(AddressInUse)
(ip, port) = reason.value.address
used_by = _find_process_name(port)
ADDRESS_IN_USE(ip=ip, port=port, name=used_by).write()
d = cleanup()
d.addCallback((lambda ignored: add()))
return d
| [
"def",
"_retry_on_port_collision",
"(",
"reason",
",",
"add",
",",
"cleanup",
")",
":",
"reason",
".",
"trap",
"(",
"AddressInUse",
")",
"(",
"ip",
",",
"port",
")",
"=",
"reason",
".",
"value",
".",
"address",
"used_by",
"=",
"_find_process_name",
"(",
... | cleanup and re-add a container if it failed to start because of a port collision . | train | false |
50,511 | def set_access_token(access_token):
global _access_token
_access_token = access_token
| [
"def",
"set_access_token",
"(",
"access_token",
")",
":",
"global",
"_access_token",
"_access_token",
"=",
"access_token"
] | set the shared access token to authenticate with cloud storage . | train | false |
50,512 | def validate_vector_shape(name, shape, nrows, nobs):
ndim = len(shape)
if (ndim not in [1, 2]):
raise ValueError(('Invalid value for %s vector. Requires a 1- or 2-dimensional array, got %d dimensions' % (name, ndim)))
if (not (shape[0] == nrows)):
raise ValueError(('Invalid dimensions for %s vector: requires %d rows, got %d' % (name, nrows, shape[0])))
if ((nobs is None) and (not ((ndim == 1) or (shape[(-1)] == 1)))):
raise ValueError(('Invalid dimensions for %s vector: time-varying vectors cannot be given unless `nobs` is specified (implicitly when a dataset is bound or else set explicity)' % name))
if ((ndim == 2) and (not (shape[1] in [1, nobs]))):
raise ValueError(('Invalid dimensions for time-varying %s vector. Requires shape (*,%d), got %s' % (name, nobs, str(shape))))
| [
"def",
"validate_vector_shape",
"(",
"name",
",",
"shape",
",",
"nrows",
",",
"nobs",
")",
":",
"ndim",
"=",
"len",
"(",
"shape",
")",
"if",
"(",
"ndim",
"not",
"in",
"[",
"1",
",",
"2",
"]",
")",
":",
"raise",
"ValueError",
"(",
"(",
"'Invalid val... | validate the shape of a possibly time-varying vector . | train | false |
50,514 | def _check_stats(conn):
stats = conn.get_stats()
if (not stats):
raise CommandExecutionError('memcached server is down or does not exist')
return stats
| [
"def",
"_check_stats",
"(",
"conn",
")",
":",
"stats",
"=",
"conn",
".",
"get_stats",
"(",
")",
"if",
"(",
"not",
"stats",
")",
":",
"raise",
"CommandExecutionError",
"(",
"'memcached server is down or does not exist'",
")",
"return",
"stats"
] | helper function to check the stats data passed into it . | train | false |
50,515 | def test_simple_upload():
group = worker.WalTransferGroup(FakeWalUploader())
seg = FakeWalSegment((('1' * 8) * 3), explicit=True)
group.start(seg)
group.join()
assert success(seg)
| [
"def",
"test_simple_upload",
"(",
")",
":",
"group",
"=",
"worker",
".",
"WalTransferGroup",
"(",
"FakeWalUploader",
"(",
")",
")",
"seg",
"=",
"FakeWalSegment",
"(",
"(",
"(",
"'1'",
"*",
"8",
")",
"*",
"3",
")",
",",
"explicit",
"=",
"True",
")",
"... | model a case where there is no concurrency while uploading . | train | false |
50,516 | @when(u'we save a named query')
def step_save_named_query(context):
context.cli.sendline(u'\\ns foo SELECT 12345')
| [
"@",
"when",
"(",
"u'we save a named query'",
")",
"def",
"step_save_named_query",
"(",
"context",
")",
":",
"context",
".",
"cli",
".",
"sendline",
"(",
"u'\\\\ns foo SELECT 12345'",
")"
] | send s command . | train | false |
50,518 | def get_resource_path(venv, package=None, resource=None):
_verify_safe_py_code(package, resource)
bin_path = _verify_virtualenv(venv)
ret = __salt__['cmd.exec_code_all'](bin_path, "import pkg_resources; print(pkg_resources.resource_filename('{0}', '{1}'))".format(package, resource))
if (ret['retcode'] != 0):
raise CommandExecutionError('{stdout}\n{stderr}'.format(**ret))
return ret['stdout']
| [
"def",
"get_resource_path",
"(",
"venv",
",",
"package",
"=",
"None",
",",
"resource",
"=",
"None",
")",
":",
"_verify_safe_py_code",
"(",
"package",
",",
"resource",
")",
"bin_path",
"=",
"_verify_virtualenv",
"(",
"venv",
")",
"ret",
"=",
"__salt__",
"[",
... | return the full path of a resource of the given module . | train | true |
50,520 | def fill_model_instance(instance, data):
for field in instance._meta.fields:
if ((not field.editable) or (field.name not in data)):
continue
field.save_form_data(instance, data[field.name])
return instance
| [
"def",
"fill_model_instance",
"(",
"instance",
",",
"data",
")",
":",
"for",
"field",
"in",
"instance",
".",
"_meta",
".",
"fields",
":",
"if",
"(",
"(",
"not",
"field",
".",
"editable",
")",
"or",
"(",
"field",
".",
"name",
"not",
"in",
"data",
")",... | fill whatever fields possible in instance using the data dict . | train | false |
50,521 | def RequestID():
return os.environ.get(REQUEST_LOG_ID, None)
| [
"def",
"RequestID",
"(",
")",
":",
"return",
"os",
".",
"environ",
".",
"get",
"(",
"REQUEST_LOG_ID",
",",
"None",
")"
] | returns the id of the current request assigned by app engine . | train | false |
50,525 | def in6_isincluded(addr, prefix, plen):
temp = inet_pton(socket.AF_INET6, addr)
pref = in6_cidr2mask(plen)
zero = inet_pton(socket.AF_INET6, prefix)
return (zero == in6_and(temp, pref))
| [
"def",
"in6_isincluded",
"(",
"addr",
",",
"prefix",
",",
"plen",
")",
":",
"temp",
"=",
"inet_pton",
"(",
"socket",
".",
"AF_INET6",
",",
"addr",
")",
"pref",
"=",
"in6_cidr2mask",
"(",
"plen",
")",
"zero",
"=",
"inet_pton",
"(",
"socket",
".",
"AF_IN... | returns true when addr belongs to prefix/plen . | train | true |
50,526 | def getCourseNameFromId(courseid):
if (courseid == 1):
return ''
else:
q = (db.courses.id == courseid)
course_name = db(q).select()[0].course_name
return course_name
| [
"def",
"getCourseNameFromId",
"(",
"courseid",
")",
":",
"if",
"(",
"courseid",
"==",
"1",
")",
":",
"return",
"''",
"else",
":",
"q",
"=",
"(",
"db",
".",
"courses",
".",
"id",
"==",
"courseid",
")",
"course_name",
"=",
"db",
"(",
"q",
")",
".",
... | used to compute auth . | train | false |
50,527 | def url_to_s3_info(url):
parsed_url = parse_url(url)
assert (parsed_url.scheme == u's3'), (u'You can only use s3: urls (not %r)' % url)
(bucket, key) = (parsed_url.host, parsed_url.path)
return (bucket, key)
| [
"def",
"url_to_s3_info",
"(",
"url",
")",
":",
"parsed_url",
"=",
"parse_url",
"(",
"url",
")",
"assert",
"(",
"parsed_url",
".",
"scheme",
"==",
"u's3'",
")",
",",
"(",
"u'You can only use s3: urls (not %r)'",
"%",
"url",
")",
"(",
"bucket",
",",
"key",
"... | convert an s3 url to a tuple of bucket and key . | train | false |
50,529 | def p_definition_start(p):
print ("start = '%s'" % p[2])
| [
"def",
"p_definition_start",
"(",
"p",
")",
":",
"print",
"(",
"\"start = '%s'\"",
"%",
"p",
"[",
"2",
"]",
")"
] | definition : start id . | train | false |
50,530 | def create_artifact(entity_id, message_handle, endpoint_index=0):
sourceid = sha1(entity_id)
ter = ('%s%.2x%s%s' % (ARTIFACT_TYPECODE, endpoint_index, sourceid.digest(), message_handle))
return base64.b64encode(ter)
| [
"def",
"create_artifact",
"(",
"entity_id",
",",
"message_handle",
",",
"endpoint_index",
"=",
"0",
")",
":",
"sourceid",
"=",
"sha1",
"(",
"entity_id",
")",
"ter",
"=",
"(",
"'%s%.2x%s%s'",
"%",
"(",
"ARTIFACT_TYPECODE",
",",
"endpoint_index",
",",
"sourceid"... | saml_artifact := b64 typecode := byte1byte2 endpointindex := byte1byte2 remainingartifact := sourceid messagehandle sourceid := 20-byte_sequence messagehandle := 20-byte_sequence . | train | false |
50,531 | def _ver_from_ver_str(ver_str):
ver = []
for s in ver_str.split('.'):
try:
ver.append(int(s))
except ValueError:
ver.append(s)
return tuple(ver)
| [
"def",
"_ver_from_ver_str",
"(",
"ver_str",
")",
":",
"ver",
"=",
"[",
"]",
"for",
"s",
"in",
"ver_str",
".",
"split",
"(",
"'.'",
")",
":",
"try",
":",
"ver",
".",
"append",
"(",
"int",
"(",
"s",
")",
")",
"except",
"ValueError",
":",
"ver",
"."... | convert a version string to a version object as used internally for the "stdlibs" area of the database . | train | false |
50,532 | def _setDBMSAuthentication():
if (not conf.dbmsCred):
return
debugMsg = 'setting the DBMS authentication credentials'
logger.debug(debugMsg)
match = re.search('^(.+?):(.*?)$', conf.dbmsCred)
if (not match):
errMsg = 'DBMS authentication credentials value must be in format '
errMsg += 'username:password'
raise SqlmapSyntaxException(errMsg)
conf.dbmsUsername = match.group(1)
conf.dbmsPassword = match.group(2)
| [
"def",
"_setDBMSAuthentication",
"(",
")",
":",
"if",
"(",
"not",
"conf",
".",
"dbmsCred",
")",
":",
"return",
"debugMsg",
"=",
"'setting the DBMS authentication credentials'",
"logger",
".",
"debug",
"(",
"debugMsg",
")",
"match",
"=",
"re",
".",
"search",
"(... | check and set the dbms authentication credentials to run statements as another user . | train | false |
50,533 | def tuple_variable(t):
raise NotImplementedError()
| [
"def",
"tuple_variable",
"(",
"t",
")",
":",
"raise",
"NotImplementedError",
"(",
")"
] | make a tuplevariable from a tuple t of theanovariables . | train | false |
50,534 | def update_output_levels(show, hide):
if show:
for key in show.split(','):
state.output[key] = True
if hide:
for key in hide.split(','):
state.output[key] = False
| [
"def",
"update_output_levels",
"(",
"show",
",",
"hide",
")",
":",
"if",
"show",
":",
"for",
"key",
"in",
"show",
".",
"split",
"(",
"','",
")",
":",
"state",
".",
"output",
"[",
"key",
"]",
"=",
"True",
"if",
"hide",
":",
"for",
"key",
"in",
"hi... | update state . | train | false |
50,535 | def re_unescape(s):
return _re_unescape_pattern.sub(_re_unescape_replacement, s)
| [
"def",
"re_unescape",
"(",
"s",
")",
":",
"return",
"_re_unescape_pattern",
".",
"sub",
"(",
"_re_unescape_replacement",
",",
"s",
")"
] | unescape a string escaped by re . | train | false |
50,537 | @open_file(1, mode='wt')
def write_graph6(G, path, nodes=None, header=True):
path.write(generate_graph6(G, nodes=nodes, header=header))
path.write('\n')
| [
"@",
"open_file",
"(",
"1",
",",
"mode",
"=",
"'wt'",
")",
"def",
"write_graph6",
"(",
"G",
",",
"path",
",",
"nodes",
"=",
"None",
",",
"header",
"=",
"True",
")",
":",
"path",
".",
"write",
"(",
"generate_graph6",
"(",
"G",
",",
"nodes",
"=",
"... | write a simple undirected graph to path in graph6 format . | train | false |
50,538 | def c_help(client):
cmds = ('help', 'help @teleport', 'help look', 'help @tunnel', 'help @dig')
return cmds
| [
"def",
"c_help",
"(",
"client",
")",
":",
"cmds",
"=",
"(",
"'help'",
",",
"'help @teleport'",
",",
"'help look'",
",",
"'help @tunnel'",
",",
"'help @dig'",
")",
"return",
"cmds"
] | reads help files . | train | false |
50,540 | def find_changes(accounts, monitor_names, debug=True):
for account_name in accounts:
monitors = get_monitors(account_name, monitor_names, debug)
for mon in monitors:
cw = mon.watcher
(items, exception_map) = cw.slurp()
cw.find_changes(current=items, exception_map=exception_map)
cw.save()
audit_changes(accounts, monitor_names, False, debug)
db.session.close()
| [
"def",
"find_changes",
"(",
"accounts",
",",
"monitor_names",
",",
"debug",
"=",
"True",
")",
":",
"for",
"account_name",
"in",
"accounts",
":",
"monitors",
"=",
"get_monitors",
"(",
"account_name",
",",
"monitor_names",
",",
"debug",
")",
"for",
"mon",
"in"... | runs watchers . | train | false |
50,541 | def filer_folder_from_path(path):
if (path is None):
return None
folder = None
for component in six.text_type(path).split('/'):
if component:
folder = Folder.objects.get_or_create(name=component, parent=folder)[0]
return folder
| [
"def",
"filer_folder_from_path",
"(",
"path",
")",
":",
"if",
"(",
"path",
"is",
"None",
")",
":",
"return",
"None",
"folder",
"=",
"None",
"for",
"component",
"in",
"six",
".",
"text_type",
"(",
"path",
")",
".",
"split",
"(",
"'/'",
")",
":",
"if",... | split path by slashes and create a hierarchy of filer folder objects accordingly . | train | false |
50,542 | def get_user_email(user):
return user.email()
| [
"def",
"get_user_email",
"(",
"user",
")",
":",
"return",
"user",
".",
"email",
"(",
")"
] | given an user object . | train | false |
50,543 | def gf_sub_mul(f, g, h, p, K):
return gf_sub(f, gf_mul(g, h, p, K), p, K)
| [
"def",
"gf_sub_mul",
"(",
"f",
",",
"g",
",",
"h",
",",
"p",
",",
"K",
")",
":",
"return",
"gf_sub",
"(",
"f",
",",
"gf_mul",
"(",
"g",
",",
"h",
",",
"p",
",",
"K",
")",
",",
"p",
",",
"K",
")"
] | compute f - g*h where f . | train | false |
50,545 | def print_report_info(results):
print('Report Infos:')
print(('Contains Sampled Data = %s' % results.get('containsSampledData')))
print(('Kind = %s' % results.get('kind')))
print(('ID = %s' % results.get('id')))
print(('Self Link = %s' % results.get('selfLink')))
print()
| [
"def",
"print_report_info",
"(",
"results",
")",
":",
"print",
"(",
"'Report Infos:'",
")",
"print",
"(",
"(",
"'Contains Sampled Data = %s'",
"%",
"results",
".",
"get",
"(",
"'containsSampledData'",
")",
")",
")",
"print",
"(",
"(",
"'Kind = %s'... | prints general information about this report . | train | false |
50,546 | def test_lambda_list_keywords_key():
can_compile(u'(fn (x &key {foo True}) (list x foo))')
cant_compile(u'(fn (x &key {bar "baz"} &key {foo 42}) (list x bar foo))')
cant_compile(u'(fn (x &key {1 2 3 4}) (list x))')
| [
"def",
"test_lambda_list_keywords_key",
"(",
")",
":",
"can_compile",
"(",
"u'(fn (x &key {foo True}) (list x foo))'",
")",
"cant_compile",
"(",
"u'(fn (x &key {bar \"baz\"} &key {foo 42}) (list x bar foo))'",
")",
"cant_compile",
"(",
"u'(fn (x &key {1 2 3 4}) (list x))'",
")"
] | ensure we can compile functions with &key . | train | false |
50,547 | def _add_directive(block, directive, replace):
directive = nginxparser.UnspacedList(directive)
if ((len(directive) == 0) or (directive[0] == '#')):
block.append(directive)
return
location = next((index for (index, line) in enumerate(block) if (line and (line[0] == directive[0]))), None)
if replace:
if (location is None):
raise errors.MisconfigurationError('expected directive for {0} in the Nginx config but did not find it.'.format(directive[0]))
block[location] = directive
_comment_directive(block, location)
else:
directive_name = directive[0]
directive_value = directive[1]
if ((location is None) or (isinstance(directive_name, str) and (directive_name in REPEATABLE_DIRECTIVES))):
block.append(directive)
_comment_directive(block, (len(block) - 1))
elif (block[location][1] != directive_value):
raise errors.MisconfigurationError('tried to insert directive "{0}" but found conflicting "{1}".'.format(directive, block[location]))
| [
"def",
"_add_directive",
"(",
"block",
",",
"directive",
",",
"replace",
")",
":",
"directive",
"=",
"nginxparser",
".",
"UnspacedList",
"(",
"directive",
")",
"if",
"(",
"(",
"len",
"(",
"directive",
")",
"==",
"0",
")",
"or",
"(",
"directive",
"[",
"... | adds or replaces a single directive in a config block . | train | false |
50,548 | def test_dict_key_completion_invalids():
ip = get_ipython()
complete = ip.Completer.complete
ip.user_ns['no_getitem'] = None
ip.user_ns['no_keys'] = []
ip.user_ns['cant_call_keys'] = dict
ip.user_ns['empty'] = {}
ip.user_ns['d'] = {'abc': 5}
(_, matches) = complete(line_buffer="no_getitem['")
(_, matches) = complete(line_buffer="no_keys['")
(_, matches) = complete(line_buffer="cant_call_keys['")
(_, matches) = complete(line_buffer="empty['")
(_, matches) = complete(line_buffer="name_error['")
(_, matches) = complete(line_buffer="d['\\")
| [
"def",
"test_dict_key_completion_invalids",
"(",
")",
":",
"ip",
"=",
"get_ipython",
"(",
")",
"complete",
"=",
"ip",
".",
"Completer",
".",
"complete",
"ip",
".",
"user_ns",
"[",
"'no_getitem'",
"]",
"=",
"None",
"ip",
".",
"user_ns",
"[",
"'no_keys'",
"]... | smoke test cases dict key completion cant handle . | train | false |
50,549 | def zeroize(name):
ret = {'name': name, 'changes': {}, 'result': True, 'comment': ''}
ret['changes'] = __salt__['junos.zeroize']()
return ret
| [
"def",
"zeroize",
"(",
"name",
")",
":",
"ret",
"=",
"{",
"'name'",
":",
"name",
",",
"'changes'",
":",
"{",
"}",
",",
"'result'",
":",
"True",
",",
"'comment'",
":",
"''",
"}",
"ret",
"[",
"'changes'",
"]",
"=",
"__salt__",
"[",
"'junos.zeroize'",
... | resets the device to default factory settings usage: . | train | false |
50,550 | @register.filter(is_safe=True)
def markdown(value, arg=''):
import warnings
warnings.warn('The markdown filter has been deprecated', category=DeprecationWarning)
try:
import markdown
except ImportError:
if settings.DEBUG:
raise template.TemplateSyntaxError("Error in 'markdown' filter: The Python markdown library isn't installed.")
return force_text(value)
else:
markdown_vers = getattr(markdown, 'version_info', 0)
if (markdown_vers < (2, 1)):
if settings.DEBUG:
raise template.TemplateSyntaxError("Error in 'markdown' filter: Django does not support versions of the Python markdown library < 2.1.")
return force_text(value)
else:
extensions = [e for e in arg.split(',') if e]
if (extensions and (extensions[0] == 'safe')):
extensions = extensions[1:]
return mark_safe(markdown.markdown(force_text(value), extensions, safe_mode=True, enable_attributes=False))
else:
return mark_safe(markdown.markdown(force_text(value), extensions, safe_mode=False))
| [
"@",
"register",
".",
"filter",
"(",
"is_safe",
"=",
"True",
")",
"def",
"markdown",
"(",
"value",
",",
"arg",
"=",
"''",
")",
":",
"import",
"warnings",
"warnings",
".",
"warn",
"(",
"'The markdown filter has been deprecated'",
",",
"category",
"=",
"Deprec... | runs markdown over a given value . | train | true |
50,551 | def decode_jpeg(image_buffer, scope=None):
with tf.op_scope([image_buffer], scope, 'decode_jpeg'):
image = tf.image.decode_jpeg(image_buffer, channels=3)
image = tf.image.convert_image_dtype(image, dtype=tf.float32)
return image
| [
"def",
"decode_jpeg",
"(",
"image_buffer",
",",
"scope",
"=",
"None",
")",
":",
"with",
"tf",
".",
"op_scope",
"(",
"[",
"image_buffer",
"]",
",",
"scope",
",",
"'decode_jpeg'",
")",
":",
"image",
"=",
"tf",
".",
"image",
".",
"decode_jpeg",
"(",
"imag... | decode a jpeg string into one 3-d float image tensor . | train | false |
50,552 | def set_default_fetch_deadline(value):
_thread_local_settings.default_fetch_deadline = value
| [
"def",
"set_default_fetch_deadline",
"(",
"value",
")",
":",
"_thread_local_settings",
".",
"default_fetch_deadline",
"=",
"value"
] | set the default value for create_rpc()s deadline parameter . | train | false |
50,553 | def test_two_active_items():
items = [Item(QUrl(), '', active=True), Item(QUrl(), ''), Item(QUrl(), '', active=True)]
with pytest.raises(ValueError):
tabhistory.serialize(items)
| [
"def",
"test_two_active_items",
"(",
")",
":",
"items",
"=",
"[",
"Item",
"(",
"QUrl",
"(",
")",
",",
"''",
",",
"active",
"=",
"True",
")",
",",
"Item",
"(",
"QUrl",
"(",
")",
",",
"''",
")",
",",
"Item",
"(",
"QUrl",
"(",
")",
",",
"''",
",... | check tabhistory . | train | false |
50,554 | def test_rename_channels():
info = read_info(raw_fname)
mapping = {'EEG 160': 'EEG060'}
assert_raises(ValueError, rename_channels, info, mapping)
mapping = {'MEG 2641': 1.0}
assert_raises(ValueError, rename_channels, info, mapping)
mapping = {'MEG 2641': 'MEG 2642'}
assert_raises(ValueError, rename_channels, info, mapping)
assert_raises(ValueError, rename_channels, info, 1.0)
info2 = deepcopy(info)
info2['bads'] = ['EEG 060', 'EOG 061']
mapping = {'EEG 060': 'EEG060', 'EOG 061': 'EOG061'}
rename_channels(info2, mapping)
assert_true((info2['chs'][374]['ch_name'] == 'EEG060'))
assert_true((info2['ch_names'][374] == 'EEG060'))
assert_true((info2['chs'][375]['ch_name'] == 'EOG061'))
assert_true((info2['ch_names'][375] == 'EOG061'))
assert_array_equal(['EEG060', 'EOG061'], info2['bads'])
info2 = deepcopy(info)
rename_channels(info2, (lambda x: x.replace(' ', '')))
assert_true((info2['chs'][373]['ch_name'] == 'EEG059'))
info2 = deepcopy(info)
info2['bads'] = ['EEG 060', 'EEG 060']
rename_channels(info2, mapping)
assert_array_equal(['EEG060', 'EEG060'], info2['bads'])
| [
"def",
"test_rename_channels",
"(",
")",
":",
"info",
"=",
"read_info",
"(",
"raw_fname",
")",
"mapping",
"=",
"{",
"'EEG 160'",
":",
"'EEG060'",
"}",
"assert_raises",
"(",
"ValueError",
",",
"rename_channels",
",",
"info",
",",
"mapping",
")",
"mapping",
"=... | test rename channels . | train | false |
50,555 | def ndd_prefix_for_region(region_code, strip_non_digits):
if (region_code is None):
return None
metadata = PhoneMetadata.metadata_for_region(region_code.upper(), None)
if (metadata is None):
return None
national_prefix = metadata.national_prefix
if ((national_prefix is None) or (len(national_prefix) == 0)):
return None
if strip_non_digits:
national_prefix = re.sub(U_TILDE, U_EMPTY_STRING, national_prefix)
return national_prefix
| [
"def",
"ndd_prefix_for_region",
"(",
"region_code",
",",
"strip_non_digits",
")",
":",
"if",
"(",
"region_code",
"is",
"None",
")",
":",
"return",
"None",
"metadata",
"=",
"PhoneMetadata",
".",
"metadata_for_region",
"(",
"region_code",
".",
"upper",
"(",
")",
... | returns the national dialling prefix for a specific region . | train | true |
50,556 | def test_run_method_should_return_success_when_command_name_not_specified():
options_mock = Mock()
args = ()
help_cmd = HelpCommand()
status = help_cmd.run(options_mock, args)
assert (status == SUCCESS)
| [
"def",
"test_run_method_should_return_success_when_command_name_not_specified",
"(",
")",
":",
"options_mock",
"=",
"Mock",
"(",
")",
"args",
"=",
"(",
")",
"help_cmd",
"=",
"HelpCommand",
"(",
")",
"status",
"=",
"help_cmd",
".",
"run",
"(",
"options_mock",
",",
... | test helpcommand . | train | false |
50,558 | def mkNonce(when=None):
salt = cryptutil.randomString(6, NONCE_CHARS)
if (when is None):
t = gmtime()
else:
t = gmtime(when)
time_str = strftime(time_fmt, t)
return (time_str + salt)
| [
"def",
"mkNonce",
"(",
"when",
"=",
"None",
")",
":",
"salt",
"=",
"cryptutil",
".",
"randomString",
"(",
"6",
",",
"NONCE_CHARS",
")",
"if",
"(",
"when",
"is",
"None",
")",
":",
"t",
"=",
"gmtime",
"(",
")",
"else",
":",
"t",
"=",
"gmtime",
"(",... | generate a nonce with the current timestamp . | train | true |
50,559 | def Run(arg_dict, oauth2_parameters=None):
arg_dict = ProcessArguments(arg_dict)
SetupLogging(arg_dict)
return _PerformBulkload(arg_dict, oauth2_parameters)
| [
"def",
"Run",
"(",
"arg_dict",
",",
"oauth2_parameters",
"=",
"None",
")",
":",
"arg_dict",
"=",
"ProcessArguments",
"(",
"arg_dict",
")",
"SetupLogging",
"(",
"arg_dict",
")",
"return",
"_PerformBulkload",
"(",
"arg_dict",
",",
"oauth2_parameters",
")"
] | standalone command line access point . | train | false |
50,560 | def SetPlatformArchContext():
CONFIG.AddContext(('Platform:%s' % platform.system().title()))
machine = platform.uname()[4]
if (machine in ['x86_64', 'AMD64', 'i686']):
if (platform.architecture()[0] == '32bit'):
arch = 'i386'
else:
arch = 'amd64'
elif (machine == 'x86'):
arch = 'i386'
else:
arch = machine
CONFIG.AddContext(('Arch:%s' % arch))
| [
"def",
"SetPlatformArchContext",
"(",
")",
":",
"CONFIG",
".",
"AddContext",
"(",
"(",
"'Platform:%s'",
"%",
"platform",
".",
"system",
"(",
")",
".",
"title",
"(",
")",
")",
")",
"machine",
"=",
"platform",
".",
"uname",
"(",
")",
"[",
"4",
"]",
"if... | add the running contexts to the config system . | train | true |
50,562 | def overrideNative():
file_name = (Directory.GetCurrentDirectory() + '\\sys.cs')
file = open(file_name, 'w')
print >>file, cs_native
file.close()
compileAssembly(file_name)
file_name = (Directory.GetCurrentDirectory() + '\\re.cs')
file = open(file_name, 'w')
print >>file, cs_native_re
file.close()
compileAssembly(file_name)
| [
"def",
"overrideNative",
"(",
")",
":",
"file_name",
"=",
"(",
"Directory",
".",
"GetCurrentDirectory",
"(",
")",
"+",
"'\\\\sys.cs'",
")",
"file",
"=",
"open",
"(",
"file_name",
",",
"'w'",
")",
"print",
">>",
"file",
",",
"cs_native",
"file",
".",
"clo... | tries to override the native ip sys module . | train | false |
50,563 | def pre_save_title(instance, raw, **kwargs):
page = instance.page
page_languages = page.get_languages()
if (not (instance.language in page_languages)):
page_languages.append(instance.language)
page.update_languages(page_languages)
if (not page.publisher_is_draft):
menu_pool.clear(page.site_id)
if (instance.pk and (not hasattr(instance, 'tmp_path'))):
instance.tmp_path = None
try:
instance.tmp_path = Title.objects.filter(pk=instance.pk).values_list('path')[0][0]
except IndexError:
pass
if (instance.has_url_overwrite and instance.path):
instance.path = instance.path.strip(' /')
else:
update_title(instance)
apphook_pre_title_checker(instance, **kwargs)
| [
"def",
"pre_save_title",
"(",
"instance",
",",
"raw",
",",
"**",
"kwargs",
")",
":",
"page",
"=",
"instance",
".",
"page",
"page_languages",
"=",
"page",
".",
"get_languages",
"(",
")",
"if",
"(",
"not",
"(",
"instance",
".",
"language",
"in",
"page_lang... | save old state to instance and setup path . | train | false |
50,564 | def groovy(registry, xml_parent, data):
root_tag = 'hudson.plugins.groovy.Groovy'
groovy = XML.SubElement(xml_parent, root_tag)
groovy.append(_groovy_common_scriptSource(data))
XML.SubElement(groovy, 'groovyName').text = str(data.get('version', '(Default)'))
XML.SubElement(groovy, 'parameters').text = str(data.get('parameters', ''))
XML.SubElement(groovy, 'scriptParameters').text = str(data.get('script-parameters', ''))
XML.SubElement(groovy, 'properties').text = str(data.get('properties', ''))
XML.SubElement(groovy, 'javaOpts').text = str(data.get('java-opts', ''))
XML.SubElement(groovy, 'classPath').text = str(data.get('class-path', ''))
| [
"def",
"groovy",
"(",
"registry",
",",
"xml_parent",
",",
"data",
")",
":",
"root_tag",
"=",
"'hudson.plugins.groovy.Groovy'",
"groovy",
"=",
"XML",
".",
"SubElement",
"(",
"xml_parent",
",",
"root_tag",
")",
"groovy",
".",
"append",
"(",
"_groovy_common_scriptS... | yaml: groovy execute a groovy script or command . | train | false |
50,565 | def construct_change_message(form, formsets, add):
change_message = []
if add:
change_message.append({'added': {}})
elif form.changed_data:
change_message.append({'changed': {'fields': form.changed_data}})
if formsets:
with translation_override(None):
for formset in formsets:
for added_object in formset.new_objects:
change_message.append({'added': {'name': force_text(added_object._meta.verbose_name), 'object': force_text(added_object)}})
for (changed_object, changed_fields) in formset.changed_objects:
change_message.append({'changed': {'name': force_text(changed_object._meta.verbose_name), 'object': force_text(changed_object), 'fields': changed_fields}})
for deleted_object in formset.deleted_objects:
change_message.append({'deleted': {'name': force_text(deleted_object._meta.verbose_name), 'object': force_text(deleted_object)}})
return change_message
| [
"def",
"construct_change_message",
"(",
"form",
",",
"formsets",
",",
"add",
")",
":",
"change_message",
"=",
"[",
"]",
"if",
"add",
":",
"change_message",
".",
"append",
"(",
"{",
"'added'",
":",
"{",
"}",
"}",
")",
"elif",
"form",
".",
"changed_data",
... | construct a json structure describing changes from a changed object . | train | false |
50,566 | def parseChangeXML(raw_xml):
data = dict()
try:
doc = xml.dom.minidom.parseString(raw_xml)
except ExpatError:
print('\nError: Got an empty response with an empty changeset.\n')
raise
log_entry = doc.getElementsByTagName('logentry')[0]
data['revision'] = log_entry.getAttribute('revision')
data['author'] = ''.join([t.data for t in log_entry.getElementsByTagName('author')[0].childNodes])
data['comments'] = ''.join([t.data for t in log_entry.getElementsByTagName('msg')[0].childNodes])
pathlist = log_entry.getElementsByTagName('paths')[0]
paths = []
for path in pathlist.getElementsByTagName('path'):
paths.append(''.join([t.data for t in path.childNodes]))
data['paths'] = paths
return data
| [
"def",
"parseChangeXML",
"(",
"raw_xml",
")",
":",
"data",
"=",
"dict",
"(",
")",
"try",
":",
"doc",
"=",
"xml",
".",
"dom",
".",
"minidom",
".",
"parseString",
"(",
"raw_xml",
")",
"except",
"ExpatError",
":",
"print",
"(",
"'\\nError: Got an empty respon... | parse the raw xml and return a dict with key pairs set . | train | false |
50,567 | def _api_key_patch_replace(conn, apiKey, path, value):
response = conn.update_api_key(apiKey=apiKey, patchOperations=[{'op': 'replace', 'path': path, 'value': value}])
return response
| [
"def",
"_api_key_patch_replace",
"(",
"conn",
",",
"apiKey",
",",
"path",
",",
"value",
")",
":",
"response",
"=",
"conn",
".",
"update_api_key",
"(",
"apiKey",
"=",
"apiKey",
",",
"patchOperations",
"=",
"[",
"{",
"'op'",
":",
"'replace'",
",",
"'path'",
... | the replace patch operation on an apikey resource . | train | true |
50,568 | def f_1():
return __doc__
| [
"def",
"f_1",
"(",
")",
":",
"return",
"__doc__"
] | f 1 doc . | train | false |
50,569 | def _parse_focus(arg_str):
split_str = arg_str.split('|', 1)
if (len(split_str) == 1):
return {'id': split_str[0].rstrip(), 'annotation': ''}
else:
return {'id': split_str[0].rstrip(), 'annotation': split_str[1].lstrip()}
| [
"def",
"_parse_focus",
"(",
"arg_str",
")",
":",
"split_str",
"=",
"arg_str",
".",
"split",
"(",
"'|'",
",",
"1",
")",
"if",
"(",
"len",
"(",
"split_str",
")",
"==",
"1",
")",
":",
"return",
"{",
"'id'",
":",
"split_str",
"[",
"0",
"]",
".",
"rst... | returns id and annotation after splitting input string . | train | false |
50,572 | def _check_epochs_input(epochs, y, picks=None):
if (y is None):
y = epochs.events[:, 2]
elif isinstance(y, list):
y = np.array(y)
X = epochs.get_data()
if (picks is None):
picks = _pick_data_channels(epochs.info, with_ref_meg=False)
if isinstance(picks, (list, np.ndarray)):
picks = np.array(picks, dtype=np.int)
else:
raise ValueError('picks must be a list or a numpy.ndarray of int')
X = X[:, picks, :]
assert (X.shape[0] == y.shape[0])
return (X, y, picks)
| [
"def",
"_check_epochs_input",
"(",
"epochs",
",",
"y",
",",
"picks",
"=",
"None",
")",
":",
"if",
"(",
"y",
"is",
"None",
")",
":",
"y",
"=",
"epochs",
".",
"events",
"[",
":",
",",
"2",
"]",
"elif",
"isinstance",
"(",
"y",
",",
"list",
")",
":... | aux function of generalizationacrosstime . | train | false |
50,573 | def counter():
lock = compat.threading.Lock()
counter = itertools.count(1)
def _next():
lock.acquire()
try:
return next(counter)
finally:
lock.release()
return _next
| [
"def",
"counter",
"(",
")",
":",
"lock",
"=",
"compat",
".",
"threading",
".",
"Lock",
"(",
")",
"counter",
"=",
"itertools",
".",
"count",
"(",
"1",
")",
"def",
"_next",
"(",
")",
":",
"lock",
".",
"acquire",
"(",
")",
"try",
":",
"return",
"nex... | every time you reload . | train | false |
50,574 | def put_async(models, **kwargs):
(models, multiple) = datastore.NormalizeAndTypeCheck(models, Model)
entities = [model._populate_internal_entity() for model in models]
def extra_hook(keys):
if multiple:
return keys
assert (len(keys) == 1)
return keys[0]
return datastore.PutAsync(entities, extra_hook=extra_hook, **kwargs)
| [
"def",
"put_async",
"(",
"models",
",",
"**",
"kwargs",
")",
":",
"(",
"models",
",",
"multiple",
")",
"=",
"datastore",
".",
"NormalizeAndTypeCheck",
"(",
"models",
",",
"Model",
")",
"entities",
"=",
"[",
"model",
".",
"_populate_internal_entity",
"(",
"... | asynchronously store one or more model instances . | train | false |
50,575 | def getNS(s, count=1):
ns = []
c = 0
for i in range(count):
(l,) = struct.unpack('!L', s[c:(c + 4)])
ns.append(s[(c + 4):((4 + l) + c)])
c += (4 + l)
return (tuple(ns) + (s[c:],))
| [
"def",
"getNS",
"(",
"s",
",",
"count",
"=",
"1",
")",
":",
"ns",
"=",
"[",
"]",
"c",
"=",
"0",
"for",
"i",
"in",
"range",
"(",
"count",
")",
":",
"(",
"l",
",",
")",
"=",
"struct",
".",
"unpack",
"(",
"'!L'",
",",
"s",
"[",
"c",
":",
"... | get net string . | train | false |
50,576 | def plot_10_by_10_images(images):
fig = plt.figure()
images = [image[3:25, 3:25] for image in images]
for x in range(10):
for y in range(10):
ax = fig.add_subplot(10, 10, ((10 * y) + x))
ax.matshow(images[((10 * y) + x)], cmap=matplotlib.cm.binary)
plt.xticks(np.array([]))
plt.yticks(np.array([]))
plt.show()
| [
"def",
"plot_10_by_10_images",
"(",
"images",
")",
":",
"fig",
"=",
"plt",
".",
"figure",
"(",
")",
"images",
"=",
"[",
"image",
"[",
"3",
":",
"25",
",",
"3",
":",
"25",
"]",
"for",
"image",
"in",
"images",
"]",
"for",
"x",
"in",
"range",
"(",
... | plot 100 mnist images in a 10 by 10 table . | train | false |
50,577 | def cmd_status(args):
if (len(args) == 0):
mpstate.status.show(sys.stdout, pattern=None)
else:
for pattern in args:
mpstate.status.show(sys.stdout, pattern=pattern)
| [
"def",
"cmd_status",
"(",
"args",
")",
":",
"if",
"(",
"len",
"(",
"args",
")",
"==",
"0",
")",
":",
"mpstate",
".",
"status",
".",
"show",
"(",
"sys",
".",
"stdout",
",",
"pattern",
"=",
"None",
")",
"else",
":",
"for",
"pattern",
"in",
"args",
... | show status . | train | true |
50,579 | def parse_timestamp(timestamp, time_format=None):
if (time_format is None):
time_format = '%a, %d %b %Y %H:%M:%S %Z'
try:
return time.strptime(timestamp, time_format)
except ValueError:
try:
return time.strptime(re.sub(' \\w+$', '', timestamp), time_format.replace(' %Z', ''))
except ValueError:
LOG.error(('Failed to convert Oozie timestamp: %s' % time_format))
except Exception:
LOG.error(('Failed to convert Oozie timestamp: %s' % time_format))
return None
| [
"def",
"parse_timestamp",
"(",
"timestamp",
",",
"time_format",
"=",
"None",
")",
":",
"if",
"(",
"time_format",
"is",
"None",
")",
":",
"time_format",
"=",
"'%a, %d %b %Y %H:%M:%S %Z'",
"try",
":",
"return",
"time",
".",
"strptime",
"(",
"timestamp",
",",
"... | parse_timestamp -> struct_time does not raise valueerror . | train | false |
50,580 | @pytest.fixture(autouse=True)
def remove_replay_dump(request, replay_file):
def fin_remove_replay_file():
if os.path.exists(replay_file):
os.remove(replay_file)
request.addfinalizer(fin_remove_replay_file)
| [
"@",
"pytest",
".",
"fixture",
"(",
"autouse",
"=",
"True",
")",
"def",
"remove_replay_dump",
"(",
"request",
",",
"replay_file",
")",
":",
"def",
"fin_remove_replay_file",
"(",
")",
":",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"replay_file",
")",
"... | remove the replay file created by tests . | train | false |
50,581 | def get_settings(**kwargs):
settings = DEFAULT_CONFIG.copy()
for (key, value) in kwargs.items():
settings[key] = value
return settings
| [
"def",
"get_settings",
"(",
"**",
"kwargs",
")",
":",
"settings",
"=",
"DEFAULT_CONFIG",
".",
"copy",
"(",
")",
"for",
"(",
"key",
",",
"value",
")",
"in",
"kwargs",
".",
"items",
"(",
")",
":",
"settings",
"[",
"key",
"]",
"=",
"value",
"return",
... | provide tweaked setting dictionaries for testing set keyword arguments to override specific settings . | train | false |
50,582 | def p_statement_expr():
print t[1]
| [
"def",
"p_statement_expr",
"(",
")",
":",
"print",
"t",
"[",
"1",
"]"
] | statement : expression . | train | false |
50,583 | @cleanup
def test_determinism_images():
_determinism_check(u'i', format=u'pdf')
| [
"@",
"cleanup",
"def",
"test_determinism_images",
"(",
")",
":",
"_determinism_check",
"(",
"u'i'",
",",
"format",
"=",
"u'pdf'",
")"
] | test for reproducible pdf output: figure with different images . | train | false |
50,584 | def download_files_in_parallel(urls, cache=False, show_progress=True, timeout=None):
from .console import ProgressBar
if (timeout is None):
timeout = conf.remote_timeout
if show_progress:
progress = sys.stdout
else:
progress = io.BytesIO()
if (timeout is None):
timeout = REMOTE_TIMEOUT()
combined_urls = list(set(urls))
combined_paths = ProgressBar.map(_do_download_files_in_parallel, [(x, cache) for x in combined_urls], file=progress, multiprocess=True)
paths = []
for url in urls:
paths.append(combined_paths[combined_urls.index(url)])
return paths
| [
"def",
"download_files_in_parallel",
"(",
"urls",
",",
"cache",
"=",
"False",
",",
"show_progress",
"=",
"True",
",",
"timeout",
"=",
"None",
")",
":",
"from",
".",
"console",
"import",
"ProgressBar",
"if",
"(",
"timeout",
"is",
"None",
")",
":",
"timeout"... | downloads multiple files in parallel from the given urls . | train | false |
50,585 | def rot_to_quat(rot):
rot = rot.reshape((rot.shape[:(-2)] + (9,)))
return np.apply_along_axis(_one_rot_to_quat, (-1), rot)
| [
"def",
"rot_to_quat",
"(",
"rot",
")",
":",
"rot",
"=",
"rot",
".",
"reshape",
"(",
"(",
"rot",
".",
"shape",
"[",
":",
"(",
"-",
"2",
")",
"]",
"+",
"(",
"9",
",",
")",
")",
")",
"return",
"np",
".",
"apply_along_axis",
"(",
"_one_rot_to_quat",
... | convert a set of rotations to quaternions . | train | false |
50,586 | def xml_root_close():
return '</resource>'
| [
"def",
"xml_root_close",
"(",
")",
":",
"return",
"'</resource>'"
] | returns the closing tag of the xml root node . | train | false |
50,587 | def extract_thumbnail_number(text):
pattr = re.compile('^\\s*#\\s*sphinx_gallery_thumbnail_number\\s*=\\s*([0-9]+)\\s*$', flags=re.MULTILINE)
match = pattr.search(text)
if (match is None):
thumbnail_number = 1
else:
thumbnail_number = int(match.groups()[0])
return thumbnail_number
| [
"def",
"extract_thumbnail_number",
"(",
"text",
")",
":",
"pattr",
"=",
"re",
".",
"compile",
"(",
"'^\\\\s*#\\\\s*sphinx_gallery_thumbnail_number\\\\s*=\\\\s*([0-9]+)\\\\s*$'",
",",
"flags",
"=",
"re",
".",
"MULTILINE",
")",
"match",
"=",
"pattr",
".",
"search",
"(... | pull out the thumbnail image number specified in the docstring . | train | false |
50,588 | def _ensure_systemd(version):
try:
version = int(version)
except ValueError:
raise CommandExecutionError("Invalid version '{0}'".format(version))
try:
installed = _sd_version()
log.debug('nspawn: detected systemd {0}'.format(installed))
except (IndexError, ValueError):
raise CommandExecutionError('nspawn: Unable to get systemd version')
if (installed < version):
raise CommandExecutionError('This function requires systemd >= {0} (Detected version: {1}).'.format(version, installed))
| [
"def",
"_ensure_systemd",
"(",
"version",
")",
":",
"try",
":",
"version",
"=",
"int",
"(",
"version",
")",
"except",
"ValueError",
":",
"raise",
"CommandExecutionError",
"(",
"\"Invalid version '{0}'\"",
".",
"format",
"(",
"version",
")",
")",
"try",
":",
... | raises an exception if the systemd version is not greater than the passed version . | train | true |
50,589 | def parse_accept_language_header(string, pattern='([a-zA-Z]{1,8}(-[a-zA-Z0-9]{1,8})?)\\s*(;\\s*q\\s*=\\s*(1|0\\.[0-9]+))?'):
res = {}
if (not string):
return None
for match in re.finditer(pattern, string):
if (None == match.group(4)):
q = 1
else:
q = match.group(4)
l = match.group(1).replace('-', '_')
if (len(l) == 2):
l = l.lower()
elif (len(l) == 5):
l = ((l.split('_')[0].lower() + '_') + l.split('_')[1].upper())
else:
l = None
if l:
res[l] = int((100 * float(q)))
return res
| [
"def",
"parse_accept_language_header",
"(",
"string",
",",
"pattern",
"=",
"'([a-zA-Z]{1,8}(-[a-zA-Z0-9]{1,8})?)\\\\s*(;\\\\s*q\\\\s*=\\\\s*(1|0\\\\.[0-9]+))?'",
")",
":",
"res",
"=",
"{",
"}",
"if",
"(",
"not",
"string",
")",
":",
"return",
"None",
"for",
"match",
"i... | parse a dict from an accept-language header string example input: en-us . | train | false |
50,590 | @requires_application()
def test_reactive_draw():
with TestingCanvas() as c:
rpolygon = visuals.RegularPolygon(center=[50, 50, 0.0], radius=20, sides=8, color='yellow', parent=c.scene)
rpolygon.center = [70, 40, 0.0]
assert_image_approved(c.render(), 'visuals/reactive_regular_polygon1.png')
rpolygon.radius = 25
assert_image_approved(c.render(), 'visuals/reactive_regular_polygon2.png')
rpolygon.color = 'red'
assert_image_approved(c.render(), 'visuals/reactive_regular_polygon3.png')
rpolygon.border_color = 'yellow'
assert_image_approved(c.render(), 'visuals/reactive_regular_polygon4.png')
rpolygon.sides = 6
assert_image_approved(c.render(), 'visuals/reactive_regular_polygon5.png')
| [
"@",
"requires_application",
"(",
")",
"def",
"test_reactive_draw",
"(",
")",
":",
"with",
"TestingCanvas",
"(",
")",
"as",
"c",
":",
"rpolygon",
"=",
"visuals",
".",
"RegularPolygon",
"(",
"center",
"=",
"[",
"50",
",",
"50",
",",
"0.0",
"]",
",",
"ra... | test reactive regular polygon attributes . | train | false |
50,591 | def update_sorcery(module):
changed = False
if module.check_mode:
if ((not module.params['name']) and (not module.params['update_cache'])):
module.exit_json(changed=True, msg='would have updated Sorcery')
else:
sorcery_ver = get_sorcery_ver(module)
cmd_sorcery = ('%s update' % SORCERY['sorcery'])
(rc, stdout, stderr) = module.run_command(cmd_sorcery)
if (rc != 0):
module.fail_json(msg=('unable to update Sorcery: ' + stdout))
if (sorcery_ver != get_sorcery_ver(module)):
changed = True
if ((not module.params['name']) and (not module.params['update_cache'])):
module.exit_json(changed=changed, msg='successfully updated Sorcery')
| [
"def",
"update_sorcery",
"(",
"module",
")",
":",
"changed",
"=",
"False",
"if",
"module",
".",
"check_mode",
":",
"if",
"(",
"(",
"not",
"module",
".",
"params",
"[",
"'name'",
"]",
")",
"and",
"(",
"not",
"module",
".",
"params",
"[",
"'update_cache'... | update sorcery scripts . | train | false |
50,592 | def recreate_token_data(context, token_data=None, expires=None, user_ref=None, project_ref=None):
new_expires = expires
project_id = None
user_id = None
domain_id = None
methods = ['password', 'token']
extras = {}
if token_data:
token = token_data['token']
domain_id = (token['domain']['id'] if ('domain' in token) else None)
project_id = (token['project']['id'] if ('project' in token) else None)
if (not new_expires):
new_expires = token.get('expires_at', token.get('expires'))
user_id = token['user']['id']
methods = token['methods']
extras = token['extras']
else:
token = None
project_id = (project_ref['id'] if project_ref else None)
user_id = user_ref['id']
token_data_helper = TokenDataHelper(context)
return token_data_helper.get_token_data(user_id, methods, extras, domain_id, project_id, new_expires, token=token)
| [
"def",
"recreate_token_data",
"(",
"context",
",",
"token_data",
"=",
"None",
",",
"expires",
"=",
"None",
",",
"user_ref",
"=",
"None",
",",
"project_ref",
"=",
"None",
")",
":",
"new_expires",
"=",
"expires",
"project_id",
"=",
"None",
"user_id",
"=",
"N... | recreate token from an existing token . | train | false |
50,593 | def xmlencode(s):
return _encre.sub((lambda m: ('&#%d;' % ord(m.group(1)))), s)
| [
"def",
"xmlencode",
"(",
"s",
")",
":",
"return",
"_encre",
".",
"sub",
"(",
"(",
"lambda",
"m",
":",
"(",
"'&#%d;'",
"%",
"ord",
"(",
"m",
".",
"group",
"(",
"1",
")",
")",
")",
")",
",",
"s",
")"
] | encode the given string for inclusion in a utf-8 xml document . | train | false |
50,595 | def test_source(qtbot, py_proc, runner):
(cmd, args) = py_proc("\n import os\n import json\n\n data = {\n 'html_file': os.environ['QUTE_HTML'],\n 'text_file': os.environ['QUTE_TEXT'],\n }\n\n with open(os.environ['QUTE_HTML'], 'r') as f:\n data['html'] = f.read()\n\n with open(os.environ['QUTE_TEXT'], 'r') as f:\n data['text'] = f.read()\n\n with open(os.environ['QUTE_FIFO'], 'w') as f:\n json.dump(data, f)\n f.write('\\n')\n ")
with qtbot.waitSignal(runner.finished, timeout=10000):
with qtbot.waitSignal(runner.got_cmd, timeout=10000) as blocker:
runner.prepare_run(cmd, *args)
runner.store_html('This is HTML')
runner.store_text('This is text')
data = blocker.args[0]
parsed = json.loads(data)
assert (parsed['text'] == 'This is text')
assert (parsed['html'] == 'This is HTML')
assert (not os.path.exists(parsed['text_file']))
assert (not os.path.exists(parsed['html_file']))
| [
"def",
"test_source",
"(",
"qtbot",
",",
"py_proc",
",",
"runner",
")",
":",
"(",
"cmd",
",",
"args",
")",
"=",
"py_proc",
"(",
"\"\\n import os\\n import json\\n\\n data = {\\n 'html_file': os.environ['QUTE_HTML'],\\n 'text_file': os.en... | make sure the page source is read and cleaned up correctly . | train | false |
50,596 | def assertIsSubdomainOf(testCase, descendant, ancestor):
testCase.assertTrue(dns._isSubdomainOf(descendant, ancestor), ('%r is not a subdomain of %r' % (descendant, ancestor)))
| [
"def",
"assertIsSubdomainOf",
"(",
"testCase",
",",
"descendant",
",",
"ancestor",
")",
":",
"testCase",
".",
"assertTrue",
"(",
"dns",
".",
"_isSubdomainOf",
"(",
"descendant",
",",
"ancestor",
")",
",",
"(",
"'%r is not a subdomain of %r'",
"%",
"(",
"descenda... | assert that c{descendant} *is* a subdomain of c{ancestor} . | train | false |
50,597 | def __int(value):
(valid, _value) = (False, value)
try:
_value = int(value)
valid = True
except ValueError:
pass
return (valid, _value, 'integer')
| [
"def",
"__int",
"(",
"value",
")",
":",
"(",
"valid",
",",
"_value",
")",
"=",
"(",
"False",
",",
"value",
")",
"try",
":",
"_value",
"=",
"int",
"(",
"value",
")",
"valid",
"=",
"True",
"except",
"ValueError",
":",
"pass",
"return",
"(",
"valid",
... | validate an integer . | train | true |
50,598 | def _get_hook():
hook = (__salt__['config.get']('mattermost.hook') or __salt__['config.get']('mattermost:hook'))
if (not hook):
raise SaltInvocationError('No Mattermost Hook found')
return hook
| [
"def",
"_get_hook",
"(",
")",
":",
"hook",
"=",
"(",
"__salt__",
"[",
"'config.get'",
"]",
"(",
"'mattermost.hook'",
")",
"or",
"__salt__",
"[",
"'config.get'",
"]",
"(",
"'mattermost:hook'",
")",
")",
"if",
"(",
"not",
"hook",
")",
":",
"raise",
"SaltIn... | retrieves and return the mattermosts configured hook :return: string: the hook string . | train | false |
50,600 | def vm_detach_nic(name, kwargs=None, call=None):
if (call != 'action'):
raise SaltCloudSystemExit('The vm_detach_nic action must be called with -a or --action.')
if (kwargs is None):
kwargs = {}
nic_id = kwargs.get('nic_id', None)
if (nic_id is None):
raise SaltCloudSystemExit("The vm_detach_nic function requires a 'nic_id' to be provided.")
(server, user, password) = _get_xml_rpc()
auth = ':'.join([user, password])
vm_id = int(get_vm_id(kwargs={'name': name}))
response = server.one.vm.detachnic(auth, vm_id, int(nic_id))
data = {'action': 'vm.detachnic', 'nic_detached': response[0], 'vm_id': response[1], 'error_code': response[2]}
return data
| [
"def",
"vm_detach_nic",
"(",
"name",
",",
"kwargs",
"=",
"None",
",",
"call",
"=",
"None",
")",
":",
"if",
"(",
"call",
"!=",
"'action'",
")",
":",
"raise",
"SaltCloudSystemExit",
"(",
"'The vm_detach_nic action must be called with -a or --action.'",
")",
"if",
... | detaches a disk from a virtual machine . | train | true |
50,603 | def unregister_unpack_format(name):
del _UNPACK_FORMATS[name]
| [
"def",
"unregister_unpack_format",
"(",
"name",
")",
":",
"del",
"_UNPACK_FORMATS",
"[",
"name",
"]"
] | removes the pack format from the registery . | train | false |
50,604 | def no_tvtk():
global _have_tvtk
return (not _have_tvtk)
| [
"def",
"no_tvtk",
"(",
")",
":",
"global",
"_have_tvtk",
"return",
"(",
"not",
"_have_tvtk",
")"
] | checks if tvtk was found . | train | false |
50,605 | def print_test(method):
def run(*args, **kw):
ts = time.time()
print (' DCTB testing function %r' % method.__name__)
method(*args, **kw)
te = time.time()
print (' DCTB [OK] in %r %2.2f sec' % (method.__name__, (te - ts)))
return run
| [
"def",
"print_test",
"(",
"method",
")",
":",
"def",
"run",
"(",
"*",
"args",
",",
"**",
"kw",
")",
":",
"ts",
"=",
"time",
".",
"time",
"(",
")",
"print",
"(",
"' DCTB testing function %r'",
"%",
"method",
".",
"__name__",
")",
"method",
"(",
"*",
... | utility method for print verbalizing test suite . | train | false |
50,606 | @decorator.decorator
def apply_to_audio(f, clip, *a, **k):
newclip = f(clip, *a, **k)
if (hasattr(newclip, 'audio') and (newclip.audio is not None)):
newclip.audio = f(newclip.audio, *a, **k)
return newclip
| [
"@",
"decorator",
".",
"decorator",
"def",
"apply_to_audio",
"(",
"f",
",",
"clip",
",",
"*",
"a",
",",
"**",
"k",
")",
":",
"newclip",
"=",
"f",
"(",
"clip",
",",
"*",
"a",
",",
"**",
"k",
")",
"if",
"(",
"hasattr",
"(",
"newclip",
",",
"'audi... | this decorator will apply the function f to the audio of the clip created with f . | train | false |
50,610 | def get_master_ip(host=None, port=None, password=None):
server = _connect(host, port, password)
srv_info = server.info()
ret = (srv_info.get('master_host', ''), srv_info.get('master_port', ''))
return dict(list(zip(('master_host', 'master_port'), ret)))
| [
"def",
"get_master_ip",
"(",
"host",
"=",
"None",
",",
"port",
"=",
"None",
",",
"password",
"=",
"None",
")",
":",
"server",
"=",
"_connect",
"(",
"host",
",",
"port",
",",
"password",
")",
"srv_info",
"=",
"server",
".",
"info",
"(",
")",
"ret",
... | get host information about slave . | train | true |
50,612 | def rotate_encryption_key(bucket_name, blob_name, base64_encryption_key, base64_new_encryption_key):
storage_client = storage.Client()
bucket = storage_client.get_bucket(bucket_name)
current_encryption_key = base64.b64decode(base64_encryption_key)
new_encryption_key = base64.b64decode(base64_new_encryption_key)
source_blob = Blob(blob_name, bucket, encryption_key=current_encryption_key)
destination_blob = Blob(blob_name, bucket, encryption_key=new_encryption_key)
token = None
while True:
(token, bytes_rewritten, total_bytes) = destination_blob.rewrite(source_blob, token=token)
if (token is None):
break
print 'Key rotation complete for Blob {}'.format(blob_name)
| [
"def",
"rotate_encryption_key",
"(",
"bucket_name",
",",
"blob_name",
",",
"base64_encryption_key",
",",
"base64_new_encryption_key",
")",
":",
"storage_client",
"=",
"storage",
".",
"Client",
"(",
")",
"bucket",
"=",
"storage_client",
".",
"get_bucket",
"(",
"bucke... | performs a key rotation by re-writing an encrypted blob with a new encryption key . | train | false |
@csrf_protect
def account_register(request, next_url=u'dashboard'):
    """Show the registration page, or bounce to login when disabled.

    Registration is offered only when the primary auth backend supports it
    AND the site configuration flag ``auth_enable_registration`` is set;
    otherwise the user is redirected to the login view. On success the
    registration flow continues to *next_url* (default: the dashboard).
    """
    siteconfig = SiteConfiguration.objects.get_current()
    backends = get_enabled_auth_backends()

    # Same short-circuit order as before: backend capability first,
    # then the site-wide configuration switch.
    registration_allowed = (backends[0].supports_registration and
                            siteconfig.get(u'auth_enable_registration'))

    if not registration_allowed:
        return HttpResponseRedirect(reverse(u'login'))

    return register(request, next_page=reverse(next_url),
                    form_class=RegistrationForm)
| [
"@",
"csrf_protect",
"def",
"account_register",
"(",
"request",
",",
"next_url",
"=",
"u'dashboard'",
")",
":",
"siteconfig",
"=",
"SiteConfiguration",
".",
"objects",
".",
"get_current",
"(",
")",
"auth_backends",
"=",
"get_enabled_auth_backends",
"(",
")",
"if",... | display the appropriate registration page . | train | false |
def jacobi(a, b):
    """Compute the Jacobi symbol (a/b) for positive integers a and odd b.

    Returns 1, -1, or 0 (0 exactly when gcd(a, b) > 1). Uses the standard
    reduction: factor out twos via the second supplement, then swap the
    arguments via quadratic reciprocity.
    """
    if a == 0:
        return 0
    sign = 1
    while a > 1:
        if a % 2 == 0:
            # Second supplement: (2/b) = -1 iff b ≡ 3 or 5 (mod 8).
            if b % 8 in (3, 5):
                sign = -sign
            a //= 2
        else:
            # Quadratic reciprocity: the sign flips iff a ≡ b ≡ 3 (mod 4).
            if a % 4 == 3 and b % 4 == 3:
                sign = -sign
            a, b = b % a, a
        if a == 0:
            # a and b share a common factor, so the symbol is 0.
            return 0
    return sign
| [
"def",
"jacobi",
"(",
"a",
",",
"b",
")",
":",
"if",
"(",
"a",
"==",
"0",
")",
":",
"return",
"0",
"result",
"=",
"1",
"while",
"(",
"a",
">",
"1",
")",
":",
"if",
"(",
"a",
"&",
"1",
")",
":",
"if",
"(",
"(",
"(",
"(",
"a",
"-",
"1",... | calculates the value of the jacobi symbol where both a and b are positive integers . | train | true |
def get_amount(ref_doc, dt):
    """Return the outstanding amount for the referenced document.

    For a Sales Order the amount is grand total minus advances already paid;
    for a Sales Invoice it is the outstanding amount, converted out of the
    party-account currency when the currencies differ. Throws (via
    frappe.throw) when nothing remains outstanding.

    NOTE(review): as in the original, a *dt* that matches neither doctype
    leaves the amount unbound and raises NameError — confirm callers only
    pass these two doctypes.
    """
    if dt == u'Sales Order':
        amount = flt(ref_doc.grand_total) - flt(ref_doc.advance_paid)
    elif dt == u'Sales Invoice':
        if ref_doc.party_account_currency == ref_doc.currency:
            amount = flt(ref_doc.outstanding_amount)
        else:
            amount = flt(ref_doc.outstanding_amount) / ref_doc.conversion_rate

    if amount > 0:
        return amount
    frappe.throw(_(u'Payment Entry is already created'))
| [
"def",
"get_amount",
"(",
"ref_doc",
",",
"dt",
")",
":",
"if",
"(",
"dt",
"==",
"u'Sales Order'",
")",
":",
"grand_total",
"=",
"(",
"flt",
"(",
"ref_doc",
".",
"grand_total",
")",
"-",
"flt",
"(",
"ref_doc",
".",
"advance_paid",
")",
")",
"if",
"("... | get amount based on doctype . | train | false |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.