id_within_dataset int64 1 55.5k | snippet stringlengths 19 14.2k | tokens listlengths 6 1.63k | nl stringlengths 6 352 | split_within_dataset stringclasses 1 value | is_duplicated bool 2 classes |
|---|---|---|---|---|---|
53,879 | @contextmanager
@deprecated(u'1.4.0', _deprecation_msg)
def subsystem_instance(subsystem_type, scope=None, **options):
if (not issubclass(subsystem_type, Subsystem)):
raise TypeError(u'The given `subsystem_type` was not a subclass of `Subsystem`: {}'.format(subsystem_type))
optionables = Subsystem.closure([subsystem_type])
updated_options = (dict(Subsystem._options.items()) if Subsystem._options else {})
if options:
updated_options.update(options)
Subsystem._options = create_options_for_optionables(optionables, options=updated_options)
try:
if (scope is None):
(yield subsystem_type.global_instance())
else:
class ScopedOptionable(Optionable, ):
options_scope = scope
options_scope_category = ScopeInfo.SUBSYSTEM
(yield subsystem_type.scoped_instance(ScopedOptionable))
finally:
Subsystem.reset()
| [
"@",
"contextmanager",
"@",
"deprecated",
"(",
"u'1.4.0'",
",",
"_deprecation_msg",
")",
"def",
"subsystem_instance",
"(",
"subsystem_type",
",",
"scope",
"=",
"None",
",",
"**",
"options",
")",
":",
"if",
"(",
"not",
"issubclass",
"(",
"subsystem_type",
",",
... | creates a subsystem instance for test . | train | false |
53,880 | def findController(controllers=DefaultControllers):
for controller in controllers:
if controller.isAvailable():
return controller
| [
"def",
"findController",
"(",
"controllers",
"=",
"DefaultControllers",
")",
":",
"for",
"controller",
"in",
"controllers",
":",
"if",
"controller",
".",
"isAvailable",
"(",
")",
":",
"return",
"controller"
] | return first available controller from list . | train | false |
53,881 | def port_create_vxlan(br, port, id, remote, dst_port=None):
dst_port = ((' options:dst_port=' + str(dst_port)) if (0 < dst_port <= 65535) else '')
if (not (0 <= id < (2 ** 64))):
return False
elif (not __salt__['dig.check_ip'](remote)):
return False
elif (not bridge_exists(br)):
return False
elif (port in port_list(br)):
cmd = 'ovs-vsctl set interface {0} type=vxlan options:remote_ip={1} options:key={2}{3}'.format(port, remote, id, dst_port)
result = __salt__['cmd.run_all'](cmd)
return _retcode_to_bool(result['retcode'])
else:
cmd = 'ovs-vsctl add-port {0} {1} -- set interface {1} type=vxlan options:remote_ip={2} options:key={3}{4}'.format(br, port, remote, id, dst_port)
result = __salt__['cmd.run_all'](cmd)
return _retcode_to_bool(result['retcode'])
| [
"def",
"port_create_vxlan",
"(",
"br",
",",
"port",
",",
"id",
",",
"remote",
",",
"dst_port",
"=",
"None",
")",
":",
"dst_port",
"=",
"(",
"(",
"' options:dst_port='",
"+",
"str",
"(",
"dst_port",
")",
")",
"if",
"(",
"0",
"<",
"dst_port",
"<=",
"65... | virtual extensible local area network - creates vxlan tunnel between endpoints . | train | true |
53,882 | @hook.command('octopart', 'octo')
def octopart(text, reply):
if (not api_key):
return 'Octopart API key required.'
params = {'apikey': api_key, 'q': text, 'start': 0, 'limit': 1}
try:
request = requests.get(API_URL, params=params)
request.raise_for_status()
except (requests.exceptions.HTTPError, requests.exceptions.ConnectionError) as e:
return 'Could not fetch part data: {}'.format(e)
response = request.json()
if (not response['results']):
return 'No results.'
results = response['results']
for result in results:
part = result['item']
reply('{} - {} - {}'.format(part['brand']['name'], part['mpn'], part['octopart_url']))
| [
"@",
"hook",
".",
"command",
"(",
"'octopart'",
",",
"'octo'",
")",
"def",
"octopart",
"(",
"text",
",",
"reply",
")",
":",
"if",
"(",
"not",
"api_key",
")",
":",
"return",
"'Octopart API key required.'",
"params",
"=",
"{",
"'apikey'",
":",
"api_key",
"... | octopart <keyword> -- search for any part on the octopart database . | train | false |
53,883 | def save_categories(shop, categories_pk):
configuration.set(shop, SAMPLE_CATEGORIES_KEY, categories_pk)
| [
"def",
"save_categories",
"(",
"shop",
",",
"categories_pk",
")",
":",
"configuration",
".",
"set",
"(",
"shop",
",",
"SAMPLE_CATEGORIES_KEY",
",",
"categories_pk",
")"
] | save a list of pk as a list of sample categories for a shop . | train | false |
53,884 | def _strips(direction, text, remove):
if (direction == 'l'):
if text.startswith(remove):
return text[len(remove):]
elif (direction == 'r'):
if text.endswith(remove):
return text[:(- len(remove))]
else:
raise WrongDirection, 'Needs to be r or l.'
return text
| [
"def",
"_strips",
"(",
"direction",
",",
"text",
",",
"remove",
")",
":",
"if",
"(",
"direction",
"==",
"'l'",
")",
":",
"if",
"text",
".",
"startswith",
"(",
"remove",
")",
":",
"return",
"text",
"[",
"len",
"(",
"remove",
")",
":",
"]",
"elif",
... | strips remove from text at direction end . | train | false |
53,885 | def setAttributeDictionaryByArguments(argumentNames, arguments, xmlElement):
for (argumentIndex, argument) in enumerate(arguments):
xmlElement.attributeDictionary[argumentNames[argumentIndex]] = argument
| [
"def",
"setAttributeDictionaryByArguments",
"(",
"argumentNames",
",",
"arguments",
",",
"xmlElement",
")",
":",
"for",
"(",
"argumentIndex",
",",
"argument",
")",
"in",
"enumerate",
"(",
"arguments",
")",
":",
"xmlElement",
".",
"attributeDictionary",
"[",
"argum... | set the attribute dictionary to the arguments . | train | false |
53,886 | def expand_dimension_links(metadata):
links = []
for link in metadata:
if isinstance(link, compat.string_type):
link = {'name': link}
elif ('name' not in link):
raise ModelError('Dimension link has no name')
links.append(link)
return links
| [
"def",
"expand_dimension_links",
"(",
"metadata",
")",
":",
"links",
"=",
"[",
"]",
"for",
"link",
"in",
"metadata",
":",
"if",
"isinstance",
"(",
"link",
",",
"compat",
".",
"string_type",
")",
":",
"link",
"=",
"{",
"'name'",
":",
"link",
"}",
"elif"... | expands links to dimensions . | train | false |
53,887 | @register.inclusion_tag(u'generic/includes/comment.html', takes_context=True)
def comment_thread(context, parent):
if (u'all_comments' not in context):
comments = defaultdict(list)
if ((u'request' in context) and context[u'request'].user.is_staff):
comments_queryset = parent.comments.all()
else:
comments_queryset = parent.comments.visible()
for comment in comments_queryset.select_related(u'user'):
comments[comment.replied_to_id].append(comment)
context[u'all_comments'] = comments
parent_id = (parent.id if isinstance(parent, ThreadedComment) else None)
try:
replied_to = int(context[u'request'].POST[u'replied_to'])
except KeyError:
replied_to = 0
context.update({u'comments_for_thread': context[u'all_comments'].get(parent_id, []), u'no_comments': ((parent_id is None) and (not context[u'all_comments'])), u'replied_to': replied_to})
return context
| [
"@",
"register",
".",
"inclusion_tag",
"(",
"u'generic/includes/comment.html'",
",",
"takes_context",
"=",
"True",
")",
"def",
"comment_thread",
"(",
"context",
",",
"parent",
")",
":",
"if",
"(",
"u'all_comments'",
"not",
"in",
"context",
")",
":",
"comments",
... | return a list of child comments for the given parent . | train | false |
53,888 | def temp_fail_retry(error, fun, *args):
while 1:
try:
return fun(*args)
except error as e:
eintr = (errno.WSAEINTR if (os.name == 'nt') else errno.EINTR)
if (e.args[0] == eintr):
continue
raise
| [
"def",
"temp_fail_retry",
"(",
"error",
",",
"fun",
",",
"*",
"args",
")",
":",
"while",
"1",
":",
"try",
":",
"return",
"fun",
"(",
"*",
"args",
")",
"except",
"error",
"as",
"e",
":",
"eintr",
"=",
"(",
"errno",
".",
"WSAEINTR",
"if",
"(",
"os"... | retry to execute function . | train | true |
53,891 | def certificate():
mode = session.s3.hrm.mode
def prep(r):
if (mode is not None):
auth.permission.fail()
return True
s3.prep = prep
if (settings.get_hrm_filter_certificates() and (not auth.s3_has_role(ADMIN))):
s3.filter = auth.filter_by_root_org(s3db.hrm_certificate)
output = s3_rest_controller(rheader=s3db.hrm_rheader)
return output
| [
"def",
"certificate",
"(",
")",
":",
"mode",
"=",
"session",
".",
"s3",
".",
"hrm",
".",
"mode",
"def",
"prep",
"(",
"r",
")",
":",
"if",
"(",
"mode",
"is",
"not",
"None",
")",
":",
"auth",
".",
"permission",
".",
"fail",
"(",
")",
"return",
"T... | certificates controller . | train | false |
53,893 | def metric_cleanup():
logging.debug('metric_cleanup')
pass
| [
"def",
"metric_cleanup",
"(",
")",
":",
"logging",
".",
"debug",
"(",
"'metric_cleanup'",
")",
"pass"
] | teardown; part of gmond interface . | train | false |
53,896 | def tuple2str(tagged_token, sep='/'):
(word, tag) = tagged_token
if (tag is None):
return word
else:
assert (sep not in tag), 'tag may not contain sep!'
return ('%s%s%s' % (word, sep, tag))
| [
"def",
"tuple2str",
"(",
"tagged_token",
",",
"sep",
"=",
"'/'",
")",
":",
"(",
"word",
",",
"tag",
")",
"=",
"tagged_token",
"if",
"(",
"tag",
"is",
"None",
")",
":",
"return",
"word",
"else",
":",
"assert",
"(",
"sep",
"not",
"in",
"tag",
")",
... | given the tuple representation of a tagged token . | train | false |
53,897 | def close_enough(sa, sb):
sa = CLOSE_ENOUGH_FIX_RE.sub('', six.text_type(sa)).lower()
sb = CLOSE_ENOUGH_FIX_RE.sub('', six.text_type(sb)).lower()
return (sa == sb)
| [
"def",
"close_enough",
"(",
"sa",
",",
"sb",
")",
":",
"sa",
"=",
"CLOSE_ENOUGH_FIX_RE",
".",
"sub",
"(",
"''",
",",
"six",
".",
"text_type",
"(",
"sa",
")",
")",
".",
"lower",
"(",
")",
"sb",
"=",
"CLOSE_ENOUGH_FIX_RE",
".",
"sub",
"(",
"''",
",",... | compare two strings and return true if theyre the same notwithstanding any whitespace or case . | train | false |
53,898 | def get_server_info(request=None):
capabilities = _capabilities_defaults.copy()
capabilities.update(_registered_capabilities)
return {u'product': {u'name': u'Review Board', u'version': get_version_string(), u'package_version': get_package_version(), u'is_release': is_release()}, u'site': {u'url': get_server_url(request=request), u'administrators': [{u'name': name, u'email': email} for (name, email) in settings.ADMINS], u'time_zone': settings.TIME_ZONE}, u'capabilities': capabilities}
| [
"def",
"get_server_info",
"(",
"request",
"=",
"None",
")",
":",
"capabilities",
"=",
"_capabilities_defaults",
".",
"copy",
"(",
")",
"capabilities",
".",
"update",
"(",
"_registered_capabilities",
")",
"return",
"{",
"u'product'",
":",
"{",
"u'name'",
":",
"... | returns server information for use in the api . | train | false |
53,900 | def show_snapshot(kwargs=None, call=None):
if (call != 'function'):
raise SaltCloudSystemExit('The show_snapshot function must be called with -f or --function.')
if ((not kwargs) or ('name' not in kwargs)):
log.error('Must specify name.')
return False
conn = get_conn()
return _expand_item(conn.ex_get_snapshot(kwargs['name']))
| [
"def",
"show_snapshot",
"(",
"kwargs",
"=",
"None",
",",
"call",
"=",
"None",
")",
":",
"if",
"(",
"call",
"!=",
"'function'",
")",
":",
"raise",
"SaltCloudSystemExit",
"(",
"'The show_snapshot function must be called with -f or --function.'",
")",
"if",
"(",
"(",... | show the details of an existing snapshot . | train | true |
53,901 | def _is_host_full(client, host):
luns = client.get_volume_mappings_for_host(host['hostRef'])
return (len(luns) >= utils.MAX_LUNS_PER_HOST)
| [
"def",
"_is_host_full",
"(",
"client",
",",
"host",
")",
":",
"luns",
"=",
"client",
".",
"get_volume_mappings_for_host",
"(",
"host",
"[",
"'hostRef'",
"]",
")",
"return",
"(",
"len",
"(",
"luns",
")",
">=",
"utils",
".",
"MAX_LUNS_PER_HOST",
")"
] | checks whether maximum volumes attached to a host have been reached . | train | false |
53,902 | def dict_subset(d, keys):
n = dict()
for key in keys:
if (key in d):
n[key] = d[key]
return n
| [
"def",
"dict_subset",
"(",
"d",
",",
"keys",
")",
":",
"n",
"=",
"dict",
"(",
")",
"for",
"key",
"in",
"keys",
":",
"if",
"(",
"key",
"in",
"d",
")",
":",
"n",
"[",
"key",
"]",
"=",
"d",
"[",
"key",
"]",
"return",
"n"
] | return a new dictionary that is built from copying select keys from d . | train | false |
53,903 | def Repr(class_instance, ordered_dictionary):
return ('search.%s(%s)' % (class_instance.__class__.__name__, ', '.join([("%s='%s'" % (key, value)) for (key, value) in ordered_dictionary if value])))
| [
"def",
"Repr",
"(",
"class_instance",
",",
"ordered_dictionary",
")",
":",
"return",
"(",
"'search.%s(%s)'",
"%",
"(",
"class_instance",
".",
"__class__",
".",
"__name__",
",",
"', '",
".",
"join",
"(",
"[",
"(",
"\"%s='%s'\"",
"%",
"(",
"key",
",",
"value... | generates an unambiguous representation for instance and ordered dict . | train | false |
53,905 | def set_cache_under_settings(destination, setting, key_prefix, value, ttl, list_=False):
default = ({} if (not list_) else [])
existing = destination.settings.get(setting, default)
if value:
if list_:
value.extend(existing)
else:
value.update(existing)
set_cache(((key_prefix + '.') + setting), value, ttl)
destination.settings[setting] = value
| [
"def",
"set_cache_under_settings",
"(",
"destination",
",",
"setting",
",",
"key_prefix",
",",
"value",
",",
"ttl",
",",
"list_",
"=",
"False",
")",
":",
"default",
"=",
"(",
"{",
"}",
"if",
"(",
"not",
"list_",
")",
"else",
"[",
"]",
")",
"existing",
... | take the value passed . | train | false |
53,906 | def _getPlatformString(dist=None):
if (dist == 'bdist'):
if (os.sys.platform == 'darwin'):
(OSXver, junk, architecture) = platform.mac_ver()
systemInfo = ('OSX_%s_%s' % (OSXver, architecture))
elif (os.sys.platform == 'linux'):
systemInfo = ('%s_%s_%s' % ('Linux', ':'.join([x for x in platform.dist() if (x != '')]), platform.release()))
elif (os.sys.platform == 'win32'):
ver = os.sys.getwindowsversion()
if (len(ver[4]) > 0):
systemInfo = ('win32_v%i.%i.%i (%s)' % (ver[0], ver[1], ver[2], ver[4]))
else:
systemInfo = ('win32_v%i.%i.%i' % (ver[0], ver[1], ver[2]))
else:
systemInfo = (platform.system() + platform.release())
else:
systemInfo = 'n/a'
return systemInfo
| [
"def",
"_getPlatformString",
"(",
"dist",
"=",
"None",
")",
":",
"if",
"(",
"dist",
"==",
"'bdist'",
")",
":",
"if",
"(",
"os",
".",
"sys",
".",
"platform",
"==",
"'darwin'",
")",
":",
"(",
"OSXver",
",",
"junk",
",",
"architecture",
")",
"=",
"pla... | if generic==true then returns empty __build_platform__ string . | train | false |
53,907 | def simulate_head(app, path, **kwargs):
return simulate_request(app, 'HEAD', path, **kwargs)
| [
"def",
"simulate_head",
"(",
"app",
",",
"path",
",",
"**",
"kwargs",
")",
":",
"return",
"simulate_request",
"(",
"app",
",",
"'HEAD'",
",",
"path",
",",
"**",
"kwargs",
")"
] | simulates a head request to a wsgi application . | train | false |
53,908 | def extract_lsq_problems():
problems = OrderedDict()
for (name, problem_class) in inspect.getmembers(sys.modules[__name__], inspect.isclass):
if ((name != 'LSQBenchmarkProblem') and issubclass(problem_class, LSQBenchmarkProblem) and hasattr(problem_class, 'INITIAL_GUESSES')):
for (i, x0) in enumerate(problem_class.INITIAL_GUESSES):
if (len(problem_class.INITIAL_GUESSES) > 1):
key_name = '{0}_{1}'.format(name, i)
else:
key_name = name
problems[key_name] = problem_class(x0)
return problems
| [
"def",
"extract_lsq_problems",
"(",
")",
":",
"problems",
"=",
"OrderedDict",
"(",
")",
"for",
"(",
"name",
",",
"problem_class",
")",
"in",
"inspect",
".",
"getmembers",
"(",
"sys",
".",
"modules",
"[",
"__name__",
"]",
",",
"inspect",
".",
"isclass",
"... | extract all least squares problems in this file for benchmarking . | train | false |
53,909 | def test_cons_correct():
can_compile(u'(cons a b)')
| [
"def",
"test_cons_correct",
"(",
")",
":",
"can_compile",
"(",
"u'(cons a b)'",
")"
] | ensure cons gets compiled correctly . | train | false |
53,911 | def is_operator(element):
return (isinstance(element, basestring) and (element in DOMAIN_OPERATORS))
| [
"def",
"is_operator",
"(",
"element",
")",
":",
"return",
"(",
"isinstance",
"(",
"element",
",",
"basestring",
")",
"and",
"(",
"element",
"in",
"DOMAIN_OPERATORS",
")",
")"
] | test whether an object is a valid domain operator . | train | false |
53,913 | def tree(node):
subtrees = []
for arg in node.args:
subtrees.append(tree(arg))
s = (print_node(node) + pprint_nodes(subtrees))
return s
| [
"def",
"tree",
"(",
"node",
")",
":",
"subtrees",
"=",
"[",
"]",
"for",
"arg",
"in",
"node",
".",
"args",
":",
"subtrees",
".",
"append",
"(",
"tree",
"(",
"arg",
")",
")",
"s",
"=",
"(",
"print_node",
"(",
"node",
")",
"+",
"pprint_nodes",
"(",
... | returns a tree representation of "node" as a string . | train | false |
53,915 | def RunInTransactionCustomRetries(retries, function, *args, **kwargs):
options = datastore_rpc.TransactionOptions(retries=retries)
return RunInTransactionOptions(options, function, *args, **kwargs)
| [
"def",
"RunInTransactionCustomRetries",
"(",
"retries",
",",
"function",
",",
"*",
"args",
",",
"**",
"kwargs",
")",
":",
"options",
"=",
"datastore_rpc",
".",
"TransactionOptions",
"(",
"retries",
"=",
"retries",
")",
"return",
"RunInTransactionOptions",
"(",
"... | runs a function inside a datastore transaction . | train | false |
53,916 | def calculateNonce(realm, algorithm=MD5):
global SUPPORTED_ALGORITHM, DIGEST_AUTH_ENCODERS
assert (algorithm in SUPPORTED_ALGORITHM)
try:
encoder = DIGEST_AUTH_ENCODERS[algorithm]
except KeyError:
raise NotImplementedError(('The chosen algorithm (%s) does not have an implementation yet' % algorithm))
return encoder(('%d:%s' % (time.time(), realm)))
| [
"def",
"calculateNonce",
"(",
"realm",
",",
"algorithm",
"=",
"MD5",
")",
":",
"global",
"SUPPORTED_ALGORITHM",
",",
"DIGEST_AUTH_ENCODERS",
"assert",
"(",
"algorithm",
"in",
"SUPPORTED_ALGORITHM",
")",
"try",
":",
"encoder",
"=",
"DIGEST_AUTH_ENCODERS",
"[",
"alg... | this is an auxaliary function that calculates nonce value . | train | false |
53,918 | def set_hostname(hostname=None, commit_change=True):
conn = __proxy__['junos.conn']()
ret = dict()
if (hostname is None):
ret['out'] = False
return ret
set_string = 'set system host-name {0}'.format(hostname)
conn.cu.load(set_string, format='set')
if commit_change:
return commit()
else:
ret['out'] = True
ret['msg'] = 'set system host-name {0} is queued'.format(hostname)
return ret
| [
"def",
"set_hostname",
"(",
"hostname",
"=",
"None",
",",
"commit_change",
"=",
"True",
")",
":",
"conn",
"=",
"__proxy__",
"[",
"'junos.conn'",
"]",
"(",
")",
"ret",
"=",
"dict",
"(",
")",
"if",
"(",
"hostname",
"is",
"None",
")",
":",
"ret",
"[",
... | to set the name of the device . | train | false |
53,919 | def with_backing_lock(method):
def wrapped_method(self, *args, **dargs):
already_have_lock = (self._backing_file_lock is not None)
if (not already_have_lock):
self._lock_backing_file()
try:
return method(self, *args, **dargs)
finally:
if (not already_have_lock):
self._unlock_backing_file()
wrapped_method.__name__ = method.__name__
wrapped_method.__doc__ = method.__doc__
return wrapped_method
| [
"def",
"with_backing_lock",
"(",
"method",
")",
":",
"def",
"wrapped_method",
"(",
"self",
",",
"*",
"args",
",",
"**",
"dargs",
")",
":",
"already_have_lock",
"=",
"(",
"self",
".",
"_backing_file_lock",
"is",
"not",
"None",
")",
"if",
"(",
"not",
"alre... | a decorator to perform a lock-*-unlock cycle . | train | false |
53,920 | def build_function(name, args=None, defaults=None, flag=0, doc=None):
(args, defaults) = ((args or []), (defaults or []))
func = Function(name, doc)
func.args = argsnode = Arguments()
argsnode.args = []
for arg in args:
argsnode.args.append(Name())
argsnode.args[(-1)].name = arg
argsnode.args[(-1)].parent = argsnode
argsnode.defaults = []
for default in defaults:
argsnode.defaults.append(const_factory(default))
argsnode.defaults[(-1)].parent = argsnode
argsnode.kwarg = None
argsnode.vararg = None
argsnode.parent = func
if args:
register_arguments(func)
return func
| [
"def",
"build_function",
"(",
"name",
",",
"args",
"=",
"None",
",",
"defaults",
"=",
"None",
",",
"flag",
"=",
"0",
",",
"doc",
"=",
"None",
")",
":",
"(",
"args",
",",
"defaults",
")",
"=",
"(",
"(",
"args",
"or",
"[",
"]",
")",
",",
"(",
"... | create and initialize a astroid function node . | train | true |
53,921 | def js_escaped_string(string_for_js):
if (string_for_js is None):
string_for_js = ''
string_for_js = decode.utf8(string_for_js)
string_for_js = escapejs(string_for_js)
return string_for_js
| [
"def",
"js_escaped_string",
"(",
"string_for_js",
")",
":",
"if",
"(",
"string_for_js",
"is",
"None",
")",
":",
"string_for_js",
"=",
"''",
"string_for_js",
"=",
"decode",
".",
"utf8",
"(",
"string_for_js",
")",
"string_for_js",
"=",
"escapejs",
"(",
"string_f... | mako filter that escapes text for use in a javascript string . | train | false |
53,923 | def _check_storage_models(current_release):
diff_list = _git_diff_names_only(current_release)
return [item for item in diff_list if item.startswith('core/storage')]
| [
"def",
"_check_storage_models",
"(",
"current_release",
")",
":",
"diff_list",
"=",
"_git_diff_names_only",
"(",
"current_release",
")",
"return",
"[",
"item",
"for",
"item",
"in",
"diff_list",
"if",
"item",
".",
"startswith",
"(",
"'core/storage'",
")",
"]"
] | check if files in core/storage have changed and returns them . | train | false |
53,924 | def get_help():
return stem.interpreter.msg('msg.help', address=DEFAULT_ARGS['control_address'], port=DEFAULT_ARGS['control_port'], socket=DEFAULT_ARGS['control_socket'])
| [
"def",
"get_help",
"(",
")",
":",
"return",
"stem",
".",
"interpreter",
".",
"msg",
"(",
"'msg.help'",
",",
"address",
"=",
"DEFAULT_ARGS",
"[",
"'control_address'",
"]",
",",
"port",
"=",
"DEFAULT_ARGS",
"[",
"'control_port'",
"]",
",",
"socket",
"=",
"DE... | provides our --help usage information . | train | false |
53,925 | @register(u'accept-line')
def accept_line(event):
b = event.current_buffer
b.accept_action.validate_and_handle(event.cli, b)
| [
"@",
"register",
"(",
"u'accept-line'",
")",
"def",
"accept_line",
"(",
"event",
")",
":",
"b",
"=",
"event",
".",
"current_buffer",
"b",
".",
"accept_action",
".",
"validate_and_handle",
"(",
"event",
".",
"cli",
",",
"b",
")"
] | accept the line regardless of where the cursor is . | train | false |
53,927 | def convolution_nd(x, W, b=None, stride=1, pad=0, use_cudnn=True, cover_all=False):
ndim = len(x.shape[2:])
func = ConvolutionND(ndim, stride, pad, use_cudnn, cover_all)
if (b is None):
return func(x, W)
else:
return func(x, W, b)
| [
"def",
"convolution_nd",
"(",
"x",
",",
"W",
",",
"b",
"=",
"None",
",",
"stride",
"=",
"1",
",",
"pad",
"=",
"0",
",",
"use_cudnn",
"=",
"True",
",",
"cover_all",
"=",
"False",
")",
":",
"ndim",
"=",
"len",
"(",
"x",
".",
"shape",
"[",
"2",
... | n-dimensional convolution function . | train | false |
53,930 | def get_features():
return get_var('FEATURES')
| [
"def",
"get_features",
"(",
")",
":",
"return",
"get_var",
"(",
"'FEATURES'",
")"
] | get the value of features variable in the make . | train | false |
53,932 | def _is_hierarchical(x):
item = x[0]
if isinstance(item, (list, tuple, np.ndarray, pd.Series, pd.DataFrame)):
return True
else:
return False
| [
"def",
"_is_hierarchical",
"(",
"x",
")",
":",
"item",
"=",
"x",
"[",
"0",
"]",
"if",
"isinstance",
"(",
"item",
",",
"(",
"list",
",",
"tuple",
",",
"np",
".",
"ndarray",
",",
"pd",
".",
"Series",
",",
"pd",
".",
"DataFrame",
")",
")",
":",
"r... | checks if the first item of an array-like object is also array-like if so . | train | false |
53,934 | def dup_random(n, a, b, K):
f = [K.convert(random.randint(a, b)) for _ in range(0, (n + 1))]
while (not f[0]):
f[0] = K.convert(random.randint(a, b))
return f
| [
"def",
"dup_random",
"(",
"n",
",",
"a",
",",
"b",
",",
"K",
")",
":",
"f",
"=",
"[",
"K",
".",
"convert",
"(",
"random",
".",
"randint",
"(",
"a",
",",
"b",
")",
")",
"for",
"_",
"in",
"range",
"(",
"0",
",",
"(",
"n",
"+",
"1",
")",
"... | return a polynomial of degree n with coefficients in [a . | train | false |
53,935 | def reconstruct_interp_matrix(idx, proj):
if _is_real(proj):
return backend.idd_reconint((idx + 1), proj)
else:
return backend.idz_reconint((idx + 1), proj)
| [
"def",
"reconstruct_interp_matrix",
"(",
"idx",
",",
"proj",
")",
":",
"if",
"_is_real",
"(",
"proj",
")",
":",
"return",
"backend",
".",
"idd_reconint",
"(",
"(",
"idx",
"+",
"1",
")",
",",
"proj",
")",
"else",
":",
"return",
"backend",
".",
"idz_reco... | reconstruct interpolation matrix from id . | train | false |
53,936 | def index_alt():
s3_redirect_default(URL(f='person'))
| [
"def",
"index_alt",
"(",
")",
":",
"s3_redirect_default",
"(",
"URL",
"(",
"f",
"=",
"'person'",
")",
")"
] | module homepage for non-admin users when no cms content found . | train | false |
53,938 | def ansible_dict_to_boto3_filter_list(filters_dict):
filters_list = []
for (k, v) in filters_dict.items():
filter_dict = {'Name': k}
if isinstance(v, string_types):
filter_dict['Values'] = [v]
else:
filter_dict['Values'] = v
filters_list.append(filter_dict)
return filters_list
| [
"def",
"ansible_dict_to_boto3_filter_list",
"(",
"filters_dict",
")",
":",
"filters_list",
"=",
"[",
"]",
"for",
"(",
"k",
",",
"v",
")",
"in",
"filters_dict",
".",
"items",
"(",
")",
":",
"filter_dict",
"=",
"{",
"'Name'",
":",
"k",
"}",
"if",
"isinstan... | convert an ansible dict of filters to list of dicts that boto3 can use args: filters_dict : dict of aws filters . | train | false |
53,939 | def _hide_frame(ax):
ax.get_yticks()
ax.xaxis.set_ticks([])
ax.yaxis.set_ticks([])
ax.set_frame_on(False)
| [
"def",
"_hide_frame",
"(",
"ax",
")",
":",
"ax",
".",
"get_yticks",
"(",
")",
"ax",
".",
"xaxis",
".",
"set_ticks",
"(",
"[",
"]",
")",
"ax",
".",
"yaxis",
".",
"set_ticks",
"(",
"[",
"]",
")",
"ax",
".",
"set_frame_on",
"(",
"False",
")"
] | helper to hide axis frame for topomaps . | train | false |
53,940 | def _auth_from_available(le_client, config, domains=None, certname=None, lineage=None):
if (lineage is None):
(action, lineage) = _find_lineage_for_domains_and_certname(config, domains, certname)
else:
action = 'renew'
if (action == 'reinstall'):
logger.info('Keeping the existing certificate')
return ('reinstall', lineage)
hooks.pre_hook(config)
try:
if (action == 'renew'):
logger.info('Renewing an existing certificate')
renewal.renew_cert(config, domains, le_client, lineage)
elif (action == 'newcert'):
logger.info('Obtaining a new certificate')
lineage = le_client.obtain_and_enroll_certificate(domains, certname)
if (lineage is False):
raise errors.Error('Certificate could not be obtained')
finally:
hooks.post_hook(config)
if ((not config.dry_run) and (not (config.verb == 'renew'))):
_report_new_cert(config, lineage.cert, lineage.fullchain)
return (action, lineage)
| [
"def",
"_auth_from_available",
"(",
"le_client",
",",
"config",
",",
"domains",
"=",
"None",
",",
"certname",
"=",
"None",
",",
"lineage",
"=",
"None",
")",
":",
"if",
"(",
"lineage",
"is",
"None",
")",
":",
"(",
"action",
",",
"lineage",
")",
"=",
"... | authenticate and enroll certificate . | train | false |
53,941 | @pytest.mark.parametrize((u'expr', u'result'), [((lambda x, y: (x + y)), [5.0, 5.0]), ((lambda x, y: (x - y)), [(-1.0), (-1.0)]), ((lambda x, y: (x * y)), [6.0, 6.0]), ((lambda x, y: (x / y)), [(2.0 / 3.0), (2.0 / 3.0)]), ((lambda x, y: (x ** y)), [8.0, 8.0])])
def test_model_set_raises_value_error(expr, result):
with pytest.raises(ValueError):
s = expr(Const1D((2, 2), n_models=2), Const1D(3, n_models=1))
| [
"@",
"pytest",
".",
"mark",
".",
"parametrize",
"(",
"(",
"u'expr'",
",",
"u'result'",
")",
",",
"[",
"(",
"(",
"lambda",
"x",
",",
"y",
":",
"(",
"x",
"+",
"y",
")",
")",
",",
"[",
"5.0",
",",
"5.0",
"]",
")",
",",
"(",
"(",
"lambda",
"x",... | check that creating model sets with components whose _n_models are different raise a value error . | train | false |
53,942 | def websettings():
web = makeelement('webSettings')
web.append(makeelement('allowPNG'))
web.append(makeelement('doNotSaveAsSingleFile'))
return web
| [
"def",
"websettings",
"(",
")",
":",
"web",
"=",
"makeelement",
"(",
"'webSettings'",
")",
"web",
".",
"append",
"(",
"makeelement",
"(",
"'allowPNG'",
")",
")",
"web",
".",
"append",
"(",
"makeelement",
"(",
"'doNotSaveAsSingleFile'",
")",
")",
"return",
... | generate websettings . | train | true |
53,943 | def parse_format_method_string(format_string):
keys = []
num_args = 0
manual_pos_arg = set()
for name in collect_string_fields(format_string):
if (name and str(name).isdigit()):
manual_pos_arg.add(str(name))
elif name:
(keyname, fielditerator) = split_format_field_names(name)
if isinstance(keyname, numbers.Number):
manual_pos_arg.add(str(keyname))
keyname = int(keyname)
keys.append((keyname, list(fielditerator)))
else:
num_args += 1
return (keys, num_args, len(manual_pos_arg))
| [
"def",
"parse_format_method_string",
"(",
"format_string",
")",
":",
"keys",
"=",
"[",
"]",
"num_args",
"=",
"0",
"manual_pos_arg",
"=",
"set",
"(",
")",
"for",
"name",
"in",
"collect_string_fields",
"(",
"format_string",
")",
":",
"if",
"(",
"name",
"and",
... | parses a pep 3101 format string . | train | false |
53,944 | def proportions_chisquare_pairscontrol(count, nobs, value=None, multitest_method='hs', alternative='two-sided'):
if ((value is not None) or (not (alternative in ['two-sided', '2s']))):
raise NotImplementedError
all_pairs = [(0, k) for k in range(1, len(count))]
pvals = [proportions_chisquare(count[list(pair)], nobs[list(pair)])[1] for pair in all_pairs]
return AllPairsResults(pvals, all_pairs, multitest_method=multitest_method)
| [
"def",
"proportions_chisquare_pairscontrol",
"(",
"count",
",",
"nobs",
",",
"value",
"=",
"None",
",",
"multitest_method",
"=",
"'hs'",
",",
"alternative",
"=",
"'two-sided'",
")",
":",
"if",
"(",
"(",
"value",
"is",
"not",
"None",
")",
"or",
"(",
"not",
... | chisquare test of proportions for pairs of k samples compared to control performs a chisquare test for proportions for pairwise comparisons with a control . | train | false |
53,946 | def upsample_2d(incoming, kernel_size, name='UpSample2D'):
input_shape = utils.get_incoming_shape(incoming)
assert (len(input_shape) == 4), 'Incoming Tensor shape must be 4-D'
kernel = utils.autoformat_kernel_2d(kernel_size)
with tf.name_scope(name) as scope:
inference = tf.image.resize_nearest_neighbor(incoming, size=(input_shape[1:3] * tf.constant(kernel[1:3])))
inference.set_shape((None, (input_shape[1] * kernel[1]), (input_shape[2] * kernel[2]), None))
inference.scope = scope
tf.add_to_collection(((tf.GraphKeys.LAYER_TENSOR + '/') + name), inference)
return inference
| [
"def",
"upsample_2d",
"(",
"incoming",
",",
"kernel_size",
",",
"name",
"=",
"'UpSample2D'",
")",
":",
"input_shape",
"=",
"utils",
".",
"get_incoming_shape",
"(",
"incoming",
")",
"assert",
"(",
"len",
"(",
"input_shape",
")",
"==",
"4",
")",
",",
"'Incom... | upsample 2d . | train | false |
53,947 | def getAllDirectoriesWithFile(path, filename, excludeDirs):
directoryList = []
for (dirpath, dirnames, filenames) in os.walk(path):
for d in dirnames[:]:
if (d in excludeDirs):
dirnames.remove(d)
print ('EXCLUDING %s...' % os.path.join(dirpath, d))
elif ('UNDER_DEVELOPMENT' in os.listdir(os.path.join(dirpath, d))):
dirnames.remove(d)
print ('EXCLUDING %s...' % os.path.join(dirpath, d))
for f in filenames:
if (f == filename):
directoryList.append(dirpath)
return directoryList
| [
"def",
"getAllDirectoriesWithFile",
"(",
"path",
",",
"filename",
",",
"excludeDirs",
")",
":",
"directoryList",
"=",
"[",
"]",
"for",
"(",
"dirpath",
",",
"dirnames",
",",
"filenames",
")",
"in",
"os",
".",
"walk",
"(",
"path",
")",
":",
"for",
"d",
"... | returns a list of directories in the <path> with a given <filename> . | train | false |
53,950 | def write_rels(worksheet, comments_id=None, vba_controls_id=None):
root = Element('Relationships', xmlns=PKG_REL_NS)
rels = worksheet._rels
if (worksheet.vba_controls is not None):
rel = Relationship('vmlDrawing', id=worksheet.vba_controls, target=('/xl/drawings/vmlDrawing%s.vml' % vba_controls_id))
rels.append(rel)
if (worksheet._comment_count > 0):
rel = Relationship(type='comments', id='comments', target=('/xl/comments%s.xml' % comments_id))
rels.append(rel)
if (worksheet.vba_controls is None):
rel = Relationship(type='vmlDrawing', id='commentsvml', target=('/xl/drawings/commentsDrawing%s.vml' % comments_id))
rels.append(rel)
for (idx, rel) in enumerate(rels, 1):
if (rel.id is None):
rel.id = 'rId{0}'.format(idx)
root.append(rel.to_tree())
return root
| [
"def",
"write_rels",
"(",
"worksheet",
",",
"comments_id",
"=",
"None",
",",
"vba_controls_id",
"=",
"None",
")",
":",
"root",
"=",
"Element",
"(",
"'Relationships'",
",",
"xmlns",
"=",
"PKG_REL_NS",
")",
"rels",
"=",
"worksheet",
".",
"_rels",
"if",
"(",
... | write relationships for the worksheet to xml . | train | false |
53,951 | def test_ecliptic_heliobary():
icrs = ICRS((1 * u.deg), (2 * u.deg), distance=(1.5 * R_sun))
bary = icrs.transform_to(BarycentricTrueEcliptic)
helio = icrs.transform_to(HeliocentricTrueEcliptic)
assert (np.abs((bary.distance - helio.distance)) > (1 * u.km))
helio_in_bary_frame = bary.realize_frame(helio.cartesian)
assert (bary.separation(helio_in_bary_frame) > (1 * u.arcmin))
| [
"def",
"test_ecliptic_heliobary",
"(",
")",
":",
"icrs",
"=",
"ICRS",
"(",
"(",
"1",
"*",
"u",
".",
"deg",
")",
",",
"(",
"2",
"*",
"u",
".",
"deg",
")",
",",
"distance",
"=",
"(",
"1.5",
"*",
"R_sun",
")",
")",
"bary",
"=",
"icrs",
".",
"tra... | check that the ecliptic transformations for heliocentric and barycentric at least more or less make sense . | train | false |
@intercept_errors(UserAPIInternalError, ignore_errors=[UserAPIRequestError])
def set_user_preference(requesting_user, preference_key, preference_value, username=None):
    """Create or update a single preference for the target user.

    ``requesting_user`` must be authorized to act on ``username`` (or
    on itself when ``username`` is omitted).  The value is validated
    before being saved; save failures are re-raised as preference
    update errors.
    """
    target_user = _get_authorized_user(requesting_user, username)
    serializer = create_user_preference_serializer(target_user, preference_key, preference_value)
    validate_user_preference_serializer(serializer, preference_key, preference_value)
    try:
        serializer.save()
    except Exception as error:
        # Normalize storage-layer failures into the API's error type.
        raise _create_preference_update_error(preference_key, preference_value, error)
| [
"@",
"intercept_errors",
"(",
"UserAPIInternalError",
",",
"ignore_errors",
"=",
"[",
"UserAPIRequestError",
"]",
")",
"def",
"set_user_preference",
"(",
"requesting_user",
",",
"preference_key",
",",
"preference_value",
",",
"username",
"=",
"None",
")",
":",
"exis... | update a user preference for the given username . | train | false |
def _process_node(node, aliases, duplicates):
    """Deduplicate *node* against previously processed subtrees.

    A hashable key is built from the node's post-order traversal; when
    an identical subtree was seen before, *node* is rewired to the
    earlier copy and ``None`` is returned, otherwise the subtree is
    registered and its traversal returned.  Either way the node is
    trimmed back to three entries.
    """
    stack = _post_order(node)
    # Key construction: plain strings stand for themselves unless they
    # are known aliases; everything else contributes its first two
    # fields.
    parts = []
    for item in stack:
        if type(item[0]) is str and item not in aliases:
            parts.append(item[0])
        else:
            parts.append(item[0:2])
    key = tuple(parts)
    previous = duplicates.get(key, False)
    if previous:
        # Identical subtree already processed: alias this node to it.
        node[0] = previous
        stack = None
    else:
        duplicates[key] = stack[-1]
        aliases.add(stack[-1])
    # Drop surplus entries so the node keeps exactly three fields.
    while len(node) > 3:
        node.pop()
    return stack
| [
"def",
"_process_node",
"(",
"node",
",",
"aliases",
",",
"duplicates",
")",
":",
"stack",
"=",
"_post_order",
"(",
"node",
")",
"key",
"=",
"list",
"(",
")",
"for",
"item",
"in",
"stack",
":",
"if",
"(",
"(",
"type",
"(",
"item",
"[",
"0",
"]",
... | takes a node from the tree and searchs for any previously processed duplicates . | train | false |
def _contains(exp, cls):
    """Walk the ``sub`` chain of *exp* and report whether a node of
    type *cls* occurs before a ``Var`` terminates the chain."""
    # Iterative form of the original tail recursion.
    while not isinstance(exp, cls):
        if isinstance(exp, Var):
            return False
        exp = exp.sub
    return True
| [
"def",
"_contains",
"(",
"exp",
",",
"cls",
")",
":",
"if",
"isinstance",
"(",
"exp",
",",
"cls",
")",
":",
"return",
"True",
"elif",
"isinstance",
"(",
"exp",
",",
"Var",
")",
":",
"return",
"False",
"else",
":",
"return",
"_contains",
"(",
"exp",
... | does node contain a sub node of type cls . | train | false |
def index_alt():
    """Fallback module homepage: redirect the visitor to the person
    view (used for non-admin users when no CMS content is found)."""
    target = URL(f='person')
    s3_redirect_default(target)
| [
"def",
"index_alt",
"(",
")",
":",
"s3_redirect_default",
"(",
"URL",
"(",
"f",
"=",
"'person'",
")",
")"
] | module homepage for non-admin users when no cms content found . | train | false |
def render_openid_request(request, openid_request, return_to, trust_root=None):
    """Render an OpenID authentication request as an HTTP response.

    Small requests become a plain redirect; larger ones are returned as
    an HTML page with an auto-submitting form.  The trust root defaults
    to ``OPENID_TRUST_ROOT`` or the site root.
    """
    if trust_root is None:
        trust_root = getattr(
            settings, 'OPENID_TRUST_ROOT', request.build_absolute_uri('/')
        )
    if not openid_request.shouldSendRedirect():
        # Request too large for a URL: POST it via a form instead.
        form_html = openid_request.htmlMarkup(
            trust_root, return_to, form_tag_attrs={'id': 'openid_message'}
        )
        return HttpResponse(form_html, content_type='text/html;charset=UTF-8')
    redirect_url = openid_request.redirectURL(trust_root, return_to)
    return HttpResponseRedirect(redirect_url)
| [
"def",
"render_openid_request",
"(",
"request",
",",
"openid_request",
",",
"return_to",
",",
"trust_root",
"=",
"None",
")",
":",
"if",
"(",
"trust_root",
"is",
"None",
")",
":",
"trust_root",
"=",
"getattr",
"(",
"settings",
",",
"'OPENID_TRUST_ROOT'",
",",
... | render an openid authentication request . | train | false |
def _load_editor(caller):
    """Return the text stored under the caller's active multidesc edit
    key, or an empty string when no entry matches."""
    active_key = caller.db._multidesc_editkey
    # First matching entry wins, mirroring the original index lookup.
    for entry in caller.db.multidesc:
        if entry[0] == active_key:
            return entry[1]
    return ''
| [
"def",
"_load_editor",
"(",
"caller",
")",
":",
"key",
"=",
"caller",
".",
"db",
".",
"_multidesc_editkey",
"match",
"=",
"[",
"ind",
"for",
"(",
"ind",
",",
"tup",
")",
"in",
"enumerate",
"(",
"caller",
".",
"db",
".",
"multidesc",
")",
"if",
"(",
... | called when the editor loads contents . | train | false |
def bin_constructor(func):
    """Configure the ctypes prototype shared by the binary-input GEOS
    constructors -- (char*, size_t) -> geometry pointer with error
    checking -- and hand the function back."""
    func.restype = GEOM_PTR
    func.errcheck = check_geom
    func.argtypes = [c_char_p, c_size_t]
    return func
| [
"def",
"bin_constructor",
"(",
"func",
")",
":",
"func",
".",
"argtypes",
"=",
"[",
"c_char_p",
",",
"c_size_t",
"]",
"func",
".",
"restype",
"=",
"GEOM_PTR",
"func",
".",
"errcheck",
"=",
"check_geom",
"return",
"func"
] | generates a prototype for binary construction geos routines . | train | false |
def find_playlist_changes(orig_tracks, modified_tracks):
    """Diff two playlists by their track-id pairs.

    Returns ``(to_del, to_add, to_keep)``: the first two are multiset
    differences (Counters), the last the set of pairs common to both.
    """
    source_counts = Counter(get_id_pairs(orig_tracks))
    dest_counts = Counter(get_id_pairs(modified_tracks))
    to_del = source_counts - dest_counts
    to_add = dest_counts - source_counts
    to_keep = set(source_counts & dest_counts)
    return (to_del, to_add, to_keep)
| [
"def",
"find_playlist_changes",
"(",
"orig_tracks",
",",
"modified_tracks",
")",
":",
"s_pairs",
"=",
"get_id_pairs",
"(",
"orig_tracks",
")",
"d_pairs",
"=",
"get_id_pairs",
"(",
"modified_tracks",
")",
"s_count",
"=",
"Counter",
"(",
"s_pairs",
")",
"d_count",
... | finds the changes between two playlists . | train | false |
def cmServiceAccept():
    """CM SERVICE ACCEPT (GSM 04.08 section 9): protocol discriminator
    followed by the message-type header."""
    header = TpPd(pd=5)
    body = MessageType(mesType=33)
    return header / body
| [
"def",
"cmServiceAccept",
"(",
")",
":",
"a",
"=",
"TpPd",
"(",
"pd",
"=",
"5",
")",
"b",
"=",
"MessageType",
"(",
"mesType",
"=",
"33",
")",
"packet",
"=",
"(",
"a",
"/",
"b",
")",
"return",
"packet"
] | cm service accept section 9 . | train | true |
def get_resampler_for_grouping(groupby, rule, how=None, fill_method=None, limit=None, kind=None, **kwargs):
    """Return the resampler appropriate for an existing groupby.

    Builds a TimeGrouper for *rule*, binds it to the groupby's object,
    re-scopes it to the groupby itself, and routes the legacy
    how/fill_method/limit arguments through the deprecation shim.
    """
    time_grouper = TimeGrouper(freq=rule, **kwargs)
    base_resampler = time_grouper._get_resampler(groupby.obj, kind=kind)
    grouped = base_resampler._get_resampler_for_grouping(groupby=groupby)
    return _maybe_process_deprecations(
        grouped, how=how, fill_method=fill_method, limit=limit
    )
| [
"def",
"get_resampler_for_grouping",
"(",
"groupby",
",",
"rule",
",",
"how",
"=",
"None",
",",
"fill_method",
"=",
"None",
",",
"limit",
"=",
"None",
",",
"kind",
"=",
"None",
",",
"**",
"kwargs",
")",
":",
"tg",
"=",
"TimeGrouper",
"(",
"freq",
"=",
... | return our appropriate resampler when grouping as well . | train | true |
def _update_secret(namespace, name, data, apiserver_url):
    """Replace the data of an existing Kubernetes secret via JSON
    patch; return the API response, or a message when the secret does
    not exist (HTTP 404)."""
    url = '{0}/api/v1/namespaces/{1}/secrets/{2}'.format(apiserver_url, namespace, name)
    patch = [{'op': 'replace', 'path': '/data', 'value': data}]
    ret = _kpatch(url, patch)
    if ret.get('status') == 404:
        return "Node {0} doesn't exist".format(url)
    return ret
| [
"def",
"_update_secret",
"(",
"namespace",
",",
"name",
",",
"data",
",",
"apiserver_url",
")",
":",
"url",
"=",
"'{0}/api/v1/namespaces/{1}/secrets/{2}'",
".",
"format",
"(",
"apiserver_url",
",",
"namespace",
",",
"name",
")",
"data",
"=",
"[",
"{",
"'op'",
... | replace secrets data by a new one . | train | true |
def all_argmax(x):
    """Return the indices of every element of *x* equal to its maximum.

    Unlike a plain argmax this does not break ties: all maximal
    positions are reported, in order.  An empty input yields an empty
    list instead of raising ``ValueError`` from ``max()``.
    """
    if not x:
        # Guard: max() raises ValueError on an empty sequence.
        return []
    m = max(x)
    return [i for (i, v) in enumerate(x) if (v == m)]
| [
"def",
"all_argmax",
"(",
"x",
")",
":",
"m",
"=",
"max",
"(",
"x",
")",
"return",
"[",
"i",
"for",
"(",
"i",
",",
"v",
")",
"in",
"enumerate",
"(",
"x",
")",
"if",
"(",
"v",
"==",
"m",
")",
"]"
] | return the indices of all values that are equal to the maximum: no breaking ties . | train | false |
def test_simple_create():
    """Smoke-test __init__ by building a segment with id 1."""
    segment_id = 1
    make_segment(segment_id)
"def",
"test_simple_create",
"(",
")",
":",
"make_segment",
"(",
"1",
")"
] | check __init__ . | train | false |
def make_routine(name, expr, argument_sequence=None, global_vars=None, language='F95'):
    """Factory producing a code-generation routine for *expr*.

    Looks up the code generator for *language* and delegates routine
    construction to it.
    """
    generator = get_code_generator(language, 'nothingElseMatters')
    return generator.routine(name, expr, argument_sequence, global_vars)
| [
"def",
"make_routine",
"(",
"name",
",",
"expr",
",",
"argument_sequence",
"=",
"None",
",",
"global_vars",
"=",
"None",
",",
"language",
"=",
"'F95'",
")",
":",
"code_gen",
"=",
"get_code_generator",
"(",
"language",
",",
"'nothingElseMatters'",
")",
"return"... | a factory that makes an appropriate routine from an expression . | train | false |
@contextmanager
def temporary_folder():
    """Context manager yielding a freshly created temporary directory,
    removed (with all contents) on exit."""
    scratch_dir = mkdtemp()
    try:
        yield scratch_dir
    finally:
        rmtree(scratch_dir)
| [
"@",
"contextmanager",
"def",
"temporary_folder",
"(",
")",
":",
"tempdir",
"=",
"mkdtemp",
"(",
")",
"try",
":",
"(",
"yield",
"tempdir",
")",
"finally",
":",
"rmtree",
"(",
"tempdir",
")"
] | creates a temporary folder . | train | false |
def __determine_before_str(options):
    """Compute the ISO timestamp bounding the newest item to keep.

    ``options.age`` is a day offset back from today's midnight; when it
    is ``None`` no age filtering applies and ``None`` is returned.
    """
    now = datetime.datetime.now()
    midnight = datetime.datetime(now.year, now.month, now.day)
    if options.age is None:
        return None
    return (midnight - datetime.timedelta(options.age)).isoformat()
| [
"def",
"__determine_before_str",
"(",
"options",
")",
":",
"now",
"=",
"datetime",
".",
"datetime",
".",
"now",
"(",
")",
"today",
"=",
"datetime",
".",
"datetime",
"(",
"now",
".",
"year",
",",
"now",
".",
"month",
",",
"now",
".",
"day",
")",
"day_... | determine the date string for the newest timestamp to filter by . | train | false |
def get_next_page_of_all_feedback_messages(page_size=feconf.FEEDBACK_TAB_PAGE_SIZE, urlsafe_start_cursor=None):
    """Fetch one page of the site-wide feedback message list.

    Returns ``(messages, cursor, more)`` where *cursor* resumes the
    next page and *more* signals whether further pages exist.
    """
    models_page, next_cursor, more = feedback_models.FeedbackMessageModel.get_all_messages(
        page_size, urlsafe_start_cursor
    )
    messages = [_get_message_from_model(model) for model in models_page]
    return (messages, next_cursor, more)
| [
"def",
"get_next_page_of_all_feedback_messages",
"(",
"page_size",
"=",
"feconf",
".",
"FEEDBACK_TAB_PAGE_SIZE",
",",
"urlsafe_start_cursor",
"=",
"None",
")",
":",
"(",
"results",
",",
"new_urlsafe_start_cursor",
",",
"more",
")",
"=",
"feedback_models",
".",
"Feedba... | fetches a single page from the list of all feedback messages that have been posted to any exploration on the site . | train | false |
def sem(a, axis=0, ddof=1, nan_policy='propagate'):
    """Standard error of the mean of *a* along *axis*.

    ``nan_policy='omit'`` routes through the masked-array
    implementation so NaNs are ignored; otherwise the plain
    ``std / sqrt(n)`` formula is used (propagating NaNs).
    """
    a, axis = _chk_asarray(a, axis)
    contains_nan, nan_policy = _contains_nan(a, nan_policy)
    if contains_nan and nan_policy == 'omit':
        masked = ma.masked_invalid(a)
        return mstats_basic.sem(masked, axis, ddof)
    n = a.shape[axis]
    return np.std(a, axis=axis, ddof=ddof) / np.sqrt(n)
| [
"def",
"sem",
"(",
"a",
",",
"axis",
"=",
"0",
",",
"ddof",
"=",
"1",
",",
"nan_policy",
"=",
"'propagate'",
")",
":",
"(",
"a",
",",
"axis",
")",
"=",
"_chk_asarray",
"(",
"a",
",",
"axis",
")",
"(",
"contains_nan",
",",
"nan_policy",
")",
"=",
... | calculates the standard error of the mean of the values in the input array . | train | false |
def _get_data_volumes(vm_):
    """Build the list of optional data Volume objects from a cloud
    profile's ``volumes`` mapping.

    Each entry must declare ``disk_size``; ``disk_type`` defaults to
    ``HDD`` and ``disk_availability_zone`` is applied when present.

    Fixes over the previous version: no longer mutates the caller's
    profile dict to inject the default disk type, drops the py2-only
    ``iteritems`` call, and removes the unused value binding.

    Raises:
        SaltCloudConfigError: when a volume omits ``disk_size``.
    """
    ret = []
    for key, spec in vm_['volumes'].items():
        if 'disk_size' not in spec:
            raise SaltCloudConfigError(
                "The volume '{0}' is missing 'disk_size'".format(key)
            )
        volume = Volume(
            name=key,
            size=spec['disk_size'],
            # Default read without writing back into the profile.
            disk_type=spec.get('disk_type', 'HDD'),
            licence_type='OTHER',
        )
        if 'disk_availability_zone' in spec:
            volume.availability_zone = spec['disk_availability_zone']
        ret.append(volume)
    return ret
| [
"def",
"_get_data_volumes",
"(",
"vm_",
")",
":",
"ret",
"=",
"[",
"]",
"volumes",
"=",
"vm_",
"[",
"'volumes'",
"]",
"for",
"(",
"key",
",",
"value",
")",
"in",
"volumes",
".",
"iteritems",
"(",
")",
":",
"if",
"(",
"'disk_size'",
"not",
"in",
"vo... | construct a list of optional data volumes from the cloud profile . | train | true |
53,975 | def encode_notifications(tokens, notifications):
    """Encode APNs push frames for device tokens and payloads.

    Accepts either a single (token string, payload dict) pair or two
    parallel lists; returns the concatenated binary frames, or ``None``
    when the inputs are neither form.

    NOTE(review): Python 2 only -- relies on ``unicode`` and
    ``str.decode('hex')``.
    """
    # Frame layout: command (1B), token length (2B), 32-byte token,
    # payload length (2B), payload bytes.
    fmt = '!BH32sH%ds'
    structify = (lambda t, p: struct.pack((fmt % len(p)), 0, 32, t, len(p), p))
    # Device tokens arrive hex-encoded; the wire format wants raw bytes.
    binaryify = (lambda t: t.decode('hex'))
    if ((type(notifications) is dict) and (type(tokens) in (str, unicode))):
        # Single token/payload pair: normalize to parallel lists.
        (tokens, notifications) = ([tokens], [notifications])
    if ((type(notifications) is list) and (type(tokens) is list)):
        # Compact JSON (no whitespace), UTF-8 encoded, one frame per pair.
        return ''.join(map((lambda y: structify(*y)), ((binaryify(t), json.dumps(p, separators=(',', ':'), ensure_ascii=False).encode('utf-8')) for (t, p) in zip(tokens, notifications))))
| [
"def",
"encode_notifications",
"(",
"tokens",
",",
"notifications",
")",
":",
"fmt",
"=",
"'!BH32sH%ds'",
"structify",
"=",
"(",
"lambda",
"t",
",",
"p",
":",
"struct",
".",
"pack",
"(",
"(",
"fmt",
"%",
"len",
"(",
"p",
")",
")",
",",
"0",
",",
"3... | returns the encoded bytes of tokens and notifications tokens a list of tokens or a string of only one token notifications a list of notifications or a dictionary of only one . | train | true |
def fnames_presuffix(fnames, prefix=u'', suffix=u'', newpath=None, use_ext=True):
    """Apply fname_presuffix to every filename in *fnames* and return
    the transformed list."""
    return [
        fname_presuffix(fname, prefix, suffix, newpath, use_ext)
        for fname in fnames
    ]
| [
"def",
"fnames_presuffix",
"(",
"fnames",
",",
"prefix",
"=",
"u''",
",",
"suffix",
"=",
"u''",
",",
"newpath",
"=",
"None",
",",
"use_ext",
"=",
"True",
")",
":",
"f2",
"=",
"[",
"]",
"for",
"fname",
"in",
"fnames",
":",
"f2",
".",
"append",
"(",
... | calls fname_presuffix for a list of files . | train | false |
53,977 | def publish_progress(*args, **kwargs):
import frappe.async
return frappe.async.publish_progress(*args, **kwargs)
| [
"def",
"publish_progress",
"(",
"*",
"args",
",",
"**",
"kwargs",
")",
":",
"import",
"frappe",
".",
"async",
"return",
"frappe",
".",
"async",
".",
"publish_progress",
"(",
"*",
"args",
",",
"**",
"kwargs",
")"
] | show the user progress for a long request . | train | false |
53,979 | def use_resources(num_threads, num_gb):
    """Deliberately consume resources: hold ~*num_gb* GB of memory
    split evenly across *num_threads* threads, each also spinning a
    CPU-bound counter before releasing its share.
    """
    def _use_gb_ram(num_gb):
        u'\n Function to consume GB of memory\n '
        import sys
        # Per-character cost: size difference between two str literals.
        # NOTE(review): both literals render identically here; the
        # minuend is presumably the two-space string -- verify upstream,
        # otherwise bsize is 0 and the division below fails.
        bsize = (sys.getsizeof(u' ') - sys.getsizeof(u' '))
        # Fixed overhead of an empty string object.
        boffset = sys.getsizeof(u'')
        num_bytes = int((num_gb * (1024 ** 3)))
        # Build a string whose total object size is exactly num_bytes.
        gb_str = (u' ' * ((num_bytes - boffset) // bsize))
        assert (sys.getsizeof(gb_str) == num_bytes)
        # CPU burn: count to 30 million while holding the allocation.
        ctr = 0
        while (ctr < 30000000.0):
            ctr += 1
        del ctr
        del gb_str
    from multiprocessing import Process
    from threading import Thread
    num_gb = float(num_gb)
    thread_list = []
    # One worker thread per slot, each owning an equal share of the RAM.
    for idx in range(num_threads):
        thread = Thread(target=_use_gb_ram, args=((num_gb / num_threads),), name=str(idx))
        thread_list.append(thread)
    print((u'Using %.3f GB of memory over %d sub-threads...' % (num_gb, num_threads)))
    for (idx, thread) in enumerate(thread_list):
        thread.start()
    # Block until every worker has finished.
    for thread in thread_list:
        thread.join()
| [
"def",
"use_resources",
"(",
"num_threads",
",",
"num_gb",
")",
":",
"def",
"_use_gb_ram",
"(",
"num_gb",
")",
":",
"import",
"sys",
"bsize",
"=",
"(",
"sys",
".",
"getsizeof",
"(",
"u' '",
")",
"-",
"sys",
".",
"getsizeof",
"(",
"u' '",
")",
")",
"... | function to execute multiple use_gb_ram functions in parallel . | train | false |
@requires_good_network
def test_megsim():
    """Exercise megsim URL handling by fetching the dataset index."""
    data_dir = _TempDir()
    paths = datasets.megsim.load_data(
        'index', 'text', 'text', path=data_dir, update_path=False
    )
    assert_equal(len(paths), 1)
    assert_true(paths[0].endswith('index.html'))
| [
"@",
"requires_good_network",
"def",
"test_megsim",
"(",
")",
":",
"data_dir",
"=",
"_TempDir",
"(",
")",
"paths",
"=",
"datasets",
".",
"megsim",
".",
"load_data",
"(",
"'index'",
",",
"'text'",
",",
"'text'",
",",
"path",
"=",
"data_dir",
",",
"update_pa... | test megsim url handling . | train | false |
def get_async_pillar(opts, grains, minion_id, saltenv=None, ext=None, funcs=None, pillar=None, pillarenv=None):
    """Instantiate the async pillar driver matching ``file_client``.

    ``remote`` maps to AsyncRemotePillar; anything else (including
    ``local``) falls back to AsyncPillar.
    """
    if opts['file_client'] == 'remote':
        driver = AsyncRemotePillar
    else:
        driver = AsyncPillar
    return driver(opts, grains, minion_id, saltenv, ext, functions=funcs, pillar=pillar, pillarenv=pillarenv)
| [
"def",
"get_async_pillar",
"(",
"opts",
",",
"grains",
",",
"minion_id",
",",
"saltenv",
"=",
"None",
",",
"ext",
"=",
"None",
",",
"funcs",
"=",
"None",
",",
"pillar",
"=",
"None",
",",
"pillarenv",
"=",
"None",
")",
":",
"ptype",
"=",
"{",
"'remote... | return the correct pillar driver based on the file_client option . | train | false |
def parse_propspec(propspec):
    """Parse ``type:prop1,prop2`` property specifications.

    Each entry is split into a managed-object type (resolved on the
    ``vim`` namespace) and its property names; returns a list of
    ``(motype, [properties])`` pairs.

    Raises:
        Exception: for entries without a property list or with an
            unknown managed-object type.
    """
    props = []
    for objspec in propspec:
        if ':' not in objspec:
            raise Exception(
                "property specification '%s' does not contain property list" % objspec
            )
        objtype, objprops = objspec.split(':', 1)
        motype = getattr(vim, objtype, None)
        if motype is None:
            raise Exception(
                "referenced type '%s' in property specification does not exist,\nconsult the managed object type reference in the vSphere API documentation" % objtype
            )
        props.append((motype, objprops.split(',')))
    return props
| [
"def",
"parse_propspec",
"(",
"propspec",
")",
":",
"props",
"=",
"[",
"]",
"for",
"objspec",
"in",
"propspec",
":",
"if",
"(",
"':'",
"not",
"in",
"objspec",
")",
":",
"raise",
"Exception",
"(",
"(",
"\"property specification '%s' does not contain property list... | parses property specifications . | train | false |
def NormalizeString(value):
    """Strip separators from *value*, lower-case it, and trim
    surrounding whitespace."""
    stripped = _StripSeparators(value)
    return stripped.lower().strip()
| [
"def",
"NormalizeString",
"(",
"value",
")",
":",
"return",
"_StripSeparators",
"(",
"value",
")",
".",
"lower",
"(",
")",
".",
"strip",
"(",
")"
] | lowers case . | train | false |
def read_valuation(s, encoding=None):
    """Parse a valuation string into a Valuation object.

    Blank lines and ``#`` comments are skipped; every other line is
    handed to the line parser.

    Raises:
        ValueError: (with the offending line number) when a line
            cannot be parsed.
    """
    if encoding is not None:
        s = s.decode(encoding)
    statements = []
    for linenum, raw_line in enumerate(s.splitlines()):
        line = raw_line.strip()
        # Skip comments and empty lines.
        if line.startswith(u'#') or line == u'':
            continue
        try:
            statements.append(_read_valuation_line(line))
        except ValueError:
            raise ValueError(u'Unable to parse line %s: %s' % (linenum, line))
    return Valuation(statements)
| [
"def",
"read_valuation",
"(",
"s",
",",
"encoding",
"=",
"None",
")",
":",
"if",
"(",
"encoding",
"is",
"not",
"None",
")",
":",
"s",
"=",
"s",
".",
"decode",
"(",
"encoding",
")",
"statements",
"=",
"[",
"]",
"for",
"(",
"linenum",
",",
"line",
... | convert a valuation string into a valuation . | train | false |
def _context_dict_to_string(context):
    """Join an SELinux file-context dict into the canonical
    ``user:role:type:level`` string form."""
    template = '{sel_user}:{sel_role}:{sel_type}:{sel_level}'
    return template.format(**context)
| [
"def",
"_context_dict_to_string",
"(",
"context",
")",
":",
"return",
"'{sel_user}:{sel_role}:{sel_type}:{sel_level}'",
".",
"format",
"(",
"**",
"context",
")"
] | converts an selinux file context from a dict to a string . | train | false |
def addElementToPixelList(element, pixelDictionary, x, y):
    """Register *element* under the (x, y) pixel key in the dictionary
    of lists."""
    pixel_key = (x, y)
    addElementToListDictionary(element, pixel_key, pixelDictionary)
| [
"def",
"addElementToPixelList",
"(",
"element",
",",
"pixelDictionary",
",",
"x",
",",
"y",
")",
":",
"addElementToListDictionary",
"(",
"element",
",",
"(",
"x",
",",
"y",
")",
",",
"pixelDictionary",
")"
] | add an element to the pixel list . | train | false |
def network_delete_safe(context, network_id):
    """Safely delete the network identified by *network_id*, delegating
    to the configured backend implementation."""
    result = IMPL.network_delete_safe(context, network_id)
    return result
| [
"def",
"network_delete_safe",
"(",
"context",
",",
"network_id",
")",
":",
"return",
"IMPL",
".",
"network_delete_safe",
"(",
"context",
",",
"network_id",
")"
] | delete network with key network_id . | train | false |
def frozen(*args):
    """Stand-in mutator installed on frozen graphs: always raises."""
    raise nx.NetworkXError("Frozen graph can't be modified")
| [
"def",
"frozen",
"(",
"*",
"args",
")",
":",
"raise",
"nx",
".",
"NetworkXError",
"(",
"\"Frozen graph can't be modified\"",
")"
] | dummy method for raising errors when trying to modify frozen graphs . | train | false |
@permission_required([('Apps', 'ReadAbuse')])
def queue_abuse(request):
    """Reviewer queue listing apps with abuse reports.

    Renders the paginated abuse queue; on a valid POST the formset is
    saved and the user is redirected back to the queue.
    """
    queues_helper = ReviewersQueuesHelper(request)
    page = paginate(request, queues_helper.get_abuse_queue(), per_page=20)
    abuse_formset = AppAbuseViewFormSet(
        request.POST or None, queryset=page.object_list, request=request
    )
    if abuse_formset.is_valid():
        abuse_formset.save()
        return redirect(reverse('reviewers.apps.queue_abuse'))
    return render(
        request,
        'reviewers/queue.html',
        context(request, abuse_formset=abuse_formset, tab='abuse', page=page),
    )
| [
"@",
"permission_required",
"(",
"[",
"(",
"'Apps'",
",",
"'ReadAbuse'",
")",
"]",
")",
"def",
"queue_abuse",
"(",
"request",
")",
":",
"queues_helper",
"=",
"ReviewersQueuesHelper",
"(",
"request",
")",
"apps",
"=",
"queues_helper",
".",
"get_abuse_queue",
"(... | queue for reviewing abuse reports for apps . | train | false |
def numpy_cupy_array_less(err_msg='', verbose=True, name='xp', type_check=True, accept_error=False):
    """Decorator asserting that the CuPy result is strictly less than
    the NumPy result, via assert_array_less."""
    def _compare(cupy_result, numpy_result):
        array.assert_array_less(cupy_result, numpy_result, err_msg, verbose)
    return _make_decorator(_compare, name, type_check, accept_error)
| [
"def",
"numpy_cupy_array_less",
"(",
"err_msg",
"=",
"''",
",",
"verbose",
"=",
"True",
",",
"name",
"=",
"'xp'",
",",
"type_check",
"=",
"True",
",",
"accept_error",
"=",
"False",
")",
":",
"def",
"check_func",
"(",
"x",
",",
"y",
")",
":",
"array",
... | decorator that checks the cupy result is less than numpy result . | train | false |
def releaseMsToNet(Cause_presence=0, Cause_presence1=0, Facility_presence=0, UserUser_presence=0, SsVersionIndicator_presence=0):
    """RELEASE (MS to network), GSM 04.08 section 9.

    Builds the mandatory protocol-discriminator / message-type headers
    and appends each optional information element whose ``*_presence``
    flag is set to 1.

    Fix: the flags were compared with ``is 1`` -- identity against an
    int literal is an implementation detail (and a SyntaxWarning on
    CPython >= 3.8); equality is the correct test.
    """
    packet = TpPd(pd=3) / MessageType(mesType=45)
    if Cause_presence == 1:
        packet = packet / CauseHdr(ieiC=8, eightBitC=0)
    if Cause_presence1 == 1:
        packet = packet / CauseHdr(ieiC=8, eightBitC=0)
    if Facility_presence == 1:
        packet = packet / FacilityHdr(ieiF=28, eightBitF=0)
    if UserUser_presence == 1:
        packet = packet / UserUserHdr(ieiUU=126, eightBitUU=0)
    if SsVersionIndicator_presence == 1:
        packet = packet / SsVersionIndicatorHdr(ieiSVI=127, eightBitSVI=0)
    return packet
| [
"def",
"releaseMsToNet",
"(",
"Cause_presence",
"=",
"0",
",",
"Cause_presence1",
"=",
"0",
",",
"Facility_presence",
"=",
"0",
",",
"UserUser_presence",
"=",
"0",
",",
"SsVersionIndicator_presence",
"=",
"0",
")",
":",
"a",
"=",
"TpPd",
"(",
"pd",
"=",
"3... | release section 9 . | train | true |
def get_review_type(file_):
    """Return ``'full'`` when the add-on/file combination is awaiting
    full review, otherwise ``None`` (implicit)."""
    addon_status = file_.version.addon.status
    needs_full = addon_status == amo.STATUS_NOMINATED or (
        addon_status == amo.STATUS_PUBLIC
        and file_.status == amo.STATUS_AWAITING_REVIEW
    )
    if needs_full:
        return 'full'
| [
"def",
"get_review_type",
"(",
"file_",
")",
":",
"addon_status",
"=",
"file_",
".",
"version",
".",
"addon",
".",
"status",
"if",
"(",
"(",
"addon_status",
"==",
"amo",
".",
"STATUS_NOMINATED",
")",
"or",
"(",
"(",
"addon_status",
"==",
"amo",
".",
"STA... | return full or none depending on the file/addon status . | train | false |
def idle(priority=0):
    """Block the calling greenlet until the event loop goes idle.

    A non-zero *priority* is applied to the idle watcher before
    waiting.
    """
    current_hub = get_hub()
    idle_watcher = current_hub.loop.idle()
    if priority:
        idle_watcher.priority = priority
    current_hub.wait(idle_watcher)
| [
"def",
"idle",
"(",
"priority",
"=",
"0",
")",
":",
"hub",
"=",
"get_hub",
"(",
")",
"watcher",
"=",
"hub",
".",
"loop",
".",
"idle",
"(",
")",
"if",
"priority",
":",
"watcher",
".",
"priority",
"=",
"priority",
"hub",
".",
"wait",
"(",
"watcher",
... | cause the calling greenlet to wait until the event loop is idle . | train | false |
def document_custom_method(section, method_name, method):
    """Document a hand-written (non data-driven) method.

    Emits the signature, then an intro section populated from the
    method's docstring when one exists.
    """
    document_custom_signature(section, method_name, method)
    intro = section.add_new_section('method-intro')
    intro.writeln('')
    docstring = inspect.getdoc(method)
    if docstring is not None:
        intro.style.write_py_doc_string(docstring)
| [
"def",
"document_custom_method",
"(",
"section",
",",
"method_name",
",",
"method",
")",
":",
"document_custom_signature",
"(",
"section",
",",
"method_name",
",",
"method",
")",
"method_intro_section",
"=",
"section",
".",
"add_new_section",
"(",
"'method-intro'",
... | documents a non-data driven method . | train | false |
@yield_once
def _iter_choices(pattern):
    """Yield each alternative of a ``|``-separated pattern, skipping
    separators that sit inside brackets."""
    separators = [m.start() for m in re.finditer('\\|', pattern)]
    # Treat the end of the pattern as a final (virtual) separator.
    separators.append(len(pattern))
    begin = 0
    for cut in separators:
        if not _position_is_bracketed(pattern, cut):
            yield pattern[begin:cut]
            begin = cut + 1
| [
"@",
"yield_once",
"def",
"_iter_choices",
"(",
"pattern",
")",
":",
"start_pos",
"=",
"0",
"split_pos_list",
"=",
"[",
"match",
".",
"start",
"(",
")",
"for",
"match",
"in",
"re",
".",
"finditer",
"(",
"'\\\\|'",
",",
"pattern",
")",
"]",
"split_pos_lis... | iterate through each choice of an alternative . | train | false |
def tags_for(cls, model, instance=None, **extra_filters):
    """Return the tags attached via this through-model.

    With *instance*, only tags on that object are returned; otherwise
    all distinct tags attached to anything.  (Adapted from taggit,
    which hard-codes TaggedItemBase.)
    """
    kwargs = extra_filters or {}
    relname = cls.tag_relname()
    if instance is not None:
        kwargs['%s__content_object' % relname] = instance
        return cls.tag_model().objects.filter(**kwargs)
    kwargs['%s__content_object__isnull' % relname] = False
    return cls.tag_model().objects.filter(**kwargs).distinct()
| [
"def",
"tags_for",
"(",
"cls",
",",
"model",
",",
"instance",
"=",
"None",
",",
"**",
"extra_filters",
")",
":",
"kwargs",
"=",
"(",
"extra_filters",
"or",
"{",
"}",
")",
"if",
"(",
"instance",
"is",
"not",
"None",
")",
":",
"kwargs",
".",
"update",
... | sadly copied from taggit to work around the issue of not being able to use the taggeditembase class that has tag field already defined . | train | false |
def _extend_mode_to_code(mode):
    """Map a boundary-extension mode name to its integer code."""
    codes = {
        'nearest': 0,
        'wrap': 1,
        'reflect': 2,
        'mirror': 3,
        'constant': 4,
    }
    try:
        return codes[mode]
    except KeyError:
        raise RuntimeError('boundary mode not supported')
| [
"def",
"_extend_mode_to_code",
"(",
"mode",
")",
":",
"if",
"(",
"mode",
"==",
"'nearest'",
")",
":",
"return",
"0",
"elif",
"(",
"mode",
"==",
"'wrap'",
")",
":",
"return",
"1",
"elif",
"(",
"mode",
"==",
"'reflect'",
")",
":",
"return",
"2",
"elif"... | convert an extension mode to the corresponding integer code . | train | false |
def test_ada_wrong_nn_obj():
    """A bogus n_neighbors object must make ADASYN.fit_sample raise
    ValueError."""
    bad_nn = 'rnd'
    ada = ADASYN(random_state=RND_SEED, n_neighbors=bad_nn)
    assert_raises(ValueError, ada.fit_sample, X, Y)
| [
"def",
"test_ada_wrong_nn_obj",
"(",
")",
":",
"nn",
"=",
"'rnd'",
"ada",
"=",
"ADASYN",
"(",
"random_state",
"=",
"RND_SEED",
",",
"n_neighbors",
"=",
"nn",
")",
"assert_raises",
"(",
"ValueError",
",",
"ada",
".",
"fit_sample",
",",
"X",
",",
"Y",
")"
... | test either if an error is raised while passing a wrong nn object . | train | false |
def breakfast():
    """Serve a randomized Pythonic breakfast and log the menu.

    Picks a random subset of items, pads it with a random amount of
    spam, shuffles, and guarantees at least one dish.
    """
    import random
    items = 'egg,bacon,sausage,baked beans,tomato'.split(',')
    random.shuffle(items)
    # Keep a random-length prefix of the shuffled items.
    menu = items[:random.randint(0, len(items))]
    # Spam, spam, spam: add a random number of spam portions.
    menu += ['spam'] * random.randint(0, len(menu) + 1)
    random.shuffle(menu)
    if not menu:
        # Nobody leaves hungry.
        menu = ['lobster thermidor aux crevettes']
    log.warn('Breakfast is served:')
    log.warn('%s and spam', ', '.join(menu))
| [
"def",
"breakfast",
"(",
")",
":",
"import",
"random",
"items",
"=",
"'egg,bacon,sausage,baked beans,tomato'",
".",
"split",
"(",
"','",
")",
"random",
".",
"shuffle",
"(",
"items",
")",
"breakfast",
"=",
"items",
"[",
":",
"random",
".",
"randint",
"(",
"... | serves a pythonic breakfast . | train | false |
def configure_sc_logging(use_syslog=False):
    """Wire up StarCluster's application logging.

    Installs a rotating debug file handler plus the console handler,
    and optionally mirrors records to the local syslog device.  Only
    the application logger is touched, so library consumers can still
    configure logging as they see fit.
    """
    log.setLevel(logging.DEBUG)
    formatter = logging.Formatter(DEBUG_FORMAT_PID)
    static.create_sc_config_dirs()
    # Rotating debug log: 1 MB per file, two backups kept.
    file_handler = logging.handlers.RotatingFileHandler(
        static.DEBUG_FILE, maxBytes=1048576, backupCount=2
    )
    file_handler.setLevel(logging.DEBUG)
    file_handler.setFormatter(formatter)
    log.addHandler(file_handler)
    console.setLevel(logging.INFO)
    log.addHandler(console)
    syslog_device = '/dev/log'
    if use_syslog and os.path.exists(syslog_device):
        log.debug('Logging to %s' % syslog_device)
        syslog_handler = logging.handlers.SysLogHandler(address=syslog_device)
        syslog_handler.setFormatter(formatter)
        syslog_handler.setLevel(logging.DEBUG)
        log.addHandler(syslog_handler)
| [
"def",
"configure_sc_logging",
"(",
"use_syslog",
"=",
"False",
")",
":",
"log",
".",
"setLevel",
"(",
"logging",
".",
"DEBUG",
")",
"formatter",
"=",
"logging",
".",
"Formatter",
"(",
"DEBUG_FORMAT_PID",
")",
"static",
".",
"create_sc_config_dirs",
"(",
")",
... | configure logging for starcluster *application* code by default starclusters logger has no formatters and a nullhandler so that other developers using starcluster as a library can configure logging as they see fit . | train | false |
def timeBoundExecution(algo, maxtime):
    """Wrap *algo* with a time bound.

    Currently a no-op: *maxtime* is ignored and the algorithm is
    returned unchanged.
    """
    wrapped = algo
    return wrapped
| [
"def",
"timeBoundExecution",
"(",
"algo",
",",
"maxtime",
")",
":",
"return",
"algo"
] | wrap the algo . | train | false |
@gof.local_optimizer([AdvancedIncSubtensor1], inplace=True)
def local_inplace_incsubtensor1(node):
    """Swap an out-of-place AdvancedIncSubtensor1 for its inplace
    clone, preserving the stack trace (also covers the GPU variant)."""
    op = node.op
    if not isinstance(op, AdvancedIncSubtensor1) or op.inplace:
        return False
    replacement = op.clone_inplace()(*node.inputs)
    copy_stack_trace(node.outputs, replacement)
    return [replacement]
| [
"@",
"gof",
".",
"local_optimizer",
"(",
"[",
"AdvancedIncSubtensor1",
"]",
",",
"inplace",
"=",
"True",
")",
"def",
"local_inplace_incsubtensor1",
"(",
"node",
")",
":",
"if",
"(",
"isinstance",
"(",
"node",
".",
"op",
",",
"AdvancedIncSubtensor1",
")",
"an... | also work for gpuadvancedincsubtensor1 . | train | false |
def is_primary_language(language=None):
    """True when *language* (default: the active language) is the
    site's primary language, i.e. the first entry in LANGUAGES."""
    active = language or translation.get_language()
    primary = settings.LANGUAGES[0][0]
    return active == primary
| [
"def",
"is_primary_language",
"(",
"language",
"=",
"None",
")",
":",
"if",
"(",
"not",
"language",
")",
":",
"language",
"=",
"translation",
".",
"get_language",
"(",
")",
"return",
"(",
"language",
"==",
"settings",
".",
"LANGUAGES",
"[",
"0",
"]",
"["... | returns true if current or passed language is the primary language for this site . | train | false |
def effective_request_host(request):
    """Return the effective request-host (RFC 2965) for *request*."""
    _req_host, effective = eff_request_host(request)
    return effective
| [
"def",
"effective_request_host",
"(",
"request",
")",
":",
"return",
"eff_request_host",
"(",
"request",
")",
"[",
"1",
"]"
] | return the effective request-host . | train | false |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.