id_within_dataset
int64
1
55.5k
snippet
stringlengths
19
14.2k
tokens
listlengths
6
1.63k
nl
stringlengths
6
352
split_within_dataset
stringclasses
1 value
is_duplicated
bool
2 classes
39,171
def vms(nictag): ret = {} nictagadm = _check_nictagadm() cmd = '{nictagadm} vms {nictag}'.format(nictagadm=nictagadm, nictag=nictag) res = __salt__['cmd.run_all'](cmd) retcode = res['retcode'] if (retcode != 0): ret['Error'] = (res['stderr'] if ('stderr' in res) else 'Failed to get list of vms.') else: ret = res['stdout'].splitlines() return ret
[ "def", "vms", "(", "nictag", ")", ":", "ret", "=", "{", "}", "nictagadm", "=", "_check_nictagadm", "(", ")", "cmd", "=", "'{nictagadm} vms {nictag}'", ".", "format", "(", "nictagadm", "=", "nictagadm", ",", "nictag", "=", "nictag", ")", "res", "=", "__sa...
list all vms connect to nictag nictag : string name of nictag cli example: .
train
true
39,172
def ilshift(a, b): a <<= b return a
[ "def", "ilshift", "(", "a", ",", "b", ")", ":", "a", "<<=", "b", "return", "a" ]
same as a <<= b .
train
false
39,173
def ipv6(value, options=None): return _ipv_filter(value, 6, options=options)
[ "def", "ipv6", "(", "value", ",", "options", "=", "None", ")", ":", "return", "_ipv_filter", "(", "value", ",", "6", ",", "options", "=", "options", ")" ]
filters a list and returns ipv6 values only .
train
false
39,174
def stSpectralFlux(X, Xprev): sumX = numpy.sum((X + eps)) sumPrevX = numpy.sum((Xprev + eps)) F = numpy.sum((((X / sumX) - (Xprev / sumPrevX)) ** 2)) return F
[ "def", "stSpectralFlux", "(", "X", ",", "Xprev", ")", ":", "sumX", "=", "numpy", ".", "sum", "(", "(", "X", "+", "eps", ")", ")", "sumPrevX", "=", "numpy", ".", "sum", "(", "(", "Xprev", "+", "eps", ")", ")", "F", "=", "numpy", ".", "sum", "(...
computes the spectral flux feature of the current frame arguments: x: the abs of the current frame xpre: the abs of the previous frame .
train
true
39,175
def _choose_tagged_tests(tests, tags): selected = [] tags = set(tags) for test in _flatten_suite(tests): assert isinstance(test, unittest.TestCase) func = getattr(test, test._testMethodName) try: func = func.im_func except AttributeError: pass try: if (func.tags & tags): selected.append(test) except AttributeError: pass return unittest.TestSuite(selected)
[ "def", "_choose_tagged_tests", "(", "tests", ",", "tags", ")", ":", "selected", "=", "[", "]", "tags", "=", "set", "(", "tags", ")", "for", "test", "in", "_flatten_suite", "(", "tests", ")", ":", "assert", "isinstance", "(", "test", ",", "unittest", "....
select tests that are tagged with at least one of the given tags .
train
false
39,176
def _clientJobsDB(): return cjdao.ClientJobsDAO.get()
[ "def", "_clientJobsDB", "(", ")", ":", "return", "cjdao", ".", "ClientJobsDAO", ".", "get", "(", ")" ]
returns: the shared cjdao .
train
false
39,177
def wait_for_stack_status(stack_id, target_status, aws_config): def predicate(): stack_report = get_stack_report(stack_id, aws_config) current_status = stack_report['StackStatus'] Message.log(function='wait_for_stack_status', stack_id=stack_id, target_status=target_status, current_status=current_status) if (current_status == target_status): return stack_report return loop_until(reactor, predicate, repeat(10, 120))
[ "def", "wait_for_stack_status", "(", "stack_id", ",", "target_status", ",", "aws_config", ")", ":", "def", "predicate", "(", ")", ":", "stack_report", "=", "get_stack_report", "(", "stack_id", ",", "aws_config", ")", "current_status", "=", "stack_report", "[", "...
poll the status of a cloudformation stack .
train
false
39,179
@snippet def client_list_jobs(client, _): def do_something_with(_): pass job_iterator = client.list_jobs() for job in job_iterator: do_something_with(job)
[ "@", "snippet", "def", "client_list_jobs", "(", "client", ",", "_", ")", ":", "def", "do_something_with", "(", "_", ")", ":", "pass", "job_iterator", "=", "client", ".", "list_jobs", "(", ")", "for", "job", "in", "job_iterator", ":", "do_something_with", "...
list jobs for a project .
train
false
39,180
def _get_subproject_base(subproject): entrypoint = _get_installed_entrypoint(subproject) return entrypoint.module_name.split('.')[0]
[ "def", "_get_subproject_base", "(", "subproject", ")", ":", "entrypoint", "=", "_get_installed_entrypoint", "(", "subproject", ")", "return", "entrypoint", ".", "module_name", ".", "split", "(", "'.'", ")", "[", "0", "]" ]
get the import base name for the installed subproject .
train
false
39,181
def check_user_support(user_obj): if (not is_authenticated(user_obj)): if (settings.ANONYMOUS_USER_NAME is None): return (False, user_obj) User = get_user_model() lookup = {User.USERNAME_FIELD: settings.ANONYMOUS_USER_NAME} user_obj = User.objects.get(**lookup) return (True, user_obj)
[ "def", "check_user_support", "(", "user_obj", ")", ":", "if", "(", "not", "is_authenticated", "(", "user_obj", ")", ")", ":", "if", "(", "settings", ".", "ANONYMOUS_USER_NAME", "is", "None", ")", ":", "return", "(", "False", ",", "user_obj", ")", "User", ...
returns a tuple of checkresult and user_obj which should be used for permission checks checks if the given user is supported .
train
false
39,182
def mock_open(mock=None, read_data=''): def _readlines_side_effect(*args, **kwargs): if (handle.readlines.return_value is not None): return handle.readlines.return_value return list(_data) def _read_side_effect(*args, **kwargs): if (handle.read.return_value is not None): return handle.read.return_value return ''.join(_data) def _readline_side_effect(): if (handle.readline.return_value is not None): while True: (yield handle.readline.return_value) for line in _data: (yield line) global file_spec if (file_spec is None): import _io file_spec = list(set(dir(_io.TextIOWrapper)).union(set(dir(_io.BytesIO)))) if (mock is None): mock = MagicMock(name='open', spec=open) handle = MagicMock(spec=file_spec) handle.__enter__.return_value = handle _data = _iterate_read_data(read_data) handle.write.return_value = None handle.read.return_value = None handle.readline.return_value = None handle.readlines.return_value = None handle.read.side_effect = _read_side_effect handle.readline.side_effect = _readline_side_effect() handle.readlines.side_effect = _readlines_side_effect mock.return_value = handle return mock
[ "def", "mock_open", "(", "mock", "=", "None", ",", "read_data", "=", "''", ")", ":", "def", "_readlines_side_effect", "(", "*", "args", ",", "**", "kwargs", ")", ":", "if", "(", "handle", ".", "readlines", ".", "return_value", "is", "not", "None", ")",...
a helper function to create a mock to replace the use of open .
train
true
39,184
def kivy_configure(): for callback in __kivy_post_configuration: callback()
[ "def", "kivy_configure", "(", ")", ":", "for", "callback", "in", "__kivy_post_configuration", ":", "callback", "(", ")" ]
call post-configuration of kivy .
train
false
39,185
def safe_get_user_model(): (user_app, user_model) = settings.AUTH_USER_MODEL.split('.') return apps.get_registered_model(user_app, user_model)
[ "def", "safe_get_user_model", "(", ")", ":", "(", "user_app", ",", "user_model", ")", "=", "settings", ".", "AUTH_USER_MODEL", ".", "split", "(", "'.'", ")", "return", "apps", ".", "get_registered_model", "(", "user_app", ",", "user_model", ")" ]
safe loading of the user model .
train
true
39,186
def post_structure(entry, site): author = entry.authors.all()[0] return {'title': entry.title, 'description': six.text_type(entry.html_content), 'link': ('%s://%s%s' % (PROTOCOL, site.domain, entry.get_absolute_url())), 'permaLink': ('%s://%s%s' % (PROTOCOL, site.domain, entry.get_absolute_url())), 'categories': [cat.title for cat in entry.categories.all()], 'dateCreated': DateTime(entry.creation_date.isoformat()), 'postid': entry.pk, 'userid': author.get_username(), 'mt_excerpt': entry.excerpt, 'mt_allow_comments': int(entry.comment_enabled), 'mt_allow_pings': (int(entry.pingback_enabled) or int(entry.trackback_enabled)), 'mt_keywords': entry.tags, 'wp_author': author.get_username(), 'wp_author_id': author.pk, 'wp_author_display_name': author.__str__(), 'wp_password': entry.password, 'wp_slug': entry.slug, 'sticky': entry.featured}
[ "def", "post_structure", "(", "entry", ",", "site", ")", ":", "author", "=", "entry", ".", "authors", ".", "all", "(", ")", "[", "0", "]", "return", "{", "'title'", ":", "entry", ".", "title", ",", "'description'", ":", "six", ".", "text_type", "(", ...
a post structure with extensions .
train
true
39,188
def _composed(*decorators): def final_decorator(f): for d in decorators: if (not hasattr(d, '__code__')): setattr(d, '__code__', Fakecode()) f = d(f) return f return final_decorator
[ "def", "_composed", "(", "*", "decorators", ")", ":", "def", "final_decorator", "(", "f", ")", ":", "for", "d", "in", "decorators", ":", "if", "(", "not", "hasattr", "(", "d", ",", "'__code__'", ")", ")", ":", "setattr", "(", "d", ",", "'__code__'", ...
takes a list of decorators and returns a single decorator .
train
false
39,190
@safe_filter(error_output=u'auto') @register.filter def background_margin(file_, geometry_string): if ((not file_) or sorl_settings.THUMBNAIL_DUMMY): return u'auto' margin = [0, 0] image_file = default.kvstore.get_or_set(ImageFile(file_)) (x, y) = parse_geometry(geometry_string, image_file.ratio) ex = (x - image_file.x) margin[0] = (ex / 2) ey = (y - image_file.y) margin[1] = (ey / 2) return u' '.join([(u'%spx' % n) for n in margin])
[ "@", "safe_filter", "(", "error_output", "=", "u'auto'", ")", "@", "register", ".", "filter", "def", "background_margin", "(", "file_", ",", "geometry_string", ")", ":", "if", "(", "(", "not", "file_", ")", "or", "sorl_settings", ".", "THUMBNAIL_DUMMY", ")",...
returns the calculated margin for a background image and geometry .
train
false
39,191
def upretty(expr): return xpretty(expr, use_unicode=True, wrap_line=False)
[ "def", "upretty", "(", "expr", ")", ":", "return", "xpretty", "(", "expr", ",", "use_unicode", "=", "True", ",", "wrap_line", "=", "False", ")" ]
unicode pretty-printing .
train
false
39,192
def _FilterExcludedFiles(filenames): exclude_paths = [os.path.abspath(f) for f in _excludes] return [f for f in filenames if (os.path.abspath(f) not in exclude_paths)]
[ "def", "_FilterExcludedFiles", "(", "filenames", ")", ":", "exclude_paths", "=", "[", "os", ".", "path", ".", "abspath", "(", "f", ")", "for", "f", "in", "_excludes", "]", "return", "[", "f", "for", "f", "in", "filenames", "if", "(", "os", ".", "path...
filters out files listed in the --exclude command line switch .
train
true
39,194
def sdm_sort(f, O): return sorted(f, key=(lambda term: O(term[0])), reverse=True)
[ "def", "sdm_sort", "(", "f", ",", "O", ")", ":", "return", "sorted", "(", "f", ",", "key", "=", "(", "lambda", "term", ":", "O", "(", "term", "[", "0", "]", ")", ")", ",", "reverse", "=", "True", ")" ]
sort terms in f using the given monomial order o .
train
false
39,196
def p_item_expr(p): p[0] = ('', p[1])
[ "def", "p_item_expr", "(", "p", ")", ":", "p", "[", "0", "]", "=", "(", "''", ",", "p", "[", "1", "]", ")" ]
pitem : expr .
train
false
39,197
def _psrdp(cmd): rdp = '$RDP = Get-WmiObject -Class Win32_TerminalServiceSetting -Namespace root\\CIMV2\\TerminalServices -Computer . -Authentication 6 -ErrorAction Stop' return __salt__['cmd.run']('{0} ; {1}'.format(rdp, cmd), shell='powershell', python_shell=True)
[ "def", "_psrdp", "(", "cmd", ")", ":", "rdp", "=", "'$RDP = Get-WmiObject -Class Win32_TerminalServiceSetting -Namespace root\\\\CIMV2\\\\TerminalServices -Computer . -Authentication 6 -ErrorAction Stop'", "return", "__salt__", "[", "'cmd.run'", "]", "(", "'{0} ; {1}'", ".", "forma...
create a win32_terminalservicesetting wmi object as $rdp and execute the command cmd returns the stdout of the command .
train
true
39,198
def test_flattener_layer_state_separation_for_conv(): conv1 = ConvElemwise(8, [2, 2], 'sf1', SigmoidConvNonlinearity(), 0.1) conv2 = ConvElemwise(8, [2, 2], 'sf2', SigmoidConvNonlinearity(), 0.1) mlp = MLP(layers=[FlattenerLayer(CompositeLayer('comp', [conv1, conv2]))], input_space=Conv2DSpace(shape=[5, 5], num_channels=2)) topo_view = np.random.rand(10, 5, 5, 2).astype(theano.config.floatX) y = np.random.rand(10, 256).astype(theano.config.floatX) dataset = DenseDesignMatrix(topo_view=topo_view, y=y) train = Train(dataset, mlp, SGD(0.1, batch_size=5, monitoring_dataset=dataset)) train.algorithm.termination_criterion = EpochCounter(1) train.main_loop()
[ "def", "test_flattener_layer_state_separation_for_conv", "(", ")", ":", "conv1", "=", "ConvElemwise", "(", "8", ",", "[", "2", ",", "2", "]", ",", "'sf1'", ",", "SigmoidConvNonlinearity", "(", ")", ",", "0.1", ")", "conv2", "=", "ConvElemwise", "(", "8", "...
creates a compositelayer wrapping two conv layers and ensures that state gets correctly picked apart .
train
false
39,199
def combine_stains(stains, conv_matrix): from ..exposure import rescale_intensity stains = dtype.img_as_float(stains) logrgb2 = np.dot((- np.reshape(stains, ((-1), 3))), conv_matrix) rgb2 = np.exp(logrgb2) return rescale_intensity(np.reshape((rgb2 - 2), stains.shape), in_range=((-1), 1))
[ "def", "combine_stains", "(", "stains", ",", "conv_matrix", ")", ":", "from", ".", ".", "exposure", "import", "rescale_intensity", "stains", "=", "dtype", ".", "img_as_float", "(", "stains", ")", "logrgb2", "=", "np", ".", "dot", "(", "(", "-", "np", "."...
stain to rgb color space conversion .
train
false
39,200
def getBooleanFromDictionary(dictionary, key): return getBooleanFromDictionaryDefault(True, dictionary, key)
[ "def", "getBooleanFromDictionary", "(", "dictionary", ",", "key", ")", ":", "return", "getBooleanFromDictionaryDefault", "(", "True", ",", "dictionary", ",", "key", ")" ]
get boolean from the dictionary and key .
train
false
39,201
def get_scalar_constant_value(v): if (('sparse' in globals()) and isinstance(v.type, sparse.SparseType)): if ((v.owner is not None) and isinstance(v.owner.op, sparse.CSM)): data = v.owner.inputs[0] return tensor.get_scalar_constant_value(data) return tensor.get_scalar_constant_value(v)
[ "def", "get_scalar_constant_value", "(", "v", ")", ":", "if", "(", "(", "'sparse'", "in", "globals", "(", ")", ")", "and", "isinstance", "(", "v", ".", "type", ",", "sparse", ".", "SparseType", ")", ")", ":", "if", "(", "(", "v", ".", "owner", "is"...
return the constant scalar value underlying variable v if v is the output of dimshuffles .
train
false
39,202
@snippet def client_run_sync_query_timeout(client, _): TIMEOUT_MS = 10 all_rows = [] def do_something_with(rows): all_rows.extend(rows) query = client.run_sync_query(QUERY) query.timeout_ms = TIMEOUT_MS query.use_query_cache = False query.run() assert (not query.complete) job = query.job job.reload() retry_count = 0 while ((retry_count < 10) and (job.state != u'DONE')): time.sleep((1.5 ** retry_count)) retry_count += 1 job.reload() assert (job.state == u'DONE') (rows, total_count, token) = query.fetch_data() while True: do_something_with(rows) if (token is None): break (rows, total_count, token) = query.fetch_data(page_token=token) assert (len(all_rows) == total_count)
[ "@", "snippet", "def", "client_run_sync_query_timeout", "(", "client", ",", "_", ")", ":", "TIMEOUT_MS", "=", "10", "all_rows", "=", "[", "]", "def", "do_something_with", "(", "rows", ")", ":", "all_rows", ".", "extend", "(", "rows", ")", "query", "=", "...
run a synchronous query w/ timeout .
train
false
39,204
def attachment_specs_update_or_create(context, attachment_id, specs): return IMPL.attachment_specs_update_or_create(context, attachment_id, specs)
[ "def", "attachment_specs_update_or_create", "(", "context", ",", "attachment_id", ",", "specs", ")", ":", "return", "IMPL", ".", "attachment_specs_update_or_create", "(", "context", ",", "attachment_id", ",", "specs", ")" ]
create or update attachment specs .
train
false
39,205
def rs_atanh(p, x, prec): if rs_is_puiseux(p, x): return rs_puiseux(rs_atanh, p, x, prec) R = p.ring const = 0 if _has_constant_term(p, x): zm = R.zero_monom c = p[zm] if (R.domain is EX): c_expr = c.as_expr() const = atanh(c_expr) elif isinstance(c, PolyElement): try: c_expr = c.as_expr() const = R(atanh(c_expr)) except ValueError: raise DomainError("The given series can't be expanded in this domain.") else: try: const = R(atanh(c)) except ValueError: raise DomainError("The given series can't be expanded in this domain.") dp = rs_diff(p, x) p1 = ((- rs_square(p, x, prec)) + 1) p1 = rs_series_inversion(p1, x, (prec - 1)) p1 = rs_mul(dp, p1, x, (prec - 1)) return (rs_integrate(p1, x) + const)
[ "def", "rs_atanh", "(", "p", ",", "x", ",", "prec", ")", ":", "if", "rs_is_puiseux", "(", "p", ",", "x", ")", ":", "return", "rs_puiseux", "(", "rs_atanh", ",", "p", ",", "x", ",", "prec", ")", "R", "=", "p", ".", "ring", "const", "=", "0", "...
hyperbolic arctangent of a series return the series expansion of the atanh of p .
train
false
39,207
def AddFieldsToDocumentPb(doc_id, fields, document): if (doc_id is not None): document.set_id(doc_id) for field_tuple in fields: name = field_tuple[0] value = field_tuple[1] field = document.add_field() field.set_name(name) field_value = field.mutable_value() field_value.set_string_value(value) if (len(field_tuple) > 2): field_value.set_type(field_tuple[2])
[ "def", "AddFieldsToDocumentPb", "(", "doc_id", ",", "fields", ",", "document", ")", ":", "if", "(", "doc_id", "is", "not", "None", ")", ":", "document", ".", "set_id", "(", "doc_id", ")", "for", "field_tuple", "in", "fields", ":", "name", "=", "field_tup...
add the id and fields to document .
train
false
39,208
def _subs_tree(cls, tvars=None, args=None): if (cls.__origin__ is None): return cls current = cls.__origin__ orig_chain = [] while (current.__origin__ is not None): orig_chain.append(current) current = current.__origin__ tree_args = [] for arg in cls.__args__: tree_args.append(_replace_arg(arg, tvars, args)) for ocls in orig_chain: new_tree_args = [] for (i, arg) in enumerate(ocls.__args__): new_tree_args.append(_replace_arg(arg, ocls.__parameters__, tree_args)) tree_args = new_tree_args return tree_args
[ "def", "_subs_tree", "(", "cls", ",", "tvars", "=", "None", ",", "args", "=", "None", ")", ":", "if", "(", "cls", ".", "__origin__", "is", "None", ")", ":", "return", "cls", "current", "=", "cls", ".", "__origin__", "orig_chain", "=", "[", "]", "wh...
an internal helper function: calculate substitution tree for generic cls after replacing its type parameters with substitutions in tvars -> args .
train
true
39,209
def _buildTestClasses(_locals): for x in getModule('twisted').walkModules(): ignoredModules = ['twisted.test.reflect_helper', 'twisted.internet.test.process_', 'twisted.test.process_'] isIgnored = [x.name.startswith(ignored) for ignored in ignoredModules] if (True in isIgnored): continue class Test(NewStyleOnly, unittest.TestCase, ): '\n @see: L{NewStyleOnly}\n ' module = x.name acceptableName = x.name.replace('.', '_') Test.__name__ = acceptableName if hasattr(Test, '__qualname__'): Test.__qualname__ = acceptableName _locals.update({acceptableName: Test})
[ "def", "_buildTestClasses", "(", "_locals", ")", ":", "for", "x", "in", "getModule", "(", "'twisted'", ")", ".", "walkModules", "(", ")", ":", "ignoredModules", "=", "[", "'twisted.test.reflect_helper'", ",", "'twisted.internet.test.process_'", ",", "'twisted.test.p...
build the test classes that use l{newstyleonly} .
train
false
39,210
def sentinel_get_master_ip(master, host=None, port=None, password=None): server = _sconnect(host, port, password) ret = server.sentinel_get_master_addr_by_name(master) return dict(list(zip(('master_host', 'master_port'), ret)))
[ "def", "sentinel_get_master_ip", "(", "master", ",", "host", "=", "None", ",", "port", "=", "None", ",", "password", "=", "None", ")", ":", "server", "=", "_sconnect", "(", "host", ",", "port", ",", "password", ")", "ret", "=", "server", ".", "sentinel...
get ip for sentinel master .
train
true
39,211
def country_unalias(country): if (type(country) in [str, unicode]): return COUNTRY_ALIASES.get(country, country) if hasattr(country, '__iter__'): return [country_unalias(country_elt) for country_elt in country] return country
[ "def", "country_unalias", "(", "country", ")", ":", "if", "(", "type", "(", "country", ")", "in", "[", "str", ",", "unicode", "]", ")", ":", "return", "COUNTRY_ALIASES", ".", "get", "(", "country", ",", "country", ")", "if", "hasattr", "(", "country", ...
takes either a country code and returns either a country code or a list of country codes .
train
false
39,213
@shared_task(bind=True, base=DebugBasketTask, default_retry_delay=BASKET_TASK_RETRY_DELAY, max_retries=BASKET_TASK_MAX_RETRIES) def lookup_user_task(self, email): result = {} try: result = basket.lookup_user(email=email) except MaxRetriesExceededError as exc: raise exc except basket.BasketException as exc: if (not (exc[0] == u'User not found')): raise self.retry(exc=exc) result = exc.result return result
[ "@", "shared_task", "(", "bind", "=", "True", ",", "base", "=", "DebugBasketTask", ",", "default_retry_delay", "=", "BASKET_TASK_RETRY_DELAY", ",", "max_retries", "=", "BASKET_TASK_MAX_RETRIES", ")", "def", "lookup_user_task", "(", "self", ",", "email", ")", ":", ...
task responsible for getting information about a user in basket .
train
false
39,215
def xl_range_abs(first_row, first_col, last_row, last_col): range1 = xl_rowcol_to_cell(first_row, first_col, True, True) range2 = xl_rowcol_to_cell(last_row, last_col, True, True) return ((range1 + ':') + range2)
[ "def", "xl_range_abs", "(", "first_row", ",", "first_col", ",", "last_row", ",", "last_col", ")", ":", "range1", "=", "xl_rowcol_to_cell", "(", "first_row", ",", "first_col", ",", "True", ",", "True", ")", "range2", "=", "xl_rowcol_to_cell", "(", "last_row", ...
convert zero indexed row and col cell references to a $a$1:$b$1 absolute range string .
train
false
39,217
def test_singleton_dim(): image = np.random.rand(1, 20) noisy = random_noise(image, mode='salt', amount=0.1, seed=42) assert (np.sum((noisy == 1)) == 2)
[ "def", "test_singleton_dim", "(", ")", ":", "image", "=", "np", ".", "random", ".", "rand", "(", "1", ",", "20", ")", "noisy", "=", "random_noise", "(", "image", ",", "mode", "=", "'salt'", ",", "amount", "=", "0.1", ",", "seed", "=", "42", ")", ...
ensure images where size of a given dimension is 1 work correctly .
train
false
39,219
def isUndirected(G): for v in G: if (v in G[v]): return False for w in G[v]: if (v not in G[w]): return False return True
[ "def", "isUndirected", "(", "G", ")", ":", "for", "v", "in", "G", ":", "if", "(", "v", "in", "G", "[", "v", "]", ")", ":", "return", "False", "for", "w", "in", "G", "[", "v", "]", ":", "if", "(", "v", "not", "in", "G", "[", "w", "]", ")...
check that g represents a simple undirected graph .
train
false
39,220
def check_exists(name): if (name in checks_list()): log.debug('[uptime] found {0} in checks'.format(name)) return True return False
[ "def", "check_exists", "(", "name", ")", ":", "if", "(", "name", "in", "checks_list", "(", ")", ")", ":", "log", ".", "debug", "(", "'[uptime] found {0} in checks'", ".", "format", "(", "name", ")", ")", "return", "True", "return", "False" ]
check if a given url is in being monitored by uptime cli example: .
train
false
39,221
def apply_item_metadata(item, track_info): item.artist = track_info.artist item.artist_sort = track_info.artist_sort item.artist_credit = track_info.artist_credit item.title = track_info.title item.mb_trackid = track_info.track_id if track_info.artist_id: item.mb_artistid = track_info.artist_id
[ "def", "apply_item_metadata", "(", "item", ",", "track_info", ")", ":", "item", ".", "artist", "=", "track_info", ".", "artist", "item", ".", "artist_sort", "=", "track_info", ".", "artist_sort", "item", ".", "artist_credit", "=", "track_info", ".", "artist_cr...
set an items metadata from its matched trackinfo object .
train
false
39,222
def quote_ps_string(s): s = s.replace('\\', '\\\\') s = s.replace('(', '\\(') s = s.replace(')', '\\)') s = s.replace("'", '\\251') s = s.replace('`', '\\301') s = re.sub('[^ -~\\n]', (lambda x: ('\\%03o' % ord(x.group()))), s) return s
[ "def", "quote_ps_string", "(", "s", ")", ":", "s", "=", "s", ".", "replace", "(", "'\\\\'", ",", "'\\\\\\\\'", ")", "s", "=", "s", ".", "replace", "(", "'('", ",", "'\\\\('", ")", "s", "=", "s", ".", "replace", "(", "')'", ",", "'\\\\)'", ")", ...
quote dangerous characters of s for use in a postscript string constant .
train
false
39,223
def load_endpoint_json(path): with open(path, 'r') as endpoints_file: return json.load(endpoints_file)
[ "def", "load_endpoint_json", "(", "path", ")", ":", "with", "open", "(", "path", ",", "'r'", ")", "as", "endpoints_file", ":", "return", "json", ".", "load", "(", "endpoints_file", ")" ]
loads a given json file & returns it .
train
false
39,224
def test_no_ready_python_process(noready_pyproc): with pytest.raises(testprocess.ProcessExited): with stopwatch(max_ms=5000): noready_pyproc.start()
[ "def", "test_no_ready_python_process", "(", "noready_pyproc", ")", ":", "with", "pytest", ".", "raises", "(", "testprocess", ".", "ProcessExited", ")", ":", "with", "stopwatch", "(", "max_ms", "=", "5000", ")", ":", "noready_pyproc", ".", "start", "(", ")" ]
when a process quits immediately .
train
false
39,227
def export_library_to_xml(modulestore, contentstore, library_key, root_dir, library_dir): LibraryExportManager(modulestore, contentstore, library_key, root_dir, library_dir).export()
[ "def", "export_library_to_xml", "(", "modulestore", ",", "contentstore", ",", "library_key", ",", "root_dir", ",", "library_dir", ")", ":", "LibraryExportManager", "(", "modulestore", ",", "contentstore", ",", "library_key", ",", "root_dir", ",", "library_dir", ")",...
thin wrapper for the library export manager .
train
false
39,228
@image_comparison(baseline_images=[u'hatching_legend'], extensions=[u'pdf']) def test_hatching_legend(): fig = plt.figure(figsize=(1, 2)) a = plt.Rectangle([0, 0], 0, 0, facecolor=u'green', hatch=u'XXXX') b = plt.Rectangle([0, 0], 0, 0, facecolor=u'blue', hatch=u'XXXX') fig.legend([a, b, a, b], [u'', u'', u'', u''])
[ "@", "image_comparison", "(", "baseline_images", "=", "[", "u'hatching_legend'", "]", ",", "extensions", "=", "[", "u'pdf'", "]", ")", "def", "test_hatching_legend", "(", ")", ":", "fig", "=", "plt", ".", "figure", "(", "figsize", "=", "(", "1", ",", "2"...
test for correct hatching on patches in legend .
train
false
39,230
def abs(a): return _abs(a)
[ "def", "abs", "(", "a", ")", ":", "return", "_abs", "(", "a", ")" ]
returns the absolute url: .
train
false
39,231
def to_bundle_ingest_dirname(ts): return ts.isoformat().replace(':', ';')
[ "def", "to_bundle_ingest_dirname", "(", "ts", ")", ":", "return", "ts", ".", "isoformat", "(", ")", ".", "replace", "(", "':'", ",", "';'", ")" ]
convert a pandas timestamp into the name of the directory for the ingestion .
train
false
39,233
@handle_response_format @treeio_login_required def mlist_delete(request, mlist_id, response_format='html'): mlist = get_object_or_404(MailingList, pk=mlist_id) if (not request.user.profile.has_permission(mlist, mode='w')): return user_denied(request, message="You don't have access to this Mailing List", response_format=response_format) if request.POST: if ('delete' in request.POST): if ('trash' in request.POST): mlist.trash = True mlist.save() else: mlist.delete() return HttpResponseRedirect('/messaging/') elif ('cancel' in request.POST): return HttpResponseRedirect(reverse('messaging_mlist_view', args=[mlist.id])) context = _get_default_context(request) context.update({'mlist': mlist}) return render_to_response('messaging/mlist_delete', context, context_instance=RequestContext(request), response_format=response_format)
[ "@", "handle_response_format", "@", "treeio_login_required", "def", "mlist_delete", "(", "request", ",", "mlist_id", ",", "response_format", "=", "'html'", ")", ":", "mlist", "=", "get_object_or_404", "(", "MailingList", ",", "pk", "=", "mlist_id", ")", "if", "(...
delete mlist page .
train
false
39,234
def revlookup(name): if (Base.defaults['server'] == []): Base.DiscoverNameServers() a = string.split(name, '.') a.reverse() b = (string.join(a, '.') + '.in-addr.arpa') return Base.DnsRequest(b, qtype='ptr').req().answers[0]['data']
[ "def", "revlookup", "(", "name", ")", ":", "if", "(", "Base", ".", "defaults", "[", "'server'", "]", "==", "[", "]", ")", ":", "Base", ".", "DiscoverNameServers", "(", ")", "a", "=", "string", ".", "split", "(", "name", ",", "'.'", ")", "a", ".",...
convenience routine for doing a reverse lookup of an address .
train
false
39,237
def get_hash_method(hash_method_name): if (hash_method_name not in HASH_METHODS): raise exception.SignatureVerificationError(reason=(_('Invalid signature hash method: %s') % hash_method_name)) return HASH_METHODS[hash_method_name]
[ "def", "get_hash_method", "(", "hash_method_name", ")", ":", "if", "(", "hash_method_name", "not", "in", "HASH_METHODS", ")", ":", "raise", "exception", ".", "SignatureVerificationError", "(", "reason", "=", "(", "_", "(", "'Invalid signature hash method: %s'", ")",...
verify the hash method name and create the hash method .
train
false
39,238
def dmp_gff_list(f, u, K): if (not u): return dup_gff_list(f, K) else: raise MultivariatePolynomialError(f)
[ "def", "dmp_gff_list", "(", "f", ",", "u", ",", "K", ")", ":", "if", "(", "not", "u", ")", ":", "return", "dup_gff_list", "(", "f", ",", "K", ")", "else", ":", "raise", "MultivariatePolynomialError", "(", "f", ")" ]
compute greatest factorial factorization of f in k[x] .
train
false
39,239
def sonatype_clm(registry, xml_parent, data): clm = XML.SubElement(xml_parent, 'com.sonatype.insight.ci.hudson.PreBuildScan') clm.set('plugin', 'sonatype-clm-ci') SUPPORTED_VALUES = ['list', 'manual'] SUPPORTED_STAGES = ['build', 'stage-release', 'release', 'operate'] application_select = XML.SubElement(clm, 'applicationSelectType') application_mappings = [('value', 'value', 'list', SUPPORTED_VALUES), ('application-name', 'applicationId', None)] convert_mapping_to_xml(application_select, data, application_mappings, fail_required=True) path = XML.SubElement(clm, 'pathConfig') path_mappings = [('scan-targets', 'scanTargets', ''), ('module-excludes', 'moduleExcludes', ''), ('advanced-options', 'scanProperties', '')] convert_mapping_to_xml(path, data, path_mappings, fail_required=True) mappings = [('fail-on-clm-server-failure', 'failOnClmServerFailures', False), ('stage', 'stageId', 'build', SUPPORTED_STAGES), ('username', 'username', ''), ('password', 'password', '')] convert_mapping_to_xml(clm, data, mappings, fail_required=True)
[ "def", "sonatype_clm", "(", "registry", ",", "xml_parent", ",", "data", ")", ":", "clm", "=", "XML", ".", "SubElement", "(", "xml_parent", ",", "'com.sonatype.insight.ci.hudson.PreBuildScan'", ")", "clm", ".", "set", "(", "'plugin'", ",", "'sonatype-clm-ci'", ")...
yaml: sonatype-clm requires the jenkins :jenkins-wiki:sonatype clm plugin <sonatype+clm+%28formerly+insight+for+ci%29> .
train
false
39,240
def FixVCMacroSlashes(s): if ('$' in s): s = fix_vc_macro_slashes_regex.sub('\\1', s) return s
[ "def", "FixVCMacroSlashes", "(", "s", ")", ":", "if", "(", "'$'", "in", "s", ")", ":", "s", "=", "fix_vc_macro_slashes_regex", ".", "sub", "(", "'\\\\1'", ",", "s", ")", "return", "s" ]
replace macros which have excessive following slashes .
train
false
39,241
@utils.arg('node', metavar='<node>', help=_('ID of node')) def do_baremetal_node_show(cs, args): _emit_deprecation_warning('baremetal-node-show') node = _find_baremetal_node(cs, args.node) _print_baremetal_resource(node)
[ "@", "utils", ".", "arg", "(", "'node'", ",", "metavar", "=", "'<node>'", ",", "help", "=", "_", "(", "'ID of node'", ")", ")", "def", "do_baremetal_node_show", "(", "cs", ",", "args", ")", ":", "_emit_deprecation_warning", "(", "'baremetal-node-show'", ")",...
deprecated: show information about a baremetal node .
train
false
39,243
def getSingleInfo(table, movieID, infoType, notAList=False):
    """Return {infoType: list-of-strings} for the given movie.

    When *notAList* is true only the first matching value is returned
    (still wrapped in a dict).  An empty dict is returned when the info
    type is unknown or no rows match.
    """
    infoTypeID = InfoType.select((InfoType.q.info == infoType))
    if (infoTypeID.count() == 0):
        return {}
    res = table.select(AND((table.q.movieID == movieID), (table.q.infoTypeID == infoTypeID[0].id)))
    retList = []
    for r in res:
        info = r.info
        note = r.note
        if note:
            # Notes are appended to the value using the '::' separator.
            info += (u'::%s' % note)
        retList.append(info)
    if (not retList):
        return {}
    if (not notAList):
        return {infoType: retList}
    else:
        # Caller asked for a single value: return only the first match.
        return {infoType: retList[0]}
[ "def", "getSingleInfo", "(", "table", ",", "movieID", ",", "infoType", ",", "notAList", "=", "False", ")", ":", "infoTypeID", "=", "InfoType", ".", "select", "(", "(", "InfoType", ".", "q", ".", "info", "==", "infoType", ")", ")", "if", "(", "infoTypeI...
return a dictionary in the form {infotype: infolistorstring} .
train
false
39,244
@pytest.mark.parametrize('encrypt', [True, False])
def test_token_inputs(db, config, encrypt, default_account):
    """Ensure refresh tokens are stored as bytes and invalid tokens rejected."""
    config['ENCRYPT_SECRETS'] = encrypt
    unicode_token = u'myunicodesecret'
    # Not valid UTF-8.
    invalid_token = '\xff\x10'
    # Contains a NULL byte.
    null_token = '\x1f\x00\xf1'
    default_account.refresh_token = unicode_token
    db.session.commit()
    secret_id = default_account.refresh_token_id
    secret = db.session.query(Secret).get(secret_id)
    assert (not isinstance(secret.secret, unicode)), 'secret cannot be unicode'
    assert (secret.secret == unicode_token), 'token not decrypted correctly'
    with pytest.raises(ValueError) as e:
        default_account.refresh_token = invalid_token
    assert (e.typename == 'ValueError'), 'token cannot be invalid UTF-8'
    with pytest.raises(ValueError) as f:
        default_account.refresh_token = null_token
    assert (f.typename == 'ValueError'), 'token cannot contain NULL byte'
    # The last successful assignment must survive the failed ones.
    assert (default_account.refresh_token == unicode_token)
[ "@", "pytest", ".", "mark", ".", "parametrize", "(", "'encrypt'", ",", "[", "True", ",", "False", "]", ")", "def", "test_token_inputs", "(", "db", ",", "config", ",", "encrypt", ",", "default_account", ")", ":", "config", "[", "'ENCRYPT_SECRETS'", "]", "...
ensure unicode tokens are converted to bytes .
train
false
39,245
def validate_settings():
    """Fail fast when security-sensitive settings still hold default values.

    Skipped entirely in DEBUG mode and in the test suite.  Raises
    ImproperlyConfigured on the first offending setting.
    """
    if (settings.DEBUG or settings.IN_TEST_SUITE):
        return
    # These settings must be changed from their insecure defaults.
    for (key, value) in [('SECRET_KEY', 'please change this'), ('SESSION_COOKIE_SECURE', False), ('APP_PURCHASE_SECRET', 'please change this')]:
        if (getattr(settings, key) == value):
            raise ImproperlyConfigured('{0} must be changed from default'.format(key))
    # CSP script sources may not be plain http, except the site itself.
    for key in ('CSP_SCRIPT_SRC',):
        for url in getattr(settings, key):
            if (url.startswith('http://') and (url != settings.SITE_URL)):
                raise ImproperlyConfigured('{0} has a http URL: {1}'.format(key, url))
[ "def", "validate_settings", "(", ")", ":", "if", "(", "settings", ".", "DEBUG", "or", "settings", ".", "IN_TEST_SUITE", ")", ":", "return", "for", "(", "key", ",", "value", ")", "in", "[", "(", "'SECRET_KEY'", ",", "'please change this'", ")", ",", "(", ...
validate that if not in debug mode .
train
false
39,246
@removals.remove(message='keystoneclient auth plugins are deprecated. Use keystoneauth.', version='2.1.0', removal_version='3.0.0')
@positional()
def register_argparse_arguments(parser, argv, default=None):
    """Register CLI options needed to create an auth plugin.

    Pre-parses *argv* for ``--os-auth-plugin`` (falling back to the
    OS_AUTH_PLUGIN environment variable or *default*), then registers that
    plugin's own options on *parser*.  Returns the selected plugin class,
    or None when no plugin was selected.
    """
    # Parse only --os-auth-plugin first, without consuming the real parser.
    in_parser = argparse.ArgumentParser(add_help=False)
    env_plugin = os.environ.get('OS_AUTH_PLUGIN', default)
    for p in (in_parser, parser):
        p.add_argument('--os-auth-plugin', metavar='<name>', default=env_plugin, help='The auth plugin to load')
    (options, _args) = in_parser.parse_known_args(argv)
    if (not options.os_auth_plugin):
        return None
    # The default may already be a plugin class rather than a name string.
    if isinstance(options.os_auth_plugin, type):
        msg = 'Default Authentication options'
        plugin = options.os_auth_plugin
    else:
        msg = ('Options specific to the %s plugin.' % options.os_auth_plugin)
        plugin = base.get_plugin_class(options.os_auth_plugin)
    group = parser.add_argument_group('Authentication Options', msg)
    plugin.register_argparse_arguments(group)
    return plugin
[ "@", "removals", ".", "remove", "(", "message", "=", "'keystoneclient auth plugins are deprecated. Use keystoneauth.'", ",", "version", "=", "'2.1.0'", ",", "removal_version", "=", "'3.0.0'", ")", "@", "positional", "(", ")", "def", "register_argparse_arguments", "(", ...
register cli options needed to create a plugin .
train
false
39,247
def createProtocolMessages(data, flags=const.FLAG_PAYLOAD):
    """Split *data* into ProtocolMessage chunks of at most const.MPU bytes."""
    messages = []
    remaining = data
    while len(remaining) > const.MPU:
        (chunk, remaining) = (remaining[:const.MPU], remaining[const.MPU:])
        messages.append(ProtocolMessage(chunk, flags=flags))
    # The final (possibly short) chunk is always emitted.
    messages.append(ProtocolMessage(remaining, flags=flags))
    return messages
[ "def", "createProtocolMessages", "(", "data", ",", "flags", "=", "const", ".", "FLAG_PAYLOAD", ")", ":", "messages", "=", "[", "]", "while", "(", "len", "(", "data", ")", ">", "const", ".", "MPU", ")", ":", "messages", ".", "append", "(", "ProtocolMess...
create protocol messages out of the given payload .
train
false
39,248
@require_POST
@csrf_protect
def generic_delete(request, what, obj_name=None):
    """Delete the named cobbler object of type *what*, with access checks."""
    if (not test_user_authenticated(request)):
        # Session expired: bounce through login and come back to this URL.
        return login(request, next=('/cobbler_web/%s/delete/%s' % (what, obj_name)), expired=True)
    if (obj_name is None):
        return error_page(request, ('You must specify a %s to delete' % what))
    if (not remote.has_item(what, obj_name)):
        return error_page(request, ('Unknown %s specified' % what))
    elif (not remote.check_access_no_fail(request.session['token'], ('remove_%s' % what), obj_name)):
        return error_page(request, ('You do not have permission to delete this %s' % what))
    else:
        # The 'recursive' flag arrives as a JSON-encoded boolean string.
        recursive = simplejson.loads(request.POST.get('recursive', 'false'))
        try:
            remote.xapi_object_edit(what, obj_name, 'remove', {'name': obj_name, 'recursive': recursive}, request.session['token'])
        except Exception as e:
            return error_page(request, str(e))
        return HttpResponseRedirect(('/cobbler_web/%s/list' % what))
[ "@", "require_POST", "@", "csrf_protect", "def", "generic_delete", "(", "request", ",", "what", ",", "obj_name", "=", "None", ")", ":", "if", "(", "not", "test_user_authenticated", "(", "request", ")", ")", ":", "return", "login", "(", "request", ",", "nex...
deletes an object .
train
false
39,252
def bake_lazy_loaders():
    """Enable baked queries for all relationship lazy loaders system wide.

    Re-registers BakedLazyLoader as the strategy for the 'select', True and
    'baked_select' lazy settings, then copies its strategy keys over the
    plain LazyLoader's so both classes stay consistent.
    """
    BakedLazyLoader._strategy_keys[:] = []
    properties.RelationshipProperty.strategy_for(lazy='select')(BakedLazyLoader)
    properties.RelationshipProperty.strategy_for(lazy=True)(BakedLazyLoader)
    properties.RelationshipProperty.strategy_for(lazy='baked_select')(BakedLazyLoader)
    strategies.LazyLoader._strategy_keys[:] = BakedLazyLoader._strategy_keys[:]
[ "def", "bake_lazy_loaders", "(", ")", ":", "BakedLazyLoader", ".", "_strategy_keys", "[", ":", "]", "=", "[", "]", "properties", ".", "RelationshipProperty", ".", "strategy_for", "(", "lazy", "=", "'select'", ")", "(", "BakedLazyLoader", ")", "properties", "."...
enable the use of baked queries for all lazyloaders systemwide .
train
false
39,253
def EMA(ds, count, timeperiod=(- (2 ** 31))):
    """Exponential moving average of *ds*, computed via TA-Lib."""
    # Delegate to the generic TA-Lib dispatch helper.
    result = call_talib_with_ds(ds, count, talib.EMA, timeperiod)
    return result
[ "def", "EMA", "(", "ds", ",", "count", ",", "timeperiod", "=", "(", "-", "(", "2", "**", "31", ")", ")", ")", ":", "return", "call_talib_with_ds", "(", "ds", ",", "count", ",", "talib", ".", "EMA", ",", "timeperiod", ")" ]
exponential moving average .
train
false
39,254
def parse_fuzzy(source, ch):
    """Parse a brace-enclosed fuzzy setting from *source*.

    Returns the constraints dict, or None (with the source position
    restored) when *ch* does not open a fuzzy block or the items do not
    parse.  Raises error() when the closing '}' is missing.
    """
    saved_pos = source.pos
    if (ch != '{'):
        return None
    constraints = {}
    try:
        parse_fuzzy_item(source, constraints)
        while source.match(','):
            parse_fuzzy_item(source, constraints)
    except ParseError:
        # Not a fuzzy setting after all: rewind and let the caller retry.
        source.pos = saved_pos
        return None
    if (not source.match('}')):
        raise error('expected }', source.string, source.pos)
    return constraints
[ "def", "parse_fuzzy", "(", "source", ",", "ch", ")", ":", "saved_pos", "=", "source", ".", "pos", "if", "(", "ch", "!=", "'{'", ")", ":", "return", "None", "constraints", "=", "{", "}", "try", ":", "parse_fuzzy_item", "(", "source", ",", "constraints",...
parses a fuzzy setting .
train
false
39,256
def user_full_name(strategy, details, user=None, **kwargs):
    """Update user.first_name from the provider-supplied *details*."""
    if not user:
        return
    full_name = details.get(u'fullname', u'').strip()
    if (not full_name) and (u'first_name' in details or u'last_name' in details):
        first_name = details.get(u'first_name', u'')
        last_name = details.get(u'last_name', u'')
        if first_name and first_name not in last_name:
            full_name = u'{0} {1}'.format(first_name, last_name)
        else:
            # Fall back to whichever single component is non-empty.
            full_name = first_name if first_name else last_name
        full_name = full_name.strip()
    # Truncate to the 30-character limit used by the storage field.
    if len(full_name) > 30:
        full_name = full_name[:30]
    if full_name and full_name != user.first_name:
        user.first_name = full_name
        strategy.storage.user.changed(user)
[ "def", "user_full_name", "(", "strategy", ",", "details", ",", "user", "=", "None", ",", "**", "kwargs", ")", ":", "if", "user", ":", "full_name", "=", "details", ".", "get", "(", "u'fullname'", ",", "u''", ")", ".", "strip", "(", ")", "if", "(", "...
update user full name using data from provider .
train
false
39,257
def register_schema(path, schema):
    """Make *schema* available for $refs at *path*."""
    schema_paths.update({path: schema})
[ "def", "register_schema", "(", "path", ",", "schema", ")", ":", "schema_paths", "[", "path", "]", "=", "schema" ]
register schema to be available at path for $refs .
train
false
39,258
def serialize_json(obj, pretty=False, indent=None, **kwargs):
    """Return a serialized JSON representation of *obj*.

    ``allow_nan``, ``separators`` and ``sort_keys`` are fixed internally
    and may not be overridden through *kwargs* (ValueError is raised).
    """
    for name in ['allow_nan', 'separators', 'sort_keys']:
        if (name in kwargs):
            raise ValueError(('The value of %r is computed internally, overriding is not permissable.' % name))
    # Resolve the effective prettiness from the global settings.
    pretty = settings.pretty(pretty)
    if pretty:
        separators = (',', ': ')
    else:
        # Compact separators for machine-oriented output.
        separators = (',', ':')
    if (pretty and (indent is None)):
        indent = 2
    return json.dumps(obj, cls=BokehJSONEncoder, allow_nan=False, indent=indent, separators=separators, sort_keys=True, **kwargs)
[ "def", "serialize_json", "(", "obj", ",", "pretty", "=", "False", ",", "indent", "=", "None", ",", "**", "kwargs", ")", ":", "for", "name", "in", "[", "'allow_nan'", ",", "'separators'", ",", "'sort_keys'", "]", ":", "if", "(", "name", "in", "kwargs", ...
return a serialized json representation of objects .
train
true
39,259
def logFactorial(x):
    """Return log(x!) computed via the log-gamma function: lgamma(x + 1)."""
    shifted = x + 1.0
    return lgamma(shifted)
[ "def", "logFactorial", "(", "x", ")", ":", "return", "lgamma", "(", "(", "x", "+", "1.0", ")", ")" ]
approximation to the log of the factorial function .
train
false
39,261
def _shell_command(command): repo_dir = os.path.dirname(os.path.abspath(__file__)) command_subprocess = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=repo_dir, universal_newlines=True) return command_subprocess.communicate()[0]
[ "def", "_shell_command", "(", "command", ")", ":", "repo_dir", "=", "os", ".", "path", ".", "dirname", "(", "os", ".", "path", ".", "abspath", "(", "__file__", ")", ")", "command_subprocess", "=", "subprocess", ".", "Popen", "(", "command", ",", "stdout"...
return the first result of a shell command .
train
false
39,262
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_POST
@login_required
def set_course_mode_price(request, course_id):
    """Set a new price on the course's honor mode, archiving the old one."""
    try:
        course_price = int(request.POST['course_price'])
    except ValueError:
        return JsonResponse({'message': _('Please Enter the numeric value for the course price')}, status=400)
    currency = request.POST['currency']
    course_key = SlashSeparatedCourseKey.from_deprecated_string(course_id)
    course_honor_mode = CourseMode.objects.filter(mode_slug='honor', course_id=course_key)
    if (not course_honor_mode):
        return JsonResponse({'message': _('CourseMode with the mode slug({mode_slug}) DoesNotExist').format(mode_slug='honor')}, status=400)
    # Archive the current honor mode before overwriting its price.
    CourseModesArchive.objects.create(course_id=course_id, mode_slug='honor', mode_display_name='Honor Code Certificate', min_price=course_honor_mode[0].min_price, currency=course_honor_mode[0].currency, expiration_datetime=datetime.datetime.now(pytz.utc), expiration_date=datetime.date.today())
    course_honor_mode.update(min_price=course_price, currency=currency)
    return JsonResponse({'message': _('CourseMode price updated successfully')})
[ "@", "ensure_csrf_cookie", "@", "cache_control", "(", "no_cache", "=", "True", ",", "no_store", "=", "True", ",", "must_revalidate", "=", "True", ")", "@", "require_POST", "@", "login_required", "def", "set_course_mode_price", "(", "request", ",", "course_id", "...
set the new course price and add new entry in the coursemodesarchive table .
train
false
39,263
def backupConfigZip(fileList, archive, arcname=None):
    """Store the files in *fileList* into the zip file *archive*.

    Paths inside the archive are made relative to *arcname*.  Returns True
    on success, False when the zip could not be created.
    """
    try:
        with zipfile.ZipFile(archive, u'w', zipfile.ZIP_DEFLATED, allowZip64=True) as z:
            # De-duplicate the file list before archiving.
            for f in list(set(fileList)):
                z.write(f, os.path.relpath(f, arcname))
        return True
    except Exception as e:
        sickrage.srCore.srLogger.error(u'Zip creation error: {} '.format(e.message))
        return False
[ "def", "backupConfigZip", "(", "fileList", ",", "archive", ",", "arcname", "=", "None", ")", ":", "try", ":", "with", "zipfile", ".", "ZipFile", "(", "archive", ",", "u'w'", ",", "zipfile", ".", "ZIP_DEFLATED", ",", "allowZip64", "=", "True", ")", "as", ...
store the config file as a zip .
train
false
39,264
def _get_course_block_counts(auth_token, block_url):
    """Return the block-count dict for a course's root block.

    Arguments:
        auth_token: bearer token used to authenticate the API request.
        block_url: URL of the course blocks endpoint.

    Returns an empty dict on non-200 responses or unexpected payloads.
    """
    headers = {'Authorization': 'Bearer {}'.format(auth_token)}
    response = requests.get(block_url, headers=headers)
    if (response.status_code != 200):
        print 'url {} returned status code {}'.format(block_url, response.status_code)
        return {}
    response_json = response.json()
    if ((BLOCK_ROOT_KEY in response_json) and (BLOCKS_KEY in response_json)):
        # Counts live under the root block's entry in the blocks mapping.
        root_val = response_json[BLOCK_ROOT_KEY]
        counts = response_json[BLOCKS_KEY][root_val][BLOCK_COUNTS_KEY]
        return counts
    return {}
[ "def", "_get_course_block_counts", "(", "auth_token", ",", "block_url", ")", ":", "headers", "=", "{", "'Authorization'", ":", "'Bearer {}'", ".", "format", "(", "auth_token", ")", "}", "response", "=", "requests", ".", "get", "(", "block_url", ",", "headers",...
get the block counts for a given block_url arguments: auth_token : the authentication token to access the api block_url : the respective url for a courses xblock data returns: dict: a dictionary containing the block counts .
train
false
39,267
def Overlay(child, parent):
    """Fill missing hint attributes on *child* from *parent*.

    Both arguments must be mappings; otherwise DefinitionError is raised.
    Returns the (mutated) child mapping.
    """
    # collections.Mapping was removed in Python 3.10; prefer
    # collections.abc.Mapping when available, falling back for old runtimes.
    mapping_type = getattr(collections, 'abc', collections).Mapping
    for arg in (child, parent):
        if (not isinstance(arg, mapping_type)):
            raise DefinitionError(('Trying to merge badly defined hints. Child: %s, Parent: %s' % (type(child), type(parent))))
    for attr in ['fix', 'format', 'problem', 'summary']:
        if (not child.get(attr)):
            child[attr] = parent.get(attr, '').strip()
    return child
[ "def", "Overlay", "(", "child", ",", "parent", ")", ":", "for", "arg", "in", "(", "child", ",", "parent", ")", ":", "if", "(", "not", "isinstance", "(", "arg", ",", "collections", ".", "Mapping", ")", ")", ":", "raise", "DefinitionError", "(", "(", ...
adds hint attributes to a child hint if they are not defined .
train
true
39,268
def make_archive(base_name, format, root_dir=None, base_dir=None, verbose=0, dry_run=0, owner=None, group=None, logger=None):
    """Create an archive file and return its name.

    *base_name* is the output name minus any format-specific extension;
    *format* must be a key of _ARCHIVE_FORMATS.  *root_dir* is chdir'd
    into before archiving; *base_dir* is the directory the archive starts
    from.  *owner*/*group* are passed through only for non-zip formats.
    """
    save_cwd = os.getcwd()
    if (root_dir is not None):
        if (logger is not None):
            logger.debug("changing into '%s'", root_dir)
        base_name = os.path.abspath(base_name)
        if (not dry_run):
            os.chdir(root_dir)
    if (base_dir is None):
        base_dir = os.curdir
    kwargs = {'dry_run': dry_run, 'logger': logger}
    try:
        format_info = _ARCHIVE_FORMATS[format]
    except KeyError:
        raise ValueError(("unknown archive format '%s'" % format))
    func = format_info[0]
    for (arg, val) in format_info[1]:
        kwargs[arg] = val
    if (format != 'zip'):
        # Ownership arguments are only meaningful for tar-based formats.
        kwargs['owner'] = owner
        kwargs['group'] = group
    try:
        filename = func(base_name, base_dir, **kwargs)
    finally:
        # Always restore the working directory, even if archiving failed.
        if (root_dir is not None):
            if (logger is not None):
                logger.debug("changing back to '%s'", save_cwd)
            os.chdir(save_cwd)
    return filename
[ "def", "make_archive", "(", "base_name", ",", "format", ",", "root_dir", "=", "None", ",", "base_dir", "=", "None", ",", "verbose", "=", "0", ",", "dry_run", "=", "0", ",", "owner", "=", "None", ",", "group", "=", "None", ",", "logger", "=", "None", ...
create an archive file .
train
true
39,269
def writeXMLElement(fileNames, target, xmlElement):
    """Write the fabrication text of *target* to a uniquely named file.

    The name is derived from the target/element attributes (name, id,
    file, suffix, extension); a numeric suffix is appended until the name
    does not clash with anything already in *fileNames*.
    """
    object = target.object
    if (object == None):
        print 'Warning, writeTarget in write could not get object for:'
        print xmlElement
        return
    fileNameRoot = evaluate.getEvaluatedStringDefault('', 'name', target)
    fileNameRoot = evaluate.getEvaluatedStringDefault(fileNameRoot, 'id', target)
    fileNameRoot = evaluate.getEvaluatedStringDefault(fileNameRoot, 'file', xmlElement)
    fileNameRoot += evaluate.getEvaluatedStringDefault('', 'suffix', xmlElement)
    extension = evaluate.getEvaluatedStringDefault(object.getFabricationExtension(), 'extension', xmlElement)
    fileName = ('%s.%s' % (fileNameRoot, extension))
    suffixIndex = 1
    while (fileName in fileNames):
        # Append _1, _2, ... until the file name is unique.
        fileName = ('%s_%s.%s' % (fileNameRoot, suffixIndex, extension))
        suffixIndex += 1
    fileNames.append(fileName)
    folderName = evaluate.getEvaluatedStringDefault('', 'folder', xmlElement)
    absoluteFolderDirectory = os.path.join(os.path.dirname(xmlElement.getRoot().parser.fileName), folderName)
    archive.makeDirectory(absoluteFolderDirectory)
    archive.writeFileText(os.path.join(absoluteFolderDirectory, fileName), object.getFabricationText())
[ "def", "writeXMLElement", "(", "fileNames", ",", "target", ",", "xmlElement", ")", ":", "object", "=", "target", ".", "object", "if", "(", "object", "==", "None", ")", ":", "print", "'Warning, writeTarget in write could not get object for:'", "print", "xmlElement", ...
write target .
train
false
39,272
def iterdecode(value):
    """Decode an encoded string back into a tuple of elements.

    CHAR_SEPARATOR splits elements; CHAR_ESCAPE escapes the following
    character.  An empty input decodes to the empty tuple.
    """
    if not value:
        return tuple()
    parts = []
    current = u''
    pending_escape = False
    for ch in value:
        if pending_escape:
            # The previous char was the escape marker: take this literally.
            pending_escape = False
            current += ch
            continue
        if ch == CHAR_ESCAPE:
            pending_escape = True
        elif ch == CHAR_SEPARATOR:
            parts.append(current)
            current = u''
        else:
            current += ch
    parts.append(current)
    return tuple(parts)
[ "def", "iterdecode", "(", "value", ")", ":", "if", "(", "not", "value", ")", ":", "return", "tuple", "(", ")", "result", "=", "[", "]", "accumulator", "=", "u''", "escaped", "=", "False", "for", "c", "in", "value", ":", "if", "(", "not", "escaped",...
decode enumerable from string presentation as a tuple .
train
false
39,273
def normalize_indices(context, builder, index_types, indices):
    """Apply normalize_index() pairwise over *index_types* and *indices*."""
    if not len(indices):
        # Nothing to normalize: hand back the inputs unchanged.
        return (index_types, indices)
    normalized = [normalize_index(context, builder, idxty, idx) for (idxty, idx) in zip(index_types, indices)]
    (index_types, indices) = zip(*normalized)
    return (index_types, indices)
[ "def", "normalize_indices", "(", "context", ",", "builder", ",", "index_types", ",", "indices", ")", ":", "if", "len", "(", "indices", ")", ":", "(", "index_types", ",", "indices", ")", "=", "zip", "(", "*", "[", "normalize_index", "(", "context", ",", ...
same as normalize_index() .
train
false
39,274
@verbose
def compute_proj_epochs(epochs, n_grad=2, n_mag=2, n_eeg=2, n_jobs=1, desc_prefix=None, verbose=None):
    """Compute SSP projection vectors on epoched data.

    The covariance is computed across epochs (optionally in parallel) and
    the projectors are labelled with the event id plus the epoch time
    window unless *desc_prefix* is supplied explicitly.
    """
    data = _compute_cov_epochs(epochs, n_jobs)
    event_id = epochs.event_id
    # Build a human-readable event label for the projector description.
    if ((event_id is None) or (len(list(event_id.keys())) == 0)):
        event_id = '0'
    elif (len(event_id.keys()) == 1):
        event_id = str(list(event_id.values())[0])
    else:
        event_id = 'Multiple-events'
    if (desc_prefix is None):
        desc_prefix = ('%s-%-.3f-%-.3f' % (event_id, epochs.tmin, epochs.tmax))
    return _compute_proj(data, epochs.info, n_grad, n_mag, n_eeg, desc_prefix)
[ "@", "verbose", "def", "compute_proj_epochs", "(", "epochs", ",", "n_grad", "=", "2", ",", "n_mag", "=", "2", ",", "n_eeg", "=", "2", ",", "n_jobs", "=", "1", ",", "desc_prefix", "=", "None", ",", "verbose", "=", "None", ")", ":", "data", "=", "_co...
compute ssp vectors on epochs .
train
false
39,276
def revoke_grant(key_id, grant_id, region=None, key=None, keyid=None, profile=None):
    """Revoke *grant_id* from the KMS key *key_id*.

    Returns {'result': True} on success, or {'result': False, 'error': ...}
    when the AWS call fails.
    """
    conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
    if key_id.startswith('alias/'):
        # Grants must be revoked against the real key id, not an alias.
        key_id = _get_key_id(key_id)
    r = {}
    try:
        conn.revoke_grant(key_id, grant_id)
        r['result'] = True
    except boto.exception.BotoServerError as e:
        r['result'] = False
        r['error'] = __utils__['boto.get_error'](e)
    return r
[ "def", "revoke_grant", "(", "key_id", ",", "grant_id", ",", "region", "=", "None", ",", "key", "=", "None", ",", "keyid", "=", "None", ",", "profile", "=", "None", ")", ":", "conn", "=", "_get_conn", "(", "region", "=", "region", ",", "key", "=", "...
revoke a grant from a key .
train
true
39,277
def addElementToListDictionary(element, key, listDictionary):
    """Append *element* to the list stored under *key* in *listDictionary*.

    A new single-element list is created when the key is absent.  The
    dictionary is mutated in place; nothing is returned.
    """
    # setdefault replaces the original explicit membership test + branch.
    listDictionary.setdefault(key, []).append(element)
[ "def", "addElementToListDictionary", "(", "element", ",", "key", ",", "listDictionary", ")", ":", "if", "(", "key", "in", "listDictionary", ")", ":", "listDictionary", "[", "key", "]", ".", "append", "(", "element", ")", "else", ":", "listDictionary", "[", ...
add an element to the list table .
train
false
39,278
def _bit_length(num): return len(bin(num).lstrip(u'-0b'))
[ "def", "_bit_length", "(", "num", ")", ":", "return", "len", "(", "bin", "(", "num", ")", ".", "lstrip", "(", "u'-0b'", ")", ")" ]
return number of bits needed to encode given number .
train
false
39,279
def dorogovtsev_goltsev_mendes_graph(n, create_using=None):
    """Return the hierarchically constructed Dorogovtsev-Goltsev-Mendes graph.

    Generation 0 is a single edge; each later generation adds one new node
    per existing edge, connected to both endpoints of that edge.  Directed
    graphs and multigraphs are not supported.
    """
    if (create_using is not None):
        if create_using.is_directed():
            raise nx.NetworkXError('Directed Graph not supported')
        if create_using.is_multigraph():
            raise nx.NetworkXError('Multigraph not supported')
    G = empty_graph(0, create_using)
    G.name = 'Dorogovtsev-Goltsev-Mendes Graph'
    G.add_edge(0, 1)
    if (n == 0):
        return G
    new_node = 2
    for i in range(1, (n + 1)):
        # Snapshot the edge list before this generation adds new nodes.
        last_generation_edges = list(G.edges())
        number_of_edges_in_last_generation = len(last_generation_edges)
        for j in range(0, number_of_edges_in_last_generation):
            G.add_edge(new_node, last_generation_edges[j][0])
            G.add_edge(new_node, last_generation_edges[j][1])
            new_node += 1
    return G
[ "def", "dorogovtsev_goltsev_mendes_graph", "(", "n", ",", "create_using", "=", "None", ")", ":", "if", "(", "create_using", "is", "not", "None", ")", ":", "if", "create_using", ".", "is_directed", "(", ")", ":", "raise", "nx", ".", "NetworkXError", "(", "'...
return the hierarchically constructed dorogovtsev-goltsev-mendes graph .
train
false
39,283
def course_discovery_api_client(user):
    """Return a course discovery API client authenticated as *user*.

    A JWT with 'email' and 'profile' scopes is built for the user and used
    for requests against COURSE_CATALOG_API_URL.
    """
    scopes = ['email', 'profile']
    expires_in = settings.OAUTH_ID_TOKEN_EXPIRATION
    jwt = JwtBuilder(user).build_token(scopes, expires_in)
    return EdxRestApiClient(settings.COURSE_CATALOG_API_URL, jwt=jwt)
[ "def", "course_discovery_api_client", "(", "user", ")", ":", "scopes", "=", "[", "'email'", ",", "'profile'", "]", "expires_in", "=", "settings", ".", "OAUTH_ID_TOKEN_EXPIRATION", "jwt", "=", "JwtBuilder", "(", "user", ")", ".", "build_token", "(", "scopes", "...
returns a course discovery api client setup with authentication for the specified user .
train
false
39,284
def _get_gecos(name):
    """Return the GECOS field of user *name* as a dict.

    Keys: fullname, roomnumber, workphone, homephone.  Raises
    CommandExecutionError when the user does not exist.
    """
    try:
        # GECOS holds up to four comma-separated sub-fields.
        gecos_field = pwd.getpwnam(name).pw_gecos.split(',', 3)
    except KeyError:
        raise CommandExecutionError("User '{0}' does not exist".format(name))
    if (not gecos_field):
        return {}
    else:
        # Pad missing sub-fields so all four keys can be filled below.
        while (len(gecos_field) < 4):
            gecos_field.append('')
        return {'fullname': locales.sdecode(gecos_field[0]), 'roomnumber': locales.sdecode(gecos_field[1]), 'workphone': locales.sdecode(gecos_field[2]), 'homephone': locales.sdecode(gecos_field[3])}
[ "def", "_get_gecos", "(", "name", ")", ":", "try", ":", "gecos_field", "=", "pwd", ".", "getpwnam", "(", "name", ")", ".", "pw_gecos", ".", "split", "(", "','", ",", "3", ")", "except", "KeyError", ":", "raise", "CommandExecutionError", "(", "\"User '{0}...
retrieve gecos field info and return it in dictionary form .
train
false
39,285
def trace_value(value, msg='Value: '):
    """Print *msg* followed by *value*, then return *value* unchanged."""
    # Python 2 print statement; returning the value allows use mid-expression.
    print msg, value
    return value
[ "def", "trace_value", "(", "value", ",", "msg", "=", "'Value: '", ")", ":", "print", "msg", ",", "value", "return", "value" ]
prints value and returns value .
train
false
39,286
def check_headers_valid(headers):
    """Raise InvalidEmailError when *headers* is not a valid header dict."""
    reason = invalid_headers_reason(headers)
    if reason is None:
        return
    raise InvalidEmailError(reason)
[ "def", "check_headers_valid", "(", "headers", ")", ":", "reason", "=", "invalid_headers_reason", "(", "headers", ")", "if", "(", "reason", "is", "not", "None", ")", ":", "raise", "InvalidEmailError", "(", "reason", ")" ]
check that headers is a valid dictionary for headers .
train
false
39,288
def filter_eliot_main(args, base_path, top_level, stdin=None, stdout=None):
    """Filter log lines read from the files in *args* (or stdin) to *stdout*."""
    stdin = (sys.stdin if (stdin is None) else stdin)
    stdout = (sys.stdout if (stdout is None) else stdout)
    if args:
        # Positional arguments are file names, read in order.
        lines = chain_files(args)
    else:
        lines = stdin
    for line in lines:
        for output in process_line(line):
            stdout.write(output)
[ "def", "filter_eliot_main", "(", "args", ",", "base_path", ",", "top_level", ",", "stdin", "=", "None", ",", "stdout", "=", "None", ")", ":", "stdin", "=", "(", "sys", ".", "stdin", "if", "(", "stdin", "is", "None", ")", "else", "stdin", ")", "stdout...
filter logs .
train
false
39,290
def MakeTokenRegex(meta_left, meta_right):
    """Return (and cache) a tokenizing regex for the given delimiters."""
    key = (meta_left, meta_right)
    cached = _token_re_cache.get(key)
    if cached is None:
        # Non-greedy match of anything between the escaped delimiters.
        pattern = ('(%s.+?%s)' % (re.escape(meta_left), re.escape(meta_right)))
        cached = re.compile(pattern)
        _token_re_cache[key] = cached
    return cached
[ "def", "MakeTokenRegex", "(", "meta_left", ",", "meta_right", ")", ":", "key", "=", "(", "meta_left", ",", "meta_right", ")", "if", "(", "key", "not", "in", "_token_re_cache", ")", ":", "_token_re_cache", "[", "key", "]", "=", "re", ".", "compile", "(", ...
return a regular expression for tokenization .
train
false
39,291
def determine_run_start_sync_state(run_change_sysuptime, start_save_sysuptime):
    """Return True when the startup config was saved at or after the last
    running-config change; False when it is stale or was never saved."""
    never_saved = (start_save_sysuptime == 0)
    return (not never_saved) and (start_save_sysuptime >= run_change_sysuptime)
[ "def", "determine_run_start_sync_state", "(", "run_change_sysuptime", ",", "start_save_sysuptime", ")", ":", "if", "(", "start_save_sysuptime", "==", "0", ")", ":", "return", "False", "elif", "(", "start_save_sysuptime", ">=", "run_change_sysuptime", ")", ":", "return...
Return True if the running and startup configs are in sync; return False if they are out of sync (the startup config was never saved, or was saved before the most recent running-config change).
train
false
39,292
def extract_url_path_and_query(full_url=None, no_query=False):
    """Return the path (and optionally the query string) of *full_url*.

    When *full_url* is None the current request URL is used.  An empty
    path is returned as '/'.
    """
    url = request.url if full_url is None else full_url
    parts = urlsplit(url)
    path = parts.path if parts.path else '/'
    if no_query or not parts.query:
        return path
    return '%s?%s' % (path, parts.query)
[ "def", "extract_url_path_and_query", "(", "full_url", "=", "None", ",", "no_query", "=", "False", ")", ":", "if", "(", "full_url", "is", "None", ")", ":", "full_url", "=", "request", ".", "url", "split", "=", "urlsplit", "(", "full_url", ")", "result", "...
Extract the path (and optionally the query string) from a URL, e.g. http://host/aaa/bbb?x=1 -> /aaa/bbb?x=1 .
train
false
39,293
def _setup_server(hass, config):
    """Try to start the embedded MQTT broker.

    Returns None when an external broker is configured (without an embedded
    section) or the server platform cannot be loaded; otherwise the broker
    config on success, or False on failure.
    """
    conf = config.get(DOMAIN, {})
    # An external broker with no embedded config means: start nothing.
    if ((CONF_EMBEDDED not in conf) and (CONF_BROKER in conf)):
        return None
    server = prepare_setup_platform(hass, config, DOMAIN, 'server')
    if (server is None):
        _LOGGER.error('Unable to load embedded server')
        return None
    (success, broker_config) = server.start(hass, conf.get(CONF_EMBEDDED))
    return (success and broker_config)
[ "def", "_setup_server", "(", "hass", ",", "config", ")", ":", "conf", "=", "config", ".", "get", "(", "DOMAIN", ",", "{", "}", ")", "if", "(", "(", "CONF_EMBEDDED", "not", "in", "conf", ")", "and", "(", "CONF_BROKER", "in", "conf", ")", ")", ":", ...
try to start embedded mqtt broker .
train
false
39,294
def testtable():
    """Ensure table() builds a WordprocessingML table with correct cells."""
    testtable = table([['A1', 'A2'], ['B1', 'B2'], ['C1', 'C2']])
    # The cell at row 2, column 2 must contain the text 'B2'.
    assert (testtable.xpath('/ns0:tbl/ns0:tr[2]/ns0:tc[2]/ns0:p/ns0:r/ns0:t', namespaces={'ns0': 'http://schemas.openxmlformats.org/wordprocessingml/2006/main'})[0].text == 'B2')
[ "def", "testtable", "(", ")", ":", "testtable", "=", "table", "(", "[", "[", "'A1'", ",", "'A2'", "]", ",", "[", "'B1'", ",", "'B2'", "]", ",", "[", "'C1'", ",", "'C2'", "]", "]", ")", "assert", "(", "testtable", ".", "xpath", "(", "'/ns0:tbl/ns0...
ensure tables make sense .
train
false
39,296
def test_takes_all_arguments():
    """Ensure introspection identifies functions accepting all given arguments.

    Functions taking only **kwargs or only *args are expected to be
    rejected here; only the one with named parameters plus both should pass.
    """
    assert (not hug.introspect.takes_all_arguments(function_with_kwargs, 'argument1', 'argument2', 'argument3'))
    assert (not hug.introspect.takes_all_arguments(function_with_args, 'argument1', 'argument2', 'argument3'))
    assert (not hug.introspect.takes_all_arguments(function_with_neither, 'argument1', 'argument2', 'argument3'))
    assert hug.introspect.takes_all_arguments(function_with_both, 'argument1', 'argument2', 'argument3')
[ "def", "test_takes_all_arguments", "(", ")", ":", "assert", "(", "not", "hug", ".", "introspect", ".", "takes_all_arguments", "(", "function_with_kwargs", ",", "'argument1'", ",", "'argument2'", ",", "'argument3'", ")", ")", "assert", "(", "not", "hug", ".", "...
test to ensure hug introspection can correctly identify if a function takes all specified arguments .
train
false
39,297
def matrix_IQR(x):
    """Compute the IQR bounds for each column of the 2-D array *x*.

    Returns (min_vals, max_vals), one entry per column, as produced by
    IQR() on that column.
    """
    num_cols = x.shape[1]
    min_vals = zeros(num_cols)
    max_vals = zeros(num_cols)
    for col_idx in range(num_cols):
        column = x[:, col_idx]
        (min_vals[col_idx], max_vals[col_idx]) = IQR(column)
    return (min_vals, max_vals)
[ "def", "matrix_IQR", "(", "x", ")", ":", "num_cols", "=", "x", ".", "shape", "[", "1", "]", "min_vals", "=", "zeros", "(", "num_cols", ")", "max_vals", "=", "zeros", "(", "num_cols", ")", "for", "i", "in", "range", "(", "x", ".", "shape", "[", "1...
calculates the iqr for each column in an array .
train
false
39,298
def get_course_creator_status(user):
    """Return the CourseCreator state for *user*, or None if no row exists."""
    matches = CourseCreator.objects.filter(user=user)
    if matches.count() == 0:
        return None
    return matches[0].state
[ "def", "get_course_creator_status", "(", "user", ")", ":", "user", "=", "CourseCreator", ".", "objects", ".", "filter", "(", "user", "=", "user", ")", "if", "(", "user", ".", "count", "(", ")", "==", "0", ")", ":", "return", "None", "else", ":", "ret...
returns the status for a particular user .
train
false
39,299
@pytest.mark.django_db
def test_user_has_manager_permissions(no_perms_user, administrate, tp0):
    """Test User.has_manager_permissions at TP, language and project level."""
    language0 = tp0.language
    project0 = tp0.project
    assert (not no_perms_user.has_manager_permissions())
    # Granting 'administrate' on the translation project directory suffices.
    criteria = {'user': no_perms_user, 'directory': tp0.directory}
    ps = PermissionSet.objects.get_or_create(**criteria)[0]
    ps.positive_permissions.set([administrate])
    ps.save()
    assert no_perms_user.has_manager_permissions()
    ps.positive_permissions.clear()
    assert (not no_perms_user.has_manager_permissions())
    # Same check at the language level.
    criteria['directory'] = language0.directory
    ps = PermissionSet.objects.get_or_create(**criteria)[0]
    ps.positive_permissions.set([administrate])
    ps.save()
    assert no_perms_user.has_manager_permissions()
    ps.positive_permissions.clear()
    assert (not no_perms_user.has_manager_permissions())
    # Same check at the project level.
    criteria['directory'] = project0.directory
    ps = PermissionSet.objects.get_or_create(**criteria)[0]
    ps.positive_permissions.set([administrate])
    ps.save()
    assert no_perms_user.has_manager_permissions()
    ps.positive_permissions.clear()
    assert (not no_perms_user.has_manager_permissions())
[ "@", "pytest", ".", "mark", ".", "django_db", "def", "test_user_has_manager_permissions", "(", "no_perms_user", ",", "administrate", ",", "tp0", ")", ":", "language0", "=", "tp0", ".", "language", "project0", "=", "tp0", ".", "project", "assert", "(", "not", ...
test user has_manager_permissions method .
train
false
39,300
def unicode_list(_list):
    """Return a new list with every element converted via unicode_obj()."""
    return list(map(unicode_obj, _list))
[ "def", "unicode_list", "(", "_list", ")", ":", "return", "[", "unicode_obj", "(", "x", ")", "for", "x", "in", "_list", "]" ]
make sure every element in list is unicode .
train
false
39,301
def func2(a, b):
    """Return the arguments *a* and *b* as a tuple (test helper)."""
    pair = (a, b)
    return pair
[ "def", "func2", "(", "a", ",", "b", ")", ":", "return", "(", "a", ",", "b", ")" ]
test function for issue 9936 .
train
false
39,303
def div_by_zero(x):
    """Divide *x* by zero, deliberately raising ZeroDivisionError."""
    zero = 0
    return x / zero
[ "def", "div_by_zero", "(", "x", ")", ":", "return", "(", "x", "/", "0", ")" ]
prepare for a division-by-zero exception .
train
false
39,307
def test_temperature_unit():
    """Test temperature unit validation: only 'C' and 'F' are accepted."""
    schema = vol.Schema(cv.temperature_unit)
    # Kelvin is not a supported unit and must be rejected.
    with pytest.raises(vol.MultipleInvalid):
        schema('K')
    schema('C')
    schema('F')
[ "def", "test_temperature_unit", "(", ")", ":", "schema", "=", "vol", ".", "Schema", "(", "cv", ".", "temperature_unit", ")", "with", "pytest", ".", "raises", "(", "vol", ".", "MultipleInvalid", ")", ":", "schema", "(", "'K'", ")", "schema", "(", "'C'", ...
test temperature unit validation .
train
false
39,308
def write_blob_diff(f, old_file, new_file):
    """Write a unified diff between two blobs to the file-like *f*.

    Each of *old_file*/*new_file* is a (path, mode, blob) triple; the blob
    may be None for an added or deleted file.
    """
    (old_path, old_mode, old_blob) = old_file
    (new_path, new_mode, new_blob) = new_file
    # Git-style a/ and b/ prefixes for the header paths.
    old_path = patch_filename(old_path, 'a')
    new_path = patch_filename(new_path, 'b')
    def lines(blob):
        # A missing blob diffs as an empty sequence of lines.
        if (blob is not None):
            return blob.splitlines()
        else:
            return []
    f.writelines(gen_diff_header((old_path, new_path), (old_mode, new_mode), (getattr(old_blob, 'id', None), getattr(new_blob, 'id', None))))
    old_contents = lines(old_blob)
    new_contents = lines(new_blob)
    f.writelines(unified_diff(old_contents, new_contents, old_path, new_path))
[ "def", "write_blob_diff", "(", "f", ",", "old_file", ",", "new_file", ")", ":", "(", "old_path", ",", "old_mode", ",", "old_blob", ")", "=", "old_file", "(", "new_path", ",", "new_mode", ",", "new_blob", ")", "=", "new_file", "old_path", "=", "patch_filena...
write blob diff .
train
false
39,309
def executable(path):
    """Return True when *path* is executable by the current process."""
    has_exec = os.access(path, os.X_OK)
    return has_exec
[ "def", "executable", "(", "path", ")", ":", "return", "os", ".", "access", "(", "path", ",", "os", ".", "X_OK", ")" ]
test for executable file .
train
false