id_within_dataset
int64
1
55.5k
snippet
stringlengths
19
14.2k
tokens
listlengths
6
1.63k
nl
stringlengths
6
352
split_within_dataset
stringclasses
1 value
is_duplicated
bool
2 classes
17,384
def new(rsa_key):
    """Return a fresh PKCS#1 v1.5 signature scheme bound to *rsa_key*."""
    return PKCS115_SigScheme(rsa_key)
[ "def", "new", "(", "rsa_key", ")", ":", "return", "PKCS115_SigScheme", "(", "rsa_key", ")" ]
return a new shad256 hashing object .
train
false
17,385
@utils.arg('monitor', metavar='<monitor>', help='ID of the monitor to rename.')
@utils.arg('display_name', nargs='?', metavar='<display-name>', help='New display-name for the monitor.')
@utils.arg('--display-description', metavar='<display-description>', help='Optional monitor description. (Default=None)', default=None)
@utils.service_type('monitor')
def do_rename(cs, args):
    """Rename a monitor, sending only the fields the user actually supplied."""
    updates = {}
    if args.display_name is not None:
        updates['display_name'] = args.display_name
    if args.display_description is not None:
        updates['display_description'] = args.display_description
    _find_monitor(cs, args.monitor).update(**updates)
[ "@", "utils", ".", "arg", "(", "'monitor'", ",", "metavar", "=", "'<monitor>'", ",", "help", "=", "'ID of the monitor to rename.'", ")", "@", "utils", ".", "arg", "(", "'display_name'", ",", "nargs", "=", "'?'", ",", "metavar", "=", "'<display-name>'", ",", ...
rename a monitor .
train
false
17,386
def parse_testfile(fname):
    """Parse a file of test values, yielding one tuple per data line.

    Empty lines and lines starting with '--' are skipped.  Each yielded
    tuple is (id, fn, arg_real, arg_imag, exp_real, exp_imag, flags),
    where flags is the list of tokens after the two expected values.
    """
    with open(fname) as fp:
        for raw in fp:
            if raw.startswith('--') or not raw.strip():
                continue
            lhs, rhs = raw.split('->')
            id, fn, arg_real, arg_imag = lhs.split()
            pieces = rhs.split()
            exp_real, exp_imag = pieces[0], pieces[1]
            flags = pieces[2:]
            yield (id, fn, float(arg_real), float(arg_imag),
                   float(exp_real), float(exp_imag), flags)
[ "def", "parse_testfile", "(", "fname", ")", ":", "with", "open", "(", "fname", ")", "as", "fp", ":", "for", "line", "in", "fp", ":", "if", "(", "line", ".", "startswith", "(", "'--'", ")", "or", "(", "not", "line", ".", "strip", "(", ")", ")", ...
parse a file with test values empty lines or lines starting with -- are ignored yields id .
train
false
17,387
def DictionaryToParamList(url_parameters, escape_params=True):
    """Convert a dict of URL arguments into a list of 'key=value' strings.

    When escape_params is true, keys and values are percent-encoded with
    urllib.quote_plus (Python 2 API — presumably this module targets
    Python 2); otherwise str() is applied.  A None/empty mapping yields [].
    """
    transform_op = [str, urllib.quote_plus][bool(escape_params)]
    encoded_pairs = ((transform_op(param), transform_op(value))
                     for param, value in (url_parameters or {}).items())
    return ['%s=%s' % pair for pair in encoded_pairs]
[ "def", "DictionaryToParamList", "(", "url_parameters", ",", "escape_params", "=", "True", ")", ":", "transform_op", "=", "[", "str", ",", "urllib", ".", "quote_plus", "]", "[", "bool", "(", "escape_params", ")", "]", "parameter_tuples", "=", "[", "(", "trans...
convert a dictionary of url arguments into a url parameter string .
train
false
17,388
def convert_to_arn(arns, region=None, key=None, keyid=None, profile=None):
    """Convert a list of strings into actual ARNs.

    Entries of the form 'scaling_policy:<group>:<policy>' are resolved via
    boto_asg.get_scaling_policy_arn; other entries pass through unchanged.
    Unresolvable scaling-policy entries are logged and dropped.
    """
    converted = []
    for arn in arns:
        if not arn.startswith('scaling_policy:'):
            converted.append(arn)
            continue
        _, as_group, policy_name = arn.split(':')
        policy_arn = __salt__['boto_asg.get_scaling_policy_arn'](
            as_group, policy_name, region, key, keyid, profile)
        if policy_arn:
            converted.append(policy_arn)
        else:
            log.error('Could not convert: {0}'.format(arn))
    return converted
[ "def", "convert_to_arn", "(", "arns", ",", "region", "=", "None", ",", "key", "=", "None", ",", "keyid", "=", "None", ",", "profile", "=", "None", ")", ":", "results", "=", "[", "]", "for", "arn", "in", "arns", ":", "if", "arn", ".", "startswith", ...
convert a list of strings into actual arns .
train
true
17,390
def posToPix(stim):
    """Return the stimulus position converted to pixels for its window."""
    win = stim.win
    return convertToPix([0, 0], stim.pos, win.units, win)
[ "def", "posToPix", "(", "stim", ")", ":", "return", "convertToPix", "(", "[", "0", ",", "0", "]", ",", "stim", ".", "pos", ",", "stim", ".", "win", ".", "units", ",", "stim", ".", "win", ")" ]
returns the stims position in pixels .
train
false
17,391
def _get_supervisorctl_bin(bin_env):
    """Return the supervisorctl command path to call.

    With no bin_env the binary is located on PATH; a directory bin_env is
    treated as a virtualenv root ('<bin_env>/bin/supervisorctl'); anything
    else is assumed to already be the binary path.

    Raises CommandNotFoundError when no binary can be located.
    """
    cmd = 'supervisorctl'
    if not bin_env:
        located = __salt__['cmd.which_bin']([cmd])
        if located is None:
            raise CommandNotFoundError('Could not find a `{0}` binary'.format(cmd))
        return located
    if os.path.isdir(bin_env):
        candidate = os.path.join(bin_env, 'bin', cmd)
        if os.path.isfile(candidate):
            return candidate
        raise CommandNotFoundError('Could not find a `{0}` binary'.format(cmd))
    return bin_env
[ "def", "_get_supervisorctl_bin", "(", "bin_env", ")", ":", "cmd", "=", "'supervisorctl'", "if", "(", "not", "bin_env", ")", ":", "which_result", "=", "__salt__", "[", "'cmd.which_bin'", "]", "(", "[", "cmd", "]", ")", "if", "(", "which_result", "is", "None...
return supervisorctl command to call .
train
true
17,392
def test_newline():
    """Check that newline() leaves the logger's handlers and level intact."""
    handlers_before = logger.handlers
    level_before = logger.getEffectiveLevel()
    newline(logger)
    assert handlers_before == logger.handlers
    assert level_before == logger.getEffectiveLevel()
[ "def", "test_newline", "(", ")", ":", "handlers", "=", "logger", ".", "handlers", "level", "=", "logger", ".", "getEffectiveLevel", "(", ")", "newline", "(", "logger", ")", "assert", "(", "handlers", "==", "logger", ".", "handlers", ")", "assert", "(", "...
test the state of a the logger passed to the newline function .
train
false
17,394
def elementStream():
    """Construct an element stream, preferring the expat-based parser.

    Falls back to SuxElementStream when expat is unavailable (construction
    raises ImportError); raises when no parser can be constructed at all.
    """
    try:
        return ExpatElementStream()
    except ImportError:
        if SuxElementStream is None:
            raise Exception('No parsers available :(')
        return SuxElementStream()
[ "def", "elementStream", "(", ")", ":", "try", ":", "es", "=", "ExpatElementStream", "(", ")", "return", "es", "except", "ImportError", ":", "if", "(", "SuxElementStream", "is", "None", ")", ":", "raise", "Exception", "(", "'No parsers available :('", ")", "e...
preferred method to construct an elementstream uses expat-based stream if available .
train
false
17,395
def decode_network_packet(buf):
    """Decode a collectd-format network packet, yielding (type, value) pairs.

    Walks the buffer part by part using the module-level `header` struct and
    `_decoders` dispatch table.  Raises ValueError on a truncated part or an
    unknown part type.
    """
    offset = 0
    total = len(buf)
    while offset < total:
        ptype, plen = header.unpack_from(buf, offset)
        if plen > total - offset:
            raise ValueError('Packet longer than amount of data in buffer')
        if ptype not in _decoders:
            raise ValueError('Message type %i not recognized' % ptype)
        yield (ptype, _decoders[ptype](ptype, plen, buf[offset:]))
        offset += plen
[ "def", "decode_network_packet", "(", "buf", ")", ":", "off", "=", "0", "blen", "=", "len", "(", "buf", ")", "while", "(", "off", "<", "blen", ")", ":", "(", "ptype", ",", "plen", ")", "=", "header", ".", "unpack_from", "(", "buf", ",", "off", ")"...
decodes a network packet in collectd format .
train
true
17,396
def launch(dpid, port, port_eth=None, name=None, __INSTANCE__=None):
    """Launch a DHCPClient component bound to the given switch port.

    Registers the client on the core object once OpenFlow is ready; a unique
    component name ('DHCPClient', 'DHCPClient1', ...) is chosen when *name*
    is not given.
    """
    if port_eth in (True, None):
        pass
    else:
        port_eth = EthAddr(port_eth)
    dpid = str_to_dpid(dpid)
    try:
        port = int(port)
    except Exception:  # was a bare except; port may legitimately be a name
        pass

    def dhcpclient_init():
        # Pick a free component name when none was supplied.
        n = name
        if n is None:
            s = ''
            while True:
                if not core.hasComponent('DHCPClient' + s):
                    n = 'DHCPClient' + s
                    break
                s = str(int('0' + s) + 1)
        elif core.hasComponent(n):
            # BUG FIX: was `self.log.error` — there is no `self` in this
            # module-level function, which raised NameError on this path.
            # Uses the module-level `log` (assumed POX logger — TODO confirm).
            log.error('Already have component %s', n)
            return
        client = DHCPClient(port=port, dpid=dpid, name=n, port_eth=port_eth)
        core.register(n, client)

    core.call_when_ready(dhcpclient_init, ['openflow'])
[ "def", "launch", "(", "dpid", ",", "port", ",", "port_eth", "=", "None", ",", "name", "=", "None", ",", "__INSTANCE__", "=", "None", ")", ":", "if", "(", "port_eth", "in", "(", "True", ",", "None", ")", ")", ":", "pass", "else", ":", "port_eth", ...
pxshark -- prints packets .
train
false
17,397
def refuse_with_confirmation(proc, TIMEOUT):
    """Ensure a suggested fix can be refused when confirmation is enabled.

    The deliberately misspelled command ('ehco') triggers the correction
    prompt, which is then aborted with Ctrl+C.
    """
    _set_confirmation(proc, True)
    # Type the broken command, then ask for the fix.
    proc.sendline(u'ehco test')
    proc.sendline(u'fuck')
    # The corrected command and the confirmation prompt must appear.
    assert proc.expect([TIMEOUT, u'echo test'])
    assert proc.expect([TIMEOUT, u'enter'])
    assert proc.expect_exact([TIMEOUT, u'ctrl+c'])
    # Refuse via Ctrl+C and expect the abort message.
    proc.send('\x03')
    assert proc.expect([TIMEOUT, u'Aborted'])
[ "def", "refuse_with_confirmation", "(", "proc", ",", "TIMEOUT", ")", ":", "_set_confirmation", "(", "proc", ",", "True", ")", "proc", ".", "sendline", "(", "u'ehco test'", ")", "proc", ".", "sendline", "(", "u'fuck'", ")", "assert", "proc", ".", "expect", ...
ensures that fix can be refused when confirmation enabled .
train
false
17,398
def suite():
    """Build the unittest suite containing the sample tests."""
    tests = unittest.TestSuite()
    tests.addTest(SampleTests('testGoodStuff'))
    return tests
[ "def", "suite", "(", ")", ":", "testSuite", "=", "unittest", ".", "TestSuite", "(", ")", "testSuite", ".", "addTest", "(", "SampleTests", "(", "'testGoodStuff'", ")", ")", "return", "testSuite" ]
builds a test suite for the gdal tests .
train
false
17,399
def stub_function_only(func):
    """Mark *func* as supported only by the software stub loader."""
    return check_supported_function(func, lambda o: o.IS_STUB)
[ "def", "stub_function_only", "(", "func", ")", ":", "return", "check_supported_function", "(", "func", ",", "(", "lambda", "o", ":", "o", ".", "IS_STUB", ")", ")" ]
attribute for a function only supported in the software stub loader .
train
false
17,400
def stack_size(size=None):
    """Dummy _thread.stack_size: querying returns 0, setting is unsupported.

    Raises the module-level `error` when a size is given, since this dummy
    implementation cannot change thread stack sizes.
    """
    if size is None:
        return 0
    raise error('setting thread stack size not supported')
[ "def", "stack_size", "(", "size", "=", "None", ")", ":", "if", "(", "size", "is", "not", "None", ")", ":", "raise", "error", "(", "'setting thread stack size not supported'", ")", "return", "0" ]
dummy implementation of _thread .
train
false
17,401
def get_template_id(kwargs=None, call=None):
    """Return a template's id from the given template name.

    kwargs must contain 'name'.  Must be invoked as a salt-cloud function
    (-f/--function), never as an action.

    Raises SaltCloudSystemExit on wrong invocation, a missing 'name', or an
    unknown template name.
    """
    if call == 'action':
        # BUG FIX: message previously referenced 'list_nodes_full' (copy-paste).
        raise SaltCloudSystemExit('The get_template_id function must be called with -f or --function.')
    if kwargs is None:
        kwargs = {}
    name = kwargs.get('name', None)
    if name is None:
        raise SaltCloudSystemExit("The get_template_id function requires a 'name'.")
    try:
        return list_templates()[name]['id']
    except KeyError:
        # BUG FIX: 'foound' -> 'found' in the user-facing message.
        raise SaltCloudSystemExit("The template '{0}' could not be found.".format(name))
[ "def", "get_template_id", "(", "kwargs", "=", "None", ",", "call", "=", "None", ")", ":", "if", "(", "call", "==", "'action'", ")", ":", "raise", "SaltCloudSystemExit", "(", "'The list_nodes_full function must be called with -f or --function.'", ")", "if", "(", "k...
returns a templates id from the given template name .
train
true
17,403
def compareDistanceFromCenter(self, other):
    """Compare two y-intersections by |yMinusCenter| for ascending sort.

    Returns 1, -1 or 0 as self is farther from, closer to, or equally far
    from the center as other.
    """
    mine = abs(self.yMinusCenter)
    theirs = abs(other.yMinusCenter)
    if mine > theirs:
        return 1
    if mine < theirs:
        return -1
    return 0
[ "def", "compareDistanceFromCenter", "(", "self", ",", "other", ")", ":", "distanceFromCenter", "=", "abs", "(", "self", ".", "yMinusCenter", ")", "distanceFromCenterOther", "=", "abs", "(", "other", ".", "yMinusCenter", ")", "if", "(", "distanceFromCenter", ">",...
get comparison in order to sort y intersections in ascending order of distance from the center .
train
false
17,404
def monomial_ldiv(A, B):
    """Divide monomial exponent tuple A by B (componentwise subtraction)."""
    return tuple(a - b for a, b in zip(A, B))
[ "def", "monomial_ldiv", "(", "A", ",", "B", ")", ":", "return", "tuple", "(", "[", "(", "a", "-", "b", ")", "for", "(", "a", ",", "b", ")", "in", "zip", "(", "A", ",", "B", ")", "]", ")" ]
division of tuples representing monomials .
train
false
17,405
def remove_immutable_attribute(path):
    """Recursively clear the OS immutable flag on *path*.

    Uses chflags on Darwin and chattr on Linux; silently does nothing when
    the platform or its tool is unavailable.
    """
    system = platform.system()
    if system == constants.PLATFORM_DARWIN and os.path.isfile('/usr/bin/chflags'):
        subprocess.call(['/usr/bin/chflags', '-R', 'nouchg', path])
    elif system == constants.PLATFORM_LINUX and os.path.isfile('/usr/bin/chattr'):
        subprocess.call(['/usr/bin/chattr', '-R', '-i', path])
[ "def", "remove_immutable_attribute", "(", "path", ")", ":", "if", "(", "(", "platform", ".", "system", "(", ")", "==", "constants", ".", "PLATFORM_DARWIN", ")", "and", "os", ".", "path", ".", "isfile", "(", "'/usr/bin/chflags'", ")", ")", ":", "subprocess"...
remove the immutable attribute of the given path .
train
true
17,406
def _pinv_1d(v, eps=1e-05): return np.array([(0 if (abs(x) <= eps) else (1 / x)) for x in v], dtype=float)
[ "def", "_pinv_1d", "(", "v", ",", "eps", "=", "1e-05", ")", ":", "return", "np", ".", "array", "(", "[", "(", "0", "if", "(", "abs", "(", "x", ")", "<=", "eps", ")", "else", "(", "1", "/", "x", ")", ")", "for", "x", "in", "v", "]", ",", ...
a helper function for computing the pseudoinverse .
train
false
17,407
def clear_existing_package(parent_product):
    """Remove all package links from *parent_product* and refresh modes.

    Captures the current children first, deletes the links, then re-verifies
    and saves the parent and each former child.
    """
    former_children = parent_product.get_package_child_to_quantity_map().keys()
    ProductPackageLink.objects.filter(parent=parent_product).delete()
    parent_product.verify_mode()
    parent_product.save()
    for child in former_children:
        child.verify_mode()
        child.save()
[ "def", "clear_existing_package", "(", "parent_product", ")", ":", "children", "=", "parent_product", ".", "get_package_child_to_quantity_map", "(", ")", ".", "keys", "(", ")", "ProductPackageLink", ".", "objects", ".", "filter", "(", "parent", "=", "parent_product",...
utility function for clearing existing package .
train
false
17,408
def BinomialCoef(n, k):
    """Compute the binomial coefficient "n choose k" as a float."""
    # BUG FIX: scipy.misc.comb was deprecated in SciPy 1.0 and removed in
    # 1.3; scipy.special.comb is the drop-in replacement.
    from scipy.special import comb
    return comb(n, k)
[ "def", "BinomialCoef", "(", "n", ",", "k", ")", ":", "return", "scipy", ".", "misc", ".", "comb", "(", "n", ",", "k", ")" ]
compute the binomial coefficient "n choose k" .
train
false
17,409
def insert_metadata(module_data, new_metadata, insertion_line, targets=('ANSIBLE_METADATA',)):
    """Insert a pretty-printed metadata assignment into module source.

    new_metadata is pformat()-ed (width=1, one item per line) and assigned
    to every name in *targets* (chained with ' = '); the assignment plus a
    trailing blank line is spliced in before *insertion_line* (0-based).
    Returns the modified source as a single string.
    """
    assignments = ' = '.join(targets)
    pretty = pformat(new_metadata, width=1).split('\n')
    inserted = ['{} = {}'.format(assignments, pretty[0])]
    # Continuation lines are aligned under the opening brace of the literal.
    continuation_indent = ' ' * ((len(assignments) - 1) + len(' = {'))
    for extra in pretty[1:]:
        inserted.append('{}{}'.format(continuation_indent, extra))
    original = module_data.split('\n')
    merged = original[:insertion_line] + inserted + [''] + original[insertion_line:]
    return '\n'.join(merged)
[ "def", "insert_metadata", "(", "module_data", ",", "new_metadata", ",", "insertion_line", ",", "targets", "=", "(", "'ANSIBLE_METADATA'", ",", ")", ")", ":", "assignments", "=", "' = '", ".", "join", "(", "targets", ")", "pretty_metadata", "=", "pformat", "(",...
insert a new set of metadata at a specified line .
train
false
17,412
def read_random_bits(nbits):
    """Read *nbits* random bits from os.urandom, returned as bytes.

    When nbits is not a multiple of 8, one extra byte is read and shifted
    down so only the requested number of bits remain; that partial byte is
    prepended to the whole bytes.
    """
    nbytes, rbits = divmod(nbits, 8)
    randomdata = os.urandom(nbytes)
    if rbits > 0:
        partial = ord(os.urandom(1)) >> (8 - rbits)
        randomdata = byte(partial) + randomdata
    return randomdata
[ "def", "read_random_bits", "(", "nbits", ")", ":", "(", "nbytes", ",", "rbits", ")", "=", "divmod", "(", "nbits", ",", "8", ")", "randomdata", "=", "os", ".", "urandom", "(", "nbytes", ")", "if", "(", "rbits", ">", "0", ")", ":", "randomvalue", "="...
reads nbits random bits .
train
false
17,413
def validate_read_preference_tags(name, value):
    """Parse readPreferenceTags passed as a client keyword argument.

    Each tag set is a 'k:v,k:v' string ('' meaning the empty tag set); a
    bare string is treated as a one-element list.  Returns a list of dicts.

    Raises ValueError for a malformed tag set.
    """
    if not isinstance(value, list):
        value = [value]
    parsed = []
    for tag_set in value:
        if tag_set == '':
            parsed.append({})
            continue
        try:
            pairs = (tag.split(':') for tag in tag_set.split(','))
            parsed.append(dict(pairs))
        except Exception:
            raise ValueError('%r not a valid value for %s' % (tag_set, name))
    return parsed
[ "def", "validate_read_preference_tags", "(", "name", ",", "value", ")", ":", "if", "(", "not", "isinstance", "(", "value", ",", "list", ")", ")", ":", "value", "=", "[", "value", "]", "tag_sets", "=", "[", "]", "for", "tag_set", "in", "value", ":", "...
parse readpreferencetags if passed as a client kwarg .
train
true
17,414
@CELERY_APP.task(name='lti_provider.tasks.send_composite_outcome')
def send_composite_outcome(user_id, course_id, assignment_id, version):
    """Calculate and transmit the score for a composite module.

    Skips the passback when the stored assignment version no longer matches
    *version* (a newer score is already pending), and re-checks the version
    right before sending to narrow the race window.
    """
    assignment = GradedAssignment.objects.get(id=assignment_id)
    if version != assignment.version_number:
        log.info('Score passback for GradedAssignment %s skipped. More recent score available.', assignment.id)
        return
    course_key = CourseKey.from_string(course_id)
    mapped_usage_key = assignment.usage_key.map_into_course(course_key)
    user = User.objects.get(id=user_id)
    course = modulestore().get_course(course_key, depth=0)
    course_grade = CourseGradeFactory().create(user, course)
    earned, possible = course_grade.score_for_module(mapped_usage_key)
    # Avoid division by zero when the module has no gradable points.
    weighted_score = 0 if possible == 0 else float(earned) / float(possible)
    assignment = GradedAssignment.objects.get(id=assignment_id)
    if assignment.version_number == version:
        outcomes.send_score_update(assignment, weighted_score)
[ "@", "CELERY_APP", ".", "task", "(", "name", "=", "'lti_provider.tasks.send_composite_outcome'", ")", "def", "send_composite_outcome", "(", "user_id", ",", "course_id", ",", "assignment_id", ",", "version", ")", ":", "assignment", "=", "GradedAssignment", ".", "obje...
calculate and transmit the score for a composite module .
train
false
17,415
def get_dict_from_obj(obj):
    """Build a plain dict from a Django model instance's __dict__.

    ForeignKey columns stored as '<name>_id' are re-keyed to '<name>', and
    many-to-many fields are added as lists of related-object ids (only when
    non-empty).  Relies on the pre-Django-1.10 ``_meta.get_field_by_name``
    API — presumably this targets an old Django; verify before reuse.
    """
    obj_dict = obj.__dict__
    obj_dict_result = obj_dict.copy()
    for (key, value) in obj_dict.items():
        if key.endswith('_id'):
            # NOTE(review): replace() strips every '_id' occurrence, not just
            # the suffix — a field like 'x_idx_id' would be mangled; confirm.
            key2 = key.replace('_id', '')
            try:
                (field, model, direct, m2m) = obj._meta.get_field_by_name(key2)
                if isinstance(field, ForeignKey):
                    # Expose the FK under its bare field name instead of the
                    # raw column name.
                    obj_dict_result[key2] = obj_dict_result[key]
                    del obj_dict_result[key]
            except FieldDoesNotExist:
                # Not a relation; keep the raw attribute as-is.
                pass
    manytomany_list = obj._meta.many_to_many
    for manytomany in manytomany_list:
        ids = [obj_rel.id for obj_rel in manytomany.value_from_object(obj).select_related()]
        if ids:
            obj_dict_result[manytomany.name] = ids
    return obj_dict_result
[ "def", "get_dict_from_obj", "(", "obj", ")", ":", "obj_dict", "=", "obj", ".", "__dict__", "obj_dict_result", "=", "obj_dict", ".", "copy", "(", ")", "for", "(", "key", ",", "value", ")", "in", "obj_dict", ".", "items", "(", ")", ":", "if", "key", "....
edit to get the dict even when the object is a genericrelatedobjectmanager .
train
true
17,416
@frappe.whitelist()
def validate_link():
    """Validate a Link-field value entered by the user.

    Reads value/options/fetch from the request's form dict (options is the
    target DocType).  Sets frappe.response['valid_value'] when the name
    exists, optionally 'fetch_values' for requested columns, and always
    'message' = 'Ok'.
    """
    import frappe
    import frappe.utils
    (value, options, fetch) = (frappe.form_dict.get(u'value'), frappe.form_dict.get(u'options'), frappe.form_dict.get(u'fetch'))
    if ((not options) or (options == u'null') or (options == u'undefined')):
        # No target DocType supplied; nothing to validate.
        frappe.response[u'message'] = u'Ok'
        return
    # NOTE(review): table and column names are spliced into the SQL string
    # via % formatting; only the looked-up value is parameterized.  Escaping
    # is applied via frappe.db.escape, but verify it is sufficient for
    # identifiers before reusing this pattern.
    valid_value = frappe.db.sql((u'select name from `tab%s` where name=%s' % (frappe.db.escape(options), u'%s')), (value,))
    if valid_value:
        valid_value = valid_value[0][0]
        if fetch:
            # Fetch the requested (comma-separated) columns of the matched row.
            fetch = u', '.join((u'`{0}`'.format(frappe.db.escape(f.strip())) for f in fetch.split(u',')))
            frappe.response[u'fetch_values'] = [frappe.utils.parse_val(c) for c in frappe.db.sql((u'select %s from `tab%s` where name=%s' % (fetch, frappe.db.escape(options), u'%s')), (value,))[0]]
        frappe.response[u'valid_value'] = valid_value
    frappe.response[u'message'] = u'Ok'
[ "@", "frappe", ".", "whitelist", "(", ")", "def", "validate_link", "(", ")", ":", "import", "frappe", "import", "frappe", ".", "utils", "(", "value", ",", "options", ",", "fetch", ")", "=", "(", "frappe", ".", "form_dict", ".", "get", "(", "u'value'", ...
validate link when updated by user .
train
false
17,417
def is_darwin():
    """Return True when running on macOS (Darwin)."""
    return sys.platform == u'darwin'
[ "def", "is_darwin", "(", ")", ":", "return", "(", "sys", ".", "platform", "==", "u'darwin'", ")" ]
return true on osx .
train
false
17,418
def test_sensitivity_specificity_error_multilabels():
    """sensitivity_score must raise ValueError on multilabel input."""
    y_true = [1, 3, 3, 2]
    y_pred = [1, 1, 3, 2]
    classes = np.arange(5)
    y_true_bin = label_binarize(y_true, classes=classes)
    y_pred_bin = label_binarize(y_pred, classes=classes)
    assert_raises(ValueError, sensitivity_score, y_true_bin, y_pred_bin)
[ "def", "test_sensitivity_specificity_error_multilabels", "(", ")", ":", "y_true", "=", "[", "1", ",", "3", ",", "3", ",", "2", "]", "y_pred", "=", "[", "1", ",", "1", ",", "3", ",", "2", "]", "y_true_bin", "=", "label_binarize", "(", "y_true", ",", "...
test either if an error is raised when the input are multilabels .
train
false
17,420
def _delete_fields(srv, data):
    """Strip fields not needed in the POST request for service srv[0].

    Mutates and returns *data*; services without an entry in DELETE_FIELDS
    are left untouched.
    """
    if srv[0] in DELETE_FIELDS:
        for field in DELETE_FIELDS[srv[0]]:
            if field in data:
                del data[field]
    return data
[ "def", "_delete_fields", "(", "srv", ",", "data", ")", ":", "if", "(", "srv", "[", "0", "]", "in", "DELETE_FIELDS", ")", ":", "for", "i", "in", "DELETE_FIELDS", "[", "srv", "[", "0", "]", "]", ":", "if", "(", "i", "in", "data", ")", ":", "del",...
delete not needed fields in post request .
train
false
17,423
def redirecting_io(sys=_sys):
    """Predicate: True when stdout is redirected in-process (has no fileno)."""
    assert sys is not None
    return not hasattr(sys.stdout, 'fileno')
[ "def", "redirecting_io", "(", "sys", "=", "_sys", ")", ":", "assert", "(", "sys", "is", "not", "None", ")", "return", "(", "not", "hasattr", "(", "sys", ".", "stdout", ",", "'fileno'", ")", ")" ]
predicate to determine if we are redirecting i/o in process .
train
false
17,424
def remove_readonly(func, path, _):
    """Error handler (rmtree-style): clear the read-only bit, then retry.

    Makes *path* writable and re-invokes the failed operation *func* on it;
    the third (exc-info) argument is ignored.
    """
    os.chmod(path, stat.S_IWRITE)
    func(path)
[ "def", "remove_readonly", "(", "func", ",", "path", ",", "_", ")", ":", "os", ".", "chmod", "(", "path", ",", "stat", ".", "S_IWRITE", ")", "func", "(", "path", ")" ]
clear the readonly bit and reattempt the removal .
train
true
17,425
@register(u'backward-delete-char')
def backward_delete_char(event):
    """Delete the character behind the cursor.

    A negative argument deletes in front of the cursor instead.  Rings the
    bell when nothing could be deleted.
    """
    buf = event.current_buffer
    if event.arg < 0:
        removed = buf.delete(count=-event.arg)
    else:
        removed = buf.delete_before_cursor(count=event.arg)
    if not removed:
        event.cli.output.bell()
[ "@", "register", "(", "u'backward-delete-char'", ")", "def", "backward_delete_char", "(", "event", ")", ":", "if", "(", "event", ".", "arg", "<", "0", ")", ":", "deleted", "=", "event", ".", "current_buffer", ".", "delete", "(", "count", "=", "(", "-", ...
delete the character behind the cursor .
train
true
17,427
@testing.requires_testing_data
@requires_mayavi
def test_coreg_gui():
    """Smoke-test the coregistration GUI fiducial workflow."""
    from mne.gui._coreg_gui import CoregFrame
    frame = CoregFrame()
    frame.edit_traits()
    frame.model.mri.subjects_dir = subjects_dir
    frame.model.mri.subject = 'sample'
    # No fiducials set yet.
    assert_false(frame.model.mri.fid_ok)
    frame.model.mri.lpa = [[-0.06, 0, 0]]
    frame.model.mri.nasion = [[0, 0.05, 0]]
    frame.model.mri.rpa = [[0.08, 0, 0]]
    # All three fiducials set -> model reports them as OK.
    assert_true(frame.model.mri.fid_ok)
[ "@", "testing", ".", "requires_testing_data", "@", "requires_mayavi", "def", "test_coreg_gui", "(", ")", ":", "from", "mne", ".", "gui", ".", "_coreg_gui", "import", "CoregFrame", "frame", "=", "CoregFrame", "(", ")", "frame", ".", "edit_traits", "(", ")", "...
test coregistration gui .
train
false
17,428
def _load_plugins_from_packages():
    """Load FlexGet plugins installed via pip ('FlexGet.plugins' entry points).

    Dependency problems are logged as a warning (debug when marked silent);
    import errors of the entry point itself are logged as critical; any
    other exception is logged and re-raised.
    """
    for entrypoint in pkg_resources.iter_entry_points(u'FlexGet.plugins'):
        try:
            plugin_module = entrypoint.load()
        except DependencyError as e:
            if e.has_message():
                msg = e.message
            else:
                # BUG FIX: msg was built as a tuple (format string, args),
                # which log.warning(msg) would print verbatim instead of
                # interpolating; format it into a string here.
                msg = (u'Plugin `%s` requires `%s` to load.'
                       % ((e.issued_by or entrypoint.module_name),
                          (e.missing or u'N/A')))
            if not e.silent:
                log.warning(msg)
            else:
                log.debug(msg)
        except ImportError:
            log.critical(u'Plugin `%s` failed to import dependencies', entrypoint.module_name, exc_info=True)
        except Exception:
            log.critical(u'Exception while loading plugin %s', entrypoint.module_name, exc_info=True)
            raise
        else:
            log.trace(u'Loaded packaged module %s from %s', entrypoint.module_name, plugin_module.__file__)
    _check_phase_queue()
[ "def", "_load_plugins_from_packages", "(", ")", ":", "for", "entrypoint", "in", "pkg_resources", ".", "iter_entry_points", "(", "u'FlexGet.plugins'", ")", ":", "try", ":", "plugin_module", "=", "entrypoint", ".", "load", "(", ")", "except", "DependencyError", "as"...
load plugins installed via pip .
train
false
17,429
def getIsLarge(loop, radius):
    """Return True when the loop's maximum span exceeds the circle's
    diameter (with a 0.5% tolerance factor of 2.01)."""
    threshold = 2.01 * abs(radius)
    return euclidean.getMaximumSpan(loop) > threshold
[ "def", "getIsLarge", "(", "loop", ",", "radius", ")", ":", "return", "(", "euclidean", ".", "getMaximumSpan", "(", "loop", ")", ">", "(", "2.01", "*", "abs", "(", "radius", ")", ")", ")" ]
determine if the loop is large enough .
train
false
17,430
def connect_server(server, namespace='', user='', password='', locale='', authority='', impersonation_level='', authentication_level='', security_flags=128, named_value_set=None):
    """Return a connection to a remote server running WMI.

    Resolves the optional impersonation/authentication level names against
    the WMI constants, connects through WbemScripting.SWbemLocator, and
    applies the chosen security levels to the returned connection.

    Raises x_wmi_authentication for an unknown level name.
    """
    if impersonation_level:
        try:
            impersonation = getattr(obj._constants, ('wbemImpersonationLevel%s' % impersonation_level.title()))
        except AttributeError:
            raise x_wmi_authentication(('No such impersonation level: %s' % impersonation_level))
    else:
        impersonation = None
    if authentication_level:
        try:
            authentication = getattr(obj._constants, ('wbemAuthenticationLevel%s' % authentication_level.title()))
        except AttributeError:
            # BUG FIX: this branch previously reported "impersonation level"
            # and interpolated the wrong variable (copy-paste error).
            raise x_wmi_authentication(('No such authentication level: %s' % authentication_level))
    else:
        authentication = None
    server = Dispatch('WbemScripting.SWbemLocator').ConnectServer(server, namespace, user, password, locale, authority, security_flags, named_value_set)
    if impersonation:
        server.Security_.ImpersonationLevel = impersonation
    if authentication:
        server.Security_.AuthenticationLevel = authentication
    return server
[ "def", "connect_server", "(", "server", ",", "namespace", "=", "''", ",", "user", "=", "''", ",", "password", "=", "''", ",", "locale", "=", "''", ",", "authority", "=", "''", ",", "impersonation_level", "=", "''", ",", "authentication_level", "=", "''",...
return a remote server running wmi .
train
false
17,431
def summary():
    """REST controller summary endpoint; ensures assess tables exist first."""
    assess_tables()
    return s3_rest_controller()
[ "def", "summary", "(", ")", ":", "assess_tables", "(", ")", "return", "s3_rest_controller", "(", ")" ]
return a short html summary of a genericimagedatasetjob .
train
false
17,432
def task_description(task):
    """Return a human-readable string description of *task*.

    Bound methods are described as '<name> from <owner>', plain functions
    by name, and anything else by its repr (safely decoded).
    """
    name = task.__name__ if hasattr(task, '__name__') else None
    if name is None or not isinstance(task, (types.MethodType, types.FunctionType)):
        return encodeutils.safe_decode(repr(task))
    owner = getattr(task, '__self__', None)
    if owner is not None:
        return '%s from %s' % (six.text_type(name), owner)
    return six.text_type(name)
[ "def", "task_description", "(", "task", ")", ":", "name", "=", "(", "task", ".", "__name__", "if", "hasattr", "(", "task", ",", "'__name__'", ")", "else", "None", ")", "if", "(", "(", "name", "is", "not", "None", ")", "and", "isinstance", "(", "task"...
return a human-readable string description of a task .
train
false
17,433
def unqote(text):
    """Strip one matching pair of leading/trailing quotes from *text*.

    NOTE(review): a lone quote character satisfies both the startswith and
    endswith checks and is reduced to '' — confirm callers expect that.
    """
    double_quoted = text.startswith('"') and text.endswith('"')
    single_quoted = text.startswith("'") and text.endswith("'")
    if double_quoted or single_quoted:
        return text[1:-1]
    return text
[ "def", "unqote", "(", "text", ")", ":", "if", "(", "(", "text", ".", "startswith", "(", "'\"'", ")", "and", "text", ".", "endswith", "(", "'\"'", ")", ")", "or", "(", "text", ".", "startswith", "(", "\"'\"", ")", "and", "text", ".", "endswith", "...
strip pair of leading and trailing quotes from text .
train
false
17,435
def _get_test_app():
    """Return a webtest-wrapped CKAN app configured for testing."""
    config['ckan.legacy_templates'] = False
    config['testing'] = True
    wsgi_app = ckan.config.middleware.make_app(config['global_conf'], **config)
    return CKANTestApp(wsgi_app)
[ "def", "_get_test_app", "(", ")", ":", "config", "[", "'ckan.legacy_templates'", "]", "=", "False", "config", "[", "'testing'", "]", "=", "True", "app", "=", "ckan", ".", "config", ".", "middleware", ".", "make_app", "(", "config", "[", "'global_conf'", "]...
return a webtest .
train
false
17,436
def shuffle_row_order(m):
    """Return a copy of matrix *m* with its rows in random order.

    Intended for Monte Carlo analyses where rows are samples.
    """
    # BUG FIX: on Python 3, range() is not a mutable sequence, so passing it
    # straight to random.shuffle raises TypeError; materialize a list first
    # (behavior on Python 2 is unchanged).
    order = list(range(m.shape[0]))
    shuffle(order)
    return array(m[order])
[ "def", "shuffle_row_order", "(", "m", ")", ":", "order", "=", "range", "(", "m", ".", "shape", "[", "0", "]", ")", "shuffle", "(", "order", ")", "return", "array", "(", "m", "[", "order", "]", ")" ]
jk thinks this should be the one used for monte carlo analyses procrustes takes samples by dimensions .
train
false
17,437
def canonical(request, uploaded_at, file_id):
    """Redirect to the current URL of a public filer file.

    Raises Http404 when the file is missing/not public, has no underlying
    file, or the uploaded_at stamp does not match its canonical time
    (stale canonical link).
    """
    filer_file = get_object_or_404(File, pk=file_id, is_public=True)
    stale = (not filer_file.file) or int(uploaded_at) != filer_file.canonical_time
    if stale:
        raise Http404(u'No %s matches the given query.' % File._meta.object_name)
    return redirect(filer_file.url)
[ "def", "canonical", "(", "request", ",", "uploaded_at", ",", "file_id", ")", ":", "filer_file", "=", "get_object_or_404", "(", "File", ",", "pk", "=", "file_id", ",", "is_public", "=", "True", ")", "if", "(", "(", "not", "filer_file", ".", "file", ")", ...
redirect to the current url of a public file .
train
true
17,438
def _host_dhcp(data):
    """Return a dhcp-host format string for one address mapping.

    Appends a per-network 'net:' tag when single default gateways are
    enabled in CONF.
    """
    base = '%s,%s.%s,%s' % (data['vif_address'], data['instance_hostname'],
                            CONF.dhcp_domain, data['address'])
    if CONF.use_single_default_gateway:
        return '%s,%s' % (base, 'net:' + _host_dhcp_network(data))
    return base
[ "def", "_host_dhcp", "(", "data", ")", ":", "if", "CONF", ".", "use_single_default_gateway", ":", "return", "(", "'%s,%s.%s,%s,%s'", "%", "(", "data", "[", "'vif_address'", "]", ",", "data", "[", "'instance_hostname'", "]", ",", "CONF", ".", "dhcp_domain", "...
return a host string for an address in dhcp-host format .
train
false
17,439
def standard_normal_ll(input_):
    """Element-wise log-likelihood under a standard Gaussian:
    -0.5 * (x^2 + log(2*pi))."""
    log_two_pi = numpy.log(2.0 * numpy.pi)
    return -0.5 * (tf.square(input_) + log_two_pi)
[ "def", "standard_normal_ll", "(", "input_", ")", ":", "res", "=", "(", "(", "-", "0.5", ")", "*", "(", "tf", ".", "square", "(", "input_", ")", "+", "numpy", ".", "log", "(", "(", "2.0", "*", "numpy", ".", "pi", ")", ")", ")", ")", "return", ...
log-likelihood of standard gaussian distribution .
train
false
17,441
@contextmanager
def environment_as(**kwargs):
    """Temporarily set/unset environment variables, restoring them on exit.

    A value of None removes the variable for the duration of the block;
    prior values (or absence) are restored even if the body raises.
    """
    saved = {}

    def _apply(key, val):
        # None means "ensure absent"; anything else is assigned verbatim.
        if val is not None:
            os.environ[key] = val
        elif key in os.environ:
            del os.environ[key]

    for key, val in kwargs.items():
        saved[key] = os.environ.get(key)
        _apply(key, val)
    try:
        yield
    finally:
        for key, val in saved.items():
            _apply(key, val)
[ "@", "contextmanager", "def", "environment_as", "(", "**", "kwargs", ")", ":", "new_environment", "=", "kwargs", "old_environment", "=", "{", "}", "def", "setenv", "(", "key", ",", "val", ")", ":", "if", "(", "val", "is", "not", "None", ")", ":", "os",...
update the environment to the supplied values .
train
false
17,442
def try_(module, return_try_exception=False, **kwargs):
    """Try to run a salt execution-module function, swallowing exceptions.

    Returns the call's result; on error returns the exception object when
    return_try_exception is set, else None.
    """
    try:
        return __salt__[module](**kwargs)
    except Exception as exc:
        return exc if return_try_exception else None
[ "def", "try_", "(", "module", ",", "return_try_exception", "=", "False", ",", "**", "kwargs", ")", ":", "try", ":", "return", "__salt__", "[", "module", "]", "(", "**", "kwargs", ")", "except", "Exception", "as", "e", ":", "if", "return_try_exception", "...
try to run a module command .
train
false
17,444
def doc_role(name, rawtext, text, lineno, inliner, options={}, content=[]):
    """Handle the reST ``doc`` role, producing a link to a post by slug.

    On an unknown slug a docutils error/problematic pair is returned; when
    several posts share the slug a warning is emitted and the first match
    is used.  (The mutable defaults follow the docutils role signature and
    are not mutated here.)
    """
    success, twin_slugs, title, permalink, slug = _doc_link(rawtext, text, options, content)
    if not success:
        msg = inliner.reporter.error('"{0}" slug doesn\'t exist.'.format(slug), line=lineno)
        prb = inliner.problematic(rawtext, rawtext, msg)
        return ([prb], [msg])
    if twin_slugs:
        inliner.reporter.warning('More than one post with the same slug. Using "{0}"'.format(permalink))
        LOGGER.warn('More than one post with the same slug. Using "{0}" for doc role'.format(permalink))
    node = make_link_node(rawtext, title, permalink, options)
    return ([node], [])
[ "def", "doc_role", "(", "name", ",", "rawtext", ",", "text", ",", "lineno", ",", "inliner", ",", "options", "=", "{", "}", ",", "content", "=", "[", "]", ")", ":", "(", "success", ",", "twin_slugs", ",", "title", ",", "permalink", ",", "slug", ")",...
handle the doc role .
train
false
17,446
@pytest.mark.network
def test_pip_wheel_builds_editable_deps(script, data):
    """pip wheel should find and build the dependencies of editables."""
    script.pip('install', 'wheel')
    editable_path = os.path.join(data.src, 'requires_simple')
    result = script.pip('wheel', '--no-index', '-f', data.find_links, '-e', editable_path)
    expected_name = 'simple-1.0-py%s-none-any.whl' % pyversion[0]
    expected_path = script.scratch / expected_name
    assert expected_path in result.files_created, result.stdout
[ "@", "pytest", ".", "mark", ".", "network", "def", "test_pip_wheel_builds_editable_deps", "(", "script", ",", "data", ")", ":", "script", ".", "pip", "(", "'install'", ",", "'wheel'", ")", "editable_path", "=", "os", ".", "path", ".", "join", "(", "data", ...
test pip wheel finds and builds dependencies of editables .
train
false
17,447
def SmartStr(string):
    """Return a str, UTF-8-encoding unicode objects (Python 2 semantics).

    Undecodable characters are ignored during encoding; non-unicode inputs
    are passed through str().
    """
    if type(string) == unicode:
        return string.encode('utf8', 'ignore')
    return str(string)
[ "def", "SmartStr", "(", "string", ")", ":", "if", "(", "type", "(", "string", ")", "==", "unicode", ")", ":", "return", "string", ".", "encode", "(", "'utf8'", ",", "'ignore'", ")", "return", "str", "(", "string", ")" ]
returns a string or encodes a unicode object .
train
false
17,448
def mod_import(module): if (not module): return None if isinstance(module, types.ModuleType): mod = module else: try: mod = __import__(module, fromlist=['None']) except ImportError as ex: if (str(ex) != 'Import by filename is not supported.'): raise if (not os.path.isabs(module)): module = os.path.abspath(module) (path, filename) = module.rsplit(os.path.sep, 1) modname = re.sub('\\.py$', '', filename) try: result = imp.find_module(modname, [path]) except ImportError: logger.log_trace(("Could not find module '%s' (%s.py) at path '%s'" % (modname, modname, path))) return try: mod = imp.load_module(modname, *result) except ImportError: logger.log_trace(("Could not find or import module %s at path '%s'" % (modname, path))) mod = None result[0].close() return mod
[ "def", "mod_import", "(", "module", ")", ":", "if", "(", "not", "module", ")", ":", "return", "None", "if", "isinstance", "(", "module", ",", "types", ".", "ModuleType", ")", ":", "mod", "=", "module", "else", ":", "try", ":", "mod", "=", "__import__...
a generic python module loader .
train
false
17,449
def _parse_entry(line, quoted, escaped): if (line == ''): raise IndexError('no remaining content to parse') (next_entry, remainder) = ('', line) if quoted: (start_quote, end_quote) = _get_quote_indices(remainder, escaped) if ((start_quote != 0) or (end_quote == (-1))): raise ValueError(("the next entry isn't a quoted value: " + line)) (next_entry, remainder) = (remainder[1:end_quote], remainder[(end_quote + 1):]) elif (' ' in remainder): (next_entry, remainder) = remainder.split(' ', 1) else: (next_entry, remainder) = (remainder, '') if escaped: next_entry = _unescape(next_entry) return (next_entry, remainder.lstrip())
[ "def", "_parse_entry", "(", "line", ",", "quoted", ",", "escaped", ")", ":", "if", "(", "line", "==", "''", ")", ":", "raise", "IndexError", "(", "'no remaining content to parse'", ")", "(", "next_entry", ",", "remainder", ")", "=", "(", "''", ",", "line...
parses the next entry from the given space separated content .
train
false
17,450
def test_iterable(value): try: iter(value) except TypeError: return False return True
[ "def", "test_iterable", "(", "value", ")", ":", "try", ":", "iter", "(", "value", ")", "except", "TypeError", ":", "return", "False", "return", "True" ]
check if its possible to iterate over an object .
train
false
17,451
def FilterCollection(collection, offset, count=0, filter_value=None): if (offset < 0): raise ValueError('Offset needs to be greater than or equal to zero') if (count < 0): raise ValueError('Count needs to be greater than or equal to zero') count = (count or sys.maxint) if filter_value: index = 0 items = [] for item in collection.GenerateItems(): serialized_item = item.SerializeToString() if re.search(re.escape(filter_value), serialized_item, re.I): if (index >= offset): items.append(item) index += 1 if (len(items) >= count): break else: items = list(itertools.islice(collection.GenerateItems(offset), count)) return items
[ "def", "FilterCollection", "(", "collection", ",", "offset", ",", "count", "=", "0", ",", "filter_value", "=", "None", ")", ":", "if", "(", "offset", "<", "0", ")", ":", "raise", "ValueError", "(", "'Offset needs to be greater than or equal to zero'", ")", "if...
filters an aff4 collection .
train
true
17,452
def test_validate_if_list_of_axes(): import matplotlib.pyplot as plt (fig, ax) = plt.subplots(2, 2) assert_raises(ValueError, _validate_if_list_of_axes, ax) ax_flat = ax.ravel() ax = ax.ravel().tolist() _validate_if_list_of_axes(ax_flat) _validate_if_list_of_axes(ax_flat, 4) assert_raises(ValueError, _validate_if_list_of_axes, ax_flat, 5) assert_raises(ValueError, _validate_if_list_of_axes, ax, 3) assert_raises(ValueError, _validate_if_list_of_axes, 'error') assert_raises(ValueError, _validate_if_list_of_axes, (['error'] * 2)) assert_raises(ValueError, _validate_if_list_of_axes, ax[0]) assert_raises(ValueError, _validate_if_list_of_axes, ax, 3) ax_flat[2] = 23 assert_raises(ValueError, _validate_if_list_of_axes, ax_flat) _validate_if_list_of_axes(ax, 4)
[ "def", "test_validate_if_list_of_axes", "(", ")", ":", "import", "matplotlib", ".", "pyplot", "as", "plt", "(", "fig", ",", "ax", ")", "=", "plt", ".", "subplots", "(", "2", ",", "2", ")", "assert_raises", "(", "ValueError", ",", "_validate_if_list_of_axes",...
test validation of axes .
train
false
17,453
def ipv4(value, options=None): return _ipv_filter(value, 4, options=options)
[ "def", "ipv4", "(", "value", ",", "options", "=", "None", ")", ":", "return", "_ipv_filter", "(", "value", ",", "4", ",", "options", "=", "options", ")" ]
return whether or not given value is a valid ip version 4 address .
train
false
17,454
def google_voice_login(email, passwd): global _voice if (not _gv_available): print >>sys.stderr, 'The pygooglevoice Python package is required in order to use Google Voice.' return _voice = googlevoice.Voice() _voice.login(email, passwd)
[ "def", "google_voice_login", "(", "email", ",", "passwd", ")", ":", "global", "_voice", "if", "(", "not", "_gv_available", ")", ":", "print", ">>", "sys", ".", "stderr", ",", "'The pygooglevoice Python package is required in order to use Google Voice.'", "return", "_v...
logs into your google voice account with your full email address and password .
train
false
17,455
def download_dataset(filename='titanic_dataset.csv', work_directory='./'): url = 'http://tflearn.org/resources/titanic_dataset.csv' if (not os.path.exists(work_directory)): os.mkdir(work_directory) filepath = os.path.join(work_directory, filename) if (not os.path.exists(filepath)): print('Downloading Titanic dataset...') (filepath, _) = urllib.request.urlretrieve(url, filepath) statinfo = os.stat(filepath) print('Succesfully downloaded', filename, statinfo.st_size, 'bytes.') return filepath
[ "def", "download_dataset", "(", "filename", "=", "'titanic_dataset.csv'", ",", "work_directory", "=", "'./'", ")", ":", "url", "=", "'http://tflearn.org/resources/titanic_dataset.csv'", "if", "(", "not", "os", ".", "path", ".", "exists", "(", "work_directory", ")", ...
downloads and extracts the dataset .
train
false
17,458
def _servicegroup_get(sg_name, **connection_args): nitro = _connect(**connection_args) if (nitro is None): return None sg = NSServiceGroup() sg.set_servicegroupname(sg_name) try: sg = NSServiceGroup.get(nitro, sg) except NSNitroError as error: log.debug('netscaler module error - NSServiceGroup.get() failed: {0}'.format(error)) sg = None _disconnect(nitro) return sg
[ "def", "_servicegroup_get", "(", "sg_name", ",", "**", "connection_args", ")", ":", "nitro", "=", "_connect", "(", "**", "connection_args", ")", "if", "(", "nitro", "is", "None", ")", ":", "return", "None", "sg", "=", "NSServiceGroup", "(", ")", "sg", "....
return a service group ressource or none .
train
true
17,461
def task_sort(): def list_sort(l): l = l[::(-1)] l.sort() return (list_sort, (list(range(1000)),))
[ "def", "task_sort", "(", ")", ":", "def", "list_sort", "(", "l", ")", ":", "l", "=", "l", "[", ":", ":", "(", "-", "1", ")", "]", "l", ".", "sort", "(", ")", "return", "(", "list_sort", ",", "(", "list", "(", "range", "(", "1000", ")", ")",...
list sorting (c) .
train
false
17,463
@verbose def lcmv_raw(raw, forward, noise_cov, data_cov, reg=0.01, label=None, start=None, stop=None, picks=None, pick_ori=None, rank=None, verbose=None): _check_reference(raw) info = raw.info picks = _setup_picks(picks, info, forward, noise_cov) (data, times) = raw[picks, start:stop] tmin = times[0] stc = _apply_lcmv(data=data, info=info, tmin=tmin, forward=forward, noise_cov=noise_cov, data_cov=data_cov, reg=reg, label=label, picks=picks, rank=rank, pick_ori=pick_ori) return six.advance_iterator(stc)
[ "@", "verbose", "def", "lcmv_raw", "(", "raw", ",", "forward", ",", "noise_cov", ",", "data_cov", ",", "reg", "=", "0.01", ",", "label", "=", "None", ",", "start", "=", "None", ",", "stop", "=", "None", ",", "picks", "=", "None", ",", "pick_ori", "...
linearly constrained minimum variance beamformer .
train
false
17,467
def load_file_list(path=None, regx='\\.npz', printable=True): if (path == False): path = os.getcwd() file_list = os.listdir(path) return_list = [] for (idx, f) in enumerate(file_list): if re.search(regx, f): return_list.append(f) if printable: print ('Match file list = %s' % return_list) print ('Number of files = %d' % len(return_list)) return return_list
[ "def", "load_file_list", "(", "path", "=", "None", ",", "regx", "=", "'\\\\.npz'", ",", "printable", "=", "True", ")", ":", "if", "(", "path", "==", "False", ")", ":", "path", "=", "os", ".", "getcwd", "(", ")", "file_list", "=", "os", ".", "listdi...
return a file list in a folder by given a path and regular expression .
train
false
17,468
def getComplexPolygonByComplexRadius(radius, sides, startAngle=0.0): complexPolygon = [] sideAngle = ((2.0 * math.pi) / float(sides)) for side in xrange(abs(sides)): unitPolar = getWiddershinsUnitPolar(startAngle) complexPolygon.append(complex((unitPolar.real * radius.real), (unitPolar.imag * radius.imag))) startAngle += sideAngle return complexPolygon
[ "def", "getComplexPolygonByComplexRadius", "(", "radius", ",", "sides", ",", "startAngle", "=", "0.0", ")", ":", "complexPolygon", "=", "[", "]", "sideAngle", "=", "(", "(", "2.0", "*", "math", ".", "pi", ")", "/", "float", "(", "sides", ")", ")", "for...
get the complex polygon .
train
false
17,471
def can_resize_fs(image, size, use_cow=False): LOG.debug(_('Checking if we can resize image %(image)s. size=%(size)s, CoW=%(use_cow)s'), locals()) virt_size = get_disk_size(image) if (virt_size >= size): LOG.debug(_('Cannot resize filesystem %s to a smaller size.'), image) return False if use_cow: try: fs = vfs.VFS.instance_for_image(image, 'qcow2', None) fs.setup() fs.teardown() except exception.NovaException as e: LOG.debug(_('Unable to mount image %(image)s with error %(error)s. Cannot resize.'), {'image': image, 'error': e}) return False else: try: utils.execute('e2label', image) except exception.ProcessExecutionError as e: LOG.debug(_('Unable to determine label for image %(image)s with error %(errror)s. Cannot resize.'), {'image': image, 'error': e}) return False return True
[ "def", "can_resize_fs", "(", "image", ",", "size", ",", "use_cow", "=", "False", ")", ":", "LOG", ".", "debug", "(", "_", "(", "'Checking if we can resize image %(image)s. size=%(size)s, CoW=%(use_cow)s'", ")", ",", "locals", "(", ")", ")", "virt_size", "=", "ge...
check whether we can resize contained file system .
train
false
17,472
def check_linecache_ipython(*args): linecache._checkcache_ori(*args) linecache.cache.update(linecache._ipython_cache)
[ "def", "check_linecache_ipython", "(", "*", "args", ")", ":", "linecache", ".", "_checkcache_ori", "(", "*", "args", ")", "linecache", ".", "cache", ".", "update", "(", "linecache", ".", "_ipython_cache", ")" ]
call linecache .
train
false
17,475
def classify_class_attrs(cls): mro = inspect.getmro(cls) names = dir(cls) result = [] for name in names: if (name in cls.__dict__): obj = cls.__dict__[name] else: try: obj = getattr(cls, name) except AttributeError as err: continue homecls = getattr(obj, '__objclass__', None) if (homecls is None): for base in mro: if (name in base.__dict__): homecls = base break if ((homecls is not None) and (name in homecls.__dict__)): obj = homecls.__dict__[name] obj_via_getattr = getattr(cls, name) if isinstance(obj, staticmethod): kind = 'static method' elif isinstance(obj, classmethod): kind = 'class method' elif isinstance(obj, property): kind = 'property' elif (inspect.ismethod(obj_via_getattr) or inspect.ismethoddescriptor(obj_via_getattr)): kind = 'method' else: kind = 'data' result.append((name, kind, homecls, obj)) return result
[ "def", "classify_class_attrs", "(", "cls", ")", ":", "mro", "=", "inspect", ".", "getmro", "(", "cls", ")", "names", "=", "dir", "(", "cls", ")", "result", "=", "[", "]", "for", "name", "in", "names", ":", "if", "(", "name", "in", "cls", ".", "__...
return list of attribute-descriptor tuples .
train
true
17,476
def temporary(location): to(location, falcon.HTTP_307)
[ "def", "temporary", "(", "location", ")", ":", "to", "(", "location", ",", "falcon", ".", "HTTP_307", ")" ]
redirects to the specified location using http 304 status code .
train
false
17,477
def package_name(pkg_or_module): if ((pkg_or_module is None) or (pkg_or_module.__name__ == '__main__')): return '__main__' pkg_name = pkg_or_module.__name__ pkg_filename = getattr(pkg_or_module, '__file__', None) if (pkg_filename is None): return pkg_name splitted = os.path.split(pkg_filename) if (splitted[(-1)] in init_names): return pkg_name return pkg_name.rsplit('.', 1)[0]
[ "def", "package_name", "(", "pkg_or_module", ")", ":", "if", "(", "(", "pkg_or_module", "is", "None", ")", "or", "(", "pkg_or_module", ".", "__name__", "==", "'__main__'", ")", ")", ":", "return", "'__main__'", "pkg_name", "=", "pkg_or_module", ".", "__name_...
if this function is passed a module .
train
false
17,478
def repository_has_cookiecutter_json(repo_directory): repo_directory_exists = os.path.isdir(repo_directory) repo_config_exists = os.path.isfile(os.path.join(repo_directory, u'cookiecutter.json')) return (repo_directory_exists and repo_config_exists)
[ "def", "repository_has_cookiecutter_json", "(", "repo_directory", ")", ":", "repo_directory_exists", "=", "os", ".", "path", ".", "isdir", "(", "repo_directory", ")", "repo_config_exists", "=", "os", ".", "path", ".", "isfile", "(", "os", ".", "path", ".", "jo...
determine if repo_directory contains a cookiecutter .
train
true
17,479
def add_tags(gce, module, instance_name, tags): zone = module.params.get('zone') if (not instance_name): module.fail_json(msg='Must supply instance_name', changed=False) if (not tags): module.fail_json(msg='Must supply tags', changed=False) tags = [x.lower() for x in tags] try: node = gce.ex_get_node(instance_name, zone=zone) except ResourceNotFoundError: module.fail_json(msg=('Instance %s not found in zone %s' % (instance_name, zone)), changed=False) except GoogleBaseError as e: module.fail_json(msg=str(e), changed=False) node_tags = node.extra['tags'] changed = False tags_changed = [] for t in tags: if (t not in node_tags): changed = True node_tags.append(t) tags_changed.append(t) if (not changed): return (False, None) try: gce.ex_set_node_tags(node, node_tags) return (True, tags_changed) except (GoogleBaseError, InvalidRequestError) as e: module.fail_json(msg=str(e), changed=False)
[ "def", "add_tags", "(", "gce", ",", "module", ",", "instance_name", ",", "tags", ")", ":", "zone", "=", "module", ".", "params", ".", "get", "(", "'zone'", ")", "if", "(", "not", "instance_name", ")", ":", "module", ".", "fail_json", "(", "msg", "=",...
add tags to a trail returns {tagged: true} if the trail was tagged and returns {tagged: false} if the trail was not tagged .
train
false
17,480
def test_sobel_h_zeros(): result = filters.sobel_h(np.zeros((10, 10)), np.ones((10, 10), bool)) assert np.all((result == 0))
[ "def", "test_sobel_h_zeros", "(", ")", ":", "result", "=", "filters", ".", "sobel_h", "(", "np", ".", "zeros", "(", "(", "10", ",", "10", ")", ")", ",", "np", ".", "ones", "(", "(", "10", ",", "10", ")", ",", "bool", ")", ")", "assert", "np", ...
horizontal sobel on an array of all zeros .
train
false
17,481
def _all_the_same_checkbox(matches): name = None for match in matches: if (match.type not in ['checkbox', 'hidden']): return False if (name is None): name = match.name elif (match.name != name): return False return True
[ "def", "_all_the_same_checkbox", "(", "matches", ")", ":", "name", "=", "None", "for", "match", "in", "matches", ":", "if", "(", "match", ".", "type", "not", "in", "[", "'checkbox'", ",", "'hidden'", "]", ")", ":", "return", "False", "if", "(", "name",...
check whether all these controls are actually the the same checkbox .
train
false
17,482
def make_path_cmdlog(r): path = ((AUTOTEST_CACHE_DIR + '/recipes/') + r) if (not os.path.exists(path)): os.makedirs(path) if (not os.path.isdir(path)): raise BkrProxyException(('Path(%s) exists and is not a directory' % path)) return (path + '/cmd_log')
[ "def", "make_path_cmdlog", "(", "r", ")", ":", "path", "=", "(", "(", "AUTOTEST_CACHE_DIR", "+", "'/recipes/'", ")", "+", "r", ")", "if", "(", "not", "os", ".", "path", ".", "exists", "(", "path", ")", ")", ":", "os", ".", "makedirs", "(", "path", ...
converts a recipe id into an internal path for logging purposes .
train
false
17,484
def get_tool_shed_url_from_tool_shed_registry(app, tool_shed): cleaned_tool_shed = remove_protocol_from_tool_shed_url(tool_shed) for shed_url in app.tool_shed_registry.tool_sheds.values(): if (shed_url.find(cleaned_tool_shed) >= 0): if shed_url.endswith('/'): shed_url = shed_url.rstrip('/') return shed_url return None
[ "def", "get_tool_shed_url_from_tool_shed_registry", "(", "app", ",", "tool_shed", ")", ":", "cleaned_tool_shed", "=", "remove_protocol_from_tool_shed_url", "(", "tool_shed", ")", "for", "shed_url", "in", "app", ".", "tool_shed_registry", ".", "tool_sheds", ".", "values"...
the value of tool_shed is something like: toolshed .
train
false
17,485
def assert_response(response, status_code, body, headers=None, body_cmp=operator.eq): headers = (headers or {}) assert (response.status_code == status_code), 'Expected status code {}, got {}'.format(status_code, response.status_code) for (header, value) in headers.items(): response_value = response.headers.get(header) assert (response_value == value), 'Expected {} header value {}, got {}'.format(header, value, response_value) assert body_cmp(response.data, body), 'Unexpected response body'
[ "def", "assert_response", "(", "response", ",", "status_code", ",", "body", ",", "headers", "=", "None", ",", "body_cmp", "=", "operator", ".", "eq", ")", ":", "headers", "=", "(", "headers", "or", "{", "}", ")", "assert", "(", "response", ".", "status...
assert response has the expected status_code .
train
false
17,487
def __split_quoted(s): if (len(s) == 0): return ('', '') q = quoted = s[0] rest = s[1:] while True: next_q = rest.find(q) if (next_q == (-1)): raise ValueError(("can't find ending quote '%s' in '%s'" % (q, s))) is_escaped = False i = (next_q - 1) while ((i >= 0) and (rest[i] == '\\')): i -= 1 is_escaped = (not is_escaped) quoted += rest[0:(next_q + 1)] rest = rest[(next_q + 1):] if (not is_escaped): return (quoted, rest.lstrip())
[ "def", "__split_quoted", "(", "s", ")", ":", "if", "(", "len", "(", "s", ")", "==", "0", ")", ":", "return", "(", "''", ",", "''", ")", "q", "=", "quoted", "=", "s", "[", "0", "]", "rest", "=", "s", "[", "1", ":", "]", "while", "True", ":...
looks for the ending quote character in the string that starts with quote character .
train
false
17,489
def call_of_leaf(leaf, cut_own_trailer=False): trailer = leaf.parent if ((trailer.type != 'trailer') or (leaf not in (trailer.children[0], trailer.children[(-1)]))): if (trailer.type == 'atom'): return trailer return leaf power = trailer.parent index = power.children.index(trailer) power = deep_ast_copy(power) if cut_own_trailer: cut = index else: cut = (index + 1) power.children[cut:] = [] if (power.type == 'error_node'): start = index while True: start -= 1 if (power.children[start].type != 'trailer'): break transformed = tree.Node('power', power.children[start:]) transformed.parent = power.parent return transformed return power
[ "def", "call_of_leaf", "(", "leaf", ",", "cut_own_trailer", "=", "False", ")", ":", "trailer", "=", "leaf", ".", "parent", "if", "(", "(", "trailer", ".", "type", "!=", "'trailer'", ")", "or", "(", "leaf", "not", "in", "(", "trailer", ".", "children", ...
creates a "call" node that consist of all trailer and power objects .
train
false
17,492
def mark_safe(s): if hasattr(s, '__html__'): return s if (isinstance(s, bytes) or (isinstance(s, Promise) and s._delegate_bytes)): return SafeBytes(s) if isinstance(s, (str, Promise)): return SafeText(s) if callable(s): return _safety_decorator(mark_safe, s) return SafeString(str(s))
[ "def", "mark_safe", "(", "s", ")", ":", "if", "hasattr", "(", "s", ",", "'__html__'", ")", ":", "return", "s", "if", "(", "isinstance", "(", "s", ",", "bytes", ")", "or", "(", "isinstance", "(", "s", ",", "Promise", ")", "and", "s", ".", "_delega...
explicitly mark a string as safe for output purposes .
train
false
17,493
def ParseFileSystemsStruct(struct_class, fs_count, data): results = [] cstr = (lambda x: x.split('\x00', 1)[0]) for count in range(0, fs_count): struct_size = struct_class.GetSize() s_data = data[(count * struct_size):((count + 1) * struct_size)] s = struct_class(s_data) s.f_fstypename = cstr(s.f_fstypename) s.f_mntonname = cstr(s.f_mntonname) s.f_mntfromname = cstr(s.f_mntfromname) results.append(s) return results
[ "def", "ParseFileSystemsStruct", "(", "struct_class", ",", "fs_count", ",", "data", ")", ":", "results", "=", "[", "]", "cstr", "=", "(", "lambda", "x", ":", "x", ".", "split", "(", "'\\x00'", ",", "1", ")", "[", "0", "]", ")", "for", "count", "in"...
take the struct type and parse it into a list of structs .
train
true
17,494
def generated_jit(function=None, target='cpu', cache=False, **options): wrapper = _jit(sigs=None, locals={}, target=target, cache=cache, targetoptions=options, impl_kind='generated') if (function is not None): return wrapper(function) else: return wrapper
[ "def", "generated_jit", "(", "function", "=", "None", ",", "target", "=", "'cpu'", ",", "cache", "=", "False", ",", "**", "options", ")", ":", "wrapper", "=", "_jit", "(", "sigs", "=", "None", ",", "locals", "=", "{", "}", ",", "target", "=", "targ...
this decorator allows flexible type-based compilation of a jitted function .
train
false
17,495
def dnsdomain_get(context, fqdomain): return IMPL.dnsdomain_get(context, fqdomain)
[ "def", "dnsdomain_get", "(", "context", ",", "fqdomain", ")", ":", "return", "IMPL", ".", "dnsdomain_get", "(", "context", ",", "fqdomain", ")" ]
get the db record for the specified domain .
train
false
17,496
def ordering_list(attr, count_from=None, **kw): kw = _unsugar_count_from(count_from=count_from, **kw) return (lambda : OrderingList(attr, **kw))
[ "def", "ordering_list", "(", "attr", ",", "count_from", "=", "None", ",", "**", "kw", ")", ":", "kw", "=", "_unsugar_count_from", "(", "count_from", "=", "count_from", ",", "**", "kw", ")", "return", "(", "lambda", ":", "OrderingList", "(", "attr", ",", ...
prepares an :class:orderinglist factory for use in mapper definitions .
train
false
17,497
def _ignore_response_headers_rewriter(ignored_response_headers, state): for (name, value) in state.headers.items(): if (name.lower() in ignored_response_headers): del state.headers[name] try: if isinstance(name, unicode): name = name.encode('ascii') if isinstance(value, unicode): value = value.encode('ascii') except UnicodeEncodeError: del state.headers[name] if ((set(name) - ALLOWED_HEADER_NAME_CHARACTERS) or (set(value) - ALLOWED_HEADER_VALUE_CHARACTERS)): del state.headers[name]
[ "def", "_ignore_response_headers_rewriter", "(", "ignored_response_headers", ",", "state", ")", ":", "for", "(", "name", ",", "value", ")", "in", "state", ".", "headers", ".", "items", "(", ")", ":", "if", "(", "name", ".", "lower", "(", ")", "in", "igno...
ignore specific response headers .
train
false
17,499
@skip('win32') def test_dict_to_idict(): load_iron_python_test() from IronPythonTest import DictConversion class MyDict(dict, ): pass class KOld: pass class KNew(object, ): pass class KOldDerived(KOld, ): pass class KNewDerived(KNew, ): pass test_dicts = [{}, {1: 100}, {None: None}, {object: object}, {1: 100, 2: 200}, {1: 100, 2: 200, 3: 300, 4: 400}, MyDict.__dict__, KOld.__dict__, KNew.__dict__, KOldDerived.__dict__, KNewDerived.__dict__] for temp_dict in test_dicts: expected = (temp_dict.keys() + temp_dict.values()) expected.sort() to_idict = list(DictConversion.ToIDictionary(temp_dict)) to_idict.sort() AreEqual(to_idict, expected) to_idict = list(DictConversion.ToIDictionary(MyDict(temp_dict))) to_idict.sort() AreEqual(to_idict, expected)
[ "@", "skip", "(", "'win32'", ")", "def", "test_dict_to_idict", "(", ")", ":", "load_iron_python_test", "(", ")", "from", "IronPythonTest", "import", "DictConversion", "class", "MyDict", "(", "dict", ",", ")", ":", "pass", "class", "KOld", ":", "pass", "class...
verify dicts can be converted to idictionaries .
train
false
17,500
def Transactional(_func=None, **kwargs): if (_func is not None): return Transactional()(_func) if (not kwargs.pop('require_new', None)): kwargs.setdefault('propagation', datastore_rpc.TransactionOptions.ALLOWED) options = datastore_rpc.TransactionOptions(**kwargs) def outer_wrapper(func): def inner_wrapper(*args, **kwds): return RunInTransactionOptions(options, func, *args, **kwds) return inner_wrapper return outer_wrapper
[ "def", "Transactional", "(", "_func", "=", "None", ",", "**", "kwargs", ")", ":", "if", "(", "_func", "is", "not", "None", ")", ":", "return", "Transactional", "(", ")", "(", "_func", ")", "if", "(", "not", "kwargs", ".", "pop", "(", "'require_new'",...
a decorator that makes sure a function is run in a transaction .
train
false
17,501
def cache_page(*args, **kwargs): cache_alias = kwargs.pop('cache', None) key_prefix = kwargs.pop('key_prefix', None) assert (not kwargs), 'The only keyword arguments are cache and key_prefix' def warn(): import warnings warnings.warn('The cache_page decorator must be called like: cache_page(timeout, [cache=cache name], [key_prefix=key prefix]). All other ways are deprecated.', PendingDeprecationWarning, stacklevel=3) if (len(args) > 1): assert (len(args) == 2), 'cache_page accepts at most 2 arguments' warn() if callable(args[0]): return decorator_from_middleware_with_args(CacheMiddleware)(cache_timeout=args[1], cache_alias=cache_alias, key_prefix=key_prefix)(args[0]) elif callable(args[1]): return decorator_from_middleware_with_args(CacheMiddleware)(cache_timeout=args[0], cache_alias=cache_alias, key_prefix=key_prefix)(args[1]) else: assert False, 'cache_page must be passed a view function if called with two arguments' elif (len(args) == 1): if callable(args[0]): warn() return decorator_from_middleware_with_args(CacheMiddleware)(cache_alias=cache_alias, key_prefix=key_prefix)(args[0]) else: return decorator_from_middleware_with_args(CacheMiddleware)(cache_timeout=args[0], cache_alias=cache_alias, key_prefix=key_prefix) else: warn() return decorator_from_middleware_with_args(CacheMiddleware)(cache_alias=cache_alias, key_prefix=key_prefix)
[ "def", "cache_page", "(", "*", "args", ",", "**", "kwargs", ")", ":", "cache_alias", "=", "kwargs", ".", "pop", "(", "'cache'", ",", "None", ")", "key_prefix", "=", "kwargs", ".", "pop", "(", "'key_prefix'", ",", "None", ")", "assert", "(", "not", "k...
decorator for views that tries getting the page from the cache and populates the cache if the page isnt in the cache yet .
train
false
17,502
def p_compound_statement_1(t): pass
[ "def", "p_compound_statement_1", "(", "t", ")", ":", "pass" ]
compound_statement : lbrace declaration_list statement_list rbrace .
train
false
17,503
def _fit_magnetic_dipole(B_orig, x0, coils, scale, method): from scipy.optimize import fmin_cobyla B = np.dot(scale, B_orig) B2 = np.dot(B, B) objective = partial(_magnetic_dipole_objective, B=B, B2=B2, coils=coils, scale=scale, method=method) x = fmin_cobyla(objective, x0, (), rhobeg=0.0001, rhoend=1e-05, disp=False) return (x, (1.0 - (objective(x) / B2)))
[ "def", "_fit_magnetic_dipole", "(", "B_orig", ",", "x0", ",", "coils", ",", "scale", ",", "method", ")", ":", "from", "scipy", ".", "optimize", "import", "fmin_cobyla", "B", "=", "np", ".", "dot", "(", "scale", ",", "B_orig", ")", "B2", "=", "np", "....
fit a single bit of data .
train
false
17,504
@utils.arg('name', metavar='<name>', help='Name of the new monitor type') @utils.service_type('monitor') def do_type_create(cs, args): vtype = cs.monitor_types.create(args.name) _print_monitor_type_list([vtype])
[ "@", "utils", ".", "arg", "(", "'name'", ",", "metavar", "=", "'<name>'", ",", "help", "=", "'Name of the new monitor type'", ")", "@", "utils", ".", "service_type", "(", "'monitor'", ")", "def", "do_type_create", "(", "cs", ",", "args", ")", ":", "vtype",...
create a new monitor type .
train
false
17,506
@log_call def metadef_namespace_get_by_id(context, namespace_id): try: namespace = next((namespace for namespace in DATA['metadef_namespaces'] if (namespace['id'] == namespace_id))) except StopIteration: msg = (_('Metadata definition namespace not found for id=%s') % namespace_id) LOG.warn(msg) raise exception.MetadefNamespaceNotFound(msg) if (not _is_namespace_visible(context, namespace)): LOG.debug('Forbidding request, metadata definition namespace=%s is not visible.', namespace.namespace) emsg = (_('Forbidding request, metadata definition namespace=%s is not visible.') % namespace.namespace) raise exception.MetadefForbidden(emsg) return namespace
[ "@", "log_call", "def", "metadef_namespace_get_by_id", "(", "context", ",", "namespace_id", ")", ":", "try", ":", "namespace", "=", "next", "(", "(", "namespace", "for", "namespace", "in", "DATA", "[", "'metadef_namespaces'", "]", "if", "(", "namespace", "[", ...
get a namespace object .
train
false
17,508
def offset_copy(trans, fig=None, x=0.0, y=0.0, units=u'inches'): if (units == u'dots'): return (trans + Affine2D().translate(x, y)) if (fig is None): raise ValueError(u'For units of inches or points a fig kwarg is needed') if (units == u'points'): x /= 72.0 y /= 72.0 elif (not (units == u'inches')): raise ValueError(u'units must be dots, points, or inches') return (trans + ScaledTranslation(x, y, fig.dpi_scale_trans))
[ "def", "offset_copy", "(", "trans", ",", "fig", "=", "None", ",", "x", "=", "0.0", ",", "y", "=", "0.0", ",", "units", "=", "u'inches'", ")", ":", "if", "(", "units", "==", "u'dots'", ")", ":", "return", "(", "trans", "+", "Affine2D", "(", ")", ...
return a new transform with an added offset .
train
false
17,509
def list_registry_opts(): return [(g, copy.deepcopy(o)) for (g, o) in _registry_opts]
[ "def", "list_registry_opts", "(", ")", ":", "return", "[", "(", "g", ",", "copy", ".", "deepcopy", "(", "o", ")", ")", "for", "(", "g", ",", "o", ")", "in", "_registry_opts", "]" ]
return a list of oslo_config options available in glance registry service .
train
false
17,510
def open_in_browser(response, _openfunc=webbrowser.open): from scrapy.http import HtmlResponse, TextResponse body = response.body if isinstance(response, HtmlResponse): if ('<base' not in body): repl = ('<head><base href="%s">' % response.url) body = body.replace('<head>', to_bytes(repl)) ext = '.html' elif isinstance(response, TextResponse): ext = '.txt' else: raise TypeError(('Unsupported response type: %s' % response.__class__.__name__)) (fd, fname) = tempfile.mkstemp(ext) os.write(fd, body) os.close(fd) return _openfunc(('file://%s' % fname))
[ "def", "open_in_browser", "(", "response", ",", "_openfunc", "=", "webbrowser", ".", "open", ")", ":", "from", "scrapy", ".", "http", "import", "HtmlResponse", ",", "TextResponse", "body", "=", "response", ".", "body", "if", "isinstance", "(", "response", ",...
open the given response in a local web browser .
train
false
17,511
@utils.arg('node', metavar='<node>', help=_('ID of node')) def do_baremetal_interface_list(cs, args): _emit_deprecation_warning('baremetal-interface-list') interfaces = cs.baremetal.list_interfaces(args.node) _print_baremetal_node_interfaces(interfaces)
[ "@", "utils", ".", "arg", "(", "'node'", ",", "metavar", "=", "'<node>'", ",", "help", "=", "_", "(", "'ID of node'", ")", ")", "def", "do_baremetal_interface_list", "(", "cs", ",", "args", ")", ":", "_emit_deprecation_warning", "(", "'baremetal-interface-list...
deprecated: list network interfaces associated with a baremetal node .
train
false
17,512
def fixup_old_jsargs(): for i in range(len(sys.argv)): if (sys.argv[i] == '--build_js'): print('WARNING: --build_js (with underscore) is deprecated, use --build-js') sys.argv[i] = '--build-js' if (sys.argv[i] == '--install_js'): print('WARNING: --install_js (with underscore) is deprecated, use --install-js') sys.argv[i] = '--install-js'
[ "def", "fixup_old_jsargs", "(", ")", ":", "for", "i", "in", "range", "(", "len", "(", "sys", ".", "argv", ")", ")", ":", "if", "(", "sys", ".", "argv", "[", "i", "]", "==", "'--build_js'", ")", ":", "print", "(", "'WARNING: --build_js (with underscore)...
fixup old style command line options with underscores .
train
false
17,513
def _pointervalue(gdbval): if (gdbval.address is not None): return int(gdbval.address) else: return int(gdbval)
[ "def", "_pointervalue", "(", "gdbval", ")", ":", "if", "(", "gdbval", ".", "address", "is", "not", "None", ")", ":", "return", "int", "(", "gdbval", ".", "address", ")", "else", ":", "return", "int", "(", "gdbval", ")" ]
return the value of the pointer as a python int .
train
false
17,514
def test_image_filename_defaults(): tpath = ipath.get_ipython_package_dir() nt.assert_raises(ValueError, display.Image, filename=os.path.join(tpath, 'testing/tests/badformat.gif'), embed=True) nt.assert_raises(ValueError, display.Image) nt.assert_raises(ValueError, display.Image, data='this is not an image', format='badformat', embed=True) imgfile = os.path.join(tpath, 'core/tests/2x2.png') img = display.Image(filename=imgfile) nt.assert_equal('png', img.format) nt.assert_is_not_none(img._repr_png_()) img = display.Image(filename=os.path.join(tpath, 'testing/tests/logo.jpg'), embed=False) nt.assert_equal('jpeg', img.format) nt.assert_is_none(img._repr_jpeg_())
[ "def", "test_image_filename_defaults", "(", ")", ":", "tpath", "=", "ipath", ".", "get_ipython_package_dir", "(", ")", "nt", ".", "assert_raises", "(", "ValueError", ",", "display", ".", "Image", ",", "filename", "=", "os", ".", "path", ".", "join", "(", "...
test format constraint .
train
false
17,515
def polygon(r, c, shape=None): return _polygon(r, c, shape)
[ "def", "polygon", "(", "r", ",", "c", ",", "shape", "=", "None", ")", ":", "return", "_polygon", "(", "r", ",", "c", ",", "shape", ")" ]
generate coordinates of pixels within polygon .
train
false
17,516
def FormatLogArgument(s): MAX_LEN = 256 if isinstance(s, unicode): s = s.encode('utf-8') else: s = str(s) if (len(s) <= MAX_LEN): return s return ('%s...[%d bytes truncated]' % (s[:MAX_LEN], (len(s) - MAX_LEN)))
[ "def", "FormatLogArgument", "(", "s", ")", ":", "MAX_LEN", "=", "256", "if", "isinstance", "(", "s", ",", "unicode", ")", ":", "s", "=", "s", ".", "encode", "(", "'utf-8'", ")", "else", ":", "s", "=", "str", "(", "s", ")", "if", "(", "len", "("...
format "s" in a human-readable way for logging by truncating it to at most 256 characters .
train
false