id_within_dataset
int64
1
55.5k
snippet
stringlengths
19
14.2k
tokens
listlengths
6
1.63k
nl
stringlengths
6
352
split_within_dataset
stringclasses
1 value
is_duplicated
bool
2 classes
42,820
def open_archive(archive_path): archive = None if (not os.path.exists(archive_path)): raise PathError(u"Path doesn't exist") if zipfile.is_zipfile(archive_path): archive = ZipArchive(archive_path) log.debug(u'Successfully opened ZIP: %s', archive_path) elif (rarfile and rarfile.is_rarfile(archive_path)): archive = RarArchive(archive_path) log.debug(u'Successfully opened RAR: %s', archive_path) elif (not rarfile): log.warning(u'Rarfile module not installed; unable to handle RAR archives.') return archive
[ "def", "open_archive", "(", "archive_path", ")", ":", "archive", "=", "None", "if", "(", "not", "os", ".", "path", ".", "exists", "(", "archive_path", ")", ")", ":", "raise", "PathError", "(", "u\"Path doesn't exist\"", ")", "if", "zipfile", ".", "is_zipfi...
returns the appropriate archive object .
train
false
42,821
def get_redis_connection(server_name='default'): global connection_pool if (connection_pool is None): connection_pool = setup_redis() pool = connection_pool[server_name] return redis.StrictRedis(connection_pool=pool)
[ "def", "get_redis_connection", "(", "server_name", "=", "'default'", ")", ":", "global", "connection_pool", "if", "(", "connection_pool", "is", "None", ")", ":", "connection_pool", "=", "setup_redis", "(", ")", "pool", "=", "connection_pool", "[", "server_name", ...
gets the specified redis connection .
train
false
42,822
def check_zero(result, func, cargs): if (result == 0): raise GEOSException(('Error encountered in GEOS C function "%s".' % func.__name__)) else: return result
[ "def", "check_zero", "(", "result", ",", "func", ",", "cargs", ")", ":", "if", "(", "result", "==", "0", ")", ":", "raise", "GEOSException", "(", "(", "'Error encountered in GEOS C function \"%s\".'", "%", "func", ".", "__name__", ")", ")", "else", ":", "r...
error checking on routines that should not return 0 .
train
false
42,823
def durbin_watson(resids, axis=0): resids = np.asarray(resids) diff_resids = np.diff(resids, 1, axis=axis) dw = (np.sum((diff_resids ** 2), axis=axis) / np.sum((resids ** 2), axis=axis)) return dw
[ "def", "durbin_watson", "(", "resids", ",", "axis", "=", "0", ")", ":", "resids", "=", "np", ".", "asarray", "(", "resids", ")", "diff_resids", "=", "np", ".", "diff", "(", "resids", ",", "1", ",", "axis", "=", "axis", ")", "dw", "=", "(", "np", ...
calculates the durbin-watson statistic parameters resids : array-like returns dw : float .
train
false
42,824
def _project_cert_subject(project_id): return (CONF.project_cert_subject % (project_id, timeutils.isotime()))
[ "def", "_project_cert_subject", "(", "project_id", ")", ":", "return", "(", "CONF", ".", "project_cert_subject", "%", "(", "project_id", ",", "timeutils", ".", "isotime", "(", ")", ")", ")" ]
helper to generate user cert subject .
train
false
42,826
def p_direct_declarator_1(t): pass
[ "def", "p_direct_declarator_1", "(", "t", ")", ":", "pass" ]
direct_declarator : id .
train
false
42,827
def inflate_long(s, always_positive=False): out = long(0) negative = 0 if ((not always_positive) and (len(s) > 0) and (byte_ord(s[0]) >= 128)): negative = 1 if (len(s) % 4): filler = zero_byte if negative: filler = max_byte s = ((filler * (4 - (len(s) % 4))) + s) for i in range(0, len(s), 4): out = ((out << 32) + struct.unpack('>I', s[i:(i + 4)])[0]) if negative: out -= (long(1) << (8 * len(s))) return out
[ "def", "inflate_long", "(", "s", ",", "always_positive", "=", "False", ")", ":", "out", "=", "long", "(", "0", ")", "negative", "=", "0", "if", "(", "(", "not", "always_positive", ")", "and", "(", "len", "(", "s", ")", ">", "0", ")", "and", "(", ...
turns a normalized byte string into a long-int .
train
true
42,828
def check_gaussian_samples(value, nsamples, nvis, rows, cols, channels, expected_mean, tol): if nvis: expected_shape = (nsamples, nvis) else: expected_shape = (nsamples, rows, cols, channels) assert (value.shape == expected_shape) assert (not is_binary(value)) mean = value.mean(axis=0) max_error = np.abs((mean - expected_mean)).max() print('Actual mean:') print(mean) print('Expected mean:') print(expected_mean) print('Maximal error:', max_error) print('Tolerable variance:', tol) if (max_error > tol): raise ValueError("Samples don't seem to have the right mean.") else: print('Mean is within expected range')
[ "def", "check_gaussian_samples", "(", "value", ",", "nsamples", ",", "nvis", ",", "rows", ",", "cols", ",", "channels", ",", "expected_mean", ",", "tol", ")", ":", "if", "nvis", ":", "expected_shape", "=", "(", "nsamples", ",", "nvis", ")", "else", ":", ...
tests that a matrix of gaussian samples 1) has the right shape 2) is not binary 3) converges to the right mean .
train
false
42,829
def test_gae(): generate('shifts') yaml_file_path = os.path.abspath(os.path.join(os.path.dirname(__file__), '../gae_demo')) save_path = os.path.dirname(os.path.realpath(__file__)) train(yaml_file_path, save_path) try: os.remove('{0}/train_preprocessed.pkl'.format(save_path)) os.remove('{0}/gae_196_50.pkl'.format(save_path)) os.remove('{0}/gae_196_50_best.pkl'.format(save_path)) except OSError: logging.warning('Files not found')
[ "def", "test_gae", "(", ")", ":", "generate", "(", "'shifts'", ")", "yaml_file_path", "=", "os", ".", "path", ".", "abspath", "(", "os", ".", "path", ".", "join", "(", "os", ".", "path", ".", "dirname", "(", "__file__", ")", ",", "'../gae_demo'", ")"...
the function generates a dataset and uses it to train the model .
train
false
42,830
def __get_ssh_credentials(vm_): ssh_user = config.get_cloud_config_value('ssh_username', vm_, __opts__, default=os.getenv('USER')) ssh_key = config.get_cloud_config_value('ssh_keyfile', vm_, __opts__, default=os.path.expanduser('~/.ssh/google_compute_engine')) return (ssh_user, ssh_key)
[ "def", "__get_ssh_credentials", "(", "vm_", ")", ":", "ssh_user", "=", "config", ".", "get_cloud_config_value", "(", "'ssh_username'", ",", "vm_", ",", "__opts__", ",", "default", "=", "os", ".", "getenv", "(", "'USER'", ")", ")", "ssh_key", "=", "config", ...
get configured ssh credentials .
train
true
42,833
def fixed_batch_normalization(x, gamma, beta, mean, var, eps=2e-05, use_cudnn=True): return BatchNormalizationFunction(eps, None, None, False, 0.0, use_cudnn)(x, gamma, beta, mean, var)
[ "def", "fixed_batch_normalization", "(", "x", ",", "gamma", ",", "beta", ",", "mean", ",", "var", ",", "eps", "=", "2e-05", ",", "use_cudnn", "=", "True", ")", ":", "return", "BatchNormalizationFunction", "(", "eps", ",", "None", ",", "None", ",", "False...
batch normalization function with fixed statistics .
train
false
42,835
def encipher_bifid6(msg, key): (msg, key, _) = _prep(msg.upper(), key.upper(), None, bifid6) key = padded_key(key, bifid6) return encipher_bifid(msg, '', key)
[ "def", "encipher_bifid6", "(", "msg", ",", "key", ")", ":", "(", "msg", ",", "key", ",", "_", ")", "=", "_prep", "(", "msg", ".", "upper", "(", ")", ",", "key", ".", "upper", "(", ")", ",", "None", ",", "bifid6", ")", "key", "=", "padded_key", ...
performs the bifid cipher encryption on plaintext msg .
train
false
42,836
def unitdoctest(): pass
[ "def", "unitdoctest", "(", ")", ":", "pass" ]
is used to describe test cases and might in future become helpful as an experimental tutorial as well .
train
false
42,837
def getTclTkVersion(configfile, versionline): try: f = open(configfile, 'r') except OSError: fatal(('Framework configuration file not found: %s' % configfile)) for l in f: if l.startswith(versionline): f.close() return l fatal(('Version variable %s not found in framework configuration file: %s' % (versionline, configfile)))
[ "def", "getTclTkVersion", "(", "configfile", ",", "versionline", ")", ":", "try", ":", "f", "=", "open", "(", "configfile", ",", "'r'", ")", "except", "OSError", ":", "fatal", "(", "(", "'Framework configuration file not found: %s'", "%", "configfile", ")", ")...
search tcl or tk configuration file for version line .
train
false
42,838
@snippet def dataset_update(client, to_delete): DATASET_NAME = ('dataset_update_%d' % (_millis(),)) dataset = client.dataset(DATASET_NAME) dataset.description = ORIGINAL_DESCRIPTION dataset.create() to_delete.append(dataset) dataset.reload() from google.cloud.bigquery import AccessGrant assert (dataset.description == ORIGINAL_DESCRIPTION) assert (dataset.default_table_expiration_ms is None) grant = AccessGrant(role='READER', entity_type='domain', entity_id='example.com') assert (grant not in dataset.access_grants) ONE_DAY_MS = (((24 * 60) * 60) * 1000) dataset.description = UPDATED_DESCRIPTION dataset.default_table_expiration_ms = ONE_DAY_MS grants = list(dataset.access_grants) grants.append(grant) dataset.access_grants = grants dataset.update() assert (dataset.description == UPDATED_DESCRIPTION) assert (dataset.default_table_expiration_ms == ONE_DAY_MS) assert (grant in dataset.access_grants)
[ "@", "snippet", "def", "dataset_update", "(", "client", ",", "to_delete", ")", ":", "DATASET_NAME", "=", "(", "'dataset_update_%d'", "%", "(", "_millis", "(", ")", ",", ")", ")", "dataset", "=", "client", ".", "dataset", "(", "DATASET_NAME", ")", "dataset"...
update a datasets metadata .
train
false
42,839
def human_readable_delta(start, end): start_date = datetime.datetime.fromtimestamp(start) end_date = datetime.datetime.fromtimestamp(end) delta = (end_date - start_date) result = [] if (delta.days > 0): result.append(('%d days' % (delta.days,))) if (delta.seconds > 0): hours = int((delta.seconds / 3600)) if (hours > 0): result.append(('%d hours' % (hours,))) minutes = int(((delta.seconds - (hours * 3600)) / 60)) if minutes: result.append(('%d minutes' % (minutes,))) seconds = (delta.seconds % 60) if (seconds > 0): result.append(('%d seconds' % (seconds,))) if result: return ', '.join(result) else: return 'super fast'
[ "def", "human_readable_delta", "(", "start", ",", "end", ")", ":", "start_date", "=", "datetime", ".", "datetime", ".", "fromtimestamp", "(", "start", ")", "end_date", "=", "datetime", ".", "datetime", ".", "fromtimestamp", "(", "end", ")", "delta", "=", "...
return a string of human readable time delta .
train
true
42,840
def make_path_watchdog(r): return (('/recipes/' + r) + '/watchdog')
[ "def", "make_path_watchdog", "(", "r", ")", ":", "return", "(", "(", "'/recipes/'", "+", "r", ")", "+", "'/watchdog'", ")" ]
converts a recipe id into a beaker path for the watchdog .
train
false
42,842
def test_hsl_to_rgb_part_15(): assert (hsl_to_rgb(120, 100, 0) == (0, 0, 0)) assert (hsl_to_rgb(120, 100, 10) == (0, 51, 0)) assert (hsl_to_rgb(120, 100, 20) == (0, 102, 0)) assert (hsl_to_rgb(120, 100, 30) == (0, 153, 0)) assert (hsl_to_rgb(120, 100, 40) == (0, 204, 0)) assert (hsl_to_rgb(120, 100, 50) == (0, 255, 0)) assert (hsl_to_rgb(120, 100, 60) == (51, 255, 51)) assert (hsl_to_rgb(120, 100, 70) == (102, 255, 102)) assert (hsl_to_rgb(120, 100, 80) == (153, 255, 153)) assert (hsl_to_rgb(120, 100, 90) == (204, 255, 204)) assert (hsl_to_rgb(120, 100, 100) == (255, 255, 255))
[ "def", "test_hsl_to_rgb_part_15", "(", ")", ":", "assert", "(", "hsl_to_rgb", "(", "120", ",", "100", ",", "0", ")", "==", "(", "0", ",", "0", ",", "0", ")", ")", "assert", "(", "hsl_to_rgb", "(", "120", ",", "100", ",", "10", ")", "==", "(", "...
test hsl to rgb color function .
train
false
42,844
def get_color_scheme(name): color_scheme = {} for key in sh.COLOR_SCHEME_KEYS: color_scheme[key] = CONF.get('color_schemes', ('%s/%s' % (name, key))) return color_scheme
[ "def", "get_color_scheme", "(", "name", ")", ":", "color_scheme", "=", "{", "}", "for", "key", "in", "sh", ".", "COLOR_SCHEME_KEYS", ":", "color_scheme", "[", "key", "]", "=", "CONF", ".", "get", "(", "'color_schemes'", ",", "(", "'%s/%s'", "%", "(", "...
get syntax color scheme .
train
true
42,845
def convert_GeoPtProperty(model, prop, kwargs): return GeoPtPropertyField(**kwargs)
[ "def", "convert_GeoPtProperty", "(", "model", ",", "prop", ",", "kwargs", ")", ":", "return", "GeoPtPropertyField", "(", "**", "kwargs", ")" ]
returns a form field for a db .
train
false
42,846
def _filter_non_json_lines(data): warnings = [] lines = data.splitlines() for (start, line) in enumerate(lines): line = line.strip() if line.startswith(u'{'): endchar = u'}' break elif line.startswith(u'['): endchar = u']' break else: raise ValueError('No start of json char found') lines = lines[start:] for (reverse_end_offset, line) in enumerate(reversed(lines)): if line.strip().endswith(endchar): break else: raise ValueError('No end of json char found') if (reverse_end_offset > 0): trailing_junk = lines[(len(lines) - reverse_end_offset):] for line in trailing_junk: if line.strip(): warnings.append(('Module invocation had junk after the JSON data: %s' % '\n'.join(trailing_junk))) break lines = lines[:(len(lines) - reverse_end_offset)] return ('\n'.join(lines), warnings)
[ "def", "_filter_non_json_lines", "(", "data", ")", ":", "warnings", "=", "[", "]", "lines", "=", "data", ".", "splitlines", "(", ")", "for", "(", "start", ",", "line", ")", "in", "enumerate", "(", "lines", ")", ":", "line", "=", "line", ".", "strip",...
used to filter unrelated output around module json output .
train
false
42,847
def proto(*a, **kw): return Tag('hello')(*a, **kw)
[ "def", "proto", "(", "*", "a", ",", "**", "kw", ")", ":", "return", "Tag", "(", "'hello'", ")", "(", "*", "a", ",", "**", "kw", ")" ]
produce a new tag for testing .
train
false
42,848
def read_crl(crl): text = _text_or_file(crl) text = get_pem_entry(text, pem_type='X509 CRL') crltempfile = tempfile.NamedTemporaryFile() crltempfile.write(text) crltempfile.flush() crlparsed = _parse_openssl_crl(crltempfile.name) crltempfile.close() return crlparsed
[ "def", "read_crl", "(", "crl", ")", ":", "text", "=", "_text_or_file", "(", "crl", ")", "text", "=", "get_pem_entry", "(", "text", ",", "pem_type", "=", "'X509 CRL'", ")", "crltempfile", "=", "tempfile", ".", "NamedTemporaryFile", "(", ")", "crltempfile", ...
returns a dict containing details of a certificate revocation list .
train
false
42,849
def clean_hanging_newline(t): if (t and (t[(-1)] == '\n')): return t[:(-1)] return t
[ "def", "clean_hanging_newline", "(", "t", ")", ":", "if", "(", "t", "and", "(", "t", "[", "(", "-", "1", ")", "]", "==", "'\\n'", ")", ")", ":", "return", "t", "[", ":", "(", "-", "1", ")", "]", "return", "t" ]
many editors will silently add a newline to the final line of a document .
train
false
42,850
def previous(task): params = task.get_params() previous_params = {} previous_date_params = {} for (param_name, param_obj) in params: param_value = getattr(task, param_name) if isinstance(param_obj, parameter.DateParameter): previous_date_params[param_name] = (param_value - datetime.timedelta(days=1)) elif isinstance(param_obj, parameter.DateSecondParameter): previous_date_params[param_name] = (param_value - datetime.timedelta(seconds=1)) elif isinstance(param_obj, parameter.DateMinuteParameter): previous_date_params[param_name] = (param_value - datetime.timedelta(minutes=1)) elif isinstance(param_obj, parameter.DateHourParameter): previous_date_params[param_name] = (param_value - datetime.timedelta(hours=1)) elif isinstance(param_obj, parameter.DateIntervalParameter): previous_date_params[param_name] = param_value.prev() else: previous_params[param_name] = param_value previous_params.update(previous_date_params) if (len(previous_date_params) == 0): raise NotImplementedError("No task parameter - can't determine previous task") elif (len(previous_date_params) > 1): raise NotImplementedError("Too many date-related task parameters - can't determine previous task") else: return task.clone(**previous_params)
[ "def", "previous", "(", "task", ")", ":", "params", "=", "task", ".", "get_params", "(", ")", "previous_params", "=", "{", "}", "previous_date_params", "=", "{", "}", "for", "(", "param_name", ",", "param_obj", ")", "in", "params", ":", "param_value", "=...
return a previous task of the same family .
train
true
42,851
def dup_primitive_prs(f, g, K): prs = [f, g] (_, h) = dup_primitive(dup_prem(f, g, K), K) while h: prs.append(h) (f, g) = (g, h) (_, h) = dup_primitive(dup_prem(f, g, K), K) return prs
[ "def", "dup_primitive_prs", "(", "f", ",", "g", ",", "K", ")", ":", "prs", "=", "[", "f", ",", "g", "]", "(", "_", ",", "h", ")", "=", "dup_primitive", "(", "dup_prem", "(", "f", ",", "g", ",", "K", ")", ",", "K", ")", "while", "h", ":", ...
primitive polynomial remainder sequence in k[x] .
train
false
42,852
def escape_regex(p, white=u''): return u''.join(((c if (c.isalnum() or (c in white)) else (u'\\000' if (c == u'\x00') else (u'\\' + c))) for c in p))
[ "def", "escape_regex", "(", "p", ",", "white", "=", "u''", ")", ":", "return", "u''", ".", "join", "(", "(", "(", "c", "if", "(", "c", ".", "isalnum", "(", ")", "or", "(", "c", "in", "white", ")", ")", "else", "(", "u'\\\\000'", "if", "(", "c...
escape string for use within a regular expression .
train
false
42,853
def fileSaveDlg(initFilePath='', initFileName='', prompt=_translate('Select file to save'), allowed=None): if (allowed is None): allowed = 'All files (*.*);;txt (*.txt);;pickled files (*.pickle *.pkl);;shelved files (*.shelf)' global qtapp qtapp = ensureQtApp() fdir = os.path.join(initFilePath, initFileName) r = QtWidgets.QFileDialog.getSaveFileName(parent=None, caption=prompt, directory=fdir, filter=allowed) return (unicode(r) or None)
[ "def", "fileSaveDlg", "(", "initFilePath", "=", "''", ",", "initFileName", "=", "''", ",", "prompt", "=", "_translate", "(", "'Select file to save'", ")", ",", "allowed", "=", "None", ")", ":", "if", "(", "allowed", "is", "None", ")", ":", "allowed", "="...
a simple dialogue allowing write access to the file system .
train
false
42,854
def _TestUpdatePhoto(tester, user_cookie, request_dict): validator = tester.validator (user_id, device_id) = tester.GetIdsFromCookie(user_cookie) request_dict = deepcopy(request_dict) actual_dict = tester.SendRequest('update_photo', user_cookie, request_dict) photo_dict = deepcopy(request_dict) photo_dict['user_id'] = user_id photo_dict.pop('headers', None) photo_dict.pop('activity', None) photo_dict.pop('asset_keys', None) validator.ValidateUpdateDBObject(Photo, **photo_dict) if request_dict.get('asset_keys'): up_dict = {'user_id': user_id, 'photo_id': request_dict['photo_id'], 'asset_keys': request_dict['asset_keys']} validator.ValidateUpdateDBObject(UserPhoto, **up_dict) tester._CompareResponseDicts('update_photo', user_id, request_dict, {}, actual_dict) return actual_dict
[ "def", "_TestUpdatePhoto", "(", "tester", ",", "user_cookie", ",", "request_dict", ")", ":", "validator", "=", "tester", ".", "validator", "(", "user_id", ",", "device_id", ")", "=", "tester", ".", "GetIdsFromCookie", "(", "user_cookie", ")", "request_dict", "...
called by the servicetester in order to test update_photo service api call .
train
false
42,856
@db_api.api_context_manager.writer def _update_inventory(context, rp, inventory): _ensure_rc_cache(context) rc_id = _RC_CACHE.id_from_string(inventory.resource_class) inv_list = InventoryList(objects=[inventory]) conn = context.session.connection() with conn.begin(): exceeded = _update_inventory_for_provider(conn, rp, inv_list, set([rc_id])) rp.generation = _increment_provider_generation(conn, rp) return exceeded
[ "@", "db_api", ".", "api_context_manager", ".", "writer", "def", "_update_inventory", "(", "context", ",", "rp", ",", "inventory", ")", ":", "_ensure_rc_cache", "(", "context", ")", "rc_id", "=", "_RC_CACHE", ".", "id_from_string", "(", "inventory", ".", "reso...
update an inventory already on the provider .
train
false
42,857
@hypothesis.given(strategies.text()) def test_entry_parse_hypothesis(text): try: history.Entry.from_str(text) except ValueError: pass
[ "@", "hypothesis", ".", "given", "(", "strategies", ".", "text", "(", ")", ")", "def", "test_entry_parse_hypothesis", "(", "text", ")", ":", "try", ":", "history", ".", "Entry", ".", "from_str", "(", "text", ")", "except", "ValueError", ":", "pass" ]
make sure parsing works or gives us valueerror .
train
false
42,860
def GetClientTestTargets(client_ids=None, hostnames=None, token=None, checkin_duration_threshold='20m'): if client_ids: client_ids = set(client_ids) else: client_ids = set(config_lib.CONFIG.Get('Test.end_to_end_client_ids')) if hostnames: hosts = set(hostnames) else: hosts = set(config_lib.CONFIG.Get('Test.end_to_end_client_hostnames')) if hosts: client_id_dict = client_index.GetClientURNsForHostnames(hosts, token=token) for client_list in client_id_dict.values(): client_ids.update(client_list) client_id_set = set([rdf_client.ClientURN(x) for x in client_ids]) duration_threshold = rdfvalue.Duration(checkin_duration_threshold) for client in aff4.FACTORY.MultiOpen(client_id_set, token=token): if ((rdfvalue.RDFDatetime.Now() - client.Get(client.Schema.LAST)) > duration_threshold): client_id_set.remove(client.urn) return client_id_set
[ "def", "GetClientTestTargets", "(", "client_ids", "=", "None", ",", "hostnames", "=", "None", ",", "token", "=", "None", ",", "checkin_duration_threshold", "=", "'20m'", ")", ":", "if", "client_ids", ":", "client_ids", "=", "set", "(", "client_ids", ")", "el...
get client urns for end-to-end tests .
train
false
42,862
def register_template_library(package_name): if (not django.template.libraries.get(package_name, None)): django.template.add_to_builtins(package_name)
[ "def", "register_template_library", "(", "package_name", ")", ":", "if", "(", "not", "django", ".", "template", ".", "libraries", ".", "get", "(", "package_name", ",", "None", ")", ")", ":", "django", ".", "template", ".", "add_to_builtins", "(", "package_na...
registers a template extension module to make it usable in templates .
train
false
42,863
def get_normal_points(cx, cy, cos_t, sin_t, length): if (length == 0.0): return (cx, cy, cx, cy) (cos_t1, sin_t1) = (sin_t, (- cos_t)) (cos_t2, sin_t2) = ((- sin_t), cos_t) (x1, y1) = (((length * cos_t1) + cx), ((length * sin_t1) + cy)) (x2, y2) = (((length * cos_t2) + cx), ((length * sin_t2) + cy)) return (x1, y1, x2, y2)
[ "def", "get_normal_points", "(", "cx", ",", "cy", ",", "cos_t", ",", "sin_t", ",", "length", ")", ":", "if", "(", "length", "==", "0.0", ")", ":", "return", "(", "cx", ",", "cy", ",", "cx", ",", "cy", ")", "(", "cos_t1", ",", "sin_t1", ")", "="...
for a line passing through and having a angle *t* .
train
false
42,864
def fact(name, puppet=False): opt_puppet = ('--puppet' if puppet else '') ret = __salt__['cmd.run']('facter {0} {1}'.format(opt_puppet, name), python_shell=False) if (not ret): return '' return ret
[ "def", "fact", "(", "name", ",", "puppet", "=", "False", ")", ":", "opt_puppet", "=", "(", "'--puppet'", "if", "puppet", "else", "''", ")", "ret", "=", "__salt__", "[", "'cmd.run'", "]", "(", "'facter {0} {1}'", ".", "format", "(", "opt_puppet", ",", "...
run facter for a specific fact cli example: .
train
true
42,865
def OneHotEncoder(data, keymap=None): if (keymap is None): keymap = [] for col in data.T: uniques = set(list(col)) keymap.append(dict(((key, i) for (i, key) in enumerate(uniques)))) total_pts = data.shape[0] outdat = [] for (i, col) in enumerate(data.T): km = keymap[i] num_labels = len(km) spmat = sparse.lil_matrix((total_pts, num_labels)) for (j, val) in enumerate(col): if (val in km): spmat[(j, km[val])] = 1 outdat.append(spmat) outdat = sparse.hstack(outdat).tocsr() return (outdat, keymap)
[ "def", "OneHotEncoder", "(", "data", ",", "keymap", "=", "None", ")", ":", "if", "(", "keymap", "is", "None", ")", ":", "keymap", "=", "[", "]", "for", "col", "in", "data", ".", "T", ":", "uniques", "=", "set", "(", "list", "(", "col", ")", ")"...
onehotencoder takes data matrix with categorical columns and converts it to a sparse binary matrix .
train
false
42,866
def dup_compose(f, g, K): if (len(g) <= 1): return dup_strip([dup_eval(f, dup_LC(g, K), K)]) if (not f): return [] h = [f[0]] for c in f[1:]: h = dup_mul(h, g, K) h = dup_add_term(h, c, 0, K) return h
[ "def", "dup_compose", "(", "f", ",", "g", ",", "K", ")", ":", "if", "(", "len", "(", "g", ")", "<=", "1", ")", ":", "return", "dup_strip", "(", "[", "dup_eval", "(", "f", ",", "dup_LC", "(", "g", ",", "K", ")", ",", "K", ")", "]", ")", "i...
evaluate functional composition f(g) in k[x] .
train
false
42,868
def smart_is_broken(drives): if os.path.exists(ARCCONF): return is_adaptec_driver_broken() if os.path.exists(TWCLI): return is_3ware_driver_broken(drives) return False
[ "def", "smart_is_broken", "(", "drives", ")", ":", "if", "os", ".", "path", ".", "exists", "(", "ARCCONF", ")", ":", "return", "is_adaptec_driver_broken", "(", ")", "if", "os", ".", "path", ".", "exists", "(", "TWCLI", ")", ":", "return", "is_3ware_drive...
determines whether smart can be used .
train
false
42,869
def getparser(use_datetime=0): if (use_datetime and (not datetime)): raise ValueError, 'the datetime module is not available' if (FastParser and FastUnmarshaller): if use_datetime: mkdatetime = _datetime_type else: mkdatetime = _datetime target = FastUnmarshaller(True, False, _binary, mkdatetime, Fault) parser = FastParser(target) else: target = Unmarshaller(use_datetime=use_datetime) if FastParser: parser = FastParser(target) elif ExpatParser: parser = ExpatParser(target) else: parser = SlowParser(target) return (parser, target)
[ "def", "getparser", "(", "use_datetime", "=", "0", ")", ":", "if", "(", "use_datetime", "and", "(", "not", "datetime", ")", ")", ":", "raise", "ValueError", ",", "'the datetime module is not available'", "if", "(", "FastParser", "and", "FastUnmarshaller", ")", ...
getparser() -> parser .
train
true
42,870
def require_http_methods(request_method_list): def decorator(func): def inner(request, *args, **kwargs): if (request.method not in request_method_list): return HttpResponseNotAllowed(request_method_list) return func(request, *args, **kwargs) return inner return decorator
[ "def", "require_http_methods", "(", "request_method_list", ")", ":", "def", "decorator", "(", "func", ")", ":", "def", "inner", "(", "request", ",", "*", "args", ",", "**", "kwargs", ")", ":", "if", "(", "request", ".", "method", "not", "in", "request_me...
decorator to make a view only accept particular request methods .
train
false
42,871
@login_required def message_remove(request, object_id, template_name='messages/message_remove_confirm.html'): message = get_object_or_404(Message, pk=object_id) next = request.GET.get('next', None) if (request.method == 'POST'): if (message.to_user == request.user): message.to_status = TO_STATUS_DELETED else: message.from_status = FROM_STATUS_DELETED message.save() return HttpResponseRedirect((next or reverse('messages:messages'))) return render_to_response(template_name, {'message': message, 'next': next}, context_instance=RequestContext(request))
[ "@", "login_required", "def", "message_remove", "(", "request", ",", "object_id", ",", "template_name", "=", "'messages/message_remove_confirm.html'", ")", ":", "message", "=", "get_object_or_404", "(", "Message", ",", "pk", "=", "object_id", ")", "next", "=", "re...
remove a message .
train
false
42,873
def test_cluster_permutation_test(): (condition1_1d, condition2_1d, condition1_2d, condition2_2d) = _get_conditions() for (condition1, condition2) in zip((condition1_1d, condition1_2d), (condition2_1d, condition2_2d)): (T_obs, clusters, cluster_p_values, hist) = permutation_cluster_test([condition1, condition2], n_permutations=100, tail=1, seed=1, buffer_size=None) assert_equal(np.sum((cluster_p_values < 0.05)), 1) (T_obs, clusters, cluster_p_values, hist) = permutation_cluster_test([condition1, condition2], n_permutations=100, tail=0, seed=1, buffer_size=None) assert_equal(np.sum((cluster_p_values < 0.05)), 1) buffer_size = (condition1.shape[1] // 10) (T_obs, clusters, cluster_p_values_buff, hist) = permutation_cluster_test([condition1, condition2], n_permutations=100, tail=0, seed=1, n_jobs=2, buffer_size=buffer_size) assert_array_equal(cluster_p_values, cluster_p_values_buff)
[ "def", "test_cluster_permutation_test", "(", ")", ":", "(", "condition1_1d", ",", "condition2_1d", ",", "condition1_2d", ",", "condition2_2d", ")", "=", "_get_conditions", "(", ")", "for", "(", "condition1", ",", "condition2", ")", "in", "zip", "(", "(", "cond...
test cluster level permutations tests .
train
false
42,874
@register.filter def sigsort(l): if (not l): return for x in l: if (x.get('type', x.get('_type')) == 'generic'): (yield x) for x in l: if (x.get('type', x.get('_type')) == 'ioc'): (yield x) first = True for x in l: if (x.get('type', x.get('_type')) == 'call'): if first: x['first'] = True first = False (yield x)
[ "@", "register", ".", "filter", "def", "sigsort", "(", "l", ")", ":", "if", "(", "not", "l", ")", ":", "return", "for", "x", "in", "l", ":", "if", "(", "x", ".", "get", "(", "'type'", ",", "x", ".", "get", "(", "'_type'", ")", ")", "==", "'...
sort signatures entries .
train
false
42,875
def _local(tag): if (tag[0] == '{'): return tag[(tag.index('}') + 1):] return tag
[ "def", "_local", "(", "tag", ")", ":", "if", "(", "tag", "[", "0", "]", "==", "'{'", ")", ":", "return", "tag", "[", "(", "tag", ".", "index", "(", "'}'", ")", "+", "1", ")", ":", "]", "return", "tag" ]
extract the local tag from a namespaced tag name .
train
false
42,876
def plugin_interface(): b = pudge.browser.Browser(['nose.plugins.base'], None) m = b.modules()[0] intf = list([c for c in m.classes() if (c.name == 'IPluginInterface')])[0] doc = wikirst(intf.doc()) methods = [m for m in intf.routines() if (not m.name.startswith('_'))] methods.sort((lambda a, b: cmp(a.name, b.name))) mdoc = [] for m in methods: mdoc.append(('*%s%s*\n\n' % (m.name, formatargspec(m.obj)))) mdoc.append((' ' + m.doc().replace('\n', '\n '))) mdoc.append('\n\n') doc = (doc + ''.join(mdoc)) return doc
[ "def", "plugin_interface", "(", ")", ":", "b", "=", "pudge", ".", "browser", ".", "Browser", "(", "[", "'nose.plugins.base'", "]", ",", "None", ")", "m", "=", "b", ".", "modules", "(", ")", "[", "0", "]", "intf", "=", "list", "(", "[", "c", "for"...
use pudge browser to generate interface docs from nose .
train
false
42,877
def numpy_cupy_array_list_equal(err_msg='', verbose=True, name='xp'): def decorator(impl): @functools.wraps(impl) def test_func(self, *args, **kw): kw[name] = cupy x = impl(self, *args, **kw) kw[name] = numpy y = impl(self, *args, **kw) self.assertIsNotNone(x) self.assertIsNotNone(y) array.assert_array_list_equal(x, y, err_msg, verbose) return test_func return decorator
[ "def", "numpy_cupy_array_list_equal", "(", "err_msg", "=", "''", ",", "verbose", "=", "True", ",", "name", "=", "'xp'", ")", ":", "def", "decorator", "(", "impl", ")", ":", "@", "functools", ".", "wraps", "(", "impl", ")", "def", "test_func", "(", "sel...
decorator that checks the resulting lists of numpy and cupys one are equal .
train
false
42,878
def imifft(X): return numpy.abs(ifft2(ifftshift(X)))
[ "def", "imifft", "(", "X", ")", ":", "return", "numpy", ".", "abs", "(", "ifft2", "(", "ifftshift", "(", "X", ")", ")", ")" ]
inverse 2d fft with decentering .
train
false
42,879
@contextmanager def if_zero(builder, value, likely=False): with builder.if_then(is_scalar_zero(builder, value), likely=likely): (yield)
[ "@", "contextmanager", "def", "if_zero", "(", "builder", ",", "value", ",", "likely", "=", "False", ")", ":", "with", "builder", ".", "if_then", "(", "is_scalar_zero", "(", "builder", ",", "value", ")", ",", "likely", "=", "likely", ")", ":", "(", "yie...
execute the given block if the scalar value is zero .
train
false
42,880
def hmac_digest(secret, message, encoding='utf-8'): if isinstance(secret, six.text_type): secret = secret.encode(encoding) return hmac.new(secret, message.encode(encoding), hashlib.sha256).hexdigest()
[ "def", "hmac_digest", "(", "secret", ",", "message", ",", "encoding", "=", "'utf-8'", ")", ":", "if", "isinstance", "(", "secret", ",", "six", ".", "text_type", ")", ":", "secret", "=", "secret", ".", "encode", "(", "encoding", ")", "return", "hmac", "...
return hex digest of a message hmac using secret .
train
false
42,881
@snippet def metric_crud(client, to_delete): METRIC_NAME = ('robots-%d' % (_millis(),)) DESCRIPTION = 'Robots all up in your server' FILTER = 'logName:apache-access AND textPayload:robot' UPDATED_FILTER = 'textPayload:robot' UPDATED_DESCRIPTION = 'Danger, Will Robinson!' for metric in client.list_metrics(): do_something_with(metric) metric = client.metric(METRIC_NAME, filter_=FILTER, description=DESCRIPTION) assert (not metric.exists()) metric.create() assert metric.exists() to_delete.append(metric) existing_metric = client.metric(METRIC_NAME) existing_metric.reload() assert (existing_metric.filter_ == FILTER) assert (existing_metric.description == DESCRIPTION) existing_metric.filter_ = UPDATED_FILTER existing_metric.description = UPDATED_DESCRIPTION existing_metric.update() existing_metric.reload() assert (existing_metric.filter_ == UPDATED_FILTER) assert (existing_metric.description == UPDATED_DESCRIPTION) def _metric_delete(): metric.delete() _backoff_not_found(_metric_delete) to_delete.remove(metric)
[ "@", "snippet", "def", "metric_crud", "(", "client", ",", "to_delete", ")", ":", "METRIC_NAME", "=", "(", "'robots-%d'", "%", "(", "_millis", "(", ")", ",", ")", ")", "DESCRIPTION", "=", "'Robots all up in your server'", "FILTER", "=", "'logName:apache-access AN...
metric crud .
train
true
42,882
def _read_string_data(f): length = _read_long(f) if (length > 0): length = _read_long(f) string_data = _read_bytes(f, length) _align_32(f) else: string_data = '' return string_data
[ "def", "_read_string_data", "(", "f", ")", ":", "length", "=", "_read_long", "(", "f", ")", "if", "(", "length", ">", "0", ")", ":", "length", "=", "_read_long", "(", "f", ")", "string_data", "=", "_read_bytes", "(", "f", ",", "length", ")", "_align_...
read a data string .
train
false
42,883
def get_split_user_partitions(user_partitions): return [user_partition for user_partition in user_partitions if (user_partition.scheme.name == 'random')]
[ "def", "get_split_user_partitions", "(", "user_partitions", ")", ":", "return", "[", "user_partition", "for", "user_partition", "in", "user_partitions", "if", "(", "user_partition", ".", "scheme", ".", "name", "==", "'random'", ")", "]" ]
helper method that filters a list of user_partitions and returns just the ones that are suitable for the split_test module .
train
false
42,884
def cluster_basic_fields(): return {'id': 1, 'created_at': timeutils.utcnow(with_timezone=False), 'deleted': False, 'name': 'cluster_name', 'binary': 'cinder-volume', 'race_preventer': 0}
[ "def", "cluster_basic_fields", "(", ")", ":", "return", "{", "'id'", ":", "1", ",", "'created_at'", ":", "timeutils", ".", "utcnow", "(", "with_timezone", "=", "False", ")", ",", "'deleted'", ":", "False", ",", "'name'", ":", "'cluster_name'", ",", "'binar...
return basic fields for a cluster .
train
false
42,885
def _unlock_file(f): if fcntl: fcntl.lockf(f, fcntl.LOCK_UN) if os.path.exists((f.name + '.lock')): os.remove((f.name + '.lock'))
[ "def", "_unlock_file", "(", "f", ")", ":", "if", "fcntl", ":", "fcntl", ".", "lockf", "(", "f", ",", "fcntl", ".", "LOCK_UN", ")", "if", "os", ".", "path", ".", "exists", "(", "(", "f", ".", "name", "+", "'.lock'", ")", ")", ":", "os", ".", "...
unlock file f using lockf and dot locking .
train
false
42,886
def compare_path_file_name(file_path_a, file_path_b): file_name_a = os.path.basename(file_path_a) file_name_b = os.path.basename(file_path_b) return (file_name_a < file_name_b)
[ "def", "compare_path_file_name", "(", "file_path_a", ",", "file_path_b", ")", ":", "file_name_a", "=", "os", ".", "path", ".", "basename", "(", "file_path_a", ")", "file_name_b", "=", "os", ".", "path", ".", "basename", "(", "file_path_b", ")", "return", "("...
custom compare function which compares full absolute file paths just using the file name .
train
false
42,888
@register.filter('linebreaks', is_safe=True, needs_autoescape=True) @stringfilter def linebreaks_filter(value, autoescape=None): autoescape = (autoescape and (not isinstance(value, SafeData))) return mark_safe(linebreaks(value, autoescape))
[ "@", "register", ".", "filter", "(", "'linebreaks'", ",", "is_safe", "=", "True", ",", "needs_autoescape", "=", "True", ")", "@", "stringfilter", "def", "linebreaks_filter", "(", "value", ",", "autoescape", "=", "None", ")", ":", "autoescape", "=", "(", "a...
replaces line breaks in plain text with appropriate html; a single newline becomes an html line break and a new line followed by a blank line becomes a paragraph break .
train
false
42,890
def EventReturn(halt=False, remove=False): return (halt, remove)
[ "def", "EventReturn", "(", "halt", "=", "False", ",", "remove", "=", "False", ")", ":", "return", "(", "halt", ",", "remove", ")" ]
event handlers can return special values .
train
false
42,891
def guess_n_streams(size, warn=False): if (isinstance(size, (tuple, list)) and all([isinstance(i, integer_types) for i in size])): r = 1 for s in size: r *= s if (r > 6): r = (r // 6) return min(r, (60 * 256)) else: if warn: warnings.warn(("MRG_RandomStreams Can't determine #streams from size (%s), guessing 60*256" % str(size)), stacklevel=3) return (60 * 256)
[ "def", "guess_n_streams", "(", "size", ",", "warn", "=", "False", ")", ":", "if", "(", "isinstance", "(", "size", ",", "(", "tuple", ",", "list", ")", ")", "and", "all", "(", "[", "isinstance", "(", "i", ",", "integer_types", ")", "for", "i", "in",...
return a guess at a good number of streams .
train
false
42,893
def load_app(target): global NORUN (NORUN, nr_old) = (True, NORUN) tmp = default_app.push() try: rv = load(target) return (rv if callable(rv) else tmp) finally: default_app.remove(tmp) NORUN = nr_old
[ "def", "load_app", "(", "target", ")", ":", "global", "NORUN", "(", "NORUN", ",", "nr_old", ")", "=", "(", "True", ",", "NORUN", ")", "tmp", "=", "default_app", ".", "push", "(", ")", "try", ":", "rv", "=", "load", "(", "target", ")", "return", "...
loads the app with the provided fully qualified name .
train
true
42,894
@pytest.mark.django_db def test_verify_user_already_verified(member_with_email): accounts.utils.verify_user(member_with_email) with pytest.raises(ValueError): accounts.utils.verify_user(member_with_email) EmailAddress.objects.get(user=member_with_email, email=member_with_email.email, primary=True, verified=True)
[ "@", "pytest", ".", "mark", ".", "django_db", "def", "test_verify_user_already_verified", "(", "member_with_email", ")", ":", "accounts", ".", "utils", ".", "verify_user", "(", "member_with_email", ")", "with", "pytest", ".", "raises", "(", "ValueError", ")", ":...
test verifying user using verify_user function that has an allauth .
train
false
42,895
def get_windows_username(): import ctypes try: advapi32 = ctypes.windll.advapi32 GetUserName = getattr(advapi32, u'GetUserNameW') except AttributeError: pass else: buf = ctypes.create_unicode_buffer(257) n = ctypes.c_int(257) if GetUserName(buf, ctypes.byref(n)): return buf.value return get_unicode_windows_env_var(u'USERNAME')
[ "def", "get_windows_username", "(", ")", ":", "import", "ctypes", "try", ":", "advapi32", "=", "ctypes", ".", "windll", ".", "advapi32", "GetUserName", "=", "getattr", "(", "advapi32", ",", "u'GetUserNameW'", ")", "except", "AttributeError", ":", "pass", "else...
return the user name of the currently loggen in user as a unicode string .
train
false
42,896
def _has_abstract_methods(node): return (len(unimplemented_abstract_methods(node)) > 0)
[ "def", "_has_abstract_methods", "(", "node", ")", ":", "return", "(", "len", "(", "unimplemented_abstract_methods", "(", "node", ")", ")", ">", "0", ")" ]
determine if the given node has abstract methods .
train
false
42,897
def get_mercurial_default_options_dict(command, command_table=None, **kwd): if (command_table is None): command_table = commands.table possible = cmdutil.findpossible(command, command_table) if (type(possible) is tuple): possible = possible[0] if (len(possible) != 1): raise Exception(('unable to find mercurial command "%s"' % command)) default_options_dict = dict(((r[1].replace('-', '_'), r[2]) for r in next(iter(possible.values()))[1][1])) for option in kwd: default_options_dict[option] = kwd[option] return default_options_dict
[ "def", "get_mercurial_default_options_dict", "(", "command", ",", "command_table", "=", "None", ",", "**", "kwd", ")", ":", "if", "(", "command_table", "is", "None", ")", ":", "command_table", "=", "commands", ".", "table", "possible", "=", "cmdutil", ".", "...
borrowed from repoman - get default parameters for a mercurial command .
train
false
42,898
def uri_to_path(uri): if isinstance(uri, compat.text_type): uri = uri.encode(u'utf-8') return urllib.parse.unquote(urllib.parse.urlsplit(uri).path)
[ "def", "uri_to_path", "(", "uri", ")", ":", "if", "isinstance", "(", "uri", ",", "compat", ".", "text_type", ")", ":", "uri", "=", "uri", ".", "encode", "(", "u'utf-8'", ")", "return", "urllib", ".", "parse", ".", "unquote", "(", "urllib", ".", "pars...
convert an uri to a os specific path .
train
false
42,899
def sequences(fileh, header): fposition = header['header_length'] reads_read = 0 while True: if (fposition == header['index_offset']): fposition += header['index_length'] continue else: (bytes_read, seq_data) = read_sequence(header=header, fileh=fileh, fposition=fposition) (yield seq_data) fposition += bytes_read reads_read += 1 if (reads_read >= header['number_of_reads']): break
[ "def", "sequences", "(", "fileh", ",", "header", ")", ":", "fposition", "=", "header", "[", "'header_length'", "]", "reads_read", "=", "0", "while", "True", ":", "if", "(", "fposition", "==", "header", "[", "'index_offset'", "]", ")", ":", "fposition", "...
it returns a generator with the data for each read .
train
false
42,900
def oo_chomp_commit_offset(version): if (version is None): return version else: return str(version).split('+')[0]
[ "def", "oo_chomp_commit_offset", "(", "version", ")", ":", "if", "(", "version", "is", "None", ")", ":", "return", "version", "else", ":", "return", "str", "(", "version", ")", ".", "split", "(", "'+'", ")", "[", "0", "]" ]
chomp any "+git .
train
false
42,901
@docfiller def prewitt(input, axis=(-1), output=None, mode='reflect', cval=0.0): input = numpy.asarray(input) axis = _ni_support._check_axis(axis, input.ndim) (output, return_value) = _ni_support._get_output(output, input) modes = _ni_support._normalize_sequence(mode, input.ndim) correlate1d(input, [(-1), 0, 1], axis, output, modes[axis], cval, 0) axes = [ii for ii in range(input.ndim) if (ii != axis)] for ii in axes: correlate1d(output, [1, 1, 1], ii, output, modes[ii], cval, 0) return return_value
[ "@", "docfiller", "def", "prewitt", "(", "input", ",", "axis", "=", "(", "-", "1", ")", ",", "output", "=", "None", ",", "mode", "=", "'reflect'", ",", "cval", "=", "0.0", ")", ":", "input", "=", "numpy", ".", "asarray", "(", "input", ")", "axis"...
calculate a prewitt filter .
train
false
42,902
def hdmi_boost_custom_default(): ' Yet to be implemented ' return '2'
[ "def", "hdmi_boost_custom_default", "(", ")", ":", "return", "'2'" ]
tests the users system to see which hdmi_boost figure should be used .
train
false
42,903
def get_request(): return crum.get_current_request()
[ "def", "get_request", "(", ")", ":", "return", "crum", ".", "get_current_request", "(", ")" ]
return the current request .
train
false
42,904
def sigmoid(x): return tf.nn.sigmoid(x)
[ "def", "sigmoid", "(", "x", ")", ":", "return", "tf", ".", "nn", ".", "sigmoid", "(", "x", ")" ]
elementwise sigmoid logistic function :math:f(x)=(1 + exp)^{-1} .
train
false
42,905
def test_attr_conflicts(): sc = SkyCoord(1, 2, frame=u'icrs', unit=u'deg', equinox=u'J1999', obstime=u'J2001') SkyCoord(sc, equinox=u'J1999', obstime=u'J2001') SkyCoord(sc.frame, equinox=u'J1999', obstime=u'J2100') with pytest.raises(ValueError) as err: SkyCoord(sc, equinox=u'J1999', obstime=u'J2002') assert (u"Coordinate attribute 'obstime'=" in str(err)) sc = SkyCoord(1, 2, frame=u'fk4', unit=u'deg', equinox=u'J1999', obstime=u'J2001') SkyCoord(sc, equinox=u'J1999', obstime=u'J2001') with pytest.raises(ValueError) as err: SkyCoord(sc, equinox=u'J1999', obstime=u'J2002') assert (u"Coordinate attribute 'obstime'=" in str(err)) with pytest.raises(ValueError) as err: SkyCoord(sc.frame, equinox=u'J1999', obstime=u'J2002') assert (u"Coordinate attribute 'obstime'=" in str(err))
[ "def", "test_attr_conflicts", "(", ")", ":", "sc", "=", "SkyCoord", "(", "1", ",", "2", ",", "frame", "=", "u'icrs'", ",", "unit", "=", "u'deg'", ",", "equinox", "=", "u'J1999'", ",", "obstime", "=", "u'J2001'", ")", "SkyCoord", "(", "sc", ",", "equi...
check conflicts resolution between coordinate attributes and init kwargs .
train
false
42,906
def create_java_app_env(app_name): env_vars = {'APPSCALE_HOME': constants.APPSCALE_HOME} config_file = find_web_xml(app_name) custom_env_vars = extract_env_vars_from_xml(config_file) env_vars.update(custom_env_vars) gcs_config = {'scheme': 'https', 'port': 443} try: gcs_config.update(deployment_config.get_config('gcs')) except ConfigInaccessible: logging.warning('Unable to fetch GCS configuration.') if ('host' in gcs_config): env_vars['GCS_HOST'] = '{scheme}://{host}:{port}'.format(**gcs_config) return env_vars
[ "def", "create_java_app_env", "(", "app_name", ")", ":", "env_vars", "=", "{", "'APPSCALE_HOME'", ":", "constants", ".", "APPSCALE_HOME", "}", "config_file", "=", "find_web_xml", "(", "app_name", ")", "custom_env_vars", "=", "extract_env_vars_from_xml", "(", "config...
returns the environment variables java application servers uses .
train
false
42,907
def unprovide(callback, resource_type): _get_manager().unregister(callback, resource_type)
[ "def", "unprovide", "(", "callback", ",", "resource_type", ")", ":", "_get_manager", "(", ")", ".", "unregister", "(", "callback", ",", "resource_type", ")" ]
unregister a callback for corresponding resource type .
train
false
42,909
def dump_module_sessions(module): global HOUSE if (not (module in HOUSE.keys())): Error(("Module '%s' not found." % module)) return else: mod = HOUSE[module] print (((((((color.B_YELLOW + '[') + color.B_RED) + '!') + color.B_YELLOW) + '] ') + color.B_WHITE) + module) for (cnt, obj) in enumerate(mod.keys()): print (((((((color.B_GREEN + ' DCTB [') + color.B_YELLOW) + str(cnt)) + color.B_GREEN) + '] ') + color.B_WHITE) + str(obj))
[ "def", "dump_module_sessions", "(", "module", ")", ":", "global", "HOUSE", "if", "(", "not", "(", "module", "in", "HOUSE", ".", "keys", "(", ")", ")", ")", ":", "Error", "(", "(", "\"Module '%s' not found.\"", "%", "module", ")", ")", "return", "else", ...
dump running sessions for a module .
train
false
42,910
def handle_exe_click(name): message = ('To use %(name)s, you must install it as a service.\n\nTo install %(name)s as a service, you must run the following in the console:\n\n %(name)s.exe install\n\nFor all available options, including how to install the service for a particular user, run the following in a console:\n\n %(name)s.exe help\n' % {'name': name}) MessageBox = ctypes.windll.user32.MessageBoxA MessageBox(None, message, 'Install as a Service', 0)
[ "def", "handle_exe_click", "(", "name", ")", ":", "message", "=", "(", "'To use %(name)s, you must install it as a service.\\n\\nTo install %(name)s as a service, you must run the following in the console:\\n\\n %(name)s.exe install\\n\\nFor all available options, including how to install the s...
when the executables are clicked directly in the ui .
train
false
42,911
def num_fields(pkt): return int(pkt[4], 16)
[ "def", "num_fields", "(", "pkt", ")", ":", "return", "int", "(", "pkt", "[", "4", "]", ",", "16", ")" ]
return the number of fields in a query response .
train
false
42,912
def api_format(api_response): http_response = api_response.http_response content_type = 'application/json' content = '' if ((api_response.data is not None) and (api_response.data != '')): content = json.dumps(api_response.data) http_response['Content-type'] = content_type http_response.content = content log.debug('API response type: {0} content: {1}'.format(content_type, content)) return http_response
[ "def", "api_format", "(", "api_response", ")", ":", "http_response", "=", "api_response", ".", "http_response", "content_type", "=", "'application/json'", "content", "=", "''", "if", "(", "(", "api_response", ".", "data", "is", "not", "None", ")", "and", "(", ...
takes an apiresponse and returns an httpresponse .
train
false
42,913
def _cast_pointer_p(value): return cast(value, pointer_p)
[ "def", "_cast_pointer_p", "(", "value", ")", ":", "return", "cast", "(", "value", ",", "pointer_p", ")" ]
casts a value to a pointer of a pointer .
train
false
42,917
@cmd def flake8(): py_files = subprocess.check_output('git ls-files') if PY3: py_files = py_files.decode() py_files = [x for x in py_files.split() if x.endswith('.py')] py_files = ' '.join(py_files) sh(('%s -m flake8 %s' % (PYTHON, py_files)))
[ "@", "cmd", "def", "flake8", "(", ")", ":", "py_files", "=", "subprocess", ".", "check_output", "(", "'git ls-files'", ")", "if", "PY3", ":", "py_files", "=", "py_files", ".", "decode", "(", ")", "py_files", "=", "[", "x", "for", "x", "in", "py_files",...
run flake8 against all py files .
train
false
42,918
def _EncodeUniquifier(uniquifier): if (type(uniquifier) in (int, long)): byte_str = util.EncodeVarLengthNumber(uniquifier) else: (client_id, server_id) = uniquifier assert ((server_id is None) or (type(server_id) in (str, unicode))), (server_id, type(server_id)) byte_str = util.EncodeVarLengthNumber(client_id) if (server_id is not None): byte_str += str(server_id) return byte_str
[ "def", "_EncodeUniquifier", "(", "uniquifier", ")", ":", "if", "(", "type", "(", "uniquifier", ")", "in", "(", "int", ",", "long", ")", ")", ":", "byte_str", "=", "util", ".", "EncodeVarLengthNumber", "(", "uniquifier", ")", "else", ":", "(", "client_id"...
if "uniquifier" is an int or long .
train
false
42,919
def _fit_ovo_binary(estimator, X, y, i, j): cond = np.logical_or((y == i), (y == j)) y = y[cond] y_binary = np.empty(y.shape, np.int) y_binary[(y == i)] = 0 y_binary[(y == j)] = 1 indcond = np.arange(X.shape[0])[cond] return (_fit_binary(estimator, _safe_split(estimator, X, None, indices=indcond)[0], y_binary, classes=[i, j]), indcond)
[ "def", "_fit_ovo_binary", "(", "estimator", ",", "X", ",", "y", ",", "i", ",", "j", ")", ":", "cond", "=", "np", ".", "logical_or", "(", "(", "y", "==", "i", ")", ",", "(", "y", "==", "j", ")", ")", "y", "=", "y", "[", "cond", "]", "y_binar...
fit a single binary estimator .
train
false
42,920
def massaged_documents_for_json(documents, user): docs = {} for document in documents: try: url = ((document.content_object and hasattr(document.content_object, 'get_absolute_url') and document.content_object.get_absolute_url()) or '') except: LOG.exception('failed to get absolute url') url = '' docs[document.id] = massage_doc_for_json(document, user, url) return docs
[ "def", "massaged_documents_for_json", "(", "documents", ",", "user", ")", ":", "docs", "=", "{", "}", "for", "document", "in", "documents", ":", "try", ":", "url", "=", "(", "(", "document", ".", "content_object", "and", "hasattr", "(", "document", ".", ...
var documents_defaults = { 1: { id: 1 .
train
false
42,921
def isPathEntirelyInsideLoop(loop, path): for point in path: if (not isPointInsideLoop(loop, point)): return False return True
[ "def", "isPathEntirelyInsideLoop", "(", "loop", ",", "path", ")", ":", "for", "point", "in", "path", ":", "if", "(", "not", "isPointInsideLoop", "(", "loop", ",", "point", ")", ")", ":", "return", "False", "return", "True" ]
determine if a path is entirely inside another loop .
train
false
42,922
def spm_config(path): defaults = DEFAULT_MASTER_OPTS.copy() defaults.update(DEFAULT_SPM_OPTS) overrides = load_config(path, 'SPM_CONFIG', DEFAULT_SPM_OPTS['spm_conf_file']) default_include = overrides.get('spm_default_include', defaults['spm_default_include']) include = overrides.get('include', []) overrides.update(include_config(default_include, path, verbose=False)) overrides.update(include_config(include, path, verbose=True)) defaults = apply_master_config(overrides, defaults) defaults = apply_spm_config(overrides, defaults) return client_config(path, env_var='SPM_CONFIG', defaults=defaults)
[ "def", "spm_config", "(", "path", ")", ":", "defaults", "=", "DEFAULT_MASTER_OPTS", ".", "copy", "(", ")", "defaults", ".", "update", "(", "DEFAULT_SPM_OPTS", ")", "overrides", "=", "load_config", "(", "path", ",", "'SPM_CONFIG'", ",", "DEFAULT_SPM_OPTS", "[",...
read in the salt master config file and add additional configs that need to be stubbed out for spm .
train
true
42,923
def transform_to_bool(value): if (value in ['1', 'true', 'True']): return True elif (value in ['0', 'false', 'False']): return False raise ValueError(('Invalid bool representation "%s" provided.' % value))
[ "def", "transform_to_bool", "(", "value", ")", ":", "if", "(", "value", "in", "[", "'1'", ",", "'true'", ",", "'True'", "]", ")", ":", "return", "True", "elif", "(", "value", "in", "[", "'0'", ",", "'false'", ",", "'False'", "]", ")", ":", "return"...
transforms a certain set of values to true or false .
train
false
42,924
@cmd def test_platform(): install() sh(('%s -m unittest -v psutil.tests.test_windows' % PYTHON))
[ "@", "cmd", "def", "test_platform", "(", ")", ":", "install", "(", ")", "sh", "(", "(", "'%s -m unittest -v psutil.tests.test_windows'", "%", "PYTHON", ")", ")" ]
run windows only tests .
train
false
42,925
def getFloatByPrefixSide(prefix, side, xmlElement): floatByDenominatorPrefix = evaluate.getEvaluatedFloatDefault(0.0, prefix, xmlElement) return (floatByDenominatorPrefix + (evaluate.getEvaluatedFloatDefault(0.0, (prefix + 'OverSide'), xmlElement) * side))
[ "def", "getFloatByPrefixSide", "(", "prefix", ",", "side", ",", "xmlElement", ")", ":", "floatByDenominatorPrefix", "=", "evaluate", ".", "getEvaluatedFloatDefault", "(", "0.0", ",", "prefix", ",", "xmlElement", ")", "return", "(", "floatByDenominatorPrefix", "+", ...
get float by prefix and side .
train
false
42,926
def save_any_to_npy(save_dict={}, name='any.npy'): np.save(name, save_dict)
[ "def", "save_any_to_npy", "(", "save_dict", "=", "{", "}", ",", "name", "=", "'any.npy'", ")", ":", "np", ".", "save", "(", "name", ",", "save_dict", ")" ]
save variables to .
train
false
42,927
def boolean_ops(): names = [u'negation', u'conjunction', u'disjunction', u'implication', u'equivalence'] for pair in zip(names, [Tokens.NOT, Tokens.AND, Tokens.OR, Tokens.IMP, Tokens.IFF]): print((u'%-15s DCTB %s' % pair))
[ "def", "boolean_ops", "(", ")", ":", "names", "=", "[", "u'negation'", ",", "u'conjunction'", ",", "u'disjunction'", ",", "u'implication'", ",", "u'equivalence'", "]", "for", "pair", "in", "zip", "(", "names", ",", "[", "Tokens", ".", "NOT", ",", "Tokens",...
boolean operators .
train
false
42,928
def _link_to(text, *args, **kwargs): assert (len(args) < 2), 'Too many unnamed arguments' def _link_class(kwargs): ' creates classes for the link_to calls ' suppress_active_class = kwargs.pop('suppress_active_class', False) if ((not suppress_active_class) and _link_active(kwargs)): active = ' active' else: active = '' kwargs.pop('highlight_actions', '') return ((kwargs.pop('class_', '') + active) or None) def _create_link_text(text, **kwargs): ' Update link text to add a icon or span if specified in the\n kwargs ' if kwargs.pop('inner_span', None): text = ((literal('<span>') + text) + literal('</span>')) if icon: text = (literal(('<i class="icon-%s"></i> ' % icon)) + text) return text icon = kwargs.pop('icon', None) class_ = _link_class(kwargs) return tags.link_to(_create_link_text(text, **kwargs), url_for(*args, **kwargs), class_=class_)
[ "def", "_link_to", "(", "text", ",", "*", "args", ",", "**", "kwargs", ")", ":", "assert", "(", "len", "(", "args", ")", "<", "2", ")", ",", "'Too many unnamed arguments'", "def", "_link_class", "(", "kwargs", ")", ":", "suppress_active_class", "=", "kwa...
common link making code for several helper functions .
train
false
42,929
def csvdata(nodelist): data = '' for subnode in nodelist: if (subnode.nodeType == subnode.ELEMENT_NODE): try: data = ((data + ',') + subnode.childNodes[0].data) except: data = (data + ',') return (data[1:] + '\n')
[ "def", "csvdata", "(", "nodelist", ")", ":", "data", "=", "''", "for", "subnode", "in", "nodelist", ":", "if", "(", "subnode", ".", "nodeType", "==", "subnode", ".", "ELEMENT_NODE", ")", ":", "try", ":", "data", "=", "(", "(", "data", "+", "','", "...
returns the data in the given node as a comma separated string @todo: deprecate .
train
false
42,931
def allow_jsonp(handler): def allow_jsonp_wrapper_fn(request, *args, **kwargs): if (('callback' in request.REQUEST) and (request.method == 'OPTIONS')): response = HttpResponse('', content_type='text/plain') else: response = handler(request, *args, **kwargs) if (not isinstance(response, JsonResponse)): return response elif ('callback' in request.REQUEST): response = JsonpResponse(response.content, request.REQUEST['callback']) if (('callback' in request.REQUEST) and (request.method in ['OPTIONS', 'GET']) and ('HTTP_ORIGIN' in request.META)): response['Access-Control-Allow-Origin'] = request.META['HTTP_ORIGIN'] response['Access-Control-Allow-Methods'] = 'GET, OPTIONS' response['Access-Control-Max-Age'] = '1000' response['Access-Control-Allow-Headers'] = 'Authorization,Content-Type,Accept,Origin,User-Agent,DNT,Cache-Control,X-Mx-ReqToken,Keep-Alive,X-Requested-With,If-Modified-Since' return response return allow_jsonp_wrapper_fn
[ "def", "allow_jsonp", "(", "handler", ")", ":", "def", "allow_jsonp_wrapper_fn", "(", "request", ",", "*", "args", ",", "**", "kwargs", ")", ":", "if", "(", "(", "'callback'", "in", "request", ".", "REQUEST", ")", "and", "(", "request", ".", "method", ...
a general wrapper for api views that should be permitted to return jsonp .
train
false
42,932
def wtf(message, exit_code=1): print_log(message, RED, BOLD) if (exit_code is not None): exit(exit_code)
[ "def", "wtf", "(", "message", ",", "exit_code", "=", "1", ")", ":", "print_log", "(", "message", ",", "RED", ",", "BOLD", ")", "if", "(", "exit_code", "is", "not", "None", ")", ":", "exit", "(", "exit_code", ")" ]
what a terrible failure! .
train
true
42,934
def Attr(obj, attr): return [obj, Node(syms.trailer, [Dot(), attr])]
[ "def", "Attr", "(", "obj", ",", "attr", ")", ":", "return", "[", "obj", ",", "Node", "(", "syms", ".", "trailer", ",", "[", "Dot", "(", ")", ",", "attr", "]", ")", "]" ]
a node tuple for obj .
train
false
42,936
def discover_settings(conf_base=None): settings = {'zmq_prefix': '', 'libzmq_extension': False, 'no_libzmq_extension': False, 'skip_check_zmq': False, 'allow_legacy_libzmq': False, 'bundle_msvcp': None, 'build_ext': {}, 'bdist_egg': {}} if sys.platform.startswith('win'): settings['have_sys_un_h'] = False if conf_base: merge(settings, load_config('config', conf_base)) merge(settings, get_cfg_args()) merge(settings, get_env_args()) return settings
[ "def", "discover_settings", "(", "conf_base", "=", "None", ")", ":", "settings", "=", "{", "'zmq_prefix'", ":", "''", ",", "'libzmq_extension'", ":", "False", ",", "'no_libzmq_extension'", ":", "False", ",", "'skip_check_zmq'", ":", "False", ",", "'allow_legacy_...
discover custom settings for zmq path .
train
true
42,937
def _retry_exception(f, steps=((0.1,) * 10), sleep=sleep): steps = iter(steps) while True: try: Message.new(message_type=u'flocker:provision:libcloud:retry_exception:trying', function=fullyQualifiedName(f)).write() return f() except: for step in steps: write_traceback() sleep(step) break else: raise
[ "def", "_retry_exception", "(", "f", ",", "steps", "=", "(", "(", "0.1", ",", ")", "*", "10", ")", ",", "sleep", "=", "sleep", ")", ":", "steps", "=", "iter", "(", "steps", ")", "while", "True", ":", "try", ":", "Message", ".", "new", "(", "mes...
retry a function if it raises an exception .
train
false
42,938
def download_dictionary_ttkit(export_format, prj, lang, words): exporter = get_exporter(export_format)(prj, lang, get_site_url(reverse('show_dictionary', kwargs={'project': prj.slug, 'lang': lang.code}))) for word in words.iterator(): exporter.add_dictionary(word) return exporter.get_response('glossary-{project}-{language}.{extension}')
[ "def", "download_dictionary_ttkit", "(", "export_format", ",", "prj", ",", "lang", ",", "words", ")", ":", "exporter", "=", "get_exporter", "(", "export_format", ")", "(", "prj", ",", "lang", ",", "get_site_url", "(", "reverse", "(", "'show_dictionary'", ",", ...
translate-toolkit builder for dictionary downloads .
train
false
42,939
def snapped_speed_limits(client, path): params = {'path': convert.location_list(path)} return client._get('/v1/speedLimits', params, base_url=_ROADS_BASE_URL, accepts_clientid=False, extract_body=_roads_extract)
[ "def", "snapped_speed_limits", "(", "client", ",", "path", ")", ":", "params", "=", "{", "'path'", ":", "convert", ".", "location_list", "(", "path", ")", "}", "return", "client", ".", "_get", "(", "'/v1/speedLimits'", ",", "params", ",", "base_url", "=", ...
returns the posted speed limit for given road segments .
train
true
42,940
def full_restart(name): restart(name)
[ "def", "full_restart", "(", "name", ")", ":", "restart", "(", "name", ")" ]
calls daemontools .
train
false
42,941
def read_pack_header(read): header = read(12) if (not header): return (None, None) if (header[:4] != 'PACK'): raise AssertionError(('Invalid pack header %r' % header)) (version,) = unpack_from('>L', header, 4) if (version not in (2, 3)): raise AssertionError(('Version was %d' % version)) (num_objects,) = unpack_from('>L', header, 8) return (version, num_objects)
[ "def", "read_pack_header", "(", "read", ")", ":", "header", "=", "read", "(", "12", ")", "if", "(", "not", "header", ")", ":", "return", "(", "None", ",", "None", ")", "if", "(", "header", "[", ":", "4", "]", "!=", "'PACK'", ")", ":", "raise", ...
read the header of a pack file .
train
false