id_within_dataset
int64
1
55.5k
snippet
stringlengths
19
14.2k
tokens
listlengths
6
1.63k
nl
stringlengths
6
352
split_within_dataset
stringclasses
1 value
is_duplicated
bool
2 classes
39,310
def pack_dbobj(item): _init_globals() obj = item natural_key = _FROM_MODEL_MAP[(hasattr(obj, 'id') and hasattr(obj, 'db_date_created') and hasattr(obj, '__dbclass__') and obj.__dbclass__.__name__.lower())] return ((natural_key and ('__packed_dbobj__', natural_key, _TO_DATESTRING(obj), _GA(obj, 'id'))) or item)
[ "def", "pack_dbobj", "(", "item", ")", ":", "_init_globals", "(", ")", "obj", "=", "item", "natural_key", "=", "_FROM_MODEL_MAP", "[", "(", "hasattr", "(", "obj", ",", "'id'", ")", "and", "hasattr", "(", "obj", ",", "'db_date_created'", ")", "and", "hasa...
check and convert django database objects to an internal representation .
train
false
39,314
def rename_tables_with_constraints(renames, constraints, engine): if (engine.name != 'sqlite'): remove_constraints(constraints) for to_table_name in renames: from_table = renames[to_table_name] from_table.rename(to_table_name) if (engine != 'sqlite'): add_constraints(constraints)
[ "def", "rename_tables_with_constraints", "(", "renames", ",", "constraints", ",", "engine", ")", ":", "if", "(", "engine", ".", "name", "!=", "'sqlite'", ")", ":", "remove_constraints", "(", "constraints", ")", "for", "to_table_name", "in", "renames", ":", "fr...
rename tables with foreign key constraints .
train
false
39,316
def get_namespace(): name = os.environ.get(_ENV_CURRENT_NAMESPACE, None) if (name is None): name = _config.default_namespace_for_request() if (name is not None): set_namespace(name) if (name is None): name = '' return name
[ "def", "get_namespace", "(", ")", ":", "name", "=", "os", ".", "environ", ".", "get", "(", "_ENV_CURRENT_NAMESPACE", ",", "None", ")", "if", "(", "name", "is", "None", ")", ":", "name", "=", "_config", ".", "default_namespace_for_request", "(", ")", "if"...
get the current default namespace or () namespace if unset .
train
false
39,317
def _require_valid_version(version_from_payload, exploration_version): if (version_from_payload is None): raise base.BaseHandler.InvalidInputException('Invalid POST request: a version must be specified.') if (version_from_payload != exploration_version): raise base.BaseHandler.InvalidInputException(('Trying to update version %s of exploration from version %s, which is too old. Please reload the page and try again.' % (exploration_version, version_from_payload)))
[ "def", "_require_valid_version", "(", "version_from_payload", ",", "exploration_version", ")", ":", "if", "(", "version_from_payload", "is", "None", ")", ":", "raise", "base", ".", "BaseHandler", ".", "InvalidInputException", "(", "'Invalid POST request: a version must be...
check that the payload version matches the given exploration version .
train
false
39,319
def download_signed(request, uuid, **kwargs): extension = get_object_or_404(Extension.objects.without_deleted().public(), uuid=uuid) version = get_object_or_404(extension.versions.without_deleted().public(), pk=kwargs['version_id']) log.info(('Downloading public add-on: %s version %s from %s' % (extension.pk, version.pk, version.signed_file_path))) return _download(request, extension, version, version.signed_file_path)
[ "def", "download_signed", "(", "request", ",", "uuid", ",", "**", "kwargs", ")", ":", "extension", "=", "get_object_or_404", "(", "Extension", ".", "objects", ".", "without_deleted", "(", ")", ".", "public", "(", ")", ",", "uuid", "=", "uuid", ")", "vers...
download the signed archive for a given public extension/version .
train
false
39,320
def py_native_string(source): if PY2: if isinstance(source, str): return source.encode('ascii') elif callable(source): @functools.wraps(source) def new_func(*args, **kwargs): rv = source(*args, **kwargs) if isinstance(rv, str): rv = rv.encode('unicode-escape') return rv return new_func return source
[ "def", "py_native_string", "(", "source", ")", ":", "if", "PY2", ":", "if", "isinstance", "(", "source", ",", "str", ")", ":", "return", "source", ".", "encode", "(", "'ascii'", ")", "elif", "callable", "(", "source", ")", ":", "@", "functools", ".", ...
converts unicode strings to bytestrings on python 2 .
train
false
39,321
def test_cache_deactivated_update_metadata(config_stub, tmpdir): config_stub.data = {'storage': {'cache-size': 1024}, 'general': {'private-browsing': True}} url = 'http://qutebrowser.org' disk_cache = cache.DiskCache(str(tmpdir)) metadata = QNetworkCacheMetaData() metadata.setUrl(QUrl(url)) assert metadata.isValid() assert (disk_cache.updateMetaData(metadata) is None)
[ "def", "test_cache_deactivated_update_metadata", "(", "config_stub", ",", "tmpdir", ")", ":", "config_stub", ".", "data", "=", "{", "'storage'", ":", "{", "'cache-size'", ":", "1024", "}", ",", "'general'", ":", "{", "'private-browsing'", ":", "True", "}", "}"...
test updating the meta data when cache is not activated .
train
false
39,322
def term_translation_update(context, data_dict): model = context['model'] _check_access('term_translation_update', context, data_dict) schema = {'term': [validators.not_empty, unicode], 'term_translation': [validators.not_empty, unicode], 'lang_code': [validators.not_empty, unicode]} (data, errors) = _validate(data_dict, schema, context) if errors: model.Session.rollback() raise ValidationError(errors) trans_table = model.term_translation_table update = trans_table.update() update = update.where((trans_table.c.term == data['term'])) update = update.where((trans_table.c.lang_code == data['lang_code'])) update = update.values(term_translation=data['term_translation']) conn = model.Session.connection() result = conn.execute(update) if (not result.rowcount): conn.execute(trans_table.insert().values(**data)) if (not context.get('defer_commit')): model.Session.commit() return data
[ "def", "term_translation_update", "(", "context", ",", "data_dict", ")", ":", "model", "=", "context", "[", "'model'", "]", "_check_access", "(", "'term_translation_update'", ",", "context", ",", "data_dict", ")", "schema", "=", "{", "'term'", ":", "[", "valid...
create or update a term translation .
train
false
39,323
def _solve_cg_mg(lap_sparse, B, tol, return_full_prob=False): X = [] ml = ruge_stuben_solver(lap_sparse) M = ml.aspreconditioner(cycle='V') for i in range(len(B)): x0 = cg(lap_sparse, (- B[i].todense()), tol=tol, M=M, maxiter=30)[0] X.append(x0) if (not return_full_prob): X = np.array(X) X = np.argmax(X, axis=0) return X
[ "def", "_solve_cg_mg", "(", "lap_sparse", ",", "B", ",", "tol", ",", "return_full_prob", "=", "False", ")", ":", "X", "=", "[", "]", "ml", "=", "ruge_stuben_solver", "(", "lap_sparse", ")", "M", "=", "ml", ".", "aspreconditioner", "(", "cycle", "=", "'...
solves lap_sparse x_i = b_i for each phase i .
train
false
39,325
def autoload(Model, extract_key, inject_key, func): @functools.wraps(func) def wrapper(*args, **kwargs): primary_key = kwargs.get(extract_key) instance = get_or_http_error(Model, primary_key) kwargs[inject_key] = instance return func(*args, **kwargs) return wrapper
[ "def", "autoload", "(", "Model", ",", "extract_key", ",", "inject_key", ",", "func", ")", ":", "@", "functools", ".", "wraps", "(", "func", ")", "def", "wrapper", "(", "*", "args", ",", "**", "kwargs", ")", ":", "primary_key", "=", "kwargs", ".", "ge...
find assets by looking for an assets module within each installed application .
train
false
39,326
def normalizeUnicode(value): return (unicodedata.normalize('NFKD', value).encode('ascii', 'ignore') if isinstance(value, unicode) else value)
[ "def", "normalizeUnicode", "(", "value", ")", ":", "return", "(", "unicodedata", ".", "normalize", "(", "'NFKD'", ",", "value", ")", ".", "encode", "(", "'ascii'", ",", "'ignore'", ")", "if", "isinstance", "(", "value", ",", "unicode", ")", "else", "valu...
does an ascii normalization of unicode strings reference: URL .
train
false
39,328
def new_secret(): secret = ''.join(random.sample(_all_chars, 64)) if six.PY3: secret = secret.encode('utf8') return secret
[ "def", "new_secret", "(", ")", ":", "secret", "=", "''", ".", "join", "(", "random", ".", "sample", "(", "_all_chars", ",", "64", ")", ")", "if", "six", ".", "PY3", ":", "secret", "=", "secret", ".", "encode", "(", "'utf8'", ")", "return", "secret"...
returns a 64 byte secret .
train
false
39,330
def test_join_exceptions(): a = symbol('a', 'var * {x: int}') b = symbol('b', 'var * {x: string}') with pytest.raises(TypeError) as excinfo: join(a, b, 'x') assert ('Schemata of joining columns do not match,' in str(excinfo.value)) assert ('x=int32 and x=string' in str(excinfo.value)) b = symbol('b', 'var * {z: int}') with pytest.raises(ValueError) as excinfo: join(a, b) assert ('No shared columns between a and b' in str(excinfo.value)) b = symbol('b', 'var * {x: int}') with pytest.raises(ValueError) as excinfo: join(a, b, how='inner_') assert ('Got: inner_' in str(excinfo.value))
[ "def", "test_join_exceptions", "(", ")", ":", "a", "=", "symbol", "(", "'a'", ",", "'var * {x: int}'", ")", "b", "=", "symbol", "(", "'b'", ",", "'var * {x: string}'", ")", "with", "pytest", ".", "raises", "(", "TypeError", ")", "as", "excinfo", ":", "jo...
exception raised for mismatched schema; exception raised for no shared fields .
train
false
39,331
def Erlang(name, k, l): return rv(name, GammaDistribution, (k, (1 / l)))
[ "def", "Erlang", "(", "name", ",", "k", ",", "l", ")", ":", "return", "rv", "(", "name", ",", "GammaDistribution", ",", "(", "k", ",", "(", "1", "/", "l", ")", ")", ")" ]
create a continuous random variable with an erlang distribution .
train
false
39,332
def test_write_no_delimiter(): out = StringIO() ascii.write(dat, out, Writer=ascii.FixedWidth, bookend=False, delimiter=None) assert_equal_splitlines(out.getvalue(), 'Col1 Col2 Col3 Col4\n 1.2 "hello" 1 a\n 2.4 \'s worlds 2 2\n')
[ "def", "test_write_no_delimiter", "(", ")", ":", "out", "=", "StringIO", "(", ")", "ascii", ".", "write", "(", "dat", ",", "out", ",", "Writer", "=", "ascii", ".", "FixedWidth", ",", "bookend", "=", "False", ",", "delimiter", "=", "None", ")", "assert_...
write a table as a fixed width table with no delimiter .
train
false
39,334
@require_POST @login_required def post_preview_async(request, document_slug): statsd.incr('forums.preview') post = Post(creator=request.user, content=request.POST.get('content', '')) return render(request, 'kbforums/includes/post_preview.html', {'post_preview': post})
[ "@", "require_POST", "@", "login_required", "def", "post_preview_async", "(", "request", ",", "document_slug", ")", ":", "statsd", ".", "incr", "(", "'forums.preview'", ")", "post", "=", "Post", "(", "creator", "=", "request", ".", "user", ",", "content", "=...
ajax preview of posts .
train
false
39,335
def _breakHDB1(HDB1): NDB = (HDB1 & 15) return NDB
[ "def", "_breakHDB1", "(", "HDB1", ")", ":", "NDB", "=", "(", "HDB1", "&", "15", ")", "return", "NDB" ]
decode header byte 1 .
train
false
39,336
def test_repr_array_of_quantity(): a = np.array([(1 * u.m), (2 * u.s)], dtype=object) assert (repr(a) == u'array([<Quantity 1.0 m>, <Quantity 2.0 s>], dtype=object)') assert (str(a) == u'[<Quantity 1.0 m> <Quantity 2.0 s>]')
[ "def", "test_repr_array_of_quantity", "(", ")", ":", "a", "=", "np", ".", "array", "(", "[", "(", "1", "*", "u", ".", "m", ")", ",", "(", "2", "*", "u", ".", "s", ")", "]", ",", "dtype", "=", "object", ")", "assert", "(", "repr", "(", "a", ...
test print/repr of object arrays of quantity objects with different units .
train
false
39,337
def rax_find_server(module, rax_module, server): cs = rax_module.cloudservers try: UUID(server) server = cs.servers.get(server) except ValueError: servers = cs.servers.list(search_opts=dict(name=('^%s$' % server))) if (not servers): module.fail_json(msg='No Server was matched by name, try using the Server ID instead') if (len(servers) > 1): module.fail_json(msg='Multiple servers matched by name, try using the Server ID instead') server = servers[0] return server
[ "def", "rax_find_server", "(", "module", ",", "rax_module", ",", "server", ")", ":", "cs", "=", "rax_module", ".", "cloudservers", "try", ":", "UUID", "(", "server", ")", "server", "=", "cs", ".", "servers", ".", "get", "(", "server", ")", "except", "V...
find a cloud server by id or name .
train
false
39,338
def isValidHMAC(hmac1, hmac2, key): assert (len(hmac1) == len(hmac2)) doubleHmac1 = mycrypto.HMAC_SHA256_128(key, hmac1) doubleHmac2 = mycrypto.HMAC_SHA256_128(key, hmac2) if (doubleHmac1 != doubleHmac2): return False log.debug('The computed HMAC is valid.') return True
[ "def", "isValidHMAC", "(", "hmac1", ",", "hmac2", ",", "key", ")", ":", "assert", "(", "len", "(", "hmac1", ")", "==", "len", "(", "hmac2", ")", ")", "doubleHmac1", "=", "mycrypto", ".", "HMAC_SHA256_128", "(", "key", ",", "hmac1", ")", "doubleHmac2", ...
compares hmac1 and hmac2 after hmacing them again using key .
train
false
39,339
def get__all__entries(obj): try: words = getattr(obj, '__all__') except: return [] return [cast_unicode_py2(w) for w in words if isinstance(w, str)]
[ "def", "get__all__entries", "(", "obj", ")", ":", "try", ":", "words", "=", "getattr", "(", "obj", ",", "'__all__'", ")", "except", ":", "return", "[", "]", "return", "[", "cast_unicode_py2", "(", "w", ")", "for", "w", "in", "words", "if", "isinstance"...
returns the strings in the __all__ attribute .
train
false
39,340
def delete_async(models, **kwargs): if isinstance(models, (basestring, Model, Key)): models = [models] else: try: models = iter(models) except TypeError: models = [models] keys = [_coerce_to_key(v) for v in models] return datastore.DeleteAsync(keys, **kwargs)
[ "def", "delete_async", "(", "models", ",", "**", "kwargs", ")", ":", "if", "isinstance", "(", "models", ",", "(", "basestring", ",", "Model", ",", "Key", ")", ")", ":", "models", "=", "[", "models", "]", "else", ":", "try", ":", "models", "=", "ite...
delete a blob from blobstore -- async version .
train
false
39,341
def mult_and_reduce(input_fft_v, filters_fft_v, input_shape=None, filter_shape=None): if (input_shape is None): input_shape = input_fft_v.shape if (filter_shape is None): filter_shape = filters_fft_v.shape (b, ic, i0, i1_f, _) = input_shape oc = filter_shape[0] input_r = input_fft_v.reshape((b, ic, (i0 * i1_f), 2)) filters_r = filters_fft_v.reshape((oc, ic, (i0 * i1_f), 2)) input_s = input_r.dimshuffle(2, 0, 1, 3) filters_s = filters_r.dimshuffle(2, 1, 0, 3) output_s = batched_complex_dot(input_s, filters_s) output_r = output_s.dimshuffle(1, 2, 0, 3) output = output_r.reshape((b, oc, i0, i1_f, 2)) return output
[ "def", "mult_and_reduce", "(", "input_fft_v", ",", "filters_fft_v", ",", "input_shape", "=", "None", ",", "filter_shape", "=", "None", ")", ":", "if", "(", "input_shape", "is", "None", ")", ":", "input_shape", "=", "input_fft_v", ".", "shape", "if", "(", "...
parameters input_fft_v its .
train
false
39,343
def matchStrengthNoNoise(x, y, n): return sum(((xi == yi) for (xi, yi, ni) in zip(x, y, n) if (ni != '#')))
[ "def", "matchStrengthNoNoise", "(", "x", ",", "y", ",", "n", ")", ":", "return", "sum", "(", "(", "(", "xi", "==", "yi", ")", "for", "(", "xi", ",", "yi", ",", "ni", ")", "in", "zip", "(", "x", ",", "y", ",", "n", ")", "if", "(", "ni", "!...
compute the match strength for the individual *x* on the string *y* excluding noise *n* .
train
false
39,345
@membership_required def topic_remove(request, slug, topic_id, template_name='groups/topics/topic_remove_confirm.html'): group = get_object_or_404(Group, slug=slug) topic = get_object_or_404(GroupTopic, pk=topic_id, group=group, user=request.user) if (request.method == 'POST'): topic.is_active = False topic.save() return redirect(request, group) return render(request, template_name, {'topic': topic})
[ "@", "membership_required", "def", "topic_remove", "(", "request", ",", "slug", ",", "topic_id", ",", "template_name", "=", "'groups/topics/topic_remove_confirm.html'", ")", ":", "group", "=", "get_object_or_404", "(", "Group", ",", "slug", "=", "slug", ")", "topi...
returns a group topic delete confirmation page .
train
false
39,346
@testing.requires_testing_data @requires_mayavi def test_inst_source(): from mne.gui._file_traits import InstSource inst = InstSource() assert_equal(inst.inst_fname, '-') inst.file = inst_path assert_equal(inst.inst_dir, os.path.dirname(inst_path)) lpa = array([[(-0.0713766068), 0.0, 5.12227416e-09]]) nasion = array([[3.7252903e-09, 0.102605611, 4.19095159e-09]]) rpa = array([[0.07526768, 0.0, 5.58793545e-09]]) assert_allclose(inst.lpa, lpa) assert_allclose(inst.nasion, nasion) assert_allclose(inst.rpa, rpa)
[ "@", "testing", ".", "requires_testing_data", "@", "requires_mayavi", "def", "test_inst_source", "(", ")", ":", "from", "mne", ".", "gui", ".", "_file_traits", "import", "InstSource", "inst", "=", "InstSource", "(", ")", "assert_equal", "(", "inst", ".", "inst...
test instsource .
train
false
39,347
def error_msg(text): msg(colorize(('Error: ' + str(text)), 'red'))
[ "def", "error_msg", "(", "text", ")", ":", "msg", "(", "colorize", "(", "(", "'Error: '", "+", "str", "(", "text", ")", ")", ",", "'red'", ")", ")" ]
colorize error message with prefix .
train
false
39,348
def impact_type(): impact_tables() module = 'impact' resourcename = 'type' return s3_rest_controller(module, resourcename)
[ "def", "impact_type", "(", ")", ":", "impact_tables", "(", ")", "module", "=", "'impact'", "resourcename", "=", "'type'", "return", "s3_rest_controller", "(", "module", ",", "resourcename", ")" ]
restful crud controller .
train
false
39,349
def _get_id(vpc_name=None, cidr=None, tags=None, region=None, key=None, keyid=None, profile=None): if (vpc_name and (not any((cidr, tags)))): vpc_id = _cache_id(vpc_name, region=region, key=key, keyid=keyid, profile=profile) if vpc_id: return vpc_id vpc_ids = _find_vpcs(vpc_name=vpc_name, cidr=cidr, tags=tags, region=region, key=key, keyid=keyid, profile=profile) if vpc_ids: log.debug('Matching VPC: {0}'.format(' '.join(vpc_ids))) if (len(vpc_ids) == 1): vpc_id = vpc_ids[0] if vpc_name: _cache_id(vpc_name, vpc_id, region=region, key=key, keyid=keyid, profile=profile) return vpc_id else: raise CommandExecutionError('Found more than one VPC matching the criteria.') else: log.info('No VPC found.') return None
[ "def", "_get_id", "(", "vpc_name", "=", "None", ",", "cidr", "=", "None", ",", "tags", "=", "None", ",", "region", "=", "None", ",", "key", "=", "None", ",", "keyid", "=", "None", ",", "profile", "=", "None", ")", ":", "if", "(", "vpc_name", "and...
given vpc properties .
train
true
39,351
def release(): return uname()[2]
[ "def", "release", "(", ")", ":", "return", "uname", "(", ")", "[", "2", "]" ]
automate everything to be done for a release with numpy-vendor .
train
false
39,352
def interactive_download(url, output_file, title='', chunk_size=(100 * 1024)): output_dir = os.path.dirname(output_file) output_file = open(output_file, 'w+b') input_file = urllib2.urlopen(url) try: file_size = int(input_file.headers['Content-Length']) except KeyError: raise ValueError('Could not find file size in HTTP headers') logging.info('Downloading %s, %s to %s', os.path.basename(url), display_data_size(file_size), output_dir) if title: width = (progressbar.ProgressBar.DEFAULT_WIDTH - len(title)) progress_bar = progressbar.ProgressBar(maximum=file_size, width=width, title=title) else: progress_bar = progressbar.ProgressBar(maximum=file_size) progress_bar.update_screen() while True: data = input_file.read(chunk_size) if data: progress_bar.increment(len(data)) output_file.write(data) else: progress_bar.update(file_size) print break output_file.close()
[ "def", "interactive_download", "(", "url", ",", "output_file", ",", "title", "=", "''", ",", "chunk_size", "=", "(", "100", "*", "1024", ")", ")", ":", "output_dir", "=", "os", ".", "path", ".", "dirname", "(", "output_file", ")", "output_file", "=", "...
interactively downloads a given file url to a given output file :type url: string .
train
false
39,353
def _generate_bagging_indices(random_state, bootstrap_features, bootstrap_samples, n_features, n_samples, max_features, max_samples): random_state = check_random_state(random_state) feature_indices = _generate_indices(random_state, bootstrap_features, n_features, max_features) sample_indices = _generate_indices(random_state, bootstrap_samples, n_samples, max_samples) return (feature_indices, sample_indices)
[ "def", "_generate_bagging_indices", "(", "random_state", ",", "bootstrap_features", ",", "bootstrap_samples", ",", "n_features", ",", "n_samples", ",", "max_features", ",", "max_samples", ")", ":", "random_state", "=", "check_random_state", "(", "random_state", ")", "...
randomly draw feature and sample indices .
train
false
39,354
def modules(modulePath): try: mod = sys.modules[modulePath] if (mod is None): raise KeyError() except KeyError: __import__(modulePath) mod = sys.modules[modulePath] return mod
[ "def", "modules", "(", "modulePath", ")", ":", "try", ":", "mod", "=", "sys", ".", "modules", "[", "modulePath", "]", "if", "(", "mod", "is", "None", ")", ":", "raise", "KeyError", "(", ")", "except", "KeyError", ":", "__import__", "(", "modulePath", ...
returns the available modules .
train
false
39,356
def iface_dtype(obj): dynamic_go_type = iface_commontype(obj) if (dynamic_go_type is None): return dtype_name = dynamic_go_type['string'].dereference()['str'].string() dynamic_gdb_type = lookup_type(dtype_name) if (dynamic_gdb_type is None): return type_size = int(dynamic_go_type['size']) uintptr_size = int(dynamic_go_type['size'].type.sizeof) if (type_size > uintptr_size): dynamic_gdb_type = dynamic_gdb_type.pointer() return dynamic_gdb_type
[ "def", "iface_dtype", "(", "obj", ")", ":", "dynamic_go_type", "=", "iface_commontype", "(", "obj", ")", "if", "(", "dynamic_go_type", "is", "None", ")", ":", "return", "dtype_name", "=", "dynamic_go_type", "[", "'string'", "]", ".", "dereference", "(", ")",...
decode type of the data field of an eface or iface struct .
train
false
39,357
def _process_dataset(name, images, vocab, num_shards): images = [ImageMetadata(image.image_id, image.filename, [caption]) for image in images for caption in image.captions] random.seed(12345) random.shuffle(images) num_threads = min(num_shards, FLAGS.num_threads) spacing = np.linspace(0, len(images), (num_threads + 1)).astype(np.int) ranges = [] threads = [] for i in xrange((len(spacing) - 1)): ranges.append([spacing[i], spacing[(i + 1)]]) coord = tf.train.Coordinator() decoder = ImageDecoder() print(('Launching %d threads for spacings: %s' % (num_threads, ranges))) for thread_index in xrange(len(ranges)): args = (thread_index, ranges, name, images, decoder, vocab, num_shards) t = threading.Thread(target=_process_image_files, args=args) t.start() threads.append(t) coord.join(threads) print(("%s: Finished processing all %d image-caption pairs in data set '%s'." % (datetime.now(), len(images), name)))
[ "def", "_process_dataset", "(", "name", ",", "images", ",", "vocab", ",", "num_shards", ")", ":", "images", "=", "[", "ImageMetadata", "(", "image", ".", "image_id", ",", "image", ".", "filename", ",", "[", "caption", "]", ")", "for", "image", "in", "i...
process a complete data set and save it as a tfrecord .
train
false
39,358
@dec.onlyif((lambda : ((sys.platform != 'win32') or has_pywin32())), 'This test runs on posix or in win32 with win32api installed') def test_find_cmd_fail(): nt.assert_raises(FindCmdError, find_cmd, 'asdfasdf')
[ "@", "dec", ".", "onlyif", "(", "(", "lambda", ":", "(", "(", "sys", ".", "platform", "!=", "'win32'", ")", "or", "has_pywin32", "(", ")", ")", ")", ",", "'This test runs on posix or in win32 with win32api installed'", ")", "def", "test_find_cmd_fail", "(", ")...
make sure that findcmderror is raised if we cant find the cmd .
train
false
39,360
def getSkeinforgePluginsPath(subName=''): return getJoinedPath(getSkeinforgePath('skeinforge_plugins'), subName)
[ "def", "getSkeinforgePluginsPath", "(", "subName", "=", "''", ")", ":", "return", "getJoinedPath", "(", "getSkeinforgePath", "(", "'skeinforge_plugins'", ")", ",", "subName", ")" ]
get the skeinforge plugins directory path .
train
false
39,361
def unjellyFromAOT(aot): return AOTUnjellier().unjelly(aot)
[ "def", "unjellyFromAOT", "(", "aot", ")", ":", "return", "AOTUnjellier", "(", ")", ".", "unjelly", "(", "aot", ")" ]
pass me an abstract object tree .
train
false
39,362
def _retrieve_rpm_probes(): return __salt__['probes.config']()
[ "def", "_retrieve_rpm_probes", "(", ")", ":", "return", "__salt__", "[", "'probes.config'", "]", "(", ")" ]
will retrive the probes from the network device using salt module "probes" throught napalm proxy .
train
false
39,364
def find_dir_item_metadata_by_name(dir, name): meta_stream = None try: mfile = dir.metadata_file() if mfile: meta_stream = mfile.open() meta = metadata.Metadata.read(meta_stream) if (name == ''): return meta for sub in dir: if stat.S_ISDIR(sub.mode): meta = find_dir_item_metadata_by_name(sub, '') else: meta = metadata.Metadata.read(meta_stream) if (sub.name == name): return meta finally: if meta_stream: meta_stream.close()
[ "def", "find_dir_item_metadata_by_name", "(", "dir", ",", "name", ")", ":", "meta_stream", "=", "None", "try", ":", "mfile", "=", "dir", ".", "metadata_file", "(", ")", "if", "mfile", ":", "meta_stream", "=", "mfile", ".", "open", "(", ")", "meta", "=", ...
find metadata in dir for an item with the given name .
train
false
39,365
def load_list_setting(settings, name): value = settings.get(name) if (not value): value = [] if isinstance(value, str_cls): value = [value] return value
[ "def", "load_list_setting", "(", "settings", ",", "name", ")", ":", "value", "=", "settings", ".", "get", "(", "name", ")", "if", "(", "not", "value", ")", ":", "value", "=", "[", "]", "if", "isinstance", "(", "value", ",", "str_cls", ")", ":", "va...
sometimes users accidentally change settings that should be lists to just individual strings .
train
false
39,366
def testRemoteNet(remote='ubuntu2', link=RemoteGRELink): info('*** Remote Node Test\n') net = Mininet(host=RemoteHost, switch=RemoteOVSSwitch, link=link) c0 = net.addController('c0') Intf('eth0', node=c0).updateIP() info('*** Creating local h1\n') h1 = net.addHost('h1') info('*** Creating remote h2\n') h2 = net.addHost('h2', server=remote) info('*** Creating local s1\n') s1 = net.addSwitch('s1') info('*** Creating remote s2\n') s2 = net.addSwitch('s2', server=remote) info('*** Adding links\n') net.addLink(h1, s1) net.addLink(s1, s2) net.addLink(h2, s2) net.start() info('Mininet is running on', quietRun('hostname').strip(), '\n') for node in (c0, h1, h2, s1, s2): info('Node', node, 'is running on', node.cmd('hostname').strip(), '\n') net.pingAll() CLI(net) net.stop()
[ "def", "testRemoteNet", "(", "remote", "=", "'ubuntu2'", ",", "link", "=", "RemoteGRELink", ")", ":", "info", "(", "'*** Remote Node Test\\n'", ")", "net", "=", "Mininet", "(", "host", "=", "RemoteHost", ",", "switch", "=", "RemoteOVSSwitch", ",", "link", "=...
test remote node classes .
train
false
39,367
def _parse_xfs_info(data): ret = {} spr = re.compile('\\s+') entry = None for line in [spr.sub(' ', l).strip().replace(', ', ' ') for l in data.split('\n')]: if (not line): continue nfo = _xfs_info_get_kv(line) if (not line.startswith('=')): entry = nfo.pop(0) ret[entry[0]] = {'section': entry[(((entry[1] != '***') and 1) or 0)]} ret[entry[0]].update(dict(nfo)) return ret
[ "def", "_parse_xfs_info", "(", "data", ")", ":", "ret", "=", "{", "}", "spr", "=", "re", ".", "compile", "(", "'\\\\s+'", ")", "entry", "=", "None", "for", "line", "in", "[", "spr", ".", "sub", "(", "' '", ",", "l", ")", ".", "strip", "(", ")",...
parse output from "xfs_info" or "xfs_growfs -n" .
train
true
39,368
def parse_link_id(link): return link.get('href')[(link.get('href').find("'") + 1):link.get('href').rfind("'")]
[ "def", "parse_link_id", "(", "link", ")", ":", "return", "link", ".", "get", "(", "'href'", ")", "[", "(", "link", ".", "get", "(", "'href'", ")", ".", "find", "(", "\"'\"", ")", "+", "1", ")", ":", "link", ".", "get", "(", "'href'", ")", ".", ...
extracts the div[@id] from the links on the documentsforbill pages .
train
false
39,369
def set_etcd_facts_if_unset(facts): if (('master' in facts) and safe_get_bool(facts['master']['embedded_etcd'])): etcd_facts = (facts['etcd'] if ('etcd' in facts) else dict()) if ('etcd_data_dir' not in etcd_facts): try: master_cfg_path = os.path.join(facts['common']['config_base'], 'master/master-config.yaml') master_cfg_f = open(master_cfg_path, 'r') config = yaml.safe_load(master_cfg_f.read()) master_cfg_f.close() etcd_facts['etcd_data_dir'] = config['etcdConfig']['storageDirectory'] facts['etcd'] = etcd_facts except Exception: pass else: etcd_facts = (facts['etcd'] if ('etcd' in facts) else dict()) try: ini_str = text_type(('[root]\n' + open('/etc/etcd/etcd.conf', 'r').read()), 'utf-8') ini_fp = io.StringIO(ini_str) config = configparser.RawConfigParser() config.readfp(ini_fp) etcd_data_dir = config.get('root', 'ETCD_DATA_DIR') if (etcd_data_dir.startswith('"') and etcd_data_dir.endswith('"')): etcd_data_dir = etcd_data_dir[1:(-1)] etcd_facts['etcd_data_dir'] = etcd_data_dir facts['etcd'] = etcd_facts except Exception: pass return facts
[ "def", "set_etcd_facts_if_unset", "(", "facts", ")", ":", "if", "(", "(", "'master'", "in", "facts", ")", "and", "safe_get_bool", "(", "facts", "[", "'master'", "]", "[", "'embedded_etcd'", "]", ")", ")", ":", "etcd_facts", "=", "(", "facts", "[", "'etcd...
if using embedded etcd .
train
false
39,370
def resolve_content_toctree(environment, docname, builder, toctree, prune=True, maxdepth=0, titles_only=False, collapse=False, includehidden=False): return toctree
[ "def", "resolve_content_toctree", "(", "environment", ",", "docname", ",", "builder", ",", "toctree", ",", "prune", "=", "True", ",", "maxdepth", "=", "0", ",", "titles_only", "=", "False", ",", "collapse", "=", "False", ",", "includehidden", "=", "False", ...
alternative toctree resolution for main content: dont resolve the toctree .
train
false
39,371
def load_digits(n_class=10, return_X_y=False): module_path = dirname(__file__) data = np.loadtxt(join(module_path, 'data', 'digits.csv.gz'), delimiter=',') with open(join(module_path, 'descr', 'digits.rst')) as f: descr = f.read() target = data[:, (-1)].astype(np.int) flat_data = data[:, :(-1)] images = flat_data.view() images.shape = ((-1), 8, 8) if (n_class < 10): idx = (target < n_class) (flat_data, target) = (flat_data[idx], target[idx]) images = images[idx] if return_X_y: return (flat_data, target) return Bunch(data=flat_data, target=target, target_names=np.arange(10), images=images, DESCR=descr)
[ "def", "load_digits", "(", "n_class", "=", "10", ",", "return_X_y", "=", "False", ")", ":", "module_path", "=", "dirname", "(", "__file__", ")", "data", "=", "np", ".", "loadtxt", "(", "join", "(", "module_path", ",", "'data'", ",", "'digits.csv.gz'", ")...
load and return the digits dataset .
train
false
39,372
def scp_from(ip_address, keyname, remote_file, local_file): key_file = '{}/{}.key'.format(KEY_DIRECTORY, keyname) remote_location = '{}:{}'.format(ip_address, remote_file) scp_cmd = ['scp', '-i', key_file, remote_location, local_file] subprocess.check_call(scp_cmd)
[ "def", "scp_from", "(", "ip_address", ",", "keyname", ",", "remote_file", ",", "local_file", ")", ":", "key_file", "=", "'{}/{}.key'", ".", "format", "(", "KEY_DIRECTORY", ",", "keyname", ")", "remote_location", "=", "'{}:{}'", ".", "format", "(", "ip_address"...
copies a file from a remote machine to the local one .
train
false
39,373
def sha512b64(*data): return _hash(hashlib.sha512, data).digest().encode('base64')
[ "def", "sha512b64", "(", "*", "data", ")", ":", "return", "_hash", "(", "hashlib", ".", "sha512", ",", "data", ")", ".", "digest", "(", ")", ".", "encode", "(", "'base64'", ")" ]
apply the sha512 hash algorithm to a string and return the base64-encoded hash value .
train
false
39,375
def client_host(server_host): if (server_host == '0.0.0.0'): return '127.0.0.1' if (server_host in ('::', '::0', '::0.0.0.0')): return '::1' return server_host
[ "def", "client_host", "(", "server_host", ")", ":", "if", "(", "server_host", "==", "'0.0.0.0'", ")", ":", "return", "'127.0.0.1'", "if", "(", "server_host", "in", "(", "'::'", ",", "'::0'", ",", "'::0.0.0.0'", ")", ")", ":", "return", "'::1'", "return", ...
return the host on which a client can connect to the given listener .
train
false
39,376
def _version_control(version): repo_path = get_migrate_repo_path() sql_connection = CONF.sql_connection if (version is None): version = versioning_repository.Repository(repo_path).latest return versioning_api.version_control(sql_connection, repo_path, version)
[ "def", "_version_control", "(", "version", ")", ":", "repo_path", "=", "get_migrate_repo_path", "(", ")", "sql_connection", "=", "CONF", ".", "sql_connection", "if", "(", "version", "is", "None", ")", ":", "version", "=", "versioning_repository", ".", "Repositor...
place a database under migration control this will only set the specific version of a database .
train
false
39,377
def test_elemwise_comparaison_cast(): a = tensor.fmatrix() b = tensor.fmatrix() av = theano._asarray(numpy.random.rand(4, 4), dtype='float32') bv = numpy.ones((4, 4), dtype='float32') for (g, ans) in [(tensor.lt, (av < bv)), (tensor.gt, (av > bv)), (tensor.le, (av <= bv)), (tensor.ge, (av >= bv))]: f = pfunc([a, b], tensor.cast(g(a, b), 'float32'), mode=mode_with_gpu) out = f(av, bv) assert numpy.all((out == ans)) assert any([isinstance(node.op, cuda.GpuElemwise) for node in f.maker.fgraph.toposort()])
[ "def", "test_elemwise_comparaison_cast", "(", ")", ":", "a", "=", "tensor", ".", "fmatrix", "(", ")", "b", "=", "tensor", ".", "fmatrix", "(", ")", "av", "=", "theano", ".", "_asarray", "(", "numpy", ".", "random", ".", "rand", "(", "4", ",", "4", ...
test if an elemwise comparison followed by a cast to float32 is pushed to gpu .
train
false
39,378
def get_magic(): return '\xb3\xf2\r\n'
[ "def", "get_magic", "(", ")", ":", "return", "'\\xb3\\xf2\\r\\n'" ]
return the magic string used to recognize byte-compiled code files .
train
false
39,379
def CartanMatrix(ct): return CartanType(ct).cartan_matrix()
[ "def", "CartanMatrix", "(", "ct", ")", ":", "return", "CartanType", "(", "ct", ")", ".", "cartan_matrix", "(", ")" ]
access the cartan matrix of a specific lie algebra examples .
train
false
39,380
@register.filter('slice', is_safe=True) def slice_filter(value, arg): try: bits = [] for x in arg.split(':'): if (len(x) == 0): bits.append(None) else: bits.append(int(x)) return value[slice(*bits)] except (ValueError, TypeError): return value
[ "@", "register", ".", "filter", "(", "'slice'", ",", "is_safe", "=", "True", ")", "def", "slice_filter", "(", "value", ",", "arg", ")", ":", "try", ":", "bits", "=", "[", "]", "for", "x", "in", "arg", ".", "split", "(", "':'", ")", ":", "if", "...
returns a slice of the list .
train
false
39,381
def addSymmetricXPaths(outputs, paths, x): for path in paths: addSymmetricXPath(outputs, path, x)
[ "def", "addSymmetricXPaths", "(", "outputs", ",", "paths", ",", "x", ")", ":", "for", "path", "in", "paths", ":", "addSymmetricXPath", "(", "outputs", ",", "path", ",", "x", ")" ]
add x paths outputs to outputs .
train
false
39,382
def get_persistent_graph(request, *args, **kwargs): from open_facebook.api import OpenFacebook if (not request): raise (ValidationError, 'Request is required if you want to use persistent tokens') graph = None require_refresh = False code = request.POST.get('code', request.GET.get('code')) if code: require_refresh = True local_graph = getattr(request, 'facebook', None) if local_graph: graph = local_graph if (not graph): cached_graph_dict = request.session.get('graph_dict') if cached_graph_dict: graph = OpenFacebook() graph.__setstate__(cached_graph_dict) graph._me = None if ((not graph) or require_refresh): graph = get_facebook_graph(request, *args, **kwargs) if ((graph is not None) and graph.access_token): request.session['graph_dict'] = graph.__getstate__() _add_current_user_id(graph, request.user) request.facebook = graph return graph
[ "def", "get_persistent_graph", "(", "request", ",", "*", "args", ",", "**", "kwargs", ")", ":", "from", "open_facebook", ".", "api", "import", "OpenFacebook", "if", "(", "not", "request", ")", ":", "raise", "(", "ValidationError", ",", "'Request is required if...
wraps itself around get facebook graph but stores the graph in the session .
train
false
39,384
def _removeSender(senderkey): _removeBackrefs(senderkey) connections.pop(senderkey, None) senders.pop(senderkey, None)
[ "def", "_removeSender", "(", "senderkey", ")", ":", "_removeBackrefs", "(", "senderkey", ")", "connections", ".", "pop", "(", "senderkey", ",", "None", ")", "senders", ".", "pop", "(", "senderkey", ",", "None", ")" ]
remove senderkey from connections .
train
false
39,387
def getCraftSequence(): return 'chop preface outset mill multiply drill lift flow feed home lash fillet limit unpause alteration export'.split()
[ "def", "getCraftSequence", "(", ")", ":", "return", "'chop preface outset mill multiply drill lift flow feed home lash fillet limit unpause alteration export'", ".", "split", "(", ")" ]
get the cutting craft sequence .
train
false
39,388
def find_vowel(w): for (i, ch) in enumerate(w): if (ch in VOWELS): return i return len(w)
[ "def", "find_vowel", "(", "w", ")", ":", "for", "(", "i", ",", "ch", ")", "in", "enumerate", "(", "w", ")", ":", "if", "(", "ch", "in", "VOWELS", ")", ":", "return", "i", "return", "len", "(", "w", ")" ]
returns the index of the first vowel in the word .
train
false
39,389
def getNewRepository(): return ExportRepository()
[ "def", "getNewRepository", "(", ")", ":", "return", "ExportRepository", "(", ")" ]
get new repository .
train
false
39,390
def test_mapping_basic_permutations(): (x, y) = Rotation2D(90)(1, 2) RS = (Rotation2D | Mapping((1, 0))) (x_prime, y_prime) = RS(90)(1, 2) assert_allclose((x, y), (y_prime, x_prime)) M = (Rotation2D & Scale) m = M(90, 2) (x, y, z) = m(1, 2, 3) MS = (M | Mapping((2, 0, 1))) ms = MS(90, 2) (x_prime, y_prime, z_prime) = ms(1, 2, 3) assert_allclose((x, y, z), (y_prime, z_prime, x_prime))
[ "def", "test_mapping_basic_permutations", "(", ")", ":", "(", "x", ",", "y", ")", "=", "Rotation2D", "(", "90", ")", "(", "1", ",", "2", ")", "RS", "=", "(", "Rotation2D", "|", "Mapping", "(", "(", "1", ",", "0", ")", ")", ")", "(", "x_prime", ...
tests a couple basic examples of the mapping model--specifically examples that merely permute the outputs .
train
false
39,392
@require_POST @login_required @permitted def unfollow_commentable(request, course_id, commentable_id): user = cc.User.from_django_user(request.user) commentable = cc.Commentable.find(commentable_id) user.unfollow(commentable) return JsonResponse({})
[ "@", "require_POST", "@", "login_required", "@", "permitted", "def", "unfollow_commentable", "(", "request", ",", "course_id", ",", "commentable_id", ")", ":", "user", "=", "cc", ".", "User", ".", "from_django_user", "(", "request", ".", "user", ")", "commenta...
given a course id and commentable id stop following commentable ajax only .
train
false
39,393
def _window_evoked(evoked, size): if isinstance(size, (float, int)): lsize = rsize = float(size) else: (lsize, rsize) = size evoked = evoked.copy() sfreq = float(evoked.info['sfreq']) lsize = int((lsize * sfreq)) rsize = int((rsize * sfreq)) lhann = signal.hann((lsize * 2)) rhann = signal.hann((rsize * 2)) window = np.r_[(lhann[:lsize], np.ones(((len(evoked.times) - lsize) - rsize)), rhann[(- rsize):])] evoked.data *= window[None, :] return evoked
[ "def", "_window_evoked", "(", "evoked", ",", "size", ")", ":", "if", "isinstance", "(", "size", ",", "(", "float", ",", "int", ")", ")", ":", "lsize", "=", "rsize", "=", "float", "(", "size", ")", "else", ":", "(", "lsize", ",", "rsize", ")", "="...
window evoked .
train
false
39,394
def get_extension_manager(): global _extension_manager if (not _extension_manager): _extension_manager = ExtensionManager(u'reviewboard.extensions') return _extension_manager
[ "def", "get_extension_manager", "(", ")", ":", "global", "_extension_manager", "if", "(", "not", "_extension_manager", ")", ":", "_extension_manager", "=", "ExtensionManager", "(", "u'reviewboard.extensions'", ")", "return", "_extension_manager" ]
return the extension manager used by review board .
train
false
39,395
def multiclass_logloss(actual, predicted, eps=1e-15): if (len(actual.shape) == 1): actual2 = np.zeros((actual.shape[0], predicted.shape[1])) for (i, val) in enumerate(actual): actual2[(i, val)] = 1 actual = actual2 clip = np.clip(predicted, eps, (1 - eps)) rows = actual.shape[0] vsota = np.sum((actual * np.log(clip))) return (((-1.0) / rows) * vsota)
[ "def", "multiclass_logloss", "(", "actual", ",", "predicted", ",", "eps", "=", "1e-15", ")", ":", "if", "(", "len", "(", "actual", ".", "shape", ")", "==", "1", ")", ":", "actual2", "=", "np", ".", "zeros", "(", "(", "actual", ".", "shape", "[", ...
multi class version of logarithmic loss metric .
train
true
39,396
def _find_sr(session): host = session.host_ref try: tokens = CONF.xenserver.sr_matching_filter.split(':') filter_criteria = tokens[0] filter_pattern = tokens[1] except IndexError: LOG.warning(_LW("Flag sr_matching_filter '%s' does not respect formatting convention"), CONF.xenserver.sr_matching_filter) return None if (filter_criteria == 'other-config'): (key, value) = filter_pattern.split('=', 1) for (sr_ref, sr_rec) in session.get_all_refs_and_recs('SR'): if (not ((key in sr_rec['other_config']) and (sr_rec['other_config'][key] == value))): continue for pbd_ref in sr_rec['PBDs']: pbd_rec = session.get_rec('PBD', pbd_ref) if (pbd_rec and (pbd_rec['host'] == host)): return sr_ref elif ((filter_criteria == 'default-sr') and (filter_pattern == 'true')): pool_ref = session.call_xenapi('pool.get_all')[0] sr_ref = session.call_xenapi('pool.get_default_SR', pool_ref) if sr_ref: return sr_ref LOG.error(_LE("XenAPI is unable to find a Storage Repository to install guest instances on. Please check your configuration (e.g. set a default SR for the pool) and/or configure the flag 'sr_matching_filter'.")) return None
[ "def", "_find_sr", "(", "session", ")", ":", "host", "=", "session", ".", "host_ref", "try", ":", "tokens", "=", "CONF", ".", "xenserver", ".", "sr_matching_filter", ".", "split", "(", "':'", ")", "filter_criteria", "=", "tokens", "[", "0", "]", "filter_...
return the storage repository to hold vm images .
train
false
39,398
def compute_live_variables(cfg, blocks, var_def_map, var_dead_map): block_entry_vars = defaultdict(set) def fix_point_progress(): return tuple(map(len, block_entry_vars.values())) old_point = None new_point = fix_point_progress() while (old_point != new_point): for offset in blocks: avail = (block_entry_vars[offset] | var_def_map[offset]) avail -= var_dead_map[offset] for (succ, _data) in cfg.successors(offset): block_entry_vars[succ] |= avail old_point = new_point new_point = fix_point_progress() return block_entry_vars
[ "def", "compute_live_variables", "(", "cfg", ",", "blocks", ",", "var_def_map", ",", "var_dead_map", ")", ":", "block_entry_vars", "=", "defaultdict", "(", "set", ")", "def", "fix_point_progress", "(", ")", ":", "return", "tuple", "(", "map", "(", "len", ","...
compute the live variables at the beginning of each block and at each yield point .
train
false
39,400
def _tgrep_parens_action(_s, _l, tokens): assert (len(tokens) == 3) assert (tokens[0] == u'(') assert (tokens[2] == u')') return tokens[1]
[ "def", "_tgrep_parens_action", "(", "_s", ",", "_l", ",", "tokens", ")", ":", "assert", "(", "len", "(", "tokens", ")", "==", "3", ")", "assert", "(", "tokens", "[", "0", "]", "==", "u'('", ")", "assert", "(", "tokens", "[", "2", "]", "==", "u')'...
builds a lambda function representing a predicate on a tree node from a parenthetical notation .
train
false
39,401
def ignore_headers(headers=('Range',), debug=False): request = cherrypy.serving.request for name in headers: if (name in request.headers): if debug: cherrypy.log(('Ignoring request header %r' % name), 'TOOLS.IGNORE_HEADERS') del request.headers[name]
[ "def", "ignore_headers", "(", "headers", "=", "(", "'Range'", ",", ")", ",", "debug", "=", "False", ")", ":", "request", "=", "cherrypy", ".", "serving", ".", "request", "for", "name", "in", "headers", ":", "if", "(", "name", "in", "request", ".", "h...
delete request headers whose field names are included in headers .
train
false
39,402
def merged_dict(dict_a, dict_b): dict_a.update(dict_b) return dict_a
[ "def", "merged_dict", "(", "dict_a", ",", "dict_b", ")", ":", "dict_a", ".", "update", "(", "dict_b", ")", "return", "dict_a" ]
merges two dicts and returns output .
train
false
39,404
def print_generator_text(widget, text_view_obj, generator_instance): pass
[ "def", "print_generator_text", "(", "widget", ",", "text_view_obj", ",", "generator_instance", ")", ":", "pass" ]
print the generator name to the textview .
train
false
39,405
def image_member_create(context, values, session=None): memb_ref = models.ImageMember() _image_member_update(context, memb_ref, values, session=session) return _image_member_format(memb_ref)
[ "def", "image_member_create", "(", "context", ",", "values", ",", "session", "=", "None", ")", ":", "memb_ref", "=", "models", ".", "ImageMember", "(", ")", "_image_member_update", "(", "context", ",", "memb_ref", ",", "values", ",", "session", "=", "session...
create an imagemember object .
train
false
39,407
def propagate_exchange(cursor, participant, route, error, amount): route.update_error((error or u'')) new_balance = cursor.one(u'\n UPDATE participants\n SET balance=(balance + %s)\n WHERE id=%s\n RETURNING balance\n ', (amount, participant.id)) if ((amount < 0) and (new_balance < 0)): raise NegativeBalance if hasattr(participant, u'set_attributes'): participant.set_attributes(balance=new_balance)
[ "def", "propagate_exchange", "(", "cursor", ",", "participant", ",", "route", ",", "error", ",", "amount", ")", ":", "route", ".", "update_error", "(", "(", "error", "or", "u''", ")", ")", "new_balance", "=", "cursor", ".", "one", "(", "u'\\n UPDATE...
propagates an exchange's result to the participant's balance and the route's status .
train
false
39,408
def bootstrap_container(name, dist=None, version=None): if (not dist): dist = __grains__['os'].lower() log.debug("nspawn.bootstrap: no dist provided, defaulting to '{0}'".format(dist)) try: return globals()['_bootstrap_{0}'.format(dist)](name, version=version) except KeyError: raise CommandExecutionError('Unsupported distribution "{0}"'.format(dist))
[ "def", "bootstrap_container", "(", "name", ",", "dist", "=", "None", ",", "version", "=", "None", ")", ":", "if", "(", "not", "dist", ")", ":", "dist", "=", "__grains__", "[", "'os'", "]", ".", "lower", "(", ")", "log", ".", "debug", "(", "\"nspawn...
bootstrap a container from package servers .
train
true
39,409
def get_block_device_info(instance, block_device_mapping): from nova.virt import block_device as virt_block_device block_device_info = {'root_device_name': instance.root_device_name, 'ephemerals': virt_block_device.convert_ephemerals(block_device_mapping), 'block_device_mapping': virt_block_device.convert_all_volumes(*block_device_mapping)} swap_list = virt_block_device.convert_swap(block_device_mapping) block_device_info['swap'] = virt_block_device.get_swap(swap_list) return block_device_info
[ "def", "get_block_device_info", "(", "instance", ",", "block_device_mapping", ")", ":", "from", "nova", ".", "virt", "import", "block_device", "as", "virt_block_device", "block_device_info", "=", "{", "'root_device_name'", ":", "instance", ".", "root_device_name", ","...
converts block device mappings for an instance to driver format .
train
false
39,411
@gen.engine def ListFilesAndDirs(store, pattern, callback): result = (yield gen.Task(ListAllCommonPrefixes, store, '/', prefix=PrefixFromPattern(pattern))) callback(result)
[ "@", "gen", ".", "engine", "def", "ListFilesAndDirs", "(", "store", ",", "pattern", ",", "callback", ")", ":", "result", "=", "(", "yield", "gen", ".", "Task", "(", "ListAllCommonPrefixes", ",", "store", ",", "'/'", ",", "prefix", "=", "PrefixFromPattern",...
list all subdirectories and files in a directory .
train
false
39,413
def create_temporary_ca_file(anchor_list): try: (f, fname) = tempfile.mkstemp() for a in anchor_list: s = a.output(fmt='PEM') l = os.write(f, s) os.close(f) except: return None return fname
[ "def", "create_temporary_ca_file", "(", "anchor_list", ")", ":", "try", ":", "(", "f", ",", "fname", ")", "=", "tempfile", ".", "mkstemp", "(", ")", "for", "a", "in", "anchor_list", ":", "s", "=", "a", ".", "output", "(", "fmt", "=", "'PEM'", ")", ...
concatenate all the certificates in anchor_list and write the result to a temporary file created using mkstemp() from the tempfile module .
train
true
39,414
@task def bazaar(): from fabric.api import cd, sudo with cd('/tmp'): sudo('rm -rf *') bzr_wc_source_remote() bzr_wc_source_local() bzr_wc_default_target() bzr_wc_version() bzr_wc_target_exists_no_update() bzr_wc_target_exists_update() bzr_wc_target_exists_version() bzr_wc_target_exists_local_mods_no_force() bzr_wc_target_exists_local_mods_force() bzr_wc_target_exists_plain_no_force() bzr_wc_target_exists_plain_force() bzr_wc_sudo() bzr_wc_sudo_user()
[ "@", "task", "def", "bazaar", "(", ")", ":", "from", "fabric", ".", "api", "import", "cd", ",", "sudo", "with", "cd", "(", "'/tmp'", ")", ":", "sudo", "(", "'rm -rf *'", ")", "bzr_wc_source_remote", "(", ")", "bzr_wc_source_local", "(", ")", "bzr_wc_defa...
test some low level bazaar tools .
train
false
39,415
def interpolate_2d(values, method='pad', axis=0, limit=None, fill_value=None, dtype=None): transf = ((lambda x: x) if (axis == 0) else (lambda x: x.T)) ndim = values.ndim if (values.ndim == 1): if (axis != 0): raise AssertionError('cannot interpolate on a ndim == 1 with axis != 0') values = values.reshape(tuple(((1,) + values.shape))) if (fill_value is None): mask = None else: mask = mask_missing(transf(values), fill_value) method = clean_fill_method(method) if (method == 'pad'): values = transf(pad_2d(transf(values), limit=limit, mask=mask, dtype=dtype)) else: values = transf(backfill_2d(transf(values), limit=limit, mask=mask, dtype=dtype)) if (ndim == 1): values = values[0] return values
[ "def", "interpolate_2d", "(", "values", ",", "method", "=", "'pad'", ",", "axis", "=", "0", ",", "limit", "=", "None", ",", "fill_value", "=", "None", ",", "dtype", "=", "None", ")", ":", "transf", "=", "(", "(", "lambda", "x", ":", "x", ")", "if...
perform an actual interpolation of values .
train
true
39,416
def handler(req): Handler(req).run(gluon.main.wsgibase) return apache.OK
[ "def", "handler", "(", "req", ")", ":", "Handler", "(", "req", ")", ".", "run", "(", "gluon", ".", "main", ".", "wsgibase", ")", "return", "apache", ".", "OK" ]
decorator for setting up a handler .
train
false
39,417
def _salt_configuration_error(filename): raise SaltConfigurationError('Configuration error in {0}'.format(filename))
[ "def", "_salt_configuration_error", "(", "filename", ")", ":", "raise", "SaltConfigurationError", "(", "'Configuration error in {0}'", ".", "format", "(", "filename", ")", ")" ]
raise an error to indicate error in the salt configuration file .
train
false
39,419
def get_collapse_fns(): return {'median': _collapse_to_median, 'first': _collapse_to_first, 'random': _collapse_to_random, 'sum': _collapse_to_sum, 'mean': _collapse_to_mean}
[ "def", "get_collapse_fns", "(", ")", ":", "return", "{", "'median'", ":", "_collapse_to_median", ",", "'first'", ":", "_collapse_to_first", ",", "'random'", ":", "_collapse_to_random", ",", "'sum'", ":", "_collapse_to_sum", ",", "'mean'", ":", "_collapse_to_mean", ...
return lookup of functions that can be used with biom .
train
false
39,420
def bannerParser(banner): xmlfile = None if Backend.isDbms(DBMS.MSSQL): xmlfile = paths.MSSQL_XML elif Backend.isDbms(DBMS.MYSQL): xmlfile = paths.MYSQL_XML elif Backend.isDbms(DBMS.ORACLE): xmlfile = paths.ORACLE_XML elif Backend.isDbms(DBMS.PGSQL): xmlfile = paths.PGSQL_XML if (not xmlfile): return if Backend.isDbms(DBMS.MSSQL): handler = MSSQLBannerHandler(banner, kb.bannerFp) parseXmlFile(xmlfile, handler) handler = FingerprintHandler(banner, kb.bannerFp) parseXmlFile(paths.GENERIC_XML, handler) else: handler = FingerprintHandler(banner, kb.bannerFp) parseXmlFile(xmlfile, handler) parseXmlFile(paths.GENERIC_XML, handler)
[ "def", "bannerParser", "(", "banner", ")", ":", "xmlfile", "=", "None", "if", "Backend", ".", "isDbms", "(", "DBMS", ".", "MSSQL", ")", ":", "xmlfile", "=", "paths", ".", "MSSQL_XML", "elif", "Backend", ".", "isDbms", "(", "DBMS", ".", "MYSQL", ")", ...
this function calls a class to extract information from the given dbms banner based upon the data in xml file .
train
false
39,421
@cmd def test_memleaks(): install() sh(('%s psutil\\tests\\test_memory_leaks.py' % PYTHON))
[ "@", "cmd", "def", "test_memleaks", "(", ")", ":", "install", "(", ")", "sh", "(", "(", "'%s psutil\\\\tests\\\\test_memory_leaks.py'", "%", "PYTHON", ")", ")" ]
run memory leaks tests .
train
false
39,422
def create_panel_section(app, **kwargs): log.debug('Updating in-memory tool panel') app.toolbox.create_section(kwargs)
[ "def", "create_panel_section", "(", "app", ",", "**", "kwargs", ")", ":", "log", ".", "debug", "(", "'Updating in-memory tool panel'", ")", "app", ".", "toolbox", ".", "create_section", "(", "kwargs", ")" ]
updates in memory toolbox dictionary .
train
false
39,423
def parse_object(response, infotype): if (infotype in ('idletime', 'refcount')): return int_or_none(response) return response
[ "def", "parse_object", "(", "response", ",", "infotype", ")", ":", "if", "(", "infotype", "in", "(", "'idletime'", ",", "'refcount'", ")", ")", ":", "return", "int_or_none", "(", "response", ")", "return", "response" ]
parse a string referring to an object .
train
false
39,425
def refs_aggregate(lookup_parts, aggregates): for i in range((len(lookup_parts) + 1)): if (LOOKUP_SEP.join(lookup_parts[0:i]) in aggregates): return True return False
[ "def", "refs_aggregate", "(", "lookup_parts", ",", "aggregates", ")", ":", "for", "i", "in", "range", "(", "(", "len", "(", "lookup_parts", ")", "+", "1", ")", ")", ":", "if", "(", "LOOKUP_SEP", ".", "join", "(", "lookup_parts", "[", "0", ":", "i", ...
a little helper method to check if the lookup_parts contains references to the given aggregates set .
train
false
39,426
def CyclicGroup(n): a = list(range(1, n)) a.append(0) gen = _af_new(a) G = PermutationGroup([gen]) G._is_abelian = True G._is_nilpotent = True G._is_solvable = True G._degree = n G._is_transitive = True G._order = n return G
[ "def", "CyclicGroup", "(", "n", ")", ":", "a", "=", "list", "(", "range", "(", "1", ",", "n", ")", ")", "a", ".", "append", "(", "0", ")", "gen", "=", "_af_new", "(", "a", ")", "G", "=", "PermutationGroup", "(", "[", "gen", "]", ")", "G", "...
generates the cyclic group of order n as a permutation group .
train
false
39,429
def make_anonymous_struct(builder, values, struct_type=None): if (struct_type is None): struct_type = ir.LiteralStructType([v.type for v in values]) struct_val = struct_type(ir.Undefined) for (i, v) in enumerate(values): struct_val = builder.insert_value(struct_val, v, i) return struct_val
[ "def", "make_anonymous_struct", "(", "builder", ",", "values", ",", "struct_type", "=", "None", ")", ":", "if", "(", "struct_type", "is", "None", ")", ":", "struct_type", "=", "ir", ".", "LiteralStructType", "(", "[", "v", ".", "type", "for", "v", "in", ...
create an anonymous struct containing the given llvm *values* .
train
false
39,430
def _triangle2unit(tb, lower=0): if lower: d = tb[0].copy() else: d = tb[(-1)].copy() if lower: return (d, (tb / d)) else: l = _upper2lower(tb) return (d, _lower2upper((l / d)))
[ "def", "_triangle2unit", "(", "tb", ",", "lower", "=", "0", ")", ":", "if", "lower", ":", "d", "=", "tb", "[", "0", "]", ".", "copy", "(", ")", "else", ":", "d", "=", "tb", "[", "(", "-", "1", ")", "]", ".", "copy", "(", ")", "if", "lower...
take a banded triangular matrix and return its diagonal and the unit matrix: the banded triangular matrix with 1s on the diagonal .
train
false
39,431
def _isscalar(x): return (np.isscalar(x) or (hasattr(x, 'shape') and (x.shape == ())))
[ "def", "_isscalar", "(", "x", ")", ":", "return", "(", "np", ".", "isscalar", "(", "x", ")", "or", "(", "hasattr", "(", "x", ",", "'shape'", ")", "and", "(", "x", ".", "shape", "==", "(", ")", ")", ")", ")" ]
check whether x is if a scalar type .
train
false
39,432
def volume_destroy(context, volume_id): return IMPL.volume_destroy(context, volume_id)
[ "def", "volume_destroy", "(", "context", ",", "volume_id", ")", ":", "return", "IMPL", ".", "volume_destroy", "(", "context", ",", "volume_id", ")" ]
destroy the volume or raise if it does not exist .
train
false
39,434
def _get_url(view_name, backend_name, auth_entry=None, redirect_url=None, extra_params=None, url_params=None): url_params = (url_params or {}) url_params['backend'] = backend_name url = reverse(view_name, kwargs=url_params) query_params = OrderedDict() if auth_entry: query_params[AUTH_ENTRY_KEY] = auth_entry if redirect_url: query_params[AUTH_REDIRECT_KEY] = redirect_url if extra_params: query_params.update(extra_params) return u'{url}?{params}'.format(url=url, params=urllib.urlencode(query_params))
[ "def", "_get_url", "(", "view_name", ",", "backend_name", ",", "auth_entry", "=", "None", ",", "redirect_url", "=", "None", ",", "extra_params", "=", "None", ",", "url_params", "=", "None", ")", ":", "url_params", "=", "(", "url_params", "or", "{", "}", ...
creates a url to hook into social auth endpoints .
train
false
39,436
def naglog_result(level, result, *args): prev_formatters = [] for handler in logger.handlers: prev_formatters.append(handler.formatter) handler.setFormatter(nagFormatter) log_fun = getattr(logger, level) if log_fun: log_fun(result) for (index, handler) in enumerate(logger.handlers): handler.setFormatter(prev_formatters[index])
[ "def", "naglog_result", "(", "level", ",", "result", ",", "*", "args", ")", ":", "prev_formatters", "=", "[", "]", "for", "handler", "in", "logger", ".", "handlers", ":", "prev_formatters", ".", "append", "(", "handler", ".", "formatter", ")", "handler", ...
function use for old nag compatibility .
train
false
39,437
def fields_of(obj, primitives_only=False, primitives_and_composites_only=False, allow_caps=False, ignore=set()): r = {} for k in dir(obj): if k.startswith('_'): continue if (k in ignore): continue v = getattr(obj, k) if hasattr(v, '__call__'): continue if ((not allow_caps) and (k.upper() == k)): continue if primitives_only: if (not isinstance(v, _scalar_types)): continue elif primitives_and_composites_only: if (not isinstance(v, (int, long, basestring, float, bool, set, dict, list))): continue r[k] = v return r
[ "def", "fields_of", "(", "obj", ",", "primitives_only", "=", "False", ",", "primitives_and_composites_only", "=", "False", ",", "allow_caps", "=", "False", ",", "ignore", "=", "set", "(", ")", ")", ":", "r", "=", "{", "}", "for", "k", "in", "dir", "(",...
returns key/value pairs of things that seem like public fields of an object .
train
false
39,438
def _roll_random(n): bits = util.bit_length((n - 1)) byte_count = ((bits + 7) // 8) hbyte_mask = (pow(2, (bits % 8)) - 1) while True: x = os.urandom(byte_count) if (hbyte_mask > 0): x = (byte_mask(x[0], hbyte_mask) + x[1:]) num = util.inflate_long(x, 1) if (num < n): break return num
[ "def", "_roll_random", "(", "n", ")", ":", "bits", "=", "util", ".", "bit_length", "(", "(", "n", "-", "1", ")", ")", "byte_count", "=", "(", "(", "bits", "+", "7", ")", "//", "8", ")", "hbyte_mask", "=", "(", "pow", "(", "2", ",", "(", "bits...
returns a random # from 0 to n-1 .
train
true
39,439
def test_bind_completion(qtmodeltester, monkeypatch, stubs, config_stub, key_config_stub): _patch_cmdutils(monkeypatch, stubs, 'qutebrowser.completion.models.miscmodels.cmdutils') config_stub.data['aliases'] = {'rock': 'roll'} key_config_stub.set_bindings_for('normal', {'s': 'stop', 'rr': 'roll', 'ro': 'rock'}) model = miscmodels.BindCompletionModel() qtmodeltester.data_display_may_return_none = True qtmodeltester.check(model) _check_completions(model, {'Commands': [('stop', 'stop qutebrowser', 's'), ('drop', 'drop all user data', ''), ('hide', '', ''), ('roll', 'never gonna give you up', 'rr'), ('rock', "Alias for 'roll'", 'ro')]})
[ "def", "test_bind_completion", "(", "qtmodeltester", ",", "monkeypatch", ",", "stubs", ",", "config_stub", ",", "key_config_stub", ")", ":", "_patch_cmdutils", "(", "monkeypatch", ",", "stubs", ",", "'qutebrowser.completion.models.miscmodels.cmdutils'", ")", "config_stub"...
test the results of keybinding command completion .
train
false
39,440
def is_vcs_installed(repo_type): return bool(which(repo_type))
[ "def", "is_vcs_installed", "(", "repo_type", ")", ":", "return", "bool", "(", "which", "(", "repo_type", ")", ")" ]
check if the version control system for a repo type is installed .
train
false
39,442
def readAudioFile(path): extension = os.path.splitext(path)[1] try: if (extension.lower() == '.wav'): [Fs, x] = wavfile.read(path) elif ((extension.lower() == '.aif') or (extension.lower() == '.aiff')): s = aifc.open(path, 'r') nframes = s.getnframes() strsig = s.readframes(nframes) x = numpy.fromstring(strsig, numpy.short).byteswap() Fs = s.getframerate() else: print 'Error in readAudioFile(): Unknown file type!' return ((-1), (-1)) except IOError: print 'Error: file not found or other I/O error.' return ((-1), (-1)) return (Fs, x)
[ "def", "readAudioFile", "(", "path", ")", ":", "extension", "=", "os", ".", "path", ".", "splitext", "(", "path", ")", "[", "1", "]", "try", ":", "if", "(", "extension", ".", "lower", "(", ")", "==", "'.wav'", ")", ":", "[", "Fs", ",", "x", "]"...
this function returns a numpy array that stores the audio samples of a specified wav or aiff file .
train
false