id_within_dataset
int64
1
55.5k
snippet
stringlengths
19
14.2k
tokens
listlengths
6
1.63k
nl
stringlengths
6
352
split_within_dataset
stringclasses
1 value
is_duplicated
bool
2 classes
12,579
# Open (or return a cached handle to) a log file under settings.LOG_DIR.
# Lazily resolves the module-global _LOGDIR from django settings on first
# use, caches open handles in _LOG_FILE_HANDLES keyed by full path, and
# returns None (after log_trace()) when the file cannot be opened.
# NOTE(review): cached handles are never closed or evicted — confirm intended.
def _open_log_file(filename): global _LOG_FILE_HANDLES, _LOGDIR if (not _LOGDIR): from django.conf import settings _LOGDIR = settings.LOG_DIR filename = os.path.join(_LOGDIR, filename) if (filename in _LOG_FILE_HANDLES): return _LOG_FILE_HANDLES[filename] else: try: filehandle = open(filename, 'a+') _LOG_FILE_HANDLES[filename] = filehandle return filehandle except IOError: log_trace() return None
[ "def", "_open_log_file", "(", "filename", ")", ":", "global", "_LOG_FILE_HANDLES", ",", "_LOGDIR", "if", "(", "not", "_LOGDIR", ")", ":", "from", "django", ".", "conf", "import", "settings", "_LOGDIR", "=", "settings", ".", "LOG_DIR", "filename", "=", "os", ...
helper to open the log file and cache its handle .
train
false
12,580
def assert_not_match(pattern, string, msg=None):
    """Assert that *pattern* does not match anywhere inside *string*.

    Delegates to assert_none() on the re.search() result; *msg* is the
    optional failure message passed straight through.
    """
    found = re.search(pattern, string)
    assert_none(found, msg)
[ "def", "assert_not_match", "(", "pattern", ",", "string", ",", "msg", "=", "None", ")", ":", "assert_none", "(", "re", ".", "search", "(", "pattern", ",", "string", ")", ",", "msg", ")" ]
verify that the pattern does not match the string .
train
false
12,582
def AES_dec(cipher, data):
    """Decrypt *data* with *cipher*.

    Uses the native AES implementation when available; otherwise falls
    back to the pure-python PYAES decrypter, feeding the ciphertext and
    then flushing the final buffered block.
    """
    if HAVE_AES:
        return cipher.decrypt(data)
    decrypter = PYAES.Decrypter(cipher)
    plaintext = decrypter.feed(data)
    # An empty feed() flushes any remaining buffered block.
    plaintext += decrypter.feed()
    return plaintext
[ "def", "AES_dec", "(", "cipher", ",", "data", ")", ":", "if", "HAVE_AES", ":", "return", "cipher", ".", "decrypt", "(", "data", ")", "else", ":", "decrypter", "=", "PYAES", ".", "Decrypter", "(", "cipher", ")", "dec", "=", "decrypter", ".", "feed", "...
decrypt data with the cipher .
train
false
12,583
def compiler_type():
    """Return the distutils compiler type configured for this system.

    Reads any distutils config files, finalizes the 'build' command so
    the effective compiler setting is resolved, and reports the type
    name (e.g. 'unix', 'msvc') of the compiler distutils would create.
    """
    dist = Distribution()
    dist.parse_config_files()
    build_cmd = dist.get_command_obj('build')
    build_cmd.ensure_finalized()
    return new_compiler(compiler=build_cmd.compiler).compiler_type
[ "def", "compiler_type", "(", ")", ":", "dist", "=", "Distribution", "(", ")", "dist", ".", "parse_config_files", "(", ")", "cmd", "=", "dist", ".", "get_command_obj", "(", "'build'", ")", "cmd", ".", "ensure_finalized", "(", ")", "compiler", "=", "new_comp...
gets the compiler type from distutils .
train
false
12,584
# Populate *context* with organization info for *course*: long/short names
# and logo. Prefers the first organization returned by organization_api,
# falling back to the course's display_organization / org code for the
# short name and None for the rest.
def _update_organization_context(context, course): (partner_long_name, organization_logo) = (None, None) partner_short_name = (course.display_organization if course.display_organization else course.org) organizations = organization_api.get_course_organizations(course_id=course.id) if organizations: organization = organizations[0] partner_long_name = organization.get('name', partner_long_name) partner_short_name = organization.get('short_name', partner_short_name) organization_logo = organization.get('logo', None) context['organization_long_name'] = partner_long_name context['organization_short_name'] = partner_short_name context['accomplishment_copy_course_org'] = partner_short_name context['organization_logo'] = organization_logo
[ "def", "_update_organization_context", "(", "context", ",", "course", ")", ":", "(", "partner_long_name", ",", "organization_logo", ")", "=", "(", "None", ",", "None", ")", "partner_short_name", "=", "(", "course", ".", "display_organization", "if", "course", "....
updates context with organization related info .
train
false
12,585
@with_environment
def test_get_home_dir_4():
    """get_home_dir() still works when $HOME is not set."""
    env.pop('HOME', None)
    # Must not raise even with HOME absent from the environment.
    home = path.get_home_dir(False)
[ "@", "with_environment", "def", "test_get_home_dir_4", "(", ")", ":", "if", "(", "'HOME'", "in", "env", ")", ":", "del", "env", "[", "'HOME'", "]", "home", "=", "path", ".", "get_home_dir", "(", "False", ")" ]
get_home_dir() still works if $home is not set .
train
false
12,586
def available_distributions(flocker_source_path):
    """Determine the distributions for which packages can be built.

    Scans the build-targets directory of the flocker source tree for
    child directories containing a Dockerfile and returns the set of
    their basenames.
    """
    build_targets = flocker_source_path.descendant(BUILD_TARGETS_SEGMENTS)
    return {
        child.basename()
        for child in build_targets.children()
        if child.isdir() and child.child('Dockerfile').exists()
    }
[ "def", "available_distributions", "(", "flocker_source_path", ")", ":", "return", "set", "(", "(", "path", ".", "basename", "(", ")", "for", "path", "in", "flocker_source_path", ".", "descendant", "(", "BUILD_TARGETS_SEGMENTS", ")", ".", "children", "(", ")", ...
determine the distributions for which packages can be built .
train
false
12,587
# Jenkins Job Builder 'env-script' property: emits the EnvironmentScript
# plugin XML — the script body, its type (unix-script / power-shell /
# batch-script) and the only-run-on-parent flag.
# Raises InvalidAttributeError for an unrecognized script-type value.
def env_script(registry, xml_parent, data): el = XML.SubElement(xml_parent, 'com.lookout.jenkins.EnvironmentScript') XML.SubElement(el, 'script').text = data.get('script-content', '') valid_script_types = {'unix-script': 'unixScript', 'power-shell': 'powerShell', 'batch-script': 'batchScript'} script_type = data.get('script-type', 'unix-script') if (script_type not in valid_script_types): raise InvalidAttributeError('script-type', script_type, valid_script_types) XML.SubElement(el, 'scriptType').text = valid_script_types[script_type] only_on_parent = str(data.get('only-run-on-parent', False)).lower() XML.SubElement(el, 'onlyRunOnParent').text = only_on_parent
[ "def", "env_script", "(", "registry", ",", "xml_parent", ",", "data", ")", ":", "el", "=", "XML", ".", "SubElement", "(", "xml_parent", ",", "'com.lookout.jenkins.EnvironmentScript'", ")", "XML", ".", "SubElement", "(", "el", ",", "'script'", ")", ".", "text...
yaml: env-script add or override environment variables to the whole build process .
train
false
12,588
def pull_repository(repo, repository_clone_url, ctx_rev):
    """Pull changes from a remote repository into the local one.

    Pulls only up to the given changeset revision, using a configured
    mercurial ui object.
    """
    ui = get_configured_ui()
    commands.pull(ui, repo, source=repository_clone_url, rev=[ctx_rev])
[ "def", "pull_repository", "(", "repo", ",", "repository_clone_url", ",", "ctx_rev", ")", ":", "commands", ".", "pull", "(", "get_configured_ui", "(", ")", ",", "repo", ",", "source", "=", "repository_clone_url", ",", "rev", "=", "[", "ctx_rev", "]", ")" ]
pull changes from a remote repository to a local one .
train
false
12,589
@contextmanager
def capture_registrations():
    """Testing utility that collects user_registered signal payloads.

    Yields a list that receives the data kwargs of every
    user_registered signal fired inside the with-block; the receiver is
    disconnected again when the context exits.
    """
    captured = []

    def _receiver(app, **data):
        captured.append(data)

    user_registered.connect(_receiver)
    try:
        yield captured
    finally:
        user_registered.disconnect(_receiver)
[ "@", "contextmanager", "def", "capture_registrations", "(", ")", ":", "registrations", "=", "[", "]", "def", "_on", "(", "app", ",", "**", "data", ")", ":", "registrations", ".", "append", "(", "data", ")", "user_registered", ".", "connect", "(", "_on", ...
testing utility for capturing registrations .
train
true
12,590
def get_bin_linesep(encoding, linesep):
    """Return *linesep* encoded with *encoding*, without any BOM.

    Encoding with 'utf-16' prepends a 2-byte byte-order mark; that
    prefix is stripped so the result is the bare separator bytes.
    """
    encoded = linesep.encode(encoding)
    # str.encode('utf-16') emits a BOM first; drop those two bytes.
    return encoded[2:] if encoding == 'utf-16' else encoded
[ "def", "get_bin_linesep", "(", "encoding", ",", "linesep", ")", ":", "if", "(", "encoding", "==", "'utf-16'", ")", ":", "return", "linesep", ".", "encode", "(", "'utf-16'", ")", "[", "2", ":", "]", "else", ":", "return", "linesep", ".", "encode", "(", ...
return the line separator encoded with the given encoding , stripping the utf-16 byte-order mark when present .
train
false
12,591
def build_entry(etype, entity, permissions=None, use_nfsv4_acls=False):
    """Build a single ACL entry string.

    Parameters:
        etype: entry type tag (e.g. 'user' or 'group').
        entity: the user/group name the entry applies to.
        permissions: permission string; omitted from the entry when
            falsy (POSIX form only — NFSv4 entries require it).
        use_nfsv4_acls: emit the four-field NFSv4 form
            'etype:entity:permissions:allow' instead of the POSIX form.

    Returns:
        The colon-joined ACL entry string.
    """
    if use_nfsv4_acls:
        # NFSv4 entries always carry a trailing action field.
        return ':'.join([etype, entity, permissions, 'allow'])
    # POSIX form: the permissions field is optional. Using join instead
    # of the original chained '+' concatenation.
    parts = [etype, entity]
    if permissions:
        parts.append(permissions)
    return ':'.join(parts)
[ "def", "build_entry", "(", "etype", ",", "entity", ",", "permissions", "=", "None", ",", "use_nfsv4_acls", "=", "False", ")", ":", "if", "use_nfsv4_acls", ":", "return", "':'", ".", "join", "(", "[", "etype", ",", "entity", ",", "permissions", ",", "'all...
builds and returns an entry string .
train
false
12,594
# Memoize *func* (a stack builder) keyed on the optree's intrinsic key
# plus the backend id. On a cache hit, Tensor entries inside the cached
# stacks are remapped in place to the current optree's tensors via the
# saved index maps, and the cache entry's tensor map is refreshed.
# NOTE(review): the hit branch mutates and returns the cached stacks
# rather than rebuilding — confirm sharing is intended.
def memoize_stacks(func): cache = {} @wraps(func) def memoizer(be, optree): (optree_key, tensor_index_map, index_tensor_map) = optree.intrinsic_key_maps() optree_key = (optree_key, id(be)) if (optree_key in cache): (stacks, cached_tensor_index_map) = cache[optree_key] for stack in stacks: for i in range(len(stack)): if isinstance(stack[i], Tensor): if (stack[i] in cached_tensor_index_map): stack[i] = index_tensor_map[cached_tensor_index_map[stack[i]]] cache[optree_key] = (stacks, tensor_index_map) else: stacks = func(be, optree) cache[optree_key] = (stacks, tensor_index_map) return stacks return memoizer
[ "def", "memoize_stacks", "(", "func", ")", ":", "cache", "=", "{", "}", "@", "wraps", "(", "func", ")", "def", "memoizer", "(", "be", ",", "optree", ")", ":", "(", "optree_key", ",", "tensor_index_map", ",", "index_tensor_map", ")", "=", "optree", ".",...
memoize the stacks using intrinsic_key_maps .
train
false
12,595
# Render the list of objects the request's user is subscribed to.
# Handles '?unsubscribe=<pk>' by removing the user from that object's
# subscribers and re-querying the subscriptions; rebuilds the pagination
# base URL with the 'unsubscribe' parameter stripped.
# NOTE: Python 2 era code (urllib.urlencode, context_instance rendering).
@contextfunction def core_watchlist(context, objects=None, skip_group=False, paginate=False): request = context['request'] profile = request.user.profile if (not objects): objects = profile.subscriptions.all() if ('unsubscribe' in request.GET): for object in objects.filter(pk=request.GET.get('unsubscribe')): object.subscribers.remove(profile) objects = profile.subscriptions.all() pathurl = (request.path + '?') if request.GET: params = request.GET.copy() if ('unsubscribe' in params): del params['unsubscribe'] pathurl += (urllib.urlencode(params) + '&') response_format = 'html' if ('response_format' in context): response_format = context['response_format'] return Markup(render_to_string('core/tags/watchlist', {'objects': objects, 'skip_group': skip_group, 'dopaginate': paginate, 'pathurl': pathurl}, context_instance=RequestContext(request), response_format=response_format))
[ "@", "contextfunction", "def", "core_watchlist", "(", "context", ",", "objects", "=", "None", ",", "skip_group", "=", "False", ",", "paginate", "=", "False", ")", ":", "request", "=", "context", "[", "'request'", "]", "profile", "=", "request", ".", "user"...
print a list of objects a user is subscribed to .
train
false
12,596
def _synchronized(method): def synchronized_wrapper(self, *args): with self._lock: return method(self, *args) return synchronized_wrapper
[ "def", "_synchronized", "(", "method", ")", ":", "def", "synchronized_wrapper", "(", "self", ",", "*", "args", ")", ":", "with", "self", ".", "_lock", ":", "return", "method", "(", "self", ",", "*", "args", ")", "return", "synchronized_wrapper" ]
a decorator that synchronizes the method call with self .
train
false
12,598
def get_modules():
    """Return the list of available module names, or None.

    Queries ``modules list --only-names`` via exec_action() and filters
    out the pseudo-modules 'help', 'usage' and 'version'. Returns None
    when the listing is empty or unavailable.
    """
    module_list = exec_action('modules', 'list', action_parameter='--only-names')
    if not module_list:
        return None
    excluded = ['help', 'usage', 'version']
    return [name for name in module_list if name not in excluded]
[ "def", "get_modules", "(", ")", ":", "modules", "=", "[", "]", "module_list", "=", "exec_action", "(", "'modules'", ",", "'list'", ",", "action_parameter", "=", "'--only-names'", ")", "if", "(", "not", "module_list", ")", ":", "return", "None", "for", "mod...
return a list of the available module names , excluding the help / usage / version pseudo-modules , or none when the listing is unavailable .
train
true
12,599
# Request a fresh sign-in verification-code descriptor from the passport
# endpoint. Returns the decoded JSON dict, or None when the HTTP request
# fails or the (gbk-encoded) payload is not valid JSON.
# *vcodetype* is the value returned by the earlier check_login() call.
def refresh_signin_vcode(cookie, tokens, vcodetype): url = ''.join([const.PASSPORT_BASE, 'v2/?reggetcodestr', '&token=', tokens['token'], '&tpl=pp&apiver=v3', '&tt=', util.timestamp(), '&fr=ligin', '&vcodetype=', encoder.encode_uri(vcodetype)]) headers = {'Cookie': cookie.header_output(), 'Referer': const.REFERER} logger.debug(('refresh vcode url: %s' % url)) req = net.urlopen(url, headers=headers) if req: try: data = req.data.decode('gbk') logger.debug(('refresh vcode: %s' % data)) return json.loads(data) except ValueError: logger.error(traceback.format_exc()) return None
[ "def", "refresh_signin_vcode", "(", "cookie", ",", "tokens", ",", "vcodetype", ")", ":", "url", "=", "''", ".", "join", "(", "[", "const", ".", "PASSPORT_BASE", ",", "'v2/?reggetcodestr'", ",", "'&token='", ",", "tokens", "[", "'token'", "]", ",", "'&tpl=p...
vcodetype - 在调用check_login()时返回的vcodetype .
train
true
12,600
def propagate_changes(session):
    """Mark a Message's thread dirty when propagated attributes change.

    Inspects every dirty Message in the session; if any attribute named
    in its ``propagated_attributes`` has pending changes, the owning
    thread (when present) is flagged dirty.
    """
    from inbox.models.message import Message
    for obj in session.dirty:
        if not isinstance(obj, Message):
            continue
        state = inspect(obj)
        changed = any(
            getattr(state.attrs, attr).history.has_changes()
            for attr in obj.propagated_attributes)
        if changed and obj.thread:
            obj.thread.dirty = True
[ "def", "propagate_changes", "(", "session", ")", ":", "from", "inbox", ".", "models", ".", "message", "import", "Message", "for", "obj", "in", "session", ".", "dirty", ":", "if", "isinstance", "(", "obj", ",", "Message", ")", ":", "obj_state", "=", "insp...
mark an objects related object as dirty when certain attributes of the object change .
train
false
12,601
def lcg(x, length=16):
    """Linear congruential generator.

    Produces *length* pseudo-random bytes from seed *x* using the
    MSVC-style constants (214013, 2531011) with the state masked to
    31 bits; each output byte is bits 16-23 of the state. A zero seed
    yields all-zero bytes.
    """
    if x == 0:
        return bytes(length)
    state = x
    result = bytearray()
    for _ in range(length):
        state = (state * 214013 + 2531011) & 0x7FFFFFFF
        result.append((state >> 16) & 0xFF)
    return bytes(result)
[ "def", "lcg", "(", "x", ",", "length", "=", "16", ")", ":", "if", "(", "x", "==", "0", ")", ":", "return", "bytes", "(", "length", ")", "out", "=", "bytearray", "(", "length", ")", "for", "i", "in", "range", "(", "length", ")", ":", "x", "=",...
linear congruential generator .
train
false
12,603
def generate_function(info, method=False):
    """Create a python callable for a GIFunctionInfo instance.

    Tries each available backend in turn and returns the first
    successful (truthy) result; otherwise raises NotImplementedError
    aggregating every backend's failure traceback.
    """
    assert isinstance(info, GIFunctionInfo)
    arg_infos = list(info.get_args())
    arg_types = [arg.get_type() for arg in arg_infos]
    return_type = info.get_return_type()

    func = None
    failures = []
    for backend in list_backends():
        try:
            func = _generate_function(backend(), info, arg_infos,
                                      arg_types, return_type, method)
        except NotImplementedError:
            failures.append('%s: %s' % (backend.NAME, traceback.format_exc()))
        else:
            # First backend that doesn't raise wins.
            break
    if func:
        return func
    raise NotImplementedError('\n'.join(failures))
[ "def", "generate_function", "(", "info", ",", "method", "=", "False", ")", ":", "assert", "isinstance", "(", "info", ",", "GIFunctionInfo", ")", "arg_infos", "=", "list", "(", "info", ".", "get_args", "(", ")", ")", "arg_types", "=", "[", "a", ".", "ge...
creates a python callable for a gifunctioninfo instance .
train
true
12,604
def clear_lock(backend=None, remote=None):
    """Clear fileserver update locks.

    Returns a dict with 'cleared' and/or 'errors' keys, or the string
    'No locks were removed' when nothing matched.
    """
    fileserver = salt.fileserver.Fileserver(__opts__)
    cleared, errors = fileserver.clear_lock(back=backend, remote=remote)
    result = {}
    if cleared:
        result['cleared'] = cleared
    if errors:
        result['errors'] = errors
    return result if result else 'No locks were removed'
[ "def", "clear_lock", "(", "backend", "=", "None", ",", "remote", "=", "None", ")", ":", "fileserver", "=", "salt", ".", "fileserver", ".", "Fileserver", "(", "__opts__", ")", "(", "cleared", ",", "errors", ")", "=", "fileserver", ".", "clear_lock", "(", ...
function to allow non-fileserver functions to clear update locks clear_func a function reference .
train
true
12,605
# Store a dask DataFrame to HDF5, one task per partition.
# Supports at most one '*' wildcard in each of *path* and *key* to fan
# out over files / dataset nodes (expanded via name_function per
# partition). Serializes writes with a lock when several partitions
# share a file or node, and chains tasks via _link so single-file
# appends stay ordered. Only the 'table' format is supported. Returns
# the computed result, or a list of Delayed values when compute=False.
def to_hdf(df, path, key, mode='a', append=False, get=None, name_function=None, compute=True, lock=None, dask_kwargs={}, **kwargs): name = ('to-hdf-' + uuid.uuid1().hex) pd_to_hdf = getattr(df._partition_type, 'to_hdf') single_file = True single_node = True if isinstance(path, str): if ((path.count('*') + key.count('*')) > 1): raise ValueError('A maximum of one asterisk is accepted in file path and dataset key') fmt_obj = (lambda path, i_name: path.replace('*', i_name)) if ('*' in path): single_file = False else: if (key.count('*') > 1): raise ValueError('A maximum of one asterisk is accepted in dataset key') fmt_obj = (lambda path, _: path) if ('*' in key): single_node = False if (('format' in kwargs) and (kwargs['format'] != 'table')): raise ValueError("Dask only support 'table' format in hdf files.") if (mode not in ('a', 'w', 'r+')): raise ValueError("Mode must be one of 'a', 'w' or 'r+'") if (name_function is None): name_function = build_name_function((df.npartitions - 1)) if (not (single_file and single_node)): formatted_names = [name_function(i) for i in range(df.npartitions)] if (formatted_names != sorted(formatted_names)): warn('To preserve order between partitions name_function must preserve the order of its input') if ((get is None) and ('get' not in _globals) and single_node and single_file): get = get_sync _actual_get = effective_get(get, df) if (lock is None): if (not single_node): lock = True elif ((not single_file) and (_actual_get is not multiprocessing.get)): lock = True else: lock = False if lock: lock = get_scheduler_lock(get, df) kwargs.update({'format': 'table', 'mode': mode, 'append': append}) dsk = dict() i_name = name_function(0) dsk[(name, 0)] = (_pd_to_hdf, pd_to_hdf, lock, [(df._name, 0), fmt_obj(path, i_name), key.replace('*', i_name)], kwargs) kwargs2 = kwargs.copy() if single_file: kwargs2['mode'] = 'a' if single_node: kwargs2['append'] = True for i in range(1, df.npartitions): i_name = name_function(i) task = (_pd_to_hdf, pd_to_hdf, 
lock, [(df._name, i), fmt_obj(path, i_name), key.replace('*', i_name)], kwargs2) if single_file: link_dep = ((i - 1) if single_node else 0) task = (_link, (name, link_dep), task) dsk[(name, i)] = task dsk = merge(df.dask, dsk) if (single_file and single_node): keys = [(name, (df.npartitions - 1))] else: keys = [(name, i) for i in range(df.npartitions)] if compute: return DataFrame._get(dsk, keys, get=get, **dask_kwargs) else: return delayed([Delayed(k, [dsk]) for k in keys])
[ "def", "to_hdf", "(", "df", ",", "path", ",", "key", ",", "mode", "=", "'a'", ",", "append", "=", "False", ",", "get", "=", "None", ",", "name_function", "=", "None", ",", "compute", "=", "True", ",", "lock", "=", "None", ",", "dask_kwargs", "=", ...
store this object .
train
false
12,606
def print_progression(percent, width=50, delimiters=['[', ']'], symbol='#'):
    """Render an in-place progress bar on stdout.

    percent: completion value between 0 and 100.
    width: bar width in characters.
    delimiters: two-element [open, close] pair drawn around the bar.
    symbol: character used for the filled portion.
    """
    filled = int(percent / 100.0 * width)
    bar = '{}{}{}{} '.format(delimiters[0], symbol * filled,
                             ' ' * (width - filled), delimiters[1])
    # '\r' rewinds to the start of the line so successive calls overwrite.
    sys.stdout.write('\r' + bar + str(percent) + '%')
    sys.stdout.flush()
[ "def", "print_progression", "(", "percent", ",", "width", "=", "50", ",", "delimiters", "=", "[", "'['", ",", "']'", "]", ",", "symbol", "=", "'#'", ")", ":", "n_symbols", "=", "int", "(", "(", "(", "percent", "/", "100.0", ")", "*", "width", ")", ...
prints a progress bar to the command line parameters percent : float completion value between 0 and 100 width : int .
train
false
12,607
def network_set_host(context, network_id, host_id):
    """Safely set the host for a network (delegates to the DB IMPL)."""
    result = IMPL.network_set_host(context, network_id, host_id)
    return result
[ "def", "network_set_host", "(", "context", ",", "network_id", ",", "host_id", ")", ":", "return", "IMPL", ".", "network_set_host", "(", "context", ",", "network_id", ",", "host_id", ")" ]
safely set the host for network .
train
false
12,608
# Return the cached PNG for an embedded graphing query, regenerating it
# when the cache is older than *max_age* minutes. The raw UPDATE on
# refresh_time acts as an atomic claim so that only one concurrent
# request rebuilds the plot (rowcount == 0 means someone else holds it).
def handle_plot_request(id, max_age): model = models.EmbeddedGraphingQuery.objects.get(id=id) now = datetime.datetime.now() update_time = (model.last_updated + datetime.timedelta(minutes=int(max_age))) if (now > update_time): cursor = django.db.connection.cursor() query = '\n UPDATE embedded_graphing_queries\n SET refresh_time = NOW()\n WHERE id = %s AND (\n refresh_time IS NULL OR\n refresh_time + INTERVAL %s MINUTE < NOW()\n )\n ' cursor.execute(query, (id, _cache_timeout)) if cursor.rowcount: model.cached_png = create_embedded_plot(model, now.ctime()) model.last_updated = now model.refresh_time = None model.save() return model.cached_png
[ "def", "handle_plot_request", "(", "id", ",", "max_age", ")", ":", "model", "=", "models", ".", "EmbeddedGraphingQuery", ".", "objects", ".", "get", "(", "id", "=", "id", ")", "now", "=", "datetime", ".", "datetime", ".", "now", "(", ")", "update_time", ...
given the embedding id of a graph .
train
false
12,609
# Add collar and shaft geometry: extrudes a polygonal collar profile at
# the shaft rim radius into *positives*, adds the shaft solid, then
# drills a teardrop keyway through the collar at its mid-height
# (faceWidth + half the collar length) into *negatives*.
def addCollarShaftSetDerivation(collarDerivation, collarLength, derivation, elementNode, negatives, positives): collarSides = evaluate.getSidesMinimumThreeBasedOnPrecision(elementNode, derivation.shaftRimRadius) collarProfile = euclidean.getComplexPolygon(complex(), derivation.shaftRimRadius, collarSides) vector3CollarProfile = euclidean.getVector3Path(collarProfile) extrude.addPositives(collarDerivation, [vector3CollarProfile], positives) addShaft(derivation, negatives, positives) drillZ = (derivation.faceWidth + (0.5 * collarLength)) drillEnd = Vector3(0.0, derivation.shaftRimRadius, drillZ) drillStart = Vector3(0.0, 0.0, drillZ) teardrop.addNegativesByRadius(elementNode, drillEnd, negatives, derivation.keywayRadius, drillStart)
[ "def", "addCollarShaftSetDerivation", "(", "collarDerivation", ",", "collarLength", ",", "derivation", ",", "elementNode", ",", "negatives", ",", "positives", ")", ":", "collarSides", "=", "evaluate", ".", "getSidesMinimumThreeBasedOnPrecision", "(", "elementNode", ",",...
add collar and shaft .
train
false
12,610
def docker_plugin_main():
    """Script entry point that runs the Docker plugin."""
    runner = FlockerScriptRunner(script=DockerPluginScript(),
                                 options=DockerPluginOptions())
    return runner.main()
[ "def", "docker_plugin_main", "(", ")", ":", "return", "FlockerScriptRunner", "(", "script", "=", "DockerPluginScript", "(", ")", ",", "options", "=", "DockerPluginOptions", "(", ")", ")", ".", "main", "(", ")" ]
script entry point that runs the docker plugin .
train
false
12,611
def _count_jinja2_blocks(token, cur_depth, open_token, close_token): num_open = token.count(open_token) num_close = token.count(close_token) if (num_open != num_close): cur_depth += (num_open - num_close) if (cur_depth < 0): cur_depth = 0 return cur_depth
[ "def", "_count_jinja2_blocks", "(", "token", ",", "cur_depth", ",", "open_token", ",", "close_token", ")", ":", "num_open", "=", "token", ".", "count", "(", "open_token", ")", "num_close", "=", "token", ".", "count", "(", "close_token", ")", "if", "(", "nu...
this function counts the number of opening/closing blocks for a given opening/closing type and adjusts the current depth for that block based on the difference .
train
false
12,612
@contextlib.contextmanager
def defer_cleanup():
    """Temporarily disable memory deallocation for the with-block.

    Deallocations queued while the block runs are deferred until the
    context's deallocation tracker is re-enabled on exit.
    """
    deallocations = current_context().deallocations
    with deallocations.disable():
        yield
[ "@", "contextlib", ".", "contextmanager", "def", "defer_cleanup", "(", ")", ":", "deallocs", "=", "current_context", "(", ")", ".", "deallocations", "with", "deallocs", ".", "disable", "(", ")", ":", "(", "yield", ")" ]
temporarily disable memory deallocation .
train
false
12,613
def mixture_rvs(prob, size, dist, kwargs=None):
    """Sample from a mixture of distributions.

    prob: mixture weights, one per distribution (must sum to 1).
    size: number of variates to draw.
    dist: sequence of distribution objects exposing ``.rvs()``.
    kwargs: optional per-component dicts with 'loc', 'scale', 'args'.

    Raises ValueError on mismatched lengths or weights not summing to 1.
    Returns an ndarray of *size* draws.
    """
    n_components = len(prob)
    if n_components != len(dist):
        raise ValueError('You must provide as many probabilities as distributions')
    if not np.allclose(np.sum(prob), 1):
        raise ValueError('prob does not sum to 1')
    if kwargs is None:
        kwargs = ({},) * n_components
    # Boolean index matrix assigning each draw to one component.
    idx = _make_index(prob, size)
    sample = np.empty(size)
    for i in range(n_components):
        mask = idx[..., i]
        opts = kwargs[i]
        sample[mask] = dist[i].rvs(
            *opts.get('args', ()),
            **dict(loc=opts.get('loc', 0),
                   scale=opts.get('scale', 1),
                   size=mask.sum()))
    return sample
[ "def", "mixture_rvs", "(", "prob", ",", "size", ",", "dist", ",", "kwargs", "=", "None", ")", ":", "if", "(", "len", "(", "prob", ")", "!=", "len", "(", "dist", ")", ")", ":", "raise", "ValueError", "(", "'You must provide as many probabilities as distribu...
sample from a mixture of distributions .
train
false
12,614
@pytest.mark.parametrize('data', ((('a', 'b'), ('c', 'd')), (('c', 'd'), ('a', 'b')), (('a', 'b'), ('c', 'd'), ('e', 'f'))))
def test_data_argument_accepts_tuples(data):
    """Ensure the data argument accepts tuples of strings and encodes them."""
    prepared = PreparedRequest()
    prepared.prepare(method='GET', url='http://www.example.com',
                     data=data, hooks=default_hooks())
    assert prepared.body == urlencode(data)
[ "@", "pytest", ".", "mark", ".", "parametrize", "(", "'data'", ",", "(", "(", "(", "'a'", ",", "'b'", ")", ",", "(", "'c'", ",", "'d'", ")", ")", ",", "(", "(", "'c'", ",", "'d'", ")", ",", "(", "'a'", ",", "'b'", ")", ")", ",", "(", "(",...
ensure that the data argument will accept tuples of strings and properly encode them .
train
false
12,615
@_ConfigurableFilter(executable='CLOSURE_COMPILER_EXECUTABLE')
def closure_compiler(infile, executable='closure-compiler'):
    """Run closure-compiler on a file, rewriting it in place."""
    command = '{} --warning_level QUIET --js %1 --js_output_file %2'.format(executable)
    return runinplace(command, infile)
[ "@", "_ConfigurableFilter", "(", "executable", "=", "'CLOSURE_COMPILER_EXECUTABLE'", ")", "def", "closure_compiler", "(", "infile", ",", "executable", "=", "'closure-compiler'", ")", ":", "return", "runinplace", "(", "'{} --warning_level QUIET --js %1 --js_output_file %2'", ...
run closure-compiler on a file .
train
false
12,616
# Return *loop* with roughly half of its points removed: keeps every
# point whose index parity matches *remainder* (plus the wrap-around
# endpoint when remainder == 1), and any other point that strays outside
# the channel of width radius * 0.01. Loops shorter than 2 points are
# returned unchanged. Python 2 code (xrange).
def getHalfSimplifiedLoop(loop, radius, remainder): if (len(loop) < 2): return loop channelRadius = (radius * 0.01) simplified = [] addIndex = 0 if (remainder == 1): addIndex = (len(loop) - 1) for pointIndex in xrange(len(loop)): point = loop[pointIndex] if (((pointIndex % 2) == remainder) or (pointIndex == addIndex)): simplified.append(point) elif (not isWithinChannel(channelRadius, pointIndex, loop)): simplified.append(point) return simplified
[ "def", "getHalfSimplifiedLoop", "(", "loop", ",", "radius", ",", "remainder", ")", ":", "if", "(", "len", "(", "loop", ")", "<", "2", ")", ":", "return", "loop", "channelRadius", "=", "(", "radius", "*", "0.01", ")", "simplified", "=", "[", "]", "add...
get the loop with half of the points inside the channel removed .
train
false
12,618
# Throttle helper: given the virtual clock *running_time* (milliseconds)
# and a per-second *max_rate*, eventlet.sleep()s when the caller is ahead
# of schedule and returns the advanced running_time. *rate_buffer* is how
# many seconds of burst are forgiven after an idle period (the clock is
# reset to now). Non-positive max_rate or incr_by disables limiting.
def ratelimit_sleep(running_time, max_rate, incr_by=1, rate_buffer=5): if ((max_rate <= 0) or (incr_by <= 0)): return running_time clock_accuracy = 1000.0 now = (time.time() * clock_accuracy) time_per_request = (clock_accuracy * (float(incr_by) / max_rate)) if ((now - running_time) > (rate_buffer * clock_accuracy)): running_time = now elif ((running_time - now) > time_per_request): eventlet.sleep(((running_time - now) / clock_accuracy)) return (running_time + time_per_request)
[ "def", "ratelimit_sleep", "(", "running_time", ",", "max_rate", ",", "incr_by", "=", "1", ",", "rate_buffer", "=", "5", ")", ":", "if", "(", "(", "max_rate", "<=", "0", ")", "or", "(", "incr_by", "<=", "0", ")", ")", ":", "return", "running_time", "c...
will eventlet.sleep() for the appropriate time so that the max_rate is never exceeded .
train
false
12,619
# Start (or re-initialize) the global pyo audio server and boot it.
# An already-running server is stopped, shut down and reinit'd in place;
# on macOS a tiny silent table is played first, presumably to work
# around an audio startup glitch — TODO confirm.
# NOTE(review): mixes bare `sleep` and `time.sleep` — confirm both names
# are imported in the original module. Raises ValueError for rate < 16000.
def pyo_init(rate=44100, nchnls=1, buffersize=32, duplex=1): global pyo_server if (rate < 16000): raise ValueError('sample rate must be 16000 or higher') if hasattr(pyo_server, 'shutdown'): pyo_server.stop() sleep(0.25) pyo_server.shutdown() sleep(0.25) pyo_server.reinit(sr=rate, nchnls=nchnls, buffersize=buffersize, duplex=duplex) else: pyo_server = pyo.Server(sr=rate, nchnls=nchnls, buffersize=buffersize, duplex=duplex) pyo_server.boot().start() if (sys.platform == 'darwin'): z2 = np.zeros(2) _sndTable = pyo.DataTable(size=2, init=z2.T.tolist(), chnls=nchnls) _snd = pyo.TableRead(_sndTable, freq=rate, mul=0) _snd.play() time.sleep(0.51)
[ "def", "pyo_init", "(", "rate", "=", "44100", ",", "nchnls", "=", "1", ",", "buffersize", "=", "32", ",", "duplex", "=", "1", ")", ":", "global", "pyo_server", "if", "(", "rate", "<", "16000", ")", ":", "raise", "ValueError", "(", "'sample rate must be...
start and boot a global pyo server .
train
false
12,620
def trim_features(value):
    """Remove *value* from the FEATURES make variable (via trim_var)."""
    return trim_var('FEATURES', value)
[ "def", "trim_features", "(", "value", ")", ":", "return", "trim_var", "(", "'FEATURES'", ",", "value", ")" ]
remove a value from features variable in the make .
train
false
12,621
@command('shuffle')
def shuffle_fn():
    """Shuffle the displayed items and refresh the song list view."""
    random.shuffle(g.model.songs)
    g.message = c.y + 'Items shuffled' + c.w
    g.content = content.generate_songlist_display()
[ "@", "command", "(", "'shuffle'", ")", "def", "shuffle_fn", "(", ")", ":", "random", ".", "shuffle", "(", "g", ".", "model", ".", "songs", ")", "g", ".", "message", "=", "(", "(", "c", ".", "y", "+", "'Items shuffled'", ")", "+", "c", ".", "w", ...
shuffle displayed items .
train
false
12,622
@waffle_flag('testing-autovouch-views')
@allow_unvouched
@never_cache
def unvouch(request, username):
    """Remove all vouches received by *username* (testing view)."""
    profile = get_object_or_404(UserProfile, user__username=username)
    profile.vouches_received.all().delete()
    messages.success(request, _('Successfully unvouched user.'))
    return redirect('phonebook:profile_view', profile.user.username)
[ "@", "waffle_flag", "(", "'testing-autovouch-views'", ")", "@", "allow_unvouched", "@", "never_cache", "def", "unvouch", "(", "request", ",", "username", ")", ":", "profile", "=", "get_object_or_404", "(", "UserProfile", ",", "user__username", "=", "username", ")"...
automatically remove all vouches from username .
train
false
12,623
def installed_features(image=None):
    """List the features enabled on the system.

    NOTE(review): the *image* argument is accepted for API symmetry but
    is currently ignored — _get_components always inspects the running
    system; confirm whether offline-image support is still planned.
    """
    return _get_components('Feature Name', 'Features', 'Enabled')
[ "def", "installed_features", "(", "image", "=", "None", ")", ":", "return", "_get_components", "(", "'Feature Name'", ",", "'Features'", ",", "'Enabled'", ")" ]
list the features installed on the system args: image : the path to the root directory of an offline windows image .
train
false
12,624
# Restrict project_theme_project.theme_id choices to themes that are
# either unmapped to any sector or mapped to one of *sector_ids*,
# using a left join over the theme<->sector link table. An empty/None
# sector_ids keeps only the unmapped themes.
def set_theme_requires(sector_ids): ttable = s3db.project_theme tstable = s3db.project_theme_sector rows = db().select(ttable.id, tstable.sector_id, left=tstable.on((ttable.id == tstable.theme_id))) sector_ids = (sector_ids or []) theme_ids = [row.project_theme.id for row in rows if ((not row.project_theme_sector.sector_id) or (row.project_theme_sector.sector_id in sector_ids))] table = s3db.project_theme_project field = table.theme_id field.requires = IS_EMPTY_OR(IS_ONE_OF(db, 'project_theme.id', field.represent, filterby='id', filter_opts=theme_ids, sort=True))
[ "def", "set_theme_requires", "(", "sector_ids", ")", ":", "ttable", "=", "s3db", ".", "project_theme", "tstable", "=", "s3db", ".", "project_theme_sector", "rows", "=", "db", "(", ")", ".", "select", "(", "ttable", ".", "id", ",", "tstable", ".", "sector_i...
filters the theme_id based on the sector_id .
train
false
12,626
# Wrapper around r.type.c_sync(): emits C code that, on success (failure
# flag unset), runs the type's sync snippet and then swaps py_<name>
# into slot 0 of storage_<name> with correct refcounting (incref the new
# object before decref'ing the old one).
def get_c_sync(r, name, sub): return ('\n if (!%(failure_var)s) {\n %(sync)s\n PyObject* old = PyList_GET_ITEM(storage_%(name)s, 0);\n {Py_XINCREF(py_%(name)s);}\n PyList_SET_ITEM(storage_%(name)s, 0, py_%(name)s);\n {Py_XDECREF(old);}\n }\n ' % dict(sync=r.type.c_sync(name, sub), name=name, **sub))
[ "def", "get_c_sync", "(", "r", ",", "name", ",", "sub", ")", ":", "return", "(", "'\\n if (!%(failure_var)s) {\\n %(sync)s\\n PyObject* old = PyList_GET_ITEM(storage_%(name)s, 0);\\n {Py_XINCREF(py_%(name)s);}\\n PyList_SET_ITEM(storage_%(name)s, 0, py_%(name)s);\\n ...
wrapper around c_sync that syncs py_name with storage .
train
false
12,627
def _verify_centralizer(group, arg, centr=None):
    """Verify a centralizer inside *group* against the naive computation.

    Computes group.centralizer(arg) when *centr* is not supplied, then
    compares its Dimino-generated element list against the brute-force
    centralizer list.
    """
    if centr is None:
        centr = group.centralizer(arg)
    fast_list = list(centr.generate_dimino(af=True))
    naive_list = _naive_list_centralizer(group, arg, af=True)
    return _cmp_perm_lists(fast_list, naive_list)
[ "def", "_verify_centralizer", "(", "group", ",", "arg", ",", "centr", "=", "None", ")", ":", "if", "(", "centr", "is", "None", ")", ":", "centr", "=", "group", ".", "centralizer", "(", "arg", ")", "centr_list", "=", "list", "(", "centr", ".", "genera...
verify the centralizer of a group/set/element inside another group .
train
false
12,628
def validate_phone_number(value):
    """Normalize and validate a phone number in international format.

    Spaces are stripped and a leading '00' prefix is rewritten to '+';
    the result must then be '+' followed by 5-15 digits.

    Raises ValidationError when the normalized value does not match;
    returns the normalized number otherwise.
    """
    normalized = re.sub(r'^00', '+', value.replace(' ', ''))
    if re.match(r'^\+\d{5,15}$', normalized) is None:
        raise ValidationError(_('Please enter a valid phone number in international format (e.g. +1 555 555 5555)'))
    return normalized
[ "def", "validate_phone_number", "(", "value", ")", ":", "value", "=", "value", ".", "replace", "(", "' '", ",", "''", ")", "value", "=", "re", ".", "sub", "(", "'^00'", ",", "'+'", ",", "value", ")", "pattern", "=", "re", ".", "compile", "(", "'^\\...
validate that a phone number is in international format .
train
false
12,629
def normalize_actions(actions):
    """Convert old-style tuple actions to new-style dict actions.

    Entries that are already dicts pass through untouched; anything
    else is unpacked into expand_action_tuple().
    """
    return [
        action if isinstance(action, dict) else expand_action_tuple(*action)
        for action in actions
    ]
[ "def", "normalize_actions", "(", "actions", ")", ":", "result", "=", "[", "]", "for", "v", "in", "actions", ":", "if", "(", "not", "isinstance", "(", "v", ",", "dict", ")", ")", ":", "v", "=", "expand_action_tuple", "(", "*", "v", ")", "result", "....
convert old-style tuple actions to new-style dicts .
train
false
12,630
def get_response(args, config_dir): ssl_version = None if args.ssl_version: ssl_version = SSL_VERSION_ARG_MAPPING[args.ssl_version] requests_session = get_requests_session(ssl_version) requests_session.max_redirects = args.max_redirects if ((not args.session) and (not args.session_read_only)): kwargs = get_requests_kwargs(args) if args.debug: dump_request(kwargs) response = requests_session.request(**kwargs) else: response = sessions.get_response(requests_session=requests_session, args=args, config_dir=config_dir, session_name=(args.session or args.session_read_only), read_only=bool(args.session_read_only)) return response
[ "def", "get_response", "(", "args", ",", "config_dir", ")", ":", "ssl_version", "=", "None", "if", "args", ".", "ssl_version", ":", "ssl_version", "=", "SSL_VERSION_ARG_MAPPING", "[", "args", ".", "ssl_version", "]", "requests_session", "=", "get_requests_session"...
read a serialized response message from a socket .
train
false
12,631
def define_categories(force=False): global CFG, categories try: for cat in CFG['categories']: ConfigCat(cat, CFG['categories'][cat]) except KeyError: pass
[ "def", "define_categories", "(", "force", "=", "False", ")", ":", "global", "CFG", ",", "categories", "try", ":", "for", "cat", "in", "CFG", "[", "'categories'", "]", ":", "ConfigCat", "(", "cat", ",", "CFG", "[", "'categories'", "]", "[", "cat", "]", ...
define categories listed in the setup file return a list of configcat instances .
train
false
12,632
def status_autostart(name): return (not os.path.exists(os.path.join(_service_path(name), 'down')))
[ "def", "status_autostart", "(", "name", ")", ":", "return", "(", "not", "os", ".", "path", ".", "exists", "(", "os", ".", "path", ".", "join", "(", "_service_path", "(", "name", ")", ",", "'down'", ")", ")", ")" ]
return true if service <name> is autostarted by sv nb: return false if the service is not enabled .
train
true
12,633
def print_max(x, y): x = int(x) y = int(y) if (x > y): print (x, 'is maximum') else: print (y, 'is maximum')
[ "def", "print_max", "(", "x", ",", "y", ")", ":", "x", "=", "int", "(", "x", ")", "y", "=", "int", "(", "y", ")", "if", "(", "x", ">", "y", ")", ":", "print", "(", "x", ",", "'is maximum'", ")", "else", ":", "print", "(", "y", ",", "'is m...
prints the maximum of two numbers .
train
false
12,634
def carmichael(n): return carmichael_of_factorized(factorization(n))
[ "def", "carmichael", "(", "n", ")", ":", "return", "carmichael_of_factorized", "(", "factorization", "(", "n", ")", ")" ]
return carmichael function of n .
train
false
12,635
def validate_safedir(root, value, default): if (sabnzbd.WIN32 and value and (len(sabnzbd.misc.real_path(root, value)) >= MAX_WIN_DFOLDER)): return ((T('Error: Path length should be below %s.') % MAX_WIN_DFOLDER), None) if sabnzbd.empty_queues(): return validate_no_unc(root, value, default) else: return (T('Error: Queue not empty, cannot change folder.'), None)
[ "def", "validate_safedir", "(", "root", ",", "value", ",", "default", ")", ":", "if", "(", "sabnzbd", ".", "WIN32", "and", "value", "and", "(", "len", "(", "sabnzbd", ".", "misc", ".", "real_path", "(", "root", ",", "value", ")", ")", ">=", "MAX_WIN_...
allow only when queues are empty and no unc on windows path should be small .
train
false
12,636
def find_parent_pid(pid): try: ppid = execute(['ps', '-o', 'ppid=', pid], log_fail_as_error=False) except ProcessExecutionError as e: with excutils.save_and_reraise_exception() as ctxt: no_such_pid = (e.returncode == 1) if no_such_pid: ctxt.reraise = False return return ppid.strip()
[ "def", "find_parent_pid", "(", "pid", ")", ":", "try", ":", "ppid", "=", "execute", "(", "[", "'ps'", ",", "'-o'", ",", "'ppid='", ",", "pid", "]", ",", "log_fail_as_error", "=", "False", ")", "except", "ProcessExecutionError", "as", "e", ":", "with", ...
retrieve the pid of the parent process of the given pid .
train
false
12,637
def config_section_map(config_file, section): config = ConfigParser.ConfigParser() config.read(config_file) dict1 = {} if (section not in config.sections()): return dict1 options = config.options(section) for option in options: try: dict1[option] = config.get(section, option) except: dict1[option] = None return dict1
[ "def", "config_section_map", "(", "config_file", ",", "section", ")", ":", "config", "=", "ConfigParser", ".", "ConfigParser", "(", ")", "config", ".", "read", "(", "config_file", ")", "dict1", "=", "{", "}", "if", "(", "section", "not", "in", "config", ...
map the values of a config file to a dictionary .
train
false
12,638
def REGIONS_CHOICES_SORTED_BY_NAME(): return [(v.id, v.name) for v in REGIONS_LIST_SORTED_BY_NAME()]
[ "def", "REGIONS_CHOICES_SORTED_BY_NAME", "(", ")", ":", "return", "[", "(", "v", ".", "id", ",", "v", ".", "name", ")", "for", "v", "in", "REGIONS_LIST_SORTED_BY_NAME", "(", ")", "]" ]
get the region choices and sort by name .
train
false
12,639
def getFlagNames(flags): if (flags == 1): return 'PAYLOAD' elif (flags == 2): return 'NEW_TICKET' elif (flags == 4): return 'PRNG_SEED' else: return 'Undefined'
[ "def", "getFlagNames", "(", "flags", ")", ":", "if", "(", "flags", "==", "1", ")", ":", "return", "'PAYLOAD'", "elif", "(", "flags", "==", "2", ")", ":", "return", "'NEW_TICKET'", "elif", "(", "flags", "==", "4", ")", ":", "return", "'PRNG_SEED'", "e...
return the flag name encoded in the integer flags as string .
train
false
12,641
@contextmanager def temporary_unloaded_module(python_file_contents): with tempfile.NamedTemporaryFile(dir='test/', prefix='_test_time_generated_module', suffix='.py') as temp_module_file: temp_module_file.file.write(python_file_contents) temp_module_file.file.flush() temp_module_path = temp_module_file.name temp_module_name = re.search('/(_test_time_generated_module.*).py', temp_module_path).group(1) (yield temp_module_name)
[ "@", "contextmanager", "def", "temporary_unloaded_module", "(", "python_file_contents", ")", ":", "with", "tempfile", ".", "NamedTemporaryFile", "(", "dir", "=", "'test/'", ",", "prefix", "=", "'_test_time_generated_module'", ",", "suffix", "=", "'.py'", ")", "as", ...
create an importable module return the name of importable module name given its file contents .
train
false
12,642
def MakeStatementGenerator(depth): if (depth == MAX_DEPTH): (yield '') else: for stmtGen in stmt_gens: for value in stmtGen().generate(depth): (yield Indent(value, depth))
[ "def", "MakeStatementGenerator", "(", "depth", ")", ":", "if", "(", "depth", "==", "MAX_DEPTH", ")", ":", "(", "yield", "''", ")", "else", ":", "for", "stmtGen", "in", "stmt_gens", ":", "for", "value", "in", "stmtGen", "(", ")", ".", "generate", "(", ...
yields all possible expressions .
train
false
12,643
def volume_type_extra_specs_get(context, volume_type_id): return IMPL.volume_type_extra_specs_get(context, volume_type_id)
[ "def", "volume_type_extra_specs_get", "(", "context", ",", "volume_type_id", ")", ":", "return", "IMPL", ".", "volume_type_extra_specs_get", "(", "context", ",", "volume_type_id", ")" ]
get all extra specs for a volume type .
train
false
12,645
def draw_polygon(list_of_points, color=colors.lightgreen, border=None, colour=None, **kwargs): if (colour is not None): color = colour del colour (strokecolor, color) = _stroke_and_fill_colors(color, border) xy_list = [] for (x, y) in list_of_points: xy_list.append(x) xy_list.append(y) return Polygon(xy_list, strokeColor=strokecolor, fillColor=color, strokewidth=0, **kwargs)
[ "def", "draw_polygon", "(", "list_of_points", ",", "color", "=", "colors", ".", "lightgreen", ",", "border", "=", "None", ",", "colour", "=", "None", ",", "**", "kwargs", ")", ":", "if", "(", "colour", "is", "not", "None", ")", ":", "color", "=", "co...
draw polygon .
train
false
12,647
def build_lengths_matrix(str1, str2): matrix = [[0 for j in range((len(str2) + 1))] for i in range((len(str1) + 1))] for (i, x) in enumerate(str1): for (j, y) in enumerate(str2): if (x == y): matrix[(i + 1)][(j + 1)] = (matrix[i][j] + 1) else: matrix[(i + 1)][(j + 1)] = max(matrix[(i + 1)][j], matrix[i][(j + 1)]) return matrix
[ "def", "build_lengths_matrix", "(", "str1", ",", "str2", ")", ":", "matrix", "=", "[", "[", "0", "for", "j", "in", "range", "(", "(", "len", "(", "str2", ")", "+", "1", ")", ")", "]", "for", "i", "in", "range", "(", "(", "len", "(", "str1", "...
xxx: needs documentation written .
train
false
12,648
def strip_slashes(name): if name.startswith('/'): name = name[1:] if name.endswith('/'): name = name[:(-1)] return name
[ "def", "strip_slashes", "(", "name", ")", ":", "if", "name", ".", "startswith", "(", "'/'", ")", ":", "name", "=", "name", "[", "1", ":", "]", "if", "name", ".", "endswith", "(", "'/'", ")", ":", "name", "=", "name", "[", ":", "(", "-", "1", ...
remove slashes from the beginning and end of a part/url .
train
false
12,649
@requires_segment_info def workspaces(pl, segment_info, only_show=None, output=None, strip=0): output = (output or segment_info.get(u'output')) return [{u'contents': w[u'name'][strip:], u'highlight_groups': workspace_groups(w)} for w in get_i3_connection().get_workspaces() if (((not only_show) or any((w[typ] for typ in only_show))) and ((not output) or (w[u'output'] == output)))]
[ "@", "requires_segment_info", "def", "workspaces", "(", "pl", ",", "segment_info", ",", "only_show", "=", "None", ",", "output", "=", "None", ",", "strip", "=", "0", ")", ":", "output", "=", "(", "output", "or", "segment_info", ".", "get", "(", "u'output...
return list of used workspaces .
train
false
12,650
def _authn_context_class_ref(authn_class, authn_auth=None): cntx_class = factory(saml.AuthnContextClassRef, text=authn_class) if authn_auth: return factory(saml.AuthnContext, authn_context_class_ref=cntx_class, authenticating_authority=factory(saml.AuthenticatingAuthority, text=authn_auth)) else: return factory(saml.AuthnContext, authn_context_class_ref=cntx_class)
[ "def", "_authn_context_class_ref", "(", "authn_class", ",", "authn_auth", "=", "None", ")", ":", "cntx_class", "=", "factory", "(", "saml", ".", "AuthnContextClassRef", ",", "text", "=", "authn_class", ")", "if", "authn_auth", ":", "return", "factory", "(", "s...
construct the authn context with a authn context class reference .
train
true
12,651
def render_to_image_list(image_list): return render_to_js_vardef('tinyMCEImageList', image_list)
[ "def", "render_to_image_list", "(", "image_list", ")", ":", "return", "render_to_js_vardef", "(", "'tinyMCEImageList'", ",", "image_list", ")" ]
returns a httpresponse whose content is a javascript file representing a list of images suitable for use wit the tinymce external_image_list_url configuration option .
train
false
12,652
def get_imlist(path): return [os.path.join(path, f) for f in os.listdir(path) if f.endswith('.jpg')]
[ "def", "get_imlist", "(", "path", ")", ":", "return", "[", "os", ".", "path", ".", "join", "(", "path", ",", "f", ")", "for", "f", "in", "os", ".", "listdir", "(", "path", ")", "if", "f", ".", "endswith", "(", "'.jpg'", ")", "]" ]
returns a list of filenames for all jpg images in a directory .
train
false
12,653
def import_pyqt4(version=2): import sip if (version is not None): sip.setapi('QString', version) sip.setapi('QVariant', version) from PyQt4 import QtGui, QtCore, QtSvg if (not check_version(QtCore.PYQT_VERSION_STR, '4.7')): raise ImportError(('IPython requires PyQt4 >= 4.7, found %s' % QtCore.PYQT_VERSION_STR)) QtCore.Signal = QtCore.pyqtSignal QtCore.Slot = QtCore.pyqtSlot version = sip.getapi('QString') api = (QT_API_PYQTv1 if (version == 1) else QT_API_PYQT) return (QtCore, QtGui, QtSvg, api)
[ "def", "import_pyqt4", "(", "version", "=", "2", ")", ":", "import", "sip", "if", "(", "version", "is", "not", "None", ")", ":", "sip", ".", "setapi", "(", "'QString'", ",", "version", ")", "sip", ".", "setapi", "(", "'QVariant'", ",", "version", ")"...
import pyqt4 parameters version : 1 .
train
true
12,654
def reader(name): def _(fn): module_name = fn.__module__ if module_name.startswith('hy.core'): module_name = None _hy_reader[module_name][name] = fn return fn return _
[ "def", "reader", "(", "name", ")", ":", "def", "_", "(", "fn", ")", ":", "module_name", "=", "fn", ".", "__module__", "if", "module_name", ".", "startswith", "(", "'hy.core'", ")", ":", "module_name", "=", "None", "_hy_reader", "[", "module_name", "]", ...
decorator to define a reader macro called name .
train
false
12,655
def _check_name(name): if (not name): raise InvalidName('database name cannot be the empty string') for invalid_char in [' ', '.', '$', '/', '\\', '\x00', '"']: if (invalid_char in name): raise InvalidName(('database names cannot contain the character %r' % invalid_char))
[ "def", "_check_name", "(", "name", ")", ":", "if", "(", "not", "name", ")", ":", "raise", "InvalidName", "(", "'database name cannot be the empty string'", ")", "for", "invalid_char", "in", "[", "' '", ",", "'.'", ",", "'$'", ",", "'/'", ",", "'\\\\'", ","...
check if a database name is valid .
train
false
12,656
@pytest.mark.django_db @pytest.mark.parametrize('view,model,service_provider_attr,get_provider', [(PaymentMethodEditView, PaymentMethod, 'payment_processor', get_custom_payment_processor), (ShippingMethodEditView, ShippingMethod, 'carrier', get_custom_carrier)]) def test_method_creation(rf, admin_user, view, model, service_provider_attr, get_provider): with override_settings(LANGUAGES=[('en', 'en')]): view = view.as_view() service_provider_field = ('base-%s' % service_provider_attr) data = {service_provider_field: get_provider().id, 'base-choice_identifier': 'manual', 'base-name__en': 'Custom method', 'base-shop': get_default_shop().id, 'base-tax_class': get_default_tax_class().id, 'base-enabled': True} methods_before = model.objects.count() url = ('/?provider=%s' % get_provider().id) request = apply_request_middleware(rf.post(url, data=data), user=admin_user) response = view(request, pk=None) if hasattr(response, 'render'): response.render() assert (response.status_code in [200, 302]) assert (model.objects.count() == (methods_before + 1))
[ "@", "pytest", ".", "mark", ".", "django_db", "@", "pytest", ".", "mark", ".", "parametrize", "(", "'view,model,service_provider_attr,get_provider'", ",", "[", "(", "PaymentMethodEditView", ",", "PaymentMethod", ",", "'payment_processor'", ",", "get_custom_payment_proce...
to make things little bit more simple lets use only english as an language .
train
false
12,657
def gather_deferreds(deferreds): results_or_first_failure = gatherResults(deferreds) def log_and_discard(failure): '\n Log the supplied failure and discard it.\n\n The failure is deliberately discarded so as to prevent any further\n logging of this failure when the deferred is eventually garbage\n collected.\n\n :param Failure failure: The ``Failure`` to be logged.\n ' write_failure(failure) for deferred in deferreds: deferred.addErrback(log_and_discard) gathering = gatherResults(deferreds) gathering.addCallback((lambda ignored: results_or_first_failure)) return gathering
[ "def", "gather_deferreds", "(", "deferreds", ")", ":", "results_or_first_failure", "=", "gatherResults", "(", "deferreds", ")", "def", "log_and_discard", "(", "failure", ")", ":", "write_failure", "(", "failure", ")", "for", "deferred", "in", "deferreds", ":", "...
return a deferred which fires when all of the supplied deferreds have themselves fired .
train
false
12,658
def get_repository_metadata_by_id(app, id): sa_session = app.model.context.current return sa_session.query(app.model.RepositoryMetadata).get(app.security.decode_id(id))
[ "def", "get_repository_metadata_by_id", "(", "app", ",", "id", ")", ":", "sa_session", "=", "app", ".", "model", ".", "context", ".", "current", "return", "sa_session", ".", "query", "(", "app", ".", "model", ".", "RepositoryMetadata", ")", ".", "get", "("...
get repository metadata from the database .
train
false
12,659
@image_comparison(baseline_images=[u'fancyarrow_dpi_cor_100dpi'], remove_text=True, extensions=[u'png'], savefig_kwarg=dict(dpi=100)) def test_fancyarrow_dpi_cor_100dpi(): __prepare_fancyarrow_dpi_cor_test()
[ "@", "image_comparison", "(", "baseline_images", "=", "[", "u'fancyarrow_dpi_cor_100dpi'", "]", ",", "remove_text", "=", "True", ",", "extensions", "=", "[", "u'png'", "]", ",", "savefig_kwarg", "=", "dict", "(", "dpi", "=", "100", ")", ")", "def", "test_fan...
check the export of a fancyarrowpatch @ 100 dpi .
train
false
12,663
def test_iforest_performance(): rng = check_random_state(2) X = (0.3 * rng.randn(120, 2)) X_train = np.r_[((X + 2), (X - 2))] X_train = X[:100] X_outliers = rng.uniform(low=(-4), high=4, size=(20, 2)) X_test = np.r_[(X[100:], X_outliers)] y_test = np.array((([0] * 20) + ([1] * 20))) clf = IsolationForest(max_samples=100, random_state=rng).fit(X_train) y_pred = (- clf.decision_function(X_test)) assert_greater(roc_auc_score(y_test, y_pred), 0.98)
[ "def", "test_iforest_performance", "(", ")", ":", "rng", "=", "check_random_state", "(", "2", ")", "X", "=", "(", "0.3", "*", "rng", ".", "randn", "(", "120", ",", "2", ")", ")", "X_train", "=", "np", ".", "r_", "[", "(", "(", "X", "+", "2", ")...
test isolation forest performs well .
train
false
12,665
def tag_pattern2re_pattern(tag_pattern): tag_pattern = re.sub(u'\\s', u'', tag_pattern) tag_pattern = re.sub(u'<', u'(<(', tag_pattern) tag_pattern = re.sub(u'>', u')>)', tag_pattern) if (not CHUNK_TAG_PATTERN.match(tag_pattern)): raise ValueError((u'Bad tag pattern: %r' % tag_pattern)) def reverse_str(str): lst = list(str) lst.reverse() return u''.join(lst) tc_rev = reverse_str(ChunkString.CHUNK_TAG_CHAR) reversed = reverse_str(tag_pattern) reversed = re.sub(u'\\.(?!\\\\(\\\\\\\\)*($|[^\\\\]))', tc_rev, reversed) tag_pattern = reverse_str(reversed) return tag_pattern
[ "def", "tag_pattern2re_pattern", "(", "tag_pattern", ")", ":", "tag_pattern", "=", "re", ".", "sub", "(", "u'\\\\s'", ",", "u''", ",", "tag_pattern", ")", "tag_pattern", "=", "re", ".", "sub", "(", "u'<'", ",", "u'(<('", ",", "tag_pattern", ")", "tag_patte...
convert a tag pattern to a regular expression pattern .
train
false
12,666
def test_one_qubit_anticommutators(): for g1 in (IdentityGate, X, Y, Z, H): for g2 in (IdentityGate, X, Y, Z, H): e = AntiCommutator(g1(0), g2(0)) a = matrix_to_zero(represent(e, nqubits=1, format='sympy')) b = matrix_to_zero(represent(e.doit(), nqubits=1, format='sympy')) assert (a == b) e = AntiCommutator(g1(0), g2(1)) a = matrix_to_zero(represent(e, nqubits=2, format='sympy')) b = matrix_to_zero(represent(e.doit(), nqubits=2, format='sympy')) assert (a == b)
[ "def", "test_one_qubit_anticommutators", "(", ")", ":", "for", "g1", "in", "(", "IdentityGate", ",", "X", ",", "Y", ",", "Z", ",", "H", ")", ":", "for", "g2", "in", "(", "IdentityGate", ",", "X", ",", "Y", ",", "Z", ",", "H", ")", ":", "e", "="...
test single qubit gate anticommutation relations .
train
false
12,668
def SplitURL(relative_url): (unused_scheme, unused_netloc, path, query, unused_fragment) = urlparse.urlsplit(relative_url) return (path, query)
[ "def", "SplitURL", "(", "relative_url", ")", ":", "(", "unused_scheme", ",", "unused_netloc", ",", "path", ",", "query", ",", "unused_fragment", ")", "=", "urlparse", ".", "urlsplit", "(", "relative_url", ")", "return", "(", "path", ",", "query", ")" ]
splits an http url into pieces .
train
false
12,669
def reset(): _runtime.reset()
[ "def", "reset", "(", ")", ":", "_runtime", ".", "reset", "(", ")" ]
resets the state of the tar mission .
train
false
12,671
def get_or_create_user(user_id, email): user_settings = get_user_settings(user_id, strict=False) if (user_settings is None): user_settings = _create_user(user_id, email) return user_settings
[ "def", "get_or_create_user", "(", "user_id", ",", "email", ")", ":", "user_settings", "=", "get_user_settings", "(", "user_id", ",", "strict", "=", "False", ")", "if", "(", "user_settings", "is", "None", ")", ":", "user_settings", "=", "_create_user", "(", "...
get or create user by fullname and email address .
train
false
12,672
def teams_new_unicode(self): name = self.unicode_before_teams() teams_mapping = getattr(settings, 'OPENID_LAUNCHPAD_TEAMS_MAPPING', {}) group_teams = [t for t in teams_mapping if (teams_mapping[t] == self.name)] if (len(group_teams) > 0): return ('%s -> %s' % (name, ', '.join(group_teams))) else: return name
[ "def", "teams_new_unicode", "(", "self", ")", ":", "name", "=", "self", ".", "unicode_before_teams", "(", ")", "teams_mapping", "=", "getattr", "(", "settings", ",", "'OPENID_LAUNCHPAD_TEAMS_MAPPING'", ",", "{", "}", ")", "group_teams", "=", "[", "t", "for", ...
replacement for group .
train
false
12,673
def _gen_https_names(domains): if (len(domains) == 1): return 'https://{0}'.format(domains[0]) elif (len(domains) == 2): return 'https://{dom[0]} and https://{dom[1]}'.format(dom=domains) elif (len(domains) > 2): return '{0}{1}{2}'.format(', '.join((('https://%s' % dom) for dom in domains[:(-1)])), ', and https://', domains[(-1)]) return ''
[ "def", "_gen_https_names", "(", "domains", ")", ":", "if", "(", "len", "(", "domains", ")", "==", "1", ")", ":", "return", "'https://{0}'", ".", "format", "(", "domains", "[", "0", "]", ")", "elif", "(", "len", "(", "domains", ")", "==", "2", ")", ...
returns a string of the https domains .
train
false
12,674
def methods_of(obj): result = [] for i in dir(obj): if (callable(getattr(obj, i)) and (not i.startswith('_'))): result.append((i, getattr(obj, i))) return result
[ "def", "methods_of", "(", "obj", ")", ":", "result", "=", "[", "]", "for", "i", "in", "dir", "(", "obj", ")", ":", "if", "(", "callable", "(", "getattr", "(", "obj", ",", "i", ")", ")", "and", "(", "not", "i", ".", "startswith", "(", "'_'", "...
get all callable methods of an object that dont start with underscore returns a list of tuples of the form .
train
false
12,675
def path_exists_strict(path): try: os.stat(path) except OSError as err: if (err.errno in (errno.EPERM, errno.EACCES)): raise return False else: return True
[ "def", "path_exists_strict", "(", "path", ")", ":", "try", ":", "os", ".", "stat", "(", "path", ")", "except", "OSError", "as", "err", ":", "if", "(", "err", ".", "errno", "in", "(", "errno", ".", "EPERM", ",", "errno", ".", "EACCES", ")", ")", "...
same as os .
train
false
12,676
def remote_editor_test(): from spyder.utils.qthelpers import qapplication app = qapplication() from spyder.config.main import CONF from spyder.widgets.variableexplorer.utils import make_remote_view, REMOTE_SETTINGS settings = {} for name in REMOTE_SETTINGS: settings[name] = CONF.get('variable_explorer', name) remote = make_remote_view(get_test_data(), settings) dialog = CollectionsEditor() dialog.setup(remote, remote=True) dialog.show() app.exec_()
[ "def", "remote_editor_test", "(", ")", ":", "from", "spyder", ".", "utils", ".", "qthelpers", "import", "qapplication", "app", "=", "qapplication", "(", ")", "from", "spyder", ".", "config", ".", "main", "import", "CONF", "from", "spyder", ".", "widgets", ...
remote collections editor test .
train
true
12,677
def format_distance_matrix(labels, data): return format_matrix(data, labels, labels, convert_matching_names_to_zero=True)
[ "def", "format_distance_matrix", "(", "labels", ",", "data", ")", ":", "return", "format_matrix", "(", "data", ",", "labels", ",", "labels", ",", "convert_matching_names_to_zero", "=", "True", ")" ]
writes distance matrix as tab-delimited text .
train
false
12,678
def setup_test_db(): db.upgradeDatabase(db.DBConnection(), mainDB.InitialSchema) db.sanityCheckDatabase(db.DBConnection(), mainDB.MainSanityCheck) db.upgradeDatabase(db.DBConnection('cache.db'), cache_db.InitialSchema) db.upgradeDatabase(db.DBConnection('failed.db'), failed_db.InitialSchema)
[ "def", "setup_test_db", "(", ")", ":", "db", ".", "upgradeDatabase", "(", "db", ".", "DBConnection", "(", ")", ",", "mainDB", ".", "InitialSchema", ")", "db", ".", "sanityCheckDatabase", "(", "db", ".", "DBConnection", "(", ")", ",", "mainDB", ".", "Main...
creates new .
train
false
12,679
def wol(mac, bcast='255.255.255.255', destport=9): dest = salt.utils.mac_str_to_bytes(mac) sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) sock.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1) sock.sendto((('\xff' * 6) + (dest * 16)), (bcast, int(destport))) return True
[ "def", "wol", "(", "mac", ",", "bcast", "=", "'255.255.255.255'", ",", "destport", "=", "9", ")", ":", "dest", "=", "salt", ".", "utils", ".", "mac_str_to_bytes", "(", "mac", ")", "sock", "=", "socket", ".", "socket", "(", "socket", ".", "AF_INET", "...
send a "magic packet" to wake up a minion cli example: .
train
true
12,681
def list_strings(arg): if isinstance(arg, str): return [arg] else: return arg
[ "def", "list_strings", "(", "arg", ")", ":", "if", "isinstance", "(", "arg", ",", "str", ")", ":", "return", "[", "arg", "]", "else", ":", "return", "arg" ]
always return a list of strings .
train
false
12,682
def _decode_configurations_from_api(configurations): results = [] for c in configurations: result = {} if hasattr(c, 'classification'): result['Classification'] = c.classification if getattr(c, 'configurations', None): result['Configurations'] = _decode_configurations_from_api(c.configurations) result['Properties'] = dict(((kv.key, kv.value) for kv in getattr(c, 'properties', []))) results.append(result) return results
[ "def", "_decode_configurations_from_api", "(", "configurations", ")", ":", "results", "=", "[", "]", "for", "c", "in", "configurations", ":", "result", "=", "{", "}", "if", "hasattr", "(", "c", ",", "'classification'", ")", ":", "result", "[", "'Classificati...
recursively convert configurations object from describe_cluster() back into simple data structure .
train
false
12,683
def apply_path_root(path_root): return PathsExpansion(path_root.paths, tuple())
[ "def", "apply_path_root", "(", "path_root", ")", ":", "return", "PathsExpansion", "(", "path_root", ".", "paths", ",", "tuple", "(", ")", ")" ]
returns the paths for the root of the repo .
train
false
12,685
def remove_outputs(nb): for ws in nb.worksheets: for cell in ws.cells: if (cell.cell_type == 'code'): cell.outputs = [] if ('prompt_number' in cell): del cell['prompt_number']
[ "def", "remove_outputs", "(", "nb", ")", ":", "for", "ws", "in", "nb", ".", "worksheets", ":", "for", "cell", "in", "ws", ".", "cells", ":", "if", "(", "cell", ".", "cell_type", "==", "'code'", ")", ":", "cell", ".", "outputs", "=", "[", "]", "if...
remove the outputs from a notebook .
train
false
12,686
def test_rus_fit_sample_with_indices(): rus = RandomUnderSampler(return_indices=True, random_state=RND_SEED) (X_resampled, y_resampled, idx_under) = rus.fit_sample(X, Y) X_gt = np.array([[0.92923648, 0.76103773], [0.47104475, 0.44386323], [0.13347175, 0.12167502], [0.09125309, (-0.85409574)], [0.12372842, 0.6536186], [0.04352327, (-0.20515826)]]) y_gt = np.array([0, 0, 0, 1, 1, 1]) idx_gt = np.array([1, 3, 8, 6, 7, 0]) assert_array_equal(X_resampled, X_gt) assert_array_equal(y_resampled, y_gt) assert_array_equal(idx_under, idx_gt)
[ "def", "test_rus_fit_sample_with_indices", "(", ")", ":", "rus", "=", "RandomUnderSampler", "(", "return_indices", "=", "True", ",", "random_state", "=", "RND_SEED", ")", "(", "X_resampled", ",", "y_resampled", ",", "idx_under", ")", "=", "rus", ".", "fit_sample...
test the fit sample routine with indices support .
train
false
12,687
def get_item_user_text(item): return from_qvariant(item.data(0, Qt.UserRole), to_text_string)
[ "def", "get_item_user_text", "(", "item", ")", ":", "return", "from_qvariant", "(", "item", ".", "data", "(", "0", ",", "Qt", ".", "UserRole", ")", ",", "to_text_string", ")" ]
get qtreewidgetitem user role string .
train
false
12,688
def get_local_images_dir(subfolder=None): images_dir = os.path.join(config_dir, u'resources/images') if subfolder: images_dir = os.path.join(images_dir, subfolder) if iswindows: images_dir = os.path.normpath(images_dir) return images_dir
[ "def", "get_local_images_dir", "(", "subfolder", "=", "None", ")", ":", "images_dir", "=", "os", ".", "path", ".", "join", "(", "config_dir", ",", "u'resources/images'", ")", "if", "subfolder", ":", "images_dir", "=", "os", ".", "path", ".", "join", "(", ...
returns a path to the users local resources/images folder if a subfolder name parameter is specified .
train
false
12,690
def angle_axis2quat(theta, vector, is_normalized=False): vector = np.array(vector) if (not is_normalized): vector = (vector / math.sqrt(np.dot(vector, vector))) t2 = (theta / 2.0) st2 = math.sin(t2) return np.concatenate(([math.cos(t2)], (vector * st2)))
[ "def", "angle_axis2quat", "(", "theta", ",", "vector", ",", "is_normalized", "=", "False", ")", ":", "vector", "=", "np", ".", "array", "(", "vector", ")", "if", "(", "not", "is_normalized", ")", ":", "vector", "=", "(", "vector", "/", "math", ".", "...
quaternion for rotation of angle theta around vector parameters theta : scalar angle of rotation vector : 3 element sequence vector specifying axis for rotation .
train
false
12,692
def _add_file(file_path): _db_content['files'].append(file_path)
[ "def", "_add_file", "(", "file_path", ")", ":", "_db_content", "[", "'files'", "]", ".", "append", "(", "file_path", ")" ]
adds a file reference to the db .
train
false
12,693
def fix_return(node): if isinstance(node, datetime.datetime): node_value = str(node) elif isinstance(node, list): node_value = [fix_return(item) for item in node] elif isinstance(node, dict): node_value = dict([(item, fix_return(node[item])) for item in node.keys()]) else: node_value = node return node_value
[ "def", "fix_return", "(", "node", ")", ":", "if", "isinstance", "(", "node", ",", "datetime", ".", "datetime", ")", ":", "node_value", "=", "str", "(", "node", ")", "elif", "isinstance", "(", "node", ",", "list", ")", ":", "node_value", "=", "[", "fi...
fixup returned dictionary .
train
false
12,694
def EscapeMakeVariableExpansion(s): return s.replace('$', '$$')
[ "def", "EscapeMakeVariableExpansion", "(", "s", ")", ":", "return", "s", ".", "replace", "(", "'$'", ",", "'$$'", ")" ]
make has its own variable expansion syntax using $ .
train
false
12,695
def guestbook_key(guestbook_name=DEFAULT_GUESTBOOK_NAME): return ndb.Key('Guestbook', guestbook_name)
[ "def", "guestbook_key", "(", "guestbook_name", "=", "DEFAULT_GUESTBOOK_NAME", ")", ":", "return", "ndb", ".", "Key", "(", "'Guestbook'", ",", "guestbook_name", ")" ]
constructs a datastore key for a guestbook entity with name .
train
false
12,696
def Eval(train_dir, eval_dir, model_str, eval_data, decoder_file, num_steps, graph_def_file=None, eval_interval_secs=0, reader=None):
    """Repeatedly evaluate the latest checkpoint and log error rates.

    Args:
      train_dir: directory holding the training checkpoints to evaluate.
      eval_dir: directory where summaries are written for TensorBoard.
      model_str: network architecture specification passed to InitNetwork.
      eval_data: evaluation data source passed to InitNetwork.
      decoder_file: path to a decoder definition; required, since only the
        softmax decoder path is implemented.
      num_steps: number of steps per evaluation pass.
      graph_def_file: optional path; the GraphDef is dumped there once if
        the file does not yet exist.
      eval_interval_secs: seconds to sleep between evaluations; 0 means
        evaluate once and return.
      reader: optional data reader forwarded to InitNetwork.

    Returns:
      The most recent ErrorRates computed (fields are None if no
      checkpoint was ever found).

    Raises:
      ValueError: if a checkpoint is found but no decoder was supplied.
    """
    decode = None
    if decoder_file:
        decode = decoder.Decoder(decoder_file)
    # Placeholder result so we return something sensible even if no
    # checkpoint ever appears.
    rates = ec.ErrorRates(label_error=None, word_recall_error=None, word_precision_error=None, sequence_error=None)
    with tf.Graph().as_default():
        model = InitNetwork(eval_data, model_str, 'eval', reader=reader)
        sw = tf.train.SummaryWriter(eval_dir)

        while True:
            sess = tf.Session('')
            if (graph_def_file is not None):
                # Dump the shape-annotated GraphDef only once: skip if the
                # file already exists from a previous pass.
                if (not tf.gfile.Exists(graph_def_file)):
                    with tf.gfile.FastGFile(graph_def_file, 'w') as f:
                        f.write(sess.graph.as_graph_def(add_shapes=True).SerializeToString())
            ckpt = tf.train.get_checkpoint_state(train_dir)
            if (ckpt and ckpt.model_checkpoint_path):
                # Restore the newest checkpoint and evaluate it.
                step = model.Restore(ckpt.model_checkpoint_path, sess)
                if decode:
                    rates = decode.SoftmaxEval(sess, model, num_steps)
                    _AddRateToSummary('Label error rate', rates.label_error, step, sw)
                    _AddRateToSummary('Word recall error rate', rates.word_recall_error, step, sw)
                    _AddRateToSummary('Word precision error rate', rates.word_precision_error, step, sw)
                    _AddRateToSummary('Sequence error rate', rates.sequence_error, step, sw)
                    sw.flush()
                    print 'Error rates=', rates
                else:
                    raise ValueError('Non-softmax decoder evaluation not implemented!')
            if eval_interval_secs:
                # Poll mode: wait, then re-check for a newer checkpoint.
                time.sleep(eval_interval_secs)
            else:
                # Single-shot evaluation when no polling interval is set.
                break
    return rates
[ "def", "Eval", "(", "train_dir", ",", "eval_dir", ",", "model_str", ",", "eval_data", ",", "decoder_file", ",", "num_steps", ",", "graph_def_file", "=", "None", ",", "eval_interval_secs", "=", "0", ",", "reader", "=", "None", ")", ":", "decode", "=", "None...
evaluates a network and checkpoints it to disk .
train
false
12,698
def _raise_on_error(result): if (result != 0): raise HomeAssistantError('Error talking to MQTT: {}'.format(result))
[ "def", "_raise_on_error", "(", "result", ")", ":", "if", "(", "result", "!=", "0", ")", ":", "raise", "HomeAssistantError", "(", "'Error talking to MQTT: {}'", ".", "format", "(", "result", ")", ")" ]
raise error if error result .
train
false
12,699
def format_playlist(playlist, show_url=True):
    """Build a one-line, IRC-formatted summary of a SoundCloud playlist dict.

    Includes the bolded title, optional truncated description and genre,
    the uploader, either "No items" or an item count with total duration,
    and (when *show_url* is true) a shortened permalink.
    """
    parts = ['\x02{}\x02'.format(playlist['title'])]
    if playlist['description']:
        parts.append(': "{}"'.format(formatting.truncate(playlist['description'])))
    if playlist['genre']:
        parts.append(' - \x02{}\x02'.format(playlist['genre']))
    parts.append(' - by \x02{}\x02'.format(playlist['user']['username']))
    if not playlist['tracks']:
        parts.append(' - No items')
    else:
        parts.append(' - {} items,'.format(len(playlist['tracks'])))
        # Duration is reported in milliseconds; convert to whole seconds.
        seconds = round(int(playlist['duration']) / 1000)
        parts.append(' {}'.format(timeformat.format_time(seconds, simple=True)))
    if show_url:
        parts.append(' - {}'.format(web.try_shorten(playlist['permalink_url'])))
    return ''.join(parts)
[ "def", "format_playlist", "(", "playlist", ",", "show_url", "=", "True", ")", ":", "out", "=", "'\\x02{}\\x02'", ".", "format", "(", "playlist", "[", "'title'", "]", ")", "if", "playlist", "[", "'description'", "]", ":", "out", "+=", "': \"{}\"'", ".", "...
takes a soundcloud playlist item and returns a formatted string .
train
false