id_within_dataset
int64
1
55.5k
snippet
stringlengths
19
14.2k
tokens
listlengths
6
1.63k
nl
stringlengths
6
352
split_within_dataset
stringclasses
1 value
is_duplicated
bool
2 classes
2,957
def inputs(eval_data, data_dir, batch_size):
    """Build a queued batch of CIFAR-10 images and labels.

    Args:
        eval_data: truthy to read the test batch, falsy for the train batches.
        data_dir: directory containing the CIFAR-10 binary batch files.
        batch_size: number of images per returned batch.

    Returns:
        Whatever _generate_image_and_label_batch returns (images, labels).

    Raises:
        ValueError: if any expected data file is missing.
    """
    if (not eval_data):
        # Training set is split across data_batch_1.bin .. data_batch_5.bin.
        filenames = [os.path.join(data_dir, ('data_batch_%d.bin' % i)) for i in xrange(1, 6)]
        num_examples_per_epoch = NUM_EXAMPLES_PER_EPOCH_FOR_TRAIN
    else:
        filenames = [os.path.join(data_dir, 'test_batch.bin')]
        num_examples_per_epoch = NUM_EXAMPLES_PER_EPOCH_FOR_EVAL
    for f in filenames:
        if (not tf.gfile.Exists(f)):
            raise ValueError(('Failed to find file: ' + f))
    filename_queue = tf.train.string_input_producer(filenames)
    read_input = read_cifar10(filename_queue)
    reshaped_image = tf.cast(read_input.uint8image, tf.float32)
    height = IMAGE_SIZE
    width = IMAGE_SIZE
    # Center-crop (or pad) to the fixed network input size, then normalize
    # each image to zero mean / unit variance.
    resized_image = tf.image.resize_image_with_crop_or_pad(reshaped_image, width, height)
    float_image = tf.image.per_image_standardization(resized_image)
    float_image.set_shape([height, width, 3])
    read_input.label.set_shape([1])
    # Keep at least 40% of an epoch in the queue so batching draws from a
    # reasonably large pool of examples.
    min_fraction_of_examples_in_queue = 0.4
    min_queue_examples = int((num_examples_per_epoch * min_fraction_of_examples_in_queue))
    return _generate_image_and_label_batch(float_image, read_input.label, min_queue_examples, batch_size, shuffle=False)
[ "def", "inputs", "(", "eval_data", ",", "data_dir", ",", "batch_size", ")", ":", "if", "(", "not", "eval_data", ")", ":", "filenames", "=", "[", "os", ".", "path", ".", "join", "(", "data_dir", ",", "(", "'data_batch_%d.bin'", "%", "i", ")", ")", "fo...
return the inputs required to compute the given variables .
train
true
2,958
def dmp_inflate(f, M, u, K):
    """Map y_i to x_i**k_i in a multivariate polynomial over domain K.

    ``u`` is the number of extra variables (0 means univariate) and ``M``
    holds the per-variable inflation exponents.
    """
    if (not u):
        # Univariate case: delegate to the dense univariate routine.
        return dup_inflate(f, M[0], K)
    if all(((m == 1) for m in M)):
        # All exponents are 1, so inflation is the identity map.
        return f
    else:
        return _rec_inflate(f, M, u, 0, K)
[ "def", "dmp_inflate", "(", "f", ",", "M", ",", "u", ",", "K", ")", ":", "if", "(", "not", "u", ")", ":", "return", "dup_inflate", "(", "f", ",", "M", "[", "0", "]", ",", "K", ")", "if", "all", "(", "(", "(", "m", "==", "1", ")", "for", ...
map y_i to x_i**k_i in a polynomial in k[x] .
train
false
2,959
def splitdrive(p):
    """Split a pathname into a drive specification and the rest of the path.

    This platform has no concept of drive letters, so the drive component
    is always the empty string and the path is returned untouched.
    """
    drive = ''
    return (drive, p)
[ "def", "splitdrive", "(", "p", ")", ":", "return", "(", "''", ",", "p", ")" ]
split a pathname into a drive specification and the rest of the path .
train
false
2,961
def _get_table(table_name, metadata):
    """Return a SQLAlchemy Table reflected from the live database schema.

    ``autoload=True`` makes SQLAlchemy introspect the table definition via
    the bind attached to *metadata*.  NOTE(review): ``autoload`` is
    deprecated in modern SQLAlchemy in favor of ``autoload_with`` — confirm
    the pinned SQLAlchemy version before upgrading.
    """
    return sqlalchemy.Table(table_name, metadata, autoload=True)
[ "def", "_get_table", "(", "table_name", ",", "metadata", ")", ":", "return", "sqlalchemy", ".", "Table", "(", "table_name", ",", "metadata", ",", "autoload", "=", "True", ")" ]
return a sqlalchemy table definition with associated metadata .
train
false
2,963
def min_path(path):
    """Return *path* with '.min' inserted just before its extension."""
    base, extension = os.path.splitext(path)
    return '{0}.min{1}'.format(base, extension)
[ "def", "min_path", "(", "path", ")", ":", "(", "path", ",", "ext", ")", "=", "os", ".", "path", ".", "splitext", "(", "path", ")", "return", "(", "(", "path", "+", "'.min'", ")", "+", "ext", ")" ]
return the '.min' variant of the given path .
train
false
2,964
def getLoopListsByPath(derivation, endMultiplier, path):
    """Get loop lists from *path* for the given extrusion derivation.

    When ``endMultiplier`` is set and the loop is open (its end is not
    close to its start), the first and last points are extrapolated away
    from their neighbors, scaled by ``endMultiplier``.
    """
    vertexes = []
    loopLists = [[]]
    if (len(derivation.loop) < 2):
        # Degenerate loop: nothing to build.
        return loopLists
    for (pointIndex, pointComplex) in enumerate(derivation.loop):
        if ((endMultiplier != None) and (not derivation.isEndCloseToStart)):
            if (pointIndex == 0):
                # Push the start point outward along start->neighbor.
                nextPoint = derivation.loop[1]
                pointComplex = ((endMultiplier * (pointComplex - nextPoint)) + nextPoint)
            elif (pointIndex == (len(derivation.loop) - 1)):
                # Push the end point outward along end->neighbor.
                previousPoint = derivation.loop[(pointIndex - 1)]
                pointComplex = ((endMultiplier * (pointComplex - previousPoint)) + previousPoint)
        addLoopByComplex(derivation, endMultiplier, loopLists, path, pointComplex, vertexes)
    if derivation.isEndCloseToStart:
        # Closed loop: open a fresh, empty loop in the last loop list.
        loopLists[(-1)].append([])
    return loopLists
[ "def", "getLoopListsByPath", "(", "derivation", ",", "endMultiplier", ",", "path", ")", ":", "vertexes", "=", "[", "]", "loopLists", "=", "[", "[", "]", "]", "if", "(", "len", "(", "derivation", ".", "loop", ")", "<", "2", ")", ":", "return", "loopLi...
get loop lists from path .
train
false
2,965
def field_isomorphism_factor(a, b):
    """Construct a field isomorphism from ``a`` into ``b`` via factorization.

    Factors ``a``'s minimal polynomial over the extension generated by
    ``b`` and, for each linear factor, checks numerically whether its root
    (expressed in powers of ``b.root``) coincides with ``a.root``.

    Returns:
        The coefficient list of the isomorphism, or None when no linear
        factor matches.
    """
    (_, factors) = factor_list(a.minpoly, extension=b)
    for (f, _) in factors:
        if (f.degree() == 1):
            # Linear factor: recover its root as a polynomial in b.root.
            coeffs = f.rep.TC().to_sympy_list()
            (d, terms) = ((len(coeffs) - 1), [])
            for (i, coeff) in enumerate(coeffs):
                terms.append((coeff * (b.root ** (d - i))))
            root = Add(*terms)
            # evalf with chop=True absorbs floating-point round-off noise.
            if ((a.root - root).evalf(chop=True) == 0):
                return coeffs
            if ((a.root + root).evalf(chop=True) == 0):
                return [(- c) for c in coeffs]
    else:
        # for-else: no linear factor matched.
        return None
[ "def", "field_isomorphism_factor", "(", "a", ",", "b", ")", ":", "(", "_", ",", "factors", ")", "=", "factor_list", "(", "a", ".", "minpoly", ",", "extension", "=", "b", ")", "for", "(", "f", ",", "_", ")", "in", "factors", ":", "if", "(", "f", ...
construct field isomorphism via factorization .
train
false
2,966
def get_instance_path_at_destination(instance, migrate_data=None):
    """Get the instance directory on the destination node of a live migration.

    When the migrate data carries a relative path, resolve it against the
    configured instances_path; otherwise fall back to the regular lookup.
    """
    relative = migrate_data.instance_relative_path if migrate_data else None
    if relative:
        return os.path.join(CONF.instances_path, relative)
    return get_instance_path(instance)
[ "def", "get_instance_path_at_destination", "(", "instance", ",", "migrate_data", "=", "None", ")", ":", "instance_relative_path", "=", "None", "if", "migrate_data", ":", "instance_relative_path", "=", "migrate_data", ".", "instance_relative_path", "if", "instance_relative...
get the instance path on destination node while live migration .
train
false
2,967
def _MatchPostfix(postfix_props, index_props):
    """Matches a postfix constraint against an existing index definition.

    Walks ``index_props`` backwards, consuming one group of postfix
    properties at a time.  Unordered groups (sets/frozensets) must match as
    sets of names; ordered groups must match names and, where a direction
    is specified, directions too.

    Returns:
        The leading index properties not consumed by the postfix, or None
        when the postfix does not match.
    """
    index_props_rev = reversed(index_props)
    for property_group in reversed(postfix_props):
        # Take exactly as many index properties as this group requires.
        index_group_iter = itertools.islice(index_props_rev, len(property_group))
        if isinstance(property_group, (frozenset, set)):
            # Order-insensitive group: compare property names as sets.
            index_group = set((prop for (prop, _) in index_group_iter))
            if (index_group != property_group):
                return None
        else:
            index_group = list(index_group_iter)
            if (len(index_group) != len(property_group)):
                return None
            for ((index_prop, index_dir), (prop, direction)) in itertools.izip(index_group, reversed(property_group)):
                # A falsy direction in the postfix means "any direction".
                if ((index_prop != prop) or (direction and (index_dir != direction))):
                    return None
    remaining = list(index_props_rev)
    remaining.reverse()
    return remaining
[ "def", "_MatchPostfix", "(", "postfix_props", ",", "index_props", ")", ":", "index_props_rev", "=", "reversed", "(", "index_props", ")", "for", "property_group", "in", "reversed", "(", "postfix_props", ")", ":", "index_group_iter", "=", "itertools", ".", "islice",...
matches a postfix constraint with an existing index .
train
false
2,968
def _create_tmp_config_dir(): import getpass import tempfile from matplotlib.cbook import mkdirs try: tempdir = tempfile.gettempdir() except NotImplementedError: return None try: username = getpass.getuser() except KeyError: username = str(os.getuid()) tempdir = tempfile.mkdtemp(prefix=(u'matplotlib-%s-' % username), dir=tempdir) os.environ[u'MPLCONFIGDIR'] = tempdir return tempdir
[ "def", "_create_tmp_config_dir", "(", ")", ":", "import", "getpass", "import", "tempfile", "from", "matplotlib", ".", "cbook", "import", "mkdirs", "try", ":", "tempdir", "=", "tempfile", ".", "gettempdir", "(", ")", "except", "NotImplementedError", ":", "return"...
if the config directory can not be created .
train
false
2,969
def _get_uri_from_request(request): uri = request.base_url if request.query_string: uri += ('?' + request.query_string.decode('utf-8')) return uri
[ "def", "_get_uri_from_request", "(", "request", ")", ":", "uri", "=", "request", ".", "base_url", "if", "request", ".", "query_string", ":", "uri", "+=", "(", "'?'", "+", "request", ".", "query_string", ".", "decode", "(", "'utf-8'", ")", ")", "return", ...
the uri returned from request .
train
true
2,970
def node_frame_class(node):
    """Return the enclosing class node for a method node, or None.

    Climbs the frame chain upwards until an astroid.Class is reached.
    NOTE(review): ``astroid.Class`` is the pre-2.0 astroid name (newer
    releases use ``ClassDef``) — confirm the pinned astroid version.
    """
    klass = node.frame()
    while ((klass is not None) and (not isinstance(klass, astroid.Class))):
        if (klass.parent is None):
            klass = None
        else:
            klass = klass.parent.frame()
    return klass
[ "def", "node_frame_class", "(", "node", ")", ":", "klass", "=", "node", ".", "frame", "(", ")", "while", "(", "(", "klass", "is", "not", "None", ")", "and", "(", "not", "isinstance", "(", "klass", ",", "astroid", ".", "Class", ")", ")", ")", ":", ...
return klass node for a method node .
train
false
2,971
def manipulator_valid_rel_key(f, self, field_data, all_data):
    """Validates that ``field_data`` is a valid foreign key for field ``f``.

    Looks the value up through the related model's default manager and
    raises a ValidationError when no matching row exists.  (Uses Python 2
    ``raise`` statement syntax — this is legacy Django code.)
    """
    klass = f.rel.to
    try:
        klass._default_manager.get(**{f.rel.field_name: field_data})
    except klass.DoesNotExist:
        raise validators.ValidationError, (_('Please enter a valid %s.') % f.verbose_name)
[ "def", "manipulator_valid_rel_key", "(", "f", ",", "self", ",", "field_data", ",", "all_data", ")", ":", "klass", "=", "f", ".", "rel", ".", "to", "try", ":", "klass", ".", "_default_manager", ".", "get", "(", "**", "{", "f", ".", "rel", ".", "field_...
validates that the value is a valid foreign key .
train
false
2,974
def get_profile(user):
    """Get the user's profile, caching it on the user object.

    A missing profile is created on first access.
    """
    if hasattr(user, '_cached_userman_profile'):
        return user._cached_userman_profile
    try:
        profile = UserProfile.objects.get(user=user)
    except UserProfile.DoesNotExist:
        # No profile yet for this user: create one now.
        profile = create_profile_for_user(user)
    user._cached_userman_profile = profile
    return profile
[ "def", "get_profile", "(", "user", ")", ":", "if", "hasattr", "(", "user", ",", "'_cached_userman_profile'", ")", ":", "return", "user", ".", "_cached_userman_profile", "else", ":", "try", ":", "profile", "=", "UserProfile", ".", "objects", ".", "get", "(", ...
get profile .
train
false
2,975
def create_objects():
    """Instantiate the initial game objects: Player #1 (god user) and Limbo.

    Sets up the superuser's player and character typeclasses, permissions
    and lock strings, then creates the Limbo room and makes it the god
    character's home and location.
    """
    logger.log_info('Creating objects (Player #1 and Limbo room) ...')
    god_player = get_god_player()
    player_typeclass = settings.BASE_PLAYER_TYPECLASS
    # Move the god player onto the configured typeclass and initialize it.
    god_player.swap_typeclass(player_typeclass, clean_attributes=True)
    god_player.basetype_setup()
    god_player.at_player_creation()
    god_player.locks.add('examine:perm(Immortals);edit:false();delete:false();boot:false();msg:all()')
    god_player.permissions.add('Immortals')
    character_typeclass = settings.BASE_CHARACTER_TYPECLASS
    # Character #1 is forced to database id 1.
    god_character = create.create_object(character_typeclass, key=god_player.username, nohome=True)
    god_character.id = 1
    god_character.save()
    god_character.db.desc = _('This is User #1.')
    god_character.locks.add('examine:perm(Immortals);edit:false();delete:false();boot:false();msg:all();puppet:false()')
    god_character.permissions.add('Immortals')
    god_player.attributes.add('_first_login', True)
    god_player.attributes.add('_last_puppet', god_character)
    try:
        god_player.db._playable_characters.append(god_character)
    except AttributeError:
        # Attribute does not exist yet: start the list fresh.
        god_player.db_playable_characters = [god_character]
    room_typeclass = settings.BASE_ROOM_TYPECLASS
    # The Limbo room is forced to database id 2.
    limbo_obj = create.create_object(room_typeclass, _('Limbo'), nohome=True)
    limbo_obj.id = 2
    limbo_obj.save()
    limbo_obj.db.desc = LIMBO_DESC.strip()
    limbo_obj.save()
    if (not god_character.location):
        god_character.location = limbo_obj
    if (not god_character.home):
        god_character.home = limbo_obj
[ "def", "create_objects", "(", ")", ":", "logger", ".", "log_info", "(", "'Creating objects (Player #1 and Limbo room) ...'", ")", "god_player", "=", "get_god_player", "(", ")", "player_typeclass", "=", "settings", ".", "BASE_PLAYER_TYPECLASS", "god_player", ".", "swap_t...
instantiate objects as per the given specifications .
train
false
2,977
@commands(u'duck', u'ddg', u'g')
@example(u'.duck privacy or .duck !mcwiki obsidian')
def duck(bot, trigger):
    """Queries Duck Duck Go for the specified input.

    Tries the instant-answer API first, then falls back to the first
    search result; remembers the last URL per channel when the bot keeps
    such a memory.
    """
    query = trigger.group(2)
    if (not query):
        return bot.reply(u'.ddg what?')
    # Prefer an instant answer from the DDG API when one exists.
    result = duck_api(query)
    if result:
        bot.reply(result)
        return
    uri = duck_search(query)
    if uri:
        bot.reply(uri)
        if (u'last_seen_url' in bot.memory):
            # Track the last URL shown in this channel for other commands.
            bot.memory[u'last_seen_url'][trigger.sender] = uri
    else:
        bot.reply((u"No results found for '%s'." % query))
[ "@", "commands", "(", "u'duck'", ",", "u'ddg'", ",", "u'g'", ")", "@", "example", "(", "u'.duck privacy or .duck !mcwiki obsidian'", ")", "def", "duck", "(", "bot", ",", "trigger", ")", ":", "query", "=", "trigger", ".", "group", "(", "2", ")", "if", "("...
queries duck duck go for the specified input .
train
false
2,980
def get_protocolinfo(controller):
    """Issues a PROTOCOLINFO query to a control socket or controller.

    Retries once after reconnecting when the first attempt fails or the
    socket demands authentication, then expands the advertised cookie path
    to an absolute path using the tor process as reference.

    Raises:
        stem.SocketError: if the socket closes during the retry.
    """
    try:
        protocolinfo_response = _msg(controller, 'PROTOCOLINFO 1')
    except:
        # NOTE(review): bare except deliberately swallows the first
        # failure; the request is retried after reconnecting below.
        protocolinfo_response = None
    if ((not protocolinfo_response) or (str(protocolinfo_response) == 'Authentication required.')):
        controller.connect()
        try:
            protocolinfo_response = _msg(controller, 'PROTOCOLINFO 1')
        except stem.SocketClosed as exc:
            raise stem.SocketError(exc)
    stem.response.convert('PROTOCOLINFO', protocolinfo_response)
    if protocolinfo_response.cookie_path:
        # Resolve the cookie path relative to the tor process.
        _expand_cookie_path(protocolinfo_response, stem.util.system.pid_by_name, 'tor')
    if isinstance(controller, stem.socket.ControlSocket):
        control_socket = controller
    else:
        control_socket = controller.get_socket()
    if isinstance(control_socket, stem.socket.ControlPort):
        # Pid-by-port lookup only works for the local machine.
        if (control_socket.get_address() == '127.0.0.1'):
            pid_method = stem.util.system.pid_by_port
            _expand_cookie_path(protocolinfo_response, pid_method, control_socket.get_port())
    elif isinstance(control_socket, stem.socket.ControlSocketFile):
        pid_method = stem.util.system.pid_by_open_file
        _expand_cookie_path(protocolinfo_response, pid_method, control_socket.get_socket_path())
    return protocolinfo_response
[ "def", "get_protocolinfo", "(", "controller", ")", ":", "try", ":", "protocolinfo_response", "=", "_msg", "(", "controller", ",", "'PROTOCOLINFO 1'", ")", "except", ":", "protocolinfo_response", "=", "None", "if", "(", "(", "not", "protocolinfo_response", ")", "...
issues a protocolinfo query to a control socket .
train
false
2,982
def set_port_status(port_id, status):
    """Set the status of the given port in the database.

    Raises:
        q_exc.PortNotFound: when no port with *port_id* exists.
    """
    LOG.debug(_('set_port_status as %s called'), status)
    session = db.get_session()
    try:
        port = session.query(models_v2.Port).filter_by(id=port_id).one()
        port['status'] = status
        # Merge + flush persists the change within the current transaction.
        session.merge(port)
        session.flush()
    except exc.NoResultFound:
        raise q_exc.PortNotFound(port_id=port_id)
[ "def", "set_port_status", "(", "port_id", ",", "status", ")", ":", "LOG", ".", "debug", "(", "_", "(", "'set_port_status as %s called'", ")", ",", "status", ")", "session", "=", "db", ".", "get_session", "(", ")", "try", ":", "port", "=", "session", ".",...
set the port status .
train
false
2,983
def _register_functions():
    """Iterate through the exposed testinfra modules and publish each one
    as a snake_case module-level function with a generated docstring."""
    for module_ in modules.__all__:
        mod_name = _to_snake_case(module_)
        mod_func = _copy_function(mod_name, str(mod_name))
        mod_func.__doc__ = _build_doc(module_)
        # Expose the wrapper both via __all__ and the module namespace.
        __all__.append(mod_name)
        globals()[mod_name] = mod_func
[ "def", "_register_functions", "(", ")", ":", "for", "module_", "in", "modules", ".", "__all__", ":", "mod_name", "=", "_to_snake_case", "(", "module_", ")", "mod_func", "=", "_copy_function", "(", "mod_name", ",", "str", "(", "mod_name", ")", ")", "mod_func"...
iterate through the exposed testinfra modules .
train
false
2,985
def py(sfn, string=False, **kwargs):
    """Render a Python template file by importing it and calling its run().

    The module is loaded from *sfn* and seeded with the salt dunders
    (__salt__, __grains__, __pillar__, __opts__, __env__) plus every other
    keyword argument.  When *string* is true the rendered data is returned
    directly; otherwise it is written to a temp file whose path is
    returned.

    Returns:
        dict with a 'result' flag and 'data' (rendered data, a temp file
        path, or a traceback string on failure); {} when *sfn* is not a
        file.
    """
    if (not os.path.isfile(sfn)):
        return {}
    mod = imp.load_source(os.path.basename(sfn).split('.')[0], sfn)
    if (('__env__' not in kwargs) and ('saltenv' in kwargs)):
        setattr(mod, '__env__', kwargs['saltenv'])
    builtins = ['salt', 'grains', 'pillar', 'opts']
    for builtin in builtins:
        # Expose each as a __dunder__ on the template module.
        arg = '__{0}__'.format(builtin)
        setattr(mod, arg, kwargs[builtin])
    for kwarg in kwargs:
        setattr(mod, kwarg, kwargs[kwarg])
    try:
        data = mod.run()
        if string:
            return {'result': True, 'data': data}
        tgt = salt.utils.files.mkstemp()
        with salt.utils.fopen(tgt, 'w+') as target:
            target.write(data)
        return {'result': True, 'data': tgt}
    except Exception:
        # Return the traceback so the caller can surface the render error.
        trb = traceback.format_exc()
        return {'result': False, 'data': trb}
[ "def", "py", "(", "sfn", ",", "string", "=", "False", ",", "**", "kwargs", ")", ":", "if", "(", "not", "os", ".", "path", ".", "isfile", "(", "sfn", ")", ")", ":", "return", "{", "}", "mod", "=", "imp", ".", "load_source", "(", "os", ".", "pa...
evaluate a python expression .
train
false
2,986
@with_setup(step_runner_environ)
def test_failing_behave_as_step_raises_assertion():
    """When a step definition calls a failing step via behave_as, the
    failure must propagate as an AssertionError from Step.run."""
    runnable_step = Step.from_string('Given I have a step which calls the "other step fails" step with behave_as')
    assert_raises(AssertionError, runnable_step.run, True)
[ "@", "with_setup", "(", "step_runner_environ", ")", "def", "test_failing_behave_as_step_raises_assertion", "(", ")", ":", "runnable_step", "=", "Step", ".", "from_string", "(", "'Given I have a step which calls the \"other step fails\" step with behave_as'", ")", "assert_raises",...
when a step definition calls another step definition with behave_as .
train
false
2,987
def _minolta2float(inVal): arr = numpy.asarray(inVal) if (arr.shape == ()): if (inVal < 50000): return (inVal / 10000.0) else: return (((- inVal) + 50000.0) / 10000.0) negs = (arr > 50000) out = (arr / 10000.0) out[negs] = (((- arr[negs]) + 50000.0) / 10000.0) return out
[ "def", "_minolta2float", "(", "inVal", ")", ":", "arr", "=", "numpy", ".", "asarray", "(", "inVal", ")", "if", "(", "arr", ".", "shape", "==", "(", ")", ")", ":", "if", "(", "inVal", "<", "50000", ")", ":", "return", "(", "inVal", "/", "10000.0",...
takes a number .
train
false
2,988
def _find_lineage_for_domains(config, domains):
    """Determine whether existing certificates duplicate *domains* and how
    to handle them.

    Returns an (action, lineage) pair: a fresh cert when duplication is
    explicitly requested or nothing matches; otherwise defers to the
    identical-names or subset-names handlers.
    """
    if config.duplicate:
        return ('newcert', None)
    ident_names_cert, subset_names_cert = cert_manager.find_duplicative_certs(config, domains)
    if ident_names_cert is not None:
        return _handle_identical_cert_request(config, ident_names_cert)
    if subset_names_cert is not None:
        return _handle_subset_cert_request(config, domains, subset_names_cert)
    # Neither an identical nor a subset match: issue a new certificate.
    return ('newcert', None)
[ "def", "_find_lineage_for_domains", "(", "config", ",", "domains", ")", ":", "if", "config", ".", "duplicate", ":", "return", "(", "'newcert'", ",", "None", ")", "(", "ident_names_cert", ",", "subset_names_cert", ")", "=", "cert_manager", ".", "find_duplicative_...
determine whether there are duplicated names and how to handle them .
train
false
2,990
def fbeta_score(y_real, y_pred, beta):
    """Compute the averaged F-beta score (the weighted harmonic mean of
    precision and recall) over all classes."""
    per_class_f = precision_recall_fscore(y_real, y_pred, beta=beta)[2]
    return np.average(per_class_f)
[ "def", "fbeta_score", "(", "y_real", ",", "y_pred", ",", "beta", ")", ":", "(", "_", ",", "_", ",", "f", ")", "=", "precision_recall_fscore", "(", "y_real", ",", "y_pred", ",", "beta", "=", "beta", ")", "return", "np", ".", "average", "(", "f", ")"...
compute the f-beta score the f-beta score is the weighted harmonic mean of precision and recall .
train
false
2,991
def create_tmp_dir(function):
    """Decorator that provisions a temporary directory for the wrapped call.

    The path is injected as the ``tmp_dir_path`` keyword argument and the
    directory is removed again when the call finishes, even on error.
    """
    @functools.wraps(function)
    def wrapper(*args, **kwargs):
        tmp_path = tempfile.mkdtemp()
        kwargs['tmp_dir_path'] = tmp_path
        try:
            return function(*args, **kwargs)
        finally:
            # Best-effort cleanup of the scratch directory.
            utils.execute('rm', '-rf', tmp_path)
    return wrapper
[ "def", "create_tmp_dir", "(", "function", ")", ":", "@", "functools", ".", "wraps", "(", "function", ")", "def", "decorated_function", "(", "*", "args", ",", "**", "kwargs", ")", ":", "tmp_dir_path", "=", "tempfile", ".", "mkdtemp", "(", ")", "kwargs", "...
creates temporary directory for rsync purposes .
train
false
2,992
@login_required
@require_POST
def join_contributors(request):
    """Add the current user to the Contributors group, then redirect.

    The redirect target comes from the request's next-URL, falling back to
    the home page.
    """
    next = (get_next_url(request) or reverse('home'))
    group = Group.objects.get(name='Contributors')
    request.user.groups.add(group)
    messages.add_message(request, messages.SUCCESS, _('You are now part of the Contributors group!'))
    return HttpResponseRedirect(next)
[ "@", "login_required", "@", "require_POST", "def", "join_contributors", "(", "request", ")", ":", "next", "=", "(", "get_next_url", "(", "request", ")", "or", "reverse", "(", "'home'", ")", ")", "group", "=", "Group", ".", "objects", ".", "get", "(", "na...
join the contributors group .
train
false
2,994
def test_local_zero_div():
    """Tests that the optimizer rewrites 0/x -> 0 for both int and true
    division, across scalar and tensor types."""
    for t in (T.scalar, T.ivector, T.ftensor4):
        x = t('x')
        for op in (T.int_div, T.true_div):
            y = op(0, x)
            g = optimize(FunctionGraph([x], [y]))
            # After optimization no elemwise node with this division
            # scalar op may remain in the graph.
            divs = [node for node in g.toposort() if (isinstance(node.op, T.elemwise.Elemwise) and isinstance(node.op.scalar_op, type(op.scalar_op)))]
            assert (len(divs) == 0)
            output = g.outputs[0]
            # The replacement must preserve ndim/type and be the constant 0.
            assert (output.ndim == y.ndim)
            assert (output.type == y.type)
            assert (theano.tensor.get_scalar_constant_value(output) == 0)
[ "def", "test_local_zero_div", "(", ")", ":", "for", "t", "in", "(", "T", ".", "scalar", ",", "T", ".", "ivector", ",", "T", ".", "ftensor4", ")", ":", "x", "=", "t", "(", "'x'", ")", "for", "op", "in", "(", "T", ".", "int_div", ",", "T", ".",...
tests 0/x -> 0 .
train
false
2,996
def getHelixComplexPath(derivation, elementNode):
    """Set the gear helix path for the given derivation.

    Supports 'basic', 'herringbone' and 'parabolic' helix types (matched
    on the first letter); prints a warning and returns None for anything
    else.  (Python 2 print statements — legacy Skeinforge code.)
    """
    helixTypeFirstCharacter = derivation.helixType.lower()[:1]
    if (helixTypeFirstCharacter == 'b'):
        # Basic: straight ramp from the origin to (1, 1).
        return [complex(), complex(1.0, 1.0)]
    if (helixTypeFirstCharacter == 'h'):
        # Herringbone: up to the midpoint, then back down.
        return [complex(), complex(0.5, 0.5), complex(1.0, 0.0)]
    if (helixTypeFirstCharacter == 'p'):
        # Parabolic: twist follows 0.25 - (0.5 - x)^2, sampled per layer.
        helixComplexPath = []
        x = 0.0
        xStep = (setting.getLayerThickness(elementNode) / derivation.faceWidth)
        justBelowOne = (1.0 - (0.5 * xStep))
        while (x < justBelowOne):
            distanceFromCenter = (0.5 - x)
            parabolicTwist = (0.25 - (distanceFromCenter * distanceFromCenter))
            helixComplexPath.append(complex(x, parabolicTwist))
            x += xStep
        helixComplexPath.append(complex(1.0, 0.0))
        return helixComplexPath
    print 'Warning, the helix type was not one of (basic, herringbone or parabolic) in getHelixComplexPath in gear for:'
    print derivation.helixType
    print derivation.elementNode
[ "def", "getHelixComplexPath", "(", "derivation", ",", "elementNode", ")", ":", "helixTypeFirstCharacter", "=", "derivation", ".", "helixType", ".", "lower", "(", ")", "[", ":", "1", "]", "if", "(", "helixTypeFirstCharacter", "==", "'b'", ")", ":", "return", ...
set gear helix path .
train
false
2,997
@app.route('/libtoggle', methods=['POST'])
def review():
    """Toggle a paper in the logged-in user's library.

    Returns 'ON' when the paper was added, 'OFF' when removed, and 'NO'
    when the request is invalid or the user is not logged in.  (Python 2
    print statement — legacy code.)
    """
    if (not g.user):
        return 'NO'
    idvv = request.form['pid']
    if (not isvalidid(idvv)):
        return 'NO'
    pid = strip_version(idvv)
    if (not (pid in db)):
        return 'NO'
    uid = session['user_id']
    record = query_db('select * from library where\n user_id = ? and paper_id = ?', [uid, pid], one=True)
    print record
    ret = 'NO'
    if record:
        # Already in the library: remove it.
        g.db.execute('delete from library where user_id = ? and paper_id = ?', [uid, pid])
        g.db.commit()
        ret = 'OFF'
    else:
        # Not present: insert it with the current timestamp.
        rawpid = strip_version(pid)
        g.db.execute('insert into library (paper_id, user_id, update_time) values (?, ?, ?)', [rawpid, uid, int(time.time())])
        g.db.commit()
        ret = 'ON'
    return ret
[ "@", "app", ".", "route", "(", "'/libtoggle'", ",", "methods", "=", "[", "'POST'", "]", ")", "def", "review", "(", ")", ":", "if", "(", "not", "g", ".", "user", ")", ":", "return", "'NO'", "idvv", "=", "request", ".", "form", "[", "'pid'", "]", ...
user wants to toggle a paper in his library .
train
false
2,998
def requirements_list_from_file(requirements_file, dependency_links):
    """Parse a pip requirements file into a list of requirement strings.

    '--find-links' lines are appended to *dependency_links* instead of the
    result list, and requirements whose environment marker evaluates false
    are skipped.  NOTE(review): the '/srv/buildslave' HOME check tolerates
    marker parse errors only on the build slaves — confirm this special
    case is still wanted.
    """
    requirements = []
    with open(requirements_file) as f:
        for line in f:
            line = line.rstrip()
            if line.startswith('#'):
                continue
            elif line.startswith('--find-links'):
                link = line.split(None, 1)[1]
                dependency_links.append(link)
            else:
                parsed_requirements = parse_requirements(line)
                try:
                    (req,) = list(parsed_requirements)
                except RequirementParseError as original_error:
                    message = unicode(original_error)
                    if (environ['HOME'] != '/srv/buildslave'):
                        raise
                    if (not message.startswith('Expected version spec in ')):
                        raise
                    if (';' not in line):
                        raise
                    # Old setuptools on the buildslave cannot parse
                    # environment markers: skip such lines there.
                    continue
                if (getattr(req, 'marker', None) and (not req.marker.evaluate())):
                    # Marker does not apply to this environment: skip.
                    continue
                requirements.append(unicode(req))
    return requirements
[ "def", "requirements_list_from_file", "(", "requirements_file", ",", "dependency_links", ")", ":", "requirements", "=", "[", "]", "with", "open", "(", "requirements_file", ")", "as", "f", ":", "for", "line", "in", "f", ":", "line", "=", "line", ".", "rstrip"...
parse a requirements file .
train
false
2,999
def walk_with_symlinks(top, func, arg):
    """Like the old os.path.walk, calling func(arg, dirname, names) for
    each directory, and recursing into symlinked directories as well
    (os.path.isdir follows links)."""
    try:
        entries = os.listdir(top)
    except os.error:
        # Unreadable directory: silently skip it.
        return
    func(arg, top, entries)
    for entry in entries:
        if entry in ('.', '..'):
            continue
        child = os.path.join(top, entry)
        if os.path.isdir(child):
            walk_with_symlinks(child, func, arg)
[ "def", "walk_with_symlinks", "(", "top", ",", "func", ",", "arg", ")", ":", "try", ":", "names", "=", "os", ".", "listdir", "(", "top", ")", "except", "os", ".", "error", ":", "return", "func", "(", "arg", ",", "top", ",", "names", ")", "exceptions...
like os.path.walk, but follows symbolic links when recursing into directories .
train
false
3,000
def get_trigger_db_by_ref(ref):
    """Return the Trigger DB object for a string ref, or None when absent."""
    try:
        return Trigger.get_by_ref(ref)
    except StackStormDBObjectNotFoundError as e:
        # Missing triggers are expected; log at debug and signal with None.
        LOG.debug('Database lookup for ref="%s" resulted in exception : %s.', ref, e, exc_info=True)
        return None
[ "def", "get_trigger_db_by_ref", "(", "ref", ")", ":", "try", ":", "return", "Trigger", ".", "get_by_ref", "(", "ref", ")", "except", "StackStormDBObjectNotFoundError", "as", "e", ":", "LOG", ".", "debug", "(", "(", "'Database lookup for ref=\"%s\" resulted '", "+"...
returns the trigger object from db given a string ref .
train
false
3,002
def get_method(java_object, method_name):
    """Retrieve a reference to the named method of a Java object."""
    target_id = java_object._target_id
    gateway_client = java_object._gateway_client
    return JavaMember(method_name, java_object, target_id, gateway_client)
[ "def", "get_method", "(", "java_object", ",", "method_name", ")", ":", "return", "JavaMember", "(", "method_name", ",", "java_object", ",", "java_object", ".", "_target_id", ",", "java_object", ".", "_gateway_client", ")" ]
retrieves a reference to the method of an object .
train
false
3,003
def get_instance_type_by_name(name, ctxt=None):
    """Retrieve a single instance type by name.

    Falls back to the default instance type when *name* is None, and to an
    admin context when no context is supplied.
    """
    if name is None:
        return get_default_instance_type()
    effective_ctxt = ctxt if ctxt is not None else context.get_admin_context()
    return db.instance_type_get_by_name(effective_ctxt, name)
[ "def", "get_instance_type_by_name", "(", "name", ",", "ctxt", "=", "None", ")", ":", "if", "(", "name", "is", "None", ")", ":", "return", "get_default_instance_type", "(", ")", "if", "(", "ctxt", "is", "None", ")", ":", "ctxt", "=", "context", ".", "ge...
retrieves single instance type by name .
train
false
3,004
def ParseResponse(response, *args, **kwds):
    """Parse an HTTP response and return a list of HTMLForm instances.

    The leading entry (the global form) produced by _ParseFileEx is
    dropped before returning.
    """
    parsed = _ParseFileEx(response, response.geturl(), *args, **kwds)
    return parsed[1:]
[ "def", "ParseResponse", "(", "response", ",", "*", "args", ",", "**", "kwds", ")", ":", "return", "_ParseFileEx", "(", "response", ",", "response", ".", "geturl", "(", ")", ",", "*", "args", ",", "**", "kwds", ")", "[", "1", ":", "]" ]
parse http response and return a list of htmlform instances .
train
false
3,005
def jaccard(vec1, vec2):
    """Jaccard distance (1 - similarity) between two vectors.

    Bag-of-words inputs (lists of (id, weight) pairs) get a weighted
    Jaccard: sum of element-wise minima over the sum of all weights.
    Anything else is compared as plain sets.  Sparse matrices are
    densified first.
    """
    if scipy.sparse.issparse(vec1):
        vec1 = vec1.toarray()
    if scipy.sparse.issparse(vec2):
        vec2 = vec2.toarray()
    if (isbow(vec1) and isbow(vec2)):
        # Weighted variant over (id, weight) pairs.
        union = (sum((weight for (id_, weight) in vec1)) + sum((weight for (id_, weight) in vec2)))
        (vec1, vec2) = (dict(vec1), dict(vec2))
        intersection = 0.0
        for (feature_id, feature_weight) in iteritems(vec1):
            intersection += min(feature_weight, vec2.get(feature_id, 0.0))
        return (1 - (float(intersection) / float(union)))
    else:
        if isinstance(vec1, np.ndarray):
            vec1 = vec1.tolist()
        if isinstance(vec2, np.ndarray):
            vec2 = vec2.tolist()
        # Unweighted set-based Jaccard distance.
        vec1 = set(vec1)
        vec2 = set(vec2)
        intersection = (vec1 & vec2)
        union = (vec1 | vec2)
        return (1 - (float(len(intersection)) / float(len(union))))
[ "def", "jaccard", "(", "vec1", ",", "vec2", ")", ":", "if", "scipy", ".", "sparse", ".", "issparse", "(", "vec1", ")", ":", "vec1", "=", "vec1", ".", "toarray", "(", ")", "if", "scipy", ".", "sparse", ".", "issparse", "(", "vec2", ")", ":", "vec2...
this is a sort of weighted jaccard index .
train
false
3,006
def run_coroutine_threadsafe(coro, loop):
    """Submit a coroutine object to a given event loop from another thread.

    Returns:
        A concurrent.futures.Future tied to the coroutine's outcome.

    Raises:
        RuntimeError: when called from the event loop's own thread.
        TypeError: when *coro* is not a coroutine object.
    """
    ident = loop.__dict__.get('_thread_ident')
    if ((ident is not None) and (ident == threading.get_ident())):
        # Scheduling onto the loop from inside the loop would deadlock.
        raise RuntimeError('Cannot be called from within the event loop')
    if (not coroutines.iscoroutine(coro)):
        raise TypeError('A coroutine object is required')
    future = concurrent.futures.Future()

    def callback():
        'Callback to call the coroutine.'
        try:
            _chain_future(ensure_future(coro, loop=loop), future)
        except Exception as exc:
            if future.set_running_or_notify_cancel():
                future.set_exception(exc)
            else:
                # The caller already cancelled the future: just log.
                _LOGGER.warning('Exception on lost future: ', exc_info=True)
    loop.call_soon_threadsafe(callback)
    return future
[ "def", "run_coroutine_threadsafe", "(", "coro", ",", "loop", ")", ":", "ident", "=", "loop", ".", "__dict__", ".", "get", "(", "'_thread_ident'", ")", "if", "(", "(", "ident", "is", "not", "None", ")", "and", "(", "ident", "==", "threading", ".", "get_...
submit a coroutine object to a given event loop .
train
false
3,007
@gen.coroutine
def UpdateEpisode(client, obj_store, user_id, device_id, request):
    """Updates episode metadata.

    Verifies the activity id, repackages the request for the operation
    framework, and executes UpdateEpisodeOperation.
    """
    (yield Activity.VerifyActivityId(client, user_id, device_id, request['activity']['activity_id']))
    headers = request.pop('headers')
    activity = request.pop('activity')
    # Re-nest the remaining fields under 'episode' for the operation payload.
    request = {'headers': headers, 'user_id': user_id, 'activity': activity, 'episode': request}
    (yield gen.Task(Operation.CreateAndExecute, client, user_id, device_id, 'UpdateEpisodeOperation.Execute', request))
    logging.info(('UPDATE EPISODE: user: %d, device: %d, episode: %s' % (user_id, device_id, request['episode']['episode_id'])))
    raise gen.Return({})
[ "@", "gen", ".", "coroutine", "def", "UpdateEpisode", "(", "client", ",", "obj_store", ",", "user_id", ",", "device_id", ",", "request", ")", ":", "(", "yield", "Activity", ".", "VerifyActivityId", "(", "client", ",", "user_id", ",", "device_id", ",", "req...
updates episode metadata .
train
false
3,008
def parse_one_rule_from_dict(rule):
    """Parse a single RedactionRule from a dict with the keys
    description / trigger / caseSensitive / search / replace.

    Raises:
        ValueError: when a required field is missing or unknown fields
        remain after parsing.
    """
    rule.pop('description', None)
    trigger = rule.pop('trigger', None)
    case_sensitive = rule.pop('caseSensitive', True)
    # JSON-style string booleans are normalized to real booleans.
    if case_sensitive == 'false':
        case_sensitive = False
    elif case_sensitive == 'true':
        case_sensitive = True
    missing_msg = 'Redaction rule is missing `%s` field'
    try:
        search = rule.pop('search')
    except KeyError:
        raise ValueError(missing_msg % 'search')
    try:
        replace = rule.pop('replace')
    except KeyError:
        raise ValueError(missing_msg % 'replace')
    if rule:
        raise ValueError('Redaction rule contains unknown field(s): %s' % rule.keys())
    return RedactionRule(trigger, search, replace, case_sensitive)
[ "def", "parse_one_rule_from_dict", "(", "rule", ")", ":", "rule", ".", "pop", "(", "'description'", ",", "None", ")", "trigger", "=", "rule", ".", "pop", "(", "'trigger'", ",", "None", ")", "case_sensitive", "=", "rule", ".", "pop", "(", "'caseSensitive'",...
parse_one_rule_from_dict parses a redactionrule from a dictionary like: "description": "this is the first rule" .
train
false
3,010
def _AddHasFieldMethod(message_descriptor, cls):
    """Helper for _AddMessageMethods(): attach a HasField method to *cls*.

    HasField is only defined for singular (non-repeated) fields; message
    fields count as present when set in their parent, scalar fields when a
    value has been recorded.
    """
    singular_fields = {}
    for field in message_descriptor.fields:
        if (field.label != _FieldDescriptor.LABEL_REPEATED):
            singular_fields[field.name] = field

    def HasField(self, field_name):
        try:
            field = singular_fields[field_name]
        except KeyError:
            raise ValueError(('Protocol message has no singular "%s" field.' % field_name))
        if (field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE):
            # Submessage: present only if set in its parent.
            value = self._fields.get(field)
            return ((value is not None) and value._is_present_in_parent)
        else:
            return (field in self._fields)
    cls.HasField = HasField
[ "def", "_AddHasFieldMethod", "(", "message_descriptor", ",", "cls", ")", ":", "singular_fields", "=", "{", "}", "for", "field", "in", "message_descriptor", ".", "fields", ":", "if", "(", "field", ".", "label", "!=", "_FieldDescriptor", ".", "LABEL_REPEATED", "...
helper for _addmessagemethods() .
train
false
3,011
def has_vowel(w):
    """Return True if any character of *w* is in VOWELS."""
    return any(ch in VOWELS for ch in w)
[ "def", "has_vowel", "(", "w", ")", ":", "for", "ch", "in", "w", ":", "if", "(", "ch", "in", "VOWELS", ")", ":", "return", "True", "return", "False" ]
returns true if there is a vowel in the given string .
train
false
3,012
def libvlc_get_compiler():
    """Retrieve the compiler used to build libvlc.

    Lazily resolves (and reuses a cached binding for) the
    ``libvlc_get_compiler`` C entry point: no arguments, returns a C
    string.
    """
    f = (_Cfunctions.get('libvlc_get_compiler', None) or _Cfunction('libvlc_get_compiler', (), None, ctypes.c_char_p))
    return f()
[ "def", "libvlc_get_compiler", "(", ")", ":", "f", "=", "(", "_Cfunctions", ".", "get", "(", "'libvlc_get_compiler'", ",", "None", ")", "or", "_Cfunction", "(", "'libvlc_get_compiler'", ",", "(", ")", ",", "None", ",", "ctypes", ".", "c_char_p", ")", ")", ...
retrieve libvlc compiler version .
train
false
3,013
@webob.dec.wsgify @util.check_accept('application/json') def get_inventories(req): context = req.environ['placement.context'] uuid = util.wsgi_path_item(req.environ, 'uuid') try: resource_provider = objects.ResourceProvider.get_by_uuid(context, uuid) except exception.NotFound as exc: raise webob.exc.HTTPNotFound((_('No resource provider with uuid %(uuid)s found : %(error)s') % {'uuid': uuid, 'error': exc}), json_formatter=util.json_error_formatter) inventories = objects.InventoryList.get_all_by_resource_provider_uuid(context, resource_provider.uuid) return _send_inventories(req.response, resource_provider, inventories)
[ "@", "webob", ".", "dec", ".", "wsgify", "@", "util", ".", "check_accept", "(", "'application/json'", ")", "def", "get_inventories", "(", "req", ")", ":", "context", "=", "req", ".", "environ", "[", "'placement.context'", "]", "uuid", "=", "util", ".", "...
get a list of inventories .
train
false
3,014
def create_appscale_user(password, uaserver): does_user_exist = uaserver.does_user_exist(hermes_constants.USER_EMAIL, appscale_info.get_secret()) if (does_user_exist == 'true'): logging.debug('User {0} already exists, so not creating it again.'.format(hermes_constants.USER_EMAIL)) return True elif (uaserver.commit_new_user(hermes_constants.USER_EMAIL, password, hermes_constants.ACCOUNT_TYPE, appscale_info.get_secret()) == 'true'): return True else: logging.error('Error while creating an Appscale user.') return False
[ "def", "create_appscale_user", "(", "password", ",", "uaserver", ")", ":", "does_user_exist", "=", "uaserver", ".", "does_user_exist", "(", "hermes_constants", ".", "USER_EMAIL", ",", "appscale_info", ".", "get_secret", "(", ")", ")", "if", "(", "does_user_exist",...
creates the user account with the email address and password provided .
train
false
3,015
def get_main_running_hub(): hubs = get_running_hubs() if (not hubs): raise SAMPHubError(u'Unable to find a running SAMP Hub.') if (u'SAMP_HUB' in os.environ): if os.environ[u'SAMP_HUB'].startswith(u'std-lockurl:'): lockfilename = os.environ[u'SAMP_HUB'][len(u'std-lockurl:'):] else: raise SAMPHubError(u'SAMP Hub profile not supported.') else: lockfilename = os.path.join(_find_home(), u'.samp') return hubs[lockfilename]
[ "def", "get_main_running_hub", "(", ")", ":", "hubs", "=", "get_running_hubs", "(", ")", "if", "(", "not", "hubs", ")", ":", "raise", "SAMPHubError", "(", "u'Unable to find a running SAMP Hub.'", ")", "if", "(", "u'SAMP_HUB'", "in", "os", ".", "environ", ")", ...
get either the hub given by the environment variable samp_hub .
train
false
3,016
def randperm(n): r = range(n) x = [] while r: i = random.choice(r) x.append(i) r.remove(i) return x
[ "def", "randperm", "(", "n", ")", ":", "r", "=", "range", "(", "n", ")", "x", "=", "[", "]", "while", "r", ":", "i", "=", "random", ".", "choice", "(", "r", ")", "x", ".", "append", "(", "i", ")", "r", ".", "remove", "(", "i", ")", "retur...
function returning a random permutation of range(n) .
train
false
3,018
def fetch_token_mock(*args, **kwargs): return
[ "def", "fetch_token_mock", "(", "*", "args", ",", "**", "kwargs", ")", ":", "return" ]
mock function used to bypass the oauth fetch token .
train
false
3,020
def require_valid_sender(handler): def test_user(self, **kwargs): 'Checks if the user is logged in and is authorized sender.' if (not self.user_id): self.redirect(current_user_services.create_login_url(self.request.uri)) return if (self.username not in config_domain.WHITELISTED_EMAIL_SENDERS.value): raise self.UnauthorizedUserException('%s is not an authorized user of this application', self.user_id) return handler(self, **kwargs) return test_user
[ "def", "require_valid_sender", "(", "handler", ")", ":", "def", "test_user", "(", "self", ",", "**", "kwargs", ")", ":", "if", "(", "not", "self", ".", "user_id", ")", ":", "self", ".", "redirect", "(", "current_user_services", ".", "create_login_url", "("...
decorator that checks if the current user is a authorized sender .
train
false
3,022
def _htmlescape(s, quote=False, whitespace=False): s = s.replace('&', '&amp;') s = s.replace('<', '&lt;') s = s.replace('>', '&gt;') if quote: s = s.replace('"', '&quot;') if whitespace: s = s.replace(' ', '&nbsp;') s = re.sub('(\\r\\n|\\r|\\n)', '<br />\n', s) return s
[ "def", "_htmlescape", "(", "s", ",", "quote", "=", "False", ",", "whitespace", "=", "False", ")", ":", "s", "=", "s", ".", "replace", "(", "'&'", ",", "'&amp;'", ")", "s", "=", "s", ".", "replace", "(", "'<'", ",", "'&lt;'", ")", "s", "=", "s",...
replace special characters & .
train
false
3,023
def describeObj(obj, depth=4, path=None, ignore=None): if (path is None): path = [obj] if (ignore is None): ignore = {} ignore[id(sys._getframe())] = None ignore[id(path)] = None gc.collect() refs = gc.get_referrers(obj) ignore[id(refs)] = None printed = False for ref in refs: if (id(ref) in ignore): continue if (id(ref) in list(map(id, path))): print(('Cyclic reference: ' + refPathString(([ref] + path)))) printed = True continue newPath = ([ref] + path) if (len(newPath) >= depth): refStr = refPathString(newPath) if ('[_]' not in refStr): print(refStr) printed = True else: describeObj(ref, depth, newPath, ignore) printed = True if (not printed): print(('Dead end: ' + refPathString(path)))
[ "def", "describeObj", "(", "obj", ",", "depth", "=", "4", ",", "path", "=", "None", ",", "ignore", "=", "None", ")", ":", "if", "(", "path", "is", "None", ")", ":", "path", "=", "[", "obj", "]", "if", "(", "ignore", "is", "None", ")", ":", "i...
trace all reference paths backward .
train
false
3,024
@utils.memoize def get_pkgfile(command): try: command = command.strip() if command.startswith('sudo '): command = command[5:] command = command.split(' ')[0] packages = subprocess.check_output(['pkgfile', '-b', '-v', command], universal_newlines=True, stderr=utils.DEVNULL).splitlines() return [package.split()[0] for package in packages] except subprocess.CalledProcessError: return None
[ "@", "utils", ".", "memoize", "def", "get_pkgfile", "(", "command", ")", ":", "try", ":", "command", "=", "command", ".", "strip", "(", ")", "if", "command", ".", "startswith", "(", "'sudo '", ")", ":", "command", "=", "command", "[", "5", ":", "]", ...
gets the packages that provide the given command using pkgfile .
train
true
3,028
def _blkid_output(out): flt = (lambda data: [el for el in data if el.strip()]) data = {} for dev_meta in flt(out.split('\n\n')): dev = {} for items in flt(dev_meta.strip().split('\n')): (key, val) = items.split('=', 1) dev[key.lower()] = val if (dev.pop('type') == 'xfs'): dev['label'] = dev.get('label') data[dev.pop('devname')] = dev mounts = _get_mounts() for device in six.iterkeys(mounts): if data.get(device): data[device].update(mounts[device]) return data
[ "def", "_blkid_output", "(", "out", ")", ":", "flt", "=", "(", "lambda", "data", ":", "[", "el", "for", "el", "in", "data", "if", "el", ".", "strip", "(", ")", "]", ")", "data", "=", "{", "}", "for", "dev_meta", "in", "flt", "(", "out", ".", ...
parse blkid output .
train
true
3,029
def _load_params(): global _ANSIBLE_ARGS if (_ANSIBLE_ARGS is not None): buffer = _ANSIBLE_ARGS else: if (len(sys.argv) > 1): if os.path.isfile(sys.argv[1]): fd = open(sys.argv[1], 'rb') buffer = fd.read() fd.close() else: buffer = sys.argv[1] if PY3: buffer = buffer.encode('utf-8', errors='surrogateescape') elif PY2: buffer = sys.stdin.read() else: buffer = sys.stdin.buffer.read() _ANSIBLE_ARGS = buffer try: params = json.loads(buffer.decode('utf-8')) except ValueError: print '\n{"msg": "Error: Module unable to decode valid JSON on stdin. Unable to figure out what parameters were passed", "failed": true}' sys.exit(1) if PY2: params = json_dict_unicode_to_bytes(params) try: return params['ANSIBLE_MODULE_ARGS'] except KeyError: print '\n{"msg": "Error: Module unable to locate ANSIBLE_MODULE_ARGS in json data from stdin. Unable to figure out what parameters were passed", "failed": true}' sys.exit(1)
[ "def", "_load_params", "(", ")", ":", "global", "_ANSIBLE_ARGS", "if", "(", "_ANSIBLE_ARGS", "is", "not", "None", ")", ":", "buffer", "=", "_ANSIBLE_ARGS", "else", ":", "if", "(", "len", "(", "sys", ".", "argv", ")", ">", "1", ")", ":", "if", "os", ...
read the modules parameters and store them globally .
train
false
3,030
def read_int64(fid): return _unpack_simple(fid, '>u8', np.int64)
[ "def", "read_int64", "(", "fid", ")", ":", "return", "_unpack_simple", "(", "fid", ",", "'>u8'", ",", "np", ".", "int64", ")" ]
read 64bit integer from bti file .
train
false
3,032
def get_role_ids(course_id): roles = Role.objects.filter(course_id=course_id).exclude(name=FORUM_ROLE_STUDENT) return dict([(role.name, list(role.users.values_list('id', flat=True))) for role in roles])
[ "def", "get_role_ids", "(", "course_id", ")", ":", "roles", "=", "Role", ".", "objects", ".", "filter", "(", "course_id", "=", "course_id", ")", ".", "exclude", "(", "name", "=", "FORUM_ROLE_STUDENT", ")", "return", "dict", "(", "[", "(", "role", ".", ...
returns a dictionary having role names as keys and a list of users as values .
train
false
3,035
def control_queue_from_config(config): return Queue(('control.%s' % config.server_name), galaxy_exchange, routing_key='control')
[ "def", "control_queue_from_config", "(", "config", ")", ":", "return", "Queue", "(", "(", "'control.%s'", "%", "config", ".", "server_name", ")", ",", "galaxy_exchange", ",", "routing_key", "=", "'control'", ")" ]
returns a queue instance with the correct name and routing key for this galaxy processs config .
train
false
3,037
def load_summary_protos(java_application=False): tmpl = ((config.KEY_PREFIX + config.KEY_TEMPLATE) + config.PART_SUFFIX) if java_application: tmpl = (('"' + tmpl) + '"') keys = [(tmpl % i) for i in range(0, (config.KEY_DISTANCE * config.KEY_MODULUS), config.KEY_DISTANCE)] results = memcache.get_multi(keys, namespace=config.KEY_NAMESPACE) records = [] for rec in results.itervalues(): try: pb = StatsProto(rec) except Exception as err: logging.warn('Bad record: %s', err) else: records.append(pb) logging.info('Loaded %d raw summary records, %d valid', len(results), len(records)) records.sort(key=(lambda pb: (- pb.start_timestamp_milliseconds()))) return records
[ "def", "load_summary_protos", "(", "java_application", "=", "False", ")", ":", "tmpl", "=", "(", "(", "config", ".", "KEY_PREFIX", "+", "config", ".", "KEY_TEMPLATE", ")", "+", "config", ".", "PART_SUFFIX", ")", "if", "java_application", ":", "tmpl", "=", ...
load all valid summary records from memcache .
train
false
3,038
def _is_nested_instance(e, etypes): return (isinstance(e, etypes) or (isinstance(e, exceptions.MultipleExceptions) and any((_is_nested_instance(i, etypes) for i in e.inner_exceptions))))
[ "def", "_is_nested_instance", "(", "e", ",", "etypes", ")", ":", "return", "(", "isinstance", "(", "e", ",", "etypes", ")", "or", "(", "isinstance", "(", "e", ",", "exceptions", ".", "MultipleExceptions", ")", "and", "any", "(", "(", "_is_nested_instance",...
check if exception or its inner excepts are an instance of etypes .
train
false
3,039
def get_category_or_404(path): path_bits = [p for p in path.split('/') if p] return get_object_or_404(Category, slug=path_bits[(-1)])
[ "def", "get_category_or_404", "(", "path", ")", ":", "path_bits", "=", "[", "p", "for", "p", "in", "path", ".", "split", "(", "'/'", ")", "if", "p", "]", "return", "get_object_or_404", "(", "Category", ",", "slug", "=", "path_bits", "[", "(", "-", "1...
retrieve a category instance by a path .
train
true
3,040
def handle_empty_queue(): if (sabnzbd.nzbqueue.NzbQueue.do.actives() == 0): sabnzbd.save_state() logging.info('Queue has finished, launching: %s (%s)', sabnzbd.QUEUECOMPLETEACTION, sabnzbd.QUEUECOMPLETEARG) if sabnzbd.QUEUECOMPLETEARG: sabnzbd.QUEUECOMPLETEACTION(sabnzbd.QUEUECOMPLETEARG) else: Thread(target=sabnzbd.QUEUECOMPLETEACTION).start() sabnzbd.change_queue_complete_action(cfg.queue_complete(), new=False)
[ "def", "handle_empty_queue", "(", ")", ":", "if", "(", "sabnzbd", ".", "nzbqueue", ".", "NzbQueue", ".", "do", ".", "actives", "(", ")", "==", "0", ")", ":", "sabnzbd", ".", "save_state", "(", ")", "logging", ".", "info", "(", "'Queue has finished, launc...
check if empty queue calls for action .
train
false
3,041
def mask_not_printable(contents, mask_re=PRINTABLE): (out, cnt) = mask_re.subn('.', contents) return (cnt, out)
[ "def", "mask_not_printable", "(", "contents", ",", "mask_re", "=", "PRINTABLE", ")", ":", "(", "out", ",", "cnt", ")", "=", "mask_re", ".", "subn", "(", "'.'", ",", "contents", ")", "return", "(", "cnt", ",", "out", ")" ]
replaces non-printable characters with " .
train
false
3,042
def _encode_code(name, value, dummy, opts): cstring = _make_c_string(value) cstrlen = len(cstring) if (value.scope is None): return ((('\r' + name) + _PACK_INT(cstrlen)) + cstring) scope = _dict_to_bson(value.scope, False, opts, False) full_length = _PACK_INT(((8 + cstrlen) + len(scope))) return ((((('\x0f' + name) + full_length) + _PACK_INT(cstrlen)) + cstring) + scope)
[ "def", "_encode_code", "(", "name", ",", "value", ",", "dummy", ",", "opts", ")", ":", "cstring", "=", "_make_c_string", "(", "value", ")", "cstrlen", "=", "len", "(", "cstring", ")", "if", "(", "value", ".", "scope", "is", "None", ")", ":", "return"...
encode bson .
train
true
3,044
def patch_signal(): patch_module('signal')
[ "def", "patch_signal", "(", ")", ":", "patch_module", "(", "'signal'", ")" ]
make the signal .
train
false
3,046
def map_bits(fn, n): while n: b = (n & ((~ n) + 1)) (yield fn(b)) n ^= b
[ "def", "map_bits", "(", "fn", ",", "n", ")", ":", "while", "n", ":", "b", "=", "(", "n", "&", "(", "(", "~", "n", ")", "+", "1", ")", ")", "(", "yield", "fn", "(", "b", ")", ")", "n", "^=", "b" ]
call the given function given each nonzero bit from n .
train
false
3,048
def generate_secret(): import rsa print '[*] Generating secret, please hang on.' (pubkey, privkey) = rsa.newkeys(2048) priv_key_file = open(CONFIG['MobSF']['priv_key'], 'w') priv_key_file.write(privkey.save_pkcs1().decode('utf-8')) priv_key_file.close() pub_key_file = open(CONFIG['MobSF']['pub_key'], 'w') pub_key_file.write(pubkey.save_pkcs1().decode('utf-8')) pub_key_file.close() print '[!] Please move the private key file\n DCTB {}\n DCTB to MobSF to the path specified in settings.py\n DCTB (default: Mobile-Security-Framework-MobSF/MobSF/windows_vm_priv_key.asc)'.format(CONFIG['MobSF']['priv_key']) if (sys.version_info.major == 3): input('Please press any key when done..') elif (sys.version_info.major == 2): raw_input('Please press any key when done..')
[ "def", "generate_secret", "(", ")", ":", "import", "rsa", "print", "'[*] Generating secret, please hang on.'", "(", "pubkey", ",", "privkey", ")", "=", "rsa", ".", "newkeys", "(", "2048", ")", "priv_key_file", "=", "open", "(", "CONFIG", "[", "'MobSF'", "]", ...
make a secret key suitable for use in totp .
train
false
3,049
def _appendToDocstring(thingWithDoc, textToAppend): if thingWithDoc.__doc__: docstringLines = thingWithDoc.__doc__.splitlines() else: docstringLines = [] if (len(docstringLines) == 0): docstringLines.append(textToAppend) elif (len(docstringLines) == 1): docstringLines.extend(['', textToAppend, '']) else: spaces = docstringLines.pop() docstringLines.extend(['', (spaces + textToAppend), spaces]) thingWithDoc.__doc__ = '\n'.join(docstringLines)
[ "def", "_appendToDocstring", "(", "thingWithDoc", ",", "textToAppend", ")", ":", "if", "thingWithDoc", ".", "__doc__", ":", "docstringLines", "=", "thingWithDoc", ".", "__doc__", ".", "splitlines", "(", ")", "else", ":", "docstringLines", "=", "[", "]", "if", ...
append the given text to the docstring of c{thingwithdoc} .
train
false
3,050
def test_commented_scenarios(): scenario = Scenario.from_string(COMMENTED_SCENARIO) assert_equals(scenario.name, u'Adding some students to my university database') assert_equals(len(scenario.steps), 4)
[ "def", "test_commented_scenarios", "(", ")", ":", "scenario", "=", "Scenario", ".", "from_string", "(", "COMMENTED_SCENARIO", ")", "assert_equals", "(", "scenario", ".", "name", ",", "u'Adding some students to my university database'", ")", "assert_equals", "(", "len", ...
a scenario string that contains lines starting with # will be commented .
train
false
3,053
def load_qualified_module(full_module_name, path=None): remaining_pieces = full_module_name.split('.') done_pieces = [] file_obj = None while remaining_pieces: try: done_pieces.append(remaining_pieces.pop(0)) curr_module_name = '.'.join(done_pieces) (file_obj, filename, description) = imp.find_module(done_pieces[(-1)], path) package_module = imp.load_module(curr_module_name, file_obj, filename, description) path = (getattr(package_module, '__path__', None) or [filename]) finally: if file_obj: file_obj.close() return package_module
[ "def", "load_qualified_module", "(", "full_module_name", ",", "path", "=", "None", ")", ":", "remaining_pieces", "=", "full_module_name", ".", "split", "(", "'.'", ")", "done_pieces", "=", "[", "]", "file_obj", "=", "None", "while", "remaining_pieces", ":", "t...
load a module which may be within a package .
train
false
3,054
def default_fused_keys_renamer(keys): typ = type(keys[0]) if ((typ is str) or (typ is unicode)): names = [key_split(x) for x in keys[:0:(-1)]] names.append(keys[0]) return '-'.join(names) elif ((typ is tuple) and (len(keys[0]) > 0) and isinstance(keys[0][0], (str, unicode))): names = [key_split(x) for x in keys[:0:(-1)]] names.append(keys[0][0]) return (('-'.join(names),) + keys[0][1:]) else: return None
[ "def", "default_fused_keys_renamer", "(", "keys", ")", ":", "typ", "=", "type", "(", "keys", "[", "0", "]", ")", "if", "(", "(", "typ", "is", "str", ")", "or", "(", "typ", "is", "unicode", ")", ")", ":", "names", "=", "[", "key_split", "(", "x", ...
create new keys for fused tasks .
train
false
3,055
def createXYs(x, y=None): if (y is None): y = x xs = numpy.resize(x, (len(x) * len(y))) ys = numpy.repeat(y, len(x)) return numpy.vstack([xs, ys]).transpose()
[ "def", "createXYs", "(", "x", ",", "y", "=", "None", ")", ":", "if", "(", "y", "is", "None", ")", ":", "y", "=", "x", "xs", "=", "numpy", ".", "resize", "(", "x", ",", "(", "len", "(", "x", ")", "*", "len", "(", "y", ")", ")", ")", "ys"...
create an nx2 array of xy values including all combinations of the x and y values provided .
train
false
3,057
def polarify(eq, subs=True, lift=False): if lift: subs = False eq = _polarify(sympify(eq), lift) if (not subs): return eq reps = {s: Dummy(s.name, polar=True) for s in eq.free_symbols} eq = eq.subs(reps) return (eq, {r: s for (s, r) in reps.items()})
[ "def", "polarify", "(", "eq", ",", "subs", "=", "True", ",", "lift", "=", "False", ")", ":", "if", "lift", ":", "subs", "=", "False", "eq", "=", "_polarify", "(", "sympify", "(", "eq", ")", ",", "lift", ")", "if", "(", "not", "subs", ")", ":", ...
turn all numbers in eq into their polar equivalents .
train
false
3,060
def _wrap_generator_with_readonly(generator): def wrapper_generator(*args, **kwargs): generator_obj = generator(*args, **kwargs) readonly_connection.connection().set_django_connection() try: first_value = generator_obj.next() finally: readonly_connection.connection().unset_django_connection() (yield first_value) while True: (yield generator_obj.next()) wrapper_generator.__name__ = generator.__name__ return wrapper_generator
[ "def", "_wrap_generator_with_readonly", "(", "generator", ")", ":", "def", "wrapper_generator", "(", "*", "args", ",", "**", "kwargs", ")", ":", "generator_obj", "=", "generator", "(", "*", "args", ",", "**", "kwargs", ")", "readonly_connection", ".", "connect...
we have to wrap generators specially .
train
false
3,061
def clear_persistent_graph_cache(request): request.facebook = None request.session.delete('graph') if request.user.is_authenticated(): profile = get_profile(request.user) profile.clear_access_token()
[ "def", "clear_persistent_graph_cache", "(", "request", ")", ":", "request", ".", "facebook", "=", "None", "request", ".", "session", ".", "delete", "(", "'graph'", ")", "if", "request", ".", "user", ".", "is_authenticated", "(", ")", ":", "profile", "=", "...
clears the caches for the graph cache .
train
false
3,062
@parametrize('varname', [attr for attr in dir(tables) if (not attr.startswith('_'))]) def test_variable_names(varname): table = getattr(tables, varname) try: if ((not issubclass(table, tables.TableBase)) or (table is tables.TableBase)): return except TypeError: return classname = table.__name__ if (classname and varname[0].isupper()): assert (varname == classname), ('%s refers to %s' % (varname, classname))
[ "@", "parametrize", "(", "'varname'", ",", "[", "attr", "for", "attr", "in", "dir", "(", "tables", ")", "if", "(", "not", "attr", ".", "startswith", "(", "'_'", ")", ")", "]", ")", "def", "test_variable_names", "(", "varname", ")", ":", "table", "=",...
we want pokedex .
train
false
3,063
def safeRef(target, onDelete=None): if hasattr(target, '__self__'): if (target.__self__ is not None): assert hasattr(target, '__func__'), ("safeRef target %r has __self__, but no __func__, don't know how to create reference" % (target,)) reference = get_bound_method_weakref(target=target, onDelete=onDelete) return reference if callable(onDelete): return weakref.ref(target, onDelete) else: return weakref.ref(target)
[ "def", "safeRef", "(", "target", ",", "onDelete", "=", "None", ")", ":", "if", "hasattr", "(", "target", ",", "'__self__'", ")", ":", "if", "(", "target", ".", "__self__", "is", "not", "None", ")", ":", "assert", "hasattr", "(", "target", ",", "'__fu...
return a *safe* weak reference to a callable target target -- the object to be weakly referenced .
train
false
3,064
def getNewRepository(): return ExportRepository()
[ "def", "getNewRepository", "(", ")", ":", "return", "ExportRepository", "(", ")" ]
get the repository constructor .
train
false
3,066
def _write_defined_names(workbook, names): for named_range in workbook.get_named_ranges(): attrs = dict(named_range) if (named_range.scope is not None): attrs['localSheetId'] = safe_string(named_range.scope) name = Element(('{%s}definedName' % SHEET_MAIN_NS), attrs) name.text = named_range.value names.append(name)
[ "def", "_write_defined_names", "(", "workbook", ",", "names", ")", ":", "for", "named_range", "in", "workbook", ".", "get_named_ranges", "(", ")", ":", "attrs", "=", "dict", "(", "named_range", ")", "if", "(", "named_range", ".", "scope", "is", "not", "Non...
append definedname elements to the definednames node .
train
false
3,067
def cr_update_capacity_from_housing_units(shelter_id): db = current.db stable = db.cr_shelter htable = db.cr_shelter_unit query = (((htable.shelter_id == shelter_id) & (htable.status == 1)) & (htable.deleted != True)) total_capacity_day = htable.capacity_day.sum() total_capacity_night = htable.capacity_night.sum() row = db(query).select(total_capacity_day, total_capacity_night).first() if row: total_capacity_day = row[total_capacity_day] total_capacity_night = row[total_capacity_night] else: total_capacity_day = total_capacity_night = 0 db((stable._id == shelter_id)).update(capacity_day=total_capacity_day, capacity_night=total_capacity_night)
[ "def", "cr_update_capacity_from_housing_units", "(", "shelter_id", ")", ":", "db", "=", "current", ".", "db", "stable", "=", "db", ".", "cr_shelter", "htable", "=", "db", ".", "cr_shelter_unit", "query", "=", "(", "(", "(", "htable", ".", "shelter_id", "==",...
update shelter capacity numbers .
train
false
3,068
def npath(path): if ((not six.PY3) and (not isinstance(path, bytes))): return path.encode(fs_encoding) return path
[ "def", "npath", "(", "path", ")", ":", "if", "(", "(", "not", "six", ".", "PY3", ")", "and", "(", "not", "isinstance", "(", "path", ",", "bytes", ")", ")", ")", ":", "return", "path", ".", "encode", "(", "fs_encoding", ")", "return", "path" ]
always return a native path .
train
false
3,069
def CheckRedundantOverrideOrFinal(filename, clean_lines, linenum, error): line = clean_lines.elided[linenum] declarator_end = line.rfind(')') if (declarator_end >= 0): fragment = line[declarator_end:] elif ((linenum > 1) and (clean_lines.elided[(linenum - 1)].rfind(')') >= 0)): fragment = line else: return if (Search('\\boverride\\b', fragment) and Search('\\bfinal\\b', fragment)): error(filename, linenum, 'readability/inheritance', 4, '"override" is redundant since function is already declared as "final"')
[ "def", "CheckRedundantOverrideOrFinal", "(", "filename", ",", "clean_lines", ",", "linenum", ",", "error", ")", ":", "line", "=", "clean_lines", ".", "elided", "[", "linenum", "]", "declarator_end", "=", "line", ".", "rfind", "(", "')'", ")", "if", "(", "d...
check if line contains a redundant "override" or "final" virt-specifier .
train
true
3,070
def maybe_shift_divisions(df, periods, freq): if isinstance(freq, str): freq = pd.tseries.frequencies.to_offset(freq) if (isinstance(freq, pd.DateOffset) and (freq.isAnchored() or (not hasattr(freq, 'delta')))): return df.clear_divisions() if df.known_divisions: divs = pd.Series(range(len(df.divisions)), index=df.divisions) divisions = divs.shift(periods, freq=freq).index return type(df)(df.dask, df._name, df._meta, divisions) return df
[ "def", "maybe_shift_divisions", "(", "df", ",", "periods", ",", "freq", ")", ":", "if", "isinstance", "(", "freq", ",", "str", ")", ":", "freq", "=", "pd", ".", "tseries", ".", "frequencies", ".", "to_offset", "(", "freq", ")", "if", "(", "isinstance",...
maybe shift divisions by periods of size freq used to shift the divisions for the shift method .
train
false
3,071
def _get_username(user): if hasattr(user, 'get_username'): return user.get_username() elif hasattr(user, 'username'): return user.username else: return user
[ "def", "_get_username", "(", "user", ")", ":", "if", "hasattr", "(", "user", ",", "'get_username'", ")", ":", "return", "user", ".", "get_username", "(", ")", "elif", "hasattr", "(", "user", ",", "'username'", ")", ":", "return", "user", ".", "username",...
retrieves the mattermosts configured username :return: string: the username string .
train
false
3,072
def test_start_after_join(): group = worker.WalTransferGroup(FakeWalUploader()) group.join() seg = FakeWalSegment('arbitrary') with pytest.raises(UserCritical): group.start(seg)
[ "def", "test_start_after_join", "(", ")", ":", "group", "=", "worker", ".", "WalTransferGroup", "(", "FakeWalUploader", "(", ")", ")", "group", ".", "join", "(", ")", "seg", "=", "FakeWalSegment", "(", "'arbitrary'", ")", "with", "pytest", ".", "raises", "...
break an invariant by adding transfers after .
train
false
3,073
def _rational_reconstruction_int_coeffs(hm, m, ring): h = ring.zero if isinstance(ring.domain, PolynomialRing): reconstruction = _rational_reconstruction_int_coeffs domain = ring.domain.ring else: reconstruction = _integer_rational_reconstruction domain = hm.ring.domain for (monom, coeff) in hm.iterterms(): coeffh = reconstruction(coeff, m, domain) if (not coeffh): return None h[monom] = coeffh return h
[ "def", "_rational_reconstruction_int_coeffs", "(", "hm", ",", "m", ",", "ring", ")", ":", "h", "=", "ring", ".", "zero", "if", "isinstance", "(", "ring", ".", "domain", ",", "PolynomialRing", ")", ":", "reconstruction", "=", "_rational_reconstruction_int_coeffs"...
reconstruct every rational coefficient c_h of a polynomial h in mathbb q[t_1 .
train
false
3,074
def quitWindow(root): try: root.destroy() except: pass
[ "def", "quitWindow", "(", "root", ")", ":", "try", ":", "root", ".", "destroy", "(", ")", "except", ":", "pass" ]
quit a window .
train
false
3,075
def dump_student_extensions(course, student): data = [] header = [_('Unit'), _('Extended Due Date')] units = get_units_with_due_date(course) units = {u.location: u for u in units} query = StudentFieldOverride.objects.filter(course_id=course.id, student=student, field='due') for override in query: location = override.location.replace(course_key=course.id) if (location not in units): continue due = DATE_FIELD.from_json(json.loads(override.value)) due = due.strftime('%Y-%m-%d %H:%M') title = title_or_url(units[location]) data.append(dict(zip(header, (title, due)))) return {'header': header, 'title': _('Due date extensions for {0} {1} ({2})').format(student.first_name, student.last_name, student.username), 'data': data}
[ "def", "dump_student_extensions", "(", "course", ",", "student", ")", ":", "data", "=", "[", "]", "header", "=", "[", "_", "(", "'Unit'", ")", ",", "_", "(", "'Extended Due Date'", ")", "]", "units", "=", "get_units_with_due_date", "(", "course", ")", "u...
dumps data about the due date extensions granted for a particular student in a particular course .
train
false
3,076
def _get_cost_functions(): cost_fns_conf = CONF.least_cost_functions if (cost_fns_conf is None): fn_str = 'nova.scheduler.least_cost.compute_fill_first_cost_fn' cost_fns_conf = [fn_str] cost_fns = [] for cost_fn_str in cost_fns_conf: short_name = cost_fn_str.split('.')[(-1)] if (not (short_name.startswith('compute_') or short_name.startswith('noop'))): continue if cost_fn_str.startswith('nova.scheduler.least_cost.'): cost_fn_str = ('nova.scheduler.weights.least_cost' + cost_fn_str[25:]) try: cost_fn = importutils.import_class(cost_fn_str) except ImportError: raise exception.SchedulerCostFunctionNotFound(cost_fn_str=cost_fn_str) try: flag_name = ('%s_weight' % cost_fn.__name__) weight = getattr(CONF, flag_name) except AttributeError: raise exception.SchedulerWeightFlagNotFound(flag_name=flag_name) if ((flag_name == 'compute_fill_first_cost_fn_weight') and (weight is None)): weight = (-1.0) cost_fns.append((weight, cost_fn)) return cost_fns
[ "def", "_get_cost_functions", "(", ")", ":", "cost_fns_conf", "=", "CONF", ".", "least_cost_functions", "if", "(", "cost_fns_conf", "is", "None", ")", ":", "fn_str", "=", "'nova.scheduler.least_cost.compute_fill_first_cost_fn'", "cost_fns_conf", "=", "[", "fn_str", "]...
returns a list of tuples containing weights and cost functions to use for weighing hosts .
train
false
3,078
@click.command(u'setup-help') @pass_context def setup_help(context): from frappe.utils.help import sync for site in context.sites: try: frappe.init(site) frappe.connect() sync() finally: frappe.destroy()
[ "@", "click", ".", "command", "(", "u'setup-help'", ")", "@", "pass_context", "def", "setup_help", "(", "context", ")", ":", "from", "frappe", ".", "utils", ".", "help", "import", "sync", "for", "site", "in", "context", ".", "sites", ":", "try", ":", "...
setup help table in the current site .
train
false
3,079
def send_alert_confirmation(alert): ctx = Context({'alert': alert, 'site': Site.objects.get_current()}) subject_tpl = loader.get_template('customer/alerts/emails/confirmation_subject.txt') body_tpl = loader.get_template('customer/alerts/emails/confirmation_body.txt') mail.send_mail(subject_tpl.render(ctx).strip(), body_tpl.render(ctx), settings.OSCAR_FROM_EMAIL, [alert.email])
[ "def", "send_alert_confirmation", "(", "alert", ")", ":", "ctx", "=", "Context", "(", "{", "'alert'", ":", "alert", ",", "'site'", ":", "Site", ".", "objects", ".", "get_current", "(", ")", "}", ")", "subject_tpl", "=", "loader", ".", "get_template", "("...
send an alert confirmation email .
train
false
3,080
def DynkinDiagram(t): return CartanType(t).dynkin_diagram()
[ "def", "DynkinDiagram", "(", "t", ")", ":", "return", "CartanType", "(", "t", ")", ".", "dynkin_diagram", "(", ")" ]
display the dynkin diagram of a given lie algebra works by generating the cartantype for the input .
train
false
3,084
def is_module_enabled(module): return is_link(('/etc/apache2/mods-enabled/%s.load' % module))
[ "def", "is_module_enabled", "(", "module", ")", ":", "return", "is_link", "(", "(", "'/etc/apache2/mods-enabled/%s.load'", "%", "module", ")", ")" ]
check if an apache module is enabled .
train
false
3,085
def GetActiveFileName(bAutoSave=1): pathName = None active = GetActiveView() if (active is None): return None try: doc = active.GetDocument() pathName = doc.GetPathName() if (bAutoSave and ((len(pathName) > 0) or (doc.GetTitle()[:8] == 'Untitled') or (doc.GetTitle()[:6] == 'Script'))): if doc.IsModified(): try: doc.OnSaveDocument(pathName) pathName = doc.GetPathName() linecache.clearcache() except win32ui.error: raise KeyboardInterrupt except (win32ui.error, AttributeError): pass if (not pathName): return None return pathName
[ "def", "GetActiveFileName", "(", "bAutoSave", "=", "1", ")", ":", "pathName", "=", "None", "active", "=", "GetActiveView", "(", ")", "if", "(", "active", "is", "None", ")", ":", "return", "None", "try", ":", "doc", "=", "active", ".", "GetDocument", "(...
gets the file name for the active frame .
train
false
3,086
def show_path_changes(path_changes): (sources, destinations) = zip(*path_changes) sources = list(map(util.displayable_path, sources)) destinations = list(map(util.displayable_path, destinations)) col_width = ((term_width() - len(' -> ')) // 2) max_width = len(max((sources + destinations), key=len)) if (max_width > col_width): for (source, dest) in zip(sources, destinations): log.info(u'{0} \n -> {1}', source, dest) else: title_pad = ((max_width - len('Source ')) + len(' -> ')) log.info(u'Source {0} Destination', (' ' * title_pad)) for (source, dest) in zip(sources, destinations): pad = (max_width - len(source)) log.info(u'{0} {1} -> {2}', source, (' ' * pad), dest)
[ "def", "show_path_changes", "(", "path_changes", ")", ":", "(", "sources", ",", "destinations", ")", "=", "zip", "(", "*", "path_changes", ")", "sources", "=", "list", "(", "map", "(", "util", ".", "displayable_path", ",", "sources", ")", ")", "destination...
given a list of tuples that indicate the path changes .
train
false
3,087
def json_from_url(url): error_message = '' url_handle = urllib.urlopen(url) url_contents = url_handle.read() try: parsed_json = json.loads(url_contents) except Exception as e: error_message = str(url_contents) print 'Error parsing JSON data in json_from_url(): ', str(e) return (None, error_message) return (parsed_json, error_message)
[ "def", "json_from_url", "(", "url", ")", ":", "error_message", "=", "''", "url_handle", "=", "urllib", ".", "urlopen", "(", "url", ")", "url_contents", "=", "url_handle", ".", "read", "(", ")", "try", ":", "parsed_json", "=", "json", ".", "loads", "(", ...
send a request to the tool shed via the tool shed api and handle the response .
train
false
3,088
def have_python_version(name, cache={}): if (name not in cache): cache[name] = (os.system((name + ' -c "import test.test_support"')) == 0) return cache[name]
[ "def", "have_python_version", "(", "name", ",", "cache", "=", "{", "}", ")", ":", "if", "(", "name", "not", "in", "cache", ")", ":", "cache", "[", "name", "]", "=", "(", "os", ".", "system", "(", "(", "name", "+", "' -c \"import test.test_support\"'", ...
check whether the given name is a valid python binary and has test .
train
false
3,089
def cache_or_load_file(path, creator, loader): if os.path.exists(path): return loader(path) file_name = os.path.basename(path) temp_dir = tempfile.mkdtemp() temp_path = os.path.join(temp_dir, file_name) lock_path = os.path.join(_dataset_root, '_create_lock') try: content = creator(temp_path) with filelock.FileLock(lock_path): if (not os.path.exists(path)): shutil.move(temp_path, path) finally: shutil.rmtree(temp_dir) return content
[ "def", "cache_or_load_file", "(", "path", ",", "creator", ",", "loader", ")", ":", "if", "os", ".", "path", ".", "exists", "(", "path", ")", ":", "return", "loader", "(", "path", ")", "file_name", "=", "os", ".", "path", ".", "basename", "(", "path",...
caches a file if it does not exist .
train
false
3,090
def walk_storage(path, topdown=True, onerror=None, followlinks=False, storage=private_storage): if (not topdown): raise NotImplementedError if onerror: raise NotImplementedError roots = [path] while len(roots): new_roots = [] for root in roots: (dirs, files) = storage.listdir(root) files = [smart_str(f) for f in files] dirs = [smart_str(d) for d in dirs] (yield (root, dirs, files)) for dn in dirs: new_roots.append(('%s/%s' % (root, dn))) roots[:] = new_roots
[ "def", "walk_storage", "(", "path", ",", "topdown", "=", "True", ",", "onerror", "=", "None", ",", "followlinks", "=", "False", ",", "storage", "=", "private_storage", ")", ":", "if", "(", "not", "topdown", ")", ":", "raise", "NotImplementedError", "if", ...
generate the file names in a stored directory tree by walking the tree top-down .
train
false
3,092
def describe_service(service_class): descriptor = ServiceDescriptor() descriptor.name = service_class.__name__ methods = [] remote_methods = service_class.all_remote_methods() for name in sorted(remote_methods.iterkeys()): if (name == 'get_descriptor'): continue method = remote_methods[name] methods.append(describe_method(method)) if methods: descriptor.methods = methods return descriptor
[ "def", "describe_service", "(", "service_class", ")", ":", "descriptor", "=", "ServiceDescriptor", "(", ")", "descriptor", ".", "name", "=", "service_class", ".", "__name__", "methods", "=", "[", "]", "remote_methods", "=", "service_class", ".", "all_remote_method...
build descriptor for service .
train
false
3,093
def html_to_xhtml(html): try: html = html.getroot() except AttributeError: pass prefix = ('{%s}' % XHTML_NAMESPACE) for el in html.iter(etree.Element): tag = el.tag if (tag[0] != '{'): el.tag = (prefix + tag)
[ "def", "html_to_xhtml", "(", "html", ")", ":", "try", ":", "html", "=", "html", ".", "getroot", "(", ")", "except", "AttributeError", ":", "pass", "prefix", "=", "(", "'{%s}'", "%", "XHTML_NAMESPACE", ")", "for", "el", "in", "html", ".", "iter", "(", ...
convert all tags in an html tree to xhtml by moving them to the xhtml namespace .
train
true
3,094
def _log_runtime(parameter, proc_location, start_time): runtime = (time.time() - start_time) log.debug(('proc call (%s): %s (runtime: %0.4f)' % (parameter, proc_location, runtime)))
[ "def", "_log_runtime", "(", "parameter", ",", "proc_location", ",", "start_time", ")", ":", "runtime", "=", "(", "time", ".", "time", "(", ")", "-", "start_time", ")", "log", ".", "debug", "(", "(", "'proc call (%s): %s (runtime: %0.4f)'", "%", "(", "paramet...
logs a message indicating a successful proc query .
train
false