| id_within_dataset (int64, 1-55.5k) | snippet (string, length 19-14.2k) | tokens (list, length 6-1.63k) | nl (string, length 6-352) | split_within_dataset (string, 1 value) | is_duplicated (bool, 2 classes) |
|---|---|---|---|---|---|
2,021 | def holdAcknowledge():
a = TpPd(pd=3)
b = MessageType(mesType=25)
packet = (a / b)
return packet
| [
"def",
"holdAcknowledge",
"(",
")",
":",
"a",
"=",
"TpPd",
"(",
"pd",
"=",
"3",
")",
"b",
"=",
"MessageType",
"(",
"mesType",
"=",
"25",
")",
"packet",
"=",
"(",
"a",
"/",
"b",
")",
"return",
"packet"
] | hold acknowledge section 9 . | train | true |
2,022 | def get_project_hierarchy(context, project_id, subtree_as_ids=False, parents_as_ids=False, is_admin_project=False):
keystone = _keystone_client(context)
generic_project = GenericProjectInfo(project_id, keystone.version)
if (keystone.version == 'v3'):
project = keystone.projects.get(project_id, subtree_as_ids=subtr... | [
"def",
"get_project_hierarchy",
"(",
"context",
",",
"project_id",
",",
"subtree_as_ids",
"=",
"False",
",",
"parents_as_ids",
"=",
"False",
",",
"is_admin_project",
"=",
"False",
")",
":",
"keystone",
"=",
"_keystone_client",
"(",
"context",
")",
"generic_project... | a helper method to get the project hierarchy . | train | false |
2,023 | def egquery(**keywds):
cgi = 'https://eutils.ncbi.nlm.nih.gov/entrez/eutils/egquery.fcgi'
variables = {}
variables.update(keywds)
return _open(cgi, variables)
| [
"def",
"egquery",
"(",
"**",
"keywds",
")",
":",
"cgi",
"=",
"'https://eutils.ncbi.nlm.nih.gov/entrez/eutils/egquery.fcgi'",
"variables",
"=",
"{",
"}",
"variables",
".",
"update",
"(",
"keywds",
")",
"return",
"_open",
"(",
"cgi",
",",
"variables",
")"
] | egquery provides entrez database counts for a global search . | train | false |
2,024 | def rank_est(A, atol=1e-13, rtol=0):
A = np.atleast_2d(A)
s = svd(A, compute_uv=False)
tol = max(atol, (rtol * s[0]))
rank = int((s >= tol).sum())
return rank
| [
"def",
"rank_est",
"(",
"A",
",",
"atol",
"=",
"1e-13",
",",
"rtol",
"=",
"0",
")",
":",
"A",
"=",
"np",
".",
"atleast_2d",
"(",
"A",
")",
"s",
"=",
"svd",
"(",
"A",
",",
"compute_uv",
"=",
"False",
")",
"tol",
"=",
"max",
"(",
"atol",
",",
... | estimate the rank of a matrix . | train | true |
2,025 | def waic(trace, model=None, n_eff=False, pointwise=False):
model = modelcontext(model)
log_py = log_post_trace(trace, model)
lppd_i = logsumexp(log_py, axis=0, b=(1.0 / log_py.shape[0]))
vars_lpd = np.var(log_py, axis=0)
if np.any((vars_lpd > 0.4)):
warnings.warn('For one or more samples the posterior variance o... | [
"def",
"waic",
"(",
"trace",
",",
"model",
"=",
"None",
",",
"n_eff",
"=",
"False",
",",
"pointwise",
"=",
"False",
")",
":",
"model",
"=",
"modelcontext",
"(",
"model",
")",
"log_py",
"=",
"log_post_trace",
"(",
"trace",
",",
"model",
")",
"lppd_i",
... | calculate the widely available information criterion . | train | false |
2,026 | def get_array_section_has_problem(course_id):
course = modulestore().get_course(course_id, depth=4)
b_section_has_problem = ([False] * len(course.get_children()))
i = 0
for section in course.get_children():
for subsection in section.get_children():
for unit in subsection.get_children():
for child in unit.g... | [
"def",
"get_array_section_has_problem",
"(",
"course_id",
")",
":",
"course",
"=",
"modulestore",
"(",
")",
".",
"get_course",
"(",
"course_id",
",",
"depth",
"=",
"4",
")",
"b_section_has_problem",
"=",
"(",
"[",
"False",
"]",
"*",
"len",
"(",
"course",
"... | returns an array of true/false whether each section has problems . | train | false |
2,027 | def OpenDocumentChart():
doc = OpenDocument('application/vnd.oasis.opendocument.chart')
doc.chart = Chart()
doc.body.addElement(doc.chart)
return doc
| [
"def",
"OpenDocumentChart",
"(",
")",
":",
"doc",
"=",
"OpenDocument",
"(",
"'application/vnd.oasis.opendocument.chart'",
")",
"doc",
".",
"chart",
"=",
"Chart",
"(",
")",
"doc",
".",
"body",
".",
"addElement",
"(",
"doc",
".",
"chart",
")",
"return",
"doc"
... | creates a chart document . | train | false |
2,028 | def get_themes():
themes = {}
builtins = pkg_resources.get_entry_map(dist=u'mkdocs', group=u'mkdocs.themes')
for theme in pkg_resources.iter_entry_points(group=u'mkdocs.themes'):
if ((theme.name in builtins) and (theme.dist.key != u'mkdocs')):
raise exceptions.ConfigurationError(u'The theme {0} is a builtin the... | [
"def",
"get_themes",
"(",
")",
":",
"themes",
"=",
"{",
"}",
"builtins",
"=",
"pkg_resources",
".",
"get_entry_map",
"(",
"dist",
"=",
"u'mkdocs'",
",",
"group",
"=",
"u'mkdocs.themes'",
")",
"for",
"theme",
"in",
"pkg_resources",
".",
"iter_entry_points",
"... | returns available themes list . | train | false |
2,029 | def crc_finalize(crc):
return (crc & _MASK)
| [
"def",
"crc_finalize",
"(",
"crc",
")",
":",
"return",
"(",
"crc",
"&",
"_MASK",
")"
] | finalize crc-32c checksum . | train | false |
2,030 | @logic.validate(logic.schema.default_activity_list_schema)
def package_activity_list(context, data_dict):
_check_access('package_show', context, data_dict)
model = context['model']
package_ref = data_dict.get('id')
package = model.Package.get(package_ref)
if (package is None):
raise logic.NotFound
offset = int(... | [
"@",
"logic",
".",
"validate",
"(",
"logic",
".",
"schema",
".",
"default_activity_list_schema",
")",
"def",
"package_activity_list",
"(",
"context",
",",
"data_dict",
")",
":",
"_check_access",
"(",
"'package_show'",
",",
"context",
",",
"data_dict",
")",
"mode... | return the given dataset s public activity stream . | train | false |
2,032 | def string_concat(*strings):
return ''.join([str(el) for el in strings])
| [
"def",
"string_concat",
"(",
"*",
"strings",
")",
":",
"return",
"''",
".",
"join",
"(",
"[",
"str",
"(",
"el",
")",
"for",
"el",
"in",
"strings",
"]",
")"
] | lazy variant of string concatenation . | train | false |
2,035 | def modifies_known_mutable(obj, attr):
for (typespec, unsafe) in _mutable_spec:
if isinstance(obj, typespec):
return (attr in unsafe)
return False
| [
"def",
"modifies_known_mutable",
"(",
"obj",
",",
"attr",
")",
":",
"for",
"(",
"typespec",
",",
"unsafe",
")",
"in",
"_mutable_spec",
":",
"if",
"isinstance",
"(",
"obj",
",",
"typespec",
")",
":",
"return",
"(",
"attr",
"in",
"unsafe",
")",
"return",
... | this function checks if an attribute on a builtin mutable object would modify it if called . | train | true |
2,036 | def getChainMatrixSVGIfNecessary(elementNode, yAxisPointingUpward):
matrixSVG = MatrixSVG()
if yAxisPointingUpward:
return matrixSVG
return getChainMatrixSVG(elementNode, matrixSVG)
| [
"def",
"getChainMatrixSVGIfNecessary",
"(",
"elementNode",
",",
"yAxisPointingUpward",
")",
":",
"matrixSVG",
"=",
"MatrixSVG",
"(",
")",
"if",
"yAxisPointingUpward",
":",
"return",
"matrixSVG",
"return",
"getChainMatrixSVG",
"(",
"elementNode",
",",
"matrixSVG",
")"
... | get chain matrixsvg by svgelement and yaxispointingupward . | train | false |
2,037 | def initRpc(config):
rpc_data = {'connect': '127.0.0.1', 'port': '8336', 'user': 'PLACEHOLDER', 'password': 'PLACEHOLDER', 'clienttimeout': '900'}
try:
fptr = open(config, 'r')
lines = fptr.readlines()
fptr.close()
except:
return None
for line in lines:
if (not line.startswith('rpc')):
continue
key_v... | [
"def",
"initRpc",
"(",
"config",
")",
":",
"rpc_data",
"=",
"{",
"'connect'",
":",
"'127.0.0.1'",
",",
"'port'",
":",
"'8336'",
",",
"'user'",
":",
"'PLACEHOLDER'",
",",
"'password'",
":",
"'PLACEHOLDER'",
",",
"'clienttimeout'",
":",
"'900'",
"}",
"try",
... | initialize namecoin rpc . | train | false |
2,038 | def get_view(request):
return HttpResponse('Hello world')
| [
"def",
"get_view",
"(",
"request",
")",
":",
"return",
"HttpResponse",
"(",
"'Hello world'",
")"
] | a simple view that expects a get request . | train | false |
2,040 | def _get_next_prev_month(generic_view, naive_result, is_previous, use_first_day):
date_field = generic_view.get_date_field()
allow_empty = generic_view.get_allow_empty()
allow_future = generic_view.get_allow_future()
if allow_empty:
result = naive_result
else:
if is_previous:
lookup = {('%s__lte' % date_fie... | [
"def",
"_get_next_prev_month",
"(",
"generic_view",
",",
"naive_result",
",",
"is_previous",
",",
"use_first_day",
")",
":",
"date_field",
"=",
"generic_view",
".",
"get_date_field",
"(",
")",
"allow_empty",
"=",
"generic_view",
".",
"get_allow_empty",
"(",
")",
"... | helper: get the next or the previous valid date . | train | false |
2,041 | @pytest.fixture(scope='session')
def unicode_encode_err():
return UnicodeEncodeError('ascii', '', 0, 2, 'fake exception')
| [
"@",
"pytest",
".",
"fixture",
"(",
"scope",
"=",
"'session'",
")",
"def",
"unicode_encode_err",
"(",
")",
":",
"return",
"UnicodeEncodeError",
"(",
"'ascii'",
",",
"''",
",",
"0",
",",
"2",
",",
"'fake exception'",
")"
] | provide a fake unicodeencodeerror exception . | train | false |
2,042 | def read_numpy(fh, byteorder, dtype, count):
dtype = ('b' if (dtype[(-1)] == 's') else (byteorder + dtype[(-1)]))
return fh.read_array(dtype, count)
| [
"def",
"read_numpy",
"(",
"fh",
",",
"byteorder",
",",
"dtype",
",",
"count",
")",
":",
"dtype",
"=",
"(",
"'b'",
"if",
"(",
"dtype",
"[",
"(",
"-",
"1",
")",
"]",
"==",
"'s'",
")",
"else",
"(",
"byteorder",
"+",
"dtype",
"[",
"(",
"-",
"1",
... | read tag data from file and return as numpy array . | train | true |
2,044 | def protocol_from_http(protocol_str):
return (int(protocol_str[5]), int(protocol_str[7]))
| [
"def",
"protocol_from_http",
"(",
"protocol_str",
")",
":",
"return",
"(",
"int",
"(",
"protocol_str",
"[",
"5",
"]",
")",
",",
"int",
"(",
"protocol_str",
"[",
"7",
"]",
")",
")"
] | return a protocol tuple from the given http/x . | train | false |
2,047 | def get_temper_devices():
from temperusb.temper import TemperHandler
return TemperHandler().get_devices()
| [
"def",
"get_temper_devices",
"(",
")",
":",
"from",
"temperusb",
".",
"temper",
"import",
"TemperHandler",
"return",
"TemperHandler",
"(",
")",
".",
"get_devices",
"(",
")"
] | scan the temper devices from temperusb . | train | false |
2,048 | def _move_to_next(fid, byte=8):
now = fid.tell()
if ((now % byte) != 0):
now = ((now - (now % byte)) + byte)
fid.seek(now, 0)
| [
"def",
"_move_to_next",
"(",
"fid",
",",
"byte",
"=",
"8",
")",
":",
"now",
"=",
"fid",
".",
"tell",
"(",
")",
"if",
"(",
"(",
"now",
"%",
"byte",
")",
"!=",
"0",
")",
":",
"now",
"=",
"(",
"(",
"now",
"-",
"(",
"now",
"%",
"byte",
")",
"... | move to next byte boundary . | train | false |
2,049 | def _align(sequenceA, sequenceB, match_fn, gap_A_fn, gap_B_fn, penalize_extend_when_opening, penalize_end_gaps, align_globally, gap_char, force_generic, score_only, one_alignment_only):
if ((not sequenceA) or (not sequenceB)):
return []
try:
(sequenceA + gap_char)
(sequenceB + gap_char)
except TypeError:
rai... | [
"def",
"_align",
"(",
"sequenceA",
",",
"sequenceB",
",",
"match_fn",
",",
"gap_A_fn",
",",
"gap_B_fn",
",",
"penalize_extend_when_opening",
",",
"penalize_end_gaps",
",",
"align_globally",
",",
"gap_char",
",",
"force_generic",
",",
"score_only",
",",
"one_alignmen... | align a set of terms . | train | false |
2,050 | def a_product(x, y, z=1):
return ((x * y) * z)
| [
"def",
"a_product",
"(",
"x",
",",
"y",
",",
"z",
"=",
"1",
")",
":",
"return",
"(",
"(",
"x",
"*",
"y",
")",
"*",
"z",
")"
] | simple function that returns the product of three numbers . | train | false |
2,051 | def owner(*paths):
return __salt__['lowpkg.owner'](*paths)
| [
"def",
"owner",
"(",
"*",
"paths",
")",
":",
"return",
"__salt__",
"[",
"'lowpkg.owner'",
"]",
"(",
"*",
"paths",
")"
] | return the name of the package that owns the file . | train | false |
2,053 | def Enabled():
return (not GlobalProcess().IsDefault())
| [
"def",
"Enabled",
"(",
")",
":",
"return",
"(",
"not",
"GlobalProcess",
"(",
")",
".",
"IsDefault",
"(",
")",
")"
] | indicates whether the dev_appserver is running in multiprocess mode . | train | false |
2,054 | @pytest.mark.network
def test_requirements_file(script):
(other_lib_name, other_lib_version) = ('anyjson', '0.3')
script.scratch_path.join('initools-req.txt').write(textwrap.dedent((' INITools==0.2\n # and something else to test out:\n %s<=%s\n ' % (other_lib_name, other_lib_version))))
r... | [
"@",
"pytest",
".",
"mark",
".",
"network",
"def",
"test_requirements_file",
"(",
"script",
")",
":",
"(",
"other_lib_name",
",",
"other_lib_version",
")",
"=",
"(",
"'anyjson'",
",",
"'0.3'",
")",
"script",
".",
"scratch_path",
".",
"join",
"(",
"'initools-... | test installing from a requirements file . | train | false |
2,055 | def cnn_pool(pool_dim, convolved_features):
num_images = convolved_features.shape[1]
num_features = convolved_features.shape[0]
convolved_dim = convolved_features.shape[2]
assert ((convolved_dim % pool_dim) == 0), 'Pooling dimension is not an exact multiple of convolved dimension'
pool_size = (convolved_dim / pool... | [
"def",
"cnn_pool",
"(",
"pool_dim",
",",
"convolved_features",
")",
":",
"num_images",
"=",
"convolved_features",
".",
"shape",
"[",
"1",
"]",
"num_features",
"=",
"convolved_features",
".",
"shape",
"[",
"0",
"]",
"convolved_dim",
"=",
"convolved_features",
"."... | pools the given convolved features . | train | false |
2,056 | def build_output_stream(args, env, request, response, output_options):
req_h = (OUT_REQ_HEAD in output_options)
req_b = (OUT_REQ_BODY in output_options)
resp_h = (OUT_RESP_HEAD in output_options)
resp_b = (OUT_RESP_BODY in output_options)
req = (req_h or req_b)
resp = (resp_h or resp_b)
output = []
Stream = get... | [
"def",
"build_output_stream",
"(",
"args",
",",
"env",
",",
"request",
",",
"response",
",",
"output_options",
")",
":",
"req_h",
"=",
"(",
"OUT_REQ_HEAD",
"in",
"output_options",
")",
"req_b",
"=",
"(",
"OUT_REQ_BODY",
"in",
"output_options",
")",
"resp_h",
... | build and return a chain of iterators over the request-response exchange each of which yields bytes chunks . | train | false |
2,057 | def lanl_graph():
import networkx as nx
try:
fh = open('lanl_routes.edgelist', 'r')
except IOError:
print 'lanl.edges not found'
raise
G = nx.Graph()
time = {}
time[0] = 0
for line in fh.readlines():
(head, tail, rtt) = line.split()
G.add_edge(int(head), int(tail))
time[int(head)] = float(rtt)
G0 = ... | [
"def",
"lanl_graph",
"(",
")",
":",
"import",
"networkx",
"as",
"nx",
"try",
":",
"fh",
"=",
"open",
"(",
"'lanl_routes.edgelist'",
",",
"'r'",
")",
"except",
"IOError",
":",
"print",
"'lanl.edges not found'",
"raise",
"G",
"=",
"nx",
".",
"Graph",
"(",
... | return the lanl internet view graph from lanl . | train | false |
2,058 | def api_access_enabled_or_404(view_func):
@wraps(view_func)
def wrapped_view(view_obj, *args, **kwargs):
'Wrapper for the view function.'
if ApiAccessConfig.current().enabled:
return view_func(view_obj, *args, **kwargs)
return HttpResponseNotFound()
return wrapped_view
| [
"def",
"api_access_enabled_or_404",
"(",
"view_func",
")",
":",
"@",
"wraps",
"(",
"view_func",
")",
"def",
"wrapped_view",
"(",
"view_obj",
",",
"*",
"args",
",",
"**",
"kwargs",
")",
":",
"if",
"ApiAccessConfig",
".",
"current",
"(",
")",
".",
"enabled",... | if api access management feature is not enabled . | train | false |
2,059 | def eventlog(request, event=0):
if (not test_user_authenticated(request)):
return login(request, next=('/cobbler_web/eventlog/%s' % str(event)), expired=True)
event_info = remote.get_events()
if (event not in event_info):
return HttpResponse('event not found')
data = event_info[event]
eventname = data[0]
even... | [
"def",
"eventlog",
"(",
"request",
",",
"event",
"=",
"0",
")",
":",
"if",
"(",
"not",
"test_user_authenticated",
"(",
"request",
")",
")",
":",
"return",
"login",
"(",
"request",
",",
"next",
"=",
"(",
"'/cobbler_web/eventlog/%s'",
"%",
"str",
"(",
"eve... | shows the log for a given event . | train | false |
2,060 | def ipv4_to_bin(ip):
return addrconv.ipv4.text_to_bin(ip)
| [
"def",
"ipv4_to_bin",
"(",
"ip",
")",
":",
"return",
"addrconv",
".",
"ipv4",
".",
"text_to_bin",
"(",
"ip",
")"
] | converts human readable ipv4 string to binary representation . | train | false |
2,061 | def delete_network(context, net_id):
session = context.session
with session.begin(subtransactions=True):
net = session.query(BrocadeNetwork).filter_by(id=net_id).first()
if (net is not None):
session.delete(net)
| [
"def",
"delete_network",
"(",
"context",
",",
"net_id",
")",
":",
"session",
"=",
"context",
".",
"session",
"with",
"session",
".",
"begin",
"(",
"subtransactions",
"=",
"True",
")",
":",
"net",
"=",
"session",
".",
"query",
"(",
"BrocadeNetwork",
")",
... | permanently delete a network . | train | false |
2,062 | def int_to_bin(i):
i1 = (i % 256)
i2 = int((i / 256))
return (chr(i1) + chr(i2))
| [
"def",
"int_to_bin",
"(",
"i",
")",
":",
"i1",
"=",
"(",
"i",
"%",
"256",
")",
"i2",
"=",
"int",
"(",
"(",
"i",
"/",
"256",
")",
")",
"return",
"(",
"chr",
"(",
"i1",
")",
"+",
"chr",
"(",
"i2",
")",
")"
] | integer to two bytes . | train | true |
2,064 | def create_tcp_socket(module):
type_ = module.SOCK_STREAM
if hasattr(module, 'SOCK_CLOEXEC'):
type_ |= module.SOCK_CLOEXEC
sock = module.socket(module.AF_INET, type_)
_set_default_tcpsock_options(module, sock)
return sock
| [
"def",
"create_tcp_socket",
"(",
"module",
")",
":",
"type_",
"=",
"module",
".",
"SOCK_STREAM",
"if",
"hasattr",
"(",
"module",
",",
"'SOCK_CLOEXEC'",
")",
":",
"type_",
"|=",
"module",
".",
"SOCK_CLOEXEC",
"sock",
"=",
"module",
".",
"socket",
"(",
"modu... | create a tcp socket with the cloexec flag set . | train | false |
2,065 | def get_output_ids(ids_bcs_added_field, corrected_bc, num_errors, added_field, max_bc_errors=1.5, enum_val=1):
bc_corrected_flag = None
if (added_field is None):
curr_added_field = ''
else:
curr_added_field = added_field
if (corrected_bc is None):
curr_bc = ''
else:
curr_bc = corrected_bc
log_id = ''
if ... | [
"def",
"get_output_ids",
"(",
"ids_bcs_added_field",
",",
"corrected_bc",
",",
"num_errors",
",",
"added_field",
",",
"max_bc_errors",
"=",
"1.5",
",",
"enum_val",
"=",
"1",
")",
":",
"bc_corrected_flag",
"=",
"None",
"if",
"(",
"added_field",
"is",
"None",
")... | returns sampleid to write to output fasta/qual files ids_bcs_added_field: dict of : sampleid corrected_bc: corrected barcode sequence . | train | false |
2,066 | def fix_local_scheme(home_dir, symlink=True):
try:
import sysconfig
except ImportError:
pass
else:
if (sysconfig._get_default_scheme() == 'posix_local'):
local_path = os.path.join(home_dir, 'local')
if (not os.path.exists(local_path)):
os.mkdir(local_path)
for subdir_name in os.listdir(home_dir):... | [
"def",
"fix_local_scheme",
"(",
"home_dir",
",",
"symlink",
"=",
"True",
")",
":",
"try",
":",
"import",
"sysconfig",
"except",
"ImportError",
":",
"pass",
"else",
":",
"if",
"(",
"sysconfig",
".",
"_get_default_scheme",
"(",
")",
"==",
"'posix_local'",
")",... | platforms that use the "posix_local" install scheme need to be given an additional "local" location . | train | true |
2,068 | def _get_filename(fd):
if hasattr(fd, 'name'):
return fd.name
return fd
| [
"def",
"_get_filename",
"(",
"fd",
")",
":",
"if",
"hasattr",
"(",
"fd",
",",
"'name'",
")",
":",
"return",
"fd",
".",
"name",
"return",
"fd"
] | transform the absolute test filenames to relative ones . | train | false |
2,069 | def __generate_crc16_table():
result = []
for byte in range(256):
crc = 0
for _ in range(8):
if ((byte ^ crc) & 1):
crc = ((crc >> 1) ^ 40961)
else:
crc >>= 1
byte >>= 1
result.append(crc)
return result
| [
"def",
"__generate_crc16_table",
"(",
")",
":",
"result",
"=",
"[",
"]",
"for",
"byte",
"in",
"range",
"(",
"256",
")",
":",
"crc",
"=",
"0",
"for",
"_",
"in",
"range",
"(",
"8",
")",
":",
"if",
"(",
"(",
"byte",
"^",
"crc",
")",
"&",
"1",
")... | generates a crc16 lookup table . | train | false |
2,070 | def n_feature_influence(estimators, n_train, n_test, n_features, percentile):
percentiles = defaultdict(defaultdict)
for n in n_features:
print(('benchmarking with %d features' % n))
(X_train, y_train, X_test, y_test) = generate_dataset(n_train, n_test, n)
for (cls_name, estimator) in estimators.items():
est... | [
"def",
"n_feature_influence",
"(",
"estimators",
",",
"n_train",
",",
"n_test",
",",
"n_features",
",",
"percentile",
")",
":",
"percentiles",
"=",
"defaultdict",
"(",
"defaultdict",
")",
"for",
"n",
"in",
"n_features",
":",
"print",
"(",
"(",
"'benchmarking w... | estimate influence of the number of features on prediction time . | train | false |
2,071 | def model_query(context, model, *args, **kwargs):
session = (kwargs.get('session') or get_session())
read_deleted = (kwargs.get('read_deleted') or context.read_deleted)
project_only = kwargs.get('project_only', False)
def issubclassof_nova_base(obj):
return (isinstance(obj, type) and issubclass(obj, models.NovaBa... | [
"def",
"model_query",
"(",
"context",
",",
"model",
",",
"*",
"args",
",",
"**",
"kwargs",
")",
":",
"session",
"=",
"(",
"kwargs",
".",
"get",
"(",
"'session'",
")",
"or",
"get_session",
"(",
")",
")",
"read_deleted",
"=",
"(",
"kwargs",
".",
"get",... | query helper that accounts for contexts read_deleted field . | train | false |
2,072 | def bptrs(a):
return pycuda.gpuarray.arange(a.ptr, (a.ptr + (a.shape[0] * a.strides[0])), a.strides[0], dtype=cublas.ctypes.c_void_p)
| [
"def",
"bptrs",
"(",
"a",
")",
":",
"return",
"pycuda",
".",
"gpuarray",
".",
"arange",
"(",
"a",
".",
"ptr",
",",
"(",
"a",
".",
"ptr",
"+",
"(",
"a",
".",
"shape",
"[",
"0",
"]",
"*",
"a",
".",
"strides",
"[",
"0",
"]",
")",
")",
",",
"... | pointer array when input represents a batch of matrices . | train | false |
2,074 | def clear_feature(dev, feature, recipient=None):
if (feature == ENDPOINT_HALT):
dev.clear_halt(recipient)
else:
(bmRequestType, wIndex) = _parse_recipient(recipient, util.CTRL_OUT)
dev.ctrl_transfer(bmRequestType=bmRequestType, bRequest=1, wIndex=wIndex, wValue=feature)
| [
"def",
"clear_feature",
"(",
"dev",
",",
"feature",
",",
"recipient",
"=",
"None",
")",
":",
"if",
"(",
"feature",
"==",
"ENDPOINT_HALT",
")",
":",
"dev",
".",
"clear_halt",
"(",
"recipient",
")",
"else",
":",
"(",
"bmRequestType",
",",
"wIndex",
")",
... | clear/disable a specific feature . | train | true |
2,076 | def lvresize(size, lvpath):
ret = {}
cmd = ['lvresize', '-L', str(size), lvpath]
cmd_ret = __salt__['cmd.run_all'](cmd, python_shell=False)
if (cmd_ret['retcode'] != 0):
return {}
return ret
| [
"def",
"lvresize",
"(",
"size",
",",
"lvpath",
")",
":",
"ret",
"=",
"{",
"}",
"cmd",
"=",
"[",
"'lvresize'",
",",
"'-L'",
",",
"str",
"(",
"size",
")",
",",
"lvpath",
"]",
"cmd_ret",
"=",
"__salt__",
"[",
"'cmd.run_all'",
"]",
"(",
"cmd",
",",
"... | return information about the logical volume(s) cli examples: . | train | false |
2,078 | @depends(HAS_PYVMOMI)
def service_restart(host, username, password, service_name, protocol=None, port=None, host_names=None):
service_instance = salt.utils.vmware.get_service_instance(host=host, username=username, password=password, protocol=protocol, port=port)
host_names = _check_hosts(service_instance, host, host_... | [
"@",
"depends",
"(",
"HAS_PYVMOMI",
")",
"def",
"service_restart",
"(",
"host",
",",
"username",
",",
"password",
",",
"service_name",
",",
"protocol",
"=",
"None",
",",
"port",
"=",
"None",
",",
"host_names",
"=",
"None",
")",
":",
"service_instance",
"="... | restart a "service" on the rest server . | train | true |
2,079 | def _compare_io(inv_op, out_file_ext='.fif'):
tempdir = _TempDir()
if (out_file_ext == '.fif'):
out_file = op.join(tempdir, 'test-inv.fif')
elif (out_file_ext == '.gz'):
out_file = op.join(tempdir, 'test-inv.fif.gz')
else:
raise ValueError('IO test could not complete')
inv_init = copy.deepcopy(inv_op)
write... | [
"def",
"_compare_io",
"(",
"inv_op",
",",
"out_file_ext",
"=",
"'.fif'",
")",
":",
"tempdir",
"=",
"_TempDir",
"(",
")",
"if",
"(",
"out_file_ext",
"==",
"'.fif'",
")",
":",
"out_file",
"=",
"op",
".",
"join",
"(",
"tempdir",
",",
"'test-inv.fif'",
")",
... | compare inverse io . | train | false |
2,080 | def rand_alnum(length=0):
jibber = ''.join([letters, digits])
return ''.join((choice(jibber) for _ in xrange((length or randint(10, 30)))))
| [
"def",
"rand_alnum",
"(",
"length",
"=",
"0",
")",
":",
"jibber",
"=",
"''",
".",
"join",
"(",
"[",
"letters",
",",
"digits",
"]",
")",
"return",
"''",
".",
"join",
"(",
"(",
"choice",
"(",
"jibber",
")",
"for",
"_",
"in",
"xrange",
"(",
"(",
"... | create a random string with random length :return: a random string of with length > 10 and length < 30 . | train | false |
2,081 | def _get_impl():
global _RPCIMPL
if (_RPCIMPL is None):
try:
_RPCIMPL = importutils.import_module(CONF.rpc_backend)
except ImportError:
impl = CONF.rpc_backend.replace('nova.rpc', 'nova.openstack.common.rpc')
_RPCIMPL = importutils.import_module(impl)
return _RPCIMPL
| [
"def",
"_get_impl",
"(",
")",
":",
"global",
"_RPCIMPL",
"if",
"(",
"_RPCIMPL",
"is",
"None",
")",
":",
"try",
":",
"_RPCIMPL",
"=",
"importutils",
".",
"import_module",
"(",
"CONF",
".",
"rpc_backend",
")",
"except",
"ImportError",
":",
"impl",
"=",
"CO... | delay import of rpc_backend until configuration is loaded . | train | false |
2,084 | def p_field_type(p):
p[0] = p[1]
| [
"def",
"p_field_type",
"(",
"p",
")",
":",
"p",
"[",
"0",
"]",
"=",
"p",
"[",
"1",
"]"
] | field_type : ref_type | definition_type . | train | false |
2,085 | def _reverse_cmap_spec(spec):
if (u'listed' in spec):
return {u'listed': spec[u'listed'][::(-1)]}
if (u'red' in spec):
return revcmap(spec)
else:
revspec = list(reversed(spec))
if (len(revspec[0]) == 2):
revspec = [((1.0 - a), b) for (a, b) in revspec]
return revspec
| [
"def",
"_reverse_cmap_spec",
"(",
"spec",
")",
":",
"if",
"(",
"u'listed'",
"in",
"spec",
")",
":",
"return",
"{",
"u'listed'",
":",
"spec",
"[",
"u'listed'",
"]",
"[",
":",
":",
"(",
"-",
"1",
")",
"]",
"}",
"if",
"(",
"u'red'",
"in",
"spec",
")... | reverses cmap specification *spec* . | train | false |
2,087 | def to_class_path(cls):
return ':'.join([cls.__module__, cls.__name__])
| [
"def",
"to_class_path",
"(",
"cls",
")",
":",
"return",
"':'",
".",
"join",
"(",
"[",
"cls",
".",
"__module__",
",",
"cls",
".",
"__name__",
"]",
")"
] | returns class path for a class takes a class and returns the class path which is composed of the module plus the class name . | train | false |
2,089 | def check_partitioners(partitioners, keys):
if (partitioners is None):
return {}
keys = set(keys)
if (not (set(partitioners) <= keys)):
extra_keys = (set(partitioners) - keys)
raise KeyError('Invalid partitioner keys {}, partitioners can only be provided for {}'.format(', '.join(("'{}'".format(key) for key in ... | [
"def",
"check_partitioners",
"(",
"partitioners",
",",
"keys",
")",
":",
"if",
"(",
"partitioners",
"is",
"None",
")",
":",
"return",
"{",
"}",
"keys",
"=",
"set",
"(",
"keys",
")",
"if",
"(",
"not",
"(",
"set",
"(",
"partitioners",
")",
"<=",
"keys"... | checks the given partitioners . | train | false |
2,090 | def inv(a):
return (~ a)
| [
"def",
"inv",
"(",
"a",
")",
":",
"return",
"(",
"~",
"a",
")"
] | computes the inverse of square matrix . | train | false |
2,091 | def create_api_request(rf, method='get', url='/', data='', user=None, encode_as_json=True):
content_type = 'application/x-www-form-urlencoded'
if (data and encode_as_json):
from pootle.core.utils.json import jsonify
content_type = 'application/json'
data = jsonify(data)
request_method = getattr(rf, method.lowe... | [
"def",
"create_api_request",
"(",
"rf",
",",
"method",
"=",
"'get'",
",",
"url",
"=",
"'/'",
",",
"data",
"=",
"''",
",",
"user",
"=",
"None",
",",
"encode_as_json",
"=",
"True",
")",
":",
"content_type",
"=",
"'application/x-www-form-urlencoded'",
"if",
"... | convenience function to create and setup fake requests . | train | false |
2,093 | def add_permission(user, model, permission_codename):
content_type = ContentType.objects.get_for_model(model)
(permission, created) = Permission.objects.get_or_create(codename=permission_codename, content_type=content_type, defaults={'name': permission_codename})
user.user_permissions.add(permission)
| [
"def",
"add_permission",
"(",
"user",
",",
"model",
",",
"permission_codename",
")",
":",
"content_type",
"=",
"ContentType",
".",
"objects",
".",
"get_for_model",
"(",
"model",
")",
"(",
"permission",
",",
"created",
")",
"=",
"Permission",
".",
"objects",
... | add a permission to a lambda function . | train | false |
2,094 | def profile_(profile, names, vm_overrides=None, opts=None, **kwargs):
client = _get_client()
if isinstance(opts, dict):
client.opts.update(opts)
info = client.profile(profile, names, vm_overrides=vm_overrides, **kwargs)
return info
| [
"def",
"profile_",
"(",
"profile",
",",
"names",
",",
"vm_overrides",
"=",
"None",
",",
"opts",
"=",
"None",
",",
"**",
"kwargs",
")",
":",
"client",
"=",
"_get_client",
"(",
")",
"if",
"isinstance",
"(",
"opts",
",",
"dict",
")",
":",
"client",
".",... | spin up an instance using salt cloud cli example: . | train | true |
2,095 | def fake_pgettext(translations):
def _pgettext(context, text):
return translations.get((context, text), text)
return _pgettext
| [
"def",
"fake_pgettext",
"(",
"translations",
")",
":",
"def",
"_pgettext",
"(",
"context",
",",
"text",
")",
":",
"return",
"translations",
".",
"get",
"(",
"(",
"context",
",",
"text",
")",
",",
"text",
")",
"return",
"_pgettext"
] | create a fake implementation of pgettext . | train | false |
2,097 | def _decode_address_list_field(address_list):
if (not address_list):
return None
if (len(address_list) == 1):
return _decode_and_join_header(address_list[0])
else:
return map(_decode_and_join_header, address_list)
| [
"def",
"_decode_address_list_field",
"(",
"address_list",
")",
":",
"if",
"(",
"not",
"address_list",
")",
":",
"return",
"None",
"if",
"(",
"len",
"(",
"address_list",
")",
"==",
"1",
")",
":",
"return",
"_decode_and_join_header",
"(",
"address_list",
"[",
... | helper function to decode address lists . | train | false |
2,098 | def _check_params(X, metric, p, metric_params):
params = zip(['metric', 'p', 'metric_params'], [metric, p, metric_params])
est_params = X.get_params()
for (param_name, func_param) in params:
if (func_param != est_params[param_name]):
raise ValueError(('Got %s for %s, while the estimator has %s for the same para... | [
"def",
"_check_params",
"(",
"X",
",",
"metric",
",",
"p",
",",
"metric_params",
")",
":",
"params",
"=",
"zip",
"(",
"[",
"'metric'",
",",
"'p'",
",",
"'metric_params'",
"]",
",",
"[",
"metric",
",",
"p",
",",
"metric_params",
"]",
")",
"est_params",
... | helper to validate params . | train | false |
2,099 | def luhnCheck(value):
arr = []
for c in value:
if c.isdigit():
arr.append(int(c))
arr.reverse()
for idx in [i for i in range(len(arr)) if (i % 2)]:
d = (arr[idx] * 2)
if (d > 9):
d = ((d / 10) + (d % 10))
arr[idx] = d
sm = sum(arr)
return (not (sm % 10))
| [
"def",
"luhnCheck",
"(",
"value",
")",
":",
"arr",
"=",
"[",
"]",
"for",
"c",
"in",
"value",
":",
"if",
"c",
".",
"isdigit",
"(",
")",
":",
"arr",
".",
"append",
"(",
"int",
"(",
"c",
")",
")",
"arr",
".",
"reverse",
"(",
")",
"for",
"idx",
... | the luhn check against the value which can be an array of digits . | train | false |
2,100 | def establish_connection(ip, username='', password='', delay=1):
remote_conn = telnetlib.Telnet(ip, TELNET_PORT, TELNET_TIMEOUT)
output = remote_conn.read_until('sername:', READ_TIMEOUT)
remote_conn.write((username + '\n'))
output = remote_conn.read_until('ssword:', READ_TIMEOUT)
remote_conn.write((password + '\n'... | [
"def",
"establish_connection",
"(",
"ip",
",",
"username",
"=",
"''",
",",
"password",
"=",
"''",
",",
"delay",
"=",
"1",
")",
":",
"remote_conn",
"=",
"telnetlib",
".",
"Telnet",
"(",
"ip",
",",
"TELNET_PORT",
",",
"TELNET_TIMEOUT",
")",
"output",
"=",
... | use paramiko to establish an ssh channel to the device must return both return_conn_pre and return_conn so that the ssh connection is not garbage collected . | train | false |
2,101 | def interpolate(value_from, value_to, step=10):
if (type(value_from) in (list, tuple)):
out = []
for (x, y) in zip(value_from, value_to):
out.append(interpolate(x, y, step))
return out
else:
return (value_from + ((value_to - value_from) / float(step)))
| [
"def",
"interpolate",
"(",
"value_from",
",",
"value_to",
",",
"step",
"=",
"10",
")",
":",
"if",
"(",
"type",
"(",
"value_from",
")",
"in",
"(",
"list",
",",
"tuple",
")",
")",
":",
"out",
"=",
"[",
"]",
"for",
"(",
"x",
",",
"y",
")",
"in",
... | construct an interpolating polynomial for the data points . | train | false |
2,104 | def assert_student_view(block, fragment):
try:
html = lxml.html.fragment_fromstring(fragment.content)
except lxml.etree.ParserError:
assert_student_view_invalid_html(block, fragment.content)
else:
assert_student_view_valid_html(block, html)
| [
"def",
"assert_student_view",
"(",
"block",
",",
"fragment",
")",
":",
"try",
":",
"html",
"=",
"lxml",
".",
"html",
".",
"fragment_fromstring",
"(",
"fragment",
".",
"content",
")",
"except",
"lxml",
".",
"etree",
".",
"ParserError",
":",
"assert_student_vi... | helper function to assert that the fragment is valid output the specified blocks student_view . | train | false |
2,105 | def list_nodes(call=None):
if (call == 'action'):
raise SaltCloudSystemExit('The list_nodes function must be called with -f or --function.')
nodes = list_nodes_full()
ret = {}
for (instance_id, full_node) in nodes.items():
ret[instance_id] = {'id': full_node['id'], 'image': full_node['image'], 'size': full_node... | [
"def",
"list_nodes",
"(",
"call",
"=",
"None",
")",
":",
"if",
"(",
"call",
"==",
"'action'",
")",
":",
"raise",
"SaltCloudSystemExit",
"(",
"'The list_nodes function must be called with -f or --function.'",
")",
"nodes",
"=",
"list_nodes_full",
"(",
")",
"ret",
"... | because this module is not specific to any cloud providers . | train | true |
2,107 | def multiply_timedelta(interval, number):
return timedelta(seconds=(timedelta_total_seconds(interval) * number))
| [
"def",
"multiply_timedelta",
"(",
"interval",
",",
"number",
")",
":",
"return",
"timedelta",
"(",
"seconds",
"=",
"(",
"timedelta_total_seconds",
"(",
"interval",
")",
"*",
"number",
")",
")"
] | timedeltas can not normally be multiplied by floating points . | train | false |
2,108 | def splint(a, b, tck, full_output=0):
if isinstance(tck, BSpline):
if (tck.c.ndim > 1):
mesg = 'Calling splint() with BSpline objects with c.ndim > 1 is not recommended. Use BSpline.integrate() instead.'
warnings.warn(mesg, DeprecationWarning)
if (full_output != 0):
mesg = ('full_output = %s is not suppor... | [
"def",
"splint",
"(",
"a",
",",
"b",
",",
"tck",
",",
"full_output",
"=",
"0",
")",
":",
"if",
"isinstance",
"(",
"tck",
",",
"BSpline",
")",
":",
"if",
"(",
"tck",
".",
"c",
".",
"ndim",
">",
"1",
")",
":",
"mesg",
"=",
"'Calling splint() with B... | evaluate the definite integral of a b-spline . | train | false |
2,109 | def _a_generator(foo):
(yield 42)
(yield foo)
| [
"def",
"_a_generator",
"(",
"foo",
")",
":",
"(",
"yield",
"42",
")",
"(",
"yield",
"foo",
")"
] | used to have an object to return for generators . | train | false |
2,110 | def freq_to_period(freq):
if (not isinstance(freq, offsets.DateOffset)):
freq = to_offset(freq)
freq = freq.rule_code.upper()
if ((freq == 'A') or freq.startswith(('A-', 'AS-'))):
return 1
elif ((freq == 'Q') or freq.startswith(('Q-', 'QS-'))):
return 4
elif ((freq == 'M') or freq.startswith(('M-', 'MS'))):
... | [
"def",
"freq_to_period",
"(",
"freq",
")",
":",
"if",
"(",
"not",
"isinstance",
"(",
"freq",
",",
"offsets",
".",
"DateOffset",
")",
")",
":",
"freq",
"=",
"to_offset",
"(",
"freq",
")",
"freq",
"=",
"freq",
".",
"rule_code",
".",
"upper",
"(",
")",
... | convert a pandas frequency to a periodicity parameters freq : str or offset frequency to convert returns period : int periodicity of freq notes annual maps to 1 . | train | false |
2,112 | def getdate(string_date=None):
if (not string_date):
return get_datetime().date()
if isinstance(string_date, datetime.datetime):
return string_date.date()
elif isinstance(string_date, datetime.date):
return string_date
if ((not string_date) or (string_date == u'0000-00-00')):
return None
return parser.pars... | [
"def",
"getdate",
"(",
"string_date",
"=",
"None",
")",
":",
"if",
"(",
"not",
"string_date",
")",
":",
"return",
"get_datetime",
"(",
")",
".",
"date",
"(",
")",
"if",
"isinstance",
"(",
"string_date",
",",
"datetime",
".",
"datetime",
")",
":",
"retu... | coverts string date to datetime . | train | false |
2,113 | def _tanh(p, x, prec):
R = p.ring
p1 = R(0)
for precx in _giant_steps(prec):
tmp = (p - rs_atanh(p1, x, precx))
tmp = rs_mul(tmp, (1 - rs_square(p1, x, prec)), x, precx)
p1 += tmp
return p1
| [
"def",
"_tanh",
"(",
"p",
",",
"x",
",",
"prec",
")",
":",
"R",
"=",
"p",
".",
"ring",
"p1",
"=",
"R",
"(",
"0",
")",
"for",
"precx",
"in",
"_giant_steps",
"(",
"prec",
")",
":",
"tmp",
"=",
"(",
"p",
"-",
"rs_atanh",
"(",
"p1",
",",
"x",
... | helper function of rs_tanh return the series expansion of tanh of a univariate series using newtons method . | train | false |
2,114 | def BuildAdGroupAdOperations(adgroup_operations):
adgroup_ad_operations = [{'xsi_type': 'AdGroupAdOperation', 'operand': {'adGroupId': adgroup_operation['operand']['id'], 'ad': {'xsi_type': 'TextAd', 'headline': 'Luxury Cruise to Mars', 'description1': 'Visit the Red Planet in style.', 'description2': 'Low-gravity fun... | [
"def",
"BuildAdGroupAdOperations",
"(",
"adgroup_operations",
")",
":",
"adgroup_ad_operations",
"=",
"[",
"{",
"'xsi_type'",
":",
"'AdGroupAdOperation'",
",",
"'operand'",
":",
"{",
"'adGroupId'",
":",
"adgroup_operation",
"[",
"'operand'",
"]",
"[",
"'id'",
"]",
... | builds the operations adding an expandedtextad to each adgroup . | train | false |
2,116 | def set_course_cohort_settings(course_key, **kwargs):
fields = {'is_cohorted': bool, 'always_cohort_inline_discussions': bool, 'cohorted_discussions': list}
course_cohort_settings = get_course_cohort_settings(course_key)
for (field, field_type) in fields.items():
if (field in kwargs):
if (not isinstance(kwargs[... | [
"def",
"set_course_cohort_settings",
"(",
"course_key",
",",
"**",
"kwargs",
")",
":",
"fields",
"=",
"{",
"'is_cohorted'",
":",
"bool",
",",
"'always_cohort_inline_discussions'",
":",
"bool",
",",
"'cohorted_discussions'",
":",
"list",
"}",
"course_cohort_settings",
... | set cohort settings for a course . | train | false |
2,117 | def send_event(event, users):
queue_json_publish('notify_tornado', dict(event=event, users=users), send_notification_http)
| [
"def",
"send_event",
"(",
"event",
",",
"users",
")",
":",
"queue_json_publish",
"(",
"'notify_tornado'",
",",
"dict",
"(",
"event",
"=",
"event",
",",
"users",
"=",
"users",
")",
",",
"send_notification_http",
")"
] | users is a list of user ids . | train | false |
2,118 | def test_config_alterations_class():
class LineConfig(Config, ):
no_prefix = True
show_legend = False
fill = True
pretty_print = True
x_labels = ['a', 'b', 'c']
line1 = Line(LineConfig)
line1.add('_', [1, 2, 3])
l1 = line1.render()
LineConfig.stroke = False
line2 = Line(LineConfig)
line2.add('_', [1, 2... | [
"def",
"test_config_alterations_class",
"(",
")",
":",
"class",
"LineConfig",
"(",
"Config",
",",
")",
":",
"no_prefix",
"=",
"True",
"show_legend",
"=",
"False",
"fill",
"=",
"True",
"pretty_print",
"=",
"True",
"x_labels",
"=",
"[",
"'a'",
",",
"'b'",
",... | assert a config can be changed on config class . | train | false |
2,119 | def assoc_laguerre(x, n, k=0.0):
return orthogonal.eval_genlaguerre(n, k, x)
| [
"def",
"assoc_laguerre",
"(",
"x",
",",
"n",
",",
"k",
"=",
"0.0",
")",
":",
"return",
"orthogonal",
".",
"eval_genlaguerre",
"(",
"n",
",",
"k",
",",
"x",
")"
] | compute the generalized laguerre polynomial of degree n and order k . | train | false |
2,120 | def handleFileCollision(fileName, fileCollisionMethod):
if (fileCollisionMethod == 'overwrite'):
logging.warning(('Data file, %s, will be overwritten' % fileName))
elif (fileCollisionMethod == 'fail'):
msg = 'Data file %s already exists. Set argument fileCollisionMethod to overwrite.'
raise IOError((msg % fileN... | [
"def",
"handleFileCollision",
"(",
"fileName",
",",
"fileCollisionMethod",
")",
":",
"if",
"(",
"fileCollisionMethod",
"==",
"'overwrite'",
")",
":",
"logging",
".",
"warning",
"(",
"(",
"'Data file, %s, will be overwritten'",
"%",
"fileName",
")",
")",
"elif",
"(... | handle filename collisions by overwriting . | train | false |
2,121 | def unserializeObject(value):
return (base64unpickle(value) if value else None)
| [
"def",
"unserializeObject",
"(",
"value",
")",
":",
"return",
"(",
"base64unpickle",
"(",
"value",
")",
"if",
"value",
"else",
"None",
")"
] | unserializes object from given serialized form . | train | false |
2,123 | def set_fs_home():
fs_home = get_fs_home()
if (fs_home is None):
return False
else:
os.environ['FREESURFER_HOME'] = fs_home
return True
| [
"def",
"set_fs_home",
"(",
")",
":",
"fs_home",
"=",
"get_fs_home",
"(",
")",
"if",
"(",
"fs_home",
"is",
"None",
")",
":",
"return",
"False",
"else",
":",
"os",
".",
"environ",
"[",
"'FREESURFER_HOME'",
"]",
"=",
"fs_home",
"return",
"True"
] | set the freesurfer_home environment variable . | train | false |
2,124 | def child_fd_list_add(fd):
global child_fd_list
child_fd_list.append(fd)
| [
"def",
"child_fd_list_add",
"(",
"fd",
")",
":",
"global",
"child_fd_list",
"child_fd_list",
".",
"append",
"(",
"fd",
")"
] | add a file descriptor to list to be closed in child processes . | train | false |
2,125 | def bucketize(point, bucket_size):
return (bucket_size * math.floor((point / bucket_size)))
| [
"def",
"bucketize",
"(",
"point",
",",
"bucket_size",
")",
":",
"return",
"(",
"bucket_size",
"*",
"math",
".",
"floor",
"(",
"(",
"point",
"/",
"bucket_size",
")",
")",
")"
] | floor the point to the next lower multiple of bucket_size . | train | false |
2,127 | def getAlongWayHexadecimalPrimary(beginBrightness, beginRatio, colorWidth, endBrightness, endRatio):
brightness = ((beginRatio * float(beginBrightness)) + (endRatio * float(endBrightness)))
return getWidthHex(int(round(brightness)), colorWidth)
| [
"def",
"getAlongWayHexadecimalPrimary",
"(",
"beginBrightness",
",",
"beginRatio",
",",
"colorWidth",
",",
"endBrightness",
",",
"endRatio",
")",
":",
"brightness",
"=",
"(",
"(",
"beginRatio",
"*",
"float",
"(",
"beginBrightness",
")",
")",
"+",
"(",
"endRatio"... | get a primary color along the way from grey to the end color . | train | false |
2,128 | def _RetainVerticalSpacingBeforeComments(uwline):
prev_token = None
for tok in uwline.tokens:
if (tok.is_comment and prev_token):
if (((tok.lineno - tok.value.count(u'\n')) - prev_token.lineno) > 1):
tok.AdjustNewlinesBefore(ONE_BLANK_LINE)
prev_token = tok
| [
"def",
"_RetainVerticalSpacingBeforeComments",
"(",
"uwline",
")",
":",
"prev_token",
"=",
"None",
"for",
"tok",
"in",
"uwline",
".",
"tokens",
":",
"if",
"(",
"tok",
".",
"is_comment",
"and",
"prev_token",
")",
":",
"if",
"(",
"(",
"(",
"tok",
".",
"lin... | retain vertical spacing before comments . | train | false |
2,129 | def _iter_unit_summary(namespace):
from . import core
units = []
has_prefixes = set()
for (key, val) in six.iteritems(namespace):
if (not isinstance(val, core.UnitBase)):
continue
if (key != val.name):
continue
if isinstance(val, core.PrefixUnit):
has_prefixes.add(val._represents.bases[0].name)
els... | [
"def",
"_iter_unit_summary",
"(",
"namespace",
")",
":",
"from",
".",
"import",
"core",
"units",
"=",
"[",
"]",
"has_prefixes",
"=",
"set",
"(",
")",
"for",
"(",
"key",
",",
"val",
")",
"in",
"six",
".",
"iteritems",
"(",
"namespace",
")",
":",
"if",... | generates the tuple used to format the unit summary docs in generate_unit_summary . | train | false |
2,130 | def retry_over_time(fun, catch, args=[], kwargs={}, errback=None, max_retries=None, interval_start=2, interval_step=2, interval_max=30, callback=None):
retries = 0
interval_range = fxrange(interval_start, (interval_max + interval_start), interval_step, repeatlast=True)
for retries in count():
try:
return fun(*a... | [
"def",
"retry_over_time",
"(",
"fun",
",",
"catch",
",",
"args",
"=",
"[",
"]",
",",
"kwargs",
"=",
"{",
"}",
",",
"errback",
"=",
"None",
",",
"max_retries",
"=",
"None",
",",
"interval_start",
"=",
"2",
",",
"interval_step",
"=",
"2",
",",
"interva... | retry the function over and over until max retries is exceeded . | train | false |
2,132 | def binarySearch(seq, cmp_func):
lower = 0
upper = len(seq)
while (lower < upper):
index = ((lower + upper) >> 1)
diff = cmp_func(seq[index])
if (diff < 0):
upper = index
elif (diff > 0):
lower = (index + 1)
else:
return index
return None
| [
"def",
"binarySearch",
"(",
"seq",
",",
"cmp_func",
")",
":",
"lower",
"=",
"0",
"upper",
"=",
"len",
"(",
"seq",
")",
"while",
"(",
"lower",
"<",
"upper",
")",
":",
"index",
"=",
"(",
"(",
"lower",
"+",
"upper",
")",
">>",
"1",
")",
"diff",
"=... | search a value in a sequence using binary search . | train | false |
2,134 | def verify_python_version():
(major, minor, micro, release_level, serial) = sys.version_info
if (major == 2):
if (minor != 7):
msg = 'Error: Python 2.%s found but Python 2.7 required.'
print (msg % minor)
elif (major > 2):
msg = 'It seems that you are running w3af using Python3, which is not officially sup... | [
"def",
"verify_python_version",
"(",
")",
":",
"(",
"major",
",",
"minor",
",",
"micro",
",",
"release_level",
",",
"serial",
")",
"=",
"sys",
".",
"version_info",
"if",
"(",
"major",
"==",
"2",
")",
":",
"if",
"(",
"minor",
"!=",
"7",
")",
":",
"m... | check python version eq 2 . | train | false |
2,136 | def get_dataset_filename(name, ext, hid):
base = ''.join(((((c in FILENAME_VALID_CHARS) and c) or '_') for c in name))
return (base + ('_%s.%s' % (hid, ext)))
| [
"def",
"get_dataset_filename",
"(",
"name",
",",
"ext",
",",
"hid",
")",
":",
"base",
"=",
"''",
".",
"join",
"(",
"(",
"(",
"(",
"(",
"c",
"in",
"FILENAME_VALID_CHARS",
")",
"and",
"c",
")",
"or",
"'_'",
")",
"for",
"c",
"in",
"name",
")",
")",
... | builds a filename for a dataset using its name an extension . | train | false |
2,139 | def merge_adjacent(gen):
gen = iter(gen)
last = next(gen)
for this in gen:
if (this.merge_key == last.merge_key):
last.merge(this)
elif (last < this):
(yield last)
last = this
else:
raise AssertionError(('Bad order, %s > %s' % (last, this)))
(yield last)
| [
"def",
"merge_adjacent",
"(",
"gen",
")",
":",
"gen",
"=",
"iter",
"(",
"gen",
")",
"last",
"=",
"next",
"(",
"gen",
")",
"for",
"this",
"in",
"gen",
":",
"if",
"(",
"this",
".",
"merge_key",
"==",
"last",
".",
"merge_key",
")",
":",
"last",
".",... | merge adjacent messages that compare equal . | train | false |
2,140 | def test_decorator_string_issue():
source = dedent(' """\n @"""\n def bla():\n pass\n\n bla.')
s = jedi.Script(source)
assert s.completions()
assert (s._get_module().get_code() == source)
| [
"def",
"test_decorator_string_issue",
"(",
")",
":",
"source",
"=",
"dedent",
"(",
"' \"\"\"\\n @\"\"\"\\n def bla():\\n pass\\n\\n bla.'",
")",
"s",
"=",
"jedi",
".",
"Script",
"(",
"source",
")",
"assert",
"s",
".",
"completions",
"(",
")",
"ass... | test case from #589 . | train | false |
2,141 | def pportInSelected():
if (port.DlPortReadPortUchar(statusRegAdrs) & 16):
return 1
else:
return 0
| [
"def",
"pportInSelected",
"(",
")",
":",
"if",
"(",
"port",
".",
"DlPortReadPortUchar",
"(",
"statusRegAdrs",
")",
"&",
"16",
")",
":",
"return",
"1",
"else",
":",
"return",
"0"
] | input from select pin . | train | false |
2,142 | def _attempt_YYYYMMDD(arg, errors):
def calc(carg):
carg = carg.astype(object)
parsed = lib.try_parse_year_month_day((carg / 10000), ((carg / 100) % 100), (carg % 100))
return tslib.array_to_datetime(parsed, errors=errors)
def calc_with_mask(carg, mask):
result = np.empty(carg.shape, dtype='M8[ns]')
iresult... | [
"def",
"_attempt_YYYYMMDD",
"(",
"arg",
",",
"errors",
")",
":",
"def",
"calc",
"(",
"carg",
")",
":",
"carg",
"=",
"carg",
".",
"astype",
"(",
"object",
")",
"parsed",
"=",
"lib",
".",
"try_parse_year_month_day",
"(",
"(",
"carg",
"/",
"10000",
")",
... | try to parse the yyyymmdd/%y%m%d format . | train | false |
2,143 | def disallow(nodes):
def disallowed(cls):
cls.unsupported_nodes = ()
for node in nodes:
new_method = _node_not_implemented(node, cls)
name = 'visit_{0}'.format(node)
cls.unsupported_nodes += (name,)
setattr(cls, name, new_method)
return cls
return disallowed
| [
"def",
"disallow",
"(",
"nodes",
")",
":",
"def",
"disallowed",
"(",
"cls",
")",
":",
"cls",
".",
"unsupported_nodes",
"=",
"(",
")",
"for",
"node",
"in",
"nodes",
":",
"new_method",
"=",
"_node_not_implemented",
"(",
"node",
",",
"cls",
")",
"name",
"... | decorator to disallow certain nodes from parsing . | train | true |
2,144 | def get_translated_storefile(store, pootle_path=None):
storeclass = store.syncer.file_class
filestore = store.syncer.convert(storeclass)
for unit in filestore.units:
if (not unit.istranslated()):
unit.target = ('Translation of %s' % unit.source)
path = (pootle_path if (pootle_path is not None) else store.pootl... | [
"def",
"get_translated_storefile",
"(",
"store",
",",
"pootle_path",
"=",
"None",
")",
":",
"storeclass",
"=",
"store",
".",
"syncer",
".",
"file_class",
"filestore",
"=",
"store",
".",
"syncer",
".",
"convert",
"(",
"storeclass",
")",
"for",
"unit",
"in",
... | returns file store with added translations for untranslated units . | train | false |
2,148 | def interdiffs_with_comments(review, current_pair):
if (not review):
return
diffsets = DiffSet.objects.filter(files__comments__review=review)
diffsets = diffsets.filter(files__comments__interfilediff__isnull=False)
diffsets = diffsets.distinct()
for diffset in diffsets:
interdiffs = DiffSet.objects.filter(file... | [
"def",
"interdiffs_with_comments",
"(",
"review",
",",
"current_pair",
")",
":",
"if",
"(",
"not",
"review",
")",
":",
"return",
"diffsets",
"=",
"DiffSet",
".",
"objects",
".",
"filter",
"(",
"files__comments__review",
"=",
"review",
")",
"diffsets",
"=",
"... | get a list of interdiffs in the review that contain draft comments . | train | false |
2,149 | @task()
@timeit
def send_group_email(announcement_id):
try:
announcement = Announcement.objects.get(pk=announcement_id)
except Announcement.DoesNotExist:
return
group = announcement.group
users = User.objects.filter(groups__in=[group])
plain_content = bleach.clean(announcement.content_parsed, tags=[], strip=Tr... | [
"@",
"task",
"(",
")",
"@",
"timeit",
"def",
"send_group_email",
"(",
"announcement_id",
")",
":",
"try",
":",
"announcement",
"=",
"Announcement",
".",
"objects",
".",
"get",
"(",
"pk",
"=",
"announcement_id",
")",
"except",
"Announcement",
".",
"DoesNotExi... | build and send the announcement emails to a group . | train | false |
2,150 | def assertUrisEqual(testcase, expected, actual):
expected = urlparse(expected)
actual = urlparse(actual)
testcase.assertEqual(expected.scheme, actual.scheme)
testcase.assertEqual(expected.netloc, actual.netloc)
testcase.assertEqual(expected.path, actual.path)
testcase.assertEqual(expected.params, actual.params)
... | [
"def",
"assertUrisEqual",
"(",
"testcase",
",",
"expected",
",",
"actual",
")",
":",
"expected",
"=",
"urlparse",
"(",
"expected",
")",
"actual",
"=",
"urlparse",
"(",
"actual",
")",
"testcase",
".",
"assertEqual",
"(",
"expected",
".",
"scheme",
",",
"act... | test that uris are the same . | train | false |
2,151 | def makeMimi(upid):
strSeed = 'gGddgPfeaf_gzyr'
prehash = ((upid + '_') + strSeed)
return md5(prehash.encode('utf-8')).hexdigest()
| [
"def",
"makeMimi",
"(",
"upid",
")",
":",
"strSeed",
"=",
"'gGddgPfeaf_gzyr'",
"prehash",
"=",
"(",
"(",
"upid",
"+",
"'_'",
")",
"+",
"strSeed",
")",
"return",
"md5",
"(",
"prehash",
".",
"encode",
"(",
"'utf-8'",
")",
")",
".",
"hexdigest",
"(",
")... | from URL also com . | train | true |
2,152 | def is_multigraphical(sequence):
deg_sequence = list(sequence)
if (not nx.utils.is_list_of_ints(deg_sequence)):
return False
(dsum, dmax) = (0, 0)
for d in deg_sequence:
if (d < 0):
return False
(dsum, dmax) = ((dsum + d), max(dmax, d))
if ((dsum % 2) or (dsum < (2 * dmax))):
return False
return True
| [
"def",
"is_multigraphical",
"(",
"sequence",
")",
":",
"deg_sequence",
"=",
"list",
"(",
"sequence",
")",
"if",
"(",
"not",
"nx",
".",
"utils",
".",
"is_list_of_ints",
"(",
"deg_sequence",
")",
")",
":",
"return",
"False",
"(",
"dsum",
",",
"dmax",
")",
... | returns true if some multigraph can realize the sequence . | train | false |
2,153 | def add_ssh_public_keys(name, filenames):
from fabtools.require.files import directory as _require_directory, file as _require_file
ssh_dir = posixpath.join(home_directory(name), '.ssh')
_require_directory(ssh_dir, mode='700', owner=name, use_sudo=True)
authorized_keys_filename = posixpath.join(ssh_dir, 'authorized... | [
"def",
"add_ssh_public_keys",
"(",
"name",
",",
"filenames",
")",
":",
"from",
"fabtools",
".",
"require",
".",
"files",
"import",
"directory",
"as",
"_require_directory",
",",
"file",
"as",
"_require_file",
"ssh_dir",
"=",
"posixpath",
".",
"join",
"(",
"home... | add multiple public keys to the users authorized ssh keys . | train | false |
2,155 | def discretize_bilinear_2D(model, x_range, y_range):
x = np.arange((x_range[0] - 0.5), (x_range[1] + 0.5))
y = np.arange((y_range[0] - 0.5), (y_range[1] + 0.5))
(x, y) = np.meshgrid(x, y)
values_intermediate_grid = model(x, y)
values = (0.5 * (values_intermediate_grid[1:, :] + values_intermediate_grid[:(-1), :]))
... | [
"def",
"discretize_bilinear_2D",
"(",
"model",
",",
"x_range",
",",
"y_range",
")",
":",
"x",
"=",
"np",
".",
"arange",
"(",
"(",
"x_range",
"[",
"0",
"]",
"-",
"0.5",
")",
",",
"(",
"x_range",
"[",
"1",
"]",
"+",
"0.5",
")",
")",
"y",
"=",
"np... | discretize model by performing a bilinear interpolation . | train | false |
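Each row above pairs a Python snippet with its token sequence, a one-line natural-language summary, the split name, and a duplicate flag. As a minimal sketch of how records with this schema might be consumed (assuming the data is exported as JSON Lines with the field names from the header; the file name below is a placeholder, not the actual export path), one could iterate it like this:

```python
import json

# Placeholder path: the actual export location of this dataset is not specified here.
DATA_PATH = "train.jsonl"

def iter_rows(path):
    """Yield one record per line, using the fields shown in the table header."""
    with open(path, "r", encoding="utf-8") as fh:
        for line in fh:
            row = json.loads(line)
            # Fields assumed from the header:
            #   id_within_dataset: int, snippet: str, tokens: list[str],
            #   nl: str, split_within_dataset: str, is_duplicated: bool
            yield row

if __name__ == "__main__":
    for row in iter_rows(DATA_PATH):
        print(row["id_within_dataset"], len(row["tokens"]), row["nl"])
        break  # show only the first record
```

The same loop can be used for simple filtering, for example keeping only records where `is_duplicated` is false before building a training set.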