id_within_dataset int64 1 55.5k | snippet stringlengths 19 14.2k | tokens listlengths 6 1.63k | nl stringlengths 6 352 | split_within_dataset stringclasses 1 value | is_duplicated bool 2 classes |
|---|---|---|---|---|---|
52,260 | def remove_grunt_files():
    for filename in ['Gruntfile.js']:
        os.remove(os.path.join(PROJECT_DIRECTORY, filename))
| [
"def",
"remove_grunt_files",
"(",
")",
":",
"for",
"filename",
"in",
"[",
"'Gruntfile.js'",
"]",
":",
"os",
".",
"remove",
"(",
"os",
".",
"path",
".",
"join",
"(",
"PROJECT_DIRECTORY",
",",
"filename",
")",
")"
] | removes files needed for grunt if it isn't going to be used . | train | false |
52,261 | def set_ip_nonlocal_bind(value, namespace=None, log_fail_as_error=True):
    cmd = [('%s=%d' % (IP_NONLOCAL_BIND, value))]
    return sysctl(cmd, namespace=namespace, log_fail_as_error=log_fail_as_error)
| [
"def",
"set_ip_nonlocal_bind",
"(",
"value",
",",
"namespace",
"=",
"None",
",",
"log_fail_as_error",
"=",
"True",
")",
":",
"cmd",
"=",
"[",
"(",
"'%s=%d'",
"%",
"(",
"IP_NONLOCAL_BIND",
",",
"value",
")",
")",
"]",
"return",
"sysctl",
"(",
"cmd",
",",
... | set sysctl knob of ip_nonlocal_bind to given value . | train | false |
52,264 | def DocumentListFeedFromString(xml_string):
    return atom.CreateClassFromXMLString(DocumentListFeed, xml_string)
| [
"def",
"DocumentListFeedFromString",
"(",
"xml_string",
")",
":",
"return",
"atom",
".",
"CreateClassFromXMLString",
"(",
"DocumentListFeed",
",",
"xml_string",
")"
] | converts an xml string into a documentlistfeed object . | train | false |
52,267 | def calc_age(date, trans=False):
    if trans:
        d = T('d')
        h = T('h')
        m = T('m')
    else:
        d = 'd'
        h = 'h'
        m = 'm'
    try:
        now = datetime.datetime.now()
        dage = (now - date)
        seconds = dage.seconds
        if dage.days:
            age = ('%s%s' % (dage.days, d))
        elif (seconds / 3600):
            age = ('%s%s' % ((seconds / 3600), h))
        else:
            age = ('%s%s' % ((seconds / 60), m))
    except:
        age = '-'
    return age
| [
"def",
"calc_age",
"(",
"date",
",",
"trans",
"=",
"False",
")",
":",
"if",
"trans",
":",
"d",
"=",
"T",
"(",
"'d'",
")",
"h",
"=",
"T",
"(",
"'h'",
")",
"m",
"=",
"T",
"(",
"'m'",
")",
"else",
":",
"d",
"=",
"'d'",
"h",
"=",
"'h'",
"m",
... | calculate the age difference between now and date . | train | false |
52,269 | def create_end_of_rib_update():
    mpunreach_attr = BGPPathAttributeMpUnreachNLRI(RF_IPv4_VPN.afi, RF_IPv4_VPN.safi, [])
    eor = BGPUpdate(path_attributes=[mpunreach_attr])
    return eor
| [
"def",
"create_end_of_rib_update",
"(",
")",
":",
"mpunreach_attr",
"=",
"BGPPathAttributeMpUnreachNLRI",
"(",
"RF_IPv4_VPN",
".",
"afi",
",",
"RF_IPv4_VPN",
".",
"safi",
",",
"[",
"]",
")",
"eor",
"=",
"BGPUpdate",
"(",
"path_attributes",
"=",
"[",
"mpunreach_a... | construct end-of-rib update instance . | train | true |
52,271 | def strip_encoding_cookie(filelike):
    it = iter(filelike)
    try:
        first = next(it)
        if (not cookie_comment_re.match(first)):
            (yield first)
        second = next(it)
        if (not cookie_comment_re.match(second)):
            (yield second)
    except StopIteration:
        return
    for line in it:
        (yield line)
| [
"def",
"strip_encoding_cookie",
"(",
"filelike",
")",
":",
"it",
"=",
"iter",
"(",
"filelike",
")",
"try",
":",
"first",
"=",
"next",
"(",
"it",
")",
"if",
"(",
"not",
"cookie_comment_re",
".",
"match",
"(",
"first",
")",
")",
":",
"(",
"yield",
"fir... | generator to pull lines from a text-mode file . | train | true |
52,272 | def iter_words(string):
    i = 0
    last_sep_index = (-1)
    inside_word = False
    for char in string:
        if ((ord(char) < 128) and (char in seps)):
            if inside_word:
                (yield _Word(span=((last_sep_index + 1), i), value=string[(last_sep_index + 1):i]))
            inside_word = False
            last_sep_index = i
        else:
            inside_word = True
        i += 1
    if inside_word:
        (yield _Word(span=((last_sep_index + 1), i), value=string[(last_sep_index + 1):i]))
| [
"def",
"iter_words",
"(",
"string",
")",
":",
"i",
"=",
"0",
"last_sep_index",
"=",
"(",
"-",
"1",
")",
"inside_word",
"=",
"False",
"for",
"char",
"in",
"string",
":",
"if",
"(",
"(",
"ord",
"(",
"char",
")",
"<",
"128",
")",
"and",
"(",
"char",... | iterate on all words in a string . | train | false |
52,273 | def process_input_files(inputs):
    for ifile in inputs:
        with open(ifile) as fin:
            exec compile(fin.read(), ifile, 'exec')
| [
"def",
"process_input_files",
"(",
"inputs",
")",
":",
"for",
"ifile",
"in",
"inputs",
":",
"with",
"open",
"(",
"ifile",
")",
"as",
"fin",
":",
"exec",
"compile",
"(",
"fin",
".",
"read",
"(",
")",
",",
"ifile",
",",
"'exec'",
")"
] | read input source files for execution of legacy @export / @exportmany decorators . | train | false |
52,274 | def PackLongList(longs):
    packed = struct.pack(('<' + ('q' * len(longs))), *longs)
    if ((len(packed) > (8 * 15)) or (longs[0] == 18446744073709551615L)):
        return ('\xff\xff\xff\xff\xff\xff\xff\xff' + zlib.compress(packed))
    else:
        return packed
| [
"def",
"PackLongList",
"(",
"longs",
")",
":",
"packed",
"=",
"struct",
".",
"pack",
"(",
"(",
"'<'",
"+",
"(",
"'q'",
"*",
"len",
"(",
"longs",
")",
")",
")",
",",
"*",
"longs",
")",
"if",
"(",
"(",
"len",
"(",
"packed",
")",
">",
"(",
"8",
... | pack a list of longs to a compact string . | train | false |
52,275 | def calc_stats(data):
    x = np.asarray(data, np.float)
    vals_min = np.min(x)
    vals_max = np.max(x)
    q2 = np.percentile(x, 50, interpolation='linear')
    q1 = np.percentile(x, 25, interpolation='lower')
    q3 = np.percentile(x, 75, interpolation='higher')
    iqr = (q3 - q1)
    whisker_dist = (1.5 * iqr)
    d1 = np.min(x[(x >= (q1 - whisker_dist))])
    d2 = np.max(x[(x <= (q3 + whisker_dist))])
    return {'min': vals_min, 'max': vals_max, 'q1': q1, 'q2': q2, 'q3': q3, 'd1': d1, 'd2': d2}
| [
"def",
"calc_stats",
"(",
"data",
")",
":",
"x",
"=",
"np",
".",
"asarray",
"(",
"data",
",",
"np",
".",
"float",
")",
"vals_min",
"=",
"np",
".",
"min",
"(",
"x",
")",
"vals_max",
"=",
"np",
".",
"max",
"(",
"x",
")",
"q2",
"=",
"np",
".",
... | calculate statistics for use in violin plot . | train | false |
52,276 | def strip_newsgroup_header(text):
    (_before, _blankline, after) = text.partition('\n\n')
    return after
| [
"def",
"strip_newsgroup_header",
"(",
"text",
")",
":",
"(",
"_before",
",",
"_blankline",
",",
"after",
")",
"=",
"text",
".",
"partition",
"(",
"'\\n\\n'",
")",
"return",
"after"
] | given text in "news" format . | train | false |
52,278 | def ParseKey(key_string):
    if (not key_string):
        return None
    if (key_string == 'None'):
        return None
    return datastore.Key(encoded=key_string)
| [
"def",
"ParseKey",
"(",
"key_string",
")",
":",
"if",
"(",
"not",
"key_string",
")",
":",
"return",
"None",
"if",
"(",
"key_string",
"==",
"'None'",
")",
":",
"return",
"None",
"return",
"datastore",
".",
"Key",
"(",
"encoded",
"=",
"key_string",
")"
] | turn a key stored in the database into a key or none . | train | false |
52,279 | def im_feeling_lucky(image_data, output_encoding=PNG, quality=None, correct_orientation=UNCHANGED_ORIENTATION, rpc=None, transparent_substitution_rgb=None):
    rpc = im_feeling_lucky_async(image_data, output_encoding=output_encoding, quality=quality, correct_orientation=correct_orientation, rpc=rpc, transparent_substitution_rgb=transparent_substitution_rgb)
    return rpc.get_result()
| [
"def",
"im_feeling_lucky",
"(",
"image_data",
",",
"output_encoding",
"=",
"PNG",
",",
"quality",
"=",
"None",
",",
"correct_orientation",
"=",
"UNCHANGED_ORIENTATION",
",",
"rpc",
"=",
"None",
",",
"transparent_substitution_rgb",
"=",
"None",
")",
":",
"rpc",
"... | automatically adjust image levels . | train | false |
52,280 | def update_event_source_mapping(UUID, FunctionName=None, Enabled=None, BatchSize=None, region=None, key=None, keyid=None, profile=None):
    try:
        conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
        args = {}
        if (FunctionName is not None):
            args['FunctionName'] = FunctionName
        if (Enabled is not None):
            args['Enabled'] = Enabled
        if (BatchSize is not None):
            args['BatchSize'] = BatchSize
        r = conn.update_event_source_mapping(UUID=UUID, **args)
        if r:
            keys = ('UUID', 'BatchSize', 'EventSourceArn', 'FunctionArn', 'LastModified', 'LastProcessingResult', 'State', 'StateTransitionReason')
            return {'updated': True, 'event_source_mapping': dict([(k, r.get(k)) for k in keys])}
        else:
            log.warning('Mapping was not updated')
            return {'updated': False}
    except ClientError as e:
        return {'created': False, 'error': salt.utils.boto3.get_error(e)}
| [
"def",
"update_event_source_mapping",
"(",
"UUID",
",",
"FunctionName",
"=",
"None",
",",
"Enabled",
"=",
"None",
",",
"BatchSize",
"=",
"None",
",",
"region",
"=",
"None",
",",
"key",
"=",
"None",
",",
"keyid",
"=",
"None",
",",
"profile",
"=",
"None",
... | update the event source mapping identified by the uuid . | train | true |
52,281 | def get_current_canvas():
    cc = [c() for c in canvasses if (c() is not None)]
    if cc:
        return cc[(-1)]
    else:
        return None
| [
"def",
"get_current_canvas",
"(",
")",
":",
"cc",
"=",
"[",
"c",
"(",
")",
"for",
"c",
"in",
"canvasses",
"if",
"(",
"c",
"(",
")",
"is",
"not",
"None",
")",
"]",
"if",
"cc",
":",
"return",
"cc",
"[",
"(",
"-",
"1",
")",
"]",
"else",
":",
"... | get the currently active canvas returns none if there is no canvas available . | train | false |
52,282 | def deconvolution_nd(x, W, b=None, stride=1, pad=0, outsize=None, use_cudnn=True):
    ndim = len(x.shape[2:])
    func = DeconvolutionND(ndim, stride, pad, outsize, use_cudnn)
    if (b is None):
        return func(x, W)
    else:
        return func(x, W, b)
| [
"def",
"deconvolution_nd",
"(",
"x",
",",
"W",
",",
"b",
"=",
"None",
",",
"stride",
"=",
"1",
",",
"pad",
"=",
"0",
",",
"outsize",
"=",
"None",
",",
"use_cudnn",
"=",
"True",
")",
":",
"ndim",
"=",
"len",
"(",
"x",
".",
"shape",
"[",
"2",
"... | n-dimensional deconvolution function . | train | false |
52,283 | def get_unique_databases_and_mirrors():
    mirrored_aliases = {}
    test_databases = {}
    dependencies = {}
    default_sig = connections[DEFAULT_DB_ALIAS].creation.test_db_signature()
    for alias in connections:
        connection = connections[alias]
        test_settings = connection.settings_dict['TEST']
        if test_settings['MIRROR']:
            mirrored_aliases[alias] = test_settings['MIRROR']
        else:
            item = test_databases.setdefault(connection.creation.test_db_signature(), (connection.settings_dict['NAME'], set()))
            item[1].add(alias)
            if ('DEPENDENCIES' in test_settings):
                dependencies[alias] = test_settings['DEPENDENCIES']
            elif ((alias != DEFAULT_DB_ALIAS) and (connection.creation.test_db_signature() != default_sig)):
                dependencies[alias] = test_settings.get('DEPENDENCIES', [DEFAULT_DB_ALIAS])
    test_databases = dependency_ordered(test_databases.items(), dependencies)
    test_databases = collections.OrderedDict(test_databases)
    return (test_databases, mirrored_aliases)
| [
"def",
"get_unique_databases_and_mirrors",
"(",
")",
":",
"mirrored_aliases",
"=",
"{",
"}",
"test_databases",
"=",
"{",
"}",
"dependencies",
"=",
"{",
"}",
"default_sig",
"=",
"connections",
"[",
"DEFAULT_DB_ALIAS",
"]",
".",
"creation",
".",
"test_db_signature",... | figure out which databases actually need to be created . | train | false |
52,284 | def picknthweekday(year, month, dayofweek, hour, minute, whichweek):
    first = datetime.datetime(year, month, 1, hour, minute)
    weekdayone = first.replace(day=(((dayofweek - first.isoweekday()) % 7) + 1))
    wd = (weekdayone + ((whichweek - 1) * ONEWEEK))
    if (wd.month != month):
        wd -= ONEWEEK
    return wd
| [
"def",
"picknthweekday",
"(",
"year",
",",
"month",
",",
"dayofweek",
",",
"hour",
",",
"minute",
",",
"whichweek",
")",
":",
"first",
"=",
"datetime",
".",
"datetime",
"(",
"year",
",",
"month",
",",
"1",
",",
"hour",
",",
"minute",
")",
"weekdayone",... | dayofweek == 0 means sunday . | train | true |
52,285 | def build_tree(lengths):
    max_length = (max(lengths) + 1)
    bit_counts = ([0] * max_length)
    next_code = ([0] * max_length)
    tree = {}
    for i in lengths:
        if i:
            bit_counts[i] += 1
    code = 0
    for i in xrange(1, len(bit_counts)):
        next_code[i] = code = ((code + bit_counts[(i - 1)]) << 1)
    for (i, ln) in enumerate(lengths):
        if ln:
            tree[(ln, next_code[ln])] = i
            next_code[ln] += 1
    return tree
| [
"def",
"build_tree",
"(",
"lengths",
")",
":",
"max_length",
"=",
"(",
"max",
"(",
"lengths",
")",
"+",
"1",
")",
"bit_counts",
"=",
"(",
"[",
"0",
"]",
"*",
"max_length",
")",
"next_code",
"=",
"(",
"[",
"0",
"]",
"*",
"max_length",
")",
"tree",
... | build a huffman tree from a list of lengths . | train | false |
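A small worked example of the canonical Huffman construction above (Python 2, given the `xrange`); symbols are list indices and a length of 0 marks an unused symbol. Keys are `(length, code)` pairs, which lets a decoder look up accumulated bits:

```python
lengths = [2, 1, 3, 3]            # code lengths for symbols 0..3
tree = build_tree(lengths)
# canonical codes: symbol 1 -> 0, symbol 0 -> 10, symbol 2 -> 110, symbol 3 -> 111
print(sorted(tree.items()))       # [((1, 0), 1), ((2, 2), 0), ((3, 6), 2), ((3, 7), 3)]
```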
52,286 | def node_expansion(G, S):
    neighborhood = set(chain.from_iterable((G.neighbors(v) for v in S)))
    return (len(neighborhood) / len(S))
| [
"def",
"node_expansion",
"(",
"G",
",",
"S",
")",
":",
"neighborhood",
"=",
"set",
"(",
"chain",
".",
"from_iterable",
"(",
"(",
"G",
".",
"neighbors",
"(",
"v",
")",
"for",
"v",
"in",
"S",
")",
")",
")",
"return",
"(",
"len",
"(",
"neighborhood",
... | returns the node expansion of the set s . | train | false |
52,287 | def assert_string_list(dist, attr, value):
    try:
        assert (''.join(value) != value)
    except (TypeError, ValueError, AttributeError, AssertionError):
        raise DistutilsSetupError(('%r must be a list of strings (got %r)' % (attr, value)))
| [
"def",
"assert_string_list",
"(",
"dist",
",",
"attr",
",",
"value",
")",
":",
"try",
":",
"assert",
"(",
"''",
".",
"join",
"(",
"value",
")",
"!=",
"value",
")",
"except",
"(",
"TypeError",
",",
"ValueError",
",",
"AttributeError",
",",
"AssertionError... | verify that value is a string list or none . | train | true |
52,288 | @requires_segment_info
def bufferlister(pl, segment_info, show_unlisted=False, **kwargs):
    cur_buffer = vim.current.buffer
    cur_bufnr = cur_buffer.number
    def add_multiplier(buffer, dct):
        dct[u'priority_multiplier'] = (1 + (0.001 * abs((buffer.number - cur_bufnr))))
        return dct
    return ((lambda buffer, current, modified: (buffer_updated_segment_info(segment_info, buffer), add_multiplier(buffer, {u'highlight_group_prefix': u'{0}{1}'.format(current, modified), u'divider_highlight_group': u'tab:divider'})))(buffer, (u'buf' if (buffer is cur_buffer) else u'buf_nc'), (u'_mod' if (int(vim.eval(u"getbufvar({0}, '&modified')".format(buffer.number))) > 0) else u'')) for buffer in vim.buffers if ((buffer is cur_buffer) or show_unlisted or (int(vim.eval((u'buflisted(%s)' % buffer.number))) > 0)))
| [
"@",
"requires_segment_info",
"def",
"bufferlister",
"(",
"pl",
",",
"segment_info",
",",
"show_unlisted",
"=",
"False",
",",
"**",
"kwargs",
")",
":",
"cur_buffer",
"=",
"vim",
".",
"current",
".",
"buffer",
"cur_bufnr",
"=",
"cur_buffer",
".",
"number",
"d... | list all buffers in segment_info format specifically generates a list of segment info dictionaries with buffer and bufnr keys set to buffer-specific ones . | train | false |
52,289 | def addon_listing(request, default='name', theme=False):
    if theme:
        qs = request.user.addons.filter(type=amo.ADDON_PERSONA)
    else:
        qs = Addon.objects.filter(authors=request.user).exclude(type=amo.ADDON_PERSONA)
    filter_cls = (ThemeFilter if theme else AddonFilter)
    filter_ = filter_cls(request, qs, 'sort', default)
    return (filter_.qs, filter_)
| [
"def",
"addon_listing",
"(",
"request",
",",
"default",
"=",
"'name'",
",",
"theme",
"=",
"False",
")",
":",
"if",
"theme",
":",
"qs",
"=",
"request",
".",
"user",
".",
"addons",
".",
"filter",
"(",
"type",
"=",
"amo",
".",
"ADDON_PERSONA",
")",
"els... | set up the queryset and filtering for addon listing for dashboard . | train | false |
52,291 | def CheckCreateDefaultGUI():
    rc = HaveGoodGUI()
    if (not rc):
        CreateDefaultGUI()
    return rc
| [
"def",
"CheckCreateDefaultGUI",
"(",
")",
":",
"rc",
"=",
"HaveGoodGUI",
"(",
")",
"if",
"(",
"not",
"rc",
")",
":",
"CreateDefaultGUI",
"(",
")",
"return",
"rc"
] | checks and creates if necessary a default gui environment . | train | false |
52,292 | def qiime_blastx_seqs(seqs, blast_constructor=Blastall, blast_db=None, refseqs=None, refseqs_fp=None, blast_mat_root=None, params={}, WorkingDir=None, seqs_per_blast_run=1000, HALT_EXEC=False):
    return qiime_blast_seqs(seqs, blast_constructor=blast_constructor, blast_program='blastx', blast_db=blast_db, refseqs=refseqs, refseqs_fp=refseqs_fp, blast_mat_root=blast_mat_root, params={}, WorkingDir=WorkingDir, seqs_per_blast_run=seqs_per_blast_run, is_protein=True, HALT_EXEC=HALT_EXEC)
| [
"def",
"qiime_blastx_seqs",
"(",
"seqs",
",",
"blast_constructor",
"=",
"Blastall",
",",
"blast_db",
"=",
"None",
",",
"refseqs",
"=",
"None",
",",
"refseqs_fp",
"=",
"None",
",",
"blast_mat_root",
"=",
"None",
",",
"params",
"=",
"{",
"}",
",",
"WorkingDi... | blast list of sequences . | train | false |
52,293 | def _get_arg_count(method):
    if (not method):
        return 0
    arg_spec = inspect.getargspec(method)
    return len(arg_spec[0])
| [
"def",
"_get_arg_count",
"(",
"method",
")",
":",
"if",
"(",
"not",
"method",
")",
":",
"return",
"0",
"arg_spec",
"=",
"inspect",
".",
"getargspec",
"(",
"method",
")",
"return",
"len",
"(",
"arg_spec",
"[",
"0",
"]",
")"
] | get the number of args for a method . | train | false |
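A quick behavior sketch; note that `inspect.getargspec` counts only positional parameters (including those with defaults) and was removed in Python 3.11, where `inspect.getfullargspec` is the replacement:

```python
def handler(event, context, verbose=False):
    pass

print(_get_arg_count(handler))   # 3 -- defaults still count as positional args
print(_get_arg_count(None))      # 0
```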
52,294 | def EncodedFile(file, data_encoding, file_encoding=None, errors='strict'):
    if (file_encoding is None):
        file_encoding = data_encoding
    data_info = lookup(data_encoding)
    file_info = lookup(file_encoding)
    sr = StreamRecoder(file, data_info.encode, data_info.decode, file_info.streamreader, file_info.streamwriter, errors)
    sr.data_encoding = data_encoding
    sr.file_encoding = file_encoding
    return sr
| [
"def",
"EncodedFile",
"(",
"file",
",",
"data_encoding",
",",
"file_encoding",
"=",
"None",
",",
"errors",
"=",
"'strict'",
")",
":",
"if",
"(",
"file_encoding",
"is",
"None",
")",
":",
"file_encoding",
"=",
"data_encoding",
"data_info",
"=",
"lookup",
"(",
... | return a wrapped version of file which provides transparent encoding translation . | train | false |
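This is the stdlib `codecs.EncodedFile` factory: reads decode the underlying stream with `file_encoding` and re-encode to `data_encoding`, and writes do the reverse. A minimal round trip:

```python
import codecs
import io

raw = io.BytesIO(u'caf\xe9'.encode('latin-1'))   # stream stored as Latin-1
ef = codecs.EncodedFile(raw, data_encoding='utf-8', file_encoding='latin-1')
print(ef.read())   # b'caf\xc3\xa9' -- same text, handed back as UTF-8 bytes
```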
52,295 | def subfolders(path):
    folders = []
    while (path not in ('/', '')):
        folders.append(path)
        path = os.path.dirname(path)
    folders.reverse()
    return folders
| [
"def",
"subfolders",
"(",
"path",
")",
":",
"folders",
"=",
"[",
"]",
"while",
"(",
"path",
"not",
"in",
"(",
"'/'",
",",
"''",
")",
")",
":",
"folders",
".",
"append",
"(",
"path",
")",
"path",
"=",
"os",
".",
"path",
".",
"dirname",
"(",
"pat... | decompose a path string into a list of subfolders eg convert apps/dashboard/ranges into [apps . | train | false |
52,299 | @depends(HAS_PYVMOMI)
def update_host_datetime(host, username, password, protocol=None, port=None, host_names=None):
    service_instance = salt.utils.vmware.get_service_instance(host=host, username=username, password=password, protocol=protocol, port=port)
    host_names = _check_hosts(service_instance, host, host_names)
    ret = {}
    for host_name in host_names:
        host_ref = _get_host_ref(service_instance, host, host_name=host_name)
        date_time_manager = _get_date_time_mgr(host_ref)
        try:
            date_time_manager.UpdateDateTime(datetime.datetime.utcnow())
        except vim.fault.HostConfigFault as err:
            msg = "'vsphere.update_date_time' failed for host {0}: {1}".format(host_name, err)
            log.debug(msg)
            ret.update({host_name: {'Error': msg}})
            continue
        ret.update({host_name: {'Datetime Updated': True}})
    return ret
| [
"@",
"depends",
"(",
"HAS_PYVMOMI",
")",
"def",
"update_host_datetime",
"(",
"host",
",",
"username",
",",
"password",
",",
"protocol",
"=",
"None",
",",
"port",
"=",
"None",
",",
"host_names",
"=",
"None",
")",
":",
"service_instance",
"=",
"salt",
".",
... | update the date/time on the given host or list of host_names . | train | true |
52,300 | def parse_qual_score(infile, value_cast_f=int):
    id_to_qual = dict([rec for rec in MinimalQualParser(infile, value_cast_f)])
    return id_to_qual
| [
"def",
"parse_qual_score",
"(",
"infile",
",",
"value_cast_f",
"=",
"int",
")",
":",
"id_to_qual",
"=",
"dict",
"(",
"[",
"rec",
"for",
"rec",
"in",
"MinimalQualParser",
"(",
"infile",
",",
"value_cast_f",
")",
"]",
")",
"return",
"id_to_qual"
] | load quality scores into dict . | train | false |
52,302 | def hash_shard(word):
    return ('server%d' % (hash(word) % 4))
| [
"def",
"hash_shard",
"(",
"word",
")",
":",
"return",
"(",
"'server%d'",
"%",
"(",
"hash",
"(",
"word",
")",
"%",
"4",
")",
")"
] | do a great job of assigning data to servers using a hash value . | train | false |
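Usage sketch; the "great job" in the label is tongue-in-cheek. Since Python 3.3 string hashes are randomized per process (`PYTHONHASHSEED`), so the assignment below is not stable across runs, and changing the hard-coded server count of 4 reshuffles almost every key:

```python
for word in ('apple', 'banana', 'cherry'):
    print(word, '->', hash_shard(word))   # e.g. apple -> server2 (varies per process)
```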
52,303 | def makeTextNotes(replaceTxtNotes):
    def _replacer(s):
        outS = replaceTxtNotes
        if (not isinstance(s, (unicode, str))):
            return s
        ssplit = s.split('::', 1)
        text = ssplit[0]
        keysDict = {}
        if text:
            keysDict['text'] = True
            outS = outS.replace('%(text)s', text)
        if (len(ssplit) == 2):
            keysDict['notes'] = True
            outS = outS.replace('%(notes)s', ssplit[1])
        else:
            outS = outS.replace('%(notes)s', u'')
        def _excludeFalseConditionals(matchobj):
            if (matchobj.group(1) in keysDict):
                return matchobj.group(2)
            return u''
        while re_conditional.search(outS):
            outS = re_conditional.sub(_excludeFalseConditionals, outS)
        return outS
    return _replacer
| [
"def",
"makeTextNotes",
"(",
"replaceTxtNotes",
")",
":",
"def",
"_replacer",
"(",
"s",
")",
":",
"outS",
"=",
"replaceTxtNotes",
"if",
"(",
"not",
"isinstance",
"(",
"s",
",",
"(",
"unicode",
",",
"str",
")",
")",
")",
":",
"return",
"s",
"ssplit",
... | create a function useful to handle text[::optional_note] values . | train | false |
52,304 | def pmonitor(popens, timeoutms=500, readline=True, readmax=1024):
    poller = poll()
    fdToHost = {}
    for (host, popen) in popens.iteritems():
        fd = popen.stdout.fileno()
        fdToHost[fd] = host
        poller.register(fd, POLLIN)
        if (not readline):
            flags = fcntl(fd, F_GETFL)
            fcntl(fd, F_SETFL, (flags | O_NONBLOCK))
    while popens:
        fds = poller.poll(timeoutms)
        if fds:
            for (fd, event) in fds:
                host = fdToHost[fd]
                popen = popens[host]
                if (event & POLLIN):
                    if readline:
                        line = popen.stdout.readline()
                    else:
                        line = popen.stdout.read(readmax)
                    (yield (host, line))
                elif (event & POLLHUP):
                    poller.unregister(fd)
                    del popens[host]
        else:
            (yield (None, ''))
| [
"def",
"pmonitor",
"(",
"popens",
",",
"timeoutms",
"=",
"500",
",",
"readline",
"=",
"True",
",",
"readmax",
"=",
"1024",
")",
":",
"poller",
"=",
"poll",
"(",
")",
"fdToHost",
"=",
"{",
"}",
"for",
"(",
"host",
",",
"popen",
")",
"in",
"popens",
... | monitor dict of hosts to popen objects a line at a time timeoutms: timeout for poll() readline: return single line of output yields: host . | train | false |
52,305 | def dummy_sparse(groups):
    from scipy import sparse
    indptr = np.arange((len(groups) + 1))
    data = np.ones(len(groups), dtype=np.int8)
    indi = sparse.csr_matrix((data, groups, indptr))
    return indi
| [
"def",
"dummy_sparse",
"(",
"groups",
")",
":",
"from",
"scipy",
"import",
"sparse",
"indptr",
"=",
"np",
".",
"arange",
"(",
"(",
"len",
"(",
"groups",
")",
"+",
"1",
")",
")",
"data",
"=",
"np",
".",
"ones",
"(",
"len",
"(",
"groups",
")",
",",... | create a sparse indicator from a group array with integer labels parameters groups: ndarray . | train | false |
52,306 | def realign_dtype(dtype, offsets):
    (cls, args, state) = dtype.__reduce__()
    (names, fields) = state[3:5]
    fields = fields.copy()
    max_offset = 0
    itemsize = state[5]
    if ((fields is None) or (len(offsets) != len(names))):
        raise ValueError('Dtype must be a structured dtype, and length of offsets list must be the same as the number of fields.')
    for (name, offset) in zip(names, offsets):
        field = fields[name]
        if (offset == field[1]):
            continue
        fields[name] = (field[0], offset)
        if (offset > max_offset):
            itemsize = (offset + field[0].itemsize)
            max_offset = offset
    new_typespec = '|V{0}'.format(itemsize)
    new_state = ((state[:4] + (fields, itemsize)) + state[6:])
    new_dtype = cls(new_typespec, *args[1:])
    new_dtype.__setstate__(new_state)
    return new_dtype
| [
"def",
"realign_dtype",
"(",
"dtype",
",",
"offsets",
")",
":",
"(",
"cls",
",",
"args",
",",
"state",
")",
"=",
"dtype",
".",
"__reduce__",
"(",
")",
"(",
"names",
",",
"fields",
")",
"=",
"state",
"[",
"3",
":",
"5",
"]",
"fields",
"=",
"fields... | given a numpy struct dtype object an a list of integer offsets . | train | false |
52,307 | def set_color_codes(palette='deep'):
    if (palette == 'reset'):
        colors = [(0.0, 0.0, 1.0), (0.0, 0.5, 0.0), (1.0, 0.0, 0.0), (0.75, 0.75, 0.0), (0.75, 0.75, 0.0), (0.0, 0.75, 0.75), (0.0, 0.0, 0.0)]
    else:
        colors = (SEABORN_PALETTES[palette] + [(0.1, 0.1, 0.1)])
    for (code, color) in zip('bgrmyck', colors):
        rgb = mpl.colors.colorConverter.to_rgb(color)
        mpl.colors.colorConverter.colors[code] = rgb
        mpl.colors.colorConverter.cache[code] = rgb
| [
"def",
"set_color_codes",
"(",
"palette",
"=",
"'deep'",
")",
":",
"if",
"(",
"palette",
"==",
"'reset'",
")",
":",
"colors",
"=",
"[",
"(",
"0.0",
",",
"0.0",
",",
"1.0",
")",
",",
"(",
"0.0",
",",
"0.5",
",",
"0.0",
")",
",",
"(",
"1.0",
",",... | change how matplotlib color shorthands are interpreted . | train | false |
52,308 | def retrieve_config():
    DEBUG = True
    net_devices = NetworkDevice.objects.all()
    for a_device in net_devices:
        if ('ssh' in a_device.device_class):
            if DEBUG:
                print 'Retrieve device configuration: {} {}\n'.format(a_device.device_name, a_device.device_class)
            ssh_connect = SSHConnection(a_device)
            ssh_connect.enable_mode()
            output = ssh_connect.send_command('show run\n')
            file_name = (a_device.device_name + '.txt')
            full_path = (CFGS_DIR + file_name)
            if DEBUG:
                print 'Writing configuration file to file system\n'
            with open(full_path, 'w') as f:
                f.write(output)
| [
"def",
"retrieve_config",
"(",
")",
":",
"DEBUG",
"=",
"True",
"net_devices",
"=",
"NetworkDevice",
".",
"objects",
".",
"all",
"(",
")",
"for",
"a_device",
"in",
"net_devices",
":",
"if",
"(",
"'ssh'",
"in",
"a_device",
".",
"device_class",
")",
":",
"i... | use ssh to retrieve the network device running configuration . | train | false |
52,309 | def modular_exp(base, exponent, modulus):
    if (exponent < 0):
        raise NegativeExponentError(('Negative exponents (%d) not allowed' % exponent))
    return pow(base, exponent, modulus)
| [
"def",
"modular_exp",
"(",
"base",
",",
"exponent",
",",
"modulus",
")",
":",
"if",
"(",
"exponent",
"<",
"0",
")",
":",
"raise",
"NegativeExponentError",
"(",
"(",
"'Negative exponents (%d) not allowed'",
"%",
"exponent",
")",
")",
"return",
"pow",
"(",
"ba... | raise base to exponent . | train | true |
52,310 | def get_checker_executable(name):
    if programs.is_program_installed(name):
        return [name]
    else:
        path1 = programs.python_script_exists(package=None, module=(name + '_script'))
        path2 = programs.python_script_exists(package=None, module=name)
        if (path1 is not None):
            return [sys.executable, path1]
        elif (path2 is not None):
            return [sys.executable, path2]
| [
"def",
"get_checker_executable",
"(",
"name",
")",
":",
"if",
"programs",
".",
"is_program_installed",
"(",
"name",
")",
":",
"return",
"[",
"name",
"]",
"else",
":",
"path1",
"=",
"programs",
".",
"python_script_exists",
"(",
"package",
"=",
"None",
",",
... | return checker executable in the form of a list of arguments for subprocess . | train | true |
52,312 | def service_enable(s_name, **connection_args):
    ret = True
    service = _service_get(s_name, **connection_args)
    if (service is None):
        return False
    nitro = _connect(**connection_args)
    if (nitro is None):
        return False
    try:
        NSService.enable(nitro, service)
    except NSNitroError as error:
        log.debug('netscaler module error - NSService.enable() failed: {0}'.format(error))
        ret = False
    _disconnect(nitro)
    return ret
| [
"def",
"service_enable",
"(",
"s_name",
",",
"**",
"connection_args",
")",
":",
"ret",
"=",
"True",
"service",
"=",
"_service_get",
"(",
"s_name",
",",
"**",
"connection_args",
")",
"if",
"(",
"service",
"is",
"None",
")",
":",
"return",
"False",
"nitro",
... | enable a service cli example: . | train | true |
52,313 | def test_sort_locations_non_existing_path():
    finder = PackageFinder([], [], session=PipSession())
    (files, urls) = finder._sort_locations([os.path.join('this', 'doesnt', 'exist')])
    assert ((not urls) and (not files)), 'nothing should have been found'
| [
"def",
"test_sort_locations_non_existing_path",
"(",
")",
":",
"finder",
"=",
"PackageFinder",
"(",
"[",
"]",
",",
"[",
"]",
",",
"session",
"=",
"PipSession",
"(",
")",
")",
"(",
"files",
",",
"urls",
")",
"=",
"finder",
".",
"_sort_locations",
"(",
"["... | test that a non-existing path is ignored . | train | false |
52,314 | def pi_float():
    (lasts, t, s, n, na, d, da) = (0, 3.0, 3, 1, 0, 0, 24)
    while (s != lasts):
        lasts = s
        (n, na) = ((n + na), (na + 8))
        (d, da) = ((d + da), (da + 32))
        t = ((t * n) / d)
        s += t
    return s
| [
"def",
"pi_float",
"(",
")",
":",
"(",
"lasts",
",",
"t",
",",
"s",
",",
"n",
",",
"na",
",",
"d",
",",
"da",
")",
"=",
"(",
"0",
",",
"3.0",
",",
"3",
",",
"1",
",",
"0",
",",
"0",
",",
"24",
")",
"while",
"(",
"s",
"!=",
"lasts",
")... | native float . | train | false |
52,315 | def rm_env(user, name):
    lst = list_tab(user)
    ret = 'absent'
    rm_ = None
    for ind in range(len(lst['env'])):
        if (name == lst['env'][ind]['name']):
            rm_ = ind
    if (rm_ is not None):
        lst['env'].pop(rm_)
        ret = 'removed'
        comdat = _write_cron_lines(user, _render_tab(lst))
        if comdat['retcode']:
            return comdat['stderr']
    return ret
| [
"def",
"rm_env",
"(",
"user",
",",
"name",
")",
":",
"lst",
"=",
"list_tab",
"(",
"user",
")",
"ret",
"=",
"'absent'",
"rm_",
"=",
"None",
"for",
"ind",
"in",
"range",
"(",
"len",
"(",
"lst",
"[",
"'env'",
"]",
")",
")",
":",
"if",
"(",
"name",... | remove cron environment variable for a specified user . | train | true |
52,316 | def effective(file, line, frame):
    possibles = Breakpoint.bplist[(file, line)]
    for i in range(0, len(possibles)):
        b = possibles[i]
        if (b.enabled == 0):
            continue
        if (not checkfuncname(b, frame)):
            continue
        b.hits = (b.hits + 1)
        if (not b.cond):
            if (b.ignore > 0):
                b.ignore = (b.ignore - 1)
                continue
            else:
                return (b, 1)
        else:
            try:
                val = eval(b.cond, frame.f_globals, frame.f_locals)
                if val:
                    if (b.ignore > 0):
                        b.ignore = (b.ignore - 1)
                    else:
                        return (b, 1)
            except:
                return (b, 0)
    return (None, None)
| [
"def",
"effective",
"(",
"file",
",",
"line",
",",
"frame",
")",
":",
"possibles",
"=",
"Breakpoint",
".",
"bplist",
"[",
"(",
"file",
",",
"line",
")",
"]",
"for",
"i",
"in",
"range",
"(",
"0",
",",
"len",
"(",
"possibles",
")",
")",
":",
"b",
... | determine which breakpoint for this file:line is to be acted upon . | train | false |
52,317 | @memoized
def format_address(addr, type):
    colorcodes = {'data': 'blue', 'code': 'red', 'rodata': 'green', 'value': None}
    return colorize(addr, colorcodes[type])
| [
"@",
"memoized",
"def",
"format_address",
"(",
"addr",
",",
"type",
")",
":",
"colorcodes",
"=",
"{",
"'data'",
":",
"'blue'",
",",
"'code'",
":",
"'red'",
",",
"'rodata'",
":",
"'green'",
",",
"'value'",
":",
"None",
"}",
"return",
"colorize",
"(",
"a... | colorize an address . | train | false |
52,319 | def mkfrag(text, tokens, startchar=None, endchar=None, charsbefore=0, charsafter=0):
    if (startchar is None):
        startchar = (tokens[0].startchar if tokens else 0)
    if (endchar is None):
        endchar = (tokens[(-1)].endchar if tokens else len(text))
    startchar = max(0, (startchar - charsbefore))
    endchar = min(len(text), (endchar + charsafter))
    return Fragment(text, tokens, startchar, endchar)
| [
"def",
"mkfrag",
"(",
"text",
",",
"tokens",
",",
"startchar",
"=",
"None",
",",
"endchar",
"=",
"None",
",",
"charsbefore",
"=",
"0",
",",
"charsafter",
"=",
"0",
")",
":",
"if",
"(",
"startchar",
"is",
"None",
")",
":",
"startchar",
"=",
"(",
"to... | returns a :class:fragment object based on the :class:analysis . | train | false |
52,322 | def _get_ext_comm_subtype(type_high):
    return _ext_comm_subtypes_classes.get(type_high, {})
| [
"def",
"_get_ext_comm_subtype",
"(",
"type_high",
")",
":",
"return",
"_ext_comm_subtypes_classes",
".",
"get",
"(",
"type_high",
",",
"{",
"}",
")"
] | returns a byteenumfield with the right sub-types dict for a given community . | train | false |
52,323 | def test_class_order():
class_names = [table.__name__ for table in tables.mapped_classes]
def key(name):
return ((name != 'Language'), name)
assert (class_names == sorted(class_names, key=key))
| [
"def",
"test_class_order",
"(",
")",
":",
"class_names",
"=",
"[",
"table",
".",
"__name__",
"for",
"table",
"in",
"tables",
".",
"mapped_classes",
"]",
"def",
"key",
"(",
"name",
")",
":",
"return",
"(",
"(",
"name",
"!=",
"'Language'",
")",
",",
"nam... | the declarative classes should be defined in alphabetical order . | train | false |
52,324 | def hex_decode(data, errors='strict'):
    return (unicode(''.join(('{:02X} '.format(ord(b)) for b in serial.iterbytes(data)))), len(data))
| [
"def",
"hex_decode",
"(",
"data",
",",
"errors",
"=",
"'strict'",
")",
":",
"return",
"(",
"unicode",
"(",
"''",
".",
"join",
"(",
"(",
"'{:02X} '",
".",
"format",
"(",
"ord",
"(",
"b",
")",
")",
"for",
"b",
"in",
"serial",
".",
"iterbytes",
"(",
... | b@ab -> 40 41 42 . | train | false |
52,326 | def invoice(request, order_id, template=u'shop/order_invoice.html', template_pdf=u'shop/order_invoice_pdf.html', extra_context=None):
    try:
        order = Order.objects.get_for_user(order_id, request)
    except Order.DoesNotExist:
        raise Http404
    context = {u'order': order}
    context.update(order.details_as_dict())
    context.update((extra_context or {}))
    if (HAS_PDF and (request.GET.get(u'format') == u'pdf')):
        response = HttpResponse(content_type=u'application/pdf')
        name = slugify((u'%s-invoice-%s' % (settings.SITE_TITLE, order.id)))
        response[u'Content-Disposition'] = (u'attachment; filename=%s.pdf' % name)
        html = get_template(template_pdf).render(context)
        pisa.CreatePDF(html, response)
        return response
    return TemplateResponse(request, template, context)
| [
"def",
"invoice",
"(",
"request",
",",
"order_id",
",",
"template",
"=",
"u'shop/order_invoice.html'",
",",
"template_pdf",
"=",
"u'shop/order_invoice_pdf.html'",
",",
"extra_context",
"=",
"None",
")",
":",
"try",
":",
"order",
"=",
"Order",
".",
"objects",
"."... | display a plain text invoice for the given order . | train | false |
52,327 | def dnsdomain_unregister(context, fqdomain):
    return IMPL.dnsdomain_unregister(context, fqdomain)
| [
"def",
"dnsdomain_unregister",
"(",
"context",
",",
"fqdomain",
")",
":",
"return",
"IMPL",
".",
"dnsdomain_unregister",
"(",
"context",
",",
"fqdomain",
")"
] | purge associations for the specified dns zone . | train | false |
52,328 | def remove_pycharm_dir(project_directory):
    idea_dir_location = os.path.join(PROJECT_DIRECTORY, '.idea/')
    if os.path.exists(idea_dir_location):
        shutil.rmtree(idea_dir_location)
    docs_dir_location = os.path.join(PROJECT_DIRECTORY, 'docs/pycharm/')
    if os.path.exists(docs_dir_location):
        shutil.rmtree(docs_dir_location)
| [
"def",
"remove_pycharm_dir",
"(",
"project_directory",
")",
":",
"idea_dir_location",
"=",
"os",
".",
"path",
".",
"join",
"(",
"PROJECT_DIRECTORY",
",",
"'.idea/'",
")",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"idea_dir_location",
")",
":",
"shutil",
"... | removes directories related to pycharm if it isnt going to be used . | train | false |
52,331 | def submit_items(log, userkey, items, chunksize=64):
    data = []
    def submit_chunk():
        'Submit the current accumulated fingerprint data.'
        log.info(u'submitting {0} fingerprints', len(data))
        try:
            acoustid.submit(API_KEY, userkey, data)
        except acoustid.AcoustidError as exc:
            log.warning(u'acoustid submission error: {0}', exc)
        del data[:]
    for item in items:
        fp = fingerprint_item(log, item)
        item_data = {'duration': int(item.length), 'fingerprint': fp}
        if item.mb_trackid:
            item_data['mbid'] = item.mb_trackid
            log.debug(u'submitting MBID')
        else:
            item_data.update({'track': item.title, 'artist': item.artist, 'album': item.album, 'albumartist': item.albumartist, 'year': item.year, 'trackno': item.track, 'discno': item.disc})
            log.debug(u'submitting textual metadata')
        data.append(item_data)
        if (len(data) >= chunksize):
            submit_chunk()
    if data:
        submit_chunk()
| [
"def",
"submit_items",
"(",
"log",
",",
"userkey",
",",
"items",
",",
"chunksize",
"=",
"64",
")",
":",
"data",
"=",
"[",
"]",
"def",
"submit_chunk",
"(",
")",
":",
"log",
".",
"info",
"(",
"u'submitting {0} fingerprints'",
",",
"len",
"(",
"data",
")"... | submit fingerprints for the items to the acoustid server . | train | false |
52,332 | def inputhook_pyglet():
    try:
        t = clock()
        while (not stdin_ready()):
            pyglet.clock.tick()
            for window in pyglet.app.windows:
                window.switch_to()
                window.dispatch_events()
                window.dispatch_event('on_draw')
                flip(window)
            used_time = (clock() - t)
            if (used_time > 10.0):
                time.sleep(1.0)
            elif (used_time > 0.1):
                time.sleep(0.05)
            else:
                time.sleep(0.001)
    except KeyboardInterrupt:
        pass
    return 0
| [
"def",
"inputhook_pyglet",
"(",
")",
":",
"try",
":",
"t",
"=",
"clock",
"(",
")",
"while",
"(",
"not",
"stdin_ready",
"(",
")",
")",
":",
"pyglet",
".",
"clock",
".",
"tick",
"(",
")",
"for",
"window",
"in",
"pyglet",
".",
"app",
".",
"windows",
... | run the pyglet event loop by processing pending events only . | train | true |
52,333 | def new(rsa_key):
    return PKCS115_SigScheme(rsa_key)
| [
"def",
"new",
"(",
"rsa_key",
")",
":",
"return",
"PKCS115_SigScheme",
"(",
"rsa_key",
")"
] | return a form for a new imageclassificationmodeljob . | train | false |
52,334 | def GetClassForProgID(progid):
    clsid = pywintypes.IID(progid)
    return GetClassForCLSID(clsid)
| [
"def",
"GetClassForProgID",
"(",
"progid",
")",
":",
"clsid",
"=",
"pywintypes",
".",
"IID",
"(",
"progid",
")",
"return",
"GetClassForCLSID",
"(",
"clsid",
")"
] | get a python class for a program id given a program id . | train | false |
52,335 | def _decode_oack(packet, packet_buff, offset):
    _decode_options(packet, packet_buff, offset)
    return packet
| [
"def",
"_decode_oack",
"(",
"packet",
",",
"packet_buff",
",",
"offset",
")",
":",
"_decode_options",
"(",
"packet",
",",
"packet_buff",
",",
"offset",
")",
"return",
"packet"
] | decodes a oack packet . | train | false |
52,336 | @XFAIL
def test_union_boundary_of_joining_sets():
    assert (Union(Interval(0, 10), Interval(10, 15), evaluate=False).boundary == FiniteSet(0, 15))
| [
"@",
"XFAIL",
"def",
"test_union_boundary_of_joining_sets",
"(",
")",
":",
"assert",
"(",
"Union",
"(",
"Interval",
"(",
"0",
",",
"10",
")",
",",
"Interval",
"(",
"10",
",",
"15",
")",
",",
"evaluate",
"=",
"False",
")",
".",
"boundary",
"==",
"Finite... | testing the boundary of unions is a hard problem . | train | false |
52,338 | def clear_installed_samples(shop):
    configuration.set(shop, SAMPLE_PRODUCTS_KEY, None)
    configuration.set(shop, SAMPLE_CATEGORIES_KEY, None)
    configuration.set(shop, SAMPLE_CAROUSEL_KEY, None)
    configuration.set(shop, SAMPLE_BUSINESS_SEGMENT_KEY, None)
| [
"def",
"clear_installed_samples",
"(",
"shop",
")",
":",
"configuration",
".",
"set",
"(",
"shop",
",",
"SAMPLE_PRODUCTS_KEY",
",",
"None",
")",
"configuration",
".",
"set",
"(",
"shop",
",",
"SAMPLE_CATEGORIES_KEY",
",",
"None",
")",
"configuration",
".",
"se... | clears all the samples values from the configuration . | train | false |
52,340 | def get_servers():
    cmd = ['w32tm', '/query', '/configuration']
    lines = __salt__['cmd.run'](cmd, python_shell=False).splitlines()
    for line in lines:
        try:
            if line.startswith('NtpServer:'):
                (_, ntpsvrs) = line.rstrip(' (Local)').split(':', 1)
                return sorted(ntpsvrs.split())
        except ValueError as e:
            return False
    return False
| [
"def",
"get_servers",
"(",
")",
":",
"cmd",
"=",
"[",
"'w32tm'",
",",
"'/query'",
",",
"'/configuration'",
"]",
"lines",
"=",
"__salt__",
"[",
"'cmd.run'",
"]",
"(",
"cmd",
",",
"python_shell",
"=",
"False",
")",
".",
"splitlines",
"(",
")",
"for",
"li... | get list of configured ntp servers cli example: . | train | true |
52,341 | def expose_plugview(url='/'):
    def wrap(v):
        handler = expose(url, v.methods)
        if hasattr(v, 'as_view'):
            return handler(v.as_view(v.__name__))
        else:
            return handler(v)
    return wrap
| [
"def",
"expose_plugview",
"(",
"url",
"=",
"'/'",
")",
":",
"def",
"wrap",
"(",
"v",
")",
":",
"handler",
"=",
"expose",
"(",
"url",
",",
"v",
".",
"methods",
")",
"if",
"hasattr",
"(",
"v",
",",
"'as_view'",
")",
":",
"return",
"handler",
"(",
"... | decorator to expose flasks pluggable view classes . | train | false |
52,342 | def extract_args_for_httpie_main(context, method=None):
    args = _extract_httpie_options(context)
    if method:
        args.append(method.upper())
    args.append(context.url)
    args += _extract_httpie_request_items(context)
    return args
| [
"def",
"extract_args_for_httpie_main",
"(",
"context",
",",
"method",
"=",
"None",
")",
":",
"args",
"=",
"_extract_httpie_options",
"(",
"context",
")",
"if",
"method",
":",
"args",
".",
"append",
"(",
"method",
".",
"upper",
"(",
")",
")",
"args",
".",
... | transform a context object to a list of arguments that can be passed to httpie main function . | train | true |
52,343 | def count_mismatches(seq1, seq2, max_mm):
    mm = 0
    for i in range(min(len(seq2), len(seq1))):
        if (seq1[i] != seq2[i]):
            mm += 1
            if (mm > max_mm):
                return mm
    return mm
| [
"def",
"count_mismatches",
"(",
"seq1",
",",
"seq2",
",",
"max_mm",
")",
":",
"mm",
"=",
"0",
"for",
"i",
"in",
"range",
"(",
"min",
"(",
"len",
"(",
"seq2",
")",
",",
"len",
"(",
"seq1",
")",
")",
")",
":",
"if",
"(",
"seq1",
"[",
"i",
"]",
... | counts mismatches . | train | false |
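Behavior sketch; note the early return means the result is only an exact count while it stays at or below `max_mm`:

```python
print(count_mismatches('ACGT', 'ACCT', max_mm=5))   # 1
print(count_mismatches('AAAA', 'TTTT', max_mm=1))   # 2 -- bailed out early; the true count is 4
```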
52,344 | def maskSensitiveData(msg):
    retVal = getUnicode(msg)
    for item in filter(None, map((lambda x: conf.get(x)), SENSITIVE_OPTIONS)):
        regex = (SENSITIVE_DATA_REGEX % re.sub('(\\W)', '\\\\\\1', getUnicode(item)))
        while extractRegexResult(regex, retVal):
            value = extractRegexResult(regex, retVal)
            retVal = retVal.replace(value, ('*' * len(value)))
    if (not conf.get('hostname')):
        match = re.search('(?i)sqlmap.+(-u|--url)(\\s+|=)([^ ]+)', retVal)
        if match:
            retVal = retVal.replace(match.group(3), ('*' * len(match.group(3))))
    if getpass.getuser():
        retVal = re.sub(('(?i)\\b%s\\b' % re.escape(getpass.getuser())), ('*' * len(getpass.getuser())), retVal)
    return retVal
| [
"def",
"maskSensitiveData",
"(",
"msg",
")",
":",
"retVal",
"=",
"getUnicode",
"(",
"msg",
")",
"for",
"item",
"in",
"filter",
"(",
"None",
",",
"map",
"(",
"(",
"lambda",
"x",
":",
"conf",
".",
"get",
"(",
"x",
")",
")",
",",
"SENSITIVE_OPTIONS",
... | masks sensitive data in the supplied message . | train | false |
52,345 | def infer_netmask(addr):
    addr = addr.toUnsigned()
    if (addr == 0):
        return (32 - 32)
    if ((addr & (1 << 31)) == 0):
        return (32 - 24)
    if ((addr & (3 << 30)) == (2 << 30)):
        return (32 - 16)
    if ((addr & (7 << 29)) == (6 << 29)):
        return (32 - 8)
    if ((addr & (15 << 28)) == (14 << 28)):
        return (32 - 0)
    return (32 - 0)
| [
"def",
"infer_netmask",
"(",
"addr",
")",
":",
"addr",
"=",
"addr",
".",
"toUnsigned",
"(",
")",
"if",
"(",
"addr",
"==",
"0",
")",
":",
"return",
"(",
"32",
"-",
"32",
")",
"if",
"(",
"(",
"addr",
"&",
"(",
"1",
"<<",
"31",
")",
")",
"==",
... | uses network classes to guess the number of network bits . | train | false |
52,346 | def reservation_destroy(context, uuid):
    return IMPL.reservation_destroy(context, uuid)
| [
"def",
"reservation_destroy",
"(",
"context",
",",
"uuid",
")",
":",
"return",
"IMPL",
".",
"reservation_destroy",
"(",
"context",
",",
"uuid",
")"
] | destroy the reservation or raise if it does not exist . | train | false |
52,347 | def _track_tasks(task_ids):
    while True:
        statuses = _get_task_statuses(task_ids)
        if all([(status == 'STOPPED') for status in statuses]):
            logger.info('ECS tasks {0} STOPPED'.format(','.join(task_ids)))
            break
        time.sleep(POLL_TIME)
        logger.debug('ECS task status for tasks {0}: {1}'.format(','.join(task_ids), status))
| [
"def",
"_track_tasks",
"(",
"task_ids",
")",
":",
"while",
"True",
":",
"statuses",
"=",
"_get_task_statuses",
"(",
"task_ids",
")",
"if",
"all",
"(",
"[",
"(",
"status",
"==",
"'STOPPED'",
")",
"for",
"status",
"in",
"statuses",
"]",
")",
":",
"logger",... | poll task status until stopped . | train | true |
52,348 | @bdd.when(bdd.parsers.parse('I wait until {path} is loaded'))
def wait_until_loaded(quteproc, path):
    quteproc.wait_for_load_finished(path)
| [
"@",
"bdd",
".",
"when",
"(",
"bdd",
".",
"parsers",
".",
"parse",
"(",
"'I wait until {path} is loaded'",
")",
")",
"def",
"wait_until_loaded",
"(",
"quteproc",
",",
"path",
")",
":",
"quteproc",
".",
"wait_for_load_finished",
"(",
"path",
")"
] | wait until the given path is loaded . | train | false |
52,349 | def mutnodeset_union(iterable):
    set = mutnodeset()
    for it in iterable:
        set |= it
    return set
| [
"def",
"mutnodeset_union",
"(",
"iterable",
")",
":",
"set",
"=",
"mutnodeset",
"(",
")",
"for",
"it",
"in",
"iterable",
":",
"set",
"|=",
"it",
"return",
"set"
] | return a mutable nodeset which is the union of all nodesets in iterable . | train | false |
52,350 | def vectorstrength(events, period):
    events = asarray(events)
    period = asarray(period)
    if (events.ndim > 1):
        raise ValueError('events cannot have dimensions more than 1')
    if (period.ndim > 1):
        raise ValueError('period cannot have dimensions more than 1')
    scalarperiod = (not period.ndim)
    events = atleast_2d(events)
    period = atleast_2d(period)
    if (period <= 0).any():
        raise ValueError('periods must be positive')
    vectors = exp(dot(((2j * pi) / period.T), events))
    vectormean = mean(vectors, axis=1)
    strength = abs(vectormean)
    phase = angle(vectormean)
    if scalarperiod:
        strength = strength[0]
        phase = phase[0]
    return (strength, phase)
| [
"def",
"vectorstrength",
"(",
"events",
",",
"period",
")",
":",
"events",
"=",
"asarray",
"(",
"events",
")",
"period",
"=",
"asarray",
"(",
"period",
")",
"if",
"(",
"events",
".",
"ndim",
">",
"1",
")",
":",
"raise",
"ValueError",
"(",
"'events cann... | determine the vector strength of the events corresponding to the given period . | train | false |
52,352 | def getXMLFromCarvingFileName(fileName):
    carving = fabmetheus_interpret.getCarving(fileName)
    if (carving == None):
        return ''
    output = xml_simple_writer.getBeginGeometryXMLOutput()
    carving.addXML(0, output)
    return xml_simple_writer.getEndGeometryXMLString(output)
| [
"def",
"getXMLFromCarvingFileName",
"(",
"fileName",
")",
":",
"carving",
"=",
"fabmetheus_interpret",
".",
"getCarving",
"(",
"fileName",
")",
"if",
"(",
"carving",
"==",
"None",
")",
":",
"return",
"''",
"output",
"=",
"xml_simple_writer",
".",
"getBeginGeomet... | get xml text from xml text . | train | false |
52,353 | def _expandCipherString(cipherString, method, options):
    ctx = SSL.Context(method)
    ctx.set_options(options)
    try:
        ctx.set_cipher_list(cipherString.encode('ascii'))
    except SSL.Error as e:
        if (e.args[0][0][2] == 'no cipher match'):
            return []
        else:
            raise
    conn = SSL.Connection(ctx, None)
    ciphers = conn.get_cipher_list()
    if isinstance(ciphers[0], unicode):
        return [OpenSSLCipher(cipher) for cipher in ciphers]
    else:
        return [OpenSSLCipher(cipher.decode('ascii')) for cipher in ciphers]
| [
"def",
"_expandCipherString",
"(",
"cipherString",
",",
"method",
",",
"options",
")",
":",
"ctx",
"=",
"SSL",
".",
"Context",
"(",
"method",
")",
"ctx",
".",
"set_options",
"(",
"options",
")",
"try",
":",
"ctx",
".",
"set_cipher_list",
"(",
"cipherString... | expand c{cipherstring} according to c{method} and c{options} to a list of explicit ciphers that are supported by the current platform . | train | false |
52,355 | def network_absent(name, driver=None):
    ret = {'name': name, 'changes': {}, 'result': False, 'comment': ''}
    networks = __salt__['dockerng.networks'](names=[name])
    if (not networks):
        ret['result'] = True
        ret['comment'] = "Network '{0}' already absent".format(name)
        return ret
    for container in networks[0]['Containers']:
        try:
            ret['changes']['disconnected'] = __salt__['dockerng.disconnect_container_from_network'](container, name)
        except Exception as exc:
            ret['comment'] = "Failed to disconnect container '{0}' to network '{1}' {2}".format(container, name, exc)
    try:
        ret['changes']['removed'] = __salt__['dockerng.remove_network'](name)
        ret['result'] = True
    except Exception as exc:
        ret['comment'] = "Failed to remove network '{0}': {1}".format(name, exc)
    return ret
| [
"def",
"network_absent",
"(",
"name",
",",
"driver",
"=",
"None",
")",
":",
"ret",
"=",
"{",
"'name'",
":",
"name",
",",
"'changes'",
":",
"{",
"}",
",",
"'result'",
":",
"False",
",",
"'comment'",
":",
"''",
"}",
"networks",
"=",
"__salt__",
"[",
... | ensure that a network is absent . | train | false |
52,356 | def _export_field_content(xblock_item, item_dir):
    module_data = xblock_item.get_explicitly_set_fields_by_scope(Scope.content)
    if isinstance(module_data, dict):
        for field_name in module_data:
            if (field_name not in DEFAULT_CONTENT_FIELDS):
                with item_dir.open('{0}.{1}.{2}'.format(xblock_item.location.name, field_name, 'json'), 'w') as field_content_file:
                    field_content_file.write(dumps(module_data.get(field_name, {}), cls=EdxJSONEncoder, sort_keys=True, indent=4))
| [
"def",
"_export_field_content",
"(",
"xblock_item",
",",
"item_dir",
")",
":",
"module_data",
"=",
"xblock_item",
".",
"get_explicitly_set_fields_by_scope",
"(",
"Scope",
".",
"content",
")",
"if",
"isinstance",
"(",
"module_data",
",",
"dict",
")",
":",
"for",
... | export all fields related to xblock_item other than metadata and data to json file in provided directory . | train | false |
52,357 | def image_meta_set(image_id=None, name=None, profile=None, **kwargs):
    conn = _auth(profile)
    return conn.image_meta_set(image_id, name, **kwargs)
| [
"def",
"image_meta_set",
"(",
"image_id",
"=",
"None",
",",
"name",
"=",
"None",
",",
"profile",
"=",
"None",
",",
"**",
"kwargs",
")",
":",
"conn",
"=",
"_auth",
"(",
"profile",
")",
"return",
"conn",
".",
"image_meta_set",
"(",
"image_id",
",",
"name... | sets a key=value pair in the metadata for an image cli examples: . | train | true |
52,358 | def where_not_allclose(a, b, rtol=1e-05, atol=1e-08):
    if (not np.all(np.isfinite(a))):
        a = np.ma.fix_invalid(a).data
    if (not np.all(np.isfinite(b))):
        b = np.ma.fix_invalid(b).data
    if ((atol == 0.0) and (rtol == 0.0)):
        return np.where((a != b))
    return np.where((np.abs((a - b)) > (atol + (rtol * np.abs(b)))))
| [
"def",
"where_not_allclose",
"(",
"a",
",",
"b",
",",
"rtol",
"=",
"1e-05",
",",
"atol",
"=",
"1e-08",
")",
":",
"if",
"(",
"not",
"np",
".",
"all",
"(",
"np",
".",
"isfinite",
"(",
"a",
")",
")",
")",
":",
"a",
"=",
"np",
".",
"ma",
".",
"... | a version of numpy . | train | false |
52,359 | def tar_compiled(file, dir, expression='^.+$', exclude_content_from=None):
    tar = tarfile.TarFile(file, 'w')
    for file in listdir(dir, expression, add_dirs=True, exclude_content_from=exclude_content_from):
        filename = os.path.join(dir, file)
        if os.path.islink(filename):
            continue
        if (os.path.isfile(filename) and (file[(-4):] != '.pyc')):
            if (file[:6] == 'models'):
                continue
            if (file[:5] == 'views'):
                continue
            if (file[:11] == 'controllers'):
                continue
            if (file[:7] == 'modules'):
                continue
        tar.add(filename, file, False)
    tar.close()
| [
"def",
"tar_compiled",
"(",
"file",
",",
"dir",
",",
"expression",
"=",
"'^.+$'",
",",
"exclude_content_from",
"=",
"None",
")",
":",
"tar",
"=",
"tarfile",
".",
"TarFile",
"(",
"file",
",",
"'w'",
")",
"for",
"file",
"in",
"listdir",
"(",
"dir",
",",
... | used to tar a compiled application . | train | false |
52,360 | def list_slotnames(host=None, admin_username=None, admin_password=None):
    slotraw = __execute_ret('getslotname', host=host, admin_username=admin_username, admin_password=admin_password)
    if (slotraw['retcode'] != 0):
        return slotraw
    slots = {}
    stripheader = True
    for l in slotraw['stdout'].splitlines():
        if l.startswith('<'):
            stripheader = False
            continue
        if stripheader:
            continue
        fields = l.split()
        slots[fields[0]] = {}
        slots[fields[0]]['slot'] = fields[0]
        if (len(fields) > 1):
            slots[fields[0]]['slotname'] = fields[1]
        else:
            slots[fields[0]]['slotname'] = ''
        if (len(fields) > 2):
            slots[fields[0]]['hostname'] = fields[2]
        else:
            slots[fields[0]]['hostname'] = ''
    return slots
| [
"def",
"list_slotnames",
"(",
"host",
"=",
"None",
",",
"admin_username",
"=",
"None",
",",
"admin_password",
"=",
"None",
")",
":",
"slotraw",
"=",
"__execute_ret",
"(",
"'getslotname'",
",",
"host",
"=",
"host",
",",
"admin_username",
"=",
"admin_username",
... | list the names of all slots in the chassis . | train | true |
52,361 | def render_date(date):
    if (not date):
        return None
    assert valid_date(date)
    weekday = Definitions.weekday_abbr_list[date.weekday()]
    month = Definitions.month_abbr_list[(date.month - 1)]
    return date.strftime('{day}, %d {month} %Y %H:%M:%S GMT').format(day=weekday, month=month)
| [
"def",
"render_date",
"(",
"date",
")",
":",
"if",
"(",
"not",
"date",
")",
":",
"return",
"None",
"assert",
"valid_date",
"(",
"date",
")",
"weekday",
"=",
"Definitions",
".",
"weekday_abbr_list",
"[",
"date",
".",
"weekday",
"(",
")",
"]",
"month",
"... | render a date per rfcs 6265/2616/1123 . | train | true |
52,365 | @hook.command(autohelp=False)
def showtells(nick, notice, db, conn):
    tells = get_unread(db, conn.name, nick)
    if (not tells):
        notice('You have no pending messages.')
        return
    for tell in tells:
        (sender, message, time_sent) = tell
        past = timeformat.time_since(time_sent)
        notice('{} sent you a message {} ago: {}'.format(sender, past, message))
    read_all_tells(db, conn.name, nick)
| [
"@",
"hook",
".",
"command",
"(",
"autohelp",
"=",
"False",
")",
"def",
"showtells",
"(",
"nick",
",",
"notice",
",",
"db",
",",
"conn",
")",
":",
"tells",
"=",
"get_unread",
"(",
"db",
",",
"conn",
".",
"name",
",",
"nick",
")",
"if",
"(",
"not"... | showtells -- view all pending tell messages . | train | false |
52,366 | def scrape_spring_config(url):
    request = urllib2.Request(url=url)
    response = urllib2.urlopen(request)
    http_code = response.getcode()
    content = response.read()
    if ((http_code < 200) or (http_code >= 300)):
        raise ValueError('Invalid HTTP={code} from {url}:\n{msg}'.format(code=http_code, url=url, msg=content))
    json = JSONDecoder().decode(content)
    return infer(json)
| [
"def",
"scrape_spring_config",
"(",
"url",
")",
":",
"request",
"=",
"urllib2",
".",
"Request",
"(",
"url",
"=",
"url",
")",
"response",
"=",
"urllib2",
".",
"urlopen",
"(",
"request",
")",
"http_code",
"=",
"response",
".",
"getcode",
"(",
")",
"content... | construct a config binding dictionary from a running instances baseurl . | train | false |
52,367 | def test_load_strings_loads_other_languages():
    locale.getdefaultlocale = (lambda : ('fr_FR', 'UTF-8'))
    strings.load_strings(helpers, 'fr')
    assert (strings._('wait_for_hs') == 'En attente du HS:')
| [
"def",
"test_load_strings_loads_other_languages",
"(",
")",
":",
"locale",
".",
"getdefaultlocale",
"=",
"(",
"lambda",
":",
"(",
"'fr_FR'",
",",
"'UTF-8'",
")",
")",
"strings",
".",
"load_strings",
"(",
"helpers",
",",
"'fr'",
")",
"assert",
"(",
"strings",
... | load_strings() loads other languages in different locales . | train | false |
52,371 | @pytest.mark.parametrize('parallel', [True, False])
def test_quoted_fields(parallel, read_basic):
    if parallel:
        pytest.xfail('Multiprocessing can fail with quoted fields')
    text = '\n"A B" C D\n1.5 2.1 -37.1\na b " c\n d"\n'
    table = read_basic(text, parallel=parallel)
    expected = Table([['1.5', 'a'], ['2.1', 'b'], ['-37.1', 'cd']], names=('A B', 'C', 'D'))
    assert_table_equal(table, expected)
    table = read_basic(text.replace('"', "'"), quotechar="'", parallel=parallel)
    assert_table_equal(table, expected)
| [
"@",
"pytest",
".",
"mark",
".",
"parametrize",
"(",
"'parallel'",
",",
"[",
"True",
",",
"False",
"]",
")",
"def",
"test_quoted_fields",
"(",
"parallel",
",",
"read_basic",
")",
":",
"if",
"parallel",
":",
"pytest",
".",
"xfail",
"(",
"'Multiprocessing ca... | the character quotechar should denote the start of a field which can contain the field delimiter and newlines . | train | false |
52,372 | def add_commas(n):
    strn = str(n)
    lenn = len(strn)
    i = 0
    result = ''
    while (i < lenn):
        if ((((lenn - i) % 3) == 0) and (i != 0)):
            result += ','
        result += strn[i]
        i += 1
    return result
| [
"def",
"add_commas",
"(",
"n",
")",
":",
"strn",
"=",
"str",
"(",
"n",
")",
"lenn",
"=",
"len",
"(",
"strn",
")",
"i",
"=",
"0",
"result",
"=",
"''",
"while",
"(",
"i",
"<",
"lenn",
")",
":",
"if",
"(",
"(",
"(",
"(",
"lenn",
"-",
"i",
")... | receives integer n . | train | false |
52,373 | def resolve_document_version(document, resource, method, latest_doc=None):
    resource_def = app.config['DOMAIN'][resource]
    version = app.config['VERSION']
    latest_version = app.config['LATEST_VERSION']
    if (resource_def['versioning'] is True):
        if ((method == 'GET') and (latest_doc is None)):
            if (version not in document):
                document[version] = 1
            document[latest_version] = document[version]
        if ((method == 'GET') and (latest_doc is not None)):
            if (version not in latest_doc):
                document[version] = 1
                document[latest_version] = document[version]
            else:
                document[latest_version] = latest_doc[version]
                if (version not in document):
                    document[version] = 1
        if (method == 'POST'):
            document[version] = 1
        if ((method == 'PUT') or (method == 'PATCH') or ((method == 'DELETE') and (resource_def['soft_delete'] is True))):
            if (not latest_doc):
                abort(500, description=debug_error_message('I need the latest document here!'))
            if (version in latest_doc):
                document[version] = (latest_doc[version] + 1)
            else:
                document[version] = 1
| [
"def",
"resolve_document_version",
"(",
"document",
",",
"resource",
",",
"method",
",",
"latest_doc",
"=",
"None",
")",
":",
"resource_def",
"=",
"app",
".",
"config",
"[",
"'DOMAIN'",
"]",
"[",
"resource",
"]",
"version",
"=",
"app",
".",
"config",
"[",
... | version number logic for all methods . | train | false |
52,374 | def sha256(s):
return hashlib.sha256(s).digest()
| [
"def",
"sha256",
"(",
"s",
")",
":",
"return",
"hashlib",
".",
"sha256",
"(",
"s",
")",
".",
"digest",
"(",
")"
] | return sha256 digest of the string s . | train | false |
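A quick sketch of calling the helper; the surrounding module must import hashlib, and on Python 3 the argument has to be bytes. .digest() returns 32 raw bytes; call .hex() on the result for the usual 64-character hex form:

import hashlib

d = sha256(b'hello world')
print(len(d))   # 32
print(d.hex())  # 64 hex characters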
52,375 | def _parse_ethtool_pppoe_opts(opts, iface):
config = {}
for opt in _DEB_CONFIG_PPPOE_OPTS:
if (opt in opts):
config[opt] = opts[opt]
if (('provider' in opts) and (not opts['provider'])):
_raise_error_iface(iface, 'provider', (_CONFIG_TRUE + _CONFIG_FALSE))
valid = (_CONFIG_TRUE + _CONFIG_FALSE)
for option in ('noipdefault', 'usepeerdns', 'defaultroute', 'hide-password', 'noauth', 'persist', 'noaccomp'):
if (option in opts):
if (opts[option] in _CONFIG_TRUE):
config.update({option: 'True'})
elif (opts[option] in _CONFIG_FALSE):
config.update({option: 'False'})
else:
_raise_error_iface(iface, option, valid)
return config
| [
"def",
"_parse_ethtool_pppoe_opts",
"(",
"opts",
",",
"iface",
")",
":",
"config",
"=",
"{",
"}",
"for",
"opt",
"in",
"_DEB_CONFIG_PPPOE_OPTS",
":",
"if",
"(",
"opt",
"in",
"opts",
")",
":",
"config",
"[",
"opt",
"]",
"=",
"opts",
"[",
"opt",
"]",
"i... | filters given options and outputs valid settings for ethtools_pppoe_opts if an option has a value that is not expected . | train | true |
52,376 | def observe_with(observer, event_handler, pathnames, recursive):
for pathname in set(pathnames):
observer.schedule(event_handler, pathname, recursive)
observer.start()
try:
while True:
time.sleep(1)
except KeyboardInterrupt:
observer.stop()
observer.join()
| [
"def",
"observe_with",
"(",
"observer",
",",
"event_handler",
",",
"pathnames",
",",
"recursive",
")",
":",
"for",
"pathname",
"in",
"set",
"(",
"pathnames",
")",
":",
"observer",
".",
"schedule",
"(",
"event_handler",
",",
"pathname",
",",
"recursive",
")",... | single observer thread with a scheduled path and event handler . | train | false |
52,377 | def status_response(**redis_kwargs):
(processes, messages) = get_recent(**redis_kwargs)
lines = [('%d' % time()), '----------']
for (index, (elapsed, pid, message)) in enumerate(processes):
if (elapsed > ((6 * 60) * 60)):
delete_statuses(pid, **redis_kwargs)
continue
if (elapsed > (10 * 60)):
continue
line = ('%03s. %05s %s, %s ago' % (str((index + 1)), pid, message, nice_time(elapsed)))
lines.append(line)
lines.append('----------')
for (elapsed, pid, message) in messages[:250]:
line = [(' ' * pid_indent(pid))]
line += [str(pid), (message + ',')]
line += [nice_time(elapsed), 'ago']
lines.append(' '.join(line))
return str('\n'.join(lines))
| [
"def",
"status_response",
"(",
"**",
"redis_kwargs",
")",
":",
"(",
"processes",
",",
"messages",
")",
"=",
"get_recent",
"(",
"**",
"redis_kwargs",
")",
"lines",
"=",
"[",
"(",
"'%d'",
"%",
"time",
"(",
")",
")",
",",
"'----------'",
"]",
"for",
"(",
... | retrieve recent messages from redis and format them into a plain-text status response . | train | false |
52,378 | def assure_queue(fnc):
@wraps(fnc)
def _wrapped(self, queue, *args, **kwargs):
if (not isinstance(queue, Queue)):
queue = self._manager.get(queue)
return fnc(self, queue, *args, **kwargs)
return _wrapped
| [
"def",
"assure_queue",
"(",
"fnc",
")",
":",
"@",
"wraps",
"(",
"fnc",
")",
"def",
"_wrapped",
"(",
"self",
",",
"queue",
",",
"*",
"args",
",",
"**",
"kwargs",
")",
":",
"if",
"(",
"not",
"isinstance",
"(",
"queue",
",",
"Queue",
")",
")",
":",
... | converts a queue id or name passed as the queue parameter to a queue object . | train | true |
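A sketch of how the decorator is meant to be applied inside a class that holds a queue manager; QueueController and its delete method are hypothetical, while Queue, wraps (from functools), and self._manager.get(...) are assumed to exist in the surrounding module:

class QueueController(object):
    def __init__(self, manager):
        self._manager = manager

    @assure_queue
    def delete(self, queue):
        # By the time the body runs, ``queue`` is a Queue instance,
        # whether the caller passed an object, an id, or a name.
        self._manager.delete(queue)

# QueueController(manager).delete('my-queue') resolves the name
# via manager.get('my-queue') before the body executes.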
52,379 | def snapshot_get(context, snapshot_id):
return IMPL.snapshot_get(context, snapshot_id)
| [
"def",
"snapshot_get",
"(",
"context",
",",
"snapshot_id",
")",
":",
"return",
"IMPL",
".",
"snapshot_get",
"(",
"context",
",",
"snapshot_id",
")"
] | get a snapshot or raise if it does not exist . | train | false |
52,380 | def distance(origin, destination):
(lat1, lon1) = origin
(lat2, lon2) = destination
radius = 6371
dlat = math.radians((lat2 - lat1))
dlon = math.radians((lon2 - lon1))
a = ((math.sin((dlat / 2)) * math.sin((dlat / 2))) + (((math.cos(math.radians(lat1)) * math.cos(math.radians(lat2))) * math.sin((dlon / 2))) * math.sin((dlon / 2))))
c = (2 * math.atan2(math.sqrt(a), math.sqrt((1 - a))))
d = (radius * c)
return d
| [
"def",
"distance",
"(",
"origin",
",",
"destination",
")",
":",
"(",
"lat1",
",",
"lon1",
")",
"=",
"origin",
"(",
"lat2",
",",
"lon2",
")",
"=",
"destination",
"radius",
"=",
"6371",
"dlat",
"=",
"math",
".",
"radians",
"(",
"(",
"lat2",
"-",
"lat... | determine distance between 2 sets of [lat . | train | false |
52,381 | def osc_fslist(directory):
fslist = []
for fs in os.listdir(directory):
fir = fs.find('-')
sec = fs.find('-', (fir + 1))
thrd = fs.find('-', (sec + 1))
fs_name = fs[0:fir]
if (('num_ref' not in fs_name) and (fs_name not in fslist)):
fslist.append(fs_name)
(yield fs_name)
| [
"def",
"osc_fslist",
"(",
"directory",
")",
":",
"fslist",
"=",
"[",
"]",
"for",
"fs",
"in",
"os",
".",
"listdir",
"(",
"directory",
")",
":",
"fir",
"=",
"fs",
".",
"find",
"(",
"'-'",
")",
"sec",
"=",
"fs",
".",
"find",
"(",
"'-'",
",",
"(",
... | return fs names based on folder names in osc directory . | train | false |
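A self-contained sketch of the name extraction: everything before the first '-' in an entry name is treated as the filesystem name, duplicates are collapsed, and names whose prefix contains 'num_ref' are skipped:

import os
import tempfile

d = tempfile.mkdtemp()
for name in ('lustre-MDT0000-mdt', 'lustre-OST0001-ost', 'num_refs'):
    open(os.path.join(d, name), 'w').close()
print(list(osc_fslist(d)))  # -> ['lustre']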
52,384 | def translateVector3Path(path, translateVector3):
for point in path:
point.setToVector3((point + translateVector3))
| [
"def",
"translateVector3Path",
"(",
"path",
",",
"translateVector3",
")",
":",
"for",
"point",
"in",
"path",
":",
"point",
".",
"setToVector3",
"(",
"(",
"point",
"+",
"translateVector3",
")",
")"
] | translate the vector3 path . | train | false |
52,388 | def warnpy3k(message, category=None, stacklevel=1):
if sys.py3kwarning:
if (category is None):
category = DeprecationWarning
warn(message, category, (stacklevel + 1))
| [
"def",
"warnpy3k",
"(",
"message",
",",
"category",
"=",
"None",
",",
"stacklevel",
"=",
"1",
")",
":",
"if",
"sys",
".",
"py3kwarning",
":",
"if",
"(",
"category",
"is",
"None",
")",
":",
"category",
"=",
"DeprecationWarning",
"warn",
"(",
"message",
... | issue a deprecation warning for python 3 . | train | true |
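A sketch of the intended call pattern; on Python 2 the warning only fires when the interpreter is started with the -3 flag, which sets sys.py3kwarning (warn comes from the surrounding warnings module):

warnpy3k('dict.has_key() is not supported in 3.x; use the in operator')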
52,389 | def startup(name):
ret = {'name': name, 'changes': {}, 'result': None, 'comment': ''}
if __opts__['test']:
ret['comment'] = 'Starting up local node'
return ret
__salt__['trafficserver.startup']()
ret['result'] = True
ret['comment'] = 'Starting up local node'
return ret
| [
"def",
"startup",
"(",
"name",
")",
":",
"ret",
"=",
"{",
"'name'",
":",
"name",
",",
"'changes'",
":",
"{",
"}",
",",
"'result'",
":",
"None",
",",
"'comment'",
":",
"''",
"}",
"if",
"__opts__",
"[",
"'test'",
"]",
":",
"ret",
"[",
"'comment'",
... | start traffic server on the local node . | train | true |
52,391 | def lookupAuthority(name, timeout=None):
return getResolver().lookupAuthority(name, timeout)
| [
"def",
"lookupAuthority",
"(",
"name",
",",
"timeout",
"=",
"None",
")",
":",
"return",
"getResolver",
"(",
")",
".",
"lookupAuthority",
"(",
"name",
",",
"timeout",
")"
] | perform an soa record lookup . | train | false |
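A minimal twisted.names-style sketch; the returned Deferred fires with a three-tuple of (answers, authority, additional) record lists:

from twisted.internet import reactor

def show(result):
    answers, authority, additional = result
    for record in answers:
        print(record)
    reactor.stop()

lookupAuthority('example.com').addCallback(show)
reactor.run()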
52,392 | def get_abbr_impl():
if hasattr(sys, 'pypy_version_info'):
pyimpl = 'pp'
elif sys.platform.startswith('java'):
pyimpl = 'jy'
elif (sys.platform == 'cli'):
pyimpl = 'ip'
else:
pyimpl = 'cp'
return pyimpl
| [
"def",
"get_abbr_impl",
"(",
")",
":",
"if",
"hasattr",
"(",
"sys",
",",
"'pypy_version_info'",
")",
":",
"pyimpl",
"=",
"'pp'",
"elif",
"sys",
".",
"platform",
".",
"startswith",
"(",
"'java'",
")",
":",
"pyimpl",
"=",
"'jy'",
"elif",
"(",
"sys",
".",... | return abbreviated implementation name . | train | true |
52,395 | def tac():
return __timer__.tac()
| [
"def",
"tac",
"(",
")",
":",
"return",
"__timer__",
".",
"tac",
"(",
")"
] | prints and returns elapsed time since last tic . | train | false |
52,396 | def remove_na(series):
return series[notnull(_values_from_object(series))]
| [
"def",
"remove_na",
"(",
"series",
")",
":",
"return",
"series",
"[",
"notnull",
"(",
"_values_from_object",
"(",
"series",
")",
")",
"]"
] | return series containing only true/non-nan values . | train | false |
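A sketch of the intended behaviour; notnull and _values_from_object are pandas internals assumed to be imported by the surrounding module:

import numpy as np
import pandas as pd

s = pd.Series([1.0, np.nan, 3.0])
print(remove_na(s))  # the NaN row is dropped; indexes 0 and 2 remain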