id_within_dataset
int64
1
55.5k
snippet
stringlengths
19
14.2k
tokens
listlengths
6
1.63k
nl
stringlengths
6
352
split_within_dataset
stringclasses
1 value
is_duplicated
bool
2 classes
35,797
def prepare_form_encoded_body(oauth_params, body): return _append_params(oauth_params, body)
[ "def", "prepare_form_encoded_body", "(", "oauth_params", ",", "body", ")", ":", "return", "_append_params", "(", "oauth_params", ",", "body", ")" ]
prepare the form-encoded body .
train
false
35,798
def _CopyAndSetMultipleToFalse(prop): prop_copy = entity_pb.Property() prop_copy.MergeFrom(prop) prop_copy.set_multiple(False) return prop_copy
[ "def", "_CopyAndSetMultipleToFalse", "(", "prop", ")", ":", "prop_copy", "=", "entity_pb", ".", "Property", "(", ")", "prop_copy", ".", "MergeFrom", "(", "prop", ")", "prop_copy", ".", "set_multiple", "(", "False", ")", "return", "prop_copy" ]
copy the provided property and set its "multiple" attribute to false .
train
false
35,800
def change_smb_enum_shares(table): if (not table): return table result = {} for field in ['account_used', 'note']: if (field in table): result[field] = table.pop(field) result['shares'] = [] for (key, value) in table.iteritems(): value.update({'Share': key}) result['shares'].append(value) return result
[ "def", "change_smb_enum_shares", "(", "table", ")", ":", "if", "(", "not", "table", ")", ":", "return", "table", "result", "=", "{", "}", "for", "field", "in", "[", "'account_used'", ",", "'note'", "]", ":", "if", "(", "field", "in", "table", ")", ":...
adapt structured data from script smb-enum-shares so that it is easy to query when inserted in db .
train
false
35,801
def warning(msg, *args, **kwargs): if (len(root.handlers) == 0): basicConfig() root.warning(msg, *args, **kwargs)
[ "def", "warning", "(", "msg", ",", "*", "args", ",", "**", "kwargs", ")", ":", "if", "(", "len", "(", "root", ".", "handlers", ")", "==", "0", ")", ":", "basicConfig", "(", ")", "root", ".", "warning", "(", "msg", ",", "*", "args", ",", "**", ...
compat for testing with previous @with_app decorator .
train
false
35,802
def getFourSignificantFigures(number): if (number == None): return None absoluteNumber = abs(number) if (absoluteNumber >= 100.0): return getRoundedToPlacesString(2, number) if (absoluteNumber < 1e-09): return getRoundedToPlacesString(13, number) return getRoundedToPlacesString((3 - math.floor(math.log10(absoluteNumber))), number)
[ "def", "getFourSignificantFigures", "(", "number", ")", ":", "if", "(", "number", "==", "None", ")", ":", "return", "None", "absoluteNumber", "=", "abs", "(", "number", ")", "if", "(", "absoluteNumber", ">=", "100.0", ")", ":", "return", "getRoundedToPlacesS...
get number rounded to four significant figures as a string .
train
false
35,805
def test_inverse(): np.random.seed(3285) for link in Links: for k in range(10): p = np.random.uniform(0, 1) d = link.inverse(link(p)) assert_allclose(d, p, atol=1e-08, err_msg=str(link)) z = get_domainvalue(link) d = link(link.inverse(z)) assert_allclose(d, z, atol=1e-08, err_msg=str(link))
[ "def", "test_inverse", "(", ")", ":", "np", ".", "random", ".", "seed", "(", "3285", ")", "for", "link", "in", "Links", ":", "for", "k", "in", "range", "(", "10", ")", ":", "p", "=", "np", ".", "random", ".", "uniform", "(", "0", ",", "1", ")...
logic check that link .
train
false
35,807
def has_table_privileges(cursor, user, table, privs): cur_privs = get_table_privileges(cursor, user, table) have_currently = cur_privs.intersection(privs) other_current = cur_privs.difference(privs) desired = privs.difference(cur_privs) return (have_currently, other_current, desired)
[ "def", "has_table_privileges", "(", "cursor", ",", "user", ",", "table", ",", "privs", ")", ":", "cur_privs", "=", "get_table_privileges", "(", "cursor", ",", "user", ",", "table", ")", "have_currently", "=", "cur_privs", ".", "intersection", "(", "privs", "...
return the difference between the privileges that a user already has and the privileges that they desire to have .
train
false
35,808
def _unpack_fields(table, fields=[]): u = [] for f in fields: (a, b) = ((('.' in f) and f.split('.', 1)) or (table.name, f)) if ((a == table.name) and (b == ALL)): u.extend((f for f in table.db.tables[a].fields)) elif ((a != table.name) and (b == ALL)): u.extend((('%s.%s' % (a, f)) for f in table.db.tables[a].fields)) elif (a != table.name): u.append(('%s.%s' % (a, b))) else: u.append(b) return u
[ "def", "_unpack_fields", "(", "table", ",", "fields", "=", "[", "]", ")", ":", "u", "=", "[", "]", "for", "f", "in", "fields", ":", "(", "a", ",", "b", ")", "=", "(", "(", "(", "'.'", "in", "f", ")", "and", "f", ".", "split", "(", "'.'", ...
replaces "*" with the actual field names .
train
false
35,809
def _getCommonSteadyDBArgsDict(): return dict(creator=pymysql, host=Configuration.get('nupic.cluster.database.host'), port=int(Configuration.get('nupic.cluster.database.port')), user=Configuration.get('nupic.cluster.database.user'), passwd=Configuration.get('nupic.cluster.database.passwd'), charset='utf8', use_unicode=True, setsession=['SET AUTOCOMMIT = 1'])
[ "def", "_getCommonSteadyDBArgsDict", "(", ")", ":", "return", "dict", "(", "creator", "=", "pymysql", ",", "host", "=", "Configuration", ".", "get", "(", "'nupic.cluster.database.host'", ")", ",", "port", "=", "int", "(", "Configuration", ".", "get", "(", "'...
returns a dictionary of arguments for dbutils .
train
true
35,810
def remove_conflicting_jars(app_name): app_dir = (('/var/apps/' + app_name) + '/app/') lib_dir = locate_dir(app_dir, 'lib') if (not lib_dir): logging.warn('Lib directory not found in app code while updating.') return logging.info('Removing jars from {0}'.format(lib_dir)) conflicting_jars_pattern = ['appengine-api-1.0-sdk-*.jar', 'appengine-api-stubs-*.jar', 'appengine-api-labs-*.jar', 'appengine-jsr107cache-*.jar', 'jsr107cache-*.jar', 'appengine-mapreduce*.jar', 'appengine-pipeline*.jar', 'appengine-gcs-client*.jar'] for file in os.listdir(lib_dir): for pattern in conflicting_jars_pattern: if fnmatch.fnmatch(file, pattern): os.remove(((lib_dir + os.sep) + file))
[ "def", "remove_conflicting_jars", "(", "app_name", ")", ":", "app_dir", "=", "(", "(", "'/var/apps/'", "+", "app_name", ")", "+", "'/app/'", ")", "lib_dir", "=", "locate_dir", "(", "app_dir", ",", "'lib'", ")", "if", "(", "not", "lib_dir", ")", ":", "log...
removes jars uploaded which may conflict with appscale jars .
train
false
35,812
@core_helper @maintain.deprecated('h.time_ago_in_words_from_str is deprecated in 2.2 and will be removed. Please use h.time_ago_from_timestamp instead') def time_ago_in_words_from_str(date_str, granularity='month'): if date_str: return date.time_ago_in_words(date_str_to_datetime(date_str), granularity=granularity) else: return _('Unknown')
[ "@", "core_helper", "@", "maintain", ".", "deprecated", "(", "'h.time_ago_in_words_from_str is deprecated in 2.2 and will be removed. Please use h.time_ago_from_timestamp instead'", ")", "def", "time_ago_in_words_from_str", "(", "date_str", ",", "granularity", "=", "'month'", ")",...
deprecated in 2 .
train
false
35,813
def zero_mul_simp(l, index): while ((index >= 0) and (index < (len(l) - 1)) and (l[index][0] is l[(index + 1)][0])): exp = (l[index][1] + l[(index + 1)][1]) base = l[index][0] l[index] = (base, exp) del l[(index + 1)] if (l[index][1] == 0): del l[index] index -= 1
[ "def", "zero_mul_simp", "(", "l", ",", "index", ")", ":", "while", "(", "(", "index", ">=", "0", ")", "and", "(", "index", "<", "(", "len", "(", "l", ")", "-", "1", ")", ")", "and", "(", "l", "[", "index", "]", "[", "0", "]", "is", "l", "...
used to combine two reduced words .
train
false
35,814
def addPixelToPixelTableWithSteepness(isSteep, pixelDictionary, value, x, y): if isSteep: addPixelToPixelTable(pixelDictionary, value, y, x) else: addPixelToPixelTable(pixelDictionary, value, x, y)
[ "def", "addPixelToPixelTableWithSteepness", "(", "isSteep", ",", "pixelDictionary", ",", "value", ",", "x", ",", "y", ")", ":", "if", "isSteep", ":", "addPixelToPixelTable", "(", "pixelDictionary", ",", "value", ",", "y", ",", "x", ")", "else", ":", "addPixe...
add pixels to the pixel table with steepness .
train
false
35,815
def _const_compare_digest_backport(a, b): result = abs((len(a) - len(b))) for (l, r) in zip(bytearray(a), bytearray(b)): result |= (l ^ r) return (result == 0)
[ "def", "_const_compare_digest_backport", "(", "a", ",", "b", ")", ":", "result", "=", "abs", "(", "(", "len", "(", "a", ")", "-", "len", "(", "b", ")", ")", ")", "for", "(", "l", ",", "r", ")", "in", "zip", "(", "bytearray", "(", "a", ")", ",...
compare two digests of equal length in constant time .
train
true
35,817
def get_doc(arg1, arg2=None): if isinstance(arg1, BaseDocument): return arg1 elif isinstance(arg1, basestring): doctype = arg1 else: doctype = arg1.get(u'doctype') controller = get_controller(doctype) if controller: return controller(arg1, arg2) raise ImportError, arg1
[ "def", "get_doc", "(", "arg1", ",", "arg2", "=", "None", ")", ":", "if", "isinstance", "(", "arg1", ",", "BaseDocument", ")", ":", "return", "arg1", "elif", "isinstance", "(", "arg1", ",", "basestring", ")", ":", "doctype", "=", "arg1", "else", ":", ...
get the docstring from our command and options map .
train
false
35,819
def timedtest(max_time, tolerance=TOLERANCE): def _timedtest(function): def wrapper(*args, **kw): start_time = time.time() try: function(*args, **kw) finally: total_time = (time.time() - start_time) if (total_time > (max_time + tolerance)): raise DurationError(('Test was too long (%.2f s)' % total_time)) return wrapper return _timedtest
[ "def", "timedtest", "(", "max_time", ",", "tolerance", "=", "TOLERANCE", ")", ":", "def", "_timedtest", "(", "function", ")", ":", "def", "wrapper", "(", "*", "args", ",", "**", "kw", ")", ":", "start_time", "=", "time", ".", "time", "(", ")", "try",...
timedtest decorator decorates the test method with a timer when the time spent by the test exceeds max_time in seconds .
train
false
35,820
def BuildAdGroupCriterionOperations(adgroup_id): criterion_operations = [{'xsi_type': 'AdGroupCriterionOperation', 'operand': {'xsi_type': 'BiddableAdGroupCriterion', 'adGroupId': adgroup_id, 'criterion': {'xsi_type': 'Keyword', 'text': ('mars%s%s' % (uuid.uuid4(), ('!!!' if ((i % 10) == 0) else ''))), 'matchType': 'BROAD'}}, 'operator': 'ADD'} for i in range(KEYWORD_COUNT)] return criterion_operations
[ "def", "BuildAdGroupCriterionOperations", "(", "adgroup_id", ")", ":", "criterion_operations", "=", "[", "{", "'xsi_type'", ":", "'AdGroupCriterionOperation'", ",", "'operand'", ":", "{", "'xsi_type'", ":", "'BiddableAdGroupCriterion'", ",", "'adGroupId'", ":", "adgroup...
builds the operations adding a keyword criterion to each adgroup .
train
true
35,821
def isMultiline(s): return (string.find(s, '\n') != (-1))
[ "def", "isMultiline", "(", "s", ")", ":", "return", "(", "string", ".", "find", "(", "s", ",", "'\\n'", ")", "!=", "(", "-", "1", ")", ")" ]
returns c{true} if this string has a newline in it .
train
false
35,822
def build_files_list(root_dir): return [os.path.join(dirpath, file_path) for (dirpath, subdirs, files) in os.walk(root_dir) for file_path in files]
[ "def", "build_files_list", "(", "root_dir", ")", ":", "return", "[", "os", ".", "path", ".", "join", "(", "dirpath", ",", "file_path", ")", "for", "(", "dirpath", ",", "subdirs", ",", "files", ")", "in", "os", ".", "walk", "(", "root_dir", ")", "for"...
build a list containing absolute paths to the generated files .
train
false
35,823
def at_initial_setup(): modname = settings.AT_INITIAL_SETUP_HOOK_MODULE if (not modname): return try: mod = __import__(modname, fromlist=[None]) except (ImportError, ValueError): return logger.log_info(' Running at_initial_setup() hook.') if mod.__dict__.get('at_initial_setup', None): mod.at_initial_setup()
[ "def", "at_initial_setup", "(", ")", ":", "modname", "=", "settings", ".", "AT_INITIAL_SETUP_HOOK_MODULE", "if", "(", "not", "modname", ")", ":", "return", "try", ":", "mod", "=", "__import__", "(", "modname", ",", "fromlist", "=", "[", "None", "]", ")", ...
custom hook for users to overload some or all parts of the initial setup .
train
false
35,824
def _find_dependent_monitors(monitors, monitor_names): last_iteration_count = 0 while (len(monitor_names) != last_iteration_count): last_iteration_count = len(monitor_names) for mon in monitors: for auditor in mon.auditors: for support_index in auditor.support_auditor_indexes: if ((support_index in monitor_names) and (mon.watcher.index not in monitor_names)): monitor_names.append(mon.watcher.index) for support_index in auditor.support_watcher_indexes: if ((support_index in monitor_names) and (mon.watcher.index not in monitor_names)): monitor_names.append(mon.watcher.index) return monitor_names
[ "def", "_find_dependent_monitors", "(", "monitors", ",", "monitor_names", ")", ":", "last_iteration_count", "=", "0", "while", "(", "len", "(", "monitor_names", ")", "!=", "last_iteration_count", ")", ":", "last_iteration_count", "=", "len", "(", "monitor_names", ...
used to find all the monitors that re dependent on those in the original monitor_names to the list .
train
false
35,825
def plot_MCMC_model(ax, xdata, ydata, trace): ax.plot(xdata, ydata, 'ok') (alpha, beta) = trace[:2] xfit = np.linspace((-20), 120, 10) yfit = (alpha[:, None] + (beta[:, None] * xfit)) mu = yfit.mean(0) sig = (2 * yfit.std(0)) ax.plot(xfit, mu, '-k') ax.fill_between(xfit, (mu - sig), (mu + sig), color='lightgray') ax.set_xlabel('x') ax.set_ylabel('y')
[ "def", "plot_MCMC_model", "(", "ax", ",", "xdata", ",", "ydata", ",", "trace", ")", ":", "ax", ".", "plot", "(", "xdata", ",", "ydata", ",", "'ok'", ")", "(", "alpha", ",", "beta", ")", "=", "trace", "[", ":", "2", "]", "xfit", "=", "np", ".", ...
plot the linear model and 2sigma contours .
train
false
35,826
def user_required(handler): def check_login(self, *args, **kwargs): '\n If handler has no login_url specified invoke a 403 error\n ' if (self.request.query_string != ''): query_string = ('?' + self.request.query_string) else: query_string = '' continue_url = (self.request.path_url + query_string) login_url = self.uri_for('login', **{'continue': continue_url}) try: auth = self.auth.get_user_by_session() if (not auth): try: self.redirect(login_url, abort=True) except (AttributeError, KeyError) as e: self.abort(403) except AttributeError as e: logging.error(e) self.auth.unset_session() self.redirect(login_url) return handler(self, *args, **kwargs) return check_login
[ "def", "user_required", "(", "handler", ")", ":", "def", "check_login", "(", "self", ",", "*", "args", ",", "**", "kwargs", ")", ":", "if", "(", "self", ".", "request", ".", "query_string", "!=", "''", ")", ":", "query_string", "=", "(", "'?'", "+", ...
decorator for checking if theres a user associated with the current session .
train
false
35,828
def get_cache(): return requests.Session().cache
[ "def", "get_cache", "(", ")", ":", "return", "requests", ".", "Session", "(", ")", ".", "cache" ]
gets an in-memory cache value .
train
false
35,829
def wait_for_winexesvc(host, port, username, password, timeout=900): start = time.time() log.debug('Attempting winexe connection to host {0} on port {1}'.format(host, port)) creds = "-U '{0}%{1}' //{2}".format(username, password, host) logging_creds = "-U '{0}%XXX-REDACTED-XXX' //{1}".format(username, host) try_count = 0 while True: try_count += 1 try: ret_code = win_cmd('winexe {0} "sc query winexesvc"'.format(creds), logging_command=logging_creds) if (ret_code == 0): log.debug('winexe connected...') return True log.debug('Return code was {0}'.format(ret_code)) time.sleep(1) except socket.error as exc: log.debug('Caught exception in wait_for_winexesvc: {0}'.format(exc)) time.sleep(1) if ((time.time() - start) > timeout): log.error('winexe connection timed out: {0}'.format(timeout)) return False log.debug('Retrying winexe connection to host {0} on port {1} (try {2})'.format(host, port, try_count))
[ "def", "wait_for_winexesvc", "(", "host", ",", "port", ",", "username", ",", "password", ",", "timeout", "=", "900", ")", ":", "start", "=", "time", ".", "time", "(", ")", "log", ".", "debug", "(", "'Attempting winexe connection to host {0} on port {1}'", ".",...
wait until winexe connection can be established .
train
false
35,830
def _filtfilt(x, iir_params, picks, n_jobs, copy): from scipy.signal import filtfilt padlen = min(iir_params['padlen'], len(x)) n_jobs = check_n_jobs(n_jobs) (x, orig_shape, picks) = _prep_for_filtering(x, copy, picks) if ('sos' in iir_params): sosfiltfilt = get_sosfiltfilt() fun = partial(sosfiltfilt, sos=iir_params['sos'], padlen=padlen) _check_coefficients(iir_params['sos']) else: fun = partial(filtfilt, b=iir_params['b'], a=iir_params['a'], padlen=padlen) _check_coefficients((iir_params['b'], iir_params['a'])) if (n_jobs == 1): for p in picks: x[p] = fun(x=x[p]) else: (parallel, p_fun, _) = parallel_func(fun, n_jobs) data_new = parallel((p_fun(x=x[p]) for p in picks)) for (pp, p) in enumerate(picks): x[p] = data_new[pp] x.shape = orig_shape return x
[ "def", "_filtfilt", "(", "x", ",", "iir_params", ",", "picks", ",", "n_jobs", ",", "copy", ")", ":", "from", "scipy", ".", "signal", "import", "filtfilt", "padlen", "=", "min", "(", "iir_params", "[", "'padlen'", "]", ",", "len", "(", "x", ")", ")", ...
helper to more easily call filtfilt .
train
false
35,832
def conv_linear(args, kw, kh, nin, nout, do_bias, bias_start, prefix): assert (args is not None) if (not isinstance(args, (list, tuple))): args = [args] with tf.variable_scope(prefix): k = tf.get_variable('CvK', [kw, kh, nin, nout]) if (len(args) == 1): res = tf.nn.conv2d(args[0], k, [1, 1, 1, 1], 'SAME') else: res = tf.nn.conv2d(tf.concat(3, args), k, [1, 1, 1, 1], 'SAME') if (not do_bias): return res bias_term = tf.get_variable('CvB', [nout], initializer=tf.constant_initializer(0.0)) return ((res + bias_term) + bias_start)
[ "def", "conv_linear", "(", "args", ",", "kw", ",", "kh", ",", "nin", ",", "nout", ",", "do_bias", ",", "bias_start", ",", "prefix", ")", ":", "assert", "(", "args", "is", "not", "None", ")", "if", "(", "not", "isinstance", "(", "args", ",", "(", ...
convolutional linear map .
train
false
35,833
def maybe(typ, val): return (typ(val) if (val is not None) else val)
[ "def", "maybe", "(", "typ", ",", "val", ")", ":", "return", "(", "typ", "(", "val", ")", "if", "(", "val", "is", "not", "None", ")", "else", "val", ")" ]
call typ on value if val is defined .
train
false
35,835
def _api_test_nscript(name, output, kwargs): logging.info('Executing notification script') res = sabnzbd.notifier.send_nscript('SABnzbd', T('Test Notification'), 'other', force=True, test=kwargs) return report(output, error=res)
[ "def", "_api_test_nscript", "(", "name", ",", "output", ",", "kwargs", ")", ":", "logging", ".", "info", "(", "'Executing notification script'", ")", "res", "=", "sabnzbd", ".", "notifier", ".", "send_nscript", "(", "'SABnzbd'", ",", "T", "(", "'Test Notificat...
api: execute a test notification script .
train
false
35,837
def compute_average(imlist): averageim = array(Image.open(imlist[0]), 'f') skipped = 0 for imname in imlist[1:]: try: averageim += array(Image.open(imname)) except: print (imname + '...skipped') skipped += 1 averageim /= (len(imlist) - skipped) return array(averageim, 'uint8')
[ "def", "compute_average", "(", "imlist", ")", ":", "averageim", "=", "array", "(", "Image", ".", "open", "(", "imlist", "[", "0", "]", ")", ",", "'f'", ")", "skipped", "=", "0", "for", "imname", "in", "imlist", "[", "1", ":", "]", ":", "try", ":"...
compute the average of a list of images .
train
false
35,838
def _verify_picture_index(index): if (not (isinstance(index, tuple) and (len(index) == 2))): raise IndexError('Expected 2D index but got {0!r}'.format(index)) if all((isinstance(i, int) for i in index)): return index index = list(index) for (i, dim_slice) in enumerate(index): if isinstance(dim_slice, int): index[i] = dim_slice = slice(dim_slice, (dim_slice + 1)) return tuple(index)
[ "def", "_verify_picture_index", "(", "index", ")", ":", "if", "(", "not", "(", "isinstance", "(", "index", ",", "tuple", ")", "and", "(", "len", "(", "index", ")", "==", "2", ")", ")", ")", ":", "raise", "IndexError", "(", "'Expected 2D index but got {0!...
raise error if picture index is not a 2d index/slice .
train
false
35,839
def si16le(c, o=0): return unpack('<h', c[o:(o + 2)])[0]
[ "def", "si16le", "(", "c", ",", "o", "=", "0", ")", ":", "return", "unpack", "(", "'<h'", ",", "c", "[", "o", ":", "(", "o", "+", "2", ")", "]", ")", "[", "0", "]" ]
converts a 2-bytes string to a signed integer .
train
false
35,841
def new(rsa_key): return PKCS115_SigScheme(rsa_key)
[ "def", "new", "(", "rsa_key", ")", ":", "return", "PKCS115_SigScheme", "(", "rsa_key", ")" ]
create a new xor cipher .
train
false
35,842
def marker_fn(record): type = record.type if (type in (1, 8)): marker = 'asset' elif (type == 3): marker = 'staff' else: marker = 'request' priority = record.priority if (priority == 3): marker = ('%s_red' % marker) elif (priority == 2): marker = ('%s_yellow' % marker) mtable = db.gis_marker marker = db((mtable.name == marker)).select(mtable.image, mtable.height, mtable.width, cache=s3db.cache, limitby=(0, 1)).first() return marker
[ "def", "marker_fn", "(", "record", ")", ":", "type", "=", "record", ".", "type", "if", "(", "type", "in", "(", "1", ",", "8", ")", ")", ":", "marker", "=", "'asset'", "elif", "(", "type", "==", "3", ")", ":", "marker", "=", "'staff'", "else", "...
function to decide which marker to use for requests map .
train
false
35,844
def precompute_idfs(wglobal, dfs, total_docs): return dict(((termid, wglobal(df, total_docs)) for (termid, df) in iteritems(dfs)))
[ "def", "precompute_idfs", "(", "wglobal", ",", "dfs", ",", "total_docs", ")", ":", "return", "dict", "(", "(", "(", "termid", ",", "wglobal", "(", "df", ",", "total_docs", ")", ")", "for", "(", "termid", ",", "df", ")", "in", "iteritems", "(", "dfs",...
precompute the inverse document frequency mapping for all terms .
train
false
35,845
def _sync_db_and_registry(qs, app_id): existing = dict(((m.module, m) for m in qs)) to_add = [m for m in module_registry if (m not in existing)] to_delete = [m for m in existing if (m not in module_registry)] for m in to_add: DiscoveryModule.objects.get_or_create(module=m, app=app_id) DiscoveryModule.objects.filter(module__in=to_delete, app=app_id).delete() if (to_add or to_delete): qs._result_cache = None
[ "def", "_sync_db_and_registry", "(", "qs", ",", "app_id", ")", ":", "existing", "=", "dict", "(", "(", "(", "m", ".", "module", ",", "m", ")", "for", "m", "in", "qs", ")", ")", "to_add", "=", "[", "m", "for", "m", "in", "module_registry", "if", "...
match up the module registry and discoverymodule rows in the db .
train
false
35,846
def VarintEncode(value): result = '' if (value < 0): raise ValueError('Varint can not encode a negative number.') bits = (value & 127) value >>= 7 while value: result += HIGH_CHR_MAP[bits] bits = (value & 127) value >>= 7 result += CHR_MAP[bits] return result
[ "def", "VarintEncode", "(", "value", ")", ":", "result", "=", "''", "if", "(", "value", "<", "0", ")", ":", "raise", "ValueError", "(", "'Varint can not encode a negative number.'", ")", "bits", "=", "(", "value", "&", "127", ")", "value", ">>=", "7", "w...
convert an integer to a varint and write it using the write function .
train
true
35,848
def describe_thing_type(thingTypeName, region=None, key=None, keyid=None, profile=None): try: conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile) res = conn.describe_thing_type(thingTypeName=thingTypeName) if res: res.pop('ResponseMetadata', None) thingTypeMetadata = res.get('thingTypeMetadata') if thingTypeMetadata: for dtype in ('creationDate', 'deprecationDate'): dval = thingTypeMetadata.get(dtype) if (dval and isinstance(dval, datetime.date)): thingTypeMetadata[dtype] = '{0}'.format(dval) return {'thing_type': res} else: return {'thing_type': None} except ClientError as e: err = salt.utils.boto3.get_error(e) if (e.response.get('Error', {}).get('Code') == 'ResourceNotFoundException'): return {'thing_type': None} return {'error': err}
[ "def", "describe_thing_type", "(", "thingTypeName", ",", "region", "=", "None", ",", "key", "=", "None", ",", "keyid", "=", "None", ",", "profile", "=", "None", ")", ":", "try", ":", "conn", "=", "_get_conn", "(", "region", "=", "region", ",", "key", ...
given a thing type name describe its properties .
train
true
35,849
def pack_unit(value): pack = np.zeros((value.shape + (4,)), dtype=np.ubyte) for i in range(4): (value, pack[..., i]) = np.modf((value * 256.0)) return pack
[ "def", "pack_unit", "(", "value", ")", ":", "pack", "=", "np", ".", "zeros", "(", "(", "value", ".", "shape", "+", "(", "4", ",", ")", ")", ",", "dtype", "=", "np", ".", "ubyte", ")", "for", "i", "in", "range", "(", "4", ")", ":", "(", "val...
packs float values between [0 .
train
true
35,850
def dup_sqf_norm(f, K): if (not K.is_Algebraic): raise DomainError('ground domain must be algebraic') (s, g) = (0, dmp_raise(K.mod.rep, 1, 0, K.dom)) while True: (h, _) = dmp_inject(f, 0, K, front=True) r = dmp_resultant(g, h, 1, K.dom) if dup_sqf_p(r, K.dom): break else: (f, s) = (dup_shift(f, (- K.unit), K), (s + 1)) return (s, f, r)
[ "def", "dup_sqf_norm", "(", "f", ",", "K", ")", ":", "if", "(", "not", "K", ".", "is_Algebraic", ")", ":", "raise", "DomainError", "(", "'ground domain must be algebraic'", ")", "(", "s", ",", "g", ")", "=", "(", "0", ",", "dmp_raise", "(", "K", ".",...
square-free norm of f in k[x] .
train
false
35,851
def load_key_bindings(get_search_state=None, enable_abort_and_exit_bindings=False, enable_system_bindings=False, enable_search=False, enable_open_in_editor=False, enable_extra_page_navigation=False, enable_auto_suggest_bindings=False): assert ((get_search_state is None) or callable(get_search_state)) enable_abort_and_exit_bindings = to_cli_filter(enable_abort_and_exit_bindings) enable_system_bindings = to_cli_filter(enable_system_bindings) enable_search = to_cli_filter(enable_search) enable_open_in_editor = to_cli_filter(enable_open_in_editor) enable_extra_page_navigation = to_cli_filter(enable_extra_page_navigation) enable_auto_suggest_bindings = to_cli_filter(enable_auto_suggest_bindings) registry = MergedRegistry([load_basic_bindings(), load_mouse_bindings(), ConditionalRegistry(load_abort_and_exit_bindings(), enable_abort_and_exit_bindings), ConditionalRegistry(load_basic_system_bindings(), enable_system_bindings), load_emacs_bindings(), ConditionalRegistry(load_emacs_open_in_editor_bindings(), enable_open_in_editor), ConditionalRegistry(load_emacs_search_bindings(get_search_state=get_search_state), enable_search), ConditionalRegistry(load_emacs_system_bindings(), enable_system_bindings), ConditionalRegistry(load_extra_emacs_page_navigation_bindings(), enable_extra_page_navigation), load_vi_bindings(get_search_state=get_search_state), ConditionalRegistry(load_vi_open_in_editor_bindings(), enable_open_in_editor), ConditionalRegistry(load_vi_search_bindings(get_search_state=get_search_state), enable_search), ConditionalRegistry(load_vi_system_bindings(), enable_system_bindings), ConditionalRegistry(load_extra_vi_page_navigation_bindings(), enable_extra_page_navigation), ConditionalRegistry(load_auto_suggestion_bindings(), enable_auto_suggest_bindings)]) return registry
[ "def", "load_key_bindings", "(", "get_search_state", "=", "None", ",", "enable_abort_and_exit_bindings", "=", "False", ",", "enable_system_bindings", "=", "False", ",", "enable_search", "=", "False", ",", "enable_open_in_editor", "=", "False", ",", "enable_extra_page_na...
create a registry object that contains the default key bindings .
train
true
35,853
def _recursive_search(path, pattern): filtered_files = list() for (dirpath, dirnames, files) in os.walk(path): for f in fnmatch.filter(files, pattern): if f.endswith(tuple(VALID_EXTENSIONS)): filtered_files.append(op.realpath(op.join(dirpath, f))) return filtered_files
[ "def", "_recursive_search", "(", "path", ",", "pattern", ")", ":", "filtered_files", "=", "list", "(", ")", "for", "(", "dirpath", ",", "dirnames", ",", "files", ")", "in", "os", ".", "walk", "(", "path", ")", ":", "for", "f", "in", "fnmatch", ".", ...
auxiliary function for recursive_search of the directory .
train
false
35,854
def gql(query_string, *args, **kwds): qry = _gql(query_string) if (args or kwds): qry = qry._bind(args, kwds) return qry
[ "def", "gql", "(", "query_string", ",", "*", "args", ",", "**", "kwds", ")", ":", "qry", "=", "_gql", "(", "query_string", ")", "if", "(", "args", "or", "kwds", ")", ":", "qry", "=", "qry", ".", "_bind", "(", "args", ",", "kwds", ")", "return", ...
parse a gql query string .
train
true
35,855
def stripFormatting(text): formatted = parseFormattedText(text) return _textattributes.flatten(formatted, _textattributes.DefaultFormattingState())
[ "def", "stripFormatting", "(", "text", ")", ":", "formatted", "=", "parseFormattedText", "(", "text", ")", "return", "_textattributes", ".", "flatten", "(", "formatted", ",", "_textattributes", ".", "DefaultFormattingState", "(", ")", ")" ]
remove all formatting codes from c{text} .
train
false
35,858
def to_human_time_from_seconds(seconds): assert (isinstance(seconds, int) or isinstance(seconds, long) or isinstance(seconds, float)) return _get_human_time(seconds)
[ "def", "to_human_time_from_seconds", "(", "seconds", ")", ":", "assert", "(", "isinstance", "(", "seconds", ",", "int", ")", "or", "isinstance", "(", "seconds", ",", "long", ")", "or", "isinstance", "(", "seconds", ",", "float", ")", ")", "return", "_get_h...
given a time value in seconds .
train
false
35,860
def cat_extract(tar, member, targetpath): assert member.isreg() targetpath = targetpath.rstrip('/') targetpath = targetpath.replace('/', os.sep) upperdirs = os.path.dirname(targetpath) if (upperdirs and (not os.path.exists(upperdirs))): try: os.makedirs(upperdirs) except EnvironmentError as e: if (e.errno == errno.EEXIST): pass else: raise with files.DeleteOnError(targetpath) as dest: with pipeline.get_cat_pipeline(pipeline.PIPE, dest.f) as pl: fp = tar.extractfile(member) copyfileobj.copyfileobj(fp, pl.stdin) if (sys.version_info < (3, 5)): tar.chown(member, targetpath) else: tar.chown(member, targetpath, False) tar.chmod(member, targetpath) tar.utime(member, targetpath)
[ "def", "cat_extract", "(", "tar", ",", "member", ",", "targetpath", ")", ":", "assert", "member", ".", "isreg", "(", ")", "targetpath", "=", "targetpath", ".", "rstrip", "(", "'/'", ")", "targetpath", "=", "targetpath", ".", "replace", "(", "'/'", ",", ...
extract a regular file member using cat for async-like i/o mostly adapted from tarfile .
train
true
35,861
def get_errors(name): return reduce((lambda memo, obj: (memo + get_rate(('%srequests_%s_errors_errorCount' % (NAME_PREFIX, obj))))), ['DELETE', 'GET', 'POST', 'PUT'], 0)
[ "def", "get_errors", "(", "name", ")", ":", "return", "reduce", "(", "(", "lambda", "memo", ",", "obj", ":", "(", "memo", "+", "get_rate", "(", "(", "'%srequests_%s_errors_errorCount'", "%", "(", "NAME_PREFIX", ",", "obj", ")", ")", ")", ")", ")", ",",...
return errors per second .
train
false
35,862
def get_from_backend(context, uri, **kwargs): loc = location.get_location_from_uri(uri) store = get_store_from_uri(context, uri, loc) return store.get(loc)
[ "def", "get_from_backend", "(", "context", ",", "uri", ",", "**", "kwargs", ")", ":", "loc", "=", "location", ".", "get_location_from_uri", "(", "uri", ")", "store", "=", "get_store_from_uri", "(", "context", ",", "uri", ",", "loc", ")", "return", "store",...
yields chunks of data from backend specified by uri .
train
false
35,863
def greedy(tree, objective=identity, **kwargs): optimize = partial(minimize, objective=objective) return treeapply(tree, {list: optimize, tuple: chain}, **kwargs)
[ "def", "greedy", "(", "tree", ",", "objective", "=", "identity", ",", "**", "kwargs", ")", ":", "optimize", "=", "partial", "(", "minimize", ",", "objective", "=", "objective", ")", "return", "treeapply", "(", "tree", ",", "{", "list", ":", "optimize", ...
execute a strategic tree .
train
false
35,865
@py.test.mark.parametrize('item_name', [item.name for item in six._urllib_parse_moved_attributes]) def test_move_items_urllib_parse(item_name): if ((item_name == 'ParseResult') and (sys.version_info < (2, 5))): py.test.skip('ParseResult is only found on 2.5+') if ((item_name in ('parse_qs', 'parse_qsl')) and (sys.version_info < (2, 6))): py.test.skip('parse_qs[l] is new in 2.6') if (sys.version_info[:2] >= (2, 6)): assert (item_name in dir(six.moves.urllib.parse)) getattr(six.moves.urllib.parse, item_name)
[ "@", "py", ".", "test", ".", "mark", ".", "parametrize", "(", "'item_name'", ",", "[", "item", ".", "name", "for", "item", "in", "six", ".", "_urllib_parse_moved_attributes", "]", ")", "def", "test_move_items_urllib_parse", "(", "item_name", ")", ":", "if", ...
ensure that everything loads correctly .
train
false
35,866
def _selectVerifyImplementation(): whatsWrong = 'Without the service_identity module, Twisted can perform only rudimentary TLS client hostname verification. Many valid certificate/hostname mappings may be rejected.' try: from service_identity import VerificationError from service_identity.pyopenssl import verify_hostname return (verify_hostname, VerificationError) except ImportError as e: warnings.warn_explicit(((("You do not have a working installation of the service_identity module: '" + str(e)) + "'. Please install it from <https://pypi.python.org/pypi/service_identity> and make sure all of its dependencies are satisfied. ") + whatsWrong), category=UserWarning, filename='', lineno=0) return (simpleVerifyHostname, SimpleVerificationError)
[ "def", "_selectVerifyImplementation", "(", ")", ":", "whatsWrong", "=", "'Without the service_identity module, Twisted can perform only rudimentary TLS client hostname verification. Many valid certificate/hostname mappings may be rejected.'", "try", ":", "from", "service_identity", "import"...
determine if c{service_identity} is installed .
train
false
35,868
def match_text_content(matcher): return AfterPreprocessing((lambda content: content.as_text()), matcher)
[ "def", "match_text_content", "(", "matcher", ")", ":", "return", "AfterPreprocessing", "(", "(", "lambda", "content", ":", "content", ".", "as_text", "(", ")", ")", ",", "matcher", ")" ]
match the text of a content instance .
train
false
35,869
def check_required(programs): for prog in programs: if (not which(prog)): raise exception.CommandNotFound(prog) return True
[ "def", "check_required", "(", "programs", ")", ":", "for", "prog", "in", "programs", ":", "if", "(", "not", "which", "(", "prog", ")", ")", ":", "raise", "exception", ".", "CommandNotFound", "(", "prog", ")", "return", "True" ]
checks that all commands in the programs list exist .
train
false
35,871
@testing.requires_testing_data def test_make_inverse_operator_free(): fwd_op = read_forward_solution_meg(fname_fwd, surf_ori=True) fwd_1 = read_forward_solution_meg(fname_fwd, surf_ori=False, force_fixed=False) fwd_2 = read_forward_solution_meg(fname_fwd, surf_ori=False, force_fixed=True) evoked = _get_evoked() noise_cov = read_cov(fname_cov) assert_raises(ValueError, make_inverse_operator, evoked.info, fwd_2, noise_cov, depth=None) inv_1 = make_inverse_operator(evoked.info, fwd_op, noise_cov, loose=None) inv_2 = make_inverse_operator(evoked.info, fwd_op, noise_cov, loose=1) _compare_inverses_approx(inv_1, inv_2, evoked, 0, 0.01) inv_3 = make_inverse_operator(evoked.info, fwd_op, noise_cov, depth=None, loose=None) inv_4 = make_inverse_operator(evoked.info, fwd_1, noise_cov, depth=None, loose=None) _compare_inverses_approx(inv_3, inv_4, evoked, 0, 0.01)
[ "@", "testing", ".", "requires_testing_data", "def", "test_make_inverse_operator_free", "(", ")", ":", "fwd_op", "=", "read_forward_solution_meg", "(", "fname_fwd", ",", "surf_ori", "=", "True", ")", "fwd_1", "=", "read_forward_solution_meg", "(", "fname_fwd", ",", ...
test mne inverse computation .
train
false
35,872
def _run_all(cmd, log_lvl=None, log_msg=None, exitcode=0): res = __salt__['cmd.run_all'](cmd) if (res['retcode'] == exitcode): if res['stdout']: return res['stdout'] else: return True if (log_lvl is not None): log.log(LOG[log_lvl], log_msg.format(res['stderr'])) return False
[ "def", "_run_all", "(", "cmd", ",", "log_lvl", "=", "None", ",", "log_msg", "=", "None", ",", "exitcode", "=", "0", ")", ":", "res", "=", "__salt__", "[", "'cmd.run_all'", "]", "(", "cmd", ")", "if", "(", "res", "[", "'retcode'", "]", "==", "exitco...
simple wrapper around cmd .
train
true
35,873
def _get_x11_vars(): if (not os.environ.get('DISPLAY', '').strip()): return 'DISPLAY=:0.0 ' else: return ''
[ "def", "_get_x11_vars", "(", ")", ":", "if", "(", "not", "os", ".", "environ", ".", "get", "(", "'DISPLAY'", ",", "''", ")", ".", "strip", "(", ")", ")", ":", "return", "'DISPLAY=:0.0 '", "else", ":", "return", "''" ]
return suitable environment definitions for x11 .
train
false
35,876
def _idnaBytes(text): try: import idna except ImportError: return text.encode('idna') else: return idna.encode(text)
[ "def", "_idnaBytes", "(", "text", ")", ":", "try", ":", "import", "idna", "except", "ImportError", ":", "return", "text", ".", "encode", "(", "'idna'", ")", "else", ":", "return", "idna", ".", "encode", "(", "text", ")" ]
convert some text typed by a human into some ascii bytes .
train
false
35,877
@login_required @require_POST def preview_revision(request): wiki_content = request.POST.get('content', '') slug = request.POST.get('slug') locale = request.POST.get('locale') statsd.incr('wiki.preview') if (slug and locale): doc = get_object_or_404(Document, slug=slug, locale=locale) products = doc.get_products() else: products = Product.objects.all() data = {'content': wiki_to_html(wiki_content, request.LANGUAGE_CODE), 'products': products} return render(request, 'wiki/preview.html', data)
[ "@", "login_required", "@", "require_POST", "def", "preview_revision", "(", "request", ")", ":", "wiki_content", "=", "request", ".", "POST", ".", "get", "(", "'content'", ",", "''", ")", "slug", "=", "request", ".", "POST", ".", "get", "(", "'slug'", ")...
create an html fragment preview of the posted wiki syntax .
train
false
35,880
def formataddr(pair): (name, address) = pair if name: quotes = '' if specialsre.search(name): quotes = '"' name = escapesre.sub('\\\\\\g<0>', name) return ('%s%s%s <%s>' % (quotes, name, quotes, address)) return address
[ "def", "formataddr", "(", "pair", ")", ":", "(", "name", ",", "address", ")", "=", "pair", "if", "name", ":", "quotes", "=", "''", "if", "specialsre", ".", "search", "(", "name", ")", ":", "quotes", "=", "'\"'", "name", "=", "escapesre", ".", "sub"...
the inverse of parseaddr() .
train
false
35,881
def get_random_bytes(n): return _get_singleton().read(n)
[ "def", "get_random_bytes", "(", "n", ")", ":", "return", "_get_singleton", "(", ")", ".", "read", "(", "n", ")" ]
return the specified number of cryptographically-strong random bytes .
train
false
35,882
def signal_name(signum): return SIGMAP[signum][3:]
[ "def", "signal_name", "(", "signum", ")", ":", "return", "SIGMAP", "[", "signum", "]", "[", "3", ":", "]" ]
return name of signal from signal number .
train
false
35,884
def p_statement_expr(): print t[1]
[ "def", "p_statement_expr", "(", ")", ":", "print", "t", "[", "1", "]" ]
statement : expression .
train
false
35,885
def request_user_has_resource_db_permission(permission_type): valid_method_names = ['get_one', 'get', 'post', 'put', 'delete'] def decorate(func): function_name = func.__name__ if (function_name not in valid_method_names): raise Exception(('This decorator should only be used to wrap %s methods' % ', '.join(valid_method_names))) @wraps(func) def func_wrapper(*args, **kwargs): controller_instance = args[0] resource_id = args[1] get_one_db_method = controller_instance.get_one_db_method resource_db = get_one_db_method(resource_id) assert (resource_db is not None) utils.assert_request_user_has_resource_db_permission(request=pecan.request, resource_db=resource_db, permission_type=permission_type) return func(*args, **kwargs) return func_wrapper return decorate
[ "def", "request_user_has_resource_db_permission", "(", "permission_type", ")", ":", "valid_method_names", "=", "[", "'get_one'", ",", "'get'", ",", "'post'", ",", "'put'", ",", "'delete'", "]", "def", "decorate", "(", "func", ")", ":", "function_name", "=", "fun...
a decorator meant to wrap post .
train
false
35,886
def _get_code(data, position, obj_end, opts, element_name): (code, position) = _get_string(data, position, obj_end, opts, element_name) return (Code(code), position)
[ "def", "_get_code", "(", "data", ",", "position", ",", "obj_end", ",", "opts", ",", "element_name", ")", ":", "(", "code", ",", "position", ")", "=", "_get_string", "(", "data", ",", "position", ",", "obj_end", ",", "opts", ",", "element_name", ")", "r...
decode a bson code to bson .
train
true
35,887
def _objectsToStrings(objects, arglist, strings, proto): myObjects = objects.copy() for (argname, argparser) in arglist: argparser.toBox(argname, strings, myObjects, proto) return strings
[ "def", "_objectsToStrings", "(", "objects", ",", "arglist", ",", "strings", ",", "proto", ")", ":", "myObjects", "=", "objects", ".", "copy", "(", ")", "for", "(", "argname", ",", "argparser", ")", "in", "arglist", ":", "argparser", ".", "toBox", "(", ...
convert a dictionary of python objects to an ampbox .
train
false
35,888
def is_installed_extension(name, user=None, host=None, port=None, maintenance_db=None, password=None, runas=None): installed_ext = get_installed_extension(name, user=user, host=host, port=port, maintenance_db=maintenance_db, password=password, runas=runas) return bool(installed_ext)
[ "def", "is_installed_extension", "(", "name", ",", "user", "=", "None", ",", "host", "=", "None", ",", "port", "=", "None", ",", "maintenance_db", "=", "None", ",", "password", "=", "None", ",", "runas", "=", "None", ")", ":", "installed_ext", "=", "ge...
test if a specific extension is installed cli example: .
train
true
35,889
def test_import_vispy_gloo(): modnames = loaded_vispy_modules('vispy.gloo', 2) assert_equal(modnames, set((_min_modules + ['vispy.gloo', 'vispy.glsl', 'vispy.color'])))
[ "def", "test_import_vispy_gloo", "(", ")", ":", "modnames", "=", "loaded_vispy_modules", "(", "'vispy.gloo'", ",", "2", ")", "assert_equal", "(", "modnames", ",", "set", "(", "(", "_min_modules", "+", "[", "'vispy.gloo'", ",", "'vispy.glsl'", ",", "'vispy.color'...
importing vispy .
train
false
35,890
def log_softmax(x, use_cudnn=True): return LogSoftmax(use_cudnn)(x)
[ "def", "log_softmax", "(", "x", ",", "use_cudnn", "=", "True", ")", ":", "return", "LogSoftmax", "(", "use_cudnn", ")", "(", "x", ")" ]
channelwise log-softmax function .
train
false
35,892
def configFileProxy(section, option, boolean=False, integer=False): global config if config.has_option(section, option): try: if boolean: value = (config.getboolean(section, option) if config.get(section, option) else False) elif integer: value = (config.getint(section, option) if config.get(section, option) else 0) else: value = config.get(section, option) except ValueError as ex: errMsg = 'error occurred while processing the option ' errMsg += ("'%s' in provided configuration file ('%s')" % (option, getUnicode(ex))) raise SqlmapSyntaxException(errMsg) if value: conf[option] = value else: conf[option] = None else: debugMsg = ("missing requested option '%s' (section " % option) debugMsg += ("'%s') into the configuration file, " % section) debugMsg += 'ignoring. Skipping to next.' logger.debug(debugMsg)
[ "def", "configFileProxy", "(", "section", ",", "option", ",", "boolean", "=", "False", ",", "integer", "=", "False", ")", ":", "global", "config", "if", "config", ".", "has_option", "(", "section", ",", "option", ")", ":", "try", ":", "if", "boolean", ...
parse configuration file and save settings into the configuration advanced dictionary .
train
false
35,893
def isSegmentInX(segment, xFirst, xSecond): segmentFirstX = segment[0].point.real segmentSecondX = segment[1].point.real if (min(segmentFirstX, segmentSecondX) > max(xFirst, xSecond)): return False return (max(segmentFirstX, segmentSecondX) > min(xFirst, xSecond))
[ "def", "isSegmentInX", "(", "segment", ",", "xFirst", ",", "xSecond", ")", ":", "segmentFirstX", "=", "segment", "[", "0", "]", ".", "point", ".", "real", "segmentSecondX", "=", "segment", "[", "1", "]", ".", "point", ".", "real", "if", "(", "min", "...
determine if the segment overlaps within x .
train
false
35,894
def _get_filenames_and_classes(dataset_dir): flower_root = os.path.join(dataset_dir, 'flower_photos') directories = [] class_names = [] for filename in os.listdir(flower_root): path = os.path.join(flower_root, filename) if os.path.isdir(path): directories.append(path) class_names.append(filename) photo_filenames = [] for directory in directories: for filename in os.listdir(directory): path = os.path.join(directory, filename) photo_filenames.append(path) return (photo_filenames, sorted(class_names))
[ "def", "_get_filenames_and_classes", "(", "dataset_dir", ")", ":", "flower_root", "=", "os", ".", "path", ".", "join", "(", "dataset_dir", ",", "'flower_photos'", ")", "directories", "=", "[", "]", "class_names", "=", "[", "]", "for", "filename", "in", "os",...
returns a list of filenames and inferred class names .
train
false
35,895
def _compute_cov_epochs(epochs, n_jobs): (parallel, p_fun, _) = parallel_func(np.dot, n_jobs) data = parallel((p_fun(e, e.T) for e in epochs)) n_epochs = len(data) if (n_epochs == 0): raise RuntimeError('No good epochs found') (n_chan, n_samples) = (epochs.info['nchan'], len(epochs.times)) _check_n_samples((n_samples * n_epochs), n_chan) data = sum(data) return data
[ "def", "_compute_cov_epochs", "(", "epochs", ",", "n_jobs", ")", ":", "(", "parallel", ",", "p_fun", ",", "_", ")", "=", "parallel_func", "(", "np", ".", "dot", ",", "n_jobs", ")", "data", "=", "parallel", "(", "(", "p_fun", "(", "e", ",", "e", "."...
helper function for computing epochs covariance .
train
false
35,896
def get_history(obj, key, passive=PASSIVE_OFF): if (passive is True): util.warn_deprecated("Passing True for 'passive' is deprecated. Use attributes.PASSIVE_NO_INITIALIZE") passive = PASSIVE_NO_INITIALIZE elif (passive is False): util.warn_deprecated("Passing False for 'passive' is deprecated. Use attributes.PASSIVE_OFF") passive = PASSIVE_OFF return get_state_history(instance_state(obj), key, passive)
[ "def", "get_history", "(", "obj", ",", "key", ",", "passive", "=", "PASSIVE_OFF", ")", ":", "if", "(", "passive", "is", "True", ")", ":", "util", ".", "warn_deprecated", "(", "\"Passing True for 'passive' is deprecated. Use attributes.PASSIVE_NO_INITIALIZE\"", ")", ...
return a :class: .
train
false
35,897
def make_symbolizer(project, binary_images, referenced_images=None): driver = Driver() to_load = referenced_images if (to_load is None): to_load = [x['uuid'] for x in binary_images] (dsym_paths, loaded) = dsymcache.fetch_dsyms(project, to_load) user_images = [] for img in binary_images: if (img['uuid'] in loaded): user_images.append(img) return ReportSymbolizer(driver, dsym_paths, user_images)
[ "def", "make_symbolizer", "(", "project", ",", "binary_images", ",", "referenced_images", "=", "None", ")", ":", "driver", "=", "Driver", "(", ")", "to_load", "=", "referenced_images", "if", "(", "to_load", "is", "None", ")", ":", "to_load", "=", "[", "x",...
creates a symbolizer for the given project and binary images .
train
false
35,899
def get_unique_parents(entity_list): unique_parents = set((entity.get_parent() for entity in entity_list)) return list(unique_parents)
[ "def", "get_unique_parents", "(", "entity_list", ")", ":", "unique_parents", "=", "set", "(", "(", "entity", ".", "get_parent", "(", ")", "for", "entity", "in", "entity_list", ")", ")", "return", "list", "(", "unique_parents", ")" ]
translate a list of entities to a list of their parents .
train
false
35,900
@task(base=BaseInstructorTask) def delete_problem_state(entry_id, xmodule_instance_args): action_name = ugettext_noop('deleted') update_fcn = partial(delete_problem_module_state, xmodule_instance_args) visit_fcn = partial(perform_module_state_update, update_fcn, None) return run_main_task(entry_id, visit_fcn, action_name)
[ "@", "task", "(", "base", "=", "BaseInstructorTask", ")", "def", "delete_problem_state", "(", "entry_id", ",", "xmodule_instance_args", ")", ":", "action_name", "=", "ugettext_noop", "(", "'deleted'", ")", "update_fcn", "=", "partial", "(", "delete_problem_module_st...
deletes problem state entirely for all students on a particular problem in a course .
train
false
35,902
def set_sleep_on_power_button(enabled): state = salt.utils.mac_utils.validate_enabled(enabled) cmd = 'systemsetup -setallowpowerbuttontosleepcomputer {0}'.format(state) salt.utils.mac_utils.execute_return_success(cmd) return salt.utils.mac_utils.confirm_updated(state, get_sleep_on_power_button)
[ "def", "set_sleep_on_power_button", "(", "enabled", ")", ":", "state", "=", "salt", ".", "utils", ".", "mac_utils", ".", "validate_enabled", "(", "enabled", ")", "cmd", "=", "'systemsetup -setallowpowerbuttontosleepcomputer {0}'", ".", "format", "(", "state", ")", ...
set whether or not the power button can sleep the computer .
train
true
35,904
def get_image_front(releaseid, size=None): return get_image(releaseid, 'front', size=size)
[ "def", "get_image_front", "(", "releaseid", ",", "size", "=", "None", ")", ":", "return", "get_image", "(", "releaseid", ",", "'front'", ",", "size", "=", "size", ")" ]
download the front cover art for a release .
train
false
35,907
def wait_for_any_log(nodes, pattern, timeout, filename='system.log'): for _ in range(timeout): for node in nodes: found = node.grep_log(pattern, filename=filename) if found: return node time.sleep(1) raise TimeoutError((((((time.strftime('%d %b %Y %H:%M:%S', time.gmtime()) + ' Unable to find: ') + repr(pattern)) + ' in any node log within ') + str(timeout)) + 's'))
[ "def", "wait_for_any_log", "(", "nodes", ",", "pattern", ",", "timeout", ",", "filename", "=", "'system.log'", ")", ":", "for", "_", "in", "range", "(", "timeout", ")", ":", "for", "node", "in", "nodes", ":", "found", "=", "node", ".", "grep_log", "(",...
look for a pattern in the system .
train
true
35,908
def test_no_retry_on_keyboadinterrupt(collect): key_name = 'test-key-name' b = B(name=key_name) class MarkedKeyboardInterrupt(KeyboardInterrupt, ): pass collect.inject(MarkedKeyboardInterrupt('SIGINT, probably')) d = wabs_deleter.Deleter(BlobService('test', 'ing'), 'test-container') with pytest.raises(MarkedKeyboardInterrupt): d.delete(b) while True: gevent.sleep(0.1) assert (collect.aborted_keys == [key_name]) collect.inject(None) d.close() assert (not collect.deleted_keys)
[ "def", "test_no_retry_on_keyboadinterrupt", "(", "collect", ")", ":", "key_name", "=", "'test-key-name'", "b", "=", "B", "(", "name", "=", "key_name", ")", "class", "MarkedKeyboardInterrupt", "(", "KeyboardInterrupt", ",", ")", ":", "pass", "collect", ".", "inje...
ensure that keyboardinterrupts are forwarded .
train
false
35,909
def _require_permission_set(user, directory, positive_permissions=None, negative_permissions=None): from pootle_app.models.permissions import PermissionSet criteria = {'user': user, 'directory': directory} permission_set = PermissionSet.objects.get_or_create(**criteria)[0] if (positive_permissions is not None): permission_set.positive_permissions.set(positive_permissions) if (negative_permissions is not None): permission_set.negative_permissions.set(negative_permissions) permission_set.save() return permission_set
[ "def", "_require_permission_set", "(", "user", ",", "directory", ",", "positive_permissions", "=", "None", ",", "negative_permissions", "=", "None", ")", ":", "from", "pootle_app", ".", "models", ".", "permissions", "import", "PermissionSet", "criteria", "=", "{",...
helper to get/create a new permissionset .
train
false
35,910
def find_eggs_in_zip(importer, path_item, only=False): if importer.archive.endswith('.whl'): return metadata = EggMetadata(importer) if metadata.has_metadata('PKG-INFO'): (yield Distribution.from_filename(path_item, metadata=metadata)) if only: return for subitem in metadata.resource_listdir('/'): if subitem.endswith('.egg'): subpath = os.path.join(path_item, subitem) for dist in find_eggs_in_zip(zipimport.zipimporter(subpath), subpath): (yield dist)
[ "def", "find_eggs_in_zip", "(", "importer", ",", "path_item", ",", "only", "=", "False", ")", ":", "if", "importer", ".", "archive", ".", "endswith", "(", "'.whl'", ")", ":", "return", "metadata", "=", "EggMetadata", "(", "importer", ")", "if", "metadata",...
find eggs in zip files; possibly multiple nested eggs .
train
true
35,912
def extract_patches(arr, patch_shape=8, extraction_step=1): arr_ndim = arr.ndim if isinstance(patch_shape, numbers.Number): patch_shape = tuple(([patch_shape] * arr_ndim)) if isinstance(extraction_step, numbers.Number): extraction_step = tuple(([extraction_step] * arr_ndim)) patch_strides = arr.strides slices = [slice(None, None, st) for st in extraction_step] indexing_strides = arr[slices].strides patch_indices_shape = (((np.array(arr.shape) - np.array(patch_shape)) // np.array(extraction_step)) + 1) shape = tuple((list(patch_indices_shape) + list(patch_shape))) strides = tuple((list(indexing_strides) + list(patch_strides))) patches = as_strided(arr, shape=shape, strides=strides) return patches
[ "def", "extract_patches", "(", "arr", ",", "patch_shape", "=", "8", ",", "extraction_step", "=", "1", ")", ":", "arr_ndim", "=", "arr", ".", "ndim", "if", "isinstance", "(", "patch_shape", ",", "numbers", ".", "Number", ")", ":", "patch_shape", "=", "tup...
extracts patches of any n-dimensional array in place using strides .
train
false
35,913
def safe_getmembers(object, predicate=None): results = [] for key in dir(object): try: value = safe_getattr(object, key, None) except AttributeError: continue if ((not predicate) or predicate(value)): results.append((key, value)) results.sort() return results
[ "def", "safe_getmembers", "(", "object", ",", "predicate", "=", "None", ")", ":", "results", "=", "[", "]", "for", "key", "in", "dir", "(", "object", ")", ":", "try", ":", "value", "=", "safe_getattr", "(", "object", ",", "key", ",", "None", ")", "...
a version of inspect .
train
false
35,914
def validate_comma_separated_list(setting, value, option_parser, config_parser=None, config_section=None): if (not isinstance(value, list)): value = [value] last = value.pop() items = [i.strip(u' DCTB \n') for i in last.split(u',') if i.strip(u' DCTB \n')] value.extend(items) return value
[ "def", "validate_comma_separated_list", "(", "setting", ",", "value", ",", "option_parser", ",", "config_parser", "=", "None", ",", "config_section", "=", "None", ")", ":", "if", "(", "not", "isinstance", "(", "value", ",", "list", ")", ")", ":", "value", ...
check/normalize list arguments .
train
false
35,915
def _save_chain(chain_pem, chain_file): try: chain_file.write(chain_pem) finally: chain_file.close() logger.info('Cert chain written to %s', chain_file.name)
[ "def", "_save_chain", "(", "chain_pem", ",", "chain_file", ")", ":", "try", ":", "chain_file", ".", "write", "(", "chain_pem", ")", "finally", ":", "chain_file", ".", "close", "(", ")", "logger", ".", "info", "(", "'Cert chain written to %s'", ",", "chain_fi...
saves chain_pem at a unique path based on chain_path .
train
false
35,916
def initCommonOutputs(): kb.commonOutputs = {} key = None with openFile(paths.COMMON_OUTPUTS, 'r') as f: for line in f.readlines(): if (line.find('#') != (-1)): line = line[:line.find('#')] line = line.strip() if (len(line) > 1): if (line.startswith('[') and line.endswith(']')): key = line[1:(-1)] elif key: if (key not in kb.commonOutputs): kb.commonOutputs[key] = set() if (line not in kb.commonOutputs[key]): kb.commonOutputs[key].add(line)
[ "def", "initCommonOutputs", "(", ")", ":", "kb", ".", "commonOutputs", "=", "{", "}", "key", "=", "None", "with", "openFile", "(", "paths", ".", "COMMON_OUTPUTS", ",", "'r'", ")", "as", "f", ":", "for", "line", "in", "f", ".", "readlines", "(", ")", ...
initializes dictionary containing common output values used by "good samaritan" feature .
train
false
35,917
def config_read_prefixed_options(conf, prefix_name, defaults): params = {} for option_name in defaults.keys(): value = conf.get(('%s%s' % (prefix_name, option_name))) if value: if isinstance(defaults.get(option_name), list): params[option_name] = [] for role in value.lower().split(','): params[option_name].append(role.strip()) else: params[option_name] = value.strip() return params
[ "def", "config_read_prefixed_options", "(", "conf", ",", "prefix_name", ",", "defaults", ")", ":", "params", "=", "{", "}", "for", "option_name", "in", "defaults", ".", "keys", "(", ")", ":", "value", "=", "conf", ".", "get", "(", "(", "'%s%s'", "%", "...
read prefixed options from configuration .
train
false
35,918
def res_xml(res, *args, **kw): return parse_xml(res.text, *args, **kw)
[ "def", "res_xml", "(", "res", ",", "*", "args", ",", "**", "kw", ")", ":", "return", "parse_xml", "(", "res", ".", "text", ",", "*", "args", ",", "**", "kw", ")" ]
this function is deprecated .
train
false
35,919
def get_request_promotions(request): promotions = PagePromotion._default_manager.select_related().prefetch_related('content_object').filter(page_url=request.path).order_by('display_order') if ('q' in request.GET): keyword_promotions = KeywordPromotion._default_manager.select_related().filter(keyword=request.GET['q']) if keyword_promotions.exists(): promotions = list(chain(promotions, keyword_promotions)) return promotions
[ "def", "get_request_promotions", "(", "request", ")", ":", "promotions", "=", "PagePromotion", ".", "_default_manager", ".", "select_related", "(", ")", ".", "prefetch_related", "(", "'content_object'", ")", ".", "filter", "(", "page_url", "=", "request", ".", "...
return promotions relevant to this request .
train
false
35,920
def create_test_cache_folder(): if (not os.path.isdir(sickbeard.CACHE_DIR)): os.mkdir(sickbeard.CACHE_DIR)
[ "def", "create_test_cache_folder", "(", ")", ":", "if", "(", "not", "os", ".", "path", ".", "isdir", "(", "sickbeard", ".", "CACHE_DIR", ")", ")", ":", "os", ".", "mkdir", "(", "sickbeard", ".", "CACHE_DIR", ")" ]
create a cache folder for caching tests .
train
false
35,921
def to_nice_yaml(a, indent=4, *args, **kw): transformed = yaml.dump(a, Dumper=AnsibleDumper, indent=indent, allow_unicode=True, default_flow_style=False, **kw) return to_text(transformed)
[ "def", "to_nice_yaml", "(", "a", ",", "indent", "=", "4", ",", "*", "args", ",", "**", "kw", ")", ":", "transformed", "=", "yaml", ".", "dump", "(", "a", ",", "Dumper", "=", "AnsibleDumper", ",", "indent", "=", "indent", ",", "allow_unicode", "=", ...
make verbose .
train
false
35,922
def axis0_safe_slice(X, mask, len_mask): if (len_mask != 0): return X[safe_mask(X, mask), :] return np.zeros(shape=(0, X.shape[1]))
[ "def", "axis0_safe_slice", "(", "X", ",", "mask", ",", "len_mask", ")", ":", "if", "(", "len_mask", "!=", "0", ")", ":", "return", "X", "[", "safe_mask", "(", "X", ",", "mask", ")", ",", ":", "]", "return", "np", ".", "zeros", "(", "shape", "=", ...
this mask is safer than safe_mask since it returns an empty array .
train
false
35,924
def object_session(instance): try: return _state_session(attributes.instance_state(instance)) except exc.NO_STATE: raise exc.UnmappedInstanceError(instance)
[ "def", "object_session", "(", "instance", ")", ":", "try", ":", "return", "_state_session", "(", "attributes", ".", "instance_state", "(", "instance", ")", ")", "except", "exc", ".", "NO_STATE", ":", "raise", "exc", ".", "UnmappedInstanceError", "(", "instance...
return the session to which instance belongs .
train
false
35,925
def del_cached_content(location): def location_str(loc): 'Force the location to a Unicode string.' return unicode(loc).encode('utf-8') locations = [location_str(location)] try: locations.append(location_str(location.replace(run=None))) except InvalidKeyError: pass CONTENT_CACHE.delete_many(locations, version=STATIC_CONTENT_VERSION)
[ "def", "del_cached_content", "(", "location", ")", ":", "def", "location_str", "(", "loc", ")", ":", "return", "unicode", "(", "loc", ")", ".", "encode", "(", "'utf-8'", ")", "locations", "=", "[", "location_str", "(", "location", ")", "]", "try", ":", ...
delete content for the given location .
train
false
35,926
def graycode_subsets(gray_code_set): for bitstring in list(GrayCode(len(gray_code_set)).generate_gray()): (yield get_subset_from_bitstring(gray_code_set, bitstring))
[ "def", "graycode_subsets", "(", "gray_code_set", ")", ":", "for", "bitstring", "in", "list", "(", "GrayCode", "(", "len", "(", "gray_code_set", ")", ")", ".", "generate_gray", "(", ")", ")", ":", "(", "yield", "get_subset_from_bitstring", "(", "gray_code_set",...
generates the subsets as enumerated by a gray code .
train
false
35,927
def _subprocess(cmd): try: proc = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE) ret = utils.to_str(proc.communicate()[0]).strip() retcode = proc.wait() if ret: return ret elif (retcode != 1): return True else: return False except OSError as err: log.error(err) return False
[ "def", "_subprocess", "(", "cmd", ")", ":", "try", ":", "proc", "=", "subprocess", ".", "Popen", "(", "cmd", ",", "shell", "=", "True", ",", "stdout", "=", "subprocess", ".", "PIPE", ")", "ret", "=", "utils", ".", "to_str", "(", "proc", ".", "commu...
function to standardize the subprocess call .
train
false
35,928
def get_profile_class(): setting = getattr(settings, 'AUTH_PROFILE_MODULE', None) if (setting is None): return None (app_label, model_name) = settings.AUTH_PROFILE_MODULE.split('.') return get_model(app_label, model_name)
[ "def", "get_profile_class", "(", ")", ":", "setting", "=", "getattr", "(", "settings", ",", "'AUTH_PROFILE_MODULE'", ",", "None", ")", "if", "(", "setting", "is", "None", ")", ":", "return", "None", "(", "app_label", ",", "model_name", ")", "=", "settings"...
return the profile model class .
train
false
35,929
@app.route('/account/<subscription_id>/providers/<provider_namespace>/unregister', methods=['POST']) @auth.require_login def provider_unregister_post(subscription_id, provider_namespace): creds = _get_credentials() models.unregister_provider(subscription_id, creds, provider_namespace) return redirect(url_for('subscription_view', subscription_id=subscription_id))
[ "@", "app", ".", "route", "(", "'/account/<subscription_id>/providers/<provider_namespace>/unregister'", ",", "methods", "=", "[", "'POST'", "]", ")", "@", "auth", ".", "require_login", "def", "provider_unregister_post", "(", "subscription_id", ",", "provider_namespace", ...
unregister provider request .
train
false
35,930
def _psd_from_mt(x_mt, weights): psd = (weights * x_mt) psd *= psd.conj() psd = psd.real.sum(axis=(-2)) psd *= (2 / (weights * weights.conj()).real.sum(axis=(-2))) return psd
[ "def", "_psd_from_mt", "(", "x_mt", ",", "weights", ")", ":", "psd", "=", "(", "weights", "*", "x_mt", ")", "psd", "*=", "psd", ".", "conj", "(", ")", "psd", "=", "psd", ".", "real", ".", "sum", "(", "axis", "=", "(", "-", "2", ")", ")", "psd...
compute psd from tapered spectra .
train
false