id_within_dataset
int64
1
55.5k
snippet
stringlengths
19
14.2k
tokens
listlengths
6
1.63k
nl
stringlengths
6
352
split_within_dataset
stringclasses
1 value
is_duplicated
bool
2 classes
36,493
def write_version(name):
    """Write a fresh syslog-ng configuration containing the '@version' line.

    Any previously generated configuration file is removed first.  Returns
    the usual state-result mapping: result True on success, False when the
    old file could not be removed.
    """
    version_line = '@version: {0}'.format(name)
    try:
        if os.path.exists(__SYSLOG_NG_CONFIG_FILE):
            log.debug('Removing previous configuration file: {0}'.format(__SYSLOG_NG_CONFIG_FILE))
            os.remove(__SYSLOG_NG_CONFIG_FILE)
            log.debug('Configuration file successfully removed')
        # Re-create the file: generated header first, then the version line.
        _write_config(config=_format_generated_config_header(), newlines=1)
        _write_config(config=version_line, newlines=2)
        return _format_state_result(name, result=True)
    except OSError as exc:
        log.error("Failed to remove previous configuration file '{0}': {1}".format(__SYSLOG_NG_CONFIG_FILE, str(exc)))
        return _format_state_result(name, result=False)
[ "def", "write_version", "(", "name", ")", ":", "line", "=", "'@version: {0}'", ".", "format", "(", "name", ")", "try", ":", "if", "os", ".", "path", ".", "exists", "(", "__SYSLOG_NG_CONFIG_FILE", ")", ":", "log", ".", "debug", "(", "'Removing previous conf...
removes the previous configuration file .
train
true
36,494
def kronsum(A, B, format=None):
    """Kronecker sum of square sparse matrices A (m x m) and B (n x n).

    The Kronecker sum is kron(I_n, A) + kron(B, I_m); the result is
    returned in the requested sparse *format*.  Raises ValueError when
    either operand is not square.
    """
    A = coo_matrix(A)
    B = coo_matrix(B)
    if A.shape[0] != A.shape[1]:
        raise ValueError('A is not square')
    if B.shape[0] != B.shape[1]:
        raise ValueError('B is not square')
    dtype = upcast(A.dtype, B.dtype)
    left_term = kron(eye(B.shape[0], dtype=dtype), A, format=format)
    right_term = kron(B, eye(A.shape[0], dtype=dtype), format=format)
    return (left_term + right_term).asformat(format)
[ "def", "kronsum", "(", "A", ",", "B", ",", "format", "=", "None", ")", ":", "A", "=", "coo_matrix", "(", "A", ")", "B", "=", "coo_matrix", "(", "B", ")", "if", "(", "A", ".", "shape", "[", "0", "]", "!=", "A", ".", "shape", "[", "1", "]", ...
kronecker sum of sparse matrices a and b kronecker sum of two sparse matrices is a sum of two kronecker products kron + kron where a has shape and b has shape and i_m and i_n are identity matrices of shape and respectively .
train
false
36,495
def SaveWindowSize(section, rect, state=''):
    """Persist a window rectangle to the application's INI file.

    *rect* is a (left, top, right, bottom) tuple; *state* (e.g. a window
    state name) becomes a "<state> " prefix on each stored value name.
    """
    prefix = (state + ' ') if state else state
    for value_name, value in zip(('left', 'top', 'right', 'bottom'), rect):
        win32ui.WriteProfileVal(section, prefix + value_name, value)
[ "def", "SaveWindowSize", "(", "section", ",", "rect", ",", "state", "=", "''", ")", ":", "(", "left", ",", "top", ",", "right", ",", "bottom", ")", "=", "rect", "if", "state", ":", "state", "=", "(", "state", "+", "' '", ")", "win32ui", ".", "Wri...
writes a rectangle to an ini file args: section = section name in the applications ini file rect = a rectangle in a tuple .
train
false
36,497
@register(u'uppercase-word')
def uppercase_word(event):
    """Uppercase from the cursor to the end of the word, event.arg times."""
    buf = event.current_buffer
    for _ in range(event.arg):
        end = buf.document.find_next_word_ending()
        segment = buf.document.text_after_cursor[:end]
        buf.insert_text(segment.upper(), overwrite=True)
[ "@", "register", "(", "u'uppercase-word'", ")", "def", "uppercase_word", "(", "event", ")", ":", "buff", "=", "event", ".", "current_buffer", "for", "i", "in", "range", "(", "event", ".", "arg", ")", ":", "pos", "=", "buff", ".", "document", ".", "find...
uppercase the current word .
train
true
36,498
def train_rpn(queue=None, imdb_name=None, init_model=None, solver=None, max_iters=None, cfg=None):
    """Train a Region Proposal Network in a separate training process.

    Configures the RPN training stage, trains with ground-truth proposals,
    deletes intermediate snapshots, and puts the final model path on *queue*.
    """
    # RPN stage settings: gt proposals only, no bbox regression, 1 image/batch.
    cfg.TRAIN.HAS_RPN = True
    cfg.TRAIN.BBOX_REG = False
    cfg.TRAIN.PROPOSAL_METHOD = 'gt'
    cfg.TRAIN.IMS_PER_BATCH = 1
    print 'Init model: {}'.format(init_model)
    print 'Using config:'
    pprint.pprint(cfg)
    # Imported here so caffe is initialised inside this worker process.
    import caffe
    _init_caffe(cfg)
    (roidb, imdb) = get_roidb(imdb_name)
    print 'roidb len: {}'.format(len(roidb))
    output_dir = get_output_dir(imdb)
    print 'Output will be saved to `{:s}`'.format(output_dir)
    model_paths = train_net(solver, roidb, output_dir, pretrained_model=init_model, max_iters=max_iters)
    # Keep only the last snapshot; remove intermediate ones to save disk.
    for i in model_paths[:(-1)]:
        os.remove(i)
    rpn_model_path = model_paths[(-1)]
    # Hand the trained model path back to the parent process.
    queue.put({'model_path': rpn_model_path})
[ "def", "train_rpn", "(", "queue", "=", "None", ",", "imdb_name", "=", "None", ",", "init_model", "=", "None", ",", "solver", "=", "None", ",", "max_iters", "=", "None", ",", "cfg", "=", "None", ")", ":", "cfg", ".", "TRAIN", ".", "HAS_RPN", "=", "T...
train a region proposal network in a separate training process .
train
false
36,499
def gis_marker_retrieve_file_properties(filename, path=None):
    """Override of web2py DAL's standard retrieve_file_properties for markers.

    Splits a single leading directory component off *filename* and joins it
    onto *path* (defaulting to the gis_marker image upload folder).
    """
    if not path:
        path = current.db.gis_marker.image.uploadfolder
    if '/' in filename:
        parts = filename.split('/', 1)
        if len(parts) > 1:
            subdir, filename = parts
        else:
            subdir, filename = '', filename
        path = os.path.join(path, subdir)
    return {'path': path, 'filename': filename}
[ "def", "gis_marker_retrieve_file_properties", "(", "filename", ",", "path", "=", "None", ")", ":", "if", "(", "not", "path", ")", ":", "path", "=", "current", ".", "db", ".", "gis_marker", ".", "image", ".", "uploadfolder", "if", "(", "'/'", "in", "filen...
custom method to override web2py dals standard retrieve_file_properties .
train
false
36,500
def realpath_with_context(path, context):
    """Resolve *path* against context.workdir when it is relative.

    Absolute paths are returned untouched; relative ones are normalised
    and joined onto the (required) working directory.
    """
    if os.path.isabs(path):
        return path
    assert context.workdir
    return os.path.join(context.workdir, os.path.normpath(path))
[ "def", "realpath_with_context", "(", "path", ",", "context", ")", ":", "if", "(", "not", "os", ".", "path", ".", "isabs", "(", "path", ")", ")", ":", "assert", "context", ".", "workdir", "path", "=", "os", ".", "path", ".", "join", "(", "context", ...
convert a path into its realpath: * for relative path: use :attr:context .
train
true
36,502
def rename_keys(dict_, key_map, reverse=False, destructive=False):
    """Return a copy of *dict_* with keys renamed according to *key_map*.

    key_map maps old -> new names (directions swapped when reverse=True).
    Unless destructive=True, a rename is skipped when the target key
    already exists in the result.  Fix: uses the ``in`` operator instead
    of the Python-2-only ``dict.has_key()``.
    """
    new_dict = deepcopy(dict_)
    for (key, mapping) in key_map.items():
        if reverse:
            (key, mapping) = (mapping, key)
        # Don't clobber an existing target key unless asked to.
        if (not destructive) and (mapping in new_dict):
            continue
        if key in dict_:
            value = dict_[key]
            new_dict[mapping] = value
            del new_dict[key]
    return new_dict
[ "def", "rename_keys", "(", "dict_", ",", "key_map", ",", "reverse", "=", "False", ",", "destructive", "=", "False", ")", ":", "new_dict", "=", "deepcopy", "(", "dict_", ")", "for", "(", "key", ",", "mapping", ")", "in", "key_map", ".", "items", "(", ...
returns a dict that has particular keys renamed .
train
false
36,503
@then(u'we see record deleted')
def step_see_data_deleted(context):
    """Behave step: wait (up to 2s) for the CLI to print 'DELETE 1'."""
    _expect_exact(context, u'DELETE 1', timeout=2)
[ "@", "then", "(", "u'we see record deleted'", ")", "def", "step_see_data_deleted", "(", "context", ")", ":", "_expect_exact", "(", "context", ",", "u'DELETE 1'", ",", "timeout", "=", "2", ")" ]
wait to see delete output .
train
false
36,504
def str(val):
    """Convert a float to a string with up to 12 significant digits.

    NOTE(review): deliberately shadows the builtin str() within this
    module.  `format` here is presumably this module's own helper taking
    (format_string, value) rather than the builtin format() -- confirm
    against the surrounding file.
    """
    return format('%.12g', val)
[ "def", "str", "(", "val", ")", ":", "return", "format", "(", "'%.12g'", ",", "val", ")" ]
convert float to string .
train
false
36,505
def c_looks(client):
    """Build 'look <target>' commands for the client.

    Prefers the client's objects, falls back to its exits, and finally to
    a bare 'look' when neither is available.
    """
    targets = client.objs or client.exits
    if targets:
        return [('look %s' % target) for target in targets]
    return 'look'
[ "def", "c_looks", "(", "client", ")", ":", "cmds", "=", "[", "(", "'look %s'", "%", "obj", ")", "for", "obj", "in", "client", ".", "objs", "]", "if", "(", "not", "cmds", ")", ":", "cmds", "=", "[", "(", "'look %s'", "%", "exi", ")", "for", "exi...
looks at various objects .
train
false
36,506
@cronjobs.register
def collection_subscribers():
    """Cron job: recompute weekly and monthly subscriber counts per collection."""
    log.debug('Starting collection subscriber update...')
    # Gated behind a waffle switch so statistics processing can be disabled.
    if (not waffle.switch_is_active('local-statistics-processing')):
        return False
    cursor = connection.cursor()
    # Zero everything first so collections with no recent subscriptions reset.
    cursor.execute('\n        UPDATE collections SET weekly_subscribers = 0, monthly_subscribers = 0\n    ')
    # Recompute counts from subscriptions created in the last 7 / 31 days.
    cursor.execute('\n        UPDATE collections AS c\n        INNER JOIN (\n            SELECT\n                COUNT(collection_id) AS count,\n                collection_id\n            FROM collection_subscriptions\n            WHERE created >= DATE_SUB(CURDATE(), INTERVAL 7 DAY)\n            GROUP BY collection_id\n        ) AS weekly ON (c.id = weekly.collection_id)\n        INNER JOIN (\n            SELECT\n                COUNT(collection_id) AS count,\n                collection_id\n            FROM collection_subscriptions\n            WHERE created >= DATE_SUB(CURDATE(), INTERVAL 31 DAY)\n            GROUP BY collection_id\n        ) AS monthly ON (c.id = monthly.collection_id)\n        SET c.weekly_subscribers = weekly.count,\n            c.monthly_subscribers = monthly.count\n    ')
[ "@", "cronjobs", ".", "register", "def", "collection_subscribers", "(", ")", ":", "log", ".", "debug", "(", "'Starting collection subscriber update...'", ")", "if", "(", "not", "waffle", ".", "switch_is_active", "(", "'local-statistics-processing'", ")", ")", ":", ...
collection weekly and monthly subscriber counts .
train
false
36,508
def _parse_hit_or_query_line(line): try: (mark, id, desc) = line.split(' ', 2) except ValueError: (mark, id) = line.split(' ', 1) desc = '' return (id, desc)
[ "def", "_parse_hit_or_query_line", "(", "line", ")", ":", "try", ":", "(", "mark", ",", "id", ",", "desc", ")", "=", "line", ".", "split", "(", "' '", ",", "2", ")", "except", "ValueError", ":", "(", "mark", ",", "id", ")", "=", "line", ".", "spl...
parse the query: line of exonerate alignment outputs .
train
false
36,509
def _app_id(app_id): if (app_id[0] != '/'): app_id = '/{0}'.format(app_id) return app_id
[ "def", "_app_id", "(", "app_id", ")", ":", "if", "(", "app_id", "[", "0", "]", "!=", "'/'", ")", ":", "app_id", "=", "'/{0}'", ".", "format", "(", "app_id", ")", "return", "app_id" ]
make sure the app_id is in the correct format .
train
false
36,510
def is_connection_to_a_vcenter(service_instance):
    """Return True when *service_instance* is connected to a vCenter server,
    False when it is connected to an ESXi host (HostAgent).

    Raises VMwareApiError / VMwareRuntimeError for API faults, and
    VMwareApiError for any unrecognised apiType value.
    """
    try:
        api_type = service_instance.content.about.apiType
    except vim.fault.VimFault as exc:
        raise salt.exceptions.VMwareApiError(exc.msg)
    except vmodl.RuntimeFault as exc:
        raise salt.exceptions.VMwareRuntimeError(exc.msg)
    log.trace('api_type = {0}'.format(api_type))
    if (api_type == 'VirtualCenter'):
        return True
    elif (api_type == 'HostAgent'):
        return False
    else:
        raise salt.exceptions.VMwareApiError("Unexpected api type '{0}' . Supported types: 'VirtualCenter/HostAgent'".format(api_type))
[ "def", "is_connection_to_a_vcenter", "(", "service_instance", ")", ":", "try", ":", "api_type", "=", "service_instance", ".", "content", ".", "about", ".", "apiType", "except", "vim", ".", "fault", ".", "VimFault", "as", "exc", ":", "raise", "salt", ".", "ex...
function that returns true if the connection is made to a vcenter server and false if the connection is made to an esxi host service_instance the service instance from which to obtain managed object references .
train
false
36,511
def get_ppi():
    """Fetch China PPI (producer price index) macro data from Sina.

    Returns a DataFrame with columns per vs.PPI_COLS ('month' plus the
    numeric index values, converted to float).
    """
    rdint = vs.random()
    request = Request((vs.MACRO_URL % (vs.P_TYPE['http'], vs.DOMAINS['sina'], rdint, vs.MACRO_TYPE[1], 3, 600, rdint)))
    text = urlopen(request, timeout=10).read()
    # Feed is GBK-encoded; decode explicitly on Python 3 only.
    text = (text.decode('gbk') if ct.PY3 else text)
    # Extract the JS 'data:' payload from the jsonp-style response body.
    regSym = re.compile('\\,count:(.*?)\\}')
    datastr = regSym.findall(text)
    datastr = datastr[0]
    datastr = datastr.split('data:')[1]
    js = json.loads(datastr)
    df = pd.DataFrame(js, columns=vs.PPI_COLS)
    for i in df.columns:
        # Normalise nulls to NaN; every column except 'month' is numeric.
        df[i] = df[i].apply((lambda x: np.where((x is None), np.NaN, x)))
        if (i != 'month'):
            df[i] = df[i].astype(float)
    return df
[ "def", "get_ppi", "(", ")", ":", "rdint", "=", "vs", ".", "random", "(", ")", "request", "=", "Request", "(", "(", "vs", ".", "MACRO_URL", "%", "(", "vs", ".", "P_TYPE", "[", "'http'", "]", ",", "vs", ".", "DOMAINS", "[", "'sina'", "]", ",", "r...
return dataframe month :统计月份 ppiip :工业品出厂价格指数 ppi :生产资料价格指数 qm:采掘工业价格指数 rmi:原材料工业价格指数 pi:加工工业价格指数 cg:生活资料价格指数 food:食品类价格指数 clothing:衣着类价格指数 roeu:一般日用品价格指数 dcg:耐用消费品价格指数 .
train
false
36,512
def _keys(d):
    """Return the keys of *d* as a list (materialised under Python 3)."""
    return list(d.keys()) if _PY3 else d.keys()
[ "def", "_keys", "(", "d", ")", ":", "if", "_PY3", ":", "return", "list", "(", "d", ".", "keys", "(", ")", ")", "else", ":", "return", "d", ".", "keys", "(", ")" ]
return a list of the keys of c{d} .
train
false
36,513
def is_datetimetz(array):
    """Return whether *array* is a timezone-aware datetime array."""
    if isinstance(array, ABCDatetimeIndex) and getattr(array, 'tz', None) is not None:
        return True
    return is_datetime64tz_dtype(array)
[ "def", "is_datetimetz", "(", "array", ")", ":", "return", "(", "(", "isinstance", "(", "array", ",", "ABCDatetimeIndex", ")", "and", "(", "getattr", "(", "array", ",", "'tz'", ",", "None", ")", "is", "not", "None", ")", ")", "or", "is_datetime64tz_dtype"...
return if we are a datetime with tz array .
train
false
36,514
@pytest.mark.skipif(str(u'sys.version_info >= (2, 7, 3)'))
def test_pickle_compound_fallback():
    """Pickling a dynamically generated compound model should raise
    RuntimeError on Python versions older than 2.7.3 (pickle fallback).
    """
    gg = (Gaussian1D + Gaussian1D)()
    with pytest.raises(RuntimeError):
        pickle.dumps(gg)
[ "@", "pytest", ".", "mark", ".", "skipif", "(", "str", "(", "u'sys.version_info >= (2, 7, 3)'", ")", ")", "def", "test_pickle_compound_fallback", "(", ")", ":", "gg", "=", "(", "Gaussian1D", "+", "Gaussian1D", ")", "(", ")", "with", "pytest", ".", "raises", ...
test fallback for pickling compound model on old versions of python affected by URL .
train
false
36,515
def requireInternetAccess(forceCheck=False):
    """Ensure internet access is available.

    Returns True when the check passes; logs and raises
    NoInternetAccessError otherwise.
    """
    if haveInternetAccess(forceCheck=forceCheck):
        return True
    msg = 'Internet access required but not detected.'
    logging.error(msg)
    raise NoInternetAccessError(msg)
[ "def", "requireInternetAccess", "(", "forceCheck", "=", "False", ")", ":", "if", "(", "not", "haveInternetAccess", "(", "forceCheck", "=", "forceCheck", ")", ")", ":", "msg", "=", "'Internet access required but not detected.'", "logging", ".", "error", "(", "msg",...
checks for access to the internet .
train
false
36,516
def get_installed_repository(app, tool_shed, name, owner, changeset_revision=None, installed_changeset_revision=None):
    """Return the first ToolShedRepository record matching the coordinates.

    changeset_revision and installed_changeset_revision are only added to
    the filter when supplied.
    """
    table = app.install_model.ToolShedRepository.table
    tool_shed = common_util.remove_protocol_from_tool_shed_url(tool_shed)
    criteria = [
        table.c.tool_shed == tool_shed,
        table.c.name == name,
        table.c.owner == owner,
    ]
    if changeset_revision is not None:
        criteria.append(table.c.changeset_revision == changeset_revision)
    if installed_changeset_revision is not None:
        criteria.append(table.c.installed_changeset_revision == installed_changeset_revision)
    query = app.install_model.context.query(app.install_model.ToolShedRepository)
    return query.filter(and_(*criteria)).first()
[ "def", "get_installed_repository", "(", "app", ",", "tool_shed", ",", "name", ",", "owner", ",", "changeset_revision", "=", "None", ",", "installed_changeset_revision", "=", "None", ")", ":", "query", "=", "app", ".", "install_model", ".", "context", ".", "que...
return a tool shed repository database record defined by the combination of a toolshed .
train
false
36,517
def _run_horcmstart(inst):
    """Start the CCI (HORCM) instance *inst*; return the command's stdout."""
    stdout = utils.execute('horcmstart.sh', inst)[0]
    return stdout
[ "def", "_run_horcmstart", "(", "inst", ")", ":", "result", "=", "utils", ".", "execute", "(", "'horcmstart.sh'", ",", "inst", ")", "return", "result", "[", "0", "]" ]
start the cci instance and return 0 if successful .
train
false
36,518
def append_crypto_meta(value, crypto_meta):
    """Serialise *crypto_meta* and append it to *value* as a swift_meta param."""
    serialized = dump_crypto_meta(crypto_meta)
    return '%s; swift_meta=%s' % (value, serialized)
[ "def", "append_crypto_meta", "(", "value", ",", "crypto_meta", ")", ":", "return", "(", "'%s; swift_meta=%s'", "%", "(", "value", ",", "dump_crypto_meta", "(", "crypto_meta", ")", ")", ")" ]
serialize and append crypto metadata to an encrypted value .
train
false
36,519
def processTrustResult(request):
    """Handle the user's trust decision and answer the pending OpenID request.

    When the user allowed the request, attach demo Simple Registration
    (SREG) data and a PAPE response before redirecting back to the RP.
    """
    openid_request = getRequest(request)
    # The identity URL this server asserts for the user.
    response_identity = getViewURL(request, idPage)
    allowed = ('allow' in request.POST)
    openid_response = openid_request.answer(allowed, identity=response_identity)
    if allowed:
        # Example profile data; a real server would use the account's values.
        sreg_data = {'fullname': 'Example User', 'nickname': 'example', 'dob': '1970-01-01', 'email': 'invalid@example.com', 'gender': 'F', 'postcode': '12345', 'country': 'ES', 'language': 'eu', 'timezone': 'America/New_York'}
        sreg_req = sreg.SRegRequest.fromOpenIDRequest(openid_request)
        sreg_resp = sreg.SRegResponse.extractResponse(sreg_req, sreg_data)
        openid_response.addExtension(sreg_resp)
        pape_response = pape.Response()
        pape_response.setAuthLevel(pape.LEVELS_NIST, 0)
        openid_response.addExtension(pape_response)
    return displayResponse(request, openid_response)
[ "def", "processTrustResult", "(", "request", ")", ":", "openid_request", "=", "getRequest", "(", "request", ")", "response_identity", "=", "getViewURL", "(", "request", ",", "idPage", ")", "allowed", "=", "(", "'allow'", "in", "request", ".", "POST", ")", "o...
handle the result of a trust decision and respond to the rp accordingly .
train
true
36,520
def sens_at_k(counts, noise_eps, l, k):
    """Return the sensitivity of the log moment-generating function at distance k.

    Moves k votes from the top class to the runner-up, computes the logmgf,
    then moves one more vote and returns the resulting change.  Returns 0
    when l is too large for the computation or the margin is below k.
    """
    counts_sorted = sorted(counts, reverse=True)
    if (((0.5 * noise_eps) * l) > 1):
        print 'l too large to compute sensitivity'
        return 0
    # NOTE(review): this margin test reads the UNSORTED counts while the
    # arithmetic below uses the sorted copy -- presumably the caller passes
    # counts already ordered; confirm.
    if (counts[0] < (counts[1] + k)):
        return 0
    counts_sorted[0] -= k
    counts_sorted[1] += k
    val = logmgf_from_counts(counts_sorted, noise_eps, l)
    counts_sorted[0] -= 1
    counts_sorted[1] += 1
    val_changed = logmgf_from_counts(counts_sorted, noise_eps, l)
    return (val_changed - val)
[ "def", "sens_at_k", "(", "counts", ",", "noise_eps", ",", "l", ",", "k", ")", ":", "counts_sorted", "=", "sorted", "(", "counts", ",", "reverse", "=", "True", ")", "if", "(", "(", "(", "0.5", "*", "noise_eps", ")", "*", "l", ")", ">", "1", ")", ...
return sensitivity at distance k .
train
false
36,523
def track_utc_time_change(year=None, month=None, day=None, hour=None, minute=None, second=None):
    """Decorator factory registering the decorated action for UTC time-change tracking."""
    def track_utc_time_change_decorator(action):
        """Decorator to track time changes."""
        bound_action = functools.partial(action, HASS)
        event.track_utc_time_change(HASS, bound_action, year, month, day, hour, minute, second)
        return action
    return track_utc_time_change_decorator
[ "def", "track_utc_time_change", "(", "year", "=", "None", ",", "month", "=", "None", ",", "day", "=", "None", ",", "hour", "=", "None", ",", "minute", "=", "None", ",", "second", "=", "None", ")", ":", "def", "track_utc_time_change_decorator", "(", "acti...
decorator factory to track time changes .
train
false
36,524
def merge_endpoints(defaults, additions):
    """Merge per-service region data from *additions* into *defaults*.

    Mutates *defaults* in place (existing regions are overwritten) and
    returns it for convenience.
    """
    for service, region_info in additions.items():
        service_map = defaults.setdefault(service, {})
        service_map.update(region_info)
    return defaults
[ "def", "merge_endpoints", "(", "defaults", ",", "additions", ")", ":", "for", "(", "service", ",", "region_info", ")", "in", "additions", ".", "items", "(", ")", ":", "defaults", ".", "setdefault", "(", "service", ",", "{", "}", ")", "defaults", "[", "...
given an existing set of endpoint data .
train
false
36,525
def get_resource_image(status, type):
    """Return the image URL registered for a resource's current state.

    Builds a '<type>_<status>' key and looks it up in the module-level
    resource_images mapping; returns None when no image is registered.
    Fix: direct dict lookup replaces the original's linear scan over every
    key (same result, O(1)).
    """
    resource_type = get_resource_type(type)
    resource_status = get_resource_status(status)
    resource_state = (resource_type + '_') + resource_status
    return resource_images.get(resource_state)
[ "def", "get_resource_image", "(", "status", ",", "type", ")", ":", "resource_type", "=", "get_resource_type", "(", "type", ")", "resource_status", "=", "get_resource_status", "(", "status", ")", "resource_state", "=", "(", "(", "resource_type", "+", "'_'", ")", ...
sets the image url and in_progress action sw based on status .
train
false
36,526
def _fk4_B_matrix(obstime):
    """Correction matrix for FK4 transformations.

    FK4 is a rotating frame, so the B1950->J2000 matrix gains a small
    time-dependent correction term (see Murray 1989, eqn 29).  T is the
    epoch offset from B1950.0 in Julian centuries.
    """
    T = ((obstime.jyear - 1950.0) / 100.0)
    # Array-valued epochs get trailing unit axes so they broadcast
    # against the (3, 3) correction matrix.
    if getattr(T, u'shape', ()):
        T.shape += (1, 1)
    return (_B1950_TO_J2000_M + (_FK4_CORR * T))
[ "def", "_fk4_B_matrix", "(", "obstime", ")", ":", "T", "=", "(", "(", "obstime", ".", "jyear", "-", "1950.0", ")", "/", "100.0", ")", "if", "getattr", "(", "T", ",", "u'shape'", ",", "(", ")", ")", ":", "T", ".", "shape", "+=", "(", "1", ",", ...
this is a correction term in the fk4 transformations because fk4 is a rotating system - see murray 89 eqn 29 .
train
false
36,527
def arm_freq_config_set(kodi_setting, all_settings):
    """Drop the arm_freq config line when it matches the board's default.

    Returns 'remove_this_line' when the requested frequency equals the
    default for the detected Raspberry Pi model, else the setting itself.
    """
    try:
        board = PiVersion()
    except IOError:
        board = 'PiB'
    # Default frequencies per board model.
    defaults = {'PiB': 700, 'Pi2': 900}
    if board in defaults and int(kodi_setting) == defaults[board]:
        return 'remove_this_line'
    return kodi_setting
[ "def", "arm_freq_config_set", "(", "kodi_setting", ",", "all_settings", ")", ":", "try", ":", "version", "=", "PiVersion", "(", ")", "except", "IOError", ":", "version", "=", "'PiB'", "if", "(", "version", "==", "'PiB'", ")", ":", "if", "(", "int", "(", ...
checks if the frequency setting is the same as the default pi setting .
train
false
36,529
def sorted_tree_items(entries, name_order):
    """Iterate over a tree-entries dict, yielding TreeEntry tuples in sorted order.

    *name_order* selects plain name ordering instead of git's tree ordering.
    Raises TypeError when an entry's SHA is not bytes.
    """
    sort_key = key_entry_name_order if name_order else key_entry
    for name, entry in sorted(entries.items(), key=sort_key):
        mode, hexsha = entry
        mode = int(mode)
        if not isinstance(hexsha, bytes):
            raise TypeError('Expected bytes for SHA, got %r' % hexsha)
        yield TreeEntry(name, mode, hexsha)
[ "def", "sorted_tree_items", "(", "entries", ",", "name_order", ")", ":", "key_func", "=", "(", "(", "name_order", "and", "key_entry_name_order", ")", "or", "key_entry", ")", "for", "(", "name", ",", "entry", ")", "in", "sorted", "(", "entries", ".", "items...
iterate over a tree entries dictionary .
train
false
36,530
@contextmanager
def control_stdin(input=None):
    """Temporarily replace sys.stdin with a StringIO fed from *input*."""
    original_stdin = sys.stdin
    sys.stdin = StringIO(input)
    if six.PY2:
        # Fake the encoding attribute the real stdin carries under Python 2.
        sys.stdin.encoding = 'utf-8'
    try:
        yield sys.stdin
    finally:
        sys.stdin = original_stdin
[ "@", "contextmanager", "def", "control_stdin", "(", "input", "=", "None", ")", ":", "org", "=", "sys", ".", "stdin", "sys", ".", "stdin", "=", "StringIO", "(", "input", ")", "if", "six", ".", "PY2", ":", "sys", ".", "stdin", ".", "encoding", "=", "...
sends input to stdin .
train
false
36,532
def get_associations(context, qos_specs_id):
    """Get all volume-type associations of the given QoS specs.

    Returns a list of dicts with association_type, name and id; raises
    CinderException when the database lookup fails.
    """
    try:
        types = objects.VolumeTypeList.get_all_types_for_qos(context, qos_specs_id)
    except db_exc.DBError:
        LOG.exception(_LE('DB error:'))
        msg = (_('Failed to get all associations of qos specs %s') % qos_specs_id)
        LOG.warning(msg)
        raise exception.CinderException(message=msg)
    result = []
    for vol_type in types:
        result.append({'association_type': 'volume_type', 'name': vol_type.name, 'id': vol_type.id})
    return result
[ "def", "get_associations", "(", "context", ",", "qos_specs_id", ")", ":", "try", ":", "types", "=", "objects", ".", "VolumeTypeList", ".", "get_all_types_for_qos", "(", "context", ",", "qos_specs_id", ")", "except", "db_exc", ".", "DBError", ":", "LOG", ".", ...
get all associations of given qos specs .
train
false
36,533
def validate_distplot(hist_data, curve_type):
    """Distplot-specific validations.

    Raises PlotlyError when hist_data is not a list of list-like datasets
    or curve_type is not 'kde'/'normal'; raises ImportError without scipy.
    """
    # Accept lists plus ndarray/Series when the optional libs are present.
    hist_data_types = (list,)
    if np:
        hist_data_types += (np.ndarray,)
    if pd:
        hist_data_types += (pd.core.series.Series,)
    if (not isinstance(hist_data[0], hist_data_types)):
        raise exceptions.PlotlyError('Oops, this function was written to handle multiple datasets, if you want to plot just one, make sure your hist_data variable is still a list of lists, i.e. x = [1, 2, 3] -> x = [[1, 2, 3]]')
    curve_opts = ('kde', 'normal')
    if (curve_type not in curve_opts):
        raise exceptions.PlotlyError("curve_type must be defined as 'kde' or 'normal'")
    if (not scipy):
        raise ImportError('FigureFactory.create_distplot requires scipy')
[ "def", "validate_distplot", "(", "hist_data", ",", "curve_type", ")", ":", "hist_data_types", "=", "(", "list", ",", ")", "if", "np", ":", "hist_data_types", "+=", "(", "np", ".", "ndarray", ",", ")", "if", "pd", ":", "hist_data_types", "+=", "(", "pd", ...
distplot-specific validations :raises: if hist_data is not a list of lists :raises: if curve_type is not valid .
train
false
36,534
def wait_ready_prefix(popen, prefix):
    """Block until a line on *popen*'s stderr starts with *prefix*.

    Fix: the original spun forever when the process exited before the
    prefix appeared, because readline() returns '' at EOF; that case now
    raises RuntimeError instead.
    """
    while True:
        line = popen.stderr.readline()
        if not line:
            raise RuntimeError('process ended before emitting a %r line' % (prefix,))
        if line.startswith(prefix):
            return
[ "def", "wait_ready_prefix", "(", "popen", ",", "prefix", ")", ":", "emulator_ready", "=", "False", "while", "(", "not", "emulator_ready", ")", ":", "emulator_ready", "=", "popen", ".", "stderr", ".", "readline", "(", ")", ".", "startswith", "(", "prefix", ...
wait until the a process encounters a line with matching prefix .
train
false
36,535
@g.stats.amqp_processor('cloudsearch_changes')
def _run_changed(msgs, chan):
    """Consume the cloudsearch_changes queue: upload changed links and
    subreddits to CloudSearch and report timing statistics."""
    start = datetime.now(g.tz)
    # Each message body is a pickled fullname of a changed thing.
    changed = [pickle.loads(msg.body) for msg in msgs]
    link_fns = LinkUploader.desired_fullnames(changed)
    sr_fns = SubredditUploader.desired_fullnames(changed)
    link_uploader = LinkUploader(g.CLOUDSEARCH_DOC_API, fullnames=link_fns)
    subreddit_uploader = SubredditUploader(g.CLOUDSEARCH_SUBREDDIT_DOC_API, fullnames=sr_fns)
    link_time = link_uploader.inject()
    subreddit_time = subreddit_uploader.inject()
    cloudsearch_time = (link_time + subreddit_time)
    totaltime = (datetime.now(g.tz) - start).total_seconds()
    print ('%s: %d messages in %.2fs seconds (%.2fs secs waiting on cloudsearch); %d duplicates, %s remaining)' % (start, len(changed), totaltime, cloudsearch_time, (len(changed) - len((link_fns | sr_fns))), msgs[(-1)].delivery_info.get('message_count', 'unknown')))
[ "@", "g", ".", "stats", ".", "amqp_processor", "(", "'cloudsearch_changes'", ")", "def", "_run_changed", "(", "msgs", ",", "chan", ")", ":", "start", "=", "datetime", ".", "now", "(", "g", ".", "tz", ")", "changed", "=", "[", "pickle", ".", "loads", ...
consume the cloudsearch_changes queue .
train
false
36,536
def outparam(key, type_=None):
    """Create an OUT parameter for use in stored procedure/function calls.

    The bind parameter is marked isoutparam so the dialect treats it as an
    output argument rather than an input value.
    """
    return BindParameter(key, None, type_=type_, unique=False, isoutparam=True)
[ "def", "outparam", "(", "key", ",", "type_", "=", "None", ")", ":", "return", "BindParameter", "(", "key", ",", "None", ",", "type_", "=", "type_", ",", "unique", "=", "False", ",", "isoutparam", "=", "True", ")" ]
create an out parameter for usage in functions .
train
false
36,539
def find_stim_steps(raw, pad_start=None, pad_stop=None, merge=0, stim_channel=None):
    """Find all steps in data from a stim channel of *raw*.

    Delegates to _find_stim_steps after selecting and sanitising the stim
    channel data; raises ValueError when no stim channel is found.
    """
    stim_channel = _get_stim_channel(stim_channel, raw.info)
    picks = pick_channels(raw.info['ch_names'], include=stim_channel)
    if (len(picks) == 0):
        raise ValueError('No stim channel found to extract event triggers.')
    (data, _) = raw[picks, :]
    # Some acquisition systems emit negative trigger values; rectify them.
    if np.any((data < 0)):
        warn('Trigger channel contains negative values, using absolute value.')
        data = np.abs(data)
    data = data.astype(np.int)
    return _find_stim_steps(data, raw.first_samp, pad_start=pad_start, pad_stop=pad_stop, merge=merge)
[ "def", "find_stim_steps", "(", "raw", ",", "pad_start", "=", "None", ",", "pad_stop", "=", "None", ",", "merge", "=", "0", ",", "stim_channel", "=", "None", ")", ":", "stim_channel", "=", "_get_stim_channel", "(", "stim_channel", ",", "raw", ".", "info", ...
find all steps in data from a stim channel .
train
false
36,540
def validate_backup_window(window):
    """Validate an RDS PreferredBackupWindow string ('hh24:mi-hh24:mi').

    Returns the window unchanged; raises ValueError on bad format or a
    window shorter than 30 minutes.  Fix: the duration is computed modulo
    24h so windows crossing midnight (e.g. '23:50-00:05') are measured
    correctly -- the original abs() arithmetic accepted such windows even
    when they were under 30 minutes long.
    """
    hour = '[01]?[0-9]|2[0-3]'
    minute = '[0-5][0-9]'
    r = ('(?P<start_hour>%s):(?P<start_minute>%s)-(?P<end_hour>%s):(?P<end_minute>%s)' % (hour, minute, hour, minute))
    range_regex = re.compile(r)
    m = range_regex.match(window)
    if not m:
        raise ValueError('DBInstance PreferredBackupWindow must be in the format: hh24:mi-hh24:mi')
    start_ts = (int(m.group('start_hour')) * 60) + int(m.group('start_minute'))
    end_ts = (int(m.group('end_hour')) * 60) + int(m.group('end_minute'))
    # Modulo keeps midnight-crossing windows positive and correctly sized.
    if (end_ts - start_ts) % (24 * 60) < 30:
        raise ValueError('DBInstance PreferredBackupWindow must be at least 30 minutes long.')
    return window
[ "def", "validate_backup_window", "(", "window", ")", ":", "hour", "=", "'[01]?[0-9]|2[0-3]'", "minute", "=", "'[0-5][0-9]'", "r", "=", "(", "'(?P<start_hour>%s):(?P<start_minute>%s)-(?P<end_hour>%s):(?P<end_minute>%s)'", "%", "(", "hour", ",", "minute", ",", "hour", ","...
validate preferredbackupwindow for dbinstance .
train
true
36,541
def parse_db_config(config_key='sqlalchemy.url'):
    """Parse a database connection URL from CKAN config into its parts.

    Returns a dict with db_type, db_user, db_pass, db_host, db_port and
    db_name; raises Exception when the URL does not look like a DSN.
    """
    from ckan.common import config
    url = config[config_key]
    pattern = (
        '^\\s*(?P<db_type>\\w*)'
        '://'
        '(?P<db_user>[^:]*)'
        ':?'
        '(?P<db_pass>[^@]*)'
        '@'
        '(?P<db_host>[^/:]*)'
        ':?'
        '(?P<db_port>[^/]*)'
        '/'
        '(?P<db_name>[\\w.-]*)'
    )
    match = re.match(pattern, url)
    if not match:
        raise Exception(('Could not extract db details from url: %r' % url))
    return match.groupdict()
[ "def", "parse_db_config", "(", "config_key", "=", "'sqlalchemy.url'", ")", ":", "from", "ckan", ".", "common", "import", "config", "url", "=", "config", "[", "config_key", "]", "regex", "=", "[", "'^\\\\s*(?P<db_type>\\\\w*)'", ",", "'://'", ",", "'(?P<db_user>[...
takes a config key for a database connection url and parses it into a dictionary .
train
false
36,542
def unpack_to_nibbles(bindata):
    """Decode hex-prefix encoded *bindata* back into a list of nibbles.

    The leading flags nibble encodes payload oddness (bit 0) and whether a
    terminator should be appended (bit 1).
    """
    nibbles = bin_to_nibbles(bindata)
    flags = nibbles[0]
    if flags & 2:
        nibbles.append(NIBBLE_TERMINATOR)
    # Odd payloads keep the nibble right after the flag; even ones also skip a pad.
    return nibbles[1:] if (flags & 1) == 1 else nibbles[2:]
[ "def", "unpack_to_nibbles", "(", "bindata", ")", ":", "o", "=", "bin_to_nibbles", "(", "bindata", ")", "flags", "=", "o", "[", "0", "]", "if", "(", "flags", "&", "2", ")", ":", "o", ".", "append", "(", "NIBBLE_TERMINATOR", ")", "if", "(", "(", "fla...
unpack packed binary data to nibbles .
train
true
36,543
def print_class(class_name):
    """Print the source of *class_name* from the file being executed.

    Reads sys.argv[0] and prints everything from the class statement up to
    the next top-level (column-0) statement or the end of the file.
    Fixes: the class name is re.escape()d so regex metacharacters cannot
    corrupt the pattern, and an end-of-string alternative in the lookahead
    lets a class that closes the file still be found (the original
    required a following non-indented line).
    """
    filename = sys.argv[0]
    with open(filename) as f:
        data = f.read()
    regex = ('^(class ' + re.escape(class_name)) + '\\b.*?)(?=^\\S|\\Z)'
    # ^ non-greedy body capture, terminated by the next top-level line or EOF
    match = re.search(regex, data, flags=(re.MULTILINE | re.DOTALL))
    if match:
        print(match.group(1))
[ "def", "print_class", "(", "class_name", ")", ":", "filename", "=", "sys", ".", "argv", "[", "0", "]", "with", "open", "(", "filename", ")", "as", "f", ":", "data", "=", "f", ".", "read", "(", ")", "regex", "=", "(", "(", "'^(class '", "+", "clas...
read this file and print the section with the class name specified .
train
false
36,545
def generateArgList(arglist):
    """Generate an arg list, wrapping tuple arguments in TupleArg.

    Returns (args + flattened_tuple_names, tuple_count).  Raises
    ValueError for an entry that is neither a str nor a tuple.  Fix: the
    original used a malformed three-expression ``raise`` (Python 2 syntax
    that treated the offending element as a traceback object); it now
    raises a normally constructed ValueError and fixes the 'unexpect' typo.
    """
    args = []
    extra = []
    count = 0
    for i in range(len(arglist)):
        elt = arglist[i]
        if isinstance(elt, str):
            args.append(elt)
        elif isinstance(elt, tuple):
            args.append(TupleArg((i * 2), elt))
            extra.extend(misc.flatten(elt))
            count = (count + 1)
        else:
            raise ValueError('unexpected argument type: %r' % (elt,))
    return ((args + extra), count)
[ "def", "generateArgList", "(", "arglist", ")", ":", "args", "=", "[", "]", "extra", "=", "[", "]", "count", "=", "0", "for", "i", "in", "range", "(", "len", "(", "arglist", ")", ")", ":", "elt", "=", "arglist", "[", "i", "]", "if", "isinstance", ...
generate an arg list marking tupleargs .
train
false
36,547
def _get_basic_auth_info(request): (meth, auth) = request.META['HTTP_AUTHORIZATION'].split() if (meth.lower() != 'basic'): raise ValueError (username, password) = base64.b64decode(auth).split(':') return (username, password)
[ "def", "_get_basic_auth_info", "(", "request", ")", ":", "(", "meth", ",", "auth", ")", "=", "request", ".", "META", "[", "'HTTP_AUTHORIZATION'", "]", ".", "split", "(", ")", "if", "(", "meth", ".", "lower", "(", ")", "!=", "'basic'", ")", ":", "rais...
grab basic auth info .
train
false
36,548
@treeio_login_required
@handle_response_format
def subscription_delete(request, subscription_id, response_format='html'):
    """Subscription delete view.

    Requires write permission on the subscription (or sales admin).  On
    POST, 'delete' either trashes or hard-deletes the record and redirects
    to the sales index; 'cancel' returns to the subscription view.
    Otherwise renders the confirmation page.
    """
    subscription = get_object_or_404(Subscription, pk=subscription_id)
    if ((not request.user.profile.has_permission(subscription, mode='w')) and (not request.user.profile.is_admin('treeio.sales'))):
        return user_denied(request, "You don't have access to this Sale Status", response_format)
    if request.POST:
        if ('delete' in request.POST):
            if ('trash' in request.POST):
                # Soft delete: flag as trashed instead of removing the row.
                subscription.trash = True
                subscription.save()
            else:
                subscription.delete()
            return HttpResponseRedirect(reverse('sales_index'))
        elif ('cancel' in request.POST):
            return HttpResponseRedirect(reverse('sales_subscription_view', args=[subscription.id]))
    return render_to_response('sales/subscription_delete', {'subscription': subscription}, context_instance=RequestContext(request), response_format=response_format)
[ "@", "treeio_login_required", "@", "handle_response_format", "def", "subscription_delete", "(", "request", ",", "subscription_id", ",", "response_format", "=", "'html'", ")", ":", "subscription", "=", "get_object_or_404", "(", "Subscription", ",", "pk", "=", "subscrip...
subscription delete .
train
false
36,549
def test_internet(host=u'8.8.8.8', port=53, timeout=3):
    """Return True when a TCP connection to (host, port) succeeds, else False.

    Defaults probe Google's public DNS on port 53.  Fixes: the socket is
    always closed (the original leaked it), and the failure path prints
    the exception itself instead of ``ex.message`` (removed in Python 3).
    """
    try:
        socket.setdefaulttimeout(timeout)
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        try:
            sock.connect((host, port))
        finally:
            sock.close()
        return True
    except Exception as ex:
        print(ex)
        return False
[ "def", "test_internet", "(", "host", "=", "u'8.8.8.8'", ",", "port", "=", "53", ",", "timeout", "=", "3", ")", ":", "try", ":", "socket", ".", "setdefaulttimeout", "(", "timeout", ")", "socket", ".", "socket", "(", "socket", ".", "AF_INET", ",", "socke...
returns true if internet is connected host: 8 .
train
false
36,550
def maximum_flow_value(G, s, t, capacity='capacity', flow_func=None, **kwargs): if (flow_func is None): if kwargs: raise nx.NetworkXError('You have to explicitly set a flow_func if you need to pass parameters via kwargs.') flow_func = default_flow_func if (not callable(flow_func)): raise nx.NetworkXError('flow_func has to be callable.') R = flow_func(G, s, t, capacity=capacity, value_only=True, **kwargs) return R.graph['flow_value']
[ "def", "maximum_flow_value", "(", "G", ",", "s", ",", "t", ",", "capacity", "=", "'capacity'", ",", "flow_func", "=", "None", ",", "**", "kwargs", ")", ":", "if", "(", "flow_func", "is", "None", ")", ":", "if", "kwargs", ":", "raise", "nx", ".", "N...
find the value of maximum single-commodity flow .
train
false
36,551
def has_userland_tool(executable): if os.path.isabs(executable): return os.path.exists(executable) else: for d in os.environ['PATH'].split(':'): f = os.path.join(d, executable) if os.path.exists(f): return True return False
[ "def", "has_userland_tool", "(", "executable", ")", ":", "if", "os", ".", "path", ".", "isabs", "(", "executable", ")", ":", "return", "os", ".", "path", ".", "exists", "(", "executable", ")", "else", ":", "for", "d", "in", "os", ".", "environ", "[",...
returns whether the system has a given executable .
train
false
36,552
def regex_search(value, regex, *args, **kwargs): groups = list() for arg in args: if arg.startswith('\\g'): match = re.match('\\\\g<(\\S+)>', arg).group(1) groups.append(match) elif arg.startswith('\\'): match = int(re.match('\\\\(\\d+)', arg).group(1)) groups.append(match) else: raise errors.AnsibleFilterError('Unknown argument') flags = 0 if kwargs.get('ignorecase'): flags |= re.I if kwargs.get('multiline'): flags |= re.M match = re.search(regex, value, flags) if match: if (not groups): return match.group() else: items = list() for item in groups: items.append(match.group(item)) return items
[ "def", "regex_search", "(", "value", ",", "regex", ",", "*", "args", ",", "**", "kwargs", ")", ":", "groups", "=", "list", "(", ")", "for", "arg", "in", "args", ":", "if", "arg", ".", "startswith", "(", "'\\\\g'", ")", ":", "match", "=", "re", "....
perform re .
train
false
36,553
def _toOPM(plate): d = dict(plate.qualifiers.items()) d[_csvData] = {} d[_csvData][_plate] = plate.id d[_measurements] = {} d[_measurements][_hour] = [] times = set() for (wid, w) in plate._wells.items(): d[_measurements][wid] = [] for hour in w._signals: times.add(hour) for hour in sorted(times): d[_measurements][_hour].append(hour) for (wid, w) in plate._wells.items(): if (hour in w._signals): d[_measurements][wid].append(w[hour]) else: d[_measurements][wid].append(float('nan')) return d
[ "def", "_toOPM", "(", "plate", ")", ":", "d", "=", "dict", "(", "plate", ".", "qualifiers", ".", "items", "(", ")", ")", "d", "[", "_csvData", "]", "=", "{", "}", "d", "[", "_csvData", "]", "[", "_plate", "]", "=", "plate", ".", "id", "d", "[...
transform a platerecord object into a dictionary .
train
false
36,555
def p_enum(p): val = _make_enum(p[2], p[4]) setattr(thrift_stack[(-1)], p[2], val) _add_thrift_meta('enums', val)
[ "def", "p_enum", "(", "p", ")", ":", "val", "=", "_make_enum", "(", "p", "[", "2", "]", ",", "p", "[", "4", "]", ")", "setattr", "(", "thrift_stack", "[", "(", "-", "1", ")", "]", ",", "p", "[", "2", "]", ",", "val", ")", "_add_thrift_meta", ...
enum : enum identifier { enum_seq } .
train
false
36,556
def _safe_del_key(dict_, key): if (key in dict_): del dict_[key]
[ "def", "_safe_del_key", "(", "dict_", ",", "key", ")", ":", "if", "(", "key", "in", "dict_", ")", ":", "del", "dict_", "[", "key", "]" ]
aux function .
train
false
36,558
@register(u'kill-line') def kill_line(event): buff = event.current_buffer if (event.arg < 0): deleted = buff.delete_before_cursor(count=(- buff.document.get_start_of_line_position())) elif (buff.document.current_char == u'\n'): deleted = buff.delete(1) else: deleted = buff.delete(count=buff.document.get_end_of_line_position()) event.cli.clipboard.set_text(deleted)
[ "@", "register", "(", "u'kill-line'", ")", "def", "kill_line", "(", "event", ")", ":", "buff", "=", "event", ".", "current_buffer", "if", "(", "event", ".", "arg", "<", "0", ")", ":", "deleted", "=", "buff", ".", "delete_before_cursor", "(", "count", "...
kill the text from the cursor to the end of the line .
train
true
36,560
def test_handle_default(): x = deepcopy(_handle_default('scalings')) y = _handle_default('scalings') z = _handle_default('scalings', dict(mag=1, grad=2)) w = _handle_default('scalings', {}) assert_equal(set(x.keys()), set(y.keys())) assert_equal(set(x.keys()), set(z.keys())) for key in x.keys(): assert_equal(x[key], y[key]) assert_equal(x[key], w[key]) if (key in ('mag', 'grad')): assert_true((x[key] != z[key])) else: assert_equal(x[key], z[key])
[ "def", "test_handle_default", "(", ")", ":", "x", "=", "deepcopy", "(", "_handle_default", "(", "'scalings'", ")", ")", "y", "=", "_handle_default", "(", "'scalings'", ")", "z", "=", "_handle_default", "(", "'scalings'", ",", "dict", "(", "mag", "=", "1", ...
test mutable default .
train
false
36,561
def Browser(driver_name='firefox', *args, **kwargs): try: driver = _DRIVERS[driver_name] except KeyError: raise DriverNotFoundError(('No driver for %s' % driver_name)) return driver(*args, **kwargs)
[ "def", "Browser", "(", "driver_name", "=", "'firefox'", ",", "*", "args", ",", "**", "kwargs", ")", ":", "try", ":", "driver", "=", "_DRIVERS", "[", "driver_name", "]", "except", "KeyError", ":", "raise", "DriverNotFoundError", "(", "(", "'No driver for %s'"...
returns a driver instance for the given name .
train
true
36,562
def ms_module(exploit): return {'1': 'exploit/multi/browser/adobe_flash_hacking_team_uaf', '2': 'exploit/multi/browser/adobe_flash_nellymoser_bof', '3': 'exploit/multi/browser/adobe_flash_shader_drawing_fill', '4': 'exploit/windows/browser/ms14_012_textrange', '5': 'exploit/windows/browser/ms14_012_cmarkup_uaf', '6': 'exploit/windows/browser/ms13_080_cdisplaypointer', '7': 'exploit/windows/browser/ie_setmousecapture_uaf', '8': 'exploit/multi/browser/java_jre17_jmxbean_2', '9': 'exploit/multi/browser/java_jre17_jmxbean', '10': 'exploit/windows/browser/ms13_009_ie_slayoutrun_uaf', '11': 'exploit/windows/browser/ie_cbutton_uaf', '12': 'exploit/multi/browser/java_jre17_exec', '13': 'exploit/windows/browser/ie_execcommand_uaf', '14': 'exploit/multi/browser/java_atomicreferencearray', '15': 'exploit/multi/browser/java_verifier_field_access', '16': 'exploit/windows/browser/ms12_037_same_id', '17': 'exploit/windows/browser/msxml_get_definition_code_exec', '18': 'exploit/windows/browser/adobe_flash_rtmp', '19': 'exploit/windows/browser/adobe_flash_mp4_cprt', '20': 'exploit/windows/browser/ms12_004_midi', '21': 'multi/browser/java_rhino\nset target 1', '22': 'windows/browser/ms11_050_mshtml_cobjectelement', '23': 'windows/browser/adobe_flashplayer_flash10o', '24': 'windows/browser/cisco_anyconnect_exec', '25': 'windows/browser/ms11_003_ie_css_import', '26': 'windows/browser/wmi_admintools', '27': 'windows/browser/ms10_090_ie_css_clip', '28': 'windows/browser/java_codebase_trust', '29': 'windows/browser/java_docbase_bof', '30': 'windows/browser/webdav_dll_hijacker', '31': 'windows/browser/adobe_flashplayer_avm', '32': 'windows/browser/adobe_shockwave_rcsl_corruption', '33': 'windows/browser/adobe_cooltype_sing', '34': 'windows/browser/apple_quicktime_marshaled_punk', '35': 'windows/browser/ms10_042_helpctr_xss_cmd_exec', '36': 'windows/browser/ms10_018_ie_behaviors', '37': 'windows/browser/ms10_002_aurora', '38': 'windows/browser/ms10_018_ie_tabular_activex', '39': 
'windows/browser/ms09_002_memory_corruption', '40': 'windows/browser/ms09_072_style_object', '41': 'windows/browser/ie_iscomponentinstalled', '42': 'windows/browser/ms08_078_xml_corruption', '43': 'windows/browser/ie_unsafe_scripting', '44': 'multi/browser/firefox_escape_retval', '45': 'windows/browser/mozilla_mchannel', '46': 'auxiliary/server/browser_autopwn'}.get(exploit, 'ERROR')
[ "def", "ms_module", "(", "exploit", ")", ":", "return", "{", "'1'", ":", "'exploit/multi/browser/adobe_flash_hacking_team_uaf'", ",", "'2'", ":", "'exploit/multi/browser/adobe_flash_nellymoser_bof'", ",", "'3'", ":", "'exploit/multi/browser/adobe_flash_shader_drawing_fill'", ",...
receives the input given by the user from gen_payload .
train
false
36,564
def checkDynamicContent(firstPage, secondPage): if kb.nullConnection: debugMsg = 'dynamic content checking skipped ' debugMsg += 'because NULL connection used' logger.debug(debugMsg) return if any(((page is None) for page in (firstPage, secondPage))): warnMsg = "can't check dynamic content " warnMsg += 'because of lack of page content' logger.critical(warnMsg) return seqMatcher = getCurrentThreadData().seqMatcher seqMatcher.set_seq1(firstPage) seqMatcher.set_seq2(secondPage) if (seqMatcher.quick_ratio() <= UPPER_RATIO_BOUND): findDynamicContent(firstPage, secondPage) count = 0 while (not Request.queryPage()): count += 1 if (count > conf.retries): warnMsg = 'target URL is too dynamic. ' warnMsg += "Switching to '--text-only' " logger.warn(warnMsg) conf.textOnly = True return warnMsg = 'target URL is heavily dynamic' warnMsg += '. sqlmap is going to retry the request' logger.critical(warnMsg) (secondPage, _) = Request.queryPage(content=True) findDynamicContent(firstPage, secondPage)
[ "def", "checkDynamicContent", "(", "firstPage", ",", "secondPage", ")", ":", "if", "kb", ".", "nullConnection", ":", "debugMsg", "=", "'dynamic content checking skipped '", "debugMsg", "+=", "'because NULL connection used'", "logger", ".", "debug", "(", "debugMsg", ")...
this function checks for the dynamic content in the provided pages .
train
false
36,565
def copula_mv_archimedean(u, transform, args=(), axis=(-1)): phi = transform.evaluate phi_inv = transform.inverse cdfv = phi_inv(phi(u, *args).sum(axis), *args) return cdfv
[ "def", "copula_mv_archimedean", "(", "u", ",", "transform", ",", "args", "=", "(", ")", ",", "axis", "=", "(", "-", "1", ")", ")", ":", "phi", "=", "transform", ".", "evaluate", "phi_inv", "=", "transform", ".", "inverse", "cdfv", "=", "phi_inv", "("...
generic multivariate archimedean copula .
train
false
36,566
def _predict_binary(estimator, X): if is_regressor(estimator): return estimator.predict(X) try: score = np.ravel(estimator.decision_function(X)) except (AttributeError, NotImplementedError): score = estimator.predict_proba(X)[:, 1] return score
[ "def", "_predict_binary", "(", "estimator", ",", "X", ")", ":", "if", "is_regressor", "(", "estimator", ")", ":", "return", "estimator", ".", "predict", "(", "X", ")", "try", ":", "score", "=", "np", ".", "ravel", "(", "estimator", ".", "decision_functio...
make predictions using a single binary estimator .
train
false
36,567
def print_rep_seqs(mapping, seqs, out_fp): out_fh = open((out_fp + '/prefix_dereplicated.fasta'), 'w') for s in get_representatives(mapping, seqs.iteritems()): out_fh.write(s.to_fasta()) out_fh.close()
[ "def", "print_rep_seqs", "(", "mapping", ",", "seqs", ",", "out_fp", ")", ":", "out_fh", "=", "open", "(", "(", "out_fp", "+", "'/prefix_dereplicated.fasta'", ")", ",", "'w'", ")", "for", "s", "in", "get_representatives", "(", "mapping", ",", "seqs", ".", ...
print the cluster seeds of a mapping to out_fp .
train
false
36,569
def ConfigureEmails(config): print '\n\n-=Monitoring/Email Domain=-\nEmails concerning alerts or updates must be sent to this domain.\n' domain = RetryQuestion('Email Domain e.g example.com', '^([\\.A-Za-z0-9-]+)*$', config_lib.CONFIG.Get('Logging.domain')) config.Set('Logging.domain', domain) print '\n\n-=Alert Email Address=-\nAddress where monitoring events get sent, e.g. crashed clients, broken server\netc.\n' email = RetryQuestion('Alert Email Address', '', ('grr-monitoring@%s' % domain)) config.Set('Monitoring.alert_email', email) print '\n\n-=Emergency Email Address=-\nAddress where high priority events such as an emergency ACL bypass are sent.\n' emergency_email = RetryQuestion('Emergency Access Email Address', '', ('grr-emergency@%s' % domain)) config.Set('Monitoring.emergency_access_email', emergency_email)
[ "def", "ConfigureEmails", "(", "config", ")", ":", "print", "'\\n\\n-=Monitoring/Email Domain=-\\nEmails concerning alerts or updates must be sent to this domain.\\n'", "domain", "=", "RetryQuestion", "(", "'Email Domain e.g example.com'", ",", "'^([\\\\.A-Za-z0-9-]+)*$'", ",", "conf...
configure email notification addresses .
train
false
36,570
def validate_feature(module, mode='show'): feature = module.params['feature'] feature_to_be_mapped = {'show': {'nv overlay': 'nve', 'vn-segment-vlan-based': 'vnseg_vlan', 'hsrp': 'hsrp_engine', 'fabric multicast': 'fabric_mcast', 'scp-server': 'scpServer', 'sftp-server': 'sftpServer', 'sla responder': 'sla_responder', 'sla sender': 'sla_sender', 'ssh': 'sshServer', 'tacacs+': 'tacacs', 'telnet': 'telnetServer', 'ethernet-link-oam': 'elo', 'port-security': 'eth_port_sec'}, 'config': {'nve': 'nv overlay', 'vnseg_vlan': 'vn-segment-vlan-based', 'hsrp_engine': 'hsrp', 'fabric_mcast': 'fabric multicast', 'scpServer': 'scp-server', 'sftpServer': 'sftp-server', 'sla_sender': 'sla sender', 'sla_responder': 'sla responder', 'sshServer': 'ssh', 'tacacs': 'tacacs+', 'telnetServer': 'telnet', 'elo': 'ethernet-link-oam', 'eth_port_sec': 'port-security'}} if (feature in feature_to_be_mapped[mode]): feature = feature_to_be_mapped[mode][feature] return feature
[ "def", "validate_feature", "(", "module", ",", "mode", "=", "'show'", ")", ":", "feature", "=", "module", ".", "params", "[", "'feature'", "]", "feature_to_be_mapped", "=", "{", "'show'", ":", "{", "'nv overlay'", ":", "'nve'", ",", "'vn-segment-vlan-based'", ...
some features may need to be mapped due to inconsistency between how they appear from "show feature" output and how they are configured .
train
false
36,571
def ParseNewPingMsg(msg): parsed = re.match(kNewPingMsgRe, msg) if (not parsed): return None try: return (parsed.group(1), parsed.group(2)) except IndexError as e: logging.warning(('RE matched "%s", but extracted wrong numbers of items: %r' % (msg, e))) return None
[ "def", "ParseNewPingMsg", "(", "msg", ")", ":", "parsed", "=", "re", ".", "match", "(", "kNewPingMsgRe", ",", "msg", ")", "if", "(", "not", "parsed", ")", ":", "return", "None", "try", ":", "return", "(", "parsed", ".", "group", "(", "1", ")", ",",...
attempt to parse the message for a ping .
train
false
36,572
def grouped_by_prefix(args, prefixes): group_args = [] current_scope = None for arg in args.strip().split(): assert (not arg.startswith('-')), 'REQUIRE: arg, not options' scope = select_prefix_for(arg, prefixes) if (scope != current_scope): if group_args: (yield ' '.join(group_args)) group_args = [] current_scope = scope group_args.append(arg) if group_args: (yield ' '.join(group_args))
[ "def", "grouped_by_prefix", "(", "args", ",", "prefixes", ")", ":", "group_args", "=", "[", "]", "current_scope", "=", "None", "for", "arg", "in", "args", ".", "strip", "(", ")", ".", "split", "(", ")", ":", "assert", "(", "not", "arg", ".", "startsw...
group behave args by scope into multiple test-runs .
train
false
36,574
def calculate_dist_vincenty(latitudes, longitudes): assert (len(latitudes) == len(longitudes)), 'latitudes and longitudes must be lists of exactly the same size' size = len(latitudes) dtx_mtx = zeros([size, size]) for i in range(size): for j in range(i, size): dtx_mtx[(i, j)] = dtx_mtx[(j, i)] = dist_vincenty(latitudes[i], longitudes[i], latitudes[j], longitudes[j]) return dtx_mtx
[ "def", "calculate_dist_vincenty", "(", "latitudes", ",", "longitudes", ")", ":", "assert", "(", "len", "(", "latitudes", ")", "==", "len", "(", "longitudes", ")", ")", ",", "'latitudes and longitudes must be lists of exactly the same size'", "size", "=", "len", "(",...
returns the distance matrix from calculating dist_vicenty latitudes .
train
false
36,575
def _get_volume_type_extra_spec(type_id, spec_key, possible_values=None, default_value=None): if (not type_id): return default_value spec_key = ('vmware:%s' % spec_key) spec_value = volume_types.get_volume_type_extra_specs(type_id, spec_key) if (not spec_value): LOG.debug('Returning default spec value: %s.', default_value) return default_value if (possible_values is None): return spec_value if (spec_value in possible_values): LOG.debug('Returning spec value %s', spec_value) return spec_value LOG.debug('Invalid spec value: %s specified.', spec_value)
[ "def", "_get_volume_type_extra_spec", "(", "type_id", ",", "spec_key", ",", "possible_values", "=", "None", ",", "default_value", "=", "None", ")", ":", "if", "(", "not", "type_id", ")", ":", "return", "default_value", "spec_key", "=", "(", "'vmware:%s'", "%",...
get extra spec value .
train
false
36,576
def _wider_test_jpeg(data): if (data[:2] == '\xff\xd8'): return 'jpeg'
[ "def", "_wider_test_jpeg", "(", "data", ")", ":", "if", "(", "data", "[", ":", "2", "]", "==", "'\\xff\\xd8'", ")", ":", "return", "'jpeg'" ]
test for a jpeg file following the unix file implementation which uses the magic bytes rather than just looking for the bytes that represent jfif or exif at a fixed position .
train
false
36,577
def findiso(flavor): url = isoURLs[flavor] name = path.basename(url) iso = path.join(VMImageDir, name) if ((not path.exists(iso)) or ((stat(iso)[ST_MODE] & 511) != 292)): log('* Retrieving', url) run(('curl -C - -o %s %s' % (iso, url))) result = run(('file ' + iso)) if (('ISO' not in result) and ('boot' not in result)): os.remove(iso) raise Exception(('findiso: could not download iso from ' + url)) log('* Write-protecting iso', iso) os.chmod(iso, 292) log('* Using iso', iso) return iso
[ "def", "findiso", "(", "flavor", ")", ":", "url", "=", "isoURLs", "[", "flavor", "]", "name", "=", "path", ".", "basename", "(", "url", ")", "iso", "=", "path", ".", "join", "(", "VMImageDir", ",", "name", ")", "if", "(", "(", "not", "path", ".",...
find iso .
train
false
36,578
def redirect_with_get(view_name, get_querydict, do_reverse=True): if do_reverse: url = reverse(view_name) else: url = view_name if get_querydict: return redirect(('%s?%s' % (url, get_querydict.urlencode(safe='/')))) return redirect(view_name)
[ "def", "redirect_with_get", "(", "view_name", ",", "get_querydict", ",", "do_reverse", "=", "True", ")", ":", "if", "do_reverse", ":", "url", "=", "reverse", "(", "view_name", ")", "else", ":", "url", "=", "view_name", "if", "get_querydict", ":", "return", ...
helper function to carry over get parameters across redirects using urlencode because the @login_required decorator generates next queryparams with / unencoded .
train
false
36,579
def check_zmq_version(min_version): sv = ver_str(min_version) try: (found, zmq_h) = find_zmq_version() sf = ver_str(found) if (found < min_version): print ('This pyzmq requires zeromq >= %s' % sv) print ('but it appears you are building against %s' % zmq_h) print ('which has zeromq %s' % sf) sys.exit(1) except IOError: msg = '\n'.join(["Couldn't find zmq.h to check for version compatibility.", "If you see 'undeclared identifier' errors, your ZeroMQ is likely too old.", ('This pyzmq requires zeromq >= %s' % sv)]) warn(msg) except IndexError: msg = '\n'.join(["Couldn't find ZMQ_VERSION macros in zmq.h to check for version compatibility.", 'This probably means that you have ZeroMQ <= 2.0.9', "If you see 'undeclared identifier' errors, your ZeroMQ is likely too old.", ('This pyzmq requires zeromq >= %s' % sv)]) warn(msg) except Exception: traceback.print_exc() msg = '\n'.join(['Unexpected Error checking for zmq version.', "If you see 'undeclared identifier' errors, your ZeroMQ is likely too old.", ('This pyzmq requires zeromq >= %s' % sv)]) warn(msg)
[ "def", "check_zmq_version", "(", "min_version", ")", ":", "sv", "=", "ver_str", "(", "min_version", ")", "try", ":", "(", "found", ",", "zmq_h", ")", "=", "find_zmq_version", "(", ")", "sf", "=", "ver_str", "(", "found", ")", "if", "(", "found", "<", ...
check that zmq .
train
false
36,580
def get_volume_group_info(vg): (out, err) = execute('vgs', '--noheadings', '--nosuffix', '--separator', '|', '--units', 'b', '-o', 'vg_size,vg_free', vg, run_as_root=True) info = out.split('|') if (len(info) != 2): raise RuntimeError((_('vg %s must be LVM volume group') % vg)) return {'total': int(info[0]), 'free': int(info[1]), 'used': (int(info[0]) - int(info[1]))}
[ "def", "get_volume_group_info", "(", "vg", ")", ":", "(", "out", ",", "err", ")", "=", "execute", "(", "'vgs'", ",", "'--noheadings'", ",", "'--nosuffix'", ",", "'--separator'", ",", "'|'", ",", "'--units'", ",", "'b'", ",", "'-o'", ",", "'vg_size,vg_free'...
return free/used/total space info for a volume group in bytes .
train
false
36,581
@click.command(u'clear-website-cache') @pass_context def clear_website_cache(context): import frappe.website.render for site in context.sites: try: frappe.init(site=site) frappe.connect() frappe.website.render.clear_cache() finally: frappe.destroy()
[ "@", "click", ".", "command", "(", "u'clear-website-cache'", ")", "@", "pass_context", "def", "clear_website_cache", "(", "context", ")", ":", "import", "frappe", ".", "website", ".", "render", "for", "site", "in", "context", ".", "sites", ":", "try", ":", ...
clear website cache .
train
false
36,582
def _remove_original_values(_config_vars): for k in list(_config_vars): if k.startswith(_INITPRE): del _config_vars[k]
[ "def", "_remove_original_values", "(", "_config_vars", ")", ":", "for", "k", "in", "list", "(", "_config_vars", ")", ":", "if", "k", ".", "startswith", "(", "_INITPRE", ")", ":", "del", "_config_vars", "[", "k", "]" ]
remove original unmodified values for testing .
train
false
36,583
def libvlc_media_discoverer_release(p_mdis): f = (_Cfunctions.get('libvlc_media_discoverer_release', None) or _Cfunction('libvlc_media_discoverer_release', ((1,),), None, None, MediaDiscoverer)) return f(p_mdis)
[ "def", "libvlc_media_discoverer_release", "(", "p_mdis", ")", ":", "f", "=", "(", "_Cfunctions", ".", "get", "(", "'libvlc_media_discoverer_release'", ",", "None", ")", "or", "_Cfunction", "(", "'libvlc_media_discoverer_release'", ",", "(", "(", "1", ",", ")", "...
release media discover object .
train
false
36,584
def vm_disk_snapshot_revert(name, kwargs=None, call=None): if (call != 'action'): raise SaltCloudSystemExit('The vm_disk_snapshot_revert action must be called with -a or --action.') if (kwargs is None): kwargs = {} disk_id = kwargs.get('disk_id', None) snapshot_id = kwargs.get('snapshot_id', None) if ((disk_id is None) or (snapshot_id is None)): raise SaltCloudSystemExit("The vm_disk_snapshot_revert function requires a 'disk_id' and a 'snapshot_id' to be provided.") (server, user, password) = _get_xml_rpc() auth = ':'.join([user, password]) vm_id = int(get_vm_id(kwargs={'name': name})) response = server.one.vm.disksnapshotrevert(auth, vm_id, int(disk_id), int(snapshot_id)) data = {'action': 'vm.disksnapshotrevert', 'deleted': response[0], 'snapshot_id': response[1], 'error_code': response[2]} return data
[ "def", "vm_disk_snapshot_revert", "(", "name", ",", "kwargs", "=", "None", ",", "call", "=", "None", ")", ":", "if", "(", "call", "!=", "'action'", ")", ":", "raise", "SaltCloudSystemExit", "(", "'The vm_disk_snapshot_revert action must be called with -a or --action.'...
reverts a disk state to a previously taken snapshot .
train
true
36,585
def test_ros_bad_ratio(): ratio = (-1.0) ros = RandomOverSampler(ratio=ratio, random_state=RND_SEED) assert_raises(ValueError, ros.fit_sample, X, Y) ratio = 100.0 ros = RandomOverSampler(ratio=ratio, random_state=RND_SEED) assert_raises(ValueError, ros.fit_sample, X, Y) ratio = 'rnd' ros = RandomOverSampler(ratio=ratio, random_state=RND_SEED) assert_raises(ValueError, ros.fit_sample, X, Y) ratio = [0.5, 0.5] ros = RandomOverSampler(ratio=ratio, random_state=RND_SEED) assert_raises(ValueError, ros.fit_sample, X, Y)
[ "def", "test_ros_bad_ratio", "(", ")", ":", "ratio", "=", "(", "-", "1.0", ")", "ros", "=", "RandomOverSampler", "(", "ratio", "=", "ratio", ",", "random_state", "=", "RND_SEED", ")", "assert_raises", "(", "ValueError", ",", "ros", ".", "fit_sample", ",", ...
test either if an error is raised with a wrong decimal value for the ratio .
train
false
36,588
@decorators.which('ssh-keygen') def get_known_host(user, hostname, config=None, port=None): full = _get_known_hosts_file(config=config, user=user) if isinstance(full, dict): return full ssh_hostname = _hostname_and_port_to_ssh_hostname(hostname, port) cmd = ['ssh-keygen', '-F', ssh_hostname, '-f', full] lines = __salt__['cmd.run'](cmd, ignore_retcode=True, python_shell=False).splitlines() known_hosts = list(_parse_openssh_output(lines)) return (known_hosts[0] if known_hosts else None)
[ "@", "decorators", ".", "which", "(", "'ssh-keygen'", ")", "def", "get_known_host", "(", "user", ",", "hostname", ",", "config", "=", "None", ",", "port", "=", "None", ")", ":", "full", "=", "_get_known_hosts_file", "(", "config", "=", "config", ",", "us...
return information about known host from the configfile .
train
false
36,589
def create_attach_volumes(name, kwargs, call=None, wait_to_finish=True): if (call != 'action'): raise SaltCloudSystemExit('The create_attach_volumes action must be called with -a or --action.') if (kwargs is None): kwargs = {} if isinstance(kwargs['volumes'], str): volumes = yaml.safe_load(kwargs['volumes']) else: volumes = kwargs['volumes'] conn = get_conn() ret = [] for volume in volumes: if ('disk_name' in volume): log.error('You cannot specify a disk_name. Only new volumes are allowed') return False volume.setdefault('logical_disk_size_in_gb', volume.get('size', 100)) volume.setdefault('host_caching', 'ReadOnly') volume.setdefault('lun', 0) volume.setdefault('media_link', (kwargs['media_link'][:(-4)] + '-disk-{0}.vhd'.format(volume['lun']))) volume.setdefault('disk_label', (kwargs['role_name'] + '-disk-{0}'.format(volume['lun']))) volume_dict = {'volume_name': volume['lun'], 'disk_label': volume['disk_label']} kwargs_add_data_disk = ['lun', 'host_caching', 'media_link', 'disk_label', 'disk_name', 'logical_disk_size_in_gb', 'source_media_link'] for key in (set(volume.keys()) - set(kwargs_add_data_disk)): del volume[key] attach = conn.add_data_disk(kwargs['service_name'], kwargs['deployment_name'], kwargs['role_name'], **volume) log.debug(attach) if attach: msg = '{0} attached to {1} (aka {2})'.format(volume_dict['volume_name'], kwargs['role_name'], name) log.info(msg) ret.append(msg) else: log.error('Error attaching {0} on Azure'.format(volume_dict)) return ret
[ "def", "create_attach_volumes", "(", "name", ",", "kwargs", ",", "call", "=", "None", ",", "wait_to_finish", "=", "True", ")", ":", "if", "(", "call", "!=", "'action'", ")", ":", "raise", "SaltCloudSystemExit", "(", "'The create_attach_volumes action must be calle...
create and attach multiple volumes to a node .
train
true
36,590
def teardown_module(): reload_module(db_replicator)
[ "def", "teardown_module", "(", ")", ":", "reload_module", "(", "db_replicator", ")" ]
test fixture run once after all tests of this module .
train
false
36,592
def apply_operators(e): e = e.expand() muls = e.atoms(Mul) subs_list = [(m, _apply_Mul(m)) for m in iter(muls)] return e.subs(subs_list)
[ "def", "apply_operators", "(", "e", ")", ":", "e", "=", "e", ".", "expand", "(", ")", "muls", "=", "e", ".", "atoms", "(", "Mul", ")", "subs_list", "=", "[", "(", "m", ",", "_apply_Mul", "(", "m", ")", ")", "for", "m", "in", "iter", "(", "mul...
take a sympy expression with operators and states and apply the operators .
train
false
36,593
def test_wrap_tuple(): wrapped = wrap_value((HyInteger(0),)) assert (type(wrapped) == HyList) assert (type(wrapped[0]) == HyInteger) assert (wrapped == HyList([HyInteger(0)]))
[ "def", "test_wrap_tuple", "(", ")", ":", "wrapped", "=", "wrap_value", "(", "(", "HyInteger", "(", "0", ")", ",", ")", ")", "assert", "(", "type", "(", "wrapped", ")", "==", "HyList", ")", "assert", "(", "type", "(", "wrapped", "[", "0", "]", ")", ...
test conversion of tuples .
train
false
36,595
def get_cifar100(withlabel=True, ndim=3, scale=1.0): raw = _retrieve_cifar_100() train = _preprocess_cifar(raw['train_x'], raw['train_y'], withlabel, ndim, scale) test = _preprocess_cifar(raw['test_x'], raw['test_y'], withlabel, ndim, scale) return (train, test)
[ "def", "get_cifar100", "(", "withlabel", "=", "True", ",", "ndim", "=", "3", ",", "scale", "=", "1.0", ")", ":", "raw", "=", "_retrieve_cifar_100", "(", ")", "train", "=", "_preprocess_cifar", "(", "raw", "[", "'train_x'", "]", ",", "raw", "[", "'train...
gets the cifar-100 dataset .
train
false
36,596
def merge_base(head, ref): return git.merge_base(head, ref)[STDOUT]
[ "def", "merge_base", "(", "head", ",", "ref", ")", ":", "return", "git", ".", "merge_base", "(", "head", ",", "ref", ")", "[", "STDOUT", "]" ]
given ref .
train
false
36,597
def chgid(name, gid): pre_gid = __salt__['file.group_to_gid'](name) if (gid == pre_gid): return True cmd = 'groupmod -g {0} {1}'.format(gid, name) __salt__['cmd.run'](cmd, python_shell=False) post_gid = __salt__['file.group_to_gid'](name) if (post_gid != pre_gid): return (post_gid == gid) return False
[ "def", "chgid", "(", "name", ",", "gid", ")", ":", "pre_gid", "=", "__salt__", "[", "'file.group_to_gid'", "]", "(", "name", ")", "if", "(", "gid", "==", "pre_gid", ")", ":", "return", "True", "cmd", "=", "'groupmod -g {0} {1}'", ".", "format", "(", "g...
change the default group of the user cli example: .
train
true
36,598
def test_arithmetic(): ev = read_evokeds(fname, condition=0) ev1 = EvokedArray(np.ones_like(ev.data), ev.info, ev.times[0], nave=20) ev2 = EvokedArray((- np.ones_like(ev.data)), ev.info, ev.times[0], nave=10) ev = combine_evoked([ev1, ev2], weights='nave') assert_equal(ev.nave, (ev1.nave + ev2.nave)) assert_allclose(ev.data, ((1.0 / 3.0) * np.ones_like(ev.data))) for weights in ('nave', 'equal', [0.5, 0.5]): ev = combine_evoked([ev1, ev1], weights=weights) assert_allclose(ev.data, ev1.data) assert_equal(ev.nave, (2 * ev1.nave)) ev = combine_evoked([ev1, (- ev1)], weights=weights) assert_allclose(ev.data, 0.0, atol=1e-20) assert_equal(ev.nave, (2 * ev1.nave)) ev = combine_evoked([ev1, (- ev1)], weights='equal') assert_allclose(ev.data, 0.0, atol=1e-20) assert_equal(ev.nave, (2 * ev1.nave)) ev = combine_evoked([ev1, (- ev2)], weights='equal') expected = int(round((1.0 / ((0.25 / ev1.nave) + (0.25 / ev2.nave))))) assert_equal(expected, 27) assert_equal(ev.nave, expected) old_comment1 = ev1.comment old_comment2 = ev2.comment ev1.comment = None ev = combine_evoked([ev1, (- ev2)], weights=[1, (-1)]) assert_equal(ev.comment.count('unknown'), 2) assert_true(('-unknown' in ev.comment)) assert_true((' + ' in ev.comment)) ev1.comment = old_comment1 ev2.comment = old_comment2 ev = combine_evoked([ev1, ev2], weights='equal') assert_allclose(ev.data, np.zeros_like(ev1.data)) ev = combine_evoked([ev1, ev2], weights=[1, 0]) assert_equal(ev.nave, ev1.nave) assert_allclose(ev.data, ev1.data) ev = combine_evoked([ev1, ev2], weights=[1, (-1)]) assert_allclose(ev.data, (2 * np.ones_like(ev1.data))) assert_raises(ValueError, combine_evoked, [ev1, ev2], weights='foo') assert_raises(ValueError, combine_evoked, [ev1, ev2], weights=[1]) (evoked1, evoked2) = read_evokeds(fname, condition=[0, 1], proj=True) ch_names = evoked1.ch_names[2:] evoked1.info['bads'] = ['EEG 008'] evoked1.drop_channels(evoked1.ch_names[:1]) evoked2.drop_channels(evoked2.ch_names[1:2]) gave = grand_average([evoked1, 
evoked2]) assert_equal(gave.data.shape, [len(ch_names), evoked1.data.shape[1]]) assert_equal(ch_names, gave.ch_names) assert_equal(gave.nave, 2) assert_raises(ValueError, grand_average, [1, evoked1])
[ "def", "test_arithmetic", "(", ")", ":", "ev", "=", "read_evokeds", "(", "fname", ",", "condition", "=", "0", ")", "ev1", "=", "EvokedArray", "(", "np", ".", "ones_like", "(", "ev", ".", "data", ")", ",", "ev", ".", "info", ",", "ev", ".", "times",...
test general arithmetic operations .
train
false
36,599
def hrm_programme_hours_month(row): try: thisdate = row['hrm_programme_hours.date'] except AttributeError: return current.messages['NONE'] if (not thisdate): return current.messages['NONE'] month = thisdate.month year = thisdate.year first = datetime.date(year, month, 1) return first.strftime('%y-%m')
[ "def", "hrm_programme_hours_month", "(", "row", ")", ":", "try", ":", "thisdate", "=", "row", "[", "'hrm_programme_hours.date'", "]", "except", "AttributeError", ":", "return", "current", ".", "messages", "[", "'NONE'", "]", "if", "(", "not", "thisdate", ")", ...
virtual field for hrm_programme_hours - returns the date of the first day of the month of this entry .
train
false
36,600
def user_remove(name, database=None, user=None, password=None, host=None, port=None): if (not user_exists(name, database, user, password, host, port)): if database: log.info("User '{0}' does not exist for DB '{1}'".format(name, database)) else: log.info("Cluster admin '{0}' does not exist".format(name)) return False client = _client(user=user, password=password, host=host, port=port) if (not database): return client.delete_cluster_admin(name) client.switch_database(database) return client.delete_database_user(name)
[ "def", "user_remove", "(", "name", ",", "database", "=", "None", ",", "user", "=", "None", ",", "password", "=", "None", ",", "host", "=", "None", ",", "port", "=", "None", ")", ":", "if", "(", "not", "user_exists", "(", "name", ",", "database", ",...
delete mysql user cli example: .
train
true
36,602
def get_filter_to_core_f(table, sample_ids=None, fraction_for_core=1.0): if (not (0.0 <= fraction_for_core <= 1.0)): raise ValueError(('invalid fraction_for_core passed to core filter: %1.2f is outside of range [0,1].' % fraction_for_core)) if (sample_ids is None): position_mask = array(([True] * len(table.ids()))) else: position_mask = array([(s in sample_ids) for s in table.ids()]) min_count = (fraction_for_core * position_mask.sum()) def f(values, obs_ids, obs_md): return (((values != 0) & position_mask).sum() >= min_count) return f
[ "def", "get_filter_to_core_f", "(", "table", ",", "sample_ids", "=", "None", ",", "fraction_for_core", "=", "1.0", ")", ":", "if", "(", "not", "(", "0.0", "<=", "fraction_for_core", "<=", "1.0", ")", ")", ":", "raise", "ValueError", "(", "(", "'invalid fra...
return function that filters a table to its core observations table: the biom-format table object to filter sample_ids: list of sample ids of interest for the core computation fraction_for_core: the fraction of the sample_ids that an observation must have a non-zero count for to be considered a core observation .
train
false
36,604
def Smooth(xs, sigma=2, **options): return ndimage.filters.gaussian_filter1d(xs, sigma, **options)
[ "def", "Smooth", "(", "xs", ",", "sigma", "=", "2", ",", "**", "options", ")", ":", "return", "ndimage", ".", "filters", ".", "gaussian_filter1d", "(", "xs", ",", "sigma", ",", "**", "options", ")" ]
smooths a numpy array with a gaussian filter .
train
false
36,605
def setPortAddress(address=888): global PORT if (isinstance(address, basestring) and address.startswith('0x')): address = int(address, 16) if (PORT is not None): del PORT try: PORT = ParallelPort(address=address) except Exception as exp: logging.warning(('Could not initiate port: %s' % str(exp))) PORT = None
[ "def", "setPortAddress", "(", "address", "=", "888", ")", ":", "global", "PORT", "if", "(", "isinstance", "(", "address", ",", "basestring", ")", "and", "address", ".", "startswith", "(", "'0x'", ")", ")", ":", "address", "=", "int", "(", "address", ",...
set the memory address or device node for your parallel port of your parallel port .
train
false
36,606
def test_deleting_from_a_message_with_multiple_uids(db, default_account, message, thread): inbox_folder = Folder.find_or_create(db.session, default_account, 'inbox', 'inbox') sent_folder = Folder.find_or_create(db.session, default_account, 'sent', 'sent') add_fake_imapuid(db.session, default_account.id, message, sent_folder, 1337) add_fake_imapuid(db.session, default_account.id, message, inbox_folder, 2222) assert (len(message.imapuids) == 2) remove_deleted_uids(default_account.id, inbox_folder.id, [2222]) db.session.expire_all() assert (message.deleted_at is None), 'The associated message should not have been marked for deletion.' assert (len(message.imapuids) == 1), 'The message should have only one imapuid.'
[ "def", "test_deleting_from_a_message_with_multiple_uids", "(", "db", ",", "default_account", ",", "message", ",", "thread", ")", ":", "inbox_folder", "=", "Folder", ".", "find_or_create", "(", "db", ".", "session", ",", "default_account", ",", "'inbox'", ",", "'in...
check that deleting a imapuid from a message with multiple uids doesnt mark the message for deletion .
train
false
36,609
def string_to_intlist(text): try: return [ord(c) for c in text.encode('utf-8')] except (UnicodeEncodeError, UnicodeDecodeError): return [ord(c) for c in text]
[ "def", "string_to_intlist", "(", "text", ")", ":", "try", ":", "return", "[", "ord", "(", "c", ")", "for", "c", "in", "text", ".", "encode", "(", "'utf-8'", ")", "]", "except", "(", "UnicodeEncodeError", ",", "UnicodeDecodeError", ")", ":", "return", "...
converts a string into an array of integers .
train
false
36,610
def _validate_dependencies_met(): from cryptography.x509.extensions import Extensions if (getattr(Extensions, 'get_extension_for_class', None) is None): raise ImportError("'cryptography' module missing required functionality. Try upgrading to v1.3.4 or newer.") from OpenSSL.crypto import X509 x509 = X509() if (getattr(x509, '_x509', None) is None): raise ImportError("'pyOpenSSL' module missing required functionality. Try upgrading to v0.14 or newer.")
[ "def", "_validate_dependencies_met", "(", ")", ":", "from", "cryptography", ".", "x509", ".", "extensions", "import", "Extensions", "if", "(", "getattr", "(", "Extensions", ",", "'get_extension_for_class'", ",", "None", ")", "is", "None", ")", ":", "raise", "I...
verifies that pyopenssls package-level dependencies have been met .
train
true
36,611
def download_appstats(servername, appid, path, secure, rpc_server_factory, filename, appdir, merge, java_application): if os.path.isdir(appdir): sys.path.insert(0, appdir) try: logging.info('Importing appengine_config from %s', appdir) import appengine_config except ImportError as err: logging.warn('Failed to load appengine_config: %s', err) remote_api_stub.ConfigureRemoteApi(appid, path, auth_func, servername=servername, save_cookies=True, secure=secure, rpc_server_factory=rpc_server_factory) remote_api_stub.MaybeInvokeAuthentication() os.environ['SERVER_SOFTWARE'] = 'Development (remote_api_shell)/1.0' if (not appid): appid = os.environ['APPLICATION_ID'] download_data(filename, merge, java_application)
[ "def", "download_appstats", "(", "servername", ",", "appid", ",", "path", ",", "secure", ",", "rpc_server_factory", ",", "filename", ",", "appdir", ",", "merge", ",", "java_application", ")", ":", "if", "os", ".", "path", ".", "isdir", "(", "appdir", ")", ...
invoke remote_api to download appstats data .
train
false
36,612
def pr_add_affiliation(master, affiliate, role=None, role_type=OU): if (not role): return None master_pe = pr_get_pe_id(master) affiliate_pe = pr_get_pe_id(affiliate) role_id = None if (master_pe and affiliate_pe): rtable = current.s3db.pr_role query = (((rtable.pe_id == master_pe) & (rtable.role == role)) & (rtable.deleted != True)) row = current.db(query).select(rtable.id, limitby=(0, 1)).first() if (not row): data = {'pe_id': master_pe, 'role': role, 'role_type': role_type} role_id = rtable.insert(**data) else: role_id = row.id if role_id: pr_add_to_role(role_id, affiliate_pe) return role_id
[ "def", "pr_add_affiliation", "(", "master", ",", "affiliate", ",", "role", "=", "None", ",", "role_type", "=", "OU", ")", ":", "if", "(", "not", "role", ")", ":", "return", "None", "master_pe", "=", "pr_get_pe_id", "(", "master", ")", "affiliate_pe", "="...
add a new affiliation record .
train
false
36,613
def _remove_universal_flags(_config_vars): for cv in _UNIVERSAL_CONFIG_VARS: if ((cv in _config_vars) and (cv not in os.environ)): flags = _config_vars[cv] flags = re.sub('-arch\\s+\\w+\\s', ' ', flags, re.ASCII) flags = re.sub('-isysroot [^ DCTB ]*', ' ', flags) _save_modified_value(_config_vars, cv, flags) return _config_vars
[ "def", "_remove_universal_flags", "(", "_config_vars", ")", ":", "for", "cv", "in", "_UNIVERSAL_CONFIG_VARS", ":", "if", "(", "(", "cv", "in", "_config_vars", ")", "and", "(", "cv", "not", "in", "os", ".", "environ", ")", ")", ":", "flags", "=", "_config...
remove all universal build arguments from config vars .
train
false
36,616
def cursor_iter(cursor, sentinel, col_count): try: for rows in iter((lambda : cursor.fetchmany(GET_ITERATOR_CHUNK_SIZE)), sentinel): (yield [r[0:col_count] for r in rows]) finally: cursor.close()
[ "def", "cursor_iter", "(", "cursor", ",", "sentinel", ",", "col_count", ")", ":", "try", ":", "for", "rows", "in", "iter", "(", "(", "lambda", ":", "cursor", ".", "fetchmany", "(", "GET_ITERATOR_CHUNK_SIZE", ")", ")", ",", "sentinel", ")", ":", "(", "y...
yields blocks of rows from a cursor and ensures the cursor is closed when done .
train
false
36,617
def bunzip2_stream(fileobj, bufsize=1024): if (bz2 is None): raise Exception('bz2 module was not successfully imported (likely not installed).') d = bz2.BZ2Decompressor() while True: chunk = fileobj.read(bufsize) if (not chunk): return part = d.decompress(chunk) if part: (yield part)
[ "def", "bunzip2_stream", "(", "fileobj", ",", "bufsize", "=", "1024", ")", ":", "if", "(", "bz2", "is", "None", ")", ":", "raise", "Exception", "(", "'bz2 module was not successfully imported (likely not installed).'", ")", "d", "=", "bz2", ".", "BZ2Decompressor",...
decompress gzipped data on the fly .
train
false
36,618
def describe_alias(FunctionName, Name, region=None, key=None, keyid=None, profile=None): try: alias = _find_alias(FunctionName, Name, region=region, key=key, keyid=keyid, profile=profile) if alias: keys = ('AliasArn', 'Name', 'FunctionVersion', 'Description') return {'alias': dict([(k, alias.get(k)) for k in keys])} else: return {'alias': None} except ClientError as e: return {'error': salt.utils.boto3.get_error(e)}
[ "def", "describe_alias", "(", "FunctionName", ",", "Name", ",", "region", "=", "None", ",", "key", "=", "None", ",", "keyid", "=", "None", ",", "profile", "=", "None", ")", ":", "try", ":", "alias", "=", "_find_alias", "(", "FunctionName", ",", "Name",...
given a function name and alias name describe the properties of the alias .
train
false
36,619
def vpn_enable(name): run(settings.service, 'openvpn', 'start', name)
[ "def", "vpn_enable", "(", "name", ")", ":", "run", "(", "settings", ".", "service", ",", "'openvpn'", ",", "'start'", ",", "name", ")" ]
start a vpn .
train
false