field                  dtype           range / classes
id_within_dataset      int64           1 – 55.5k
snippet                stringlengths   19 – 14.2k
tokens                 listlengths     6 – 1.63k
nl                     stringlengths   6 – 352
split_within_dataset   stringclasses   1 value
is_duplicated          bool            2 classes
13,384
def rectify(x): return theano.tensor.nnet.relu(x)
[ "def", "rectify", "(", "x", ")", ":", "return", "theano", ".", "tensor", ".", "nnet", ".", "relu", "(", "x", ")" ]
rectify activation function :math:varphi(x) = max parameters x : float32 the activation .
train
false
13,385
def lift_quarantine(request): request.session.pop('third_party_auth_quarantined_modules', None)
[ "def", "lift_quarantine", "(", "request", ")", ":", "request", ".", "session", ".", "pop", "(", "'third_party_auth_quarantined_modules'", ",", "None", ")" ]
remove the session quarantine variable .
train
false
13,387
def init_cycle():
    colors_shuffle = [(globals()[i.encode('utf8')] if (not str(i).isdigit()) else term_color(int(i))) for i in c['CYCLE_COLOR']]
    return itertools.cycle(colors_shuffle)
[ "def", "init_cycle", "(", ")", ":", "colors_shuffle", "=", "[", "(", "globals", "(", ")", "[", "i", ".", "encode", "(", "'utf8'", ")", "]", "if", "(", "not", "str", "(", "i", ")", ".", "isdigit", "(", ")", ")", "else", "term_color", "(", "int", ...
init the cycle .
train
false
13,388
def _clean_attrib(obj, attrs):
    out = {}
    for key in attrs:
        val = getattr(obj, key)
        if (val is not None):
            out[key] = _serialize(val)
    return out
[ "def", "_clean_attrib", "(", "obj", ",", "attrs", ")", ":", "out", "=", "{", "}", "for", "key", "in", "attrs", ":", "val", "=", "getattr", "(", "obj", ",", "key", ")", "if", "(", "val", "is", "not", "None", ")", ":", "out", "[", "key", "]", "...
create a dictionary from an objects specified .
train
false
13,390
def _verify_signify_ed25519_signature(pubkey_file, signature_file, message):
    pubkey = _read_signify_ed25519_pubkey(pubkey_file)
    verify_key = signing.VerifyKey(pubkey)
    sig = _read_signify_ed25519_signature(signature_file)
    verify_key.verify(message, sig)
[ "def", "_verify_signify_ed25519_signature", "(", "pubkey_file", ",", "signature_file", ",", "message", ")", ":", "pubkey", "=", "_read_signify_ed25519_pubkey", "(", "pubkey_file", ")", "verify_key", "=", "signing", ".", "VerifyKey", "(", "pubkey", ")", "sig", "=", ...
verify a ed25519 signature created with openbsd signify .
train
false
13,391
def _setup_environ(app_id):
    os.environ['APPLICATION_ID'] = app_id
    filename = '/etc/appscale/port-{0}.txt'.format(app_id)
    with open(filename) as file_handle:
        port = file_handle.read()
    os.environ['NGINX_PORT'] = port
[ "def", "_setup_environ", "(", "app_id", ")", ":", "os", ".", "environ", "[", "'APPLICATION_ID'", "]", "=", "app_id", "filename", "=", "'/etc/appscale/port-{0}.txt'", ".", "format", "(", "app_id", ")", "with", "open", "(", "filename", ")", "as", "file_handle", ...
sets up the os .
train
false
13,393
@utils.arg('fixed_ip', metavar='<fixed_ip>', help=_('Fixed IP Address.'))
@deprecated_network
def do_fixed_ip_get(cs, args):
    result = cs.fixed_ips.get(args.fixed_ip)
    _print_fixed_ip(cs, result)
[ "@", "utils", ".", "arg", "(", "'fixed_ip'", ",", "metavar", "=", "'<fixed_ip>'", ",", "help", "=", "_", "(", "'Fixed IP Address.'", ")", ")", "@", "deprecated_network", "def", "do_fixed_ip_get", "(", "cs", ",", "args", ")", ":", "result", "=", "cs", "."...
retrieve info on a fixed ip .
train
false
13,395
def _purge_folder(folder, app, remove_from_disk, info_only=False):
    for ld in folder.datasets:
        print('Deleting library dataset id ', ld.id)
        ld.deleted = True
        for ldda in ([ld.library_dataset_dataset_association] + ld.expired_datasets):
            _purge_dataset_instance(ldda, app, remove_from_disk, info_only=info_only)
    for sub_folder in folder.folders:
        _purge_folder(sub_folder, app, remove_from_disk, info_only=info_only)
    if (not info_only):
        print('Purging folder id ', folder.id)
        folder.purged = True
        app.sa_session.add(folder)
        app.sa_session.flush()
[ "def", "_purge_folder", "(", "folder", ",", "app", ",", "remove_from_disk", ",", "info_only", "=", "False", ")", ":", "for", "ld", "in", "folder", ".", "datasets", ":", "print", "(", "'Deleting library dataset id '", ",", "ld", ".", "id", ")", "ld", ".", ...
purges a folder and its contents .
train
false
13,396
def buffer_iterator_to_line_iterator(chunks):
    log.warning('buffer_iterator_to_line_iterator() has been renamed to to_lines(). This alias will be removed in v0.6.0')
    return to_lines(chunks)
[ "def", "buffer_iterator_to_line_iterator", "(", "chunks", ")", ":", "log", ".", "warning", "(", "'buffer_iterator_to_line_iterator() has been renamed to to_lines(). This alias will be removed in v0.6.0'", ")", "return", "to_lines", "(", "chunks", ")" ]
alias for :py:func:to_lines() .
train
false
13,397
def json_encode_np(obj):
    if isinstance(obj, np.ndarray):
        return list(obj)
    elif isinstance(obj, np.float32):
        return float(obj)
    elif isinstance(obj, np.float64):
        return float(obj)
    elif isinstance(obj, np.int32):
        return int(obj)
    elif isinstance(obj, np.int64):
        return int(obj)
    else:
        return obj
[ "def", "json_encode_np", "(", "obj", ")", ":", "if", "isinstance", "(", "obj", ",", "np", ".", "ndarray", ")", ":", "return", "list", "(", "obj", ")", "elif", "isinstance", "(", "obj", ",", "np", ".", "float32", ")", ":", "return", "float", "(", "o...
json cant serialize numpy types .
train
false
13,398
@core_helper
def render_datetime(datetime_, date_format=None, with_hours=False):
    datetime_ = _datestamp_to_datetime(datetime_)
    if (not datetime_):
        return ''
    if date_format:
        return datetime_.strftime(date_format)
    return formatters.localised_nice_date(datetime_, show_date=True, with_hours=with_hours)
[ "@", "core_helper", "def", "render_datetime", "(", "datetime_", ",", "date_format", "=", "None", ",", "with_hours", "=", "False", ")", ":", "datetime_", "=", "_datestamp_to_datetime", "(", "datetime_", ")", "if", "(", "not", "datetime_", ")", ":", "return", ...
render a datetime object or timestamp string as a localised date or in the requested format .
train
false
13,400
def scroll_forward(event, half=False):
    w = _current_window_for_event(event)
    b = event.cli.current_buffer
    if (w and w.render_info):
        info = w.render_info
        ui_content = info.ui_content
        scroll_height = info.window_height
        if half:
            scroll_height //= 2
        y = (b.document.cursor_position_row + 1)
        height = 0
        while (y < ui_content.line_count):
            line_height = info.get_height_for_line(y)
            if ((height + line_height) < scroll_height):
                height += line_height
                y += 1
            else:
                break
        b.cursor_position = b.document.translate_row_col_to_index(y, 0)
[ "def", "scroll_forward", "(", "event", ",", "half", "=", "False", ")", ":", "w", "=", "_current_window_for_event", "(", "event", ")", "b", "=", "event", ".", "cli", ".", "current_buffer", "if", "(", "w", "and", "w", ".", "render_info", ")", ":", "info"...
scroll window down .
train
true
13,401
@verbose
def run_subprocess(command, verbose=None, *args, **kwargs):
    for (stdxxx, sys_stdxxx) in (['stderr', sys.stderr], ['stdout', sys.stdout]):
        if (stdxxx not in kwargs):
            kwargs[stdxxx] = subprocess.PIPE
        elif (kwargs[stdxxx] is sys_stdxxx):
            if isinstance(sys_stdxxx, StringIO):
                kwargs[stdxxx] = subprocess.PIPE
            else:
                kwargs[stdxxx] = sys_stdxxx
    env = kwargs.get('env', os.environ)
    if any((p.startswith('~') for p in env['PATH'].split(os.pathsep))):
        warn('Your PATH environment variable contains at least one path starting with a tilde ("~") character. Such paths are not interpreted correctly from within Python. It is recommended that you use "$HOME" instead of "~".')
    if isinstance(command, string_types):
        command_str = command
    else:
        command_str = ' '.join(command)
    logger.info(('Running subprocess: %s' % command_str))
    try:
        p = subprocess.Popen(command, *args, **kwargs)
    except Exception:
        if isinstance(command, string_types):
            command_name = command.split()[0]
        else:
            command_name = command[0]
        logger.error(('Command not found: %s' % command_name))
        raise
    (stdout_, stderr) = p.communicate()
    stdout_ = ('' if (stdout_ is None) else stdout_.decode('utf-8'))
    stderr = ('' if (stderr is None) else stderr.decode('utf-8'))
    if stdout_.strip():
        logger.info(('stdout:\n%s' % stdout_))
    if stderr.strip():
        logger.info(('stderr:\n%s' % stderr))
    output = (stdout_, stderr)
    if p.returncode:
        print(output)
        err_fun = subprocess.CalledProcessError.__init__
        if ('output' in _get_args(err_fun)):
            raise subprocess.CalledProcessError(p.returncode, command, output)
        else:
            raise subprocess.CalledProcessError(p.returncode, command)
    return output
[ "@", "verbose", "def", "run_subprocess", "(", "command", ",", "verbose", "=", "None", ",", "*", "args", ",", "**", "kwargs", ")", ":", "for", "(", "stdxxx", ",", "sys_stdxxx", ")", "in", "(", "[", "'stderr'", ",", "sys", ".", "stderr", "]", ",", "[...
run command using subprocess .
train
false
13,402
def fortranize_double_constants(code_string):
    import re
    pattern_exp = re.compile('\\d+(\\.)?\\d*[eE]-?\\d+')
    pattern_float = re.compile('\\d+\\.\\d*(?!\\d*d)')
    def subs_exp(matchobj):
        return re.sub('[eE]', 'd', matchobj.group(0))
    def subs_float(matchobj):
        return ('%sd0' % matchobj.group(0))
    code_string = pattern_exp.sub(subs_exp, code_string)
    code_string = pattern_float.sub(subs_float, code_string)
    return code_string
[ "def", "fortranize_double_constants", "(", "code_string", ")", ":", "import", "re", "pattern_exp", "=", "re", ".", "compile", "(", "'\\\\d+(\\\\.)?\\\\d*[eE]-?\\\\d+'", ")", "pattern_float", "=", "re", ".", "compile", "(", "'\\\\d+\\\\.\\\\d*(?!\\\\d*d)'", ")", "def",...
replaces every literal float with literal doubles .
train
false
13,403
def trigger_emails():
    settings = frappe.get_doc(u'Daily Work Summary Settings')
    for d in settings.companies:
        if (frappe.utils.nowtime().split(u':')[0] == d.send_emails_at.split(u':')[0]):
            emails = get_employee_emails(d.company)
            if emails:
                daily_work_summary = frappe.get_doc(dict(doctype=u'Daily Work Summary', company=d.company)).insert()
                daily_work_summary.send_mails(settings, emails)
[ "def", "trigger_emails", "(", ")", ":", "settings", "=", "frappe", ".", "get_doc", "(", "u'Daily Work Summary Settings'", ")", "for", "d", "in", "settings", ".", "companies", ":", "if", "(", "frappe", ".", "utils", ".", "nowtime", "(", ")", ".", "split", ...
send emails to employees of the enabled companies at the give hour asking them what did they work on today .
train
false
13,404
def get_multiprocessing_logger():
    try:
        from billiard import util
    except ImportError:
        pass
    else:
        return util.get_logger()
[ "def", "get_multiprocessing_logger", "(", ")", ":", "try", ":", "from", "billiard", "import", "util", "except", "ImportError", ":", "pass", "else", ":", "return", "util", ".", "get_logger", "(", ")" ]
return the multiprocessing logger .
train
false
13,405
def color_style():
    if (not supports_color()):
        style = no_style()
    else:
        SPIDER_COLORS = os.environ.get('SPIDER_COLORS', '')
        color_settings = termcolors.parse_color_setting(SPIDER_COLORS)
        if color_settings:
            class dummy:
                pass
            style = dummy()
            for role in termcolors.PALETTES[termcolors.NOCOLOR_PALETTE]:
                format = color_settings.get(role, {})
                setattr(style, role, termcolors.make_style(**format))
            style.ERROR_OUTPUT = style.ERROR
        else:
            style = no_style()
    return style
[ "def", "color_style", "(", ")", ":", "if", "(", "not", "supports_color", "(", ")", ")", ":", "style", "=", "no_style", "(", ")", "else", ":", "SPIDER_COLORS", "=", "os", ".", "environ", ".", "get", "(", "'SPIDER_COLORS'", ",", "''", ")", "color_setting...
returns a style object with the django color scheme .
train
false
13,406
def _get_chance_level(scorer, y_train):
    if (scorer.__name__ == 'accuracy_score'):
        chance = np.max([np.mean((y_train == c)) for c in np.unique(y_train)])
    elif (scorer.__name__ == 'roc_auc_score'):
        chance = 0.5
    else:
        chance = np.nan
        warn(('Cannot find chance level from %s, specify chance level' % scorer.__name__))
    return chance
[ "def", "_get_chance_level", "(", "scorer", ",", "y_train", ")", ":", "if", "(", "scorer", ".", "__name__", "==", "'accuracy_score'", ")", ":", "chance", "=", "np", ".", "max", "(", "[", "np", ".", "mean", "(", "(", "y_train", "==", "c", ")", ")", "...
get the chance level .
train
false
13,407
def parseBoolValue(value, fail_on_errors=True, preserve_none=False):
    if (not isinstance(value, string_type)):
        if (preserve_none and (value is None)):
            return value
        return bool(value)
    elif (preserve_none and (value.lower() == u'none')):
        return None
    elif (value.lower() in (u'true', u'yes', u'y', u'on', u'1')):
        return True
    elif (value.lower() in (u'false', u'no', u'n', u'off', u'0', u'none')):
        return False
    elif fail_on_errors:
        raise ValueError((u'Cannot parse bool value: %r' % value))
[ "def", "parseBoolValue", "(", "value", ",", "fail_on_errors", "=", "True", ",", "preserve_none", "=", "False", ")", ":", "if", "(", "not", "isinstance", "(", "value", ",", "string_type", ")", ")", ":", "if", "(", "preserve_none", "and", "(", "value", "is...
parses a string representing bool value .
train
false
13,408
def PrintResource(resource): print resource.resource_id.text, resource.GetResourceType()
[ "def", "PrintResource", "(", "resource", ")", ":", "print", "resource", ".", "resource_id", ".", "text", ",", "resource", ".", "GetResourceType", "(", ")" ]
display a resource to standard out .
train
false
13,409
def import_driver(drivers, preferred=None):
    if preferred:
        drivers = [preferred]
    for d in drivers:
        try:
            return __import__(d, None, None, ['x'])
        except ImportError:
            pass
    raise ImportError(('Unable to import ' + ' or '.join(drivers)))
[ "def", "import_driver", "(", "drivers", ",", "preferred", "=", "None", ")", ":", "if", "preferred", ":", "drivers", "=", "[", "preferred", "]", "for", "d", "in", "drivers", ":", "try", ":", "return", "__import__", "(", "d", ",", "None", ",", "None", ...
import the first available driver or preferred driver .
train
false
13,410
def scroll_page_down(event):
    w = _current_window_for_event(event)
    b = event.cli.current_buffer
    if (w and w.render_info):
        line_index = max(w.render_info.last_visible_line(), (w.vertical_scroll + 1))
        w.vertical_scroll = line_index
        b.cursor_position = b.document.translate_row_col_to_index(line_index, 0)
        b.cursor_position += b.document.get_start_of_line_position(after_whitespace=True)
[ "def", "scroll_page_down", "(", "event", ")", ":", "w", "=", "_current_window_for_event", "(", "event", ")", "b", "=", "event", ".", "cli", ".", "current_buffer", "if", "(", "w", "and", "w", ".", "render_info", ")", ":", "line_index", "=", "max", "(", ...
scroll page down .
train
true
13,411
def shared_dataset(data_xy, borrow=True):
    (data_x, data_y) = data_xy
    shared_x = theano.shared(np.asarray(data_x, dtype=theano.config.floatX), borrow=borrow)
    shared_y = theano.shared(np.asarray(data_y, dtype=theano.config.floatX), borrow=borrow)
    return (shared_x, T.cast(shared_y, 'int32'))
[ "def", "shared_dataset", "(", "data_xy", ",", "borrow", "=", "True", ")", ":", "(", "data_x", ",", "data_y", ")", "=", "data_xy", "shared_x", "=", "theano", ".", "shared", "(", "np", ".", "asarray", "(", "data_x", ",", "dtype", "=", "theano", ".", "c...
function that loads the dataset into shared variables the reason we store our dataset in shared variables is to allow theano to copy it into the gpu memory .
train
false
13,412
def _decode_proc_address_encoding(addr):
    (ip, port) = addr.split(':')
    port = int(port, 16)
    if (sys.version_info >= (3,)):
        ip = ip.encode('ascii')
    if (sys.byteorder == 'little'):
        ip = socket.inet_ntop(socket.AF_INET, base64.b16decode(ip)[::(-1)])
    else:
        ip = socket.inet_ntop(socket.AF_INET, base64.b16decode(ip))
    return (ip, port)
[ "def", "_decode_proc_address_encoding", "(", "addr", ")", ":", "(", "ip", ",", "port", ")", "=", "addr", ".", "split", "(", "':'", ")", "port", "=", "int", "(", "port", ",", "16", ")", "if", "(", "sys", ".", "version_info", ">=", "(", "3", ",", "...
translates an address entry in the /proc/net/* contents to a human readable form (reference <URL for instance: "0500000a:0016" -> .
train
false
13,413
@pytest.mark.parametrize((u'code',), pars)
def test_projection_default(code):
    model = getattr(projections, (u'Sky2Pix_' + code))
    tinv = model()
    (x, y) = tinv(45, 45)
    model = getattr(projections, (u'Pix2Sky_' + code))
    tinv = model()
    (x, y) = tinv(0, 0)
[ "@", "pytest", ".", "mark", ".", "parametrize", "(", "(", "u'code'", ",", ")", ",", "pars", ")", "def", "test_projection_default", "(", "code", ")", ":", "model", "=", "getattr", "(", "projections", ",", "(", "u'Sky2Pix_'", "+", "code", ")", ")", "tinv...
check astropy model eval with default parameters .
train
false
13,414
@click.command('set-default-site')
@click.argument('site')
def set_default_site(site):
    from bench.utils import set_default_site
    set_default_site(site)
[ "@", "click", ".", "command", "(", "'set-default-site'", ")", "@", "click", ".", "argument", "(", "'site'", ")", "def", "set_default_site", "(", "site", ")", ":", "from", "bench", ".", "utils", "import", "set_default_site", "set_default_site", "(", "site", "...
set default site for bench .
train
false
13,416
def _sequence_field(checked_class, suffix, item_type, optional, initial):
    class TheType(checked_class):
        __type__ = item_type
    TheType.__name__ = (item_type.__name__.capitalize() + suffix)
    if optional:
        def factory(argument):
            if (argument is None):
                return None
            else:
                return TheType(argument)
    else:
        factory = TheType
    return field(type=(optional_type(TheType) if optional else TheType), factory=factory, mandatory=True, initial=factory(initial))
[ "def", "_sequence_field", "(", "checked_class", ",", "suffix", ",", "item_type", ",", "optional", ",", "initial", ")", ":", "class", "TheType", "(", "checked_class", ",", ")", ":", "__type__", "=", "item_type", "TheType", ".", "__name__", "=", "(", "item_typ...
create checked field for either pset or pvector .
train
false
13,417
def external_login_email_get():
    form = ResendConfirmationForm(request.form)
    session = get_session()
    if (not session.is_external_first_login):
        raise HTTPError(http.UNAUTHORIZED)
    external_id_provider = session.data['auth_user_external_id_provider']
    return {'form': form, 'external_id_provider': external_id_provider}
[ "def", "external_login_email_get", "(", ")", ":", "form", "=", "ResendConfirmationForm", "(", "request", ".", "form", ")", "session", "=", "get_session", "(", ")", "if", "(", "not", "session", ".", "is_external_first_login", ")", ":", "raise", "HTTPError", "("...
landing view for first-time oauth-login user to enter their email address .
train
false
13,419
def issubtype(a, b):
    if issubclass(a, b):
        return True
    if (issubclass(a, (tuple, list, set)) and issubclass(b, Iterator)):
        return True
    if (issubclass(b, (tuple, list, set)) and issubclass(a, Iterator)):
        return True
    return False
[ "def", "issubtype", "(", "a", ",", "b", ")", ":", "if", "issubclass", "(", "a", ",", "b", ")", ":", "return", "True", "if", "(", "issubclass", "(", "a", ",", "(", "tuple", ",", "list", ",", "set", ")", ")", "and", "issubclass", "(", "b", ",", ...
a custom issubclass .
train
false
13,420
def make_vector(): return T.vector()
[ "def", "make_vector", "(", ")", ":", "return", "T", ".", "vector", "(", ")" ]
returns a new theano vector .
train
false
13,421
def test_import_submodule_global_shadowed(pyi_builder): pyi_builder.test_source('\n # Assert that this submodule is shadowed by a string global variable.\n from pyi_testmod_submodule_global_shadowed import submodule\n assert type(submodule) == str\n\n # Assert that this submodule is still frozen into this test application.\n # To do so:\n #\n # 1. Delete this global variable from its parent package.\n # 2. Assert that this submodule is unshadowed by this global variable.\n import pyi_testmod_submodule_global_shadowed, sys\n del pyi_testmod_submodule_global_shadowed.submodule\n from pyi_testmod_submodule_global_shadowed import submodule\n assert type(submodule) == type(sys)\n ')
[ "def", "test_import_submodule_global_shadowed", "(", "pyi_builder", ")", ":", "pyi_builder", ".", "test_source", "(", "'\\n # Assert that this submodule is shadowed by a string global variable.\\n from pyi_testmod_submodule_global_shadowed import submodule\\n assert type(sub...
functional test validating issue #1919 .
train
false
13,422
def TR6(rv, max=4, pow=False): return _TR56(rv, cos, sin, (lambda x: (1 - x)), max=max, pow=pow)
[ "def", "TR6", "(", "rv", ",", "max", "=", "4", ",", "pow", "=", "False", ")", ":", "return", "_TR56", "(", "rv", ",", "cos", ",", "sin", ",", "(", "lambda", "x", ":", "(", "1", "-", "x", ")", ")", ",", "max", "=", "max", ",", "pow", "=", ...
replacement of cos**2 with 1 - sin(x)**2 .
train
false
13,423
def _serialize_list_prop(prop, values):
    serialized_value = []
    for (i, val) in enumerate((values or [])):
        db_type = prop.get_item_definition_at_index(i).DB_TYPE
        if (db_type is None):
            continue
        serialized_value.append({'type': db_type, 'value': val})
    return serialized_value
[ "def", "_serialize_list_prop", "(", "prop", ",", "values", ")", ":", "serialized_value", "=", "[", "]", "for", "(", "i", ",", "val", ")", "in", "enumerate", "(", "(", "values", "or", "[", "]", ")", ")", ":", "db_type", "=", "prop", ".", "get_item_def...
a helper func called to correctly serialize an array property .
train
false
13,424
def bench_isotonic_regression(Y):
    gc.collect()
    tstart = datetime.now()
    isotonic_regression(Y)
    delta = (datetime.now() - tstart)
    return total_seconds(delta)
[ "def", "bench_isotonic_regression", "(", "Y", ")", ":", "gc", ".", "collect", "(", ")", "tstart", "=", "datetime", ".", "now", "(", ")", "isotonic_regression", "(", "Y", ")", "delta", "=", "(", "datetime", ".", "now", "(", ")", "-", "tstart", ")", "r...
runs a single iteration of isotonic regression on the input data .
train
false
13,425
def shift(input, shift, output=None, order=3, mode='constant', cval=0.0, prefilter=True):
    if ((order < 0) or (order > 5)):
        raise RuntimeError('spline order not supported')
    input = numpy.asarray(input)
    if numpy.iscomplexobj(input):
        raise TypeError('Complex type not supported')
    if (input.ndim < 1):
        raise RuntimeError('input and output rank must be > 0')
    mode = _extend_mode_to_code(mode)
    if (prefilter and (order > 1)):
        filtered = spline_filter(input, order, output=numpy.float64)
    else:
        filtered = input
    (output, return_value) = _ni_support._get_output(output, input)
    shift = _ni_support._normalize_sequence(shift, input.ndim)
    shift = [(- ii) for ii in shift]
    shift = numpy.asarray(shift, dtype=numpy.float64)
    if (not shift.flags.contiguous):
        shift = shift.copy()
    _nd_image.zoom_shift(filtered, None, shift, output, order, mode, cval)
    return return_value
[ "def", "shift", "(", "input", ",", "shift", ",", "output", "=", "None", ",", "order", "=", "3", ",", "mode", "=", "'constant'", ",", "cval", "=", "0.0", ",", "prefilter", "=", "True", ")", ":", "if", "(", "(", "order", "<", "0", ")", "or", "(",...
shift an image randomly or non-randomly .
train
false
13,426
def forwards_move_org_source(apps, schema_editor):
    RemoteOrganization = apps.get_model(u'oauth', u'RemoteOrganization')
    SocialAccount = apps.get_model(u'socialaccount', u'SocialAccount')
    for account in SocialAccount.objects.all():
        rows = RemoteOrganization.objects.filter(users=account.user, source=account.provider).update(account=account)
[ "def", "forwards_move_org_source", "(", "apps", ",", "schema_editor", ")", ":", "RemoteOrganization", "=", "apps", ".", "get_model", "(", "u'oauth'", ",", "u'RemoteOrganization'", ")", "SocialAccount", "=", "apps", ".", "get_model", "(", "u'socialaccount'", ",", "...
use source field to set organization account .
train
false
13,427
def GetAnalyticsClient():
    SOURCE_APP_NAME = 'Analytics-ManagementAPI-Demo-v1'
    my_client = gdata.analytics.client.AnalyticsClient(source=SOURCE_APP_NAME)
    try:
        gdata.sample_util.authorize_client(my_client, service=my_client.auth_service, source=SOURCE_APP_NAME, scopes=['https://www.google.com/analytics/feeds/'])
    except gdata.client.BadAuthentication:
        exit('Invalid user credentials given.')
    except gdata.client.Error:
        exit('Login Error')
    return my_client
[ "def", "GetAnalyticsClient", "(", ")", ":", "SOURCE_APP_NAME", "=", "'Analytics-ManagementAPI-Demo-v1'", "my_client", "=", "gdata", ".", "analytics", ".", "client", ".", "AnalyticsClient", "(", "source", "=", "SOURCE_APP_NAME", ")", "try", ":", "gdata", ".", "samp...
returns an authorized googleanalayticsclient object .
train
false
13,428
def _config(name, key=None, **kwargs):
    if (key is None):
        key = name
    if (name in kwargs):
        value = kwargs[name]
    else:
        value = __salt__['config.option']('ldap.{0}'.format(key))
    return value
[ "def", "_config", "(", "name", ",", "key", "=", "None", ",", "**", "kwargs", ")", ":", "if", "(", "key", "is", "None", ")", ":", "key", "=", "name", "if", "(", "name", "in", "kwargs", ")", ":", "value", "=", "kwargs", "[", "name", "]", "else", ...
return a value for name from master config file options or defaults .
train
true
13,429
def get_cur_file_size(fp, position_to_eof=False):
    if (isinstance(fp, KeyFile) and (not position_to_eof)):
        return fp.getkey().size
    if (not position_to_eof):
        cur_pos = fp.tell()
    fp.seek(0, os.SEEK_END)
    cur_file_size = fp.tell()
    if (not position_to_eof):
        fp.seek(cur_pos, os.SEEK_SET)
    return cur_file_size
[ "def", "get_cur_file_size", "(", "fp", ",", "position_to_eof", "=", "False", ")", ":", "if", "(", "isinstance", "(", "fp", ",", "KeyFile", ")", "and", "(", "not", "position_to_eof", ")", ")", ":", "return", "fp", ".", "getkey", "(", ")", ".", "size", ...
returns size of file .
train
false
13,430
def load_txt_sent(flist_txt, subset_pct):
    total_size = sum(map(os.path.getsize, flist_txt))
    subset_size = int(((subset_pct / 100.0) * total_size))
    all_sent = []
    for txt_file in flist_txt:
        if (subset_size > 0):
            with open(txt_file, 'r') as f:
                data = f.read(subset_size)
            subset_size -= sys.getsizeof(data)
            sent = data.split('\n')
            if (subset_size <= 0):
                sent = sent[:(-1)]
            all_sent += sent
    return all_sent
[ "def", "load_txt_sent", "(", "flist_txt", ",", "subset_pct", ")", ":", "total_size", "=", "sum", "(", "map", "(", "os", ".", "path", ".", "getsize", ",", "flist_txt", ")", ")", "subset_size", "=", "int", "(", "(", "(", "subset_pct", "/", "100.0", ")", ...
load all the senteces from a list of txt files using standard file io .
train
false
13,436
def siEval(s, typ=float, regex=FLOAT_REGEX):
    (val, siprefix, suffix) = siParse(s, regex)
    v = typ(val)
    return siApply(val, siprefix)
[ "def", "siEval", "(", "s", ",", "typ", "=", "float", ",", "regex", "=", "FLOAT_REGEX", ")", ":", "(", "val", ",", "siprefix", ",", "suffix", ")", "=", "siParse", "(", "s", ",", "regex", ")", "v", "=", "typ", "(", "val", ")", "return", "siApply", ...
convert a value written in si notation to its equivalent prefixless value .
train
false
13,439
def _constructMethod(cls, name, self):
    func = cls.__dict__[name]
    if _PY3:
        return _MethodType(func, self)
    return _MethodType(func, self, cls)
[ "def", "_constructMethod", "(", "cls", ",", "name", ",", "self", ")", ":", "func", "=", "cls", ".", "__dict__", "[", "name", "]", "if", "_PY3", ":", "return", "_MethodType", "(", "func", ",", "self", ")", "return", "_MethodType", "(", "func", ",", "s...
construct a bound method .
train
false
13,442
def send_approve_mail(request, user):
    approval_emails = split_addresses(settings.ACCOUNTS_APPROVAL_EMAILS)
    if (not approval_emails):
        return
    context = {u'request': request, u'user': user, u'change_url': admin_url(user.__class__, u'change', user.id)}
    subject = subject_template(u'email/account_approve_subject.txt', context)
    send_mail_template(subject, u'email/account_approve', settings.DEFAULT_FROM_EMAIL, approval_emails, context=context)
[ "def", "send_approve_mail", "(", "request", ",", "user", ")", ":", "approval_emails", "=", "split_addresses", "(", "settings", ".", "ACCOUNTS_APPROVAL_EMAILS", ")", "if", "(", "not", "approval_emails", ")", ":", "return", "context", "=", "{", "u'request'", ":", ...
sends an email to staff in listed in the setting accounts_approval_emails .
train
false
13,445
def compute_mac(token, serialized_data): return hash_data((serialized_data + token))
[ "def", "compute_mac", "(", "token", ",", "serialized_data", ")", ":", "return", "hash_data", "(", "(", "serialized_data", "+", "token", ")", ")" ]
computes and returns the base64 encoded mac .
train
false
13,446
def env_file(registry, xml_parent, data):
    eib = XML.SubElement(xml_parent, 'hudson.plugins.envfile.EnvFileBuildWrapper')
    jenkins_jobs.modules.base.add_nonblank_xml_subelement(eib, 'filePath', data.get('properties-file'))
[ "def", "env_file", "(", "registry", ",", "xml_parent", ",", "data", ")", ":", "eib", "=", "XML", ".", "SubElement", "(", "xml_parent", ",", "'hudson.plugins.envfile.EnvFileBuildWrapper'", ")", "jenkins_jobs", ".", "modules", ".", "base", ".", "add_nonblank_xml_sub...
yaml: env-file add or override environment variables to the whole build process requires the jenkins :jenkins-wiki:environment file plugin <envfile+plugin> .
train
false
13,447
def get_vmconfig(vmid, node=None, node_type='openvz'):
    if (node is None):
        for (host_name, host_details) in six.iteritems(avail_locations()):
            for item in query('get', 'nodes/{0}/{1}'.format(host_name, node_type)):
                if (item['vmid'] == vmid):
                    node = host_name
    data = query('get', 'nodes/{0}/{1}/{2}/config'.format(node, node_type, vmid))
    return data
[ "def", "get_vmconfig", "(", "vmid", ",", "node", "=", "None", ",", "node_type", "=", "'openvz'", ")", ":", "if", "(", "node", "is", "None", ")", ":", "for", "(", "host_name", ",", "host_details", ")", "in", "six", ".", "iteritems", "(", "avail_location...
get vm configuration .
train
true
13,449
@register.simple_tag()
def querystring_toggle(request, multi=True, page_key='page', **kwargs):
    new_querydict = request.GET.copy()
    try:
        new_querydict.pop(page_key)
    except KeyError:
        pass
    for (k, v) in kwargs.items():
        values = new_querydict.getlist(k)
        if ((k in new_querydict) and (v in values)):
            values.remove(v)
            new_querydict.setlist(k, values)
        elif (not multi):
            new_querydict[k] = v
        else:
            new_querydict.update({k: v})
    querystring = new_querydict.urlencode()
    if querystring:
        return ('?' + querystring)
    else:
        return ''
[ "@", "register", ".", "simple_tag", "(", ")", "def", "querystring_toggle", "(", "request", ",", "multi", "=", "True", ",", "page_key", "=", "'page'", ",", "**", "kwargs", ")", ":", "new_querydict", "=", "request", ".", "GET", ".", "copy", "(", ")", "tr...
add or remove a parameter in the http get query string .
train
false
13,450
def _format_key(key):
    (schema, table) = key
    table = (table or '(FACT)')
    if schema:
        return '{}.{}'.format(schema, table)
    else:
        return table
[ "def", "_format_key", "(", "key", ")", ":", "(", "schema", ",", "table", ")", "=", "key", "table", "=", "(", "table", "or", "'(FACT)'", ")", "if", "schema", ":", "return", "'{}.{}'", ".", "format", "(", "schema", ",", "table", ")", "else", ":", "re...
format table key key to a string .
train
false
13,452
def allow_icmp(zone, icmp, permanent=True):
    if (icmp not in get_icmp_types(permanent)):
        log.error('Invalid ICMP type')
        return False
    if (icmp not in list_icmp_block(zone, permanent)):
        log.info('ICMP Type is already permitted')
        return 'success'
    cmd = '--zone={0} --remove-icmp-block={1}'.format(zone, icmp)
    if permanent:
        cmd += ' --permanent'
    return __firewall_cmd(cmd)
[ "def", "allow_icmp", "(", "zone", ",", "icmp", ",", "permanent", "=", "True", ")", ":", "if", "(", "icmp", "not", "in", "get_icmp_types", "(", "permanent", ")", ")", ":", "log", ".", "error", "(", "'Invalid ICMP type'", ")", "return", "False", "if", "(...
allow a specific icmp type on a zone .
train
true
13,454
def importorskip(modname, minversion=None):
    try:
        pytest.importorskip(modname, minversion)
    except Skipped as exc:
        return skip(str(exc))
    except Exception as exc:
        print 'importorskip: Exception in module "{}":'.format(modname)
        print ('-' * 60)
        traceback.print_exc(file=sys.stdout)
        print ('-' * 60)
        return skip(str(exc))
    else:
        return _noop
[ "def", "importorskip", "(", "modname", ",", "minversion", "=", "None", ")", ":", "try", ":", "pytest", ".", "importorskip", "(", "modname", ",", "minversion", ")", "except", "Skipped", "as", "exc", ":", "return", "skip", "(", "str", "(", "exc", ")", ")...
return imported module if it has at least "minversion" as its __version__ attribute .
train
false
13,455
def simulate_mouse_click(widget, x, y):
    widget.event_generate('<Enter>', x=0, y=0)
    widget.event_generate('<Motion>', x=x, y=y)
    widget.event_generate('<ButtonPress-1>', x=x, y=y)
    widget.event_generate('<ButtonRelease-1>', x=x, y=y)
[ "def", "simulate_mouse_click", "(", "widget", ",", "x", ",", "y", ")", ":", "widget", ".", "event_generate", "(", "'<Enter>'", ",", "x", "=", "0", ",", "y", "=", "0", ")", "widget", ".", "event_generate", "(", "'<Motion>'", ",", "x", "=", "x", ",", ...
generate proper events to click at the x .
train
false
13,456
def squared_difference(x1, x2): return SquaredDifference()(x1, x2)
[ "def", "squared_difference", "(", "x1", ",", "x2", ")", ":", "return", "SquaredDifference", "(", ")", "(", "x1", ",", "x2", ")" ]
squared difference of input variables .
train
false
13,457
def GetELBZones(region, node_types=None):
    balancers = GetLoadBalancers(region, node_types=node_types)
    res = []
    for b in balancers:
        res.extend(b.availability_zones)
    return res
[ "def", "GetELBZones", "(", "region", ",", "node_types", "=", "None", ")", ":", "balancers", "=", "GetLoadBalancers", "(", "region", ",", "node_types", "=", "node_types", ")", "res", "=", "[", "]", "for", "b", "in", "balancers", ":", "res", ".", "extend",...
return a list of availability zone names covered by the load balancers in a given region .
train
false
13,458
def run_frontend(func):
    func_id = bytecode.FunctionIdentity.from_function(func)
    interp = interpreter.Interpreter(func_id)
    bc = bytecode.ByteCode(func_id=func_id)
    func_ir = interp.interpret(bc)
    post_proc = postproc.PostProcessor(func_ir)
    post_proc.run()
    return func_ir
[ "def", "run_frontend", "(", "func", ")", ":", "func_id", "=", "bytecode", ".", "FunctionIdentity", ".", "from_function", "(", "func", ")", "interp", "=", "interpreter", ".", "Interpreter", "(", "func_id", ")", "bc", "=", "bytecode", ".", "ByteCode", "(", "...
run the compiler frontend over the given python function .
train
false
13,460
def CLTVersion():
    STANDALONE_PKG_ID = 'com.apple.pkg.DeveloperToolsCLILeo'
    FROM_XCODE_PKG_ID = 'com.apple.pkg.DeveloperToolsCLI'
    MAVERICKS_PKG_ID = 'com.apple.pkg.CLTools_Executables'
    regex = re.compile('version: (?P<version>.+)')
    for key in [MAVERICKS_PKG_ID, STANDALONE_PKG_ID, FROM_XCODE_PKG_ID]:
        try:
            output = GetStdout(['/usr/sbin/pkgutil', '--pkg-info', key])
            return re.search(regex, output).groupdict()['version']
        except:
            continue
[ "def", "CLTVersion", "(", ")", ":", "STANDALONE_PKG_ID", "=", "'com.apple.pkg.DeveloperToolsCLILeo'", "FROM_XCODE_PKG_ID", "=", "'com.apple.pkg.DeveloperToolsCLI'", "MAVERICKS_PKG_ID", "=", "'com.apple.pkg.CLTools_Executables'", "regex", "=", "re", ".", "compile", "(", "'vers...
returns the version of command-line tools from pkgutil .
train
false
13,462
def _from_hass_color(color):
    from limitlessled import Color
    return Color(*tuple(color))
[ "def", "_from_hass_color", "(", "color", ")", ":", "from", "limitlessled", "import", "Color", "return", "Color", "(", "*", "tuple", "(", "color", ")", ")" ]
convert home assistant rgb list to color tuple .
train
false
13,466
def create_confirm_application(message):
    registry = Registry()
    @registry.add_binding(u'y')
    @registry.add_binding(u'Y')
    def _(event):
        event.cli.buffers[DEFAULT_BUFFER].text = u'y'
        event.cli.set_return_value(True)
    @registry.add_binding(u'n')
    @registry.add_binding(u'N')
    @registry.add_binding(Keys.ControlC)
    def _(event):
        event.cli.buffers[DEFAULT_BUFFER].text = u'n'
        event.cli.set_return_value(False)
    return create_prompt_application(message, key_bindings_registry=registry)
[ "def", "create_confirm_application", "(", "message", ")", ":", "registry", "=", "Registry", "(", ")", "@", "registry", ".", "add_binding", "(", "u'y'", ")", "@", "registry", ".", "add_binding", "(", "u'Y'", ")", "def", "_", "(", "event", ")", ":", "event...
create a confirmation application that returns true/false .
train
true
13,468
def connect_button(button, fn): button.pressed.connect(fn)
[ "def", "connect_button", "(", "button", ",", "fn", ")", ":", "button", ".", "pressed", ".", "connect", "(", "fn", ")" ]
connect a button to a function .
train
false
13,469
def import_file_to_globals(env, module_name, fpath):
    mod = import_file_to_module(module_name, fpath)
    for (k, v) in mod.__dict__.items():
        env[k] = v
[ "def", "import_file_to_globals", "(", "env", ",", "module_name", ",", "fpath", ")", ":", "mod", "=", "import_file_to_module", "(", "module_name", ",", "fpath", ")", "for", "(", "k", ",", "v", ")", "in", "mod", ".", "__dict__", ".", "items", "(", ")", "...
import content from fpath and puts it into the dict provided .
train
false
13,473
def get_root():
    root = os.path.realpath(os.path.abspath(os.getcwd()))
    setup_py = os.path.join(root, 'setup.py')
    versioneer_py = os.path.join(root, 'versioneer.py')
    if (not (os.path.exists(setup_py) or os.path.exists(versioneer_py))):
        root = os.path.dirname(os.path.realpath(os.path.abspath(sys.argv[0])))
        setup_py = os.path.join(root, 'setup.py')
        versioneer_py = os.path.join(root, 'versioneer.py')
    if (not (os.path.exists(setup_py) or os.path.exists(versioneer_py))):
        err = "Versioneer was unable to run the project root directory. Versioneer requires setup.py to be executed from its immediate directory (like 'python setup.py COMMAND'), or in a way that lets it use sys.argv[0] to find the root (like 'python path/to/setup.py COMMAND')."
        raise VersioneerBadRootError(err)
    try:
        me = os.path.realpath(os.path.abspath(__file__))
        me_dir = os.path.normcase(os.path.splitext(me)[0])
        vsr_dir = os.path.normcase(os.path.splitext(versioneer_py)[0])
        if (me_dir != vsr_dir):
            print(('Warning: build in %s is using versioneer.py from %s' % (os.path.dirname(me), versioneer_py)))
    except NameError:
        pass
    return root
[ "def", "get_root", "(", ")", ":", "root", "=", "os", ".", "path", ".", "realpath", "(", "os", ".", "path", ".", "abspath", "(", "os", ".", "getcwd", "(", ")", ")", ")", "setup_py", "=", "os", ".", "path", ".", "join", "(", "root", ",", "'setup....
return a tuple composed of when provided a :term:router instance as the app argument .
train
true
13,474
def parse_group_ref(source, info):
    source.expect('<')
    saved_pos = source.pos
    name = parse_name(source, True)
    source.expect('>')
    if info.is_open_group(name):
        raise error('cannot refer to an open group', source.string, source.pos)
    return make_ref_group(info, name, saved_pos)
[ "def", "parse_group_ref", "(", "source", ",", "info", ")", ":", "source", ".", "expect", "(", "'<'", ")", "saved_pos", "=", "source", ".", "pos", "name", "=", "parse_name", "(", "source", ",", "True", ")", "source", ".", "expect", "(", "'>'", ")", "i...
parses a group reference .
train
false
13,476
def click_css(page, css, source_index=0, require_notification=True):
    def _is_visible(element):
        """Is the given element visible?"""
        return (element.is_displayed() and all(((size > 0) for size in element.size.itervalues())))
    disable_animations(page)
    page.q(css=css).filter(_is_visible).nth(source_index).click()
    if require_notification:
        wait_for_notification(page)
    page.wait_for_ajax()
[ "def", "click_css", "(", "page", ",", "css", ",", "source_index", "=", "0", ",", "require_notification", "=", "True", ")", ":", "def", "_is_visible", "(", "element", ")", ":", "return", "(", "element", ".", "is_displayed", "(", ")", "and", "all", "(", ...
click the button/link with the given css and index on the specified page .
train
false
13,478
def make_array_ndenumerate_cls(nditerty): return _make_flattening_iter_cls(nditerty, 'ndenumerate')
[ "def", "make_array_ndenumerate_cls", "(", "nditerty", ")", ":", "return", "_make_flattening_iter_cls", "(", "nditerty", ",", "'ndenumerate'", ")" ]
return the structure representation of the given *nditerty* .
train
false
13,479
def test_timeout():
    from time import sleep
    sleep(2)
[ "def", "test_timeout", "(", ")", ":", "from", "time", "import", "sleep", "sleep", "(", "2", ")" ]
this function needs to be pickleable .
train
false
13,482
def check_session(kwargs):
    if (not check_access()):
        return u'Access denied'
    key = kwargs.get('session')
    if (not key):
        key = kwargs.get('apikey')
    msg = None
    if (not key):
        logging.warning(T('Missing Session key'))
        msg = T('Error: Session Key Required')
    elif (key != cfg.api_key()):
        logging.warning(T('Error: Session Key Incorrect'))
        msg = T('Error: Session Key Incorrect')
    return msg
[ "def", "check_session", "(", "kwargs", ")", ":", "if", "(", "not", "check_access", "(", ")", ")", ":", "return", "u'Access denied'", "key", "=", "kwargs", ".", "get", "(", "'session'", ")", "if", "(", "not", "key", ")", ":", "key", "=", "kwargs", "."...
check session key .
train
false
13,484
def create_models(model, data): return write_models(model, data, None)
[ "def", "create_models", "(", "model", ",", "data", ")", ":", "return", "write_models", "(", "model", ",", "data", ",", "None", ")" ]
create models for each data hash .
train
false
13,485
def scrape_options_into_new_groups(source_groups, assignments):
    log.warning('scrape_options_into_new_groups() is deprecated and will be removed in v0.6.0')
    all_options = scrape_options_and_index_by_dest(*source_groups)
    return populate_option_groups_with_options(assignments, all_options)
[ "def", "scrape_options_into_new_groups", "(", "source_groups", ",", "assignments", ")", ":", "log", ".", "warning", "(", "'scrape_options_into_new_groups() is deprecated and will be removed in v0.6.0'", ")", "all_options", "=", "scrape_options_and_index_by_dest", "(", "*", "sou...
puts options from the :py:class:optionparser and :py:class:optiongroup objects in source_groups into the keys of assignments according to the values of assignments .
train
false
13,487
def eventlet_un_patch_all():
    modules_to_unpatch = [os, select, socket, thread, time, Queue, threading, ssl, __builtin__]
    for to_unpatch in modules_to_unpatch:
        reload(to_unpatch)
[ "def", "eventlet_un_patch_all", "(", ")", ":", "modules_to_unpatch", "=", "[", "os", ",", "select", ",", "socket", ",", "thread", ",", "time", ",", "Queue", ",", "threading", ",", "ssl", ",", "__builtin__", "]", "for", "to_unpatch", "in", "modules_to_unpatch...
a method to unpatch eventlet monkey patching used for the reactor tests .
train
false
13,488
def is_period_arraylike(arr):
    if isinstance(arr, ABCPeriodIndex):
        return True
    elif isinstance(arr, (np.ndarray, ABCSeries)):
        return ((arr.dtype == object) and (lib.infer_dtype(arr) == 'period'))
    return (getattr(arr, 'inferred_type', None) == 'period')
[ "def", "is_period_arraylike", "(", "arr", ")", ":", "if", "isinstance", "(", "arr", ",", "ABCPeriodIndex", ")", ":", "return", "True", "elif", "isinstance", "(", "arr", ",", "(", "np", ".", "ndarray", ",", "ABCSeries", ")", ")", ":", "return", "(", "("...
return if we are period arraylike / periodindex .
train
false
13,492
def apply_rollback(datastore, name): return _proxy_cmd('apply_rollback', datastore, name)
[ "def", "apply_rollback", "(", "datastore", ",", "name", ")", ":", "return", "_proxy_cmd", "(", "'apply_rollback'", ",", "datastore", ",", "name", ")" ]
apply a system rollback .
train
false
13,493
def _foreign_key_ignoring_handle(self, *fixture_labels, **options):
    using = options.get('database', DEFAULT_DB_ALIAS)
    commit = options.get('commit', True)
    connection = connections[using]
    if uses_mysql(connection):
        cursor = connection.cursor()
        cursor.execute('SET foreign_key_checks = 0')
    _old_handle(self, *fixture_labels, **options)
    if uses_mysql(connection):
        cursor = connection.cursor()
        cursor.execute('SET foreign_key_checks = 1')
[ "def", "_foreign_key_ignoring_handle", "(", "self", ",", "*", "fixture_labels", ",", "**", "options", ")", ":", "using", "=", "options", ".", "get", "(", "'database'", ",", "DEFAULT_DB_ALIAS", ")", "commit", "=", "options", ".", "get", "(", "'commit'", ",", ...
wrap the the stock loaddata to ignore foreign key checks .
train
false
13,495
def _can_view_courseware_with_prerequisites(user, course):
    def _is_prerequisites_disabled():
        """
        Checks if prerequisites are disabled in the settings.
        """
        return (ACCESS_DENIED if is_prerequisite_courses_enabled() else ACCESS_GRANTED)
    return (_is_prerequisites_disabled() or _has_staff_access_to_descriptor(user, course, course.id) or user.is_anonymous() or _has_fulfilled_prerequisites(user, [course.id]))
[ "def", "_can_view_courseware_with_prerequisites", "(", "user", ",", "course", ")", ":", "def", "_is_prerequisites_disabled", "(", ")", ":", "return", "(", "ACCESS_DENIED", "if", "is_prerequisite_courses_enabled", "(", ")", "else", "ACCESS_GRANTED", ")", "return", "(",...
checks if a user has access to a course based on its prerequisites .
train
false
13,496
def run(cmd, **kwargs):
    log('-', cmd)
    cmd = cmd.split()
    arg0 = cmd[0]
    if (not find_executable(arg0)):
        raise Exception((('Cannot find executable "%s";' % arg0) + ('you might try %s --depend' % argv[0])))
    return check_output(cmd, **kwargs)
[ "def", "run", "(", "cmd", ",", "**", "kwargs", ")", ":", "log", "(", "'-'", ",", "cmd", ")", "cmd", "=", "cmd", ".", "split", "(", ")", "arg0", "=", "cmd", "[", "0", "]", "if", "(", "not", "find_executable", "(", "arg0", ")", ")", ":", "raise...
run cmd as a child process and return exit code .
train
false
13,499
def _linux_disks():
    ret = {'disks': [], 'SSDs': []}
    for entry in glob.glob('/sys/block/*/queue/rotational'):
        with salt.utils.fopen(entry) as entry_fp:
            device = entry.split('/')[3]
            flag = entry_fp.read(1)
            if (flag == '0'):
                ret['SSDs'].append(device)
                log.trace('Device {0} reports itself as an SSD'.format(device))
            elif (flag == '1'):
                ret['disks'].append(device)
                log.trace('Device {0} reports itself as an HDD'.format(device))
            else:
                log.trace('Unable to identify device {0} as an SSD or HDD. It does not report 0 or 1'.format(device))
    return ret
[ "def", "_linux_disks", "(", ")", ":", "ret", "=", "{", "'disks'", ":", "[", "]", ",", "'SSDs'", ":", "[", "]", "}", "for", "entry", "in", "glob", ".", "glob", "(", "'/sys/block/*/queue/rotational'", ")", ":", "with", "salt", ".", "utils", ".", "fopen...
return list of disk devices and work out if they are ssd or hdd .
train
true
13,500
def apply_transform(x, transform_matrix, channel_index=2, fill_mode='nearest', cval=0.0):
    x = np.rollaxis(x, channel_index, 0)
    final_affine_matrix = transform_matrix[:2, :2]
    final_offset = transform_matrix[:2, 2]
    channel_images = [ndi.interpolation.affine_transform(x_channel, final_affine_matrix, final_offset, order=0, mode=fill_mode, cval=cval) for x_channel in x]
    x = np.stack(channel_images, axis=0)
    x = np.rollaxis(x, 0, (channel_index + 1))
    return x
[ "def", "apply_transform", "(", "x", ",", "transform_matrix", ",", "channel_index", "=", "2", ",", "fill_mode", "=", "'nearest'", ",", "cval", "=", "0.0", ")", ":", "x", "=", "np", ".", "rollaxis", "(", "x", ",", "channel_index", ",", "0", ")", "final_a...
return transformed images by given transform_matrix from transform_matrix_offset_center .
train
true
13,501
def error_msg_wx(msg, parent=None):
    dialog = wx.MessageDialog(parent=parent, message=msg, caption='Matplotlib backend_wx error', style=(wx.OK | wx.CENTRE))
    dialog.ShowModal()
    dialog.Destroy()
    return None
[ "def", "error_msg_wx", "(", "msg", ",", "parent", "=", "None", ")", ":", "dialog", "=", "wx", ".", "MessageDialog", "(", "parent", "=", "parent", ",", "message", "=", "msg", ",", "caption", "=", "'Matplotlib backend_wx error'", ",", "style", "=", "(", "w...
signal an error condition -- in a gui .
train
true
13,502
def ndim(expr): return len(shape(expr))
[ "def", "ndim", "(", "expr", ")", ":", "return", "len", "(", "shape", "(", "expr", ")", ")" ]
returns the number of axes in a tensor .
train
false
13,503
def get_runners_base_paths():
    system_runners_base_path = get_system_runners_base_path()
    runners_base_paths = (cfg.CONF.content.runners_base_paths or '')
    if runners_base_paths.endswith(':'):
        runners_base_paths = runners_base_paths[:(-1)]
    result = []
    if system_runners_base_path:
        result.append(system_runners_base_path)
    runners_base_paths = runners_base_paths.split(':')
    result = (result + runners_base_paths)
    result = [path for path in result if path]
    result = list(OrderedSet(result))
    return result
[ "def", "get_runners_base_paths", "(", ")", ":", "system_runners_base_path", "=", "get_system_runners_base_path", "(", ")", "runners_base_paths", "=", "(", "cfg", ".", "CONF", ".", "content", ".", "runners_base_paths", "or", "''", ")", "if", "runners_base_paths", "."...
return a list of base paths which are searched for runners .
train
false
13,504
def campaign_history(cls, codenames, start, stop):
    time_points = get_time_points('hour', start, stop)
    q = Session.query(cls).filter((cls.interval == 'hour')).filter(cls.codename.in_(codenames)).filter(cls.date.in_(time_points)).order_by(cls.date)
    return [(r.date, r.codename, r.subreddit, (r.unique_count, r.pageview_count)) for r in q.all()]
[ "def", "campaign_history", "(", "cls", ",", "codenames", ",", "start", ",", "stop", ")", ":", "time_points", "=", "get_time_points", "(", "'hour'", ",", "start", ",", "stop", ")", "q", "=", "Session", ".", "query", "(", "cls", ")", ".", "filter", "(", ...
get hourly traffic for given campaigns .
train
false
13,505
def v1_deprecated(warning=None):
    warning = (warning or '')
    def mark_deprecated(f):
        def optional_warn_function(*args, **kwargs):
            if ENABLE_V1_WARNINGS:
                warnings.warn(warning, DeprecationWarning, stacklevel=2)
            return f(*args, **kwargs)
        try:
            optional_warn_function.func_name = f.func_name
        except TypeError:
            pass
        return optional_warn_function
    return mark_deprecated
[ "def", "v1_deprecated", "(", "warning", "=", "None", ")", ":", "warning", "=", "(", "warning", "or", "''", ")", "def", "mark_deprecated", "(", "f", ")", ":", "def", "optional_warn_function", "(", "*", "args", ",", "**", "kwargs", ")", ":", "if", "ENABL...
shows a warning if enable_v1_warnings is true .
train
false
13,506
@requires_version('scipy', '0.17.1')
def test_spherical_harmonics():
    (az, pol) = np.meshgrid(np.linspace(0, (2 * np.pi), 30), np.linspace(0, np.pi, 20))
    for degree in range(1, 10):
        for order in range(0, (degree + 1)):
            sph = _sph_harm(order, degree, az, pol)
            sph_scipy = sph_harm(order, degree, az, pol)
            assert_allclose(sph, sph_scipy, atol=1e-07)
[ "@", "requires_version", "(", "'scipy'", ",", "'0.17.1'", ")", "def", "test_spherical_harmonics", "(", ")", ":", "(", "az", ",", "pol", ")", "=", "np", ".", "meshgrid", "(", "np", ".", "linspace", "(", "0", ",", "(", "2", "*", "np", ".", "pi", ")",...
test spherical harmonic functions .
train
false
13,507
def local_api_url(): return ('http://%s%s' % (os.environ.get('HTTP_HOST'), LOCAL_GCS_ENDPOINT))
[ "def", "local_api_url", "(", ")", ":", "return", "(", "'http://%s%s'", "%", "(", "os", ".", "environ", ".", "get", "(", "'HTTP_HOST'", ")", ",", "LOCAL_GCS_ENDPOINT", ")", ")" ]
return url for gcs emulation on dev appserver .
train
false
13,508
def make_pre_authed_request(env, method=None, path=None, body=None, headers=None, agent='Swift', swift_source=None):
    query_string = None
    if (path and ('?' in path)):
        (path, query_string) = path.split('?', 1)
    newenv = make_pre_authed_env(env, method, path=unquote(path), agent=agent, query_string=query_string, swift_source=swift_source)
    if (not headers):
        headers = {}
    if body:
        return Request.blank(path, environ=newenv, body=body, headers=headers)
    else:
        return Request.blank(path, environ=newenv, headers=headers)
[ "def", "make_pre_authed_request", "(", "env", ",", "method", "=", "None", ",", "path", "=", "None", ",", "body", "=", "None", ",", "headers", "=", "None", ",", "agent", "=", "'Swift'", ",", "swift_source", "=", "None", ")", ":", "query_string", "=", "N...
same as :py:func:make_subrequest but with preauthorization .
train
false
13,509
def test_no_qapp(request): assert ('qapp' not in request.fixturenames)
[ "def", "test_no_qapp", "(", "request", ")", ":", "assert", "(", "'qapp'", "not", "in", "request", ".", "fixturenames", ")" ]
make sure a test without qapp doesnt use qapp .
train
false
13,510
def test_function_series1():
    class my_function(Function):
        def fdiff(self, argindex=1):
            return cos(self.args[0])
        @classmethod
        def eval(cls, arg):
            arg = sympify(arg)
            if (arg == 0):
                return sympify(0)
    assert (my_function(x).series(x, 0, 10) == sin(x).series(x, 0, 10))
    assert (limit((my_function(x) / x), x, 0) == 1)
[ "def", "test_function_series1", "(", ")", ":", "class", "my_function", "(", "Function", ",", ")", ":", "def", "fdiff", "(", "self", ",", "argindex", "=", "1", ")", ":", "return", "cos", "(", "self", ".", "args", "[", "0", "]", ")", "@", "classmethod"...
create our new "sin" function .
train
false
13,511
def create_directory(dirname):
    try:
        os.makedirs(dirname)
    except OSError:
        pass
[ "def", "create_directory", "(", "dirname", ")", ":", "try", ":", "os", ".", "makedirs", "(", "dirname", ")", "except", "OSError", ":", "pass" ]
creates dirname and its parents if it does not exist .
train
false
13,513
def add_unknown_words(word_vecs, vocab, min_df=1, k=300):
    for word in vocab:
        if ((word not in word_vecs) and (vocab[word] >= min_df)):
            word_vecs[word] = np.random.uniform((-0.25), 0.25, k)
[ "def", "add_unknown_words", "(", "word_vecs", ",", "vocab", ",", "min_df", "=", "1", ",", "k", "=", "300", ")", ":", "for", "word", "in", "vocab", ":", "if", "(", "(", "word", "not", "in", "word_vecs", ")", "and", "(", "vocab", "[", "word", "]", ...
for words that occur in at least min_df documents .
train
false
13,514
def send_email_after_export(email, event_name, result):
    if ('__error' in result):
        send_email(email, action=EVENT_EXPORT_FAIL, subject=MAILS[EVENT_EXPORT_FAIL]['subject'].format(event_name=event_name), html=MAILS[EVENT_EXPORT_FAIL]['message'].format(error_text=result['result']['message']))
    else:
        send_email(email, action=EVENT_EXPORTED, subject=MAILS[EVENT_EXPORTED]['subject'].format(event_name=event_name), html=MAILS[EVENT_EXPORTED]['message'].format(download_url=(request.url_root.strip('/') + result['download_url'])))
[ "def", "send_email_after_export", "(", "email", ",", "event_name", ",", "result", ")", ":", "if", "(", "'__error'", "in", "result", ")", ":", "send_email", "(", "email", ",", "action", "=", "EVENT_EXPORT_FAIL", ",", "subject", "=", "MAILS", "[", "EVENT_EXPOR...
send email after event export .
train
false
13,515
def _get_offset_time(utc_offset):
    if (utc_offset is not None):
        minutes = _offset_to_min(utc_offset)
        offset = timedelta(minutes=minutes)
        offset_time = (datetime.utcnow() + offset)
        offset_time = offset_time.replace(tzinfo=_FixedOffset(minutes))
    else:
        offset_time = datetime.now()
    return offset_time
[ "def", "_get_offset_time", "(", "utc_offset", ")", ":", "if", "(", "utc_offset", "is", "not", "None", ")", ":", "minutes", "=", "_offset_to_min", "(", "utc_offset", ")", "offset", "=", "timedelta", "(", "minutes", "=", "minutes", ")", "offset_time", "=", "...
will return the current time adjusted using the input timezone offset .
train
true
13,516
@not_implemented_for('undirected')
def is_weakly_connected(G):
    if (len(G) == 0):
        raise nx.NetworkXPointlessConcept('Connectivity is undefined for the null graph.')
    return (len(list(weakly_connected_components(G))[0]) == len(G))
[ "@", "not_implemented_for", "(", "'undirected'", ")", "def", "is_weakly_connected", "(", "G", ")", ":", "if", "(", "len", "(", "G", ")", "==", "0", ")", ":", "raise", "nx", ".", "NetworkXPointlessConcept", "(", "'Connectivity is undefined for the null graph.'", ...
test directed graph for weak connectivity .
train
false
13,517
def create_annotated_table(data, selected_indices):
    if (data is None):
        return None
    names = [var.name for var in (data.domain.variables + data.domain.metas)]
    name = _get_next_name(names, ANNOTATED_DATA_FEATURE_NAME)
    metas = (data.domain.metas + (DiscreteVariable(name, ('No', 'Yes')),))
    domain = Domain(data.domain.attributes, data.domain.class_vars, metas)
    annotated = np.zeros((len(data), 1))
    if (selected_indices is not None):
        annotated[selected_indices] = 1
    table = Table(domain, data.X, data.Y, metas=np.hstack((data.metas, annotated)))
    table.attributes = data.attributes
    return table
[ "def", "create_annotated_table", "(", "data", ",", "selected_indices", ")", ":", "if", "(", "data", "is", "None", ")", ":", "return", "None", "names", "=", "[", "var", ".", "name", "for", "var", "in", "(", "data", ".", "domain", ".", "variables", "+", ...
returns data with concatenated flag column .
train
false
13,518
def run_torch_job(job):
    params = {}
    for param in job.param:
        dbl_vals = param.dbl_val._values
        int_vals = param.int_val._values
        str_vals = param.str_val._values
        if (len(dbl_vals) > 0):
            params[param.name] = dbl_vals
        elif (len(int_vals) > 0):
            params[param.name] = int_vals
        elif (len(str_vals) > 0):
            params[param.name] = str_vals
        else:
            raise Exception('Unknown parameter type.')
    param_str = ''
    for (pname, pval) in params.iteritems():
        if (len(pval) == 1):
            pval = str(pval[0])
        else:
            pval = ','.join([str(v) for v in pval])
        param_str += (((('-' + pname) + ' ') + pval) + ' ')
    cmd = ('./%s %s' % (job.name, param_str))
    log(('Executing command: %s\n' % cmd))
    sh(cmd)
[ "def", "run_torch_job", "(", "job", ")", ":", "params", "=", "{", "}", "for", "param", "in", "job", ".", "param", ":", "dbl_vals", "=", "param", ".", "dbl_val", ".", "_values", "int_vals", "=", "param", ".", "int_val", ".", "_values", "str_vals", "=", ...
run a torch based job .
train
false
13,519
@testing.requires_testing_data
def test_lcmv_raw():
    (raw, _, _, _, noise_cov, label, forward, _, _, _) = _get_data(all_forward=False, epochs=False, data_cov=False)
    (tmin, tmax) = (0, 20)
    (start, stop) = raw.time_as_index([tmin, tmax])
    data_cov = mne.compute_raw_covariance(raw, tmin=tmin, tmax=tmax)
    stc = lcmv_raw(raw, forward, noise_cov, data_cov, reg=0.01, label=label, start=start, stop=stop)
    assert_array_almost_equal(np.array([tmin, tmax]), np.array([stc.times[0], stc.times[(-1)]]), decimal=2)
    vertno = [forward['src'][0]['vertno'], forward['src'][1]['vertno']]
    assert_true((len(stc.vertices[0]) == len(np.intersect1d(vertno[0], label.vertices))))
    assert_true((len(stc.vertices[1]) == 0))
[ "@", "testing", ".", "requires_testing_data", "def", "test_lcmv_raw", "(", ")", ":", "(", "raw", ",", "_", ",", "_", ",", "_", ",", "noise_cov", ",", "label", ",", "forward", ",", "_", ",", "_", ",", "_", ")", "=", "_get_data", "(", "all_forward", ...
test lcmv with raw data .
train
false
13,520
def test_issue242(en_tokenizer):
    text = u'There are different food safety standards in different countries.'
    patterns = [[{LOWER: u'food'}, {LOWER: u'safety'}], [{LOWER: u'safety'}, {LOWER: u'standards'}]]
    doc = en_tokenizer(text)
    matcher = Matcher(doc.vocab)
    matcher.add(u'FOOD', u'FOOD', {}, patterns)
    matches = [(ent_type, start, end) for (ent_id, ent_type, start, end) in matcher(doc)]
    doc.ents += tuple(matches)
    (match1, match2) = matches
    assert (match1[1] == 3)
    assert (match1[2] == 5)
    assert (match2[1] == 4)
    assert (match2[2] == 6)
[ "def", "test_issue242", "(", "en_tokenizer", ")", ":", "text", "=", "u'There are different food safety standards in different countries.'", "patterns", "=", "[", "[", "{", "LOWER", ":", "u'food'", "}", ",", "{", "LOWER", ":", "u'safety'", "}", "]", ",", "[", "{"...
test overlapping multi-word phrases .
train
false
13,521
def extract_nodegraph_info(filename):
    ksize = None
    n_tables = None
    table_size = None
    signature = None
    version = None
    ht_type = None
    occupied = None
    uint_size = len(pack('I', 0))
    uchar_size = len(pack('B', 0))
    ulonglong_size = len(pack('Q', 0))
    try:
        with open(filename, 'rb') as nodegraph:
            (signature,) = unpack('4s', nodegraph.read(4))
            (version,) = unpack('B', nodegraph.read(1))
            (ht_type,) = unpack('B', nodegraph.read(1))
            (ksize,) = unpack('I', nodegraph.read(uint_size))
            (n_tables,) = unpack('B', nodegraph.read(uchar_size))
            (occupied,) = unpack('Q', nodegraph.read(ulonglong_size))
            (table_size,) = unpack('Q', nodegraph.read(ulonglong_size))
        if (signature != 'OXLI'):
            raise ValueError(("Node graph '{}' is missing file type signature".format(filename) + str(signature)))
    except:
        raise ValueError("Node graph '{}' is corrupt ".format(filename))
    return (ksize, round(table_size, (-2)), n_tables, version, ht_type, occupied)
[ "def", "extract_nodegraph_info", "(", "filename", ")", ":", "ksize", "=", "None", "n_tables", "=", "None", "table_size", "=", "None", "signature", "=", "None", "version", "=", "None", "ht_type", "=", "None", "occupied", "=", "None", "uint_size", "=", "len", ...
open the given nodegraph file and return a tuple of information .
train
false
13,522
def setup_hmdevice_discovery_helper(hass, hmdevicetype, discovery_info, add_callback_devices):
    devices = []
    for config in discovery_info[ATTR_DISCOVER_DEVICES]:
        _LOGGER.debug('Add device %s from config: %s', str(hmdevicetype), str(config))
        new_device = hmdevicetype(hass, config)
        new_device.link_homematic()
        devices.append(new_device)
    add_callback_devices(devices)
    return True
[ "def", "setup_hmdevice_discovery_helper", "(", "hass", ",", "hmdevicetype", ",", "discovery_info", ",", "add_callback_devices", ")", ":", "devices", "=", "[", "]", "for", "config", "in", "discovery_info", "[", "ATTR_DISCOVER_DEVICES", "]", ":", "_LOGGER", ".", "de...
helper to setup homematic devices with discovery info .
train
false
13,524
@testing.requires_testing_data
def test_output_formats():
    tempdir = _TempDir()
    formats = ['short', 'int', 'single', 'double']
    tols = [0.0001, 1e-07, 1e-07, 1e-15]
    raw = read_raw_fif(test_fif_fname).crop(0, 1)
    temp_file = op.join(tempdir, 'raw.fif')
    for (ii, (fmt, tol)) in enumerate(zip(formats, tols)):
        if (ii > 0):
            assert_raises(IOError, raw.save, temp_file, fmt=fmt)
        raw.save(temp_file, fmt=fmt, overwrite=True)
        raw2 = read_raw_fif(temp_file)
        raw2_data = raw2[:, :][0]
        assert_allclose(raw2_data, raw[:, :][0], rtol=tol, atol=1e-25)
        assert_equal(raw2.orig_format, fmt)
[ "@", "testing", ".", "requires_testing_data", "def", "test_output_formats", "(", ")", ":", "tempdir", "=", "_TempDir", "(", ")", "formats", "=", "[", "'short'", ",", "'int'", ",", "'single'", ",", "'double'", "]", "tols", "=", "[", "0.0001", ",", "1e-07", ...
test saving and loading raw data using multiple formats .
train
false
13,525
def coth(arg): return (1 / numpy.tanh(arg))
[ "def", "coth", "(", "arg", ")", ":", "return", "(", "1", "/", "numpy", ".", "tanh", "(", "arg", ")", ")" ]
hyperbolic cotangent .
train
false
13,526
def _a_encode_bytes(value, mapping):
    assert isinstance(value, bytes), ('VALUE has invalid type: %s' % type(value))
    return (str(len(value)).encode('UTF-8'), 'b', value)
[ "def", "_a_encode_bytes", "(", "value", ",", "mapping", ")", ":", "assert", "isinstance", "(", "value", ",", "bytes", ")", ",", "(", "'VALUE has invalid type: %s'", "%", "type", "(", "value", ")", ")", "return", "(", "str", "(", "len", "(", "value", ")",...
foo-bar --> .
train
false
13,528
def gradient_descent(cost, eta, n):
    w = np.random.uniform((-1), 1, 9)
    costs = []
    for j in xrange(n):
        c = cost(w)
        print 'Current cost: {0:.3f}'.format(c)
        costs.append(c)
        gradient = [partial(cost, k, w) for k in xrange(9)]
        w = np.array([(wt - (eta * d)) for (wt, d) in zip(w, gradient)])
    return (w, costs)
[ "def", "gradient_descent", "(", "cost", ",", "eta", ",", "n", ")", ":", "w", "=", "np", ".", "random", ".", "uniform", "(", "(", "-", "1", ")", ",", "1", ",", "9", ")", "costs", "=", "[", "]", "for", "j", "in", "xrange", "(", "n", ")", ":",...
perform n iterations of the gradient descent algorithm to minimize the cost function .
train
false