id_within_dataset
int64
1
55.5k
snippet
stringlengths
19
14.2k
tokens
listlengths
6
1.63k
nl
stringlengths
6
352
split_within_dataset
stringclasses
1 value
is_duplicated
bool
2 classes
51,716
def RemoveNativelySupportedComponents(filters, orders, exists): (filters, orders) = Normalize(filters, orders, exists) for f in filters: if (f.op() in EXISTS_OPERATORS): return (filters, orders) has_key_desc_order = False if (orders and (orders[(-1)].property() == datastore_types.KEY_SPECIAL_PROPERTY)): if (orders[(-1)].direction() == ASCENDING): orders = orders[:(-1)] else: has_key_desc_order = True if (not has_key_desc_order): for f in filters: if ((f.op() in INEQUALITY_OPERATORS) and (f.property(0).name() != datastore_types.KEY_SPECIAL_PROPERTY)): break else: filters = [f for f in filters if (f.property(0).name() != datastore_types.KEY_SPECIAL_PROPERTY)] return (filters, orders)
[ "def", "RemoveNativelySupportedComponents", "(", "filters", ",", "orders", ",", "exists", ")", ":", "(", "filters", ",", "orders", ")", "=", "Normalize", "(", "filters", ",", "orders", ",", "exists", ")", "for", "f", "in", "filters", ":", "if", "(", "f",...
removes query components that are natively supported by the datastore .
train
false
51,718
def test_no_truncate_crval_p17(): w = wcs.WCS(naxis=2) w.wcs.crval = [50.123456789012344, 50.123456789012344] w.wcs.cdelt = [0.001, 0.001] w.wcs.ctype = [u'RA---TAN', u'DEC--TAN'] w.wcs.set() header = w.to_header() assert (header[u'CRVAL1'] != w.wcs.crval[0]) assert (header[u'CRVAL2'] != w.wcs.crval[1]) header = w.to_header(relax=wcs.WCSHDO_P17) assert (header[u'CRVAL1'] == w.wcs.crval[0]) assert (header[u'CRVAL2'] == w.wcs.crval[1])
[ "def", "test_no_truncate_crval_p17", "(", ")", ":", "w", "=", "wcs", ".", "WCS", "(", "naxis", "=", "2", ")", "w", ".", "wcs", ".", "crval", "=", "[", "50.123456789012344", ",", "50.123456789012344", "]", "w", ".", "wcs", ".", "cdelt", "=", "[", "0.0...
regression test for URL .
train
false
51,719
def registry(database_name=None): if (database_name is None): import threading database_name = threading.currentThread().dbname return modules.registry.Registry(database_name)
[ "def", "registry", "(", "database_name", "=", "None", ")", ":", "if", "(", "database_name", "is", "None", ")", ":", "import", "threading", "database_name", "=", "threading", ".", "currentThread", "(", ")", ".", "dbname", "return", "modules", ".", "registry",...
return the model registry for the given database .
train
false
51,722
def _parse_modes(mode_string, unary_modes=''): if ((not mode_string) or (not (mode_string[0] in '+-'))): return [] modes = [] parts = mode_string.split() (mode_part, args) = (parts[0], parts[1:]) for ch in mode_part: if (ch in '+-'): sign = ch continue arg = (args.pop(0) if ((ch in unary_modes) and args) else None) modes.append([sign, ch, arg]) return modes
[ "def", "_parse_modes", "(", "mode_string", ",", "unary_modes", "=", "''", ")", ":", "if", "(", "(", "not", "mode_string", ")", "or", "(", "not", "(", "mode_string", "[", "0", "]", "in", "'+-'", ")", ")", ")", ":", "return", "[", "]", "modes", "=", ...
parse the mode_string and return a list of triples .
train
true
51,724
def assure_volume(fnc): @wraps(fnc) def _wrapped(self, volume, *args, **kwargs): if (not isinstance(volume, CloudBlockStorageVolume)): volume = self._manager.get(volume) return fnc(self, volume, *args, **kwargs) return _wrapped
[ "def", "assure_volume", "(", "fnc", ")", ":", "@", "wraps", "(", "fnc", ")", "def", "_wrapped", "(", "self", ",", "volume", ",", "*", "args", ",", "**", "kwargs", ")", ":", "if", "(", "not", "isinstance", "(", "volume", ",", "CloudBlockStorageVolume", ...
converts a volumeid passed as the volume to a cloudblockstoragevolume object .
train
true
51,725
def _find_credentials(host): user_names = [__pillar__['proxy'].get('username', 'root')] passwords = __pillar__['proxy']['passwords'] for user in user_names: for password in passwords: try: ret = salt.modules.vsphere.system_info(host=host, username=user, password=password) except SaltSystemExit: continue if ret: return (user, password) raise SaltSystemExit('Cannot complete login due to an incorrect user name or password.')
[ "def", "_find_credentials", "(", "host", ")", ":", "user_names", "=", "[", "__pillar__", "[", "'proxy'", "]", ".", "get", "(", "'username'", ",", "'root'", ")", "]", "passwords", "=", "__pillar__", "[", "'proxy'", "]", "[", "'passwords'", "]", "for", "us...
cycle through all the possible credentials and return the first one that works .
train
true
51,726
def polarity(s, **kwargs): return sentiment(s, **kwargs)[0]
[ "def", "polarity", "(", "s", ",", "**", "kwargs", ")", ":", "return", "sentiment", "(", "s", ",", "**", "kwargs", ")", "[", "0", "]" ]
returns the sentence polarity between -1 .
train
false
51,729
def zero_crossings(data): zx = np.zeros(len(data)) zx[np.where(((data[:(-1)] * data[1:]) < 0))] = 1 return zx
[ "def", "zero_crossings", "(", "data", ")", ":", "zx", "=", "np", ".", "zeros", "(", "len", "(", "data", ")", ")", "zx", "[", "np", ".", "where", "(", "(", "(", "data", "[", ":", "(", "-", "1", ")", "]", "*", "data", "[", "1", ":", "]", ")...
return a vector of length n-1 of zero-crossings within vector data .
train
false
51,730
def enum_cfg_repos(config): repositories = util.getRepositoryDir(config) for section in config.sections(): l = section.split(None, 1) type_ = l.pop(0) if (type_ != 'repo'): continue if (not l): continue (name,) = l if (not os.path.exists(os.path.join(repositories, name))): subpath = ('%s.git' % name) else: subpath = name (yield (section, name, repositories, subpath))
[ "def", "enum_cfg_repos", "(", "config", ")", ":", "repositories", "=", "util", ".", "getRepositoryDir", "(", "config", ")", "for", "section", "in", "config", ".", "sections", "(", ")", ":", "l", "=", "section", ".", "split", "(", "None", ",", "1", ")",...
enumerates all repositories that have repo sections in the config .
train
false
51,731
def dnsdomain_list(context): return IMPL.dnsdomain_list(context)
[ "def", "dnsdomain_list", "(", "context", ")", ":", "return", "IMPL", ".", "dnsdomain_list", "(", "context", ")" ]
get a list of all zones in our database .
train
false
51,734
def test_finder_priority_file_over_page(data): req = InstallRequirement.from_line('gmpy==1.15', None) finder = PackageFinder([data.find_links], ['http://pypi.python.org/simple'], session=PipSession()) all_versions = finder.find_all_candidates(req.name) assert (all_versions[0].location.scheme == 'file') assert all(((version.location.scheme == 'https') for version in all_versions[1:])), all_versions link = finder.find_requirement(req, False) assert link.url.startswith('file://')
[ "def", "test_finder_priority_file_over_page", "(", "data", ")", ":", "req", "=", "InstallRequirement", ".", "from_line", "(", "'gmpy==1.15'", ",", "None", ")", "finder", "=", "PackageFinder", "(", "[", "data", ".", "find_links", "]", ",", "[", "'http://pypi.pyth...
test packagefinder prefers file links over equivalent page links .
train
false
51,736
def infer_subscript(self, context=None): value = next(self.value.infer(context)) if (value is YES): (yield YES) return index = next(self.slice.infer(context)) if (index is YES): (yield YES) return if isinstance(index, nodes.Const): try: assigned = value.getitem(index.value, context) except AttributeError: raise InferenceError() except (IndexError, TypeError): (yield YES) return if (self is assigned): (yield YES) return for infered in assigned.infer(context): (yield infered) else: raise InferenceError()
[ "def", "infer_subscript", "(", "self", ",", "context", "=", "None", ")", ":", "value", "=", "next", "(", "self", ".", "value", ".", "infer", "(", "context", ")", ")", "if", "(", "value", "is", "YES", ")", ":", "(", "yield", "YES", ")", "return", ...
infer simple subscription such as [1 .
train
false
51,737
def test_suite(): return DocTestSuite()
[ "def", "test_suite", "(", ")", ":", "return", "DocTestSuite", "(", ")" ]
for the z3 test runner .
train
false
51,738
def values_eq_approx_high_tol(a, b): assert (a.ndim == 4) atol = None if ((a.shape[(-1)] * a.shape[(-2)]) > 100): atol = 3e-05 return CudaNdarrayType.values_eq_approx(a, b, atol=atol)
[ "def", "values_eq_approx_high_tol", "(", "a", ",", "b", ")", ":", "assert", "(", "a", ".", "ndim", "==", "4", ")", "atol", "=", "None", "if", "(", "(", "a", ".", "shape", "[", "(", "-", "1", ")", "]", "*", "a", ".", "shape", "[", "(", "-", ...
this fct is needed to dont have debugmode raise useless error due to ronding error .
train
false
51,740
def get_network(context, net_id, fields=None): session = context.session return session.query(BrocadeNetwork).filter_by(id=net_id).first()
[ "def", "get_network", "(", "context", ",", "net_id", ",", "fields", "=", "None", ")", ":", "session", "=", "context", ".", "session", "return", "session", ".", "query", "(", "BrocadeNetwork", ")", ".", "filter_by", "(", "id", "=", "net_id", ")", ".", "...
get brocade specific network .
train
false
51,742
def maybe_drop_privileges(uid=None, gid=None): if (sys.platform == u'win32'): return if os.geteuid(): if (not os.getuid()): raise SecurityError(u'contact support') uid = (uid and parse_uid(uid)) gid = (gid and parse_gid(gid)) if uid: _setuid(uid, gid) else: (gid and setgid(gid)) if (uid and (not os.getuid()) and (not os.geteuid())): raise SecurityError(u'Still root uid after drop privileges!') if (gid and (not os.getgid()) and (not os.getegid())): raise SecurityError(u'Still root gid after drop privileges!')
[ "def", "maybe_drop_privileges", "(", "uid", "=", "None", ",", "gid", "=", "None", ")", ":", "if", "(", "sys", ".", "platform", "==", "u'win32'", ")", ":", "return", "if", "os", ".", "geteuid", "(", ")", ":", "if", "(", "not", "os", ".", "getuid", ...
change process privileges to new user/group .
train
false
51,743
def test_adapthist_alpha(): img = skimage.img_as_float(data.astronaut()) alpha = np.ones((img.shape[0], img.shape[1]), dtype=float) img = np.dstack((img, alpha)) with expected_warnings(['precision loss']): adapted = exposure.equalize_adapthist(img) assert (adapted.shape != img.shape) img = img[:, :, :3] full_scale = skimage.exposure.rescale_intensity(img) assert (img.shape == adapted.shape) assert_almost_equal(peak_snr(full_scale, adapted), 109.393, 2) assert_almost_equal(norm_brightness_err(full_scale, adapted), 0.0248, 3)
[ "def", "test_adapthist_alpha", "(", ")", ":", "img", "=", "skimage", ".", "img_as_float", "(", "data", ".", "astronaut", "(", ")", ")", "alpha", "=", "np", ".", "ones", "(", "(", "img", ".", "shape", "[", "0", "]", ",", "img", ".", "shape", "[", ...
test an rgba color image .
train
false
51,744
def _encode_emr_api_params(x): if isinstance(x, dict): result = {} for (key, value) in x.items(): if ((key == 'Properties') and isinstance(value, dict)): value = [{'Key': k, 'Value': v} for (k, v) in sorted(value.items())] unpacked_value = _encode_emr_api_params(value) if isinstance(unpacked_value, dict): for (subkey, subvalue) in unpacked_value.items(): result[('%s.%s' % (key, subkey))] = subvalue else: result[key] = unpacked_value return result if isinstance(x, (list, tuple)): return _encode_emr_api_params(dict(((('member.%d' % (i + 1)), item) for (i, item) in enumerate(x)))) return x
[ "def", "_encode_emr_api_params", "(", "x", ")", ":", "if", "isinstance", "(", "x", ",", "dict", ")", ":", "result", "=", "{", "}", "for", "(", "key", ",", "value", ")", "in", "x", ".", "items", "(", ")", ":", "if", "(", "(", "key", "==", "'Prop...
recursively unpack parameters to the emr api .
train
false
51,748
@gen.engine def ListAllCommonPrefixes(store, delimiter, callback, prefix=None, marker=None): prefixes = set() keys = [] while True: (new_prefixes, new_keys) = (yield gen.Task(store.ListCommonPrefixes, delimiter, prefix=prefix, marker=marker, maxkeys=1000)) prefixes = prefixes.union(set(new_prefixes)) keys.extend(new_keys) if ((len(new_prefixes) + len(new_keys)) < 1000): break marker = '' if (len(new_prefixes) > 0): marker = max(marker, new_prefixes[(-1)]) if (len(new_keys) > 0): marker = max(marker, new_keys[(-1)]) callback((sorted(list(prefixes)), keys))
[ "@", "gen", ".", "engine", "def", "ListAllCommonPrefixes", "(", "store", ",", "delimiter", ",", "callback", ",", "prefix", "=", "None", ",", "marker", "=", "None", ")", ":", "prefixes", "=", "set", "(", ")", "keys", "=", "[", "]", "while", "True", ":...
list all common prefixes .
train
false
51,749
def __get_request_cpu_usage(): warnings.warn('get_request_cpu_usage may not always return correct values', DeprecationWarning, stacklevel=2) if apiproxy: return apiproxy.GetRequestCpuUsage() return 0
[ "def", "__get_request_cpu_usage", "(", ")", ":", "warnings", ".", "warn", "(", "'get_request_cpu_usage may not always return correct values'", ",", "DeprecationWarning", ",", "stacklevel", "=", "2", ")", "if", "apiproxy", ":", "return", "apiproxy", ".", "GetRequestCpuUs...
get the amount of cpu used so far for the current request .
train
false
51,750
def _natsort_key(item, case_sensitivity=False): item = str(item) try: chunks = re.split('(\\d+(?:\\.\\d+)?)', item) except TypeError: chunks = re.split('(\\d+(?:\\.\\d+)?)', item[0]) for ii in range(len(chunks)): if (chunks[ii] and (chunks[ii][0] in '0123456789')): if ('.' in chunks[ii]): numtype = float else: numtype = int chunks[ii] = (0, numtype(chunks[ii])) else: chunks[ii] = (1, chunks[ii]) return (chunks, item)
[ "def", "_natsort_key", "(", "item", ",", "case_sensitivity", "=", "False", ")", ":", "item", "=", "str", "(", "item", ")", "try", ":", "chunks", "=", "re", ".", "split", "(", "'(\\\\d+(?:\\\\.\\\\d+)?)'", ",", "item", ")", "except", "TypeError", ":", "ch...
provides normalized version of item for sorting with digits .
train
false
51,751
def check_output_inside_dir(command, dirpath): with inside_dir(dirpath): return subprocess.check_output(shlex.split(command))
[ "def", "check_output_inside_dir", "(", "command", ",", "dirpath", ")", ":", "with", "inside_dir", "(", "dirpath", ")", ":", "return", "subprocess", ".", "check_output", "(", "shlex", ".", "split", "(", "command", ")", ")" ]
run a command from inside a given directory .
train
false
51,752
def rolling_window(array, length): orig_shape = array.shape if (not orig_shape): raise IndexError("Can't restride a scalar.") elif (orig_shape[0] <= length): raise IndexError("Can't restride array of shape {shape} with a window length of {len}".format(shape=orig_shape, len=length)) num_windows = ((orig_shape[0] - length) + 1) new_shape = ((num_windows, length) + orig_shape[1:]) new_strides = ((array.strides[0],) + array.strides) return as_strided(array, new_shape, new_strides)
[ "def", "rolling_window", "(", "array", ",", "length", ")", ":", "orig_shape", "=", "array", ".", "shape", "if", "(", "not", "orig_shape", ")", ":", "raise", "IndexError", "(", "\"Can't restride a scalar.\"", ")", "elif", "(", "orig_shape", "[", "0", "]", "...
restride an array of shape into an array of shape where each slice at index i along the first axis is equivalent to result[i] = array[length * i:length * ] parameters array : np .
train
true
51,754
def test_apiview_invalid_method(rf): view = UserAPIView.as_view() request = create_api_request(rf, 'post') response = view(request) assert (response.status_code == 405) request = create_api_request(rf, 'patch') response = view(request) assert (response.status_code == 405)
[ "def", "test_apiview_invalid_method", "(", "rf", ")", ":", "view", "=", "UserAPIView", ".", "as_view", "(", ")", "request", "=", "create_api_request", "(", "rf", ",", "'post'", ")", "response", "=", "view", "(", "request", ")", "assert", "(", "response", "...
tests for invalid methods .
train
false
51,755
def max_drawdown(networth): hwm = [0] eq_idx = networth.index drawdown = pd.Series(index=eq_idx) duration = pd.Series(index=eq_idx) for t in range(1, len(eq_idx)): cur_hwm = max(hwm[(t - 1)], networth[t]) hwm.append(cur_hwm) drawdown[t] = (hwm[t] - networth[t]) duration[t] = (0 if (drawdown[t] <= 0) else (duration[(t - 1)] + 1)) return (drawdown.max(), duration.max())
[ "def", "max_drawdown", "(", "networth", ")", ":", "hwm", "=", "[", "0", "]", "eq_idx", "=", "networth", ".", "index", "drawdown", "=", "pd", ".", "Series", "(", "index", "=", "eq_idx", ")", "duration", "=", "pd", ".", "Series", "(", "index", "=", "...
networth: 历史净值 .
train
false
51,756
@require_POST @login_required @permitted def follow_commentable(request, course_id, commentable_id): user = cc.User.from_django_user(request.user) commentable = cc.Commentable.find(commentable_id) user.follow(commentable) return JsonResponse({})
[ "@", "require_POST", "@", "login_required", "@", "permitted", "def", "follow_commentable", "(", "request", ",", "course_id", ",", "commentable_id", ")", ":", "user", "=", "cc", ".", "User", ".", "from_django_user", "(", "request", ".", "user", ")", "commentabl...
given a course_id and commentable id .
train
false
51,757
@ship.command('move') @click.argument('ship') @click.argument('x', type=float) @click.argument('y', type=float) @click.option('--speed', metavar='KN', default=10, help='Speed in knots.') def ship_move(ship, x, y, speed): click.echo(('Moving ship %s to %s,%s with speed %s' % (ship, x, y, speed)))
[ "@", "ship", ".", "command", "(", "'move'", ")", "@", "click", ".", "argument", "(", "'ship'", ")", "@", "click", ".", "argument", "(", "'x'", ",", "type", "=", "float", ")", "@", "click", ".", "argument", "(", "'y'", ",", "type", "=", "float", "...
moves ship to the new location x .
train
false
51,758
def preDeployFile(file): pass
[ "def", "preDeployFile", "(", "file", ")", ":", "pass" ]
called prior to deploying a single built file .
train
false
51,759
def detect_console_encoding(): import locale global _initial_defencoding encoding = None try: encoding = (sys.stdout.encoding or sys.stdin.encoding) except AttributeError: pass if ((not encoding) or ('ascii' in encoding.lower())): try: encoding = locale.getpreferredencoding() except Exception: pass if ((not encoding) or ('ascii' in encoding.lower())): encoding = sys.getdefaultencoding() if (not _initial_defencoding): _initial_defencoding = sys.getdefaultencoding() return encoding
[ "def", "detect_console_encoding", "(", ")", ":", "import", "locale", "global", "_initial_defencoding", "encoding", "=", "None", "try", ":", "encoding", "=", "(", "sys", ".", "stdout", ".", "encoding", "or", "sys", ".", "stdin", ".", "encoding", ")", "except"...
try to find the most capable encoding supported by the console .
train
true
51,761
def get_order_dir(field, default='ASC'): dirn = ORDER_DIR[default] if (field[0] == '-'): return (field[1:], dirn[1]) return (field, dirn[0])
[ "def", "get_order_dir", "(", "field", ",", "default", "=", "'ASC'", ")", ":", "dirn", "=", "ORDER_DIR", "[", "default", "]", "if", "(", "field", "[", "0", "]", "==", "'-'", ")", ":", "return", "(", "field", "[", "1", ":", "]", ",", "dirn", "[", ...
returns the field name and direction for an order specification .
train
false
51,762
def make_bar_data(asset_info, calendar): assert (PSEUDO_EPOCH.value < calendar.normalize().min().value <= asset_info['start_date'].min().value), ("calendar.min(): %s\nasset_info['start_date'].min(): %s" % (calendar.min(), asset_info['start_date'].min())) assert (asset_info['start_date'] < asset_info['end_date']).all() def _raw_data_for_asset(asset_id): "\n Generate 'raw' data that encodes information about the asset.\n\n See docstring for a description of the data format.\n " datetimes = calendar[calendar.slice_indexer(asset_start(asset_info, asset_id), asset_end(asset_info, asset_id))] data = full((len(datetimes), len(US_EQUITY_PRICING_BCOLZ_COLUMNS)), ((asset_id * 100) * 1000), dtype=uint32) data[:, :5] += (arange(5, dtype=uint32) * 1000) data[:, :5] += (datetimes - PSEUDO_EPOCH).days[:, None].astype(uint32) frame = DataFrame(data, index=datetimes, columns=US_EQUITY_PRICING_BCOLZ_COLUMNS) frame['day'] = nanos_to_seconds(datetimes.asi8) frame['id'] = asset_id return frame for asset in asset_info.index: (yield (asset, _raw_data_for_asset(asset)))
[ "def", "make_bar_data", "(", "asset_info", ",", "calendar", ")", ":", "assert", "(", "PSEUDO_EPOCH", ".", "value", "<", "calendar", ".", "normalize", "(", ")", ".", "min", "(", ")", ".", "value", "<=", "asset_info", "[", "'start_date'", "]", ".", "min", ...
for a given asset/date/column combination .
train
true
51,763
def test_rus_sk_estimator(): check_estimator(RandomUnderSampler)
[ "def", "test_rus_sk_estimator", "(", ")", ":", "check_estimator", "(", "RandomUnderSampler", ")" ]
test the sklearn estimator compatibility .
train
false
51,764
def update(context, id, name, description, is_public=None): if (id is None): msg = _('id cannot be None') raise exception.InvalidGroupType(reason=msg) elevated = (context if context.is_admin else context.elevated()) try: type_updated = db.group_type_update(elevated, id, dict(name=name, description=description, is_public=is_public)) except db_exc.DBError: LOG.exception(_LE('DB error:')) raise exception.GroupTypeUpdateFailed(id=id) return type_updated
[ "def", "update", "(", "context", ",", "id", ",", "name", ",", "description", ",", "is_public", "=", "None", ")", ":", "if", "(", "id", "is", "None", ")", ":", "msg", "=", "_", "(", "'id cannot be None'", ")", "raise", "exception", ".", "InvalidGroupTyp...
update a key with a value in the minion datastore cli example: .
train
false
51,765
def init_gpg(): gpg = gnupg.GPG(homedir=config.GPG_KEY_DIR) for keyfile in ('test_journalist_key.pub', 'test_journalist_key.sec'): gpg.import_keys(open(keyfile).read()) return gpg
[ "def", "init_gpg", "(", ")", ":", "gpg", "=", "gnupg", ".", "GPG", "(", "homedir", "=", "config", ".", "GPG_KEY_DIR", ")", "for", "keyfile", "in", "(", "'test_journalist_key.pub'", ",", "'test_journalist_key.sec'", ")", ":", "gpg", ".", "import_keys", "(", ...
initialize the gpg keyring and import the journalist key for testing .
train
false
51,767
def get_deps(dsk): dependencies = {k: get_dependencies(dsk, task=v) for (k, v) in dsk.items()} dependents = reverse_dict(dependencies) return (dependencies, dependents)
[ "def", "get_deps", "(", "dsk", ")", ":", "dependencies", "=", "{", "k", ":", "get_dependencies", "(", "dsk", ",", "task", "=", "v", ")", "for", "(", "k", ",", "v", ")", "in", "dsk", ".", "items", "(", ")", "}", "dependents", "=", "reverse_dict", ...
get dependencies and dependents from dask dask graph .
train
false
51,769
def ComputeSplitPenalties(tree): _TreePenaltyAssigner().Visit(tree)
[ "def", "ComputeSplitPenalties", "(", "tree", ")", ":", "_TreePenaltyAssigner", "(", ")", ".", "Visit", "(", "tree", ")" ]
compute split penalties on tokens in the given parse tree .
train
false
51,770
def _read_dicts(fn_list, keyatom): dict_list = [] datalabel_list = [] for fn in fn_list: peaklist = Peaklist(fn) dict = peaklist.residue_dict(keyatom) dict_list.append(dict) datalabel_list.append(peaklist.datalabels) return [dict_list, datalabel_list]
[ "def", "_read_dicts", "(", "fn_list", ",", "keyatom", ")", ":", "dict_list", "=", "[", "]", "datalabel_list", "=", "[", "]", "for", "fn", "in", "fn_list", ":", "peaklist", "=", "Peaklist", "(", "fn", ")", "dict", "=", "peaklist", ".", "residue_dict", "...
read multiple files into a list of residue dictionaries .
train
false
51,771
def fresnels_zeros(nt): if ((floor(nt) != nt) or (nt <= 0) or (not isscalar(nt))): raise ValueError('Argument must be positive scalar integer.') return specfun.fcszo(2, nt)
[ "def", "fresnels_zeros", "(", "nt", ")", ":", "if", "(", "(", "floor", "(", "nt", ")", "!=", "nt", ")", "or", "(", "nt", "<=", "0", ")", "or", "(", "not", "isscalar", "(", "nt", ")", ")", ")", ":", "raise", "ValueError", "(", "'Argument must be p...
compute nt complex zeros of sine fresnel integral s(z) .
train
false
51,772
def consolidate_metadata(info_mi, info): try: raw = info[u'xmp_metadata'].rstrip() if (not raw): return info_mi xmp_mi = metadata_from_xmp_packet(raw) except Exception: import traceback traceback.print_exc() return info_mi (info_title, info_authors, info_tags) = ((info_mi.title or _(u'Unknown')), list((info_mi.authors or ())), list((info_mi.tags or ()))) info_mi.smart_update(xmp_mi, replace_metadata=True) prefer_info = False if ((u'ModDate' in info) and hasattr(xmp_mi, u'metadata_date')): try: info_date = parse_date(info[u'ModDate']) except Exception: pass else: prefer_info = (info_date > xmp_mi.metadata_date) if prefer_info: (info_mi.title, info_mi.authors, info_mi.tags) = (info_title, info_authors, info_tags) else: (info_mi.authors, info_mi.tags) = ((info_authors if xmp_mi.is_null(u'authors') else xmp_mi.authors), (xmp_mi.tags or info_tags)) return info_mi
[ "def", "consolidate_metadata", "(", "info_mi", ",", "info", ")", ":", "try", ":", "raw", "=", "info", "[", "u'xmp_metadata'", "]", ".", "rstrip", "(", ")", "if", "(", "not", "raw", ")", ":", "return", "info_mi", "xmp_mi", "=", "metadata_from_xmp_packet", ...
when both the pdf info dict and xmp metadata are present .
train
false
51,774
def test_step_schedule(backend_default): step_config = [1, 4, 5] change = [0.1, 0.3, 0.4] sch = StepSchedule(step_config=step_config, change=change) target_lr = [1.0, 0.1, 0.1, 0.1, 0.3, 0.4, 0.4, 0.4, 0.4] for (e, lr) in enumerate(target_lr): assert np.allclose(lr, sch.get_learning_rate(learning_rate=1.0, epoch=e))
[ "def", "test_step_schedule", "(", "backend_default", ")", ":", "step_config", "=", "[", "1", ",", "4", ",", "5", "]", "change", "=", "[", "0.1", ",", "0.3", ",", "0.4", "]", "sch", "=", "StepSchedule", "(", "step_config", "=", "step_config", ",", "chan...
test the stepschedule class .
train
false
51,775
def _queue_management_worker(executor_reference, processes, pending_work_items, work_ids_queue, call_queue, result_queue): executor = None def shutting_down(): return (_shutdown or (executor is None) or executor._shutdown_thread) def shutdown_worker(): nb_children_alive = sum((p.is_alive() for p in processes.values())) for i in range(0, nb_children_alive): call_queue.put_nowait(None) call_queue.close() for p in processes.values(): p.join() reader = result_queue._reader while True: _add_call_item_to_queue(pending_work_items, work_ids_queue, call_queue) sentinels = [p.sentinel for p in processes.values()] assert sentinels ready = wait(([reader] + sentinels)) if (reader in ready): result_item = reader.recv() else: executor = executor_reference() if (executor is not None): executor._broken = True executor._shutdown_thread = True executor = None for (work_id, work_item) in pending_work_items.items(): work_item.future.set_exception(BrokenProcessPool('A process in the process pool was terminated abruptly while the future was running or pending.')) del work_item pending_work_items.clear() for p in processes.values(): p.terminate() shutdown_worker() return if isinstance(result_item, int): assert shutting_down() p = processes.pop(result_item) p.join() if (not processes): shutdown_worker() return elif (result_item is not None): work_item = pending_work_items.pop(result_item.work_id, None) if (work_item is not None): if result_item.exception: work_item.future.set_exception(result_item.exception) else: work_item.future.set_result(result_item.result) del work_item executor = executor_reference() if shutting_down(): try: if (not pending_work_items): shutdown_worker() return except Full: pass executor = None
[ "def", "_queue_management_worker", "(", "executor_reference", ",", "processes", ",", "pending_work_items", ",", "work_ids_queue", ",", "call_queue", ",", "result_queue", ")", ":", "executor", "=", "None", "def", "shutting_down", "(", ")", ":", "return", "(", "_shu...
manages the communication between this process and the worker processes .
train
false
51,776
def get_build_version(): prefix = 'MSC v.' i = string.find(sys.version, prefix) if (i == (-1)): return 6 i = (i + len(prefix)) (s, rest) = sys.version[i:].split(' ', 1) majorVersion = (int(s[:(-2)]) - 6) minorVersion = (int(s[2:3]) / 10.0) if (majorVersion == 6): minorVersion = 0 if (majorVersion >= 6): return (majorVersion + minorVersion) return None
[ "def", "get_build_version", "(", ")", ":", "prefix", "=", "'MSC v.'", "i", "=", "string", ".", "find", "(", "sys", ".", "version", ",", "prefix", ")", "if", "(", "i", "==", "(", "-", "1", ")", ")", ":", "return", "6", "i", "=", "(", "i", "+", ...
return the version of msvc that was used to build python .
train
false
51,777
def upgrade_center(): try: current = pkg_resources.get_distribution('rainbowstream').version url = 'https://raw.githubusercontent.com/DTVD/rainbowstream/master/setup.py' readme = requests.get(url).text latest = readme.split("version = '")[1].split("'")[0] g['using_latest'] = (current == latest) if (not g['using_latest']): notice = light_magenta('RainbowStream latest version is ') notice += light_green(latest) notice += light_magenta(' while your current version is ') notice += (light_yellow(current) + '\n') notice += light_magenta('You should upgrade with ') notice += light_green('pip install -U rainbowstream') else: notice = light_yellow('You are running latest version (') notice += light_green(current) notice += light_yellow(')') notice += '\n' printNicely(notice) except: pass
[ "def", "upgrade_center", "(", ")", ":", "try", ":", "current", "=", "pkg_resources", ".", "get_distribution", "(", "'rainbowstream'", ")", ".", "version", "url", "=", "'https://raw.githubusercontent.com/DTVD/rainbowstream/master/setup.py'", "readme", "=", "requests", "....
check latest and notify to upgrade .
train
false
51,779
@frappe.whitelist(allow_guest=True) def get_product_info(item_code): if (not is_cart_enabled()): return {} qty = 0 cart_quotation = _get_cart_quotation() template_item_code = frappe.db.get_value(u'Item', item_code, u'variant_of') in_stock = get_qty_in_stock(item_code, template_item_code) price = get_price(item_code, template_item_code, cart_quotation.selling_price_list) if price: price[u'formatted_price'] = fmt_money(price[u'price_list_rate'], currency=price[u'currency']) price[u'currency'] = (((not cint(frappe.db.get_default(u'hide_currency_symbol'))) and (frappe.db.get_value(u'Currency', price.currency, u'symbol') or price.currency)) or u'') if (frappe.session.user != u'Guest'): item = cart_quotation.get({u'item_code': item_code}) if item: qty = item[0].qty return {u'price': price, u'stock': in_stock, u'uom': frappe.db.get_value(u'Item', item_code, u'stock_uom'), u'qty': qty}
[ "@", "frappe", ".", "whitelist", "(", "allow_guest", "=", "True", ")", "def", "get_product_info", "(", "item_code", ")", ":", "if", "(", "not", "is_cart_enabled", "(", ")", ")", ":", "return", "{", "}", "qty", "=", "0", "cart_quotation", "=", "_get_cart_...
get product price / stock info .
train
false
51,780
def getDiagonalSwitchedTetragridByPolar(diagonals, unitPolar): diagonalSwitchedTetragrid = getIdentityTetragrid() for diagonal in diagonals: diagonalSwitchedTetragrid[diagonal][diagonal] = unitPolar.real diagonalSwitchedTetragrid[diagonals[0]][diagonals[1]] = (- unitPolar.imag) diagonalSwitchedTetragrid[diagonals[1]][diagonals[0]] = unitPolar.imag return diagonalSwitchedTetragrid
[ "def", "getDiagonalSwitchedTetragridByPolar", "(", "diagonals", ",", "unitPolar", ")", ":", "diagonalSwitchedTetragrid", "=", "getIdentityTetragrid", "(", ")", "for", "diagonal", "in", "diagonals", ":", "diagonalSwitchedTetragrid", "[", "diagonal", "]", "[", "diagonal",...
get the diagonals and switched matrix by unitpolar .
train
false
51,781
def _nftables_cmd(): return 'nft'
[ "def", "_nftables_cmd", "(", ")", ":", "return", "'nft'" ]
return correct command .
train
false
51,782
def traverse_pre_order(start_node, get_children, filter_func=None): return _traverse_generic(start_node, get_parents=None, get_children=get_children, filter_func=filter_func)
[ "def", "traverse_pre_order", "(", "start_node", ",", "get_children", ",", "filter_func", "=", "None", ")", ":", "return", "_traverse_generic", "(", "start_node", ",", "get_parents", "=", "None", ",", "get_children", "=", "get_children", ",", "filter_func", "=", ...
generator for yielding nodes of a tree in a pre-order sort .
train
false
51,784
def parse_proxy(proxy_str): proxy_dict = {} if (proxy_str is None): return if (u'@' in proxy_str): (user_pass, host_port) = proxy_str.split(u'@') else: (user_pass, host_port) = (u'', proxy_str) if (u':' in host_port): (host, port) = host_port.split(u':') (proxy_dict[u'proxy_host'], proxy_dict[u'proxy_port']) = (host, int(port)) if (u':' in user_pass): (proxy_dict[u'proxy_user'], proxy_dict[u'proxy_pass']) = user_pass.split(u':') return proxy_dict
[ "def", "parse_proxy", "(", "proxy_str", ")", ":", "proxy_dict", "=", "{", "}", "if", "(", "proxy_str", "is", "None", ")", ":", "return", "if", "(", "u'@'", "in", "proxy_str", ")", ":", "(", "user_pass", ",", "host_port", ")", "=", "proxy_str", ".", "...
parses proxy address user:pass@host:port into a dict suitable for httplib2 .
train
false
51,785
def check_ucd(ucd, check_controlled_vocabulary=False, has_colon=False): if (ucd is None): return True try: parse_ucd(ucd, check_controlled_vocabulary=check_controlled_vocabulary, has_colon=has_colon) except ValueError: return False return True
[ "def", "check_ucd", "(", "ucd", ",", "check_controlled_vocabulary", "=", "False", ",", "has_colon", "=", "False", ")", ":", "if", "(", "ucd", "is", "None", ")", ":", "return", "True", "try", ":", "parse_ucd", "(", "ucd", ",", "check_controlled_vocabulary", ...
returns false if *ucd* is not a valid unified content descriptor_ .
train
false
51,786
def _represent_yaml_str(self, node): return self.represent_scalar(node)
[ "def", "_represent_yaml_str", "(", "self", ",", "node", ")", ":", "return", "self", ".", "represent_scalar", "(", "node", ")" ]
represent for yaml .
train
false
51,787
def human_readable_filesize(b): thresh = 1024.0 if (b < thresh): return '{0:.1f} B'.format(b) units = ['KiB', 'MiB', 'GiB', 'TiB', 'PiB', 'EiB', 'ZiB', 'YiB'] u = 0 b /= thresh while (b >= thresh): b /= thresh u += 1 return '{0:.1f} {1:s}'.format(round(b, 1), units[u])
[ "def", "human_readable_filesize", "(", "b", ")", ":", "thresh", "=", "1024.0", "if", "(", "b", "<", "thresh", ")", ":", "return", "'{0:.1f} B'", ".", "format", "(", "b", ")", "units", "=", "[", "'KiB'", ",", "'MiB'", ",", "'GiB'", ",", "'TiB'", ",", ...
returns filesize in a human readable format .
train
false
51,788
def wait_until_appeared(browser, css_selector, timeout=10, frequency=1.0): wait_until_condition(browser, condition=(lambda x: x.driver.find_element_by_css_selector(css_selector).is_displayed()), timeout=timeout, frequency=frequency)
[ "def", "wait_until_appeared", "(", "browser", ",", "css_selector", ",", "timeout", "=", "10", ",", "frequency", "=", "1.0", ")", ":", "wait_until_condition", "(", "browser", ",", "condition", "=", "(", "lambda", "x", ":", "x", ".", "driver", ".", "find_ele...
wait until the element has appeared .
train
false
51,791
def num_repr(num): if (num <= 9999): return str(num) def digit_count(x): ' Return number of digits. ' return int((math.floor(math.log10(x)) + 1)) digits = digit_count(num) sig = (3 if ((digits % 3) == 0) else 2) rounded = int(round(num, int((sig - digits)))) digits = digit_count(rounded) suffix = '_kmBTqXYX'[((digits - 1) // 3)] front = (3 if ((digits % 3) == 0) else (digits % 3)) if (not (front == 1)): return (str(rounded)[0:front] + suffix) return (((str(rounded)[0] + '.') + str(rounded)[1]) + suffix)
[ "def", "num_repr", "(", "num", ")", ":", "if", "(", "num", "<=", "9999", ")", ":", "return", "str", "(", "num", ")", "def", "digit_count", "(", "x", ")", ":", "return", "int", "(", "(", "math", ".", "floor", "(", "math", ".", "log10", "(", "x",...
return up to four digit string representation of a number .
train
false
51,793
def Timer(*args, **kwargs): return _Timer(*args, **kwargs)
[ "def", "Timer", "(", "*", "args", ",", "**", "kwargs", ")", ":", "return", "_Timer", "(", "*", "args", ",", "**", "kwargs", ")" ]
factory function to create a timer object .
train
false
51,794
def handleargs(arglist): try: if (len(arglist) == 1): return (0, int(arglist[0]), 1) elif (len(arglist) == 2): return (int(arglist[0]), int(arglist[1]), 1) elif (len(arglist) == 3): if (arglist[2] == 0): raise ValueError('step argument must not be zero') return tuple((int(x) for x in arglist)) else: raise TypeError('range() accepts 1-3 arguments, given', len(arglist)) except TypeError: raise TypeError('range() arguments must be numbers or strings representing numbers')
[ "def", "handleargs", "(", "arglist", ")", ":", "try", ":", "if", "(", "len", "(", "arglist", ")", "==", "1", ")", ":", "return", "(", "0", ",", "int", "(", "arglist", "[", "0", "]", ")", ",", "1", ")", "elif", "(", "len", "(", "arglist", ")",...
take list of arguments and extract/create proper start .
train
false
51,795
def to_tree(expr, names=None): if isinstance(expr, slice): return {u'op': u'slice', u'args': [to_tree(arg, names=names) for arg in [expr.start, expr.stop, expr.step]]} if (names and (expr in names)): return names[expr] if isinstance(expr, tuple): return [to_tree(arg, names=names) for arg in expr] if isinstance(expr, expr_utils._slice): return to_tree(expr.as_slice(), names=names) elif isinstance(expr, _Data): return to_tree(symbol(u8(expr._name), expr.dshape), names) elif isinstance(expr, Expr): return {u'op': u8(type(expr).__name__), u'args': [to_tree(arg, names) for arg in expr._args]} elif isinstance(expr, str): return u8(expr) else: return expr
[ "def", "to_tree", "(", "expr", ",", "names", "=", "None", ")", ":", "if", "isinstance", "(", "expr", ",", "slice", ")", ":", "return", "{", "u'op'", ":", "u'slice'", ",", "u'args'", ":", "[", "to_tree", "(", "arg", ",", "names", "=", "names", ")", ...
represent blaze expression with core data structures transform a blaze expression into a form using only strings .
train
false
51,796
def write_hyperlinks(worksheet): if (not worksheet.hyperlinks): return tag = Element('hyperlinks') for cell in worksheet.hyperlinks: link = cell.hyperlink link.ref = cell.coordinate rel = Relationship(type='hyperlink', targetMode='External', target=link.target) worksheet._rels.append(rel) link.id = 'rId{0}'.format(len(worksheet._rels)) tag.append(link.to_tree()) return tag
[ "def", "write_hyperlinks", "(", "worksheet", ")", ":", "if", "(", "not", "worksheet", ".", "hyperlinks", ")", ":", "return", "tag", "=", "Element", "(", "'hyperlinks'", ")", "for", "cell", "in", "worksheet", ".", "hyperlinks", ":", "link", "=", "cell", "...
write worksheet hyperlinks to xml .
train
false
51,797
def _decode_value(stored_value, flags, do_unpickle): assert isinstance(stored_value, str) assert isinstance(flags, (int, long)) type_number = (flags & FLAG_TYPE_MASK) value = stored_value if (type_number == TYPE_STR): return value elif (type_number == TYPE_UNICODE): return value.decode('utf-8') elif (type_number == TYPE_PICKLED): return do_unpickle(value) elif (type_number == TYPE_BOOL): return bool(int(value)) elif (type_number == TYPE_INT): return int(value) elif (type_number == TYPE_LONG): return long(value) else: assert False, 'Unknown stored type' assert False, "Shouldn't get here."
[ "def", "_decode_value", "(", "stored_value", ",", "flags", ",", "do_unpickle", ")", ":", "assert", "isinstance", "(", "stored_value", ",", "str", ")", "assert", "isinstance", "(", "flags", ",", "(", "int", ",", "long", ")", ")", "type_number", "=", "(", ...
utility function for decoding values retrieved from memcache .
train
false
51,798
def color_print(msg): clrmsg = ((clrgreen + msg) + clrnull) print clrmsg
[ "def", "color_print", "(", "msg", ")", ":", "clrmsg", "=", "(", "(", "clrgreen", "+", "msg", ")", "+", "clrnull", ")", "print", "clrmsg" ]
add color to this print output .
train
false
51,799
def relpath(path, start='.'): if ((sys.version_info < (2, 7)) and ('posix' in sys.builtin_module_names)): if (not path): raise ValueError('no path specified') start_list = [x for x in os.path.abspath(start).split(os.path.sep) if x] path_list = [x for x in os.path.abspath(path).split(os.path.sep) if x] i = len(os.path.commonprefix([start_list, path_list])) rel_list = (([os.path.pardir] * (len(start_list) - i)) + path_list[i:]) if (not rel_list): return os.path.curdir return os.path.join(*rel_list) return os.path.relpath(path, start=start)
[ "def", "relpath", "(", "path", ",", "start", "=", "'.'", ")", ":", "if", "(", "(", "sys", ".", "version_info", "<", "(", "2", ",", "7", ")", ")", "and", "(", "'posix'", "in", "sys", ".", "builtin_module_names", ")", ")", ":", "if", "(", "not", ...
work around python bug #5117 .
train
false
51,801
def _TestFakeAuthViewfinderUser(action, tester, user_dict, device_dict=None, user_cookie=None): if ('email' in user_dict): ident_dict = {'key': ('Email:%s' % user_dict['email']), 'authority': 'FakeViewfinder'} else: ident_dict = {'key': ('Phone:%s' % user_dict['phone']), 'authority': 'FakeViewfinder'} response = _AuthViewfinderUser(tester, action, user_dict, ident_dict, device_dict, user_cookie) return auth_test._ValidateAuthUser(tester, action, user_dict, ident_dict, device_dict, user_cookie, response)
[ "def", "_TestFakeAuthViewfinderUser", "(", "action", ",", "tester", ",", "user_dict", ",", "device_dict", "=", "None", ",", "user_cookie", "=", "None", ")", ":", "if", "(", "'email'", "in", "user_dict", ")", ":", "ident_dict", "=", "{", "'key'", ":", "(", ...
called by the servicetester in order to test login/fakeviewfinder and register/fakeviewfinder service endpoints .
train
false
51,802
def hypermedia_out(): request = cherrypy.serving.request request._hypermedia_inner_handler = request.handler request.handler = hypermedia_handler
[ "def", "hypermedia_out", "(", ")", ":", "request", "=", "cherrypy", ".", "serving", ".", "request", "request", ".", "_hypermedia_inner_handler", "=", "request", ".", "handler", "request", ".", "handler", "=", "hypermedia_handler" ]
determine the best handler for the requested content type wrap the normal handler and transform the output from that handler into the requested content type .
train
true
51,803
def write_mesh(fname, vertices, faces, normals, texcoords, name='', format='obj', overwrite=False, reshape_faces=True): if (op.isfile(fname) and (not overwrite)): raise IOError(('file "%s" exists, use overwrite=True' % fname)) if (format not in 'obj'): raise ValueError('Only "obj" format writing currently supported') WavefrontWriter.write(fname, vertices, faces, normals, texcoords, name, reshape_faces)
[ "def", "write_mesh", "(", "fname", ",", "vertices", ",", "faces", ",", "normals", ",", "texcoords", ",", "name", "=", "''", ",", "format", "=", "'obj'", ",", "overwrite", "=", "False", ",", "reshape_faces", "=", "True", ")", ":", "if", "(", "op", "."...
write mesh data to file .
train
true
51,805
def ghd(ref, hyp, ins_cost=2.0, del_cost=2.0, shift_cost_coeff=1.0, boundary='1'): ref_idx = [i for (i, val) in enumerate(ref) if (val == boundary)] hyp_idx = [i for (i, val) in enumerate(hyp) if (val == boundary)] nref_bound = len(ref_idx) nhyp_bound = len(hyp_idx) if ((nref_bound == 0) and (nhyp_bound == 0)): return 0.0 elif ((nref_bound > 0) and (nhyp_bound == 0)): return (nref_bound * ins_cost) elif ((nref_bound == 0) and (nhyp_bound > 0)): return (nhyp_bound * del_cost) mat = _init_mat((nhyp_bound + 1), (nref_bound + 1), ins_cost, del_cost) _ghd_aux(mat, hyp_idx, ref_idx, ins_cost, del_cost, shift_cost_coeff) return mat[((-1), (-1))]
[ "def", "ghd", "(", "ref", ",", "hyp", ",", "ins_cost", "=", "2.0", ",", "del_cost", "=", "2.0", ",", "shift_cost_coeff", "=", "1.0", ",", "boundary", "=", "'1'", ")", ":", "ref_idx", "=", "[", "i", "for", "(", "i", ",", "val", ")", "in", "enumera...
compute the generalized hamming distance for a reference and a hypothetical segmentation .
train
false
51,807
def parse_comma_separated(tokens): tokens = [token for token in tokens if (token.type != u'S')] if (not tokens): return [] if (((len(tokens) % 2) == 1) and all((((token.type == u'DELIM') and (token.value == u',')) for token in tokens[1::2]))): return tokens[::2]
[ "def", "parse_comma_separated", "(", "tokens", ")", ":", "tokens", "=", "[", "token", "for", "token", "in", "tokens", "if", "(", "token", ".", "type", "!=", "u'S'", ")", "]", "if", "(", "not", "tokens", ")", ":", "return", "[", "]", "if", "(", "(",...
parse a list of tokens as arguments made of a single token each .
train
false
51,808
def pyext_coms(platform): if (platform == 'win32'): pyext_cccom = '$PYEXTCC /Fo$TARGET /c $PYEXTCCSHARED $PYEXTCFLAGS $PYEXTCCFLAGS $_CCCOMCOM $_PYEXTCPPINCFLAGS $SOURCES' pyext_cxxcom = '$PYEXTCXX /Fo$TARGET /c $PYEXTCSHARED $PYEXTCXXFLAGS $PYEXTCCFLAGS $_CCCOMCOM $_PYEXTCPPINCFLAGS $SOURCES' pyext_linkcom = '${TEMPFILE("$PYEXTLINK $PYEXTLINKFLAGS /OUT:$TARGET.windows $( $_LIBDIRFLAGS $) $_LIBFLAGS $_PYEXTRUNTIME $SOURCES.windows")}' else: pyext_cccom = '$PYEXTCC -o $TARGET -c $PYEXTCCSHARED $PYEXTCFLAGS $PYEXTCCFLAGS $_CCCOMCOM $_PYEXTCPPINCFLAGS $SOURCES' pyext_cxxcom = '$PYEXTCXX -o $TARGET -c $PYEXTCSHARED $PYEXTCXXFLAGS $PYEXTCCFLAGS $_CCCOMCOM $_PYEXTCPPINCFLAGS $SOURCES' pyext_linkcom = '$PYEXTLINK -o $TARGET $PYEXTLINKFLAGS $SOURCES $_LIBDIRFLAGS $_LIBFLAGS $_PYEXTRUNTIME' if (platform == 'darwin'): pyext_linkcom += ' $_FRAMEWORKPATH $_FRAMEWORKS $FRAMEWORKSFLAGS' return (pyext_cccom, pyext_cxxcom, pyext_linkcom)
[ "def", "pyext_coms", "(", "platform", ")", ":", "if", "(", "platform", "==", "'win32'", ")", ":", "pyext_cccom", "=", "'$PYEXTCC /Fo$TARGET /c $PYEXTCCSHARED $PYEXTCFLAGS $PYEXTCCFLAGS $_CCCOMCOM $_PYEXTCPPINCFLAGS $SOURCES'", "pyext_cxxcom", "=", "'$PYEXTCXX /Fo$TARGET /c $PYEXT...
return pyextcccom .
train
false
51,809
def _iface_hdlr(iface_node): return True
[ "def", "_iface_hdlr", "(", "iface_node", ")", ":", "return", "True" ]
a handler function used by interfaces to handle suspicious interface nodes .
train
false
51,810
def find_cmd(cmd): path = py3compat.which(cmd) if (path is None): raise FindCmdError(('command could not be found: %s' % cmd)) return path
[ "def", "find_cmd", "(", "cmd", ")", ":", "path", "=", "py3compat", ".", "which", "(", "cmd", ")", "if", "(", "path", "is", "None", ")", ":", "raise", "FindCmdError", "(", "(", "'command could not be found: %s'", "%", "cmd", ")", ")", "return", "path" ]
find absolute path to executable cmd in a cross platform manner .
train
false
51,811
def parse_repl_hex_escape(source, expected_len, type): digits = [] for i in range(expected_len): ch = source.get() if (ch not in HEX_DIGITS): raise error(('incomplete escape \\%s%s' % (type, ''.join(digits))), source.string, source.pos) digits.append(ch) return int(''.join(digits), 16)
[ "def", "parse_repl_hex_escape", "(", "source", ",", "expected_len", ",", "type", ")", ":", "digits", "=", "[", "]", "for", "i", "in", "range", "(", "expected_len", ")", ":", "ch", "=", "source", ".", "get", "(", ")", "if", "(", "ch", "not", "in", "...
parses a hex escape sequence in a replacement string .
train
false
51,814
def hrm_experience_controller(): if (current.session.s3.hrm.mode is not None): current.session.error = current.T('Access denied') redirect(URL(f='index')) def prep(r): if (r.method in ('create', 'update')): field = current.s3db.hrm_experience.person_id person_id = current.request.get_vars.get('~.person_id', None) if person_id: field.default = person_id field.readable = field.writable = False elif (r.method == 'update'): refresh = r.get_vars.get('refresh') if (refresh and refresh.startswith('profile-list-hrm_experience')): field.readable = field.writable = False return True current.response.s3.prep = prep output = current.rest_controller('hrm', 'experience') return output
[ "def", "hrm_experience_controller", "(", ")", ":", "if", "(", "current", ".", "session", ".", "s3", ".", "hrm", ".", "mode", "is", "not", "None", ")", ":", "current", ".", "session", ".", "error", "=", "current", ".", "T", "(", "'Access denied'", ")", ...
experience controller .
train
false
51,816
def make_istatechange_tests(klass, kwargs1, kwargs2): def instance(kwargs): if isinstance(kwargs, dict): return klass(**kwargs) return klass(**kwargs()) class Tests(make_comparison_tests(klass, kwargs1, kwargs2), ): def test_interface(self): '\n The class implements ``IStateChange``.\n ' self.assertTrue(verifyObject(IStateChange, instance(kwargs1))) Tests.__name__ = (klass.__name__ + 'IStateChangeTests') return Tests
[ "def", "make_istatechange_tests", "(", "klass", ",", "kwargs1", ",", "kwargs2", ")", ":", "def", "instance", "(", "kwargs", ")", ":", "if", "isinstance", "(", "kwargs", ",", "dict", ")", ":", "return", "klass", "(", "**", "kwargs", ")", "return", "klass"...
create tests to verify a class provides istatechange .
train
false
51,818
@pytest.fixture def forum_locked(category, default_settings): forum = Forum(title='Test Forum', category_id=category.id) forum.locked = True forum.save() return forum
[ "@", "pytest", ".", "fixture", "def", "forum_locked", "(", "category", ",", "default_settings", ")", ":", "forum", "=", "Forum", "(", "title", "=", "'Test Forum'", ",", "category_id", "=", "category", ".", "id", ")", "forum", ".", "locked", "=", "True", ...
a single locked forum in a category .
train
false
51,819
def get_course_chapters(course_key): if (course_key is None): return try: course_obj = CourseStructure.objects.get(course_id=course_key) except CourseStructure.DoesNotExist: return course_struct = course_obj.structure try: return course_struct['blocks'][course_struct['root']].get('children', []) except KeyError: return []
[ "def", "get_course_chapters", "(", "course_key", ")", ":", "if", "(", "course_key", "is", "None", ")", ":", "return", "try", ":", "course_obj", "=", "CourseStructure", ".", "objects", ".", "get", "(", "course_id", "=", "course_key", ")", "except", "CourseStr...
extracts the chapters from a course structure .
train
false
51,820
def _list_samples(meter, project=None, resource=None, source=None, user=None): q_ts = _get_query_timestamps(flask.request.args) f = storage.SampleFilter(user=user, project=project, source=source, meter=meter, resource=resource, start=q_ts['start_timestamp'], end=q_ts['end_timestamp'], metaquery=_get_metaquery(flask.request.args)) samples = flask.request.storage_conn.get_samples(f) jsonified = flask.jsonify(events=[s.as_dict() for s in samples]) if request_wants_html(): return flask.templating.render_template('list_event.html', user=user, project=project, source=source, meter=meter, resource=resource, events=jsonified) return jsonified
[ "def", "_list_samples", "(", "meter", ",", "project", "=", "None", ",", "resource", "=", "None", ",", "source", "=", "None", ",", "user", "=", "None", ")", ":", "q_ts", "=", "_get_query_timestamps", "(", "flask", ".", "request", ".", "args", ")", "f", ...
return a list of raw samples .
train
false
51,821
def icreate_animations(figure, filename=None, sharing='public', auto_open=False): url = create_animations(figure, filename, sharing, auto_open) if isinstance(figure, dict): layout = figure.get('layout', {}) else: layout = {} embed_options = dict() embed_options['width'] = layout.get('width', '100%') embed_options['height'] = layout.get('height', 525) try: float(embed_options['width']) except (ValueError, TypeError): pass else: embed_options['width'] = (str(embed_options['width']) + 'px') try: float(embed_options['height']) except (ValueError, TypeError): pass else: embed_options['height'] = (str(embed_options['height']) + 'px') return tools.embed(url, **embed_options)
[ "def", "icreate_animations", "(", "figure", ",", "filename", "=", "None", ",", "sharing", "=", "'public'", ",", "auto_open", "=", "False", ")", ":", "url", "=", "create_animations", "(", "figure", ",", "filename", ",", "sharing", ",", "auto_open", ")", "if...
create a unique url for this animated plot in plotly and open in ipython .
train
false
51,822
def locationUpdatingRequest(): a = TpPd(pd=5) b = MessageType(mesType=8) c = LocationUpdatingTypeAndCiphKeySeqNr() e = LocalAreaId() f = MobileStationClassmark1() g = MobileId() packet = (((((a / b) / c) / e) / f) / g) return packet
[ "def", "locationUpdatingRequest", "(", ")", ":", "a", "=", "TpPd", "(", "pd", "=", "5", ")", "b", "=", "MessageType", "(", "mesType", "=", "8", ")", "c", "=", "LocationUpdatingTypeAndCiphKeySeqNr", "(", ")", "e", "=", "LocalAreaId", "(", ")", "f", "=",...
location updating request section 9 .
train
true
51,823
def post_save_layer(instance, sender, **kwargs): ModelDescription.objects.filter(name=instance.name).update(layer=instance)
[ "def", "post_save_layer", "(", "instance", ",", "sender", ",", "**", "kwargs", ")", ":", "ModelDescription", ".", "objects", ".", "filter", "(", "name", "=", "instance", ".", "name", ")", ".", "update", "(", "layer", "=", "instance", ")" ]
assign layer instance to the dynamic model .
train
false
51,824
def is_undefined(obj): from jinja2.runtime import Undefined return isinstance(obj, Undefined)
[ "def", "is_undefined", "(", "obj", ")", ":", "from", "jinja2", ".", "runtime", "import", "Undefined", "return", "isinstance", "(", "obj", ",", "Undefined", ")" ]
check if the object passed is undefined .
train
false
51,825
def POSform(variables, minterms, dontcares=None): variables = [sympify(v) for v in variables] if (minterms == []): return false minterms = [list(i) for i in minterms] dontcares = [list(i) for i in (dontcares or [])] for d in dontcares: if (d in minterms): raise ValueError(('%s in minterms is also in dontcares' % d)) maxterms = [] for t in product([0, 1], repeat=len(variables)): t = list(t) if ((t not in minterms) and (t not in dontcares)): maxterms.append(t) old = None new = (maxterms + dontcares) while (new != old): old = new new = _simplified_pairs(old) essential = _rem_redundancy(new, maxterms) return And(*[_convert_to_varsPOS(x, variables) for x in essential])
[ "def", "POSform", "(", "variables", ",", "minterms", ",", "dontcares", "=", "None", ")", ":", "variables", "=", "[", "sympify", "(", "v", ")", "for", "v", "in", "variables", "]", "if", "(", "minterms", "==", "[", "]", ")", ":", "return", "false", "...
the posform function uses simplified_pairs and a redundant-group eliminating algorithm to convert the list of all input combinations that generate 1 into the smallest product of sums form .
train
false
51,826
def test_log_sum_exp_1(): rng = np.random.RandomState([2015, 2, 9]) x = (1.0 + (rng.randn(5) / 10.0)) naive = np.log(np.exp(x).sum()) x = sharedX(x) stable = log_sum_exp(x).eval() assert np.allclose(naive, stable)
[ "def", "test_log_sum_exp_1", "(", ")", ":", "rng", "=", "np", ".", "random", ".", "RandomState", "(", "[", "2015", ",", "2", ",", "9", "]", ")", "x", "=", "(", "1.0", "+", "(", "rng", ".", "randn", "(", "5", ")", "/", "10.0", ")", ")", "naive...
tests that the stable log sum exp matches the naive one for values near 1 .
train
false
51,828
def GetClientURNFromPath(path): try: return ClientURN(path.split('/')[1]) except (type_info.TypeValueError, IndexError): return None
[ "def", "GetClientURNFromPath", "(", "path", ")", ":", "try", ":", "return", "ClientURN", "(", "path", ".", "split", "(", "'/'", ")", "[", "1", "]", ")", "except", "(", "type_info", ".", "TypeValueError", ",", "IndexError", ")", ":", "return", "None" ]
extracts the client id from the path .
train
false
51,831
def _count_contributors_in_range(querysets, users, date_range): (start, end) = date_range retained_users = set() for (queryset, fields) in querysets: for field in fields: filters = {('%s__in' % field): users, 'created__gte': start, 'created__lt': end} retained_users |= set((getattr(o, field) for o in queryset.filter(**filters))) return len(retained_users)
[ "def", "_count_contributors_in_range", "(", "querysets", ",", "users", ",", "date_range", ")", ":", "(", "start", ",", "end", ")", "=", "date_range", "retained_users", "=", "set", "(", ")", "for", "(", "queryset", ",", "fields", ")", "in", "querysets", ":"...
of the group users .
train
false
51,832
def getNewRepository(): return ExportRepository()
[ "def", "getNewRepository", "(", ")", ":", "return", "ExportRepository", "(", ")" ]
get the repository constructor .
train
false
51,833
def get_evennia_version(): import evennia return evennia.__version__
[ "def", "get_evennia_version", "(", ")", ":", "import", "evennia", "return", "evennia", ".", "__version__" ]
helper method for getting the current evennia version .
train
false
51,834
def get_parallels(bezier2, width): (c1x, c1y) = bezier2[0] (cmx, cmy) = bezier2[1] (c2x, c2y) = bezier2[2] (cos_t1, sin_t1) = get_cos_sin(c1x, c1y, cmx, cmy) (cos_t2, sin_t2) = get_cos_sin(cmx, cmy, c2x, c2y) (c1x_left, c1y_left, c1x_right, c1y_right) = get_normal_points(c1x, c1y, cos_t1, sin_t1, width) (c2x_left, c2y_left, c2x_right, c2y_right) = get_normal_points(c2x, c2y, cos_t2, sin_t2, width) (cmx_left, cmy_left) = get_intersection(c1x_left, c1y_left, cos_t1, sin_t1, c2x_left, c2y_left, cos_t2, sin_t2) (cmx_right, cmy_right) = get_intersection(c1x_right, c1y_right, cos_t1, sin_t1, c2x_right, c2y_right, cos_t2, sin_t2) path_left = [(c1x_left, c1y_left), (cmx_left, cmy_left), (c2x_left, c2y_left)] path_right = [(c1x_right, c1y_right), (cmx_right, cmy_right), (c2x_right, c2y_right)] return (path_left, path_right)
[ "def", "get_parallels", "(", "bezier2", ",", "width", ")", ":", "(", "c1x", ",", "c1y", ")", "=", "bezier2", "[", "0", "]", "(", "cmx", ",", "cmy", ")", "=", "bezier2", "[", "1", "]", "(", "c2x", ",", "c2y", ")", "=", "bezier2", "[", "2", "]"...
given the quadraitc bezier control points *bezier2* .
train
false
51,836
def _scalar_to_format(value): for type_ in (int, float, complex): try: value = type_(value) break except ValueError: continue numpy_dtype_str = np.min_scalar_type(value).str numpy_dtype_str = numpy_dtype_str[1:] try: fits_format = NUMPY2FITS[numpy_dtype_str] return FITSUPCONVERTERS.get(fits_format, fits_format) except KeyError: return ('A' + str(len(value)))
[ "def", "_scalar_to_format", "(", "value", ")", ":", "for", "type_", "in", "(", "int", ",", "float", ",", "complex", ")", ":", "try", ":", "value", "=", "type_", "(", "value", ")", "break", "except", "ValueError", ":", "continue", "numpy_dtype_str", "=", ...
given a scalar value or string .
train
false
51,837
def simple_python_completion(): python_completion = [] python_completion += builtin_module_names python_completion += tuple(dir(__builtins__)) python_completion += [module_name[1] for module_name in iter_modules()] try: python_completion += tuple(__builtins__.__dict__.keys()) except: pass python_completion = tuple(sorted(set(python_completion))) return python_completion
[ "def", "simple_python_completion", "(", ")", ":", "python_completion", "=", "[", "]", "python_completion", "+=", "builtin_module_names", "python_completion", "+=", "tuple", "(", "dir", "(", "__builtins__", ")", ")", "python_completion", "+=", "[", "module_name", "["...
return tuple of strings containing python words for simple completion .
train
false
51,839
def extend_element(element, base): u' Recursion is needed if the extension base itself extends another element.' if isinstance(base, dict): for (i, kk) in enumerate(base): if isinstance(base, Struct): element.insert(kk, base[kk], i) if (isinstance(base, Struct) and base.namespaces and kk): element.namespaces[kk] = base.namespaces[kk] element.references[kk] = base.references[kk] if base.refers_to: extend_element(element, base.refers_to)
[ "def", "extend_element", "(", "element", ",", "base", ")", ":", "if", "isinstance", "(", "base", ",", "dict", ")", ":", "for", "(", "i", ",", "kk", ")", "in", "enumerate", "(", "base", ")", ":", "if", "isinstance", "(", "base", ",", "Struct", ")", ...
recursively extend the elemnet if it has an extension base .
train
false
51,841
def dmp_zz_wang_non_divisors(E, cs, ct, K): result = [(cs * ct)] for q in E: q = abs(q) for r in reversed(result): while (r != 1): r = K.gcd(r, q) q = (q // r) if K.is_one(q): return None result.append(q) return result[1:]
[ "def", "dmp_zz_wang_non_divisors", "(", "E", ",", "cs", ",", "ct", ",", "K", ")", ":", "result", "=", "[", "(", "cs", "*", "ct", ")", "]", "for", "q", "in", "E", ":", "q", "=", "abs", "(", "q", ")", "for", "r", "in", "reversed", "(", "result"...
wang/eez: compute a set of valid divisors .
train
false
51,842
def invalid_marker(text): try: evaluate_marker(text) except SyntaxError as e: e.filename = None e.lineno = None return e return False
[ "def", "invalid_marker", "(", "text", ")", ":", "try", ":", "evaluate_marker", "(", "text", ")", "except", "SyntaxError", "as", "e", ":", "e", ".", "filename", "=", "None", "e", ".", "lineno", "=", "None", "return", "e", "return", "False" ]
validate text as a pep 508 environment marker; return an exception if invalid or false otherwise .
train
true
51,843
def parse_qsd(data, name='query string', exception=PluginError, schema=None, **params): value = dict(parse_qsl(data, **params)) if schema: value = schema.validate(value, name=name, exception=exception) return value
[ "def", "parse_qsd", "(", "data", ",", "name", "=", "'query string'", ",", "exception", "=", "PluginError", ",", "schema", "=", "None", ",", "**", "params", ")", ":", "value", "=", "dict", "(", "parse_qsl", "(", "data", ",", "**", "params", ")", ")", ...
parses a query string into a dict .
train
true
51,844
def _check_plural_is_ambiguous(plural_arg): if _STRING_RE.match(plural_arg): return False fn_match = _FUNCTION_RE.match(plural_arg) if (fn_match and (fn_match.group(1) in _functions)): return False return True
[ "def", "_check_plural_is_ambiguous", "(", "plural_arg", ")", ":", "if", "_STRING_RE", ".", "match", "(", "plural_arg", ")", ":", "return", "False", "fn_match", "=", "_FUNCTION_RE", ".", "match", "(", "plural_arg", ")", "if", "(", "fn_match", "and", "(", "fn_...
check to see if a string is ambiguously named .
train
false
51,845
def Print(file): finder = _getfinder() fss = Carbon.File.FSSpec(file) return finder._print(fss)
[ "def", "Print", "(", "file", ")", ":", "finder", "=", "_getfinder", "(", ")", "fss", "=", "Carbon", ".", "File", ".", "FSSpec", "(", "file", ")", "return", "finder", ".", "_print", "(", "fss", ")" ]
print a file thru the finder .
train
false
51,846
def is_writeable(path, check_parent=False): if (os.access(path, os.F_OK) and os.access(path, os.W_OK)): return True if (os.access(path, os.F_OK) and (not os.access(path, os.W_OK))): return False if (check_parent is False): return False parent_dir = os.path.dirname(path) if (not os.access(parent_dir, os.F_OK)): return False return os.access(parent_dir, os.W_OK)
[ "def", "is_writeable", "(", "path", ",", "check_parent", "=", "False", ")", ":", "if", "(", "os", ".", "access", "(", "path", ",", "os", ".", "F_OK", ")", "and", "os", ".", "access", "(", "path", ",", "os", ".", "W_OK", ")", ")", ":", "return", ...
check if a given path is writeable by the current user .
train
true
51,847
def _get_id_token_user(token, audiences, allowed_client_ids, time_now, cache): try: parsed_token = _verify_signed_jwt_with_certs(token, time_now, cache) except _AppIdentityError as e: logging.warning('id_token verification failed: %s', e) return None except: logging.warning('id_token verification failed.') return None if _verify_parsed_token(parsed_token, audiences, allowed_client_ids): email = parsed_token['email'] return users.User(email)
[ "def", "_get_id_token_user", "(", "token", ",", "audiences", ",", "allowed_client_ids", ",", "time_now", ",", "cache", ")", ":", "try", ":", "parsed_token", "=", "_verify_signed_jwt_with_certs", "(", "token", ",", "time_now", ",", "cache", ")", "except", "_AppId...
get a user for the given id token .
train
false
51,850
def decodeHostPort(line): abcdef = re.sub('[^0-9, ]', '', line) parsed = [int(p.strip()) for p in abcdef.split(',')] for x in parsed: if ((x < 0) or (x > 255)): raise ValueError('Out of range', line, x) (a, b, c, d, e, f) = parsed host = ('%s.%s.%s.%s' % (a, b, c, d)) port = ((int(e) << 8) + int(f)) return (host, port)
[ "def", "decodeHostPort", "(", "line", ")", ":", "abcdef", "=", "re", ".", "sub", "(", "'[^0-9, ]'", ",", "''", ",", "line", ")", "parsed", "=", "[", "int", "(", "p", ".", "strip", "(", ")", ")", "for", "p", "in", "abcdef", ".", "split", "(", "'...
decode an ftp response specifying a host and port .
train
false
51,852
def _download(quidditch, retries=5): passed = False clean = True comment = '' while (not passed): log.debug('Downloading. tries left: {0}'.format(str(retries))) passed = quidditch.Download() log.debug('Done downloading: {0}'.format(str(passed))) if isinstance(passed, Exception): clean = False comment += 'Failed while trying to download updates:\n DCTB DCTB {0}\n'.format(str(passed)) retries -= 1 if retries: comment += '{0} tries to go. retrying\n'.format(str(retries)) passed = False else: comment += 'out of retries. this update round failed.\n' return (comment, False, retries) if clean: comment += 'Download was done without error.\n' return (comment, True, retries)
[ "def", "_download", "(", "quidditch", ",", "retries", "=", "5", ")", ":", "passed", "=", "False", "clean", "=", "True", "comment", "=", "''", "while", "(", "not", "passed", ")", ":", "log", ".", "debug", "(", "'Downloading. tries left: {0}'", ".", "forma...
wrapper method that retries a download and reports the outcome .
train
false
51,853
def parse_netntlm(authenticate_header, authorization_header, headers, ack, seq): if (authenticate_header != None): chal_header = authenticate_header.group() parse_netntlm_chal(headers, chal_header, ack) elif (authorization_header != None): resp_header = authorization_header.group() msg = parse_netntlm_resp_msg(headers, resp_header, seq) if (msg != None): return msg
[ "def", "parse_netntlm", "(", "authenticate_header", ",", "authorization_header", ",", "headers", ",", "ack", ",", "seq", ")", ":", "if", "(", "authenticate_header", "!=", "None", ")", ":", "chal_header", "=", "authenticate_header", ".", "group", "(", ")", "par...
parse ntlm hashes out of http authentication headers .
train
false
51,854
def test_copy_function_doc_to_method_doc(): def f1(object, a, b, c): 'Docstring for f1\n\n Parameters\n ----------\n object : object\n Some object. This description also has\n\n blank lines in it.\n a : int\n Parameter a\n b : int\n Parameter b\n ' pass def f2(object): 'Docstring for f2\n\n Parameters\n ----------\n object : object\n Only one parameter\n\n Returns\n -------\n nothing.\n ' pass def f3(object): 'Docstring for f3\n\n Parameters\n ----------\n object : object\n Only one parameter\n ' pass def f4(object): 'Docstring for f4' pass def f5(object): 'Docstring for f5\n\n Parameters\n ----------\n Returns\n -------\n nothing.\n ' pass class A: @copy_function_doc_to_method_doc(f1) def method_f1(self, a, b, c): pass @copy_function_doc_to_method_doc(f2) def method_f2(self): 'method_f3 own docstring' pass @copy_function_doc_to_method_doc(f3) def method_f3(self): pass assert_equal(A.method_f1.__doc__, 'Docstring for f1\n\n Parameters\n ----------\n a : int\n Parameter a\n b : int\n Parameter b\n ') assert_equal(A.method_f2.__doc__, 'Docstring for f2\n\n Returns\n -------\n nothing.\n method_f3 own docstring') assert_equal(A.method_f3.__doc__, 'Docstring for f3\n\n ') assert_raises(ValueError, copy_function_doc_to_method_doc(f4), A.method_f1) assert_raises(ValueError, copy_function_doc_to_method_doc(f5), A.method_f1)
[ "def", "test_copy_function_doc_to_method_doc", "(", ")", ":", "def", "f1", "(", "object", ",", "a", ",", "b", ",", "c", ")", ":", "pass", "def", "f2", "(", "object", ")", ":", "pass", "def", "f3", "(", "object", ")", ":", "pass", "def", "f4", "(", ...
test decorator for re-using function docstring as method docstrings .
train
false
51,855
def decouple(prefix): dynamic.decouple(prefix) from calibre.gui2.widgets import history history.decouple(prefix)
[ "def", "decouple", "(", "prefix", ")", ":", "dynamic", ".", "decouple", "(", "prefix", ")", "from", "calibre", ".", "gui2", ".", "widgets", "import", "history", "history", ".", "decouple", "(", "prefix", ")" ]
ensure that config files used by utility code are not the same as those used by the main calibre gui .
train
false