text
stringlengths
89
104k
code_tokens
list
avg_line_len
float64
7.91
980
score
float64
0
630
def create_api(app_id=None, login=None, password=None, phone_number=None, scope='offline', api_version='5.92', http_params=None, interactive=False, service_token=None, client_secret=None, two_fa_supported=False, two_fa_force_sms=False): """Factory method to explicitly create API with app_id, login, password and phone_number parameters. If the app_id, login, password are not passed, then token-free session will be created automatically :param app_id: int: vk application id, more info: https://vk.com/dev/main :param login: str: vk login :param password: str: vk password :param phone_number: str: phone number with country code (+71234568990) :param scope: str or list of str: vk session scope :param api_version: str: vk api version, check https://vk.com/dev/versions :param interactive: bool: flag which indicates to use InteractiveVKSession :param service_token: str: new way of querying vk api, instead of getting oauth token :param http_params: dict: requests http parameters passed along :param client_secret: str: secure application key for Direct Authorization, more info: https://vk.com/dev/auth_direct :param two_fa_supported: bool: enable two-factor authentication for Direct Authorization, more info: https://vk.com/dev/auth_direct :param two_fa_force_sms: bool: force SMS two-factor authentication for Direct Authorization if two_fa_supported is True, more info: https://vk.com/dev/auth_direct :return: api instance :rtype : vk_requests.api.API """ session = VKSession(app_id=app_id, user_login=login, user_password=password, phone_number=phone_number, scope=scope, service_token=service_token, api_version=api_version, interactive=interactive, client_secret=client_secret, two_fa_supported = two_fa_supported, two_fa_force_sms=two_fa_force_sms) return API(session=session, http_params=http_params)
[ "def", "create_api", "(", "app_id", "=", "None", ",", "login", "=", "None", ",", "password", "=", "None", ",", "phone_number", "=", "None", ",", "scope", "=", "'offline'", ",", "api_version", "=", "'5.92'", ",", "http_params", "=", "None", ",", "interact...
52.731707
20.536585
def clear_distribute_compositions(self): """Removes the distribution rights. raise: NoAccess - ``Metadata.isRequired()`` is ``true`` or ``Metadata.isReadOnly()`` is ``true`` *compliance: mandatory -- This method must be implemented.* """ # Implemented from template for osid.resource.ResourceForm.clear_group_template if (self.get_distribute_compositions_metadata().is_read_only() or self.get_distribute_compositions_metadata().is_required()): raise errors.NoAccess() self._my_map['distributeCompositions'] = self._distribute_compositions_default
[ "def", "clear_distribute_compositions", "(", "self", ")", ":", "# Implemented from template for osid.resource.ResourceForm.clear_group_template", "if", "(", "self", ".", "get_distribute_compositions_metadata", "(", ")", ".", "is_read_only", "(", ")", "or", "self", ".", "get...
49
24.076923
def _dt_array_cmp(cls, op): """ Wrap comparison operations to convert datetime-like to datetime64 """ opname = '__{name}__'.format(name=op.__name__) nat_result = opname == '__ne__' def wrapper(self, other): if isinstance(other, (ABCDataFrame, ABCSeries, ABCIndexClass)): return NotImplemented other = lib.item_from_zerodim(other) if isinstance(other, (datetime, np.datetime64, str)): if isinstance(other, (datetime, np.datetime64)): # GH#18435 strings get a pass from tzawareness compat self._assert_tzawareness_compat(other) try: other = _to_M8(other, tz=self.tz) except ValueError: # string that cannot be parsed to Timestamp return ops.invalid_comparison(self, other, op) result = op(self.asi8, other.view('i8')) if isna(other): result.fill(nat_result) elif lib.is_scalar(other) or np.ndim(other) == 0: return ops.invalid_comparison(self, other, op) elif len(other) != len(self): raise ValueError("Lengths must match") else: if isinstance(other, list): try: other = type(self)._from_sequence(other) except ValueError: other = np.array(other, dtype=np.object_) elif not isinstance(other, (np.ndarray, ABCIndexClass, ABCSeries, DatetimeArray)): # Following Timestamp convention, __eq__ is all-False # and __ne__ is all True, others raise TypeError. return ops.invalid_comparison(self, other, op) if is_object_dtype(other): # We have to use _comp_method_OBJECT_ARRAY instead of numpy # comparison otherwise it would fail to raise when # comparing tz-aware and tz-naive with np.errstate(all='ignore'): result = ops._comp_method_OBJECT_ARRAY(op, self.astype(object), other) o_mask = isna(other) elif not (is_datetime64_dtype(other) or is_datetime64tz_dtype(other)): # e.g. is_timedelta64_dtype(other) return ops.invalid_comparison(self, other, op) else: self._assert_tzawareness_compat(other) if isinstance(other, (ABCIndexClass, ABCSeries)): other = other.array if (is_datetime64_dtype(other) and not is_datetime64_ns_dtype(other) or not hasattr(other, 'asi8')): # e.g. 
other.dtype == 'datetime64[s]' # or an object-dtype ndarray other = type(self)._from_sequence(other) result = op(self.view('i8'), other.view('i8')) o_mask = other._isnan result = com.values_from_object(result) if o_mask.any(): result[o_mask] = nat_result if self._hasnans: result[self._isnan] = nat_result return result return compat.set_function_name(wrapper, opname, cls)
[ "def", "_dt_array_cmp", "(", "cls", ",", "op", ")", ":", "opname", "=", "'__{name}__'", ".", "format", "(", "name", "=", "op", ".", "__name__", ")", "nat_result", "=", "opname", "==", "'__ne__'", "def", "wrapper", "(", "self", ",", "other", ")", ":", ...
40.390244
19.243902
def fullPath(self): """ Return full butter path from butter root. """ for ((dirTree, dirID, dirSeq), (dirPath, name)) in self.links.items(): try: path = self.fileSystem.volumes[dirTree].fullPath if path is not None: return path + ("/" if path[-1] != "/" else "") + dirPath + name except Exception: logging.debug("Haven't imported %d yet", dirTree) if self.id == BTRFS_FS_TREE_OBJECTID: return "/" else: return None
[ "def", "fullPath", "(", "self", ")", ":", "for", "(", "(", "dirTree", ",", "dirID", ",", "dirSeq", ")", ",", "(", "dirPath", ",", "name", ")", ")", "in", "self", ".", "links", ".", "items", "(", ")", ":", "try", ":", "path", "=", "self", ".", ...
39.285714
21.214286
def _key_parenleft(self, text): """Action for '('""" self.hide_completion_widget() if self.get_current_line_to_cursor(): last_obj = self.get_last_obj() if last_obj and not last_obj.isdigit(): self.insert_text(text) self.show_object_info(last_obj, call=True) return self.insert_text(text)
[ "def", "_key_parenleft", "(", "self", ",", "text", ")", ":", "self", ".", "hide_completion_widget", "(", ")", "if", "self", ".", "get_current_line_to_cursor", "(", ")", ":", "last_obj", "=", "self", ".", "get_last_obj", "(", ")", "if", "last_obj", "and", "...
39.1
7.8
def wnfetd(window, n): """ Fetch a particular interval from a double precision window. http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/wnfetd_c.html :param window: Input window :type window: spiceypy.utils.support_types.SpiceCell :param n: Index of interval to be fetched. :type n: int :return: Left, right endpoints of the nth interval. :rtype: tuple """ assert isinstance(window, stypes.SpiceCell) assert window.dtype == 1 n = ctypes.c_int(n) left = ctypes.c_double() right = ctypes.c_double() libspice.wnfetd_c(ctypes.byref(window), n, ctypes.byref(left), ctypes.byref(right)) return left.value, right.value
[ "def", "wnfetd", "(", "window", ",", "n", ")", ":", "assert", "isinstance", "(", "window", ",", "stypes", ".", "SpiceCell", ")", "assert", "window", ".", "dtype", "==", "1", "n", "=", "ctypes", ".", "c_int", "(", "n", ")", "left", "=", "ctypes", "....
32.952381
16.190476
def _findRedundantProteins(protToPeps, pepToProts, proteins=None): """Returns a set of proteins with redundant peptide evidence. After removing the redundant proteins from the "protToPeps" and "pepToProts" mapping, all remaining proteins have at least one unique peptide. The remaining proteins are a "minimal" set of proteins that are able to explain all peptides. However, this is not guaranteed to be the optimal solution with the least number of proteins. In addition it is possible that multiple solutions with the same number of "minimal" proteins exist. Procedure for finding the redundant proteins: 1. Generate a list of proteins that do not contain any unique peptides, a unique peptide has exactly one protein entry in "pepToProts". 2. Proteins are first sorted in ascending order of the number of peptides. Proteins with an equal number of peptides are sorted in descending order of their sorted peptide frequencies (= proteins per peptide). If two proteins are still equal, they are sorted alpha numerical in descending order according to their protein names. For example in the case of a tie between proteins "A" and "B", protein "B" would be removed. 3. Parse this list of sorted non unique proteins; If all its peptides have a frequency value of greater 1; mark the protein as redundant; remove its peptides from the peptide frequency count, continue with the next entry. 4. Return the set of proteins marked as redundant. :param pepToProts: dict, for each peptide (=key) contains a set of parent proteins (=value). For Example {peptide: {protein, ...}, ...} :param protToPeps: dict, for each protein (=key) contains a set of associated peptides (=value). For Example {protein: {peptide, ...}, ...} :param proteins: iterable, proteins that are tested for being redundant. If None all proteins in "protToPeps" are parsed. :returns: a set of redundant proteins, i.e. 
proteins that are not necessary to explain all peptides """ if proteins is None: proteins = viewkeys(protToPeps) pepFrequency = _getValueCounts(pepToProts) protPepCounts = _getValueCounts(protToPeps) getCount = operator.itemgetter(1) getProt = operator.itemgetter(0) #TODO: quick and dirty solution #NOTE: add a test for merged proteins proteinTuples = list() for protein in proteins: if isinstance(protein, tuple): proteinTuples.append(protein) else: proteinTuples.append(tuple([protein])) sort = list() for protein in sorted(proteinTuples, reverse=True): if len(protein) == 1: protein = protein[0] protPepFreq = [pepFrequency[pep] for pep in protToPeps[protein]] if min(protPepFreq) > 1: sortValue = (len(protPepFreq)*-1, sorted(protPepFreq, reverse=True)) sort.append((protein, sortValue)) sortedProteins = map(getProt, sorted(sort, key=getCount, reverse=True)) redundantProteins = set() for protein in sortedProteins: for pep in protToPeps[protein]: if pepFrequency[pep] <= 1: break else: protPepFrequency = Counter(protToPeps[protein]) pepFrequency.subtract(protPepFrequency) redundantProteins.add(protein) return redundantProteins
[ "def", "_findRedundantProteins", "(", "protToPeps", ",", "pepToProts", ",", "proteins", "=", "None", ")", ":", "if", "proteins", "is", "None", ":", "proteins", "=", "viewkeys", "(", "protToPeps", ")", "pepFrequency", "=", "_getValueCounts", "(", "pepToProts", ...
45.783784
22.243243
def get_port_profile_for_intf_output_interface_port_profile_name(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") get_port_profile_for_intf = ET.Element("get_port_profile_for_intf") config = get_port_profile_for_intf output = ET.SubElement(get_port_profile_for_intf, "output") interface = ET.SubElement(output, "interface") port_profile = ET.SubElement(interface, "port-profile") name = ET.SubElement(port_profile, "name") name.text = kwargs.pop('name') callback = kwargs.pop('callback', self._callback) return callback(config)
[ "def", "get_port_profile_for_intf_output_interface_port_profile_name", "(", "self", ",", "*", "*", "kwargs", ")", ":", "config", "=", "ET", ".", "Element", "(", "\"config\"", ")", "get_port_profile_for_intf", "=", "ET", ".", "Element", "(", "\"get_port_profile_for_int...
45.428571
15.928571
def make_2D_samples_gauss(n, m, sigma, random_state=None): """return n samples drawn from 2D gaussian N(m,sigma) Parameters ---------- n : int number of samples to make m : np.array (2,) mean value of the gaussian distribution sigma : np.array (2,2) covariance matrix of the gaussian distribution random_state : int, RandomState instance or None, optional (default=None) If int, random_state is the seed used by the random number generator; If RandomState instance, random_state is the random number generator; If None, the random number generator is the RandomState instance used by `np.random`. Returns ------- X : np.array (n,2) n samples drawn from N(m,sigma) """ generator = check_random_state(random_state) if np.isscalar(sigma): sigma = np.array([sigma, ]) if len(sigma) > 1: P = sp.linalg.sqrtm(sigma) res = generator.randn(n, 2).dot(P) + m else: res = generator.randn(n, 2) * np.sqrt(sigma) + m return res
[ "def", "make_2D_samples_gauss", "(", "n", ",", "m", ",", "sigma", ",", "random_state", "=", "None", ")", ":", "generator", "=", "check_random_state", "(", "random_state", ")", "if", "np", ".", "isscalar", "(", "sigma", ")", ":", "sigma", "=", "np", ".", ...
30.852941
21.558824
def lookup_sig(cls, sigs, method_name, expr_args, stmt_or_expr, context): """ Using a list of args, determine the most accurate signature to use from the given context """ def synonymise(s): return s.replace('int128', 'num').replace('uint256', 'num') # for sig in sigs['self'] full_sig = cls.get_full_sig( stmt_or_expr.func.attr, expr_args, None, context.custom_units, context.structs, context.constants, ) method_names_dict = dict(Counter([x.split('(')[0] for x in context.sigs['self']])) if method_name not in method_names_dict: raise FunctionDeclarationException( "Function not declared yet (reminder: functions cannot " "call functions later in code than themselves): %s" % method_name ) if method_names_dict[method_name] == 1: return next( sig for name, sig in context.sigs['self'].items() if name.split('(')[0] == method_name ) if full_sig in context.sigs['self']: return context.sigs['self'][full_sig] else: synonym_sig = synonymise(full_sig) syn_sigs_test = [synonymise(k) for k in context.sigs.keys()] if len(syn_sigs_test) != len(set(syn_sigs_test)): raise Exception( 'Incompatible default parameter signature,' 'can not tell the number type of literal', stmt_or_expr ) synonym_sigs = [(synonymise(k), v) for k, v in context.sigs['self'].items()] ssig = [s[1] for s in synonym_sigs if s[0] == synonym_sig] if len(ssig) == 0: raise FunctionDeclarationException( "Function not declared yet (reminder: functions cannot " "call functions later in code than themselves): %s" % method_name ) return ssig[0]
[ "def", "lookup_sig", "(", "cls", ",", "sigs", ",", "method_name", ",", "expr_args", ",", "stmt_or_expr", ",", "context", ")", ":", "def", "synonymise", "(", "s", ")", ":", "return", "s", ".", "replace", "(", "'int128'", ",", "'num'", ")", ".", "replace...
40.72
20.8
def file_format(self): """Formats device filesystem""" log.info('Formating, can take minutes depending on flash size...') res = self.__exchange('file.format()', timeout=300) if 'format done' not in res: log.error(res) else: log.info(res) return res
[ "def", "file_format", "(", "self", ")", ":", "log", ".", "info", "(", "'Formating, can take minutes depending on flash size...'", ")", "res", "=", "self", ".", "__exchange", "(", "'file.format()'", ",", "timeout", "=", "300", ")", "if", "'format done'", "not", "...
34.666667
17
def setQuery(self, query): """ Sets the query information for this filter widget. :param query | <orb.Query> || None """ if query is None: return count = {} for widget in self.findChildren(QWidget): column = nativestring(widget.objectName()) count.setdefault(column, 0) count[column] += 1 success, value, _ = query.findValue(column, count[column]) if success: projexui.setWidgetValue(widget, value)
[ "def", "setQuery", "(", "self", ",", "query", ")", ":", "if", "query", "is", "None", ":", "return", "count", "=", "{", "}", "for", "widget", "in", "self", ".", "findChildren", "(", "QWidget", ")", ":", "column", "=", "nativestring", "(", "widget", "....
30.894737
16.368421
def get_slide_seg_list_belonged(dt_str, seg_duration, slide_step=1, fmt='%Y-%m-%d %H:%M:%S'): """ 获取该时刻所属的所有时间片列表 :param dt_str: datetime string, eg: 2016-10-31 12:22:11 :param seg_duration: 时间片长度, unit: minute :param slide_step: 滑动步长 :param fmt: datetime string format :return: 时间片列表 """ dt = time_util.str_to_datetime(dt_str, fmt) day_slide_seg_list = gen_slide_seg_list( const.FIRST_MINUTE_OF_DAY, const.MINUTES_IN_A_DAY, seg_duration, slide_step) return filter(lambda x: lie_in_seg(dt, x, seg_duration), day_slide_seg_list)
[ "def", "get_slide_seg_list_belonged", "(", "dt_str", ",", "seg_duration", ",", "slide_step", "=", "1", ",", "fmt", "=", "'%Y-%m-%d %H:%M:%S'", ")", ":", "dt", "=", "time_util", ".", "str_to_datetime", "(", "dt_str", ",", "fmt", ")", "day_slide_seg_list", "=", ...
40.866667
15.266667
def publish(ctx, test=False): """Publish to the cheeseshop.""" clean(ctx) if test: run('python setup.py register -r test sdist bdist_wheel', echo=True) run('twine upload dist/* -r test', echo=True) else: run('python setup.py register sdist bdist_wheel', echo=True) run('twine upload dist/*', echo=True)
[ "def", "publish", "(", "ctx", ",", "test", "=", "False", ")", ":", "clean", "(", "ctx", ")", "if", "test", ":", "run", "(", "'python setup.py register -r test sdist bdist_wheel'", ",", "echo", "=", "True", ")", "run", "(", "'twine upload dist/* -r test'", ",",...
38
19.777778
def add_value_event(self, event_value, func, event_type=None, parameters=None, **kwargs): """ Add a function that is called when the value reach or pass the event_value. :param event_value: A single value or range specified as a tuple. If it is a range the function specified in func is called when the value enters the range. :type event_value: ``float``, ``string``, ``boolean`` or a tuple of these types. :param func: Function or lambda expression to be called. This function will receive the dynamcic value as a parameter. :param event_type: String with the value "warning" of "error" or None (default). If warning or error is specified the value or range are shown in UI. :type event_type: ``str`` """ self._sensor_value.add_value_event(event_value, func, event_type, parameters, **kwargs)
[ "def", "add_value_event", "(", "self", ",", "event_value", ",", "func", ",", "event_type", "=", "None", ",", "parameters", "=", "None", ",", "*", "*", "kwargs", ")", ":", "self", ".", "_sensor_value", ".", "add_value_event", "(", "event_value", ",", "func"...
40.130435
31.695652
def _parse(self): """! @brief SWO parser as generator function coroutine. The generator yields every time it needs a byte of SWO data. The caller must use the generator's send() method to provide the next byte. """ timestamp = 0 invalid = False while True: byte = yield hdr = byte # Sync packet. if hdr == 0: packets = 0 while True: # Check for final 1 bit after at least 5 all-zero sync packets if (packets >= 5) and (byte == 0x80): break elif byte == 0: packets += 1 else: # Get early non-zero packet, reset sync packet counter. #packets = 0 invalid = True break byte = yield self._itm_page = 0 # Overflow packet. elif hdr == 0x70: self._send_event(events.TraceOverflow(timestamp)) # Protocol packet. elif (hdr & 0x3) == 0: c = (hdr >> 7) & 0x1 d = (hdr >> 4) & 0b111 # Local timestamp. if (hdr & 0xf) == 0 and d not in (0x0, 0x3): ts = 0 tc = 0 # Local timestamp packet format 1. if c == 1: tc = (hdr >> 4) & 0x3 while c == 1: byte = yield ts = (ts << 7) | (byte & 0x7f) c = (byte >> 7) & 0x1 # Local timestamp packet format 2. else: ts = (hdr >> 4) & 0x7 timestamp += ts self._send_event(events.TraceTimestamp(tc, timestamp)) # Global timestamp. elif hdr in (0b10010100, 0b10110100): t = (hdr >> 5) & 0x1 # TODO handle global timestamp # Extension. elif (hdr & 0x8) == 0x8: sh = (hdr >> 2) & 0x1 if c == 0: ex = (hdr >> 4) & 0x7 else: ex = 0 while c == 1: byte = yield ex = (ex << 7) | (byte & 0x7f) c = (byte >> 7) & 0x1 if sh == 0: # Extension packet with sh==0 sets ITM stimulus page. self._itm_page = ex else: #self._send_event(events.TraceEvent("Extension: SH={:d} EX={:#x}\n".format(sh, ex), timestamp)) invalid = True # Reserved packet. else: invalid = True # Source packet. 
else: ss = hdr & 0x3 l = 1 << (ss - 1) a = (hdr >> 3) & 0x1f if l == 1: payload = yield elif l == 2: byte1 = yield byte2 = yield payload = (byte1 | (byte2 << 8)) else: byte1 = yield byte2 = yield byte3 = yield byte4 = yield payload = (byte1 | (byte2 << 8) | (byte3 << 16) | (byte4 << 24)) # Instrumentation packet. if (hdr & 0x4) == 0: port = (self._itm_page * 32) + a self._send_event(events.TraceITMEvent(port, payload, l, timestamp)) # Hardware source packets... # Event counter elif a == 0: self._send_event(events.TraceEventCounter(payload, timestamp)) # Exception trace elif a == 1: exceptionNumber = payload & 0x1ff exceptionName = self._core.exception_number_to_name(exceptionNumber, True) fn = (payload >> 12) & 0x3 if 1 <= fn <= 3: self._send_event(events.TraceExceptionEvent(exceptionNumber, exceptionName, fn, timestamp)) else: invalid = True # Periodic PC elif a == 2: # A payload of 0 indicates a period PC sleep event. self._send_event(events.TracePeriodicPC(payload, timestamp)) # Data trace elif 8 <= a <= 23: type = (hdr >> 6) & 0x3 cmpn = (hdr >> 4) & 0x3 bit3 = (hdr >> 3) & 0x1 # PC value if type == 0b01 and bit3 == 0: self._send_event(events.TraceDataTraceEvent(cmpn=cmpn, pc=payload, ts=timestamp)) # Address elif type == 0b01 and bit3 == 1: self._send_event(events.TraceDataTraceEvent(cmpn=cmpn, addr=payload, ts=timestamp)) # Data value elif type == 0b10: self._send_event(events.TraceDataTraceEvent(cmpn=cmpn, value=payload, rnw=(bit3 == 0), sz=l, ts=timestamp)) else: invalid = True # Invalid DWT 'a' value. else: invalid = True
[ "def", "_parse", "(", "self", ")", ":", "timestamp", "=", "0", "invalid", "=", "False", "while", "True", ":", "byte", "=", "yield", "hdr", "=", "byte", "# Sync packet.", "if", "hdr", "==", "0", ":", "packets", "=", "0", "while", "True", ":", "# Check...
41.744526
14.313869
def askForPreviousFutures(self): """Request a status for every future to the broker.""" # Don't request it too often (otherwise it ping-pongs because) # the broker answer triggers the _poll of pop() if time.time() < self.lastStatus + POLLING_TIME / 1000: return self.lastStatus = time.time() for future in scoop._control.futureDict.values(): # Skip the root future if scoop.IS_ORIGIN and future.id == (scoop.worker, 0): continue if future not in self.inprogress: self.socket.sendStatusRequest(future)
[ "def", "askForPreviousFutures", "(", "self", ")", ":", "# Don't request it too often (otherwise it ping-pongs because)", "# the broker answer triggers the _poll of pop()", "if", "time", ".", "time", "(", ")", "<", "self", ".", "lastStatus", "+", "POLLING_TIME", "/", "1000",...
41.066667
17.6
def renames(self, new): """ .. seealso:: :func:`os.renames` """ os.renames(self, new) return self._next_class(new)
[ "def", "renames", "(", "self", ",", "new", ")", ":", "os", ".", "renames", "(", "self", ",", "new", ")", "return", "self", ".", "_next_class", "(", "new", ")" ]
33.75
8
def do_prompt(self, arg): """ Enable or disable prompt :param arg: on|off :return: """ if arg.lower() == 'off': self.response_prompt = '' self.prompt = '' return elif arg.lower() == 'on': self.prompt = PROMPT self.response_prompt = RESPONSE_PROMPT self.print_response('prompt: %s' % self.prompt, '\n', 'response: %s' % self.response_prompt)
[ "def", "do_prompt", "(", "self", ",", "arg", ")", ":", "if", "arg", ".", "lower", "(", ")", "==", "'off'", ":", "self", ".", "response_prompt", "=", "''", "self", ".", "prompt", "=", "''", "return", "elif", "arg", ".", "lower", "(", ")", "==", "'...
32.214286
13.642857
def build_precache_map(config): """ Build a mapping of contexts and models from the configuration """ precache_map = {} ss_name = config['ores']['scoring_system'] for context in config['scoring_systems'][ss_name]['scoring_contexts']: precache_map[context] = {} for model in config['scoring_contexts'][context].get('precache', []): precached_config = \ config['scoring_contexts'][context]['precache'][model] events = precached_config['on'] if len(set(events) - AVAILABLE_EVENTS) > 0: logger.warning("{0} events are not available" .format(set(events) - AVAILABLE_EVENTS)) for event in precached_config['on']: if event in precache_map[context]: precache_map[context][event].add(model) else: precache_map[context][event] = {model} logger.debug("Setting up precaching for {0} in {1} on {2}" .format(model, context, event)) return precache_map
[ "def", "build_precache_map", "(", "config", ")", ":", "precache_map", "=", "{", "}", "ss_name", "=", "config", "[", "'ores'", "]", "[", "'scoring_system'", "]", "for", "context", "in", "config", "[", "'scoring_systems'", "]", "[", "ss_name", "]", "[", "'sc...
43.48
18.76
def Baroczy(x, rhol, rhog, mul, mug): r'''Calculates void fraction in two-phase flow according to the model of [1]_ as given in [2]_, [3]_, and [4]_. .. math:: \alpha = \left[1 + \left(\frac{1-x}{x}\right)^{0.74}\left(\frac{\rho_g} {\rho_l}\right)^{0.65}\left(\frac{\mu_l}{\mu_g}\right)^{0.13} \right]^{-1} Parameters ---------- x : float Quality at the specific tube interval [] rhol : float Density of the liquid [kg/m^3] rhog : float Density of the gas [kg/m^3] mul : float Viscosity of liquid [Pa*s] mug : float Viscosity of gas [Pa*s] Returns ------- alpha : float Void fraction (area of gas / total area of channel), [-] Notes ----- Examples -------- >>> Baroczy(.4, 800, 2.5, 1E-3, 1E-5) 0.9453544598460807 References ---------- .. [1] Baroczy, C. Correlation of liquid fraction in two-phase flow with applications to liquid metals, Chem. Eng. Prog. Symp. Ser. 61 (1965) 179-191. .. [2] Xu, Yu, and Xiande Fang. "Correlations of Void Fraction for Two- Phase Refrigerant Flow in Pipes." Applied Thermal Engineering 64, no. 1-2 (March 2014): 242–51. doi:10.1016/j.applthermaleng.2013.12.032. .. [3] Dalkilic, A. S., S. Laohalertdecha, and S. Wongwises. "Effect of Void Fraction Models on the Two-Phase Friction Factor of R134a during Condensation in Vertical Downward Flow in a Smooth Tube." International Communications in Heat and Mass Transfer 35, no. 8 (October 2008): 921-27. doi:10.1016/j.icheatmasstransfer.2008.04.001. .. [4] Woldesemayat, Melkamu A., and Afshin J. Ghajar. "Comparison of Void Fraction Correlations for Different Flow Patterns in Horizontal and Upward Inclined Pipes." International Journal of Multiphase Flow 33, no. 4 (April 2007): 347-370. doi:10.1016/j.ijmultiphaseflow.2006.09.004. ''' Xtt = Lockhart_Martinelli_Xtt(x, rhol, rhog, mul, mug, pow_x=0.74, pow_rho=0.65, pow_mu=0.13) return (1 + Xtt)**-1
[ "def", "Baroczy", "(", "x", ",", "rhol", ",", "rhog", ",", "mul", ",", "mug", ")", ":", "Xtt", "=", "Lockhart_Martinelli_Xtt", "(", "x", ",", "rhol", ",", "rhog", ",", "mul", ",", "mug", ",", "pow_x", "=", "0.74", ",", "pow_rho", "=", "0.65", ","...
37.607143
26.892857
def deprecated_opts_signature(args, kwargs): """ Utility to help with the deprecation of the old .opts method signature Returns whether opts.apply_groups should be used (as a bool) and the corresponding options. """ from .options import Options groups = set(Options._option_groups) opts = {kw for kw in kwargs if kw != 'clone'} apply_groups = False options = None new_kwargs = {} if len(args) > 0 and isinstance(args[0], dict): apply_groups = True if (not set(args[0]).issubset(groups) and all(isinstance(v, dict) and not set(v).issubset(groups) for v in args[0].values())): apply_groups = False elif set(args[0].keys()) <= groups: new_kwargs = args[0] else: options = args[0] elif opts and opts.issubset(set(groups)): apply_groups = True elif kwargs.get('options', None) is not None: apply_groups = True elif not args and not kwargs: apply_groups = True return apply_groups, options, new_kwargs
[ "def", "deprecated_opts_signature", "(", "args", ",", "kwargs", ")", ":", "from", ".", "options", "import", "Options", "groups", "=", "set", "(", "Options", ".", "_option_groups", ")", "opts", "=", "{", "kw", "for", "kw", "in", "kwargs", "if", "kw", "!="...
33.903226
13.709677
def _set_esp_key(self, v, load=False): """ Setter method for esp_key, mapped from YANG variable /routing_system/interface/ve/ipv6/interface_ospfv3_conf/authentication/ipsec_auth_key_config/esp_key (ipsec-authentication-hexkey-string) If this variable is read-only (config: false) in the source YANG file, then _set_esp_key is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_esp_key() directly. YANG Description: Hexadecimal key string for ESP """ if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_dict={'length': [u'0..80']}), is_leaf=True, yang_name="esp-key", rest_name="esp-key", parent=self, choice=(u'ch-algorithm', u'ca-esp-algorithm'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Hexadecimal key string for ESP', u'hidden': u'full', u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-ospfv3', defining_module='brocade-ospfv3', yang_type='ipsec-authentication-hexkey-string', is_config=True) except (TypeError, ValueError): raise ValueError({ 'error-string': """esp_key must be of a type compatible with ipsec-authentication-hexkey-string""", 'defined-type': "brocade-ospfv3:ipsec-authentication-hexkey-string", 'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_dict={'length': [u'0..80']}), is_leaf=True, yang_name="esp-key", rest_name="esp-key", parent=self, choice=(u'ch-algorithm', u'ca-esp-algorithm'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Hexadecimal key string for ESP', u'hidden': u'full', u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-ospfv3', defining_module='brocade-ospfv3', yang_type='ipsec-authentication-hexkey-string', is_config=True)""", }) self.__esp_key = t if hasattr(self, 
'_set'): self._set()
[ "def", "_set_esp_key", "(", "self", ",", "v", ",", "load", "=", "False", ")", ":", "if", "hasattr", "(", "v", ",", "\"_utype\"", ")", ":", "v", "=", "v", ".", "_utype", "(", "v", ")", "try", ":", "t", "=", "YANGDynClass", "(", "v", ",", "base",...
87.708333
44.666667
def get_pending_boot_mode(self): """Retrieves the pending boot mode of the server. Gets the boot mode to be set on next reset. :returns: either LEGACY or UEFI. :raises: IloError, on an error from iLO. """ sushy_system = self._get_sushy_system(PROLIANT_SYSTEM_ID) try: return BOOT_MODE_MAP.get( sushy_system.bios_settings.pending_settings.boot_mode) except sushy.exceptions.SushyError as e: msg = (self._('The pending BIOS Settings was not found. Error ' '%(error)s') % {'error': str(e)}) LOG.debug(msg) raise exception.IloError(msg)
[ "def", "get_pending_boot_mode", "(", "self", ")", ":", "sushy_system", "=", "self", ".", "_get_sushy_system", "(", "PROLIANT_SYSTEM_ID", ")", "try", ":", "return", "BOOT_MODE_MAP", ".", "get", "(", "sushy_system", ".", "bios_settings", ".", "pending_settings", "."...
40.588235
12.588235
def exists(self, obj_id, obj_type=None): """ Return whether the given object exists in the search index. :param obj_id: The object's unique identifier. :param obj_type: The object's type. """ return self.object_key(obj_id, obj_type) in self._data
[ "def", "exists", "(", "self", ",", "obj_id", ",", "obj_type", "=", "None", ")", ":", "return", "self", ".", "object_key", "(", "obj_id", ",", "obj_type", ")", "in", "self", ".", "_data" ]
36
13.25
def format_messages(self, messages): """ Formats several messages with :class:Look, encodes them with :func:vital.tools.encoding.stdout_encode """ mess = "" for message in self.message: if self.pretty: mess = "{}{}".format(mess, self.format_message(message)) else: mess += str(message) if self.include_time: return ": {} : {}".format( datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S'), mess) return stdout_encode(mess)
[ "def", "format_messages", "(", "self", ",", "messages", ")", ":", "mess", "=", "\"\"", "for", "message", "in", "self", ".", "message", ":", "if", "self", ".", "pretty", ":", "mess", "=", "\"{}{}\"", ".", "format", "(", "mess", ",", "self", ".", "form...
42
12.153846
def validate_username(self, value): """ Verify that the username has a matching user. """ try: self.user = User.objects.get(username=value) except User.DoesNotExist: raise serializers.ValidationError("User does not exist") return value
[ "def", "validate_username", "(", "self", ",", "value", ")", ":", "try", ":", "self", ".", "user", "=", "User", ".", "objects", ".", "get", "(", "username", "=", "value", ")", "except", "User", ".", "DoesNotExist", ":", "raise", "serializers", ".", "Val...
29.9
15.7
def build_ref_list(refs): """ Given parsed references build a list of ref objects """ ref_list = [] for reference in refs: ref = ea.Citation() # Publcation Type utils.set_attr_if_value(ref, 'publication_type', reference.get('publication-type')) # id utils.set_attr_if_value(ref, 'id', reference.get('id')) # Article title utils.set_attr_if_value(ref, 'article_title', reference.get('full_article_title')) # Source utils.set_attr_if_value(ref, 'source', reference.get('source')) # Volume utils.set_attr_if_value(ref, 'volume', reference.get('volume')) # Issue utils.set_attr_if_value(ref, 'issue', reference.get('issue')) # First page utils.set_attr_if_value(ref, 'fpage', reference.get('fpage')) # Last page utils.set_attr_if_value(ref, 'lpage', reference.get('lpage')) # DOI utils.set_attr_if_value(ref, 'doi', reference.get('doi')) # Year utils.set_attr_if_value(ref, 'year', reference.get('year')) # Year date in iso 8601 format utils.set_attr_if_value(ref, 'year_iso_8601_date', reference.get('year-iso-8601-date')) # Can set the year_numeric now if ref.year_iso_8601_date is not None: # First preference take it from the iso 8601 date, if available try: ref.year_numeric = int(ref.year_iso_8601_date.split('-')[0]) except ValueError: ref.year_numeric = None if ref.year_numeric is None: # Second preference, use the year value if it is entirely numeric if utils.is_year_numeric(ref.year): ref.year_numeric = ref.year # date-in-citation utils.set_attr_if_value(ref, 'date_in_citation', reference.get('date-in-citation')) # elocation-id utils.set_attr_if_value(ref, 'elocation_id', reference.get('elocation-id')) # uri utils.set_attr_if_value(ref, 'uri', reference.get('uri')) if not ref.uri: # take uri value from uri_text utils.set_attr_if_value(ref, 'uri', reference.get('uri_text')) # pmid utils.set_attr_if_value(ref, 'pmid', reference.get('pmid')) # isbn utils.set_attr_if_value(ref, 'isbn', reference.get('isbn')) # accession utils.set_attr_if_value(ref, 'accession', 
reference.get('accession')) # patent utils.set_attr_if_value(ref, 'patent', reference.get('patent')) # patent country utils.set_attr_if_value(ref, 'country', reference.get('country')) # publisher-loc utils.set_attr_if_value(ref, 'publisher_loc', reference.get('publisher_loc')) # publisher-name utils.set_attr_if_value(ref, 'publisher_name', reference.get('publisher_name')) # edition utils.set_attr_if_value(ref, 'edition', reference.get('edition')) # version utils.set_attr_if_value(ref, 'version', reference.get('version')) # chapter-title utils.set_attr_if_value(ref, 'chapter_title', reference.get('chapter-title')) # comment utils.set_attr_if_value(ref, 'comment', reference.get('comment')) # data-title utils.set_attr_if_value(ref, 'data_title', reference.get('data-title')) # conf-name utils.set_attr_if_value(ref, 'conf_name', reference.get('conf-name')) # Authors if reference.get('authors'): for author in reference.get('authors'): ref_author = {} eautils.set_if_value(ref_author, 'group-type', author.get('group-type')) eautils.set_if_value(ref_author, 'surname', author.get('surname')) eautils.set_if_value(ref_author, 'given-names', author.get('given-names')) eautils.set_if_value(ref_author, 'collab', author.get('collab')) if ref_author: ref.add_author(ref_author) # Try to populate the doi attribute if the uri is a doi if not ref.doi and ref.uri: if ref.uri != eautils.doi_uri_to_doi(ref.uri): ref.doi = eautils.doi_uri_to_doi(ref.uri) # Append the reference to the list ref_list.append(ref) return ref_list
[ "def", "build_ref_list", "(", "refs", ")", ":", "ref_list", "=", "[", "]", "for", "reference", "in", "refs", ":", "ref", "=", "ea", ".", "Citation", "(", ")", "# Publcation Type", "utils", ".", "set_attr_if_value", "(", "ref", ",", "'publication_type'", ",...
46.130435
23.543478
def hit(self, code, line, duration): """ A line has finished executing. code (code) container function's code object line (int) line number of just executed line duration (float) duration of the line, in seconds """ entry = self.line_dict[line][code] entry[0] += 1 entry[1] += duration
[ "def", "hit", "(", "self", ",", "code", ",", "line", ",", "duration", ")", ":", "entry", "=", "self", ".", "line_dict", "[", "line", "]", "[", "code", "]", "entry", "[", "0", "]", "+=", "1", "entry", "[", "1", "]", "+=", "duration" ]
26.785714
10.357143
def endpoint(f): """This decorator marks this method as an endpoint. It is responsible for the request workflow and will call each relevant method in turn.""" def check_authentication(self, **kwargs): """If the `authentication` variable is defined and not None, the specified method will be run. On True the request will continue otherwise it will fail with a 401 authentication error""" if getattr(self, 'authentication', None) is None: current_app.logger.debug("No authentication method") return a = self.authentication() if not hasattr(a, 'is_authenticated'): current_app.logger.debug("No is_authenticated method") return if not a.is_authenticated(**kwargs): msg = getattr(g, 'authentication_error', 'Authentication failed') current_app.logger.warning("Authentication failed: {}".format(msg)) return self._make_response(401, msg, abort=True) current_app.logger.debug("Authentication successful") def check_authorization(self): """If the `authorization` variable is defined and not None, the specified method will be run. On True the request will continue otherwise it will fail with a 403 authorization error""" current_app.logger.info("Checking authentication/authorization") auth_class = getattr(self, 'authorization', getattr(self, 'authentication', None)) if auth_class is None: current_app.logger.debug("No authorization class") return a = auth_class() if not hasattr(a, 'is_authorized'): current_app.logger.debug("No is_authorized method") return if not a.is_authorized(record=g._resource_instance): current_app.logger.warning("Authorization failed") return self._make_response(403, "Authorization failed", abort=True) current_app.logger.debug("Authorization successful") def validate_request(self, **kwargs): """Call the validator class and validate the request_data. This method returns True or False. On False, a 400 will be returned with the reasons for the validation error. 
On True, the operation will continue.""" current_app.logger.info( "Checking {} validation".format(request.method)) if getattr(self, 'validation', None) is None: current_app.logger.warning("No validation specified") return v = self.validation() method = 'validate_{}'.format(request.method.lower()) if not hasattr(v, method): current_app.logger.warning("No validation method specified") return errors = getattr(v, method)(**kwargs) current_app.logger.debug("Validation errors: {}".format(errors)) if errors is not None and len(errors) > 0: current_app.logger.warning("Validation errors found") self._make_response(400, errors, abort=True) def load_request_data(self): if request.method in ['GET', 'DELETE']: return current_app.logger.info("Saving json payload in memory") # For now we assume JSON. Later in life we can make this more # payload agnostic try: d = request.data.decode('utf-8') g._rq_data = {} if d.strip() == "" else json.loads(d) except ValueError: return self._make_response(400, "Malformed JSON in request body", abort=True) if self.enforce_json_root and g._rq_data != {} and \ list(g._rq_data.keys()) != [self._payload_root()]: msg = "Invalid JSON root in request body" current_app.logger.error(msg) current_app.logger.debug( "Found {}, expecting {}".format( list(g._rq_data.keys()), self._payload_root())) return self._make_response(400, msg, abort=True) elif self._payload_root() in list(g._rq_data.keys()): current_app.logger.debug("Removing JSON root from rq payload") g._rq_data = g._rq_data[self._payload_root()] current_app.logger.debug("g._rq_data: {}".format(g._rq_data)) return g._rq_data @wraps(f) def decorator(self, *args, **kwargs): current_app.logger.info("Got {} request".format(request.method)) current_app.logger.info("Endpoint: {}".format(request.url)) if request.method not in self.allowed_methods: msg = "Request method {} is unavailable".format(request.method) current_app.logger.error(msg) return self._make_response(405, msg, abort=True) 
current_app.logger.info("Checking db table/collection is defined") if self.enforce_payload_collection and self.db_collection is None: msg = "No DB collection defined" current_app.logger.error(msg) return make_response(Response(msg, 424)) if request.method in ['POST', 'PUT', 'PATCH']: load_request_data(self) check_authentication(self, **kwargs) if kwargs.get('obj_id', False): kwargs['obj_id'] = self.fiddle_id(kwargs['obj_id']) self._get_instance(**kwargs) else: g._resource_instance = {} if request.method in ['POST', 'PUT', 'PATCH']: check_authorization(self) r = self.transform_record(g._rq_data.get(self._payload_root(), g._rq_data)) g._saveable_record = dict(self.merge_record_data( r, dict(getattr(g, '_resource_instance', r)))) validate_request(self, data=g._saveable_record) else: check_authorization(self) validate_request(self) return f(self, *args, **kwargs) return decorator
[ "def", "endpoint", "(", "f", ")", ":", "def", "check_authentication", "(", "self", ",", "*", "*", "kwargs", ")", ":", "\"\"\"If the `authentication` variable is defined and not None, the\n specified method will be run. On True the request will continue\n otherwise...
44.44697
19.931818
def add_to_configs(self, configs): """Add configurations to all tomodirs Parameters ---------- configs : :class:`numpy.ndarray` Nx4 numpy array with abmn configurations """ for f, td in self.tds.items(): td.configs.add_to_configs(configs)
[ "def", "add_to_configs", "(", "self", ",", "configs", ")", ":", "for", "f", ",", "td", "in", "self", ".", "tds", ".", "items", "(", ")", ":", "td", ".", "configs", ".", "add_to_configs", "(", "configs", ")" ]
27.454545
13.636364
def find(self, key): "Exact matching (returns value)" index = self.follow_bytes(key, self.ROOT) if index is None: return -1 if not self.has_value(index): return -1 return self.value(index)
[ "def", "find", "(", "self", ",", "key", ")", ":", "index", "=", "self", ".", "follow_bytes", "(", "key", ",", "self", ".", "ROOT", ")", "if", "index", "is", "None", ":", "return", "-", "1", "if", "not", "self", ".", "has_value", "(", "index", ")"...
30.625
11.625
def triangle_normal(a, b, c): """Return a vector orthogonal to the given triangle Arguments: a, b, c -- three 3D numpy vectors """ normal = np.cross(a - c, b - c) norm = np.linalg.norm(normal) return normal/norm
[ "def", "triangle_normal", "(", "a", ",", "b", ",", "c", ")", ":", "normal", "=", "np", ".", "cross", "(", "a", "-", "c", ",", "b", "-", "c", ")", "norm", "=", "np", ".", "linalg", ".", "norm", "(", "normal", ")", "return", "normal", "/", "nor...
26.888889
12
def generate_molecule_object_dict(source, format, values): """Generate a dictionary that represents a Squonk MoleculeObject when written as JSON :param source: Molecules in molfile or smiles format :param format: The format of the molecule. Either 'mol' or 'smiles' :param values: Optional dict of values (properties) for the MoleculeObject """ m = {"uuid": str(uuid.uuid4()), "source": source, "format": format} if values: m["values"] = values return m
[ "def", "generate_molecule_object_dict", "(", "source", ",", "format", ",", "values", ")", ":", "m", "=", "{", "\"uuid\"", ":", "str", "(", "uuid", ".", "uuid4", "(", ")", ")", ",", "\"source\"", ":", "source", ",", "\"format\"", ":", "format", "}", "if...
40.583333
21.75
def netconf_config_change_changed_by_server_or_user_by_user_session_id(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") netconf_config_change = ET.SubElement(config, "netconf-config-change", xmlns="urn:ietf:params:xml:ns:yang:ietf-netconf-notifications") changed_by = ET.SubElement(netconf_config_change, "changed-by") server_or_user = ET.SubElement(changed_by, "server-or-user") by_user = ET.SubElement(server_or_user, "by-user") session_id = ET.SubElement(by_user, "session-id") session_id.text = kwargs.pop('session_id') callback = kwargs.pop('callback', self._callback) return callback(config)
[ "def", "netconf_config_change_changed_by_server_or_user_by_user_session_id", "(", "self", ",", "*", "*", "kwargs", ")", ":", "config", "=", "ET", ".", "Element", "(", "\"config\"", ")", "netconf_config_change", "=", "ET", ".", "SubElement", "(", "config", ",", "\"...
53.769231
24.769231
def delete_exchange(self, vhost, name): """ Delete the named exchange from the named vhost. The API returns a 204 on success, in which case this method returns True, otherwise the error is raised. :param string vhost: Vhost where target exchange was created :param string name: The name of the exchange to delete. :returns bool: True on success. """ vhost = quote(vhost, '') name = quote(name, '') path = Client.urls['exchange_by_name'] % (vhost, name) self._call(path, 'DELETE') return True
[ "def", "delete_exchange", "(", "self", ",", "vhost", ",", "name", ")", ":", "vhost", "=", "quote", "(", "vhost", ",", "''", ")", "name", "=", "quote", "(", "name", ",", "''", ")", "path", "=", "Client", ".", "urls", "[", "'exchange_by_name'", "]", ...
38.8
16.4
def unperturbed_hamiltonian(states): r"""Return the unperturbed atomic hamiltonian for given states. We calcualte the atomic hamiltonian in the basis of the ground states of \ rubidium 87 (in GHz). >>> g = State("Rb", 87, 5, 0, 1/Integer(2)) >>> magnetic_states = make_list_of_states([g], "magnetic") >>> print(np.diag(unperturbed_hamiltonian(magnetic_states))/hbar/2/pi*1e-9) [-4.2717+0.j -4.2717+0.j -4.2717+0.j 2.563 +0.j 2.563 +0.j 2.563 +0.j 2.563 +0.j 2.563 +0.j] """ Ne = len(states) H0 = np.zeros((Ne, Ne), complex) for i in range(Ne): H0[i, i] = hbar*states[i].omega return H0
[ "def", "unperturbed_hamiltonian", "(", "states", ")", ":", "Ne", "=", "len", "(", "states", ")", "H0", "=", "np", ".", "zeros", "(", "(", "Ne", ",", "Ne", ")", ",", "complex", ")", "for", "i", "in", "range", "(", "Ne", ")", ":", "H0", "[", "i",...
37.470588
18.882353
def cut(cut, node_indices): """Check that the cut is for only the given nodes.""" if cut.indices != node_indices: raise ValueError('{} nodes are not equal to subsystem nodes ' '{}'.format(cut, node_indices))
[ "def", "cut", "(", "cut", ",", "node_indices", ")", ":", "if", "cut", ".", "indices", "!=", "node_indices", ":", "raise", "ValueError", "(", "'{} nodes are not equal to subsystem nodes '", "'{}'", ".", "format", "(", "cut", ",", "node_indices", ")", ")" ]
48.8
12.6
def is_data_from_channel(channel=4): # function factory '''Selecting FE data from given channel. Parameters ---------- channel : int Channel number (4 is default channel on Single Chip Card). Returns ------- Function. Usage: 1 Selecting FE data from channel 4 (combine with is_fe_word): filter_fe_data_from_channel_4 = logical_and(is_fe_word, is_data_from_channel(4)) fe_data_from_channel_4 = data_array[filter_fe_data_from_channel_4(data_array)] 2 Sleceting data from channel 4: filter_data_from_channel_4 = is_data_from_channel(4) data_from_channel_4 = data_array[filter_data_from_channel_4(fe_data_array)] 3 Sleceting data from channel 4: data_from_channel_4 = is_data_from_channel(4)(fe_raw_data) Other usage: f_ch4 = functoools.partial(is_data_from_channel, channel=4) l_ch4 = lambda x: is_data_from_channel(x, channel=4) ''' if channel >= 0 and channel < 16: def f(value): return np.equal(np.right_shift(np.bitwise_and(value, 0x0F000000), 24), channel) f.__name__ = "is_data_from_channel_" + str(channel) # or use inspect module: inspect.stack()[0][3] return f else: raise ValueError('Invalid channel number')
[ "def", "is_data_from_channel", "(", "channel", "=", "4", ")", ":", "# function factory\r", "if", "channel", ">=", "0", "and", "channel", "<", "16", ":", "def", "f", "(", "value", ")", ":", "return", "np", ".", "equal", "(", "np", ".", "right_shift", "(...
38.818182
27.484848
def _scope_and_enforce_robots(self, site, parent_page, outlinks): ''' Returns tuple ( dict of {page_id: Page} of fresh `brozzler.Page` representing in scope links accepted by robots policy, set of in scope urls (canonicalized) blocked by robots policy, set of out-of-scope urls (canonicalized)). ''' pages = {} # {page_id: Page, ...} blocked = set() out_of_scope = set() for url in outlinks or []: url_for_scoping = urlcanon.semantic(url) url_for_crawling = urlcanon.whatwg(url) decision = site.accept_reject_or_neither( url_for_scoping, parent_page=parent_page) if decision is True: hops_off = 0 elif decision is None: decision = parent_page.hops_off < site.scope.get( 'max_hops_off', 0) hops_off = parent_page.hops_off + 1 if decision is True: if brozzler.is_permitted_by_robots(site, str(url_for_crawling)): fresh_page = self._build_fresh_page( site, parent_page, url, hops_off) if fresh_page.id in pages: self._merge_page(pages[fresh_page.id], fresh_page) else: pages[fresh_page.id] = fresh_page else: blocked.add(str(url_for_crawling)) else: out_of_scope.add(str(url_for_crawling)) return pages, blocked, out_of_scope
[ "def", "_scope_and_enforce_robots", "(", "self", ",", "site", ",", "parent_page", ",", "outlinks", ")", ":", "pages", "=", "{", "}", "# {page_id: Page, ...}", "blocked", "=", "set", "(", ")", "out_of_scope", "=", "set", "(", ")", "for", "url", "in", "outli...
45.285714
16.771429
def _postprocess(self, x, out=None): """Return the post-processed version of ``x``. C2C: use ``tmp_f`` (C2C operation) R2C: use ``tmp_f`` (C2C operation) HALFC: use ``tmp_f`` (C2C operation) The result is stored in ``out`` if given, otherwise in a temporary or a new array. """ if out is None: if self.domain.field == ComplexNumbers(): out = self._tmp_r if self._tmp_r is not None else self._tmp_f else: out = self._tmp_f return dft_postprocess_data( out, real_grid=self.domain.grid, recip_grid=self.range.grid, shift=self.shifts, axes=self.axes, sign=self.sign, interp=self.domain.interp, op='multiply', out=out)
[ "def", "_postprocess", "(", "self", ",", "x", ",", "out", "=", "None", ")", ":", "if", "out", "is", "None", ":", "if", "self", ".", "domain", ".", "field", "==", "ComplexNumbers", "(", ")", ":", "out", "=", "self", ".", "_tmp_r", "if", "self", "....
40.052632
15.578947
def _set_link_oam_interface(self, v, load=False): """ Setter method for link_oam_interface, mapped from YANG variable /interface/ethernet/link_oam_interface (container) If this variable is read-only (config: false) in the source YANG file, then _set_link_oam_interface is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_link_oam_interface() directly. """ if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass(v,base=link_oam_interface.link_oam_interface, is_container='container', presence=False, yang_name="link-oam-interface", rest_name="link-oam", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Interface - Link OAM commands', u'alt-name': u'link-oam', u'sort-priority': u'117', u'cli-incomplete-command': None, u'cli-incomplete-no': None, u'callpoint': u'setDot3ahEnableInterface'}}, namespace='urn:brocade.com:mgmt:brocade-dot3ah', defining_module='brocade-dot3ah', yang_type='container', is_config=True) except (TypeError, ValueError): raise ValueError({ 'error-string': """link_oam_interface must be of a type compatible with container""", 'defined-type': "container", 'generated-type': """YANGDynClass(base=link_oam_interface.link_oam_interface, is_container='container', presence=False, yang_name="link-oam-interface", rest_name="link-oam", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Interface - Link OAM commands', u'alt-name': u'link-oam', u'sort-priority': u'117', u'cli-incomplete-command': None, u'cli-incomplete-no': None, u'callpoint': u'setDot3ahEnableInterface'}}, namespace='urn:brocade.com:mgmt:brocade-dot3ah', defining_module='brocade-dot3ah', yang_type='container', is_config=True)""", }) self.__link_oam_interface = t if hasattr(self, '_set'): self._set()
[ "def", "_set_link_oam_interface", "(", "self", ",", "v", ",", "load", "=", "False", ")", ":", "if", "hasattr", "(", "v", ",", "\"_utype\"", ")", ":", "v", "=", "v", ".", "_utype", "(", "v", ")", "try", ":", "t", "=", "YANGDynClass", "(", "v", ","...
91.954545
43.363636
def run_remove_system(name, token, org, system, prompt): """ Removes a system from the repo. """ repo = get_repo(token=token, org=org, name=name) try: label = repo.get_label(name=system.strip()) label.delete() click.secho("Successfully deleted {}".format(system), fg="green") if prompt and click.confirm("Run update to re-generate the page?"): run_update(name=name, token=token, org=org) except UnknownObjectException: click.secho("Unable to remove system {}, it does not exist.".format(system), fg="yellow")
[ "def", "run_remove_system", "(", "name", ",", "token", ",", "org", ",", "system", ",", "prompt", ")", ":", "repo", "=", "get_repo", "(", "token", "=", "token", ",", "org", "=", "org", ",", "name", "=", "name", ")", "try", ":", "label", "=", "repo",...
44
18.461538
def update_notification_list(self, apps=None, schema_editor=None, verbose=False): """Updates the notification model to ensure all registered notifications classes are listed. Typically called from a post_migrate signal. Also, in tests you can register a notification and the Notification class (not model) will automatically call this method if the named notification does not exist. See notification.notify() """ Notification = (apps or django_apps).get_model("edc_notification.notification") # flag all notifications as disabled and re-enable as required Notification.objects.all().update(enabled=False) if site_notifications.loaded: if verbose: sys.stdout.write( style.MIGRATE_HEADING("Populating Notification model:\n") ) self.delete_unregistered_notifications(apps=apps) for name, notification_cls in site_notifications.registry.items(): if verbose: sys.stdout.write( f" * Adding '{name}': '{notification_cls().display_name}'\n" ) try: obj = Notification.objects.get(name=name) except ObjectDoesNotExist: Notification.objects.create( name=name, display_name=notification_cls().display_name, enabled=True, ) else: obj.display_name = notification_cls().display_name obj.enabled = True obj.save()
[ "def", "update_notification_list", "(", "self", ",", "apps", "=", "None", ",", "schema_editor", "=", "None", ",", "verbose", "=", "False", ")", ":", "Notification", "=", "(", "apps", "or", "django_apps", ")", ".", "get_model", "(", "\"edc_notification.notifica...
44.918919
20.351351
def to_dict(self, get_value=None): """Dump counters as a dict""" self.trim() result = {} for key, value in iteritems(self.counters): if get_value is not None: value = getattr(value, get_value) r = result for _key in key[:-1]: r = r.setdefault(_key, {}) r[key[-1]] = value return result
[ "def", "to_dict", "(", "self", ",", "get_value", "=", "None", ")", ":", "self", ".", "trim", "(", ")", "result", "=", "{", "}", "for", "key", ",", "value", "in", "iteritems", "(", "self", ".", "counters", ")", ":", "if", "get_value", "is", "not", ...
32.833333
10.583333
def title(content, new_line_replacement=' ', tab_replacement=' '): """ Underlines content with '='. New lines and tabs will be replaced :param str content: :param str new_line_replacement: :param str tab_replacement: :return: unicode """ prepared_content = content.strip().replace('\n', new_line_replacement).replace('\t', tab_replacement) return u'{}\n{}'.format(prepared_content, '=' * len(prepared_content))
[ "def", "title", "(", "content", ",", "new_line_replacement", "=", "' '", ",", "tab_replacement", "=", "' '", ")", ":", "prepared_content", "=", "content", ".", "strip", "(", ")", ".", "replace", "(", "'\\n'", ",", "new_line_replacement", ")", ".", "replace"...
47.4
20
def _topology_from_residue(res): """Converts a openmm.app.Topology.Residue to openmm.app.Topology. Parameters ---------- res : openmm.app.Topology.Residue An individual residue in an openmm.app.Topology Returns ------- topology : openmm.app.Topology The generated topology """ topology = app.Topology() chain = topology.addChain() new_res = topology.addResidue(res.name, chain) atoms = dict() # { omm.Atom in res : omm.Atom in *new* topology } for res_atom in res.atoms(): topology_atom = topology.addAtom(name=res_atom.name, element=res_atom.element, residue=new_res) atoms[res_atom] = topology_atom topology_atom.bond_partners = [] for bond in res.bonds(): atom1 = atoms[bond.atom1] atom2 = atoms[bond.atom2] topology.addBond(atom1, atom2) atom1.bond_partners.append(atom2) atom2.bond_partners.append(atom1) return topology
[ "def", "_topology_from_residue", "(", "res", ")", ":", "topology", "=", "app", ".", "Topology", "(", ")", "chain", "=", "topology", ".", "addChain", "(", ")", "new_res", "=", "topology", ".", "addResidue", "(", "res", ".", "name", ",", "chain", ")", "a...
28.257143
16.657143
def set_title(self, title): """ Set terminal title. """ if self.term not in ('linux', 'eterm-color'): # Not supported by the Linux console. self.write_raw('\x1b]2;%s\x07' % title.replace('\x1b', '').replace('\x07', ''))
[ "def", "set_title", "(", "self", ",", "title", ")", ":", "if", "self", ".", "term", "not", "in", "(", "'linux'", ",", "'eterm-color'", ")", ":", "# Not supported by the Linux console.", "self", ".", "write_raw", "(", "'\\x1b]2;%s\\x07'", "%", "title", ".", "...
43.166667
21.5
def c3(x, lag): """ This function calculates the value of .. math:: \\frac{1}{n-2lag} \sum_{i=0}^{n-2lag} x_{i + 2 \cdot lag}^2 \cdot x_{i + lag} \cdot x_{i} which is .. math:: \\mathbb{E}[L^2(X)^2 \cdot L(X) \cdot X] where :math:`\\mathbb{E}` is the mean and :math:`L` is the lag operator. It was proposed in [1] as a measure of non linearity in the time series. .. rubric:: References | [1] Schreiber, T. and Schmitz, A. (1997). | Discrimination power of measures for nonlinearity in a time series | PHYSICAL REVIEW E, VOLUME 55, NUMBER 5 :param x: the time series to calculate the feature of :type x: numpy.ndarray :param lag: the lag that should be used in the calculation of the feature :type lag: int :return: the value of this feature :return type: float """ if not isinstance(x, (np.ndarray, pd.Series)): x = np.asarray(x) n = x.size if 2 * lag >= n: return 0 else: return np.mean((_roll(x, 2 * -lag) * _roll(x, -lag) * x)[0:(n - 2 * lag)])
[ "def", "c3", "(", "x", ",", "lag", ")", ":", "if", "not", "isinstance", "(", "x", ",", "(", "np", ".", "ndarray", ",", "pd", ".", "Series", ")", ")", ":", "x", "=", "np", ".", "asarray", "(", "x", ")", "n", "=", "x", ".", "size", "if", "2...
28.432432
25.621622
def as_urlpatterns(self): """ Creates the appropriate URLs for this object. """ urls = [] # for each of our actions for action in self.actions: view_class = self.view_for_action(action) view_pattern = self.pattern_for_view(view_class, action) name = self.url_name_for_action(action) urls.append(url(view_pattern, view_class.as_view(), name=name)) return urls
[ "def", "as_urlpatterns", "(", "self", ")", ":", "urls", "=", "[", "]", "# for each of our actions", "for", "action", "in", "self", ".", "actions", ":", "view_class", "=", "self", ".", "view_for_action", "(", "action", ")", "view_pattern", "=", "self", ".", ...
32.214286
17.928571
def front(self, *fields): '''Return the front pair of the structure''' v, f = tuple(self.irange(0, 0, fields=fields)) if v: return (v[0], dict(((field, f[field][0]) for field in f)))
[ "def", "front", "(", "self", ",", "*", "fields", ")", ":", "v", ",", "f", "=", "tuple", "(", "self", ".", "irange", "(", "0", ",", "0", ",", "fields", "=", "fields", ")", ")", "if", "v", ":", "return", "(", "v", "[", "0", "]", ",", "dict", ...
43.6
19.6
def get_plot(self_or_cls, obj, renderer=None, **kwargs): """ Given a HoloViews Viewable return a corresponding plot instance. """ if isinstance(obj, DynamicMap) and obj.unbounded: dims = ', '.join('%r' % dim for dim in obj.unbounded) msg = ('DynamicMap cannot be displayed without explicit indexing ' 'as {dims} dimension(s) are unbounded. ' '\nSet dimensions bounds with the DynamicMap redim.range ' 'or redim.values methods.') raise SkipRendering(msg.format(dims=dims)) # Initialize DynamicMaps with first data item initialize_dynamic(obj) if not isinstance(obj, Plot): if not displayable(obj): obj = collate(obj) initialize_dynamic(obj) obj = Compositor.map(obj, mode='data', backend=self_or_cls.backend) if not renderer: renderer = self_or_cls if not isinstance(self_or_cls, Renderer): renderer = self_or_cls.instance() if not isinstance(obj, Plot): obj = Layout.from_values(obj) if isinstance(obj, AdjointLayout) else obj plot_opts = dict(self_or_cls.plot_options(obj, self_or_cls.size), **kwargs) plot = self_or_cls.plotting_class(obj)(obj, renderer=renderer, **plot_opts) defaults = [kd.default for kd in plot.dimensions] init_key = tuple(v if d is None else d for v, d in zip(plot.keys[0], defaults)) plot.update(init_key) else: plot = obj return plot
[ "def", "get_plot", "(", "self_or_cls", ",", "obj", ",", "renderer", "=", "None", ",", "*", "*", "kwargs", ")", ":", "if", "isinstance", "(", "obj", ",", "DynamicMap", ")", "and", "obj", ".", "unbounded", ":", "dims", "=", "', '", ".", "join", "(", ...
44.605263
18.921053
def currentItem( self ): """ Returns the current navigation item from the current path. :return <XNavigationItem> || None """ model = self.navigationModel() if ( not model ): return None return model.itemByPath(self.text())
[ "def", "currentItem", "(", "self", ")", ":", "model", "=", "self", ".", "navigationModel", "(", ")", "if", "(", "not", "model", ")", ":", "return", "None", "return", "model", ".", "itemByPath", "(", "self", ".", "text", "(", ")", ")" ]
27.545455
13.545455
def _main(argv):
    """
    Handle arguments for the 'lumi-upload' command.
    """
    parser = argparse.ArgumentParser(
        description=DESCRIPTION,
        formatter_class=argparse.RawDescriptionHelpFormatter,
    )
    parser.add_argument(
        '-b', '--base-url', default=URL_BASE,
        help='API root url, default: %s' % URL_BASE,
    )
    parser.add_argument(
        '-a', '--account-id', default=None,
        help='Account ID that should own the project, if not the default',
    )
    parser.add_argument(
        '-l', '--language', default='en',
        help='The language code for the language the text is in. Default: en',
    )
    parser.add_argument('-t', '--token', help="API authentication token")
    parser.add_argument(
        '-s', '--save-token', action='store_true',
        help='save --token for --base-url to ~/.luminoso/tokens.json',
    )
    parser.add_argument(
        'input_filename',
        help='The JSON-lines (.jsons) file of documents to upload',
    )
    parser.add_argument(
        'project_name', nargs='?', default=None,
        help='What the project should be called',
    )
    args = parser.parse_args(argv)
    # Persist the token before connecting so later runs can omit --token.
    if args.save_token:
        if not args.token:
            raise ValueError("error: no token provided")
        LuminosoClient.save_token(args.token,
                                  domain=urlparse(args.base_url).netloc)
    client = LuminosoClient.connect(url=args.base_url, token=args.token)
    name = args.project_name
    # Fall back to interactively prompting when no name was given on the
    # command line; an empty answer aborts without creating anything.
    if name is None:
        name = input('Enter a name for the project: ')
    if not name:
        print('Aborting because no name was provided.')
        return
    result = upload_docs(
        client, args.input_filename, args.language, name,
        account=args.account_id, progress=True,
    )
    print(
        'Project {!r} created with {} documents'.format(
            result['project_id'], result['document_count']
        )
    )
[ "def", "_main", "(", "argv", ")", ":", "parser", "=", "argparse", ".", "ArgumentParser", "(", "description", "=", "DESCRIPTION", ",", "formatter_class", "=", "argparse", ".", "RawDescriptionHelpFormatter", ",", ")", "parser", ".", "add_argument", "(", "'-b'", ...
27.805556
21.472222
def as_ipywidget(self):
    """
    Provides an IPywidgets player that can be used in a notebook.
    """
    # Imported lazily so IPython is only required when this is used.
    from IPython.display import Audio
    player = Audio(data=self.y, rate=self.sr)
    return player
[ "def", "as_ipywidget", "(", "self", ")", ":", "from", "IPython", ".", "display", "import", "Audio", "return", "Audio", "(", "data", "=", "self", ".", "y", ",", "rate", "=", "self", ".", "sr", ")" ]
37.6
13
def on_trial_complete(self, trial_id, result=None, early_terminated=False, error=False):
    """Passes the result to skopt unless early terminated or errored.

    The result is internally negated when interacting with Skopt
    so that Skopt Optimizers can "maximize" this value,
    as it minimizes on default.
    """
    # Drop the bookkeeping entry for this trial no matter the outcome.
    suggested_params = self._live_trial_mapping.pop(trial_id)
    if not result:
        return
    # Skopt minimizes, so feed it the negated reward.
    self._skopt_opt.tell(suggested_params, -result[self._reward_attr])
[ "def", "on_trial_complete", "(", "self", ",", "trial_id", ",", "result", "=", "None", ",", "error", "=", "False", ",", "early_terminated", "=", "False", ")", ":", "skopt_trial_info", "=", "self", ".", "_live_trial_mapping", ".", "pop", "(", "trial_id", ")", ...
42.5
14.928571
def _judeNOtIn(self, raw_str, ele_list): ''' 判断ele是否在原始字符串中 args: raw_str 源字符串 ele_list 待检查的列表 return boolean ''' for ele in ele_list: if ele in raw_str: return False return True
[ "def", "_judeNOtIn", "(", "self", ",", "raw_str", ",", "ele_list", ")", ":", "for", "ele", "in", "ele_list", ":", "if", "ele", "in", "raw_str", ":", "return", "False", "return", "True" ]
22.076923
17.923077
def draw_graph(self):
    """
    The central logic for drawing the graph.

    Sets self.graph (the 'g' element in the SVG root)
    """
    offset = 'translate (%s %s)' % (self.border_left, self.border_top)
    self.graph = etree.SubElement(self.root, 'g', transform=offset)

    background_attrs = {
        'x': '0',
        'y': '0',
        'width': str(self.graph_width),
        'height': str(self.graph_height),
        'class': 'graphBackground',
    }
    etree.SubElement(self.graph, 'rect', background_attrs)

    # Axis
    x_axis_attrs = {
        'd': 'M 0 0 v%s' % self.graph_height,
        'class': 'axis',
        'id': 'xAxis',
    }
    etree.SubElement(self.graph, 'path', x_axis_attrs)
    y_axis_attrs = {
        'd': 'M 0 %s h%s' % (self.graph_height, self.graph_width),
        'class': 'axis',
        'id': 'yAxis',
    }
    etree.SubElement(self.graph, 'path', y_axis_attrs)

    self.draw_x_labels()
    self.draw_y_labels()
[ "def", "draw_graph", "(", "self", ")", ":", "transform", "=", "'translate (%s %s)'", "%", "(", "self", ".", "border_left", ",", "self", ".", "border_top", ")", "self", ".", "graph", "=", "etree", ".", "SubElement", "(", "self", ".", "root", ",", "'g'", ...
23.903226
19.83871
def count(args):
    """
    %prog count *.gz

    Count reads based on FASTQC results. FASTQC needs to be run on all the input
    data given before running this command.
    """
    from jcvi.utils.table import loadtable, write_csv

    p = OptionParser(count.__doc__)
    p.add_option("--dir",
                 help="Sub-directory where FASTQC was run [default: %default]")
    p.add_option("--human", default=False, action="store_true",
                 help="Human friendly numbers [default: %default]")
    p.set_table()
    p.set_outfile()
    opts, args = p.parse_args(args)

    if len(args) < 1:
        sys.exit(not p.print_help())

    filenames = args
    subdir = opts.dir
    header = "Filename|Total Sequences|Sequence length|Total Bases".split("|")
    rows = []
    human = opts.human
    for f in filenames:
        # FASTQC writes its report into '<input-without-.gz-and-ext>_fastqc'.
        folder = f.replace(".gz", "").rsplit(".", 1)[0] + "_fastqc"
        if subdir:
            folder = op.join(subdir, folder)
        summaryfile = op.join(folder, "fastqc_data.txt")

        # One table row per input file, in the order of `header`.
        fqcdata = FastQCdata(summaryfile, human=human)
        row = [fqcdata[x] for x in header]
        rows.append(row)

    # Pretty table to stderr, machine-readable CSV to the output file.
    print(loadtable(header, rows), file=sys.stderr)
    write_csv(header, rows, sep=opts.sep,
              filename=opts.outfile, align=opts.align)
[ "def", "count", "(", "args", ")", ":", "from", "jcvi", ".", "utils", ".", "table", "import", "loadtable", ",", "write_csv", "p", "=", "OptionParser", "(", "count", ".", "__doc__", ")", "p", ".", "add_option", "(", "\"--dir\"", ",", "help", "=", "\"Sub-...
32.025641
20.128205
def get_ldap_user_membership(self, user_dn):
    """Retrieve user membership from LDAP server.

    :param user_dn: distinguished name of the user to look up.
    :return: the ``(uri, groups)`` tuple returned by ``ldap_search``,
        or ``None`` when the search failed.
    """
    # Escape parenthesis in DN so it can be embedded in an LDAP filter.
    # Raw strings fix the invalid '\(' / '\)' escape sequences that the
    # original literals produced (a DeprecationWarning on Python 3.6+).
    escaped_dn = user_dn.replace('(', r'\(').replace(')', r'\)')
    membership_filter = self.conf_LDAP_SYNC_GROUP_MEMBERSHIP_FILTER.replace(
        '{distinguishedName}', escaped_dn)
    try:
        uri, groups = self.ldap_search(membership_filter,
                                       self.conf_LDAP_SYNC_GROUP_ATTRIBUTES.keys(),
                                       False, membership_filter)
    except Exception:
        logger.error("Error reading membership: Filter %s, Keys %s" %
                     (membership_filter,
                      str(self.conf_LDAP_SYNC_GROUP_ATTRIBUTES.keys())))
        return None
    #logger.debug("AD Membership: Retrieved %d groups for user '%s'" % (len(groups), user_dn))
    return (uri, groups)
[ "def", "get_ldap_user_membership", "(", "self", ",", "user_dn", ")", ":", "#Escape parenthesis in DN", "membership_filter", "=", "self", ".", "conf_LDAP_SYNC_GROUP_MEMBERSHIP_FILTER", ".", "replace", "(", "'{distinguishedName}'", ",", "user_dn", ".", "replace", "(", "'(...
68.090909
40.090909
def from_table(cls, table, length, prefix=0, flatten=False): """ Extract from the given table a tree for word length, taking only prefixes of prefix length (if greater than 0) into account to compute successors. :param table: the table to extract the tree from; :param length: the length of words generated by the extracted tree; greater or equal to 1; :param prefix: if greater than 0, the length of the prefixes used for computing successors; :param flatten: whether to flatten the table or not; :return: the tree corresponding to words of length from table. """ # Build the expanded tree with necessary suffix and length tree = defaultdict(dict) # The tree pending = {(">", 0)} # The nodes to expand while pending: suffix, size = pending.pop() if size < length: choices = table.weighted_choices(suffix, exclude={"<"}, flatten=flatten) # The word length is not reached yet, expand for successor, weight in choices.items(): expanded = suffix + successor if prefix > 0: expanded = expanded[-prefix:] new_node = (expanded, size + 1) tree[(suffix, size)][new_node] = weight pending.add(new_node) else: choices = table.weighted_choices(suffix, flatten=flatten) # The word length is reached, only add < if present if "<" in choices: tree[(suffix, size)][("<", size + 1)] = 1 else: tree[(suffix, size)] = dict() return cls(cls.trim_tree(tree))
[ "def", "from_table", "(", "cls", ",", "table", ",", "length", ",", "prefix", "=", "0", ",", "flatten", "=", "False", ")", ":", "# Build the expanded tree with necessary suffix and length", "tree", "=", "defaultdict", "(", "dict", ")", "# The tree", "pending", "=...
48.394737
17.131579
def area(triangles=None, crosses=None, sum=False):
    """
    Calculates the sum area of input triangles

    Parameters
    ----------
    triangles : (n, 3, 3) float
      Vertices of triangles
    crosses : (n, 3) float or None
      As a speedup don't re- compute cross products
    sum : bool
      Return summed area or individual triangle area

    Returns
    ----------
    area : (n,) float or float
      Individual or summed area depending on `sum` argument
    """
    if crosses is None:
        crosses = cross(triangles)
    # Each triangle's area is half the norm of its edge cross product.
    norms = np.sqrt(np.sum(crosses ** 2, axis=1))
    areas = norms * 0.5
    if sum:
        return np.sum(areas)
    return areas
[ "def", "area", "(", "triangles", "=", "None", ",", "crosses", "=", "None", ",", "sum", "=", "False", ")", ":", "if", "crosses", "is", "None", ":", "crosses", "=", "cross", "(", "triangles", ")", "area", "=", "(", "np", ".", "sum", "(", "crosses", ...
25.791667
16.958333
def glm(data, xseq, **params):
    """
    Fit GLM
    """
    design = sm.add_constant(data['x'])
    design_seq = sm.add_constant(xseq)

    init_kwargs, fit_kwargs = separate_method_kwargs(
        params['method_args'], sm.GLM, sm.GLM.fit)
    fitted = sm.GLM(data['y'], design, **init_kwargs).fit(**fit_kwargs)

    data = pd.DataFrame({'x': xseq})
    data['y'] = fitted.predict(design_seq)

    if params['se']:
        # Confidence band at the requested level.
        prediction = fitted.get_prediction(design_seq)
        ci = prediction.conf_int(1 - params['level'])
        data['ymin'] = ci[:, 0]
        data['ymax'] = ci[:, 1]
    return data
[ "def", "glm", "(", "data", ",", "xseq", ",", "*", "*", "params", ")", ":", "X", "=", "sm", ".", "add_constant", "(", "data", "[", "'x'", "]", ")", "Xseq", "=", "sm", ".", "add_constant", "(", "xseq", ")", "init_kwargs", ",", "fit_kwargs", "=", "s...
26.363636
15.363636
def permission_required(perm, login_url=None):
    """Replacement for django.contrib.auth.decorators.permission_required
    that returns 403 Forbidden if the user is already logged in.
    """
    def has_permission(user):
        return user.has_perm(perm)
    return user_passes_test(has_permission, login_url=login_url)
[ "def", "permission_required", "(", "perm", ",", "login_url", "=", "None", ")", ":", "return", "user_passes_test", "(", "lambda", "u", ":", "u", ".", "has_perm", "(", "perm", ")", ",", "login_url", "=", "login_url", ")" ]
44.333333
16.833333
def _get_prtfmt(self, objgowr, verbose): """Get print format containing markers.""" prtfmt = objgowr.get_prtfmt('fmt') prtfmt = prtfmt.replace('# ', '') # print('PPPPPPPPPPP', prtfmt) if not verbose: prtfmt = prtfmt.replace('{hdr1usr01:2}', '') prtfmt = prtfmt.replace('{childcnt:3} L{level:02} ', '') prtfmt = prtfmt.replace('{num_usrgos:>4} uGOs ', '') prtfmt = prtfmt.replace('{D1:5} {REL} {rel}', '') prtfmt = prtfmt.replace('R{reldepth:02} ', '') # print('PPPPPPPPPPP', prtfmt) marks = ''.join(['{{{}}}'.format(nt.hdr) for nt in self.go_ntsets]) return '{MARKS} {PRTFMT}'.format(MARKS=marks, PRTFMT=prtfmt)
[ "def", "_get_prtfmt", "(", "self", ",", "objgowr", ",", "verbose", ")", ":", "prtfmt", "=", "objgowr", ".", "get_prtfmt", "(", "'fmt'", ")", "prtfmt", "=", "prtfmt", ".", "replace", "(", "'# '", ",", "''", ")", "# print('PPPPPPPPPPP', prtfmt)", "if", "not"...
51.571429
13.857143
def write_ioc(root, output_dir=None, force=False):
    """
    Serialize an IOC, as defined by a set of etree Elements, to a .IOC file.

    :param root: etree Element to write out.  Should have the tag 'OpenIOC'
    :param output_dir: Directory to write the ioc out to.  default is current working directory.
    :param force: If set, skip the root node tag check.
    :return: True, unless an error occurs while writing the IOC.
    """
    root_tag = 'OpenIOC'
    if not force and root.tag != root_tag:
        raise ValueError('Root tag is not "{}".'.format(root_tag))
    default_encoding = 'utf-8'
    tree = root.getroottree()
    try:
        encoding = tree.docinfo.encoding
    except Exception:
        # Narrowed from a bare `except:` -- no longer swallows
        # KeyboardInterrupt/SystemExit while probing docinfo.
        log.debug('Failed to get encoding from docinfo')
        encoding = default_encoding
    # Output filename is derived from the IOC's id attribute.
    ioc_id = root.attrib['id']
    fn = ioc_id + '.ioc'
    if output_dir:
        fn = os.path.join(output_dir, fn)
    else:
        fn = os.path.join(os.getcwd(), fn)
    try:
        with open(fn, 'wb') as fout:
            fout.write(et.tostring(tree, encoding=encoding,
                                   xml_declaration=True, pretty_print=True))
    except (IOError, OSError):
        log.exception('Failed to write out IOC')
        return False
    # The original's trailing `except: raise` was a no-op and is removed;
    # any other exception still propagates unchanged.
    return True
[ "def", "write_ioc", "(", "root", ",", "output_dir", "=", "None", ",", "force", "=", "False", ")", ":", "root_tag", "=", "'OpenIOC'", "if", "not", "force", "and", "root", ".", "tag", "!=", "root_tag", ":", "raise", "ValueError", "(", "'Root tag is not \"{}\...
35.771429
19.028571
def uploadFiles(self):
    """ Upload every file registered in 'filesToSync'. """
    for entry in self.filesToSync:
        self.uploadFile(entry["name"], entry["ispickle"], entry["at_home"])
[ "def", "uploadFiles", "(", "self", ")", ":", "for", "each_file", "in", "self", ".", "filesToSync", ":", "self", ".", "uploadFile", "(", "each_file", "[", "\"name\"", "]", ",", "each_file", "[", "\"ispickle\"", "]", ",", "each_file", "[", "\"at_home\"", "]"...
37.166667
12.833333
def get_im(self, force_update=False):
    """Get the influence map for the model, generating it if necessary.

    Parameters
    ----------
    force_update : bool
        Whether to generate the influence map when the function is called.
        If False, returns the previously generated influence map if
        available. Defaults to True.

    Returns
    -------
    networkx MultiDiGraph object containing the influence map.
        The influence map can be rendered as a pdf using the dot layout
        program as follows::

            im_agraph = nx.nx_agraph.to_agraph(influence_map)
            im_agraph.draw('influence_map.pdf', prog='dot')
    """
    # Return the cached influence map unless a rebuild was requested.
    if self._im and not force_update:
        return self._im
    if not self.model:
        raise Exception("Cannot get influence map if there is no model.")

    def add_obs_for_agent(agent):
        # Ground the agent into monomer patterns; returns None (not [])
        # when no pattern matches, so callers may store None.
        obj_mps = list(pa.grounded_monomer_patterns(self.model, agent))
        if not obj_mps:
            logger.debug('No monomer patterns found in model for agent %s, '
                         'skipping' % agent)
            return
        obs_list = []
        for obj_mp in obj_mps:
            obs_name = _monomer_pattern_label(obj_mp) + '_obs'
            # Add the observable
            obj_obs = Observable(obs_name, obj_mp, _export=False)
            obs_list.append(obs_name)
            try:
                self.model.add_component(obj_obs)
            except ComponentDuplicateNameError as e:
                # Observable already exists in the model; reuse it.
                pass
        return obs_list

    # Create observables for all statements to check, and add to model
    # Remove any existing observables in the model
    self.model.observables = ComponentSet([])
    for stmt in self.statements:
        # Generate observables for Modification statements
        if isinstance(stmt, Modification):
            mod_condition_name = modclass_to_modtype[stmt.__class__]
            if isinstance(stmt, RemoveModification):
                mod_condition_name = modtype_to_inverse[mod_condition_name]
            # Add modification to substrate agent
            modified_sub = _add_modification_to_agent(stmt.sub,
                                                      mod_condition_name,
                                                      stmt.residue,
                                                      stmt.position)
            obs_list = add_obs_for_agent(modified_sub)
            # Associate this statement with this observable
            self.stmt_to_obs[stmt] = obs_list
        # Generate observables for Activation/Inhibition statements
        elif isinstance(stmt, RegulateActivity):
            regulated_obj, polarity = \
                _add_activity_to_agent(stmt.obj, stmt.obj_activity,
                                       stmt.is_activation)
            obs_list = add_obs_for_agent(regulated_obj)
            # Associate this statement with this observable
            self.stmt_to_obs[stmt] = obs_list
        elif isinstance(stmt, RegulateAmount):
            obs_list = add_obs_for_agent(stmt.obj)
            self.stmt_to_obs[stmt] = obs_list
        elif isinstance(stmt, Influence):
            obs_list = add_obs_for_agent(stmt.obj.concept)
            self.stmt_to_obs[stmt] = obs_list
    # Add observables for each agent
    for ag in self.agent_obs:
        obs_list = add_obs_for_agent(ag)
        self.agent_to_obs[ag] = obs_list

    logger.info("Generating influence map")
    self._im = self.generate_im(self.model)
    #self._im.is_multigraph = lambda: False
    # Now, for every rule in the model, check if there are any observables
    # downstream; alternatively, for every observable in the model, get a
    # list of rules.
    # We'll need the dictionary to check if nodes are observables
    node_attributes = nx.get_node_attributes(self._im, 'node_type')
    for rule in self.model.rules:
        obs_list = []
        # Get successors of the rule node
        for neighb in self._im.neighbors(rule.name):
            # Check if the node is an observable
            if node_attributes[neighb] != 'variable':
                continue
            # Get the edge and check the polarity
            edge_sign = _get_edge_sign(self._im, (rule.name, neighb))
            obs_list.append((neighb, edge_sign))
        self.rule_obs_dict[rule.name] = obs_list
    return self._im
[ "def", "get_im", "(", "self", ",", "force_update", "=", "False", ")", ":", "if", "self", ".", "_im", "and", "not", "force_update", ":", "return", "self", ".", "_im", "if", "not", "self", ".", "model", ":", "raise", "Exception", "(", "\"Cannot get influen...
46.536082
18.164948
def messages(self):
    '''A generator yielding the :class:`MacIndexMessage` structures in this
    index file.'''
    # Layout: fixed-size file header, then one fixed-size message
    # structure per message, then minimal message info (subject, from,
    # to) at the end of the file, which this generator ignores.
    record_size = MacIndexMessage.LENGTH
    base = self.header_length
    for index in range(self.total_messages):
        yield MacIndexMessage(mm=self.mmap, offset=base + index * record_size)
[ "def", "messages", "(", "self", ")", ":", "# The file contains the fixed-size file header followed by", "# fixed-size message structures, followed by minimal message", "# information (subject, from, to). Start after the file", "# header and then simply return the message structures in", "# sequ...
50.947368
24.105263
def boost(self, boost):
    """Sets boost mode."""
    _LOGGER.debug("Setting boost mode: %s", boost)
    enabled = bool(boost)
    payload = struct.pack('BB', PROP_BOOST, enabled)
    self._conn.make_request(PROP_WRITE_HANDLE, payload)
[ "def", "boost", "(", "self", ",", "boost", ")", ":", "_LOGGER", ".", "debug", "(", "\"Setting boost mode: %s\"", ",", "boost", ")", "value", "=", "struct", ".", "pack", "(", "'BB'", ",", "PROP_BOOST", ",", "bool", "(", "boost", ")", ")", "self", ".", ...
44.4
13.2
def get_conn(self, urlparsed=None):
    """Returns an HTTPConnection based on the urlparse result given or the
    default Swift cluster (internal url) urlparse result.

    :param urlparsed: The result from urlparse.urlparse or None to use the
                      default Swift cluster's value
    """
    parsed = urlparsed or self.dsc_parsed2
    conn_class = HTTPConnection if parsed.scheme == 'http' else HTTPSConnection
    return conn_class(parsed.netloc)
[ "def", "get_conn", "(", "self", ",", "urlparsed", "=", "None", ")", ":", "if", "not", "urlparsed", ":", "urlparsed", "=", "self", ".", "dsc_parsed2", "if", "urlparsed", ".", "scheme", "==", "'http'", ":", "return", "HTTPConnection", "(", "urlparsed", ".", ...
41.307692
14.307692
def construct_parameter_validators(parameter, context):
    """
    Constructs a dictionary of validator functions for the provided
    parameter definition.
    """
    validators = ValidationDict()
    # A '$ref' gets its own dedicated reference validator first.
    if '$ref' in parameter:
        ref_validator = ParameterReferenceValidator(parameter['$ref'], context)
        validators.add_validator('$ref', ref_validator)
    # Every recognized parameter key contributes its mapped validator.
    for key in parameter:
        if key not in validator_mapping:
            continue
        validators.add_validator(
            key, validator_mapping[key](context=context, **parameter),
        )
    # Schema-derived validators only fill in keys not already present.
    if 'schema' in parameter:
        schema_validators = construct_schema_validators(parameter['schema'],
                                                       context=context)
        for name, validator in schema_validators.items():
            validators.setdefault(name, validator)
    return validators
[ "def", "construct_parameter_validators", "(", "parameter", ",", "context", ")", ":", "validators", "=", "ValidationDict", "(", ")", "if", "'$ref'", "in", "parameter", ":", "validators", ".", "add_validator", "(", "'$ref'", ",", "ParameterReferenceValidator", "(", ...
37.095238
17.571429
def head_tail_middle(src):
    """Returns a tuple consisting of the head of a enumerable, the middle
    as a list and the tail of the enumerable. If the enumerable is 1 item,
    the middle will be empty and the tail will be None.

    >>> head_tail_middle([1, 2, 3, 4])
    (1, [2, 3], 4)
    >>> head_tail_middle([1])
    (1, [], None)
    >>> head_tail_middle([])
    (None, [], None)
    """
    # Guard clauses for the degenerate sizes; the docstring examples are
    # now valid doctest output (the original showed '1, [2, 3], 4').
    if len(src) == 0:
        return None, [], None
    if len(src) == 1:
        return src[0], [], None
    if len(src) == 2:
        return src[0], [], src[1]
    return src[0], src[1:-1], src[-1]
[ "def", "head_tail_middle", "(", "src", ")", ":", "if", "len", "(", "src", ")", "==", "0", ":", "return", "None", ",", "[", "]", ",", "None", "if", "len", "(", "src", ")", "==", "1", ":", "return", "src", "[", "0", "]", ",", "[", "]", ",", "...
25.421053
19.842105
def formatted_prefix(self, **format_info):
    """
    Gets a dict with format info, and formats a prefix template with that info.

    For example: if our prefix template is:
        'some_file_{groups[0]}_{file_number}'
    And we have this method called with:
        formatted_prefix(groups=[US], file_number=0)
    The returned formatted prefix would be:
        'some_file_US_0'
    """
    prefix_name = self.prefix_template.format(**format_info)
    file_number = format_info.pop('file_number', 0)
    # If formatting was a no-op (template had no placeholders),
    # disambiguate by appending the zero-padded file number.
    if prefix_name == self.prefix_template:
        prefix_name += '{:04d}'.format(file_number)
    # Python 3 fix: dict.iteritems() no longer exists; items() is the
    # equivalent and also works on Python 2.
    for key, value in format_info.items():
        # Append any info that the template itself did not consume.
        if value and not self._has_key_info(key):
            prefix_name = '{}-{}'.format(prefix_name, ''.join(value))
    return prefix_name
[ "def", "formatted_prefix", "(", "self", ",", "*", "*", "format_info", ")", ":", "prefix_name", "=", "self", ".", "prefix_template", ".", "format", "(", "*", "*", "format_info", ")", "file_number", "=", "format_info", ".", "pop", "(", "'file_number'", ",", ...
39.47619
17.095238
def fire_event(key, msg, tag, sock_dir, args=None, transport='zeromq'):
    '''
    Fire deploy action
    '''
    bus = salt.utils.event.get_event(
        'master', sock_dir, transport, listen=False)
    try:
        bus.fire_event(msg, tag)
    except ValueError:
        # We're using at least a 0.17.x version of salt, which expects a
        # dict payload: wrap (or merge into) the args dict.
        if isinstance(args, dict):
            payload = args
            payload[key] = msg
        else:
            payload = {key: msg}
        bus.fire_event(payload, tag)

    # https://github.com/zeromq/pyzmq/issues/173#issuecomment-4037083
    # Assertion failed: get_load () == 0 (poller_base.cpp:32)
    time.sleep(0.025)
[ "def", "fire_event", "(", "key", ",", "msg", ",", "tag", ",", "sock_dir", ",", "args", "=", "None", ",", "transport", "=", "'zeromq'", ")", ":", "event", "=", "salt", ".", "utils", ".", "event", ".", "get_event", "(", "'master'", ",", "sock_dir", ","...
27.26087
21.086957
def build_request(self):
    """Build a prepared request object."""
    headers = {}
    # Only advertise a Referer for http(s) parents.
    if (self.parent_url and
            self.parent_url.lower().startswith(HTTP_SCHEMAS)):
        headers["Referer"] = self.parent_url
    request_args = dict(method='GET', url=self.url, headers=headers)
    if self.auth:
        request_args['auth'] = self.auth
    log.debug(LOG_CHECK, "Prepare request with %s", request_args)
    request = requests.Request(**request_args)
    return self.session.prepare_request(request)
[ "def", "build_request", "(", "self", ")", ":", "clientheaders", "=", "{", "}", "if", "(", "self", ".", "parent_url", "and", "self", ".", "parent_url", ".", "lower", "(", ")", ".", "startswith", "(", "HTTP_SCHEMAS", ")", ")", ":", "clientheaders", "[", ...
36
13.75
def __extract_modules(self, loader, name, is_pkg):
    """Load the discovered module and register its routes when it exposes
    a ``__method__`` attribute."""
    mod = loader.find_module(name).load_module(name)
    if not hasattr(mod, '__method__'):
        # No routable method attribute on this module; nothing to do.
        # print('{} has no module attribute method'.format(mod))
        return
    # Register the module's routes on the application.
    ignore = self.__serialize_module_paths()
    module_router = ModuleRouter(mod, ignore_names=ignore).register_route(
        app=self.application, name=name)
    self.__routers.extend(module_router.routers)
    self.__modules.append(mod)
[ "def", "__extract_modules", "(", "self", ",", "loader", ",", "name", ",", "is_pkg", ")", ":", "mod", "=", "loader", ".", "find_module", "(", "name", ")", ".", "load_module", "(", "name", ")", "\"\"\" find the attribute method on each module \"\"\"", "if", "hasat...
40.9
21.25
def gauss_fit(map_data, chs=None, mode='deg', amplitude=1, x_mean=0, y_mean=0,
              x_stddev=None, y_stddev=None, theta=None, cov_matrix=None,
              noise=0, **kwargs):
    """make a 2D Gaussian model and fit the observed data with the model.

    Args:
        map_data (xarray.Dataarray): Dataarray of cube or single chs.
        chs (list of int): in prep.
        mode (str): Coordinates for the fitting
            'pix'
            'deg'
        amplitude (float or None): Initial amplitude value of Gaussian fitting.
        x_mean (float): Initial value of mean of the fitting Gaussian in x.
        y_mean (float): Initial value of mean of the fitting Gaussian in y.
        x_stddev (float or None): Standard deviation of the Gaussian in x
            before rotating by theta.
        y_stddev (float or None): Standard deviation of the Gaussian in y
            before rotating by theta.
        theta (float, optional or None): Rotation angle in radians.
        cov_matrix (ndarray, optional): A 2x2 covariance matrix. If specified,
            overrides the ``x_stddev``, ``y_stddev``, and ``theta`` defaults.
        noise (float): Initial level of the constant background component.

    Returns:
        decode cube (xarray cube) with fitting results in array and attrs.

    Raises:
        ValueError: if ``mode`` is neither 'deg' nor 'pix'.
    """
    if chs is None:
        chs = np.ogrid[0:63]  # the number of channels would be changed

    def make_grid():
        # Coordinate grids for the fit, in map units or in pixels.
        if mode == 'deg':
            return np.meshgrid(map_data.x, map_data.y)
        elif mode == 'pix':
            return np.mgrid[0:len(map_data.y), 0:len(map_data.x)]
        raise ValueError("mode must be 'deg' or 'pix', got %r" % (mode,))

    def channel_image(ch):
        # One channel as a transposed 2-D array with NaNs zeroed out.
        subdata = np.transpose(np.full_like(map_data[:, :, ch],
                                            map_data.values[:, :, ch]))
        subdata[np.isnan(subdata)] = 0
        return subdata

    def fit_channel(subdata, mX, mY):
        # Two-pass fit: first a Gaussian plus constant background to
        # estimate the offset, then a pure Gaussian with that background
        # amplitude subtracted from the initial peak guess.
        g_init = models.Gaussian2D(amplitude=np.nanmax(subdata),
                                   x_mean=x_mean, y_mean=y_mean,
                                   x_stddev=x_stddev, y_stddev=y_stddev,
                                   theta=theta, cov_matrix=cov_matrix,
                                   **kwargs) + models.Const2D(noise)
        fit_g = fitting.LevMarLSQFitter()
        g = fit_g(g_init, mX, mY, subdata)
        g_init2 = models.Gaussian2D(amplitude=np.nanmax(subdata - g.amplitude_1),
                                    x_mean=x_mean, y_mean=y_mean,
                                    x_stddev=x_stddev, y_stddev=y_stddev,
                                    theta=theta, cov_matrix=cov_matrix,
                                    **kwargs)
        fit_g2 = fitting.LevMarLSQFitter()
        g2 = fit_g2(g_init2, mX, mY, subdata)
        return g2, fit_g2

    def uncertainty(fitter):
        # sqrt of the first diagonal element of the parameter covariance,
        # or 0 when the fitter produced no covariance matrix.
        # Bug fix: the original wrote ``unserts = nop.array([0])`` here,
        # which raised NameError whenever param_cov was None.
        cov = fitter.fit_info['param_cov']
        if cov is None:
            return 0
        return np.diag(cov)[0] ** 0.5

    if len(chs) > 1:
        results, peaks, x_means, y_means = [], [], [], []
        x_stddevs, y_stddevs, thetas, uncerts = [], [], [], []
        for ch in chs:
            subdata = channel_image(ch)
            mX, mY = make_grid()
            g2, fit_g2 = fit_channel(subdata, mX, mY)
            results.append(g2(mX, mY))
            peaks.append(g2.amplitude.value)
            x_means.append(g2.x_mean.value)
            y_means.append(g2.y_mean.value)
            x_stddevs.append(g2.x_stddev.value)
            y_stddevs.append(g2.y_stddev.value)
            thetas.append(g2.theta.value)
            uncerts.append(uncertainty(fit_g2))
        results = np.array(results)
        peaks = np.array(peaks)
        x_means = np.array(x_means)
        y_means = np.array(y_means)
        x_stddevs = np.array(x_stddevs)
        y_stddevs = np.array(y_stddevs)
        thetas = np.array(thetas)
        uncerts = np.array(uncerts)
    else:
        subdata = channel_image(0)
        mX, mY = make_grid()
        g2, fit_g2 = fit_channel(subdata, mX, mY)
        results = np.array([g2(mX, mY)])
        peaks = np.array([g2.amplitude.value])
        x_means = np.array([g2.x_mean.value])
        y_means = np.array([g2.y_mean.value])
        x_stddevs = np.array([g2.x_stddev.value])
        y_stddevs = np.array([g2.y_stddev.value])
        thetas = np.array([g2.theta.value])
        # Keep the original 0-d shape for the single-channel uncertainty,
        # but guard against a missing covariance matrix (previously an
        # unguarded np.diag(None) -> TypeError).
        uncerts = np.array(uncertainty(fit_g2))

    result = map_data.copy()
    result.values = np.transpose(results)
    result.attrs.update({'peak': peaks, 'x_mean': x_means, 'y_mean': y_means,
                         'x_stddev': x_stddevs, 'y_stddev': y_stddevs,
                         'theta': thetas, 'uncert': uncerts})
    return result
[ "def", "gauss_fit", "(", "map_data", ",", "chs", "=", "None", ",", "mode", "=", "'deg'", ",", "amplitude", "=", "1", ",", "x_mean", "=", "0", ",", "y_mean", "=", "0", ",", "x_stddev", "=", "None", ",", "y_stddev", "=", "None", ",", "theta", "=", ...
48.424
26.936
def cli(ctx, common_name, directory, blatdb="", genus="", species="", public=False):
    """Add an organism

    Output:

        a dictionary with information about the new organism
    """
    organisms_api = ctx.gi.organisms
    return organisms_api.add_organism(common_name, directory, blatdb=blatdb,
                                      genus=genus, species=species,
                                      public=public)
[ "def", "cli", "(", "ctx", ",", "common_name", ",", "directory", ",", "blatdb", "=", "\"\"", ",", "genus", "=", "\"\"", ",", "species", "=", "\"\"", ",", "public", "=", "False", ")", ":", "return", "ctx", ".", "gi", ".", "organisms", ".", "add_organis...
37.5
32.125
def getconnections(self, vhost=None):
    "Return accepted connections, optionally filtered by vhost"
    if vhost is None:
        # No filter: snapshot every managed connection.
        return list(self.managed_connections)
    return [conn for conn in self.managed_connections
            if conn.protocol.vhost == vhost]
[ "def", "getconnections", "(", "self", ",", "vhost", "=", "None", ")", ":", "if", "vhost", "is", "None", ":", "return", "list", "(", "self", ".", "managed_connections", ")", "else", ":", "return", "[", "c", "for", "c", "in", "self", ".", "managed_connec...
46.333333
20.666667
def popupWidget(self):
    """
    Returns the popup widget for this editor, creating it lazily on
    first access.

    :return     <skyline.gui.XPopupWidget>
    """
    if not self._popup:
        # Build a popup with Save/Cancel buttons anchored to this editor.
        buttons = QDialogButtonBox.Save | QDialogButtonBox.Cancel
        popup = XPopupWidget(self, buttons)
        popup.setShowTitleBar(False)
        popup.setAutoCalculateAnchor(True)
        popup.setPositionLinkedTo(self)
        self._popup = popup
    return self._popup
[ "def", "popupWidget", "(", "self", ")", ":", "if", "not", "self", ".", "_popup", ":", "btns", "=", "QDialogButtonBox", ".", "Save", "|", "QDialogButtonBox", ".", "Cancel", "self", ".", "_popup", "=", "XPopupWidget", "(", "self", ",", "btns", ")", "self",...
36.538462
12.230769
def qwarp_align(dset_from,dset_to,skull_strip=True,mask=None,affine_suffix='_aff',suffix='_qwarp',prefix=None): '''aligns ``dset_from`` to ``dset_to`` using 3dQwarp Will run ``3dSkullStrip`` (unless ``skull_strip`` is ``False``), ``3dUnifize``, ``3dAllineate``, and then ``3dQwarp``. This method will add suffixes to the input dataset for the intermediate files (e.g., ``_ss``, ``_u``). If those files already exist, it will assume they were intelligently named, and use them as is :skull_strip: If True/False, turns skull-stripping of both datasets on/off. If a string matching ``dset_from`` or ``dset_to``, will only skull-strip the given dataset :mask: Applies the given mask to the alignment. Because of the nature of the alignment algorithms, the mask is **always** applied to the ``dset_to``. If this isn't what you want, you need to reverse the transform and re-apply it (e.g., using :meth:`qwarp_invert` and :meth:`qwarp_apply`). If the ``dset_to`` dataset is skull-stripped, the mask will also be resampled to match the ``dset_to`` grid. :affine_suffix: Suffix applied to ``dset_from`` to name the new dataset, as well as the ``.1D`` file. :suffix: Suffix applied to the final ``dset_from`` dataset. 
An additional file with the additional suffix ``_WARP`` will be created containing the parameters (e.g., with the default ``_qwarp`` suffix, the parameters will be in a file with the suffix ``_qwarp_WARP``) :prefix: Alternatively to ``suffix``, explicitly give the full output filename The output affine dataset and 1D, as well as the output of qwarp are named by adding the given suffixes (``affine_suffix`` and ``qwarp_suffix``) to the ``dset_from`` file If ``skull_strip`` is a string instead of ``True``/``False``, it will only skull strip the given dataset instead of both of them # TODO: currently does not work with +tlrc datasets because the filenames get mangled ''' dset_ss = lambda dset: os.path.split(nl.suffix(dset,'_ns'))[1] dset_u = lambda dset: os.path.split(nl.suffix(dset,'_u'))[1] def dset_source(dset): if skull_strip==True or skull_strip==dset: return dset_ss(dset) else: return dset dset_affine = os.path.split(nl.suffix(dset_from,affine_suffix))[1] dset_affine_1D = nl.prefix(dset_affine) + '.1D' dset_qwarp = prefix if dset_qwarp==None: dset_qwarp = os.path.split(nl.suffix(dset_from,suffix))[1] if os.path.exists(dset_qwarp): # final product already exists return affine_align(dset_from,dset_to,skull_strip,mask,affine_suffix) for dset in [dset_from,dset_to]: nl.run([ '3dUnifize', '-prefix', dset_u(dset_source(dset)), '-input', dset_source(dset) ],products=[dset_u(dset_source(dset))]) mask_use = mask if mask: # the mask was probably made in the space of the original dset_to anatomy, # which has now been cropped from the skull stripping. 
So the lesion mask # needs to be resampled to match the corresponding mask if skull_strip==True or skull_strip==dset_to: nl.run(['3dresample','-master',dset_u(dset_ss(dset)),'-inset',mask,'-prefix',nl.suffix(mask,'_resam')],products=nl.suffix(mask,'_resam')) mask_use = nl.suffix(mask,'_resam') warp_cmd = [ '3dQwarp', '-prefix', dset_qwarp, '-duplo', '-useweight', '-blur', '0', '3', '-iwarp', '-base', dset_u(dset_source(dset_to)), '-source', dset_affine ] if mask: warp_cmd += ['-emask', mask_use] nl.run(warp_cmd,products=dset_qwarp)
[ "def", "qwarp_align", "(", "dset_from", ",", "dset_to", ",", "skull_strip", "=", "True", ",", "mask", "=", "None", ",", "affine_suffix", "=", "'_aff'", ",", "suffix", "=", "'_qwarp'", ",", "prefix", "=", "None", ")", ":", "dset_ss", "=", "lambda", "dset"...
47.120482
30.662651
def _build_body_schema(serializer, body_parameters): """ body is built differently, since it's a single argument no matter what. """ description = "" if isinstance(body_parameters, Param): schema = serializer.to_json_schema(body_parameters.arginfo.type) description = body_parameters.description required = True else: if len(body_parameters) == 0: return None required = set() body_properties = {} for name, param in body_parameters.items(): arginfo = param.arginfo body_properties[name] = serializer.to_json_schema(arginfo.type) body_properties[name]["description"] = param.description if arginfo.default is NoDefault: required.add(name) schema = { "type": "object", "required": list(required), "properties": body_properties, } required = len(required) > 0 return BodyParameter( { "name": "body", "description": description, "required": required, "schema": schema, } )
[ "def", "_build_body_schema", "(", "serializer", ",", "body_parameters", ")", ":", "description", "=", "\"\"", "if", "isinstance", "(", "body_parameters", ",", "Param", ")", ":", "schema", "=", "serializer", ".", "to_json_schema", "(", "body_parameters", ".", "ar...
34.9375
14.84375
def synchronous(function, event): """ Runs the function synchronously taking care of exceptions. """ try: function(event) except Exception as error: logger = get_function_logger(function) logger.exception(error)
[ "def", "synchronous", "(", "function", ",", "event", ")", ":", "try", ":", "function", "(", "event", ")", "except", "Exception", "as", "error", ":", "logger", "=", "get_function_logger", "(", "function", ")", "logger", ".", "exception", "(", "error", ")" ]
27.444444
11.444444
def hqwe(zsrc, zrec, lsrc, lrec, off, factAng, depth, ab, etaH, etaV, zetaH, zetaV, xdirect, qweargs, use_ne_eval, msrc, mrec): r"""Hankel Transform using Quadrature-With-Extrapolation. *Quadrature-With-Extrapolation* was introduced to geophysics by [Key12]_. It is one of many so-called *ISE* methods to solve Hankel Transforms, where *ISE* stands for Integration, Summation, and Extrapolation. Following [Key12]_, but without going into the mathematical details here, the QWE method rewrites the Hankel transform of the form .. math:: F(r) = \int^\infty_0 f(\lambda)J_v(\lambda r)\ \mathrm{d}\lambda as a quadrature sum which form is similar to the DLF (equation 15), .. math:: F_i \approx \sum^m_{j=1} f(x_j/r)w_j g(x_j) = \sum^m_{j=1} f(x_j/r)\hat{g}(x_j) \ , but with various bells and whistles applied (using the so-called Shanks transformation in the form of a routine called :math:`\epsilon`-algorithm ([Shan55]_, [Wynn56]_; implemented with algorithms from [Tref00]_ and [Weni89]_). This function is based on ``get_CSEM1D_FD_QWE.m``, ``qwe.m``, and ``getBesselWeights.m`` from the source code distributed with [Key12]_. In the spline-version, ``hqwe`` checks how steep the decay of the wavenumber-domain result is, and calls QUAD for the very steep interval, for which QWE is not suited. The function is called from one of the modelling routines in :mod:`model`. Consult these modelling routines for a description of the input and output parameters. Returns ------- fEM : array Returns frequency-domain EM response. kcount : int Kernel count. conv : bool If true, QWE/QUAD converged. If not, <htarg> might have to be adjusted. """ # Input params have an additional dimension for frequency, reduce here etaH = etaH[0, :] etaV = etaV[0, :] zetaH = zetaH[0, :] zetaV = zetaV[0, :] # Get rtol, atol, nquad, maxint, and pts_per_dec rtol, atol, nquad, maxint, pts_per_dec = qweargs[:5] # 1. 
PRE-COMPUTE THE BESSEL FUNCTIONS # at fixed quadrature points for each interval and multiply by the # corresponding Gauss quadrature weights # Get Gauss quadrature weights g_x, g_w = special.p_roots(nquad) # Compute n zeros of the Bessel function of the first kind of order 1 using # the Newton-Raphson method, which is fast enough for our purposes. Could # be done with a loop for (but it is slower): # b_zero[i] = optimize.newton(special.j1, b_zero[i]) # Initial guess using asymptotic zeros b_zero = np.pi*np.arange(1.25, maxint+1) # Newton-Raphson iterations for i in range(10): # 10 is more than enough, usually stops in 5 # Evaluate b_x0 = special.j1(b_zero) # j0 and j1 have faster versions b_x1 = special.jv(2, b_zero) # j2 does not have a faster version # The step length b_h = -b_x0/(b_x0/b_zero - b_x1) # Take the step b_zero += b_h # Check for convergence if all(np.abs(b_h) < 8*np.finfo(float).eps*b_zero): break # 2. COMPUTE THE QUADRATURE INTERVALS AND BESSEL FUNCTION WEIGHTS # Lower limit of integrand, a small but non-zero value xint = np.concatenate((np.array([1e-20]), b_zero)) # Assemble the output arrays dx = np.repeat(np.diff(xint)/2, nquad) Bx = dx*(np.tile(g_x, maxint) + 1) + np.repeat(xint[:-1], nquad) BJ0 = special.j0(Bx)*np.tile(g_w, maxint) BJ1 = special.j1(Bx)*np.tile(g_w, maxint) # 3. START QWE # Intervals and lambdas for all offset intervals = xint/off[:, None] lambd = Bx/off[:, None] # The following lines until # "Call and return QWE, depending if spline or not" # are part of the splined routine. However, we calculate it here to get # the non-zero kernels, `k_used`. 
# New lambda, from min to max required lambda with pts_per_dec start = np.log10(lambd.min()) stop = np.log10(lambd.max()) # If not spline, we just calculate three lambdas to check if pts_per_dec == 0: ilambd = np.logspace(start, stop, 3) else: ilambd = np.logspace(start, stop, (stop-start)*pts_per_dec + 1) # Call the kernel PJ0, PJ1, PJ0b = kernel.wavenumber(zsrc, zrec, lsrc, lrec, depth, etaH[None, :], etaV[None, :], zetaH[None, :], zetaV[None, :], np.atleast_2d(ilambd), ab, xdirect, msrc, mrec, use_ne_eval) # Check which kernels have information k_used = [True, True, True] for i, val in enumerate((PJ0, PJ1, PJ0b)): if val is None: k_used[i] = False # Call and return QWE, depending if spline or not if pts_per_dec != 0: # If spline, we calculate all kernels here # Interpolation : Has to be done separately on each PJ, # in order to work with multiple offsets which have different angles. if k_used[0]: sPJ0r = iuSpline(np.log(ilambd), PJ0.real) sPJ0i = iuSpline(np.log(ilambd), PJ0.imag) else: sPJ0r = None sPJ0i = None if k_used[1]: sPJ1r = iuSpline(np.log(ilambd), PJ1.real) sPJ1i = iuSpline(np.log(ilambd), PJ1.imag) else: sPJ1r = None sPJ1i = None if k_used[2]: sPJ0br = iuSpline(np.log(ilambd), PJ0b.real) sPJ0bi = iuSpline(np.log(ilambd), PJ0b.imag) else: sPJ0br = None sPJ0bi = None # Get quadargs: diff_quad, a, b, limit diff_quad, a, b, limit = qweargs[5:] # Set quadargs if not given: if not limit: limit = maxint if not a: a = intervals[:, 0] else: a = a*np.ones(off.shape) if not b: b = intervals[:, -1] else: b = b*np.ones(off.shape) # Check if we use QWE or SciPy's QUAD # If there are any steep decays within an interval we have to use QUAD, # as QWE is not designed for these intervals. 
check0 = np.log(intervals[:, :-1]) check1 = np.log(intervals[:, 1:]) numerator = np.zeros((off.size, maxint), dtype=complex) denominator = np.zeros((off.size, maxint), dtype=complex) if k_used[0]: numerator += sPJ0r(check0) + 1j*sPJ0i(check0) denominator += sPJ0r(check1) + 1j*sPJ0i(check1) if k_used[1]: numerator += sPJ1r(check0) + 1j*sPJ1i(check0) denominator += sPJ1r(check1) + 1j*sPJ1i(check1) if k_used[2]: numerator += sPJ0br(check0) + 1j*sPJ0bi(check0) denominator += sPJ0br(check1) + 1j*sPJ0bi(check1) doqwe = np.all((np.abs(numerator)/np.abs(denominator) < diff_quad), 1) # Pre-allocate output array fEM = np.zeros(off.size, dtype=complex) conv = True # Carry out SciPy's Quad if required if np.any(~doqwe): # Loop over offsets that require Quad for i in np.where(~doqwe)[0]: # Input-dictionary for quad iinp = {'a': a[i], 'b': b[i], 'epsabs': atol, 'epsrel': rtol, 'limit': limit} fEM[i], tc = quad(sPJ0r, sPJ0i, sPJ1r, sPJ1i, sPJ0br, sPJ0bi, ab, off[i], factAng[i], iinp) # Update conv conv *= tc # Return kcount=1 in case no QWE is calculated kcount = 1 if np.any(doqwe): # Get EM-field at required offsets if k_used[0]: sPJ0 = sPJ0r(np.log(lambd)) + 1j*sPJ0i(np.log(lambd)) if k_used[1]: sPJ1 = sPJ1r(np.log(lambd)) + 1j*sPJ1i(np.log(lambd)) if k_used[2]: sPJ0b = sPJ0br(np.log(lambd)) + 1j*sPJ0bi(np.log(lambd)) # Carry out and return the Hankel transform for this interval sEM = np.zeros_like(numerator, dtype=complex) if k_used[1]: sEM += np.sum(np.reshape(sPJ1*BJ1, (off.size, nquad, -1), order='F'), 1) if ab in [11, 12, 21, 22, 14, 24, 15, 25]: # Because of J2 # J2(kr) = 2/(kr)*J1(kr) - J0(kr) sEM /= np.atleast_1d(off[:, np.newaxis]) if k_used[2]: sEM += np.sum(np.reshape(sPJ0b*BJ0, (off.size, nquad, -1), order='F'), 1) if k_used[1] or k_used[2]: sEM *= factAng[:, np.newaxis] if k_used[0]: sEM += np.sum(np.reshape(sPJ0*BJ0, (off.size, nquad, -1), order='F'), 1) getkernel = sEM[doqwe, :] # Get QWE fEM[doqwe], kcount, tc = qwe(rtol, atol, maxint, getkernel, 
intervals[doqwe, :], None, None, None) conv *= tc else: # If not spline, we define the wavenumber-kernel here def getkernel(i, inplambd, inpoff, inpfang): r"""Return wavenumber-domain-kernel as a fct of interval i.""" # Indices and factor for this interval iB = i*nquad + np.arange(nquad) # PJ0 and PJ1 for this interval PJ0, PJ1, PJ0b = kernel.wavenumber(zsrc, zrec, lsrc, lrec, depth, etaH[None, :], etaV[None, :], zetaH[None, :], zetaV[None, :], np.atleast_2d(inplambd)[:, iB], ab, xdirect, msrc, mrec, use_ne_eval) # Carry out and return the Hankel transform for this interval gEM = np.zeros_like(inpoff, dtype=complex) if k_used[1]: gEM += inpfang*np.dot(PJ1[0, :], BJ1[iB]) if ab in [11, 12, 21, 22, 14, 24, 15, 25]: # Because of J2 # J2(kr) = 2/(kr)*J1(kr) - J0(kr) gEM /= np.atleast_1d(inpoff) if k_used[2]: gEM += inpfang*np.dot(PJ0b[0, :], BJ0[iB]) if k_used[0]: gEM += np.dot(PJ0[0, :], BJ0[iB]) return gEM # Get QWE fEM, kcount, conv = qwe(rtol, atol, maxint, getkernel, intervals, lambd, off, factAng) return fEM, kcount, conv
[ "def", "hqwe", "(", "zsrc", ",", "zrec", ",", "lsrc", ",", "lrec", ",", "off", ",", "factAng", ",", "depth", ",", "ab", ",", "etaH", ",", "etaV", ",", "zetaH", ",", "zetaV", ",", "xdirect", ",", "qweargs", ",", "use_ne_eval", ",", "msrc", ",", "m...
36.358885
23.299652
def available(self, src, dst, model): """ Iterate over all registered plugins or plugin points and prepare to add them to database. """ for name, point in six.iteritems(src): inst = dst.pop(name, None) if inst is None: self.print_(1, "Registering %s for %s" % (model.__name__, name)) inst = model(pythonpath=name) if inst.status == REMOVED: self.print_(1, "Updating %s for %s" % (model.__name__, name)) # re-enable a previously removed plugin point and its plugins inst.status = ENABLED yield point, inst
[ "def", "available", "(", "self", ",", "src", ",", "dst", ",", "model", ")", ":", "for", "name", ",", "point", "in", "six", ".", "iteritems", "(", "src", ")", ":", "inst", "=", "dst", ".", "pop", "(", "name", ",", "None", ")", "if", "inst", "is"...
44.6875
14.3125
def get_containers_by_name(self, name): """ get all task which relative with task name :param name: :class:`str`, task name :return: :class:`list`, container list """ code, containers = self.get_containers() if code != httplib.OK: return [] return [container for container in containers if any(map(lambda x: x.startswith(name), container.Names))]
[ "def", "get_containers_by_name", "(", "self", ",", "name", ")", ":", "code", ",", "containers", "=", "self", ".", "get_containers", "(", ")", "if", "code", "!=", "httplib", ".", "OK", ":", "return", "[", "]", "return", "[", "container", "for", "container...
33.846154
14.307692
def get_object_header(self, ref): """ Use this method to quickly examine the type and size of the object behind the given ref. :note: The method will only suffer from the costs of command invocation once and reuses the command in subsequent calls. :return: (hexsha, type_string, size_as_int)""" cmd = self.__get_persistent_cmd("cat_file_header", "cat_file", batch_check=True) return self.__get_object_header(cmd, ref)
[ "def", "get_object_header", "(", "self", ",", "ref", ")", ":", "cmd", "=", "self", ".", "__get_persistent_cmd", "(", "\"cat_file_header\"", ",", "\"cat_file\"", ",", "batch_check", "=", "True", ")", "return", "self", ".", "__get_object_header", "(", "cmd", ","...
43.3
19.7
def com_google_fonts_check_name_familyname(ttFont, style, familyname_with_spaces): """ Check name table: FONT_FAMILY_NAME entries. """ from fontbakery.utils import name_entry_id failed = False only_weight = get_only_weight(style) for name in ttFont['name'].names: if name.nameID == NameID.FONT_FAMILY_NAME: if name.platformID == PlatformID.MACINTOSH: expected_value = familyname_with_spaces elif name.platformID == PlatformID.WINDOWS: if style in ['Regular', 'Italic', 'Bold', 'Bold Italic']: expected_value = familyname_with_spaces else: expected_value = " ".join([familyname_with_spaces, only_weight]).strip() else: failed = True yield FAIL, ("Font should not have a " "{} entry!").format(name_entry_id(name)) continue string = name.string.decode(name.getEncoding()).strip() if string != expected_value: failed = True yield FAIL, ("Entry {} on the 'name' table: " "Expected '{}' " "but got '{}'.").format(name_entry_id(name), expected_value, string) if not failed: yield PASS, "FONT_FAMILY_NAME entries are all good."
[ "def", "com_google_fonts_check_name_familyname", "(", "ttFont", ",", "style", ",", "familyname_with_spaces", ")", ":", "from", "fontbakery", ".", "utils", "import", "name_entry_id", "failed", "=", "False", "only_weight", "=", "get_only_weight", "(", "style", ")", "f...
38.166667
16.527778
def fishqq(lon=None, lat=None, di_block=None): """ Test whether a distribution is Fisherian and make a corresponding Q-Q plot. The Q-Q plot shows the data plotted against the value expected from a Fisher distribution. The first plot is the uniform plot which is the Fisher model distribution in terms of longitude (declination). The second plot is the exponential plot which is the Fisher model distribution in terms of latitude (inclination). In addition to the plots, the test statistics Mu (uniform) and Me (exponential) are calculated and compared against the critical test values. If Mu or Me are too large in comparision to the test statistics, the hypothesis that the distribution is Fisherian is rejected (see Fisher et al., 1987). Parameters: ----------- lon : longitude or declination of the data lat : latitude or inclination of the data or di_block: a nested list of [dec,inc] A di_block can be provided in which case it will be used instead of dec, inc lists. Output: ----------- dictionary containing lon : mean longitude (or declination) lat : mean latitude (or inclination) N : number of vectors Mu : Mu test statistic value for the data Mu_critical : critical value for Mu Me : Me test statistic value for the data Me_critical : critical value for Me if the data has two modes with N >=10 (N and R) two of these dictionaries will be returned Examples -------- In this example, directions are sampled from a Fisher distribution using ``ipmag.fishrot`` and then the ``ipmag.fishqq`` function is used to test whether that distribution is Fisherian: >>> directions = ipmag.fishrot(k=40, n=50, dec=200, inc=50) >>> ipmag.fishqq(di_block = directions) {'Dec': 199.73564290371894, 'Inc': 49.017612342358298, 'Me': 0.78330310031220352, 'Me_critical': 1.094, 'Mode': 'Mode 1', 'Mu': 0.69915926146177099, 'Mu_critical': 1.207, 'N': 50, 'Test_result': 'consistent with Fisherian model'} The above example passed a di_block to the function as an input. 
Lists of paired declination and inclination can also be used as inputs. Here the directions di_block is unpacked to separate declination and inclination lists using the ``ipmag.unpack_di_block`` functionwhich are then used as input to fishqq: >>> dec_list, inc_list = ipmag.unpack_di_block(directions) >>> ipmag.fishqq(lon=dec_list, lat=inc_list) """ if di_block is None: all_dirs = make_di_block(lon, lat) else: all_dirs = di_block ppars = pmag.doprinc(all_dirs) # get principal directions rDIs = [] nDIs = [] QQ_dict1 = {} QQ_dict2 = {} for rec in all_dirs: angle = pmag.angle([rec[0], rec[1]], [ppars['dec'], ppars['inc']]) if angle > 90.: rDIs.append(rec) else: nDIs.append(rec) if len(rDIs) >= 10 or len(nDIs) >= 10: D1, I1 = [], [] QQ = {'unf': 1, 'exp': 2} if len(nDIs) < 10: ppars = pmag.doprinc(rDIs) # get principal directions Drbar, Irbar = ppars['dec'] - 180., -ppars['inc'] Nr = len(rDIs) for di in rDIs: d, irot = pmag.dotilt( di[0], di[1], Drbar - 180., 90. - Irbar) # rotate to mean drot = d - 180. if drot < 0: drot = drot + 360. D1.append(drot) I1.append(irot) Dtit = 'Mode 2 Declinations' Itit = 'Mode 2 Inclinations' else: ppars = pmag.doprinc(nDIs) # get principal directions Dnbar, Inbar = ppars['dec'], ppars['inc'] Nn = len(nDIs) for di in nDIs: d, irot = pmag.dotilt( di[0], di[1], Dnbar - 180., 90. - Inbar) # rotate to mean drot = d - 180. if drot < 0: drot = drot + 360. 
D1.append(drot) I1.append(irot) Dtit = 'Mode 1 Declinations' Itit = 'Mode 1 Inclinations' plt.figure(figsize=(6, 3)) Mu_n, Mu_ncr = pmagplotlib.plot_qq_unf( QQ['unf'], D1, Dtit, subplot=True) # make plot Me_n, Me_ncr = pmagplotlib.plot_qq_exp( QQ['exp'], I1, Itit, subplot=True) # make plot plt.tight_layout() if Mu_n <= Mu_ncr and Me_n <= Me_ncr: F_n = 'consistent with Fisherian model' else: F_n = 'Fisherian model rejected' QQ_dict1['Mode'] = 'Mode 1' QQ_dict1['Dec'] = Dnbar QQ_dict1['Inc'] = Inbar QQ_dict1['N'] = Nn QQ_dict1['Mu'] = Mu_n QQ_dict1['Mu_critical'] = Mu_ncr QQ_dict1['Me'] = Me_n QQ_dict1['Me_critical'] = Me_ncr QQ_dict1['Test_result'] = F_n if len(rDIs) > 10 and len(nDIs) > 10: D2, I2 = [], [] ppars = pmag.doprinc(rDIs) # get principal directions Drbar, Irbar = ppars['dec'] - 180., -ppars['inc'] Nr = len(rDIs) for di in rDIs: d, irot = pmag.dotilt( di[0], di[1], Drbar - 180., 90. - Irbar) # rotate to mean drot = d - 180. if drot < 0: drot = drot + 360. D2.append(drot) I2.append(irot) Dtit = 'Mode 2 Declinations' Itit = 'Mode 2 Inclinations' plt.figure(figsize=(6, 3)) Mu_r, Mu_rcr = pmagplotlib.plot_qq_unf( QQ['unf'], D2, Dtit, subplot=True) # make plot Me_r, Me_rcr = pmagplotlib.plot_qq_exp( QQ['exp'], I2, Itit, subplot=True) # make plot plt.tight_layout() if Mu_r <= Mu_rcr and Me_r <= Me_rcr: F_r = 'consistent with Fisherian model' else: F_r = 'Fisherian model rejected' QQ_dict2['Mode'] = 'Mode 2' QQ_dict2['Dec'] = Drbar QQ_dict2['Inc'] = Irbar QQ_dict2['N'] = Nr QQ_dict2['Mu'] = Mu_r QQ_dict2['Mu_critical'] = Mu_rcr QQ_dict2['Me'] = Me_r QQ_dict2['Me_critical'] = Me_rcr QQ_dict2['Test_result'] = F_r if QQ_dict2: return QQ_dict1, QQ_dict2 elif QQ_dict1: return QQ_dict1 else: print('you need N> 10 for at least one mode')
[ "def", "fishqq", "(", "lon", "=", "None", ",", "lat", "=", "None", ",", "di_block", "=", "None", ")", ":", "if", "di_block", "is", "None", ":", "all_dirs", "=", "make_di_block", "(", "lon", ",", "lat", ")", "else", ":", "all_dirs", "=", "di_block", ...
35.607955
17.255682
def transform(self, photo, **kwds): """ Endpoint: /photo/<id>/transform.json Performs the specified transformations. eg. transform(photo, rotate=90) Returns the transformed photo. """ result = self._client.post("/photo/%s/transform.json" % self._extract_id(photo), **kwds)["result"] # APIv1 doesn't return the transformed photo (frontend issue #955) if isinstance(result, bool): # pragma: no cover result = self._client.get("/photo/%s/view.json" % self._extract_id(photo))["result"] return Photo(self._client, result)
[ "def", "transform", "(", "self", ",", "photo", ",", "*", "*", "kwds", ")", ":", "result", "=", "self", ".", "_client", ".", "post", "(", "\"/photo/%s/transform.json\"", "%", "self", ".", "_extract_id", "(", "photo", ")", ",", "*", "*", "kwds", ")", "...
39.166667
16.5
async def fastStreamedQuery(self, url, *, headers=None, verify=True): """ Send a GET request with short timeout, do not retry, and return streamed response. """ response = await self.session.get(url, headers=self._buildHeaders(headers), timeout=HTTP_SHORT_TIMEOUT, ssl=verify) response.raise_for_status() return response
[ "async", "def", "fastStreamedQuery", "(", "self", ",", "url", ",", "*", ",", "headers", "=", "None", ",", "verify", "=", "True", ")", ":", "response", "=", "await", "self", ".", "session", ".", "get", "(", "url", ",", "headers", "=", "self", ".", "...
44.3
20.9
def query(self, sql=None, filename=None, **kwargs): """ run raw sql from sql or file against. :param sql: Raw SQL query to pass directly to the connection. :type sql: string :param filename: Path to a file containing a SQL query. The path should be relative to CWD. :type filename: string :param db: `optional` Database name from your ``jardin_conf.py``, overrides the default database set in the model declaration. :type db: string :param role: `optional` One of ``('master', 'replica')`` to override the default. :type role: string :returns: ``jardin.Collection`` collection, which is a ``pandas.DataFrame``. """ results = query( sql=sql, filename=filename, db=self.db_names[kwargs.get('role', 'replica')], **kwargs ) if results is None: return None else: return self.collection_instance(results)
[ "def", "query", "(", "self", ",", "sql", "=", "None", ",", "filename", "=", "None", ",", "*", "*", "kwargs", ")", ":", "results", "=", "query", "(", "sql", "=", "sql", ",", "filename", "=", "filename", ",", "db", "=", "self", ".", "db_names", "["...
40.541667
25.125
def overlay_gateway_monitor_remote_endpoint(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") overlay_gateway = ET.SubElement(config, "overlay-gateway", xmlns="urn:brocade.com:mgmt:brocade-tunnels") name_key = ET.SubElement(overlay_gateway, "name") name_key.text = kwargs.pop('name') monitor = ET.SubElement(overlay_gateway, "monitor") session_key = ET.SubElement(monitor, "session") session_key.text = kwargs.pop('session') remote_endpoint = ET.SubElement(monitor, "remote-endpoint") remote_endpoint.text = kwargs.pop('remote_endpoint') callback = kwargs.pop('callback', self._callback) return callback(config)
[ "def", "overlay_gateway_monitor_remote_endpoint", "(", "self", ",", "*", "*", "kwargs", ")", ":", "config", "=", "ET", ".", "Element", "(", "\"config\"", ")", "overlay_gateway", "=", "ET", ".", "SubElement", "(", "config", ",", "\"overlay-gateway\"", ",", "xml...
48.4
17.933333
def reopen(self, file_obj): """Reopen the file-like object in a safe manner.""" file_obj.open('U') if sys.version_info[0] <= 2: return file_obj else: return codecs.getreader('utf-8')(file_obj)
[ "def", "reopen", "(", "self", ",", "file_obj", ")", ":", "file_obj", ".", "open", "(", "'U'", ")", "if", "sys", ".", "version_info", "[", "0", "]", "<=", "2", ":", "return", "file_obj", "else", ":", "return", "codecs", ".", "getreader", "(", "'utf-8'...
34.571429
12.142857
def alias_field(model, field): """ Return the prefix name of a field """ for part in field.split(LOOKUP_SEP)[:-1]: model = associate_model(model,part) return model.__name__ + "-" + field.split(LOOKUP_SEP)[-1]
[ "def", "alias_field", "(", "model", ",", "field", ")", ":", "for", "part", "in", "field", ".", "split", "(", "LOOKUP_SEP", ")", "[", ":", "-", "1", "]", ":", "model", "=", "associate_model", "(", "model", ",", "part", ")", "return", "model", ".", "...
32.857143
6
def glibc_version_string(): "Returns glibc version string, or None if not using glibc." # ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen # manpage says, "If filename is NULL, then the returned handle is for the # main program". This way we can let the linker do the work to figure out # which libc our process is actually using. process_namespace = ctypes.CDLL(None) try: gnu_get_libc_version = process_namespace.gnu_get_libc_version except AttributeError: # Symbol doesn't exist -> therefore, we are not linked to # glibc. return None # Call gnu_get_libc_version, which returns a string like "2.5" gnu_get_libc_version.restype = ctypes.c_char_p version_str = gnu_get_libc_version() # py2 / py3 compatibility: if not isinstance(version_str, str): version_str = version_str.decode("ascii") return version_str
[ "def", "glibc_version_string", "(", ")", ":", "# ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen", "# manpage says, \"If filename is NULL, then the returned handle is for the", "# main program\". This way we can let the linker do the work to figure out", "# which libc our process ...
39.304348
21.217391
def mismatches(args): """ %prog mismatches blastfile Print out histogram of mismatches of HSPs, usually for evaluating SNP level. """ from jcvi.utils.cbook import percentage from jcvi.graphics.histogram import stem_leaf_plot p = OptionParser(mismatches.__doc__) opts, args = p.parse_args(args) if len(args) != 1: sys.exit(not p.print_help()) blastfile, = args data = [] matches = 0 b = Blast(blastfile) for query, bline in b.iter_best_hit(): mm = bline.nmismatch + bline.ngaps data.append(mm) nonzeros = [x for x in data if x != 0] title = "Polymorphic sites: {0}".\ format(percentage(len(nonzeros), len(data))) stem_leaf_plot(data, 0, 20, 20, title=title)
[ "def", "mismatches", "(", "args", ")", ":", "from", "jcvi", ".", "utils", ".", "cbook", "import", "percentage", "from", "jcvi", ".", "graphics", ".", "histogram", "import", "stem_leaf_plot", "p", "=", "OptionParser", "(", "mismatches", ".", "__doc__", ")", ...
25.482759
18.241379
def get_bibtex_string(self): """ Get BibTeX reference from CIF file. :param data: :return: BibTeX string """ bibtex_keys = {'author': ('_publ_author_name', '_citation_author_name'), 'title': ('_publ_section_title', '_citation_title'), 'journal': ('_journal_name_full', '_journal_name_abbrev', '_citation_journal_full', '_citation_journal_abbrev'), 'volume': ('_journal_volume', '_citation_journal_volume'), 'year': ('_journal_year', '_citation_year'), 'number': ('_journal_number', '_citation_number'), 'page_first': ('_journal_page_first', '_citation_page_first'), 'page_last': ('_journal_page_last', '_citation_page_last'), 'doi': ('_journal_DOI', '_citation_DOI')} entries = {} # TODO: parse '_publ_section_references' when it exists? # TODO: CIF specification supports multiple citations. for idx, data in enumerate(self._cif.data.values()): # convert to lower-case keys, some cif files inconsistent data = {k.lower(): v for k, v in data.data.items()} bibtex_entry = {} for field, tags in bibtex_keys.items(): for tag in tags: if tag in data: if isinstance(data[tag], list): bibtex_entry[field] = data[tag][0] else: bibtex_entry[field] = data[tag] # convert to bibtex author format ('and' delimited) if 'author' in bibtex_entry: # separate out semicolon authors if isinstance(bibtex_entry["author"], str): if ";" in bibtex_entry["author"]: bibtex_entry["author"] = bibtex_entry["author"].split(";") if isinstance(bibtex_entry['author'], list): bibtex_entry['author'] = ' and '.join(bibtex_entry['author']) # convert to bibtex page range format, use empty string if not specified if ('page_first' in bibtex_entry) or ('page_last' in bibtex_entry): bibtex_entry['pages'] = '{0}--{1}'.format(bibtex_entry.get('page_first', ''), bibtex_entry.get('page_last', '')) bibtex_entry.pop('page_first', None) # and remove page_first, page_list if present bibtex_entry.pop('page_last', None) # cite keys are given as cif-reference-idx in order they are found entries['cif-reference-{}'.format(idx)] = 
Entry('article', list(bibtex_entry.items())) return BibliographyData(entries).to_string(bib_format='bibtex')
[ "def", "get_bibtex_string", "(", "self", ")", ":", "bibtex_keys", "=", "{", "'author'", ":", "(", "'_publ_author_name'", ",", "'_citation_author_name'", ")", ",", "'title'", ":", "(", "'_publ_section_title'", ",", "'_citation_title'", ")", ",", "'journal'", ":", ...
47.333333
28.866667
def set(self, item_name, item_value): """ Sets the value of an option in the configuration. :param str item_name: The name of the option to set. :param item_value: The value of the option to set. """ if self.prefix: item_name = self.prefix + self.seperator + item_name item_names = item_name.split(self.seperator) item_last = item_names.pop() node = self._storage for item_name in item_names: if not item_name in node: node[item_name] = {} node = node[item_name] node[item_last] = item_value return
[ "def", "set", "(", "self", ",", "item_name", ",", "item_value", ")", ":", "if", "self", ".", "prefix", ":", "item_name", "=", "self", ".", "prefix", "+", "self", ".", "seperator", "+", "item_name", "item_names", "=", "item_name", ".", "split", "(", "se...
28.833333
13.722222