text
stringlengths
89
104k
code_tokens
list
avg_line_len
float64
7.91
980
score
float64
0
630
def update_tenant(self, tenant, name=None, description=None, enabled=True):
    """ADMIN ONLY. Updates an existing tenant.

    :param tenant: tenant object or ID to update
    :param name: new tenant name, applied only when truthy
    :param description: new tenant description, applied only when truthy
    :param enabled: whether the tenant is active (default True)
    :returns: a ``Tenant`` built from the API response body
    """
    payload = {"enabled": enabled}
    if name:
        payload["name"] = name
    if description:
        payload["description"] = description
    uri = "tenants/%s" % utils.get_id(tenant)
    resp, resp_body = self.method_put(uri, data={"tenant": payload})
    return Tenant(self, resp_body)
[ "def", "update_tenant", "(", "self", ",", "tenant", ",", "name", "=", "None", ",", "description", "=", "None", ",", "enabled", "=", "True", ")", ":", "tenant_id", "=", "utils", ".", "get_id", "(", "tenant", ")", "data", "=", "{", "\"tenant\"", ":", "...
36.785714
12.785714
def program_global_reg(self):
    """
    Send the global register to the chip.

    Loads the values of self['GLOBAL_REG'] onto the chip. Includes
    enabling the clock, and loading the Control (CTR) and DAC shadow
    registers.
    """
    self._clear_strobes()
    gr_size = len(self['GLOBAL_REG'][:])  # get the size
    self['SEQ']['SHIFT_IN'][0:gr_size] = self['GLOBAL_REG'][:]  # this will be shifted out
    self['SEQ']['GLOBAL_SHIFT_EN'][0:gr_size] = bitarray(gr_size * '1')  # this is to enable clock
    # Pulse the load strobes one clock after the last shifted bit so the
    # shadow registers latch the fully shifted value.
    self['SEQ']['GLOBAL_CTR_LD'][gr_size + 1:gr_size + 2] = bitarray("1")  # load signals
    self['SEQ']['GLOBAL_DAC_LD'][gr_size + 1:gr_size + 2] = bitarray("1")
    # Execute the program (write bits to output pins)
    # + 1 extra 0 bit so that everything ends on LOW instead of HIGH
    self._run_seq(gr_size + 3)
[ "def", "program_global_reg", "(", "self", ")", ":", "self", ".", "_clear_strobes", "(", ")", "gr_size", "=", "len", "(", "self", "[", "'GLOBAL_REG'", "]", "[", ":", "]", ")", "# get the size", "self", "[", "'SEQ'", "]", "[", "'SHIFT_IN'", "]", "[", "0"...
41.619048
26.761905
def build_url_request(self):
    """Build the URL used for calling the Bbox API.

    Sets ``self.url`` and ``self.authentication_type`` as side effects.
    """
    # A private (LAN) address talks plain HTTP locally; anything else is
    # a remote (WAN) call over HTTPS on the default remote port.
    if net.IPAddress(self.ip).is_private():
        base = "http://{}".format(self.ip)
        self.authentication_type = BboxConstant.AUTHENTICATION_TYPE_LOCAL
    else:
        base = "https://{}:{}".format(self.ip, BboxConstant.DEFAULT_REMOTE_PORT)
        self.authentication_type = BboxConstant.AUTHENTICATION_TYPE_REMOTE
    # Append the API prefix, then the optional class and method segments.
    if self.api_class is None:
        base = "{}/{}".format(base, self.API_PREFIX)
    else:
        base = "{}/{}/{}".format(base, self.API_PREFIX, self.api_class)
    if self.api_method is not None:
        base = "{}/{}".format(base, self.api_method)
    self.url = base
[ "def", "build_url_request", "(", "self", ")", ":", "# Check if the ip is LAN or WAN", "if", "net", ".", "IPAddress", "(", "self", ".", "ip", ")", ".", "is_private", "(", ")", ":", "url", "=", "\"http://{}\"", ".", "format", "(", "self", ".", "ip", ")", "...
37.73913
18.695652
def setup(self, interval): """Prepares the tests for execution, interval in ms""" self.trace_counter = 0 self._halt = False self.interval = interval
[ "def", "setup", "(", "self", ",", "interval", ")", ":", "self", ".", "trace_counter", "=", "0", "self", ".", "_halt", "=", "False", "self", ".", "interval", "=", "interval" ]
29.333333
14.333333
def threshold(np, acc, stream_raster, threshold=100., workingdir=None,
              mpiexedir=None, exedir=None, log_file=None, runtime_file=None,
              hostfile=None):
    """Run threshold for stream raster.

    NOTE(review): ``np`` here is the MPI process count, not numpy —
    confirm against TauDEM.run's signature.
    """
    fname = TauDEM.func_name('threshold')
    # Positional dict groups appear to be: input files, working dir,
    # scalar params, output files, MPI settings, log settings.
    return TauDEM.run(FileClass.get_executable_fullpath(fname, exedir),
                      {'-ssa': acc},
                      workingdir,
                      {'-thresh': threshold},
                      {'-src': stream_raster},
                      {'mpipath': mpiexedir, 'hostfile': hostfile, 'n': np},
                      {'logfile': log_file, 'runtimefile': runtime_file})
[ "def", "threshold", "(", "np", ",", "acc", ",", "stream_raster", ",", "threshold", "=", "100.", ",", "workingdir", "=", "None", ",", "mpiexedir", "=", "None", ",", "exedir", "=", "None", ",", "log_file", "=", "None", ",", "runtime_file", "=", "None", "...
63.9
23.4
def _check_groups(s, groups): """Ensures that all particles are included in exactly 1 group""" ans = [] for g in groups: ans.extend(g) if np.unique(ans).size != np.size(ans): return False elif np.unique(ans).size != s.obj_get_positions().shape[0]: return False else: return (np.arange(s.obj_get_radii().size) == np.sort(ans)).all()
[ "def", "_check_groups", "(", "s", ",", "groups", ")", ":", "ans", "=", "[", "]", "for", "g", "in", "groups", ":", "ans", ".", "extend", "(", "g", ")", "if", "np", ".", "unique", "(", "ans", ")", ".", "size", "!=", "np", ".", "size", "(", "ans...
34.272727
18.818182
def check_basic_battery_status(the_session, the_helper, the_snmp_value):
    """Map the APC basic battery status onto a Nagios plugin state.

    OID .1.3.6.1.4.1.318.1.1.1.2.1.1.0 (upsBasicBatteryStatus):
    unknown(1), batteryNormal(2), batteryLow(3), batteryInFaultCondition(4).
    batteryNormal -> OK, batteryLow -> WARNING, everything else -> CRITICAL.
    """
    state_names = {
        '1': 'unknown',
        '2': 'batteryNormal',
        '3': 'batteryLow',
        '4': 'batteryInFaultCondition',
    }
    label = state_names.get(the_snmp_value, 'unknown')
    if the_snmp_value == '2':
        the_helper.add_status(pynag.Plugins.ok)
    elif the_snmp_value == '3':
        the_helper.add_status(pynag.Plugins.warning)
    else:
        # Unknown and fault conditions are both treated as critical.
        the_helper.add_status(pynag.Plugins.critical)
    the_helper.set_summary("UPS batteries state is {}".format(label))
[ "def", "check_basic_battery_status", "(", "the_session", ",", "the_helper", ",", "the_snmp_value", ")", ":", "apc_battery_states", "=", "{", "'1'", ":", "'unknown'", ",", "'2'", ":", "'batteryNormal'", ",", "'3'", ":", "'batteryLow'", ",", "'4'", ":", "'batteryI...
31.294118
18.470588
def gauge(self, name, value, rate=1):
    # type: (str, float, float) -> None
    """Send a Gauge metric with the specified value."""
    if not self._should_send_metric(name, rate):
        return
    # Coerce non-numeric inputs so the request always carries a float.
    if not is_numeric(value):
        value = float(value)
    metric = Gauge(self._create_metric_name_for_request(name), value, rate)
    self._request(metric.to_request())
[ "def", "gauge", "(", "self", ",", "name", ",", "value", ",", "rate", "=", "1", ")", ":", "# type: (str, float, float) -> None", "if", "self", ".", "_should_send_metric", "(", "name", ",", "rate", ")", ":", "if", "not", "is_numeric", "(", "value", ")", ":...
33.071429
13.071429
def readinto(self, b):
    """Read up to len(b) bytes into the writable buffer *b* and return
    the number of bytes read.  If the socket is non-blocking and no bytes
    are available, None is returned.

    If *b* is non-empty, a 0 return value indicates that the connection
    was shutdown at the other end.
    """
    self._checkClosed()
    self._checkReadable()
    # A previous timeout leaves the stream in an undefined state; refuse
    # further reads until the object is recreated.
    if self._timeout_occurred:
        raise IOError("cannot read from timed out object")
    while True:
        try:
            return self._sock.recv_into(b)
        except timeout:
            # Remember the timeout so later reads fail fast (see above).
            self._timeout_occurred = True
            raise
        # except InterruptedError:
        #     continue
        except error as e:
            # EAGAIN/EWOULDBLOCK on a non-blocking socket: signal "no
            # data yet" with None instead of raising.
            if e.args[0] in _blocking_errnos:
                return None
            raise
[ "def", "readinto", "(", "self", ",", "b", ")", ":", "self", ".", "_checkClosed", "(", ")", "self", ".", "_checkReadable", "(", ")", "if", "self", ".", "_timeout_occurred", ":", "raise", "IOError", "(", "\"cannot read from timed out object\"", ")", "while", "...
35.75
13.958333
def refresh(self):
    """Obtain a new access token using the stored refresh token.

    :raises InvalidInvocation: when no refresh token is available.
    """
    token = self.refresh_token
    if token is None:
        raise InvalidInvocation("refresh token not provided")
    self._request_token(grant_type="refresh_token", refresh_token=token)
[ "def", "refresh", "(", "self", ")", ":", "if", "self", ".", "refresh_token", "is", "None", ":", "raise", "InvalidInvocation", "(", "\"refresh token not provided\"", ")", "self", ".", "_request_token", "(", "grant_type", "=", "\"refresh_token\"", ",", "refresh_toke...
41.857143
17.714286
def create_network(kwargs=None, call=None):
    '''
    .. versionchanged:: 2017.7.0
    Create a GCE network. Must specify name and cidr.

    CLI Example:

    .. code-block:: bash

        salt-cloud -f create_network gce name=mynet cidr=10.10.10.0/24 mode=legacy description=optional
        salt-cloud -f create_network gce name=mynet description=optional
    '''
    if call != 'function':
        raise SaltCloudSystemExit(
            'The create_network function must be called with -f or --function.'
        )

    if not kwargs or 'name' not in kwargs:
        log.error(
            'A name must be specified when creating a network.'
        )
        return False

    mode = kwargs.get('mode', 'legacy')
    cidr = kwargs.get('cidr', None)
    # Legacy-mode networks carry a single CIDR; other modes (e.g. auto)
    # derive subnet ranges themselves, so cidr is optional there.
    if cidr is None and mode == 'legacy':
        log.error(
            'A network CIDR range must be specified when creating a legacy network.'
        )
        return False

    name = kwargs['name']
    desc = kwargs.get('description', None)
    conn = get_conn()

    # Fire the salt-cloud "creating" event before the API call, and the
    # "created" event after, so reactors can track both phases.
    __utils__['cloud.fire_event'](
        'event',
        'creating network',
        'salt/cloud/net/creating',
        args={
            'name': name,
            'cidr': cidr,
            'description': desc,
            'mode': mode
        },
        sock_dir=__opts__['sock_dir'],
        transport=__opts__['transport']
    )

    network = conn.ex_create_network(name, cidr, desc, mode)

    __utils__['cloud.fire_event'](
        'event',
        'created network',
        'salt/cloud/net/created',
        args={
            'name': name,
            'cidr': cidr,
            'description': desc,
            'mode': mode
        },
        sock_dir=__opts__['sock_dir'],
        transport=__opts__['transport']
    )
    return _expand_item(network)
[ "def", "create_network", "(", "kwargs", "=", "None", ",", "call", "=", "None", ")", ":", "if", "call", "!=", "'function'", ":", "raise", "SaltCloudSystemExit", "(", "'The create_network function must be called with -f or --function.'", ")", "if", "not", "kwargs", "o...
26.584615
20.861538
def less(args):
    """
    %prog less filename position | less

    Enhance the unix `less` command by seeking to a file location first. This is
    useful to browse big files. Position is relative 0.00 - 1.00, or bytenumber.

    $ %prog less myfile 0.1      # Go to 10% of the current file and streaming
    $ %prog less myfile 0.1,0.2  # Stream at several positions
    $ %prog less myfile 100      # Go to certain byte number and streaming
    $ %prog less myfile 100,200  # Stream at several positions
    $ %prog less myfile all      # Generate a snapshot every 10% (10%, 20%, ..)
    """
    # NOTE: this docstring is runtime data — it is fed to OptionParser below.
    from jcvi.formats.base import must_open

    p = OptionParser(less.__doc__)
    opts, args = p.parse_args(args)

    if len(args) != 2:
        sys.exit(not p.print_help())

    filename, pos = args
    fsize = getfilesize(filename)

    # "all" expands to snapshots at 0%,10%,...,90%; otherwise parse a
    # comma-separated list of positions.
    if pos == "all":
        pos = [x / 10. for x in range(0, 10)]
    else:
        pos = [float(x) for x in pos.split(",")]

    # Positions > 1 are absolute byte offsets; convert to fractions.
    if pos[0] > 1:
        pos = [x / fsize for x in pos]

    # Several positions: cap each snapshot at 20 lines; single position
    # streams without a limit.
    if len(pos) > 1:
        counts = 20
    else:
        counts = None

    fp = must_open(filename)
    # NOTE(review): loop variable shadows the OptionParser `p` above.
    for p in pos:
        snapshot(fp, p, fsize, counts=counts)
[ "def", "less", "(", "args", ")", ":", "from", "jcvi", ".", "formats", ".", "base", "import", "must_open", "p", "=", "OptionParser", "(", "less", ".", "__doc__", ")", "opts", ",", "args", "=", "p", ".", "parse_args", "(", "args", ")", "if", "len", "...
28.675
22.475
def start_trace(self, full=False, frame=None, below=0, under=None):
    """Begin tracing execution, starting at *frame* (caller's frame by default)."""
    if self.tracing:
        # Already tracing; nothing to do.
        return
    self.reset()
    log.info('Starting trace')
    # _getframe().f_back must be evaluated right here so the default frame
    # is the caller of start_trace, not some deeper helper.
    frame = frame or sys._getframe().f_back
    # Install the trace hook without pausing execution.
    self.set_trace(frame, break_=False)
    self.tracing = True
    self.full = full
    self.below = below
    self.under = under
[ "def", "start_trace", "(", "self", ",", "full", "=", "False", ",", "frame", "=", "None", ",", "below", "=", "0", ",", "under", "=", "None", ")", ":", "if", "self", ".", "tracing", ":", "return", "self", ".", "reset", "(", ")", "log", ".", "info",...
33.230769
12.230769
def copy_attr(self, other):
    """
    Copies all other attributes (not methods) from the other object
    to this instance.
    """
    if not isinstance(other, Symbol):
        return  # Nothing done if not a Symbol object
    tmp = re.compile('__.*__')
    for attr in (x for x in dir(other) if not tmp.match(x)):
        if hasattr(self.__class__, attr):
            # Skip properties and callables defined on the class.
            # BUG FIX: the original wrote str(type(x) in (...)) which
            # stringifies a boolean — always truthy — so *every* attribute
            # present on the class was skipped, including plain data attrs.
            cls_attr = getattr(self.__class__, attr)
            if isinstance(cls_attr, property) or callable(cls_attr):
                continue
        val = getattr(other, attr)
        # Heuristic: reprs starting with '<' are objects, not plain values.
        if isinstance(val, str) or str(val)[0] != '<':  # Not a value
            setattr(self, attr, val)
[ "def", "copy_attr", "(", "self", ",", "other", ")", ":", "if", "not", "isinstance", "(", "other", ",", "Symbol", ")", ":", "return", "# Nothing done if not a Symbol object", "tmp", "=", "re", ".", "compile", "(", "'__.*__'", ")", "for", "attr", "in", "(", ...
36.315789
18.947368
def random_soup(face_count=100):
    """
    Return random triangles as a Trimesh

    Parameters
    -----------
    face_count : int
      Number of faces desired in mesh

    Returns
    -----------
    soup : trimesh.Trimesh
      Geometry with face_count random faces
    """
    # Three independent random vertices per face, centered on the origin.
    points = np.random.random((face_count * 3, 3)) - 0.5
    # Faces index consecutive vertex triples — no vertex sharing at all.
    triangles = np.arange(face_count * 3).reshape((-1, 3))
    return Trimesh(vertices=points, faces=triangles)
[ "def", "random_soup", "(", "face_count", "=", "100", ")", ":", "vertices", "=", "np", ".", "random", ".", "random", "(", "(", "face_count", "*", "3", ",", "3", ")", ")", "-", "0.5", "faces", "=", "np", ".", "arange", "(", "face_count", "*", "3", ...
24.666667
16.666667
def crypto_box_seal_open(ciphertext, pk, sk):
    """
    Decrypts and returns an encrypted message ``ciphertext``, using the
    recipient's secret key ``sk`` and the sender's ephemeral public key
    embedded in the sealed box. The box construct nonce is derived from
    the recipient's public key ``pk`` and the sender's public key.

    :param ciphertext: bytes
    :param pk: bytes
    :param sk: bytes
    :rtype: bytes

    .. versionadded:: 1.2
    """
    ensure(isinstance(ciphertext, bytes),
           "input ciphertext must be bytes",
           raising=TypeError)
    ensure(isinstance(pk, bytes),
           "public key must be bytes",
           raising=TypeError)
    ensure(isinstance(sk, bytes),
           "secret key must be bytes",
           raising=TypeError)
    if len(pk) != crypto_box_PUBLICKEYBYTES:
        raise exc.ValueError("Invalid public key")
    if len(sk) != crypto_box_SECRETKEYBYTES:
        raise exc.ValueError("Invalid secret key")
    _clen = len(ciphertext)
    # A sealed box carries the ephemeral public key plus the MAC, so any
    # input shorter than that fixed overhead cannot be valid.
    ensure(_clen >= crypto_box_SEALBYTES,
           ("Input cyphertext must be "
            "at least {} long").format(crypto_box_SEALBYTES),
           raising=exc.TypeError)
    _mlen = _clen - crypto_box_SEALBYTES
    # zero-length malloc results are implementation.dependent
    plaintext = ffi.new("unsigned char[]", max(1, _mlen))
    res = lib.crypto_box_seal_open(plaintext, ciphertext, _clen, pk, sk)
    ensure(res == 0, "An error occurred trying to decrypt the message",
           raising=exc.CryptoError)
    return ffi.buffer(plaintext, _mlen)[:]
[ "def", "crypto_box_seal_open", "(", "ciphertext", ",", "pk", ",", "sk", ")", ":", "ensure", "(", "isinstance", "(", "ciphertext", ",", "bytes", ")", ",", "\"input ciphertext must be bytes\"", ",", "raising", "=", "TypeError", ")", "ensure", "(", "isinstance", ...
31.163265
18.918367
def MakeTokenRegex(meta_left, meta_right):
    """Return a (compiled) regular expression for tokenization.

    Args:
      meta_left, meta_right: e.g. '{' and '}'

    - The regular expressions are memoized.
    - This function is public so the syntax highlighter can use it.
    """
    cache_key = meta_left, meta_right
    if cache_key not in _token_re_cache:
        # - () grouping is needed so re.split keeps the delimiters.
        # - The first inner character must be non-space, so literals like
        #   "function() { return 1; }" are not mistaken for substitutions.
        # - There must be at least one (non-space) character inside {}.
        pattern = r'(' + re.escape(meta_left) + r'\S.*?' + re.escape(meta_right) + r')'
        _token_re_cache[cache_key] = re.compile(pattern)
    return _token_re_cache[cache_key]
[ "def", "MakeTokenRegex", "(", "meta_left", ",", "meta_right", ")", ":", "key", "=", "meta_left", ",", "meta_right", "if", "key", "not", "in", "_token_re_cache", ":", "# - Need () grouping for re.split", "# - The first character must be a non-space. This allows us to ignore",...
35.090909
14.818182
def validate_inferred_freq(freq, inferred_freq, freq_infer):
    """
    If the user passes a freq and another freq is inferred from passed data,
    require that they match.

    Parameters
    ----------
    freq : DateOffset or None
    inferred_freq : DateOffset or None
    freq_infer : bool

    Returns
    -------
    freq : DateOffset or None
    freq_infer : bool

    Notes
    -----
    We assume at this point that `maybe_infer_freq` has been called, so
    `freq` is either a DateOffset object or None.
    """
    if inferred_freq is None:
        # Nothing inferred; pass inputs through untouched.
        return freq, freq_infer
    if freq is None:
        # Adopt the inferred frequency; no further inference needed.
        return inferred_freq, False
    if freq != inferred_freq:
        raise ValueError('Inferred frequency {inferred} from passed '
                         'values does not conform to passed frequency '
                         '{passed}'.format(inferred=inferred_freq,
                                           passed=freq.freqstr))
    return freq, freq_infer
[ "def", "validate_inferred_freq", "(", "freq", ",", "inferred_freq", ",", "freq_infer", ")", ":", "if", "inferred_freq", "is", "not", "None", ":", "if", "freq", "is", "not", "None", "and", "freq", "!=", "inferred_freq", ":", "raise", "ValueError", "(", "'Infe...
30.515152
20.575758
def exclude_paths(root, patterns, dockerfile=None):
    """
    Given a root directory path and a list of .dockerignore patterns, return
    an iterator of all paths (both regular files and directories) in the root
    directory that do *not* match any of the patterns.

    All paths returned are relative to the root.
    """
    if dockerfile is None:
        dockerfile = 'Dockerfile'
    # Work on a copy so the caller's pattern list is not mutated
    # (the original appended directly to the argument).
    # The Dockerfile itself is always re-included (leading '!').
    patterns = list(patterns) + ['!' + dockerfile]
    pm = PatternMatcher(patterns)
    return set(pm.walk(root))
[ "def", "exclude_paths", "(", "root", ",", "patterns", ",", "dockerfile", "=", "None", ")", ":", "if", "dockerfile", "is", "None", ":", "dockerfile", "=", "'Dockerfile'", "patterns", ".", "append", "(", "'!'", "+", "dockerfile", ")", "pm", "=", "PatternMatc...
31.866667
17.866667
def is_selected(self, request):
    """
    Helper method that returns ``True`` if the menu item is active.

    A menu item is considered active when its URL, or the URL of one of
    its descendants, equals the current request URL.
    """
    current_url = request.get_full_path()
    if self.url == current_url:
        return True
    return any(child.is_selected(request) for child in self.children)
[ "def", "is_selected", "(", "self", ",", "request", ")", ":", "current_url", "=", "request", ".", "get_full_path", "(", ")", "return", "self", ".", "url", "==", "current_url", "or", "len", "(", "[", "c", "for", "c", "in", "self", ".", "children", "if", ...
45.222222
13.666667
def _read(self, length): """ Reads C{length} bytes from the stream. If an attempt to read past the end of the buffer is made, L{IOError} is raised. """ bytes = self.read(length) if len(bytes) != length: self.seek(0 - len(bytes), 1) raise IOError("Tried to read %d byte(s) from the stream" % length) return bytes
[ "def", "_read", "(", "self", ",", "length", ")", ":", "bytes", "=", "self", ".", "read", "(", "length", ")", "if", "len", "(", "bytes", ")", "!=", "length", ":", "self", ".", "seek", "(", "0", "-", "len", "(", "bytes", ")", ",", "1", ")", "ra...
29.384615
20.153846
def _desy_bookkeeping(self, key, value):
    """Populate the ``_desy_bookkeeping`` key."""
    # MARC-style subfields: d=date, a=expert, s=status.
    record = {
        'date': normalize_date(value.get('d')),
        'expert': force_single_element(value.get('a')),
        'status': value.get('s'),
    }
    return record
[ "def", "_desy_bookkeeping", "(", "self", ",", "key", ",", "value", ")", ":", "return", "{", "'date'", ":", "normalize_date", "(", "value", ".", "get", "(", "'d'", ")", ")", ",", "'expert'", ":", "force_single_element", "(", "value", ".", "get", "(", "'...
34.428571
13.142857
def get_logger(name, level=None, fmt=':%(lineno)d: %(message)s'):
    """
    Return a logger.

    Args:
        name (str): name to pass to the logging module.
        level (int): level of logging.
        fmt (str): format string.

    Returns:
        logging.Logger: logger from ``logging.getLogger``.
    """
    if name not in Logger.loggers:
        # The first call fixes the class-wide default level: an explicit
        # level wins, otherwise ERROR. Later calls without a level inherit
        # Logger.level. NOTE: for an already-cached name, `level` and
        # `fmt` are ignored entirely.
        if Logger.level is None and level is None:
            Logger.level = level = logging.ERROR
        elif Logger.level is None:
            Logger.level = level
        elif level is None:
            level = Logger.level
        logger = logging.getLogger(name)
        logger_handler = logging.StreamHandler()
        # Prefix records with the logger name so interleaved output is
        # attributable to its source.
        logger_handler.setFormatter(LoggingFormatter(fmt=name + fmt))
        logger.addHandler(logger_handler)
        logger.setLevel(level)
        Logger.loggers[name] = logger
    return Logger.loggers[name]
[ "def", "get_logger", "(", "name", ",", "level", "=", "None", ",", "fmt", "=", "':%(lineno)d: %(message)s'", ")", ":", "if", "name", "not", "in", "Logger", ".", "loggers", ":", "if", "Logger", ".", "level", "is", "None", "and", "level", "is", "None", ":...
36.5
12.730769
def add_item_metadata(self, handle, key, value):
    """Store the given key:value pair for the item associated with handle.

    :param handle: handle for accessing an item before the dataset is frozen
    :param key: metadata key
    :param value: metadata value
    """
    _mkdir_if_missing(self._metadata_fragments_abspath)
    # One JSON fragment file per (item, key) pair.
    fragment_prefix = self._handle_to_fragment_absprefixpath(handle)
    fragment_path = fragment_prefix + '.{}.json'.format(key)
    _put_obj(fragment_path, value)
[ "def", "add_item_metadata", "(", "self", ",", "handle", ",", "key", ",", "value", ")", ":", "_mkdir_if_missing", "(", "self", ".", "_metadata_fragments_abspath", ")", "prefix", "=", "self", ".", "_handle_to_fragment_absprefixpath", "(", "handle", ")", "fpath", "...
36.142857
17.357143
def get_column_metadata(conn, table: str, schema='public'):
    """Returns column data following db.Column parameter specification."""
    # pg_attribute holds one row per column; attnum > 0 filters out system
    # columns, and NOT attisdropped excludes dropped-but-unvacuumed ones.
    query = """\
SELECT
  attname as name,
  format_type(atttypid, atttypmod) AS data_type,
  NOT attnotnull AS nullable
FROM pg_catalog.pg_attribute
WHERE attrelid=%s::regclass
  AND attnum > 0
  AND NOT attisdropped
ORDER BY attnum;"""
    qualified_name = compile_qualified_name(table, schema=schema)
    # Yield dicts with keys: name, data_type, nullable.
    for record in select_dict(conn, query, params=(qualified_name,)):
        yield record
[ "def", "get_column_metadata", "(", "conn", ",", "table", ":", "str", ",", "schema", "=", "'public'", ")", ":", "query", "=", "\"\"\"\\\nSELECT\n attname as name,\n format_type(atttypid, atttypmod) AS data_type,\n NOT attnotnull AS nullable\nFROM pg_catalog.pg_attribute\nWHERE attr...
32.125
17.3125
def formationz(c, z, Ascaling=900, omega_M_0=0.25, omega_lambda_0=0.75):
    """
    Rearrange eqn 18 from Correa et al (2015c) to return formation
    redshift for a concentration at a given redshift.

    Parameters
    ----------
    c : float / numpy array
        Concentration of halo
    z : float / numpy array
        Redshift of halo with concentration c
    Ascaling : float
        Cosmological dependent scaling between densities, use function
        getAscaling('WMAP5') if unsure. Default is 900.
    omega_M_0 : float
        Mass density of the universe. Default is 0.25
    omega_lambda_0 : float
        Dark Energy density of the universe. Default is 0.75

    Returns
    -------
    zf : float / numpy array
        Formation redshift for halo of concentration 'c' at redshift 'z'
    """
    # NFW mass fractions at c=1 and at concentration c.
    y1 = np.log(2) - 0.5
    yc = np.log(1 + c) - c / (1 + c)
    # Characteristic overdensity of the halo.
    rho_2 = 200 * (c**3) * y1 / yc
    zf = (((1 + z)**3 + omega_lambda_0 / omega_M_0)
          * (rho_2 / Ascaling)
          - omega_lambda_0 / omega_M_0)**(1 / 3) - 1
    return zf
[ "def", "formationz", "(", "c", ",", "z", ",", "Ascaling", "=", "900", ",", "omega_M_0", "=", "0.25", ",", "omega_lambda_0", "=", "0.75", ")", ":", "Y1", "=", "np", ".", "log", "(", "2", ")", "-", "0.5", "Yc", "=", "np", ".", "log", "(", "1", ...
31.1875
21.34375
def message_length(message):
    '''
    message_length returns visual length of message.
    Narrow (ASCII and similar) chars count as 1 column, wide chars as 2.

    :param str message: random unicode mixed text
    :rtype: int
    '''
    length = 0
    for width in map(east_asian_width, message):
        # 'W' (wide) and 'F' (fullwidth) glyphs occupy two terminal cells;
        # all other categories ('Na', 'N', 'H', 'A') occupy one.
        # BUG FIX: the original counted only 'W' as 2 and 'Na' as 1, so
        # fullwidth ('F') and neutral ('N'/'H'/'A') characters contributed
        # zero width, contradicting the documented contract.
        if width in ('W', 'F'):
            length += 2
        else:
            length += 1
    return length
[ "def", "message_length", "(", "message", ")", ":", "length", "=", "0", "for", "char", "in", "map", "(", "east_asian_width", ",", "message", ")", ":", "if", "char", "==", "'W'", ":", "length", "+=", "2", "elif", "char", "==", "'Na'", ":", "length", "+...
23.875
21
def get_polygon(self, reverse=False):
    """
    Returns a tuple of coordinates of 5 points describing a polygon.
    Points are listed in clockwise order; the first point equals the last.

    :param reverse: `True` if x and y coordinates should be switched and
        `False` otherwise
    :type reverse: bool
    :return: `((x_1, y_1), ... , (x_5, y_5))`
    :rtype: tuple(tuple(float))
    """
    bbox = self.reverse() if reverse else self
    # Closed clockwise ring, repeating the lower-left corner at the end.
    lower_left = (bbox.min_x, bbox.min_y)
    return (
        lower_left,
        (bbox.min_x, bbox.max_y),
        (bbox.max_x, bbox.max_y),
        (bbox.max_x, bbox.min_y),
        lower_left,
    )
[ "def", "get_polygon", "(", "self", ",", "reverse", "=", "False", ")", ":", "bbox", "=", "self", ".", "reverse", "(", ")", "if", "reverse", "else", "self", "polygon", "=", "(", "(", "bbox", ".", "min_x", ",", "bbox", ".", "min_y", ")", ",", "(", "...
43.9375
10.875
def path(self, tax_ids):
    """Get the node at the end of the path described by tax_ids."""
    # The path must start at this node.
    assert tax_ids[0] == self.tax_id
    if len(tax_ids) == 1:
        return self
    wanted = tax_ids[1]
    # Descend into the child carrying the next tax_id and recurse.
    for node in self.children:
        if node.tax_id == wanted:
            return node.path(tax_ids[1:])
    raise ValueError(wanted)
[ "def", "path", "(", "self", ",", "tax_ids", ")", ":", "assert", "tax_ids", "[", "0", "]", "==", "self", ".", "tax_id", "if", "len", "(", "tax_ids", ")", "==", "1", ":", "return", "self", "n", "=", "tax_ids", "[", "1", "]", "try", ":", "child", ...
29.692308
16.846154
def l(*members, meta=None) -> List:
    """Creates a new list from members."""
    # pylint: disable=abstract-class-instantiated
    contents = plist(iterable=members)
    return List(contents, meta=meta)
[ "def", "l", "(", "*", "members", ",", "meta", "=", "None", ")", "->", "List", ":", "return", "List", "(", "# pylint: disable=abstract-class-instantiated", "plist", "(", "iterable", "=", "members", ")", ",", "meta", "=", "meta", ")" ]
37.4
13
def basic_query(returns):
    """decorator factory for NS queries"""
    # Composition pipeline: make the generator reusable, parse outgoing
    # requests, attach params and the API prefix on yield, and deserialize
    # the final return value with the given loader.
    deserialize = map_return(loads(returns))
    return compose(
        reusable,
        map_send(parse_request),
        map_yield(prepare_params, snug.prefix_adder(API_PREFIX)),
        deserialize,
        oneyield,
    )
[ "def", "basic_query", "(", "returns", ")", ":", "return", "compose", "(", "reusable", ",", "map_send", "(", "parse_request", ")", ",", "map_yield", "(", "prepare_params", ",", "snug", ".", "prefix_adder", "(", "API_PREFIX", ")", ")", ",", "map_return", "(", ...
28.555556
17.222222
def disambiguate(self, words):
    """Disambiguate previously analyzed words.

    Parameters
    ----------
    words: list of dict
        A sentence of words.

    Returns
    -------
    list of dict
        Sentence of disambiguated words.
    """
    analyses = [as_wordanalysis(word) for word in words]
    sentence = vm.SentenceAnalysis(analyses)
    results = self._morf.disambiguate(sentence)
    return [postprocess_result(result, False, True) for result in results]
[ "def", "disambiguate", "(", "self", ",", "words", ")", ":", "words", "=", "vm", ".", "SentenceAnalysis", "(", "[", "as_wordanalysis", "(", "w", ")", "for", "w", "in", "words", "]", ")", "disambiguated", "=", "self", ".", "_morf", ".", "disambiguate", "...
30.125
19.4375
def list_():
    '''
    Returns the machine's bridges list

    CLI Example:

    .. code-block:: bash

        salt '*' bridge.list
    '''
    brs = _os_dispatch('brshow')
    if not brs:
        return None
    # Materialize whatever iterable the OS dispatcher returned
    # (replaces the original manual append loop).
    return list(brs)
[ "def", "list_", "(", ")", ":", "brs", "=", "_os_dispatch", "(", "'brshow'", ")", "if", "not", "brs", ":", "return", "None", "brlist", "=", "[", "]", "for", "br", "in", "brs", ":", "brlist", ".", "append", "(", "br", ")", "return", "brlist" ]
15.166667
24.833333
def values(self):
    """
    Iterate values, wrapping each child subtree in a DictTree.
    """
    for name, subtree in self.__data__.items():
        # META and KEY are internal bookkeeping entries, not children.
        if name in (META, KEY):
            continue
        yield DictTree(__data__=subtree)
[ "def", "values", "(", "self", ")", ":", "for", "key", ",", "value", "in", "self", ".", "__data__", ".", "items", "(", ")", ":", "if", "key", "not", "in", "(", "META", ",", "KEY", ")", ":", "yield", "DictTree", "(", "__data__", "=", "value", ")" ]
27.714286
8
def splitalleles(consensus):
    """
    takes diploid consensus alleles with phase data stored as a mixture
    of upper and lower case characters and splits it into 2 alleles
    """
    # allele "first" receives the priority (big) base at uppercase sites.
    first = list(consensus)
    second = list(consensus)
    # Positions holding IUPAC heterozygous codes, either case.
    het_sites = [pos for pos, base in enumerate(consensus)
                 if base in "RKSWYMrkswym"]
    for pos in het_sites:
        base = consensus[pos]
        if base.isupper():
            first[pos] = PRIORITY[base]
            second[pos] = MINOR[base]
        else:
            # Lowercase phase flips which allele gets the priority base.
            first[pos] = MINOR[base.upper()]
            second[pos] = PRIORITY[base.upper()]
    return "".join(first), "".join(second)
[ "def", "splitalleles", "(", "consensus", ")", ":", "## store two alleles, allele1 will start with bigbase", "allele1", "=", "list", "(", "consensus", ")", "allele2", "=", "list", "(", "consensus", ")", "hidx", "=", "[", "i", "for", "(", "i", ",", "j", ")", "...
32.208333
16.208333
def upload_function_zip(self, location, zip_path, project_id=None):
    """
    Uploads zip file with sources.

    :param location: The location where the function is created.
    :type location: str
    :param zip_path: The path of the valid .zip file to upload.
    :type zip_path: str
    :param project_id: Optional, Google Cloud Project project_id where the
        function belongs. If set to None or missing, the default
        project_id from the GCP connection is used.
    :type project_id: str
    :return: The upload URL that was returned by generateUploadUrl method.
    """
    # Ask Cloud Functions for a signed URL to receive the archive.
    response = self.get_conn().projects().locations().functions().generateUploadUrl(
        parent=self._full_location(project_id, location)
    ).execute(num_retries=self.num_retries)
    upload_url = response.get('uploadUrl')
    with open(zip_path, 'rb') as fp:
        requests.put(
            url=upload_url,
            data=fp,
            # Those two headers needs to be specified according to:
            # https://cloud.google.com/functions/docs/reference/rest/v1/projects.locations.functions/generateUploadUrl
            # nopep8
            headers={
                'Content-type': 'application/zip',
                'x-goog-content-length-range': '0,104857600',
            }
        )
    return upload_url
[ "def", "upload_function_zip", "(", "self", ",", "location", ",", "zip_path", ",", "project_id", "=", "None", ")", ":", "response", "=", "self", ".", "get_conn", "(", ")", ".", "projects", "(", ")", ".", "locations", "(", ")", ".", "functions", "(", ")"...
46.333333
22.6
def get_metadata(model):
    """Get metadata for a given model.

    Parameters
    ----------
    model : `~astropy.modeling.Model`
        Model.

    Returns
    -------
    metadata : dict
        Metadata for the model.

    Raises
    ------
    synphot.exceptions.SynphotError
        Invalid model.
    """
    if not isinstance(model, Model):
        raise SynphotError('{0} is not a model.'.format(model))
    # Compound models merge metadata from every component via the
    # METADATA_OPERATORS tree evaluation; simple models just deep-copy.
    if isinstance(model, _CompoundModel):
        return model._tree.evaluate(METADATA_OPERATORS, getter=None)
    return deepcopy(model.meta)
[ "def", "get_metadata", "(", "model", ")", ":", "if", "not", "isinstance", "(", "model", ",", "Model", ")", ":", "raise", "SynphotError", "(", "'{0} is not a model.'", ".", "format", "(", "model", ")", ")", "if", "isinstance", "(", "model", ",", "_CompoundM...
20.571429
22.178571
def mark(self, digits: int = None) -> float:
    """
    Return time in seconds since last mark, reset, or construction.

    :param digits: number of fractional decimal digits to retain
        (default as constructed)
    """
    # Slide the window: the previous mark becomes the start, "now" the end.
    self._mark[:] = [self._mark[1], time()]
    elapsed = self._mark[1] - self._mark[0]
    # Explicit digits > 0 wins; digits == 0 (or a constructed 0) truncates
    # to int; otherwise fall back to the constructed precision.
    if digits is not None and digits > 0:
        return round(elapsed, digits)
    if digits == 0 or self._digits == 0:
        return int(elapsed)
    if self._digits is not None and self._digits > 0:
        return round(elapsed, self._digits)
    return elapsed
[ "def", "mark", "(", "self", ",", "digits", ":", "int", "=", "None", ")", "->", "float", ":", "self", ".", "_mark", "[", ":", "]", "=", "[", "self", ".", "_mark", "[", "1", "]", ",", "time", "(", ")", "]", "rv", "=", "self", ".", "_mark", "[...
32.444444
18.444444
def find_autosummary_in_docstring(name, module=None, filename=None):
    """Find out what items are documented in the given object's docstring.

    See `find_autosummary_in_lines`.
    """
    try:
        real_name, obj, parent = import_by_name(name)
        lines = pydoc.getdoc(obj).splitlines()
        return find_autosummary_in_lines(lines, module=name, filename=filename)
    except AttributeError:
        pass
    except ImportError as e:
        # FIX: the original used Python-2-only syntax ('except X, e' and a
        # bare print statement); this form parses on both Python 2 and 3.
        print("Failed to import '%s': %s" % (name, e))
    return []
[ "def", "find_autosummary_in_docstring", "(", "name", ",", "module", "=", "None", ",", "filename", "=", "None", ")", ":", "try", ":", "real_name", ",", "obj", ",", "parent", "=", "import_by_name", "(", "name", ")", "lines", "=", "pydoc", ".", "getdoc", "(...
35.785714
18.428571
def users_for_perms(cls, perm_names, db_session=None):
    """
    Return users that have any one of the given permissions, either
    through one of their groups or assigned directly.

    :param perm_names: iterable of permission names to match
    :param db_session: optional SQLAlchemy session (falls back to default)
    :return: query yielding User rows ordered by id
    """
    db_session = get_db_session(db_session)
    # Users whose *groups* hold one of the permissions.
    query = db_session.query(cls.model)
    query = query.filter(
        cls.models_proxy.User.id == cls.models_proxy.UserGroup.user_id
    )
    query = query.filter(
        cls.models_proxy.UserGroup.group_id
        == cls.models_proxy.GroupPermission.group_id
    )
    query = query.filter(cls.models_proxy.GroupPermission.perm_name.in_(perm_names))
    # Users who hold one of the permissions *directly*.
    query2 = db_session.query(cls.model)
    query2 = query2.filter(
        cls.models_proxy.User.id == cls.models_proxy.UserPermission.user_id
    )
    query2 = query2.filter(
        cls.models_proxy.UserPermission.perm_name.in_(perm_names)
    )
    # UNION de-duplicates users that match through both paths.
    users = query.union(query2).order_by(cls.model.id)
    return users
[ "def", "users_for_perms", "(", "cls", ",", "perm_names", ",", "db_session", "=", "None", ")", ":", "db_session", "=", "get_db_session", "(", "db_session", ")", "query", "=", "db_session", ".", "query", "(", "cls", ".", "model", ")", "query", "=", "query", ...
34.964286
19.607143
def instance(self, counter=None):
    """Return all the information regarding a specific pipeline run.

    See the `Go pipeline instance documentation`__ for examples.

    .. __: http://api.go.cd/current/#get-pipeline-instance

    Args:
      counter (int): The pipeline instance to fetch.
        If falsey returns the latest pipeline instance from :meth:`history`.

    Returns:
      Response: :class:`gocd.api.response.Response` object
    """
    if counter:
        return self._get('/instance/{counter:d}'.format(counter=counter))

    # No counter given: fall back to the newest run recorded in history.
    history = self.history()
    if not history:
        return history
    return Response._from_json(history['pipelines'][0])
[ "def", "instance", "(", "self", ",", "counter", "=", "None", ")", ":", "if", "not", "counter", ":", "history", "=", "self", ".", "history", "(", ")", "if", "not", "history", ":", "return", "history", "else", ":", "return", "Response", ".", "_from_json"...
33.909091
23.318182
def printUnusedImports(self):
    """Produce a report of unused imports, one ``file:line: name`` per finding."""
    for module in self.listModules():
        names = [(unused.lineno, unused.name)
                 for unused in module.unused_names]
        names.sort()
        for lineno, name in names:
            if not self.all_unused:
                # In the default mode, skip lines carrying a comment --
                # assume there's a comment explaining why it's not used.
                line = linecache.getline(module.filename, lineno)
                if '#' in line:
                    continue
            print("%s:%s: %s not used" % (module.filename, lineno, name))
[ "def", "printUnusedImports", "(", "self", ")", ":", "for", "module", "in", "self", ".", "listModules", "(", ")", ":", "names", "=", "[", "(", "unused", ".", "lineno", ",", "unused", ".", "name", ")", "for", "unused", "in", "module", ".", "unused_names"...
47.384615
13.307692
def parse_pr_numbers(git_log_lines):
    """
    Parse PR numbers from commit messages. At GitHub those have the format:

        `here is the message (#1234)`

    being `1234` the PR number.
    """
    # Keep only the lines that actually carry a PR reference
    # (parse_pr_number returns a falsy value otherwise).
    return [number
            for number in (parse_pr_number(line) for line in git_log_lines)
            if number]
[ "def", "parse_pr_numbers", "(", "git_log_lines", ")", ":", "prs", "=", "[", "]", "for", "line", "in", "git_log_lines", ":", "pr_number", "=", "parse_pr_number", "(", "line", ")", "if", "pr_number", ":", "prs", ".", "append", "(", "pr_number", ")", "return"...
24.571429
15.857143
def get_total_ram():
    """The total amount of system RAM in bytes.

    This is what is reported by the OS, and may be overcommitted when
    there are multiple containers hosted on the same machine.
    """
    with open('/proc/meminfo', 'r') as meminfo:
        for line in meminfo:
            if not line:
                continue
            key, value, unit = line.split()
            if key == 'MemTotal:':
                assert unit == 'kB', 'Unknown unit'
                return int(value) * 1024  # Classic, not KiB.
    raise NotImplementedError()
[ "def", "get_total_ram", "(", ")", ":", "with", "open", "(", "'/proc/meminfo'", ",", "'r'", ")", "as", "f", ":", "for", "line", "in", "f", ".", "readlines", "(", ")", ":", "if", "line", ":", "key", ",", "value", ",", "unit", "=", "line", ".", "spl...
38.5
13.642857
def register_archive_format(name, function, extra_args=None, description=''):
    """Registers an archive format.

    name is the name of the format. function is the callable that will be
    used to create archives. If provided, extra_args is a sequence of
    (name, value) tuples that will be passed as arguments to the callable.
    description can be provided to describe the format, and will be returned
    by the get_archive_formats() function.

    Raises TypeError when ``function`` is not callable or ``extra_args``
    is not a sequence of (name, value) pairs.
    """
    if extra_args is None:
        extra_args = []
    # Fixed: ``isinstance(function, collections.Callable)`` breaks on
    # Python 3.10+ (the ABC aliases were removed from ``collections``);
    # the builtin ``callable()`` is the portable check.
    if not callable(function):
        raise TypeError('The %s object is not callable' % function)
    if not isinstance(extra_args, (tuple, list)):
        raise TypeError('extra_args needs to be a sequence')
    for element in extra_args:
        if not isinstance(element, (tuple, list)) or len(element) != 2:
            raise TypeError('extra_args elements are : (arg_name, value)')
    _ARCHIVE_FORMATS[name] = (function, extra_args, description)
[ "def", "register_archive_format", "(", "name", ",", "function", ",", "extra_args", "=", "None", ",", "description", "=", "''", ")", ":", "if", "extra_args", "is", "None", ":", "extra_args", "=", "[", "]", "if", "not", "isinstance", "(", "function", ",", ...
48.5
22.5
def Compare(fromMo, toMo, diff):
    """ Internal method to support CompareManagedObject functionality.

    Appends to ``diff`` the names of properties that differ between the
    two managed objects and returns a ``CompareStatus`` value.
    """
    from UcsBase import UcsUtils
    if (fromMo.classId != toMo.classId):
        return CompareStatus.TypesDifferent

    for prop in UcsUtils.GetUcsPropertyMetaAttributeList(str(fromMo.classId)):
        propMeta = UcsUtils.IsPropertyInMetaIgnoreCase(fromMo.classId, prop)
        # Fixed: ``propMeta != None`` replaced with identity check.
        if propMeta is not None:
            # Skip internal/read-only properties and explicitly excluded ones.
            if ((propMeta.access == UcsPropertyMeta.Internal) or (propMeta.access == UcsPropertyMeta.ReadOnly) or (
                    prop in toMo._excludePropList)):
                continue

            # Fixed: ``dict.has_key()`` was removed in Python 3; use ``in``.
            if ((prop in toMo.__dict__) and (fromMo.getattr(prop) != toMo.getattr(prop))):
                diff.append(prop)

    if (len(diff) > 0):
        return CompareStatus.PropsDifferent

    return CompareStatus.Equal
[ "def", "Compare", "(", "fromMo", ",", "toMo", ",", "diff", ")", ":", "from", "UcsBase", "import", "UcsUtils", "if", "(", "fromMo", ".", "classId", "!=", "toMo", ".", "classId", ")", ":", "return", "CompareStatus", ".", "TypesDifferent", "for", "prop", "i...
36.05
23.3
def p_jr(p):
    """ asm : JR jr_flags COMMA expr
            | JR jr_flags COMMA pexpr
    """
    # NOTE: the docstring above is a PLY grammar rule -- do not edit it.
    # JR takes a PC-relative displacement: rewrite the operand as
    # (expr - (current org + 2)), since the JR instruction is 2 bytes long.
    p[4] = Expr.makenode(Container('-', p.lineno(3)), p[4], Expr.makenode(Container(MEMORY.org + 2, p.lineno(1))))
    p[0] = Asm(p.lineno(1), 'JR %s,N' % p[2], p[4])
[ "def", "p_jr", "(", "p", ")", ":", "p", "[", "4", "]", "=", "Expr", ".", "makenode", "(", "Container", "(", "'-'", ",", "p", ".", "lineno", "(", "3", ")", ")", ",", "p", "[", "4", "]", ",", "Expr", ".", "makenode", "(", "Container", "(", "M...
42.833333
19.333333
def iflat_tasks_wti(self, status=None, op="==", nids=None):
    """Generator over all the tasks of the `Flow`, with their positions.

    Yields:
        (task, work_index, task_index)

    If status is not None, only the tasks whose status satisfies
    the condition (task.status op status) are selected.
    status can be either one of the flags defined in the :class:`Task`
    class (e.g Task.S_OK) or a string e.g "S_OK".
    nids is an optional list of node identifiers used to filter the tasks.
    """
    # Delegate to the private helper, asking it to include the indices.
    return self._iflat_tasks_wti(status=status, op=op, nids=nids,
                                 with_wti=True)
[ "def", "iflat_tasks_wti", "(", "self", ",", "status", "=", "None", ",", "op", "=", "\"==\"", ",", "nids", "=", "None", ")", ":", "return", "self", ".", "_iflat_tasks_wti", "(", "status", "=", "status", ",", "op", "=", "op", ",", "nids", "=", "nids", ...
43.857143
23
def convert_anything_to_text(
        filename: str = None,
        blob: bytes = None,
        config: TextProcessingConfig = _DEFAULT_CONFIG) -> str:
    """
    Convert arbitrary files to text, using ``strings`` or ``strings2``.
    (``strings`` is a standard Unix command to get text from any old rubbish.)
    """
    # Prefer ``strings``; fall back to ``strings2`` when it is missing.
    extractor = tools['strings'] or tools['strings2']
    if not extractor:
        raise AssertionError("No fallback string-reading tool available")
    if filename:
        return get_cmd_output(extractor, filename)
    return get_cmd_output_from_stdin(blob, extractor)
[ "def", "convert_anything_to_text", "(", "filename", ":", "str", "=", "None", ",", "blob", ":", "bytes", "=", "None", ",", "config", ":", "TextProcessingConfig", "=", "_DEFAULT_CONFIG", ")", "->", "str", ":", "strings", "=", "tools", "[", "'strings'", "]", ...
37.6875
19.0625
def composite_qc(df_orig, size=(16, 12)):
    """ Plot composite QC figures.

    Renders six panels (age, gender, ethnicity, coverage, chemistry,
    cohort) on a 2x7 subplot grid and labels them A-F.
    """
    # Rename raw column names to human-readable panel titles.
    df = df_orig.rename(columns={"hli_calc_age_sample_taken": "Age",
                                 "hli_calc_gender": "Gender",
                                 "eth7_max": "Ethnicity",
                                 "MeanCoverage": "Mean coverage",
                                 "Chemistry": "Sequencing chemistry",
                                 "Release Client": "Cohort",
                                 })

    # 2 rows x 7 columns grid: panels span 2 or 3 columns each.
    fig = plt.figure(1, size)
    ax1 = plt.subplot2grid((2, 7), (0, 0), rowspan=1, colspan=2)
    ax2 = plt.subplot2grid((2, 7), (0, 2), rowspan=1, colspan=2)
    ax3 = plt.subplot2grid((2, 7), (0, 4), rowspan=1, colspan=3)
    ax4 = plt.subplot2grid((2, 7), (1, 0), rowspan=1, colspan=2)
    ax5 = plt.subplot2grid((2, 7), (1, 2), rowspan=1, colspan=2)
    ax6 = plt.subplot2grid((2, 7), (1, 4), rowspan=1, colspan=3)

    sns.distplot(df["Age"].dropna(), kde=False, ax=ax1)
    sns.countplot(x="Gender", data=df, ax=ax2)
    sns.countplot(x="Ethnicity", data=df, ax=ax3,
                  order = df['Ethnicity'].value_counts().index)
    sns.distplot(df["Mean coverage"].dropna(), kde=False, ax=ax4)
    ax4.set_xlim(0, 100)
    sns.countplot(x="Sequencing chemistry", data=df, ax=ax5)
    sns.countplot(x="Cohort", data=df, ax=ax6,
                  order = df['Cohort'].value_counts().index)

    # Anonymize the cohorts: keep "Health Nucleus", rename "Spector" to
    # its public name, and replace everything else with C<n> aliases.
    cohorts = ax6.get_xticklabels()
    newCohorts = []
    for i, c in enumerate(cohorts):
        if c.get_text() == "Spector":
            c = "TwinsUK"
        elif c.get_text() != "Health Nucleus":
            c = "C{}".format(i + 1)
        newCohorts.append(c)
    ax6.set_xticklabels(newCohorts)

    for ax in (ax6,):
        ax.set_xticklabels(ax.get_xticklabels(), ha="right", rotation=30)

    # Promote each x-label to the panel title, then blank the x-label.
    for ax in (ax1, ax2, ax3, ax4, ax5, ax6):
        ax.set_title(ax.get_xlabel())
        ax.set_xlabel("")

    plt.tight_layout()

    # Full-figure overlay axes used only to place the A-F panel labels.
    root = fig.add_axes((0, 0, 1, 1))
    labels = ((.02, .96, "A"), (.3, .96, "B"), (.6, .96, "C"),
              (.02, .52, "D"), (.3, .52, "E"), (.6, .52, "F"))
    panel_labels(root, labels)

    root.set_xlim(0, 1)
    root.set_ylim(0, 1)
    root.set_axis_off()
[ "def", "composite_qc", "(", "df_orig", ",", "size", "=", "(", "16", ",", "12", ")", ")", ":", "df", "=", "df_orig", ".", "rename", "(", "columns", "=", "{", "\"hli_calc_age_sample_taken\"", ":", "\"Age\"", ",", "\"hli_calc_gender\"", ":", "\"Gender\"", ","...
36.283333
16.616667
def set_data(self, vertices=None, tris=None, data=None): """Set the data Parameters ---------- vertices : ndarray, shape (Nv, 3) | None Vertex coordinates. tris : ndarray, shape (Nf, 3) | None Indices into the vertex array. data : ndarray, shape (Nv,) | None scalar at vertices """ # modifier pour tenier compte des None self._recompute = True if data is not None: self._data = data self._need_recompute = True if vertices is not None: self._vertices = vertices self._need_recompute = True if tris is not None: self._tris = tris self._need_recompute = True self.update()
[ "def", "set_data", "(", "self", ",", "vertices", "=", "None", ",", "tris", "=", "None", ",", "data", "=", "None", ")", ":", "# modifier pour tenier compte des None self._recompute = True", "if", "data", "is", "not", "None", ":", "self", ".", "_data", "=", "d...
32.73913
10.565217
def _http_post(self, url, data, **kwargs): """ Performs the HTTP POST request. """ if not kwargs.get('file_upload', False): data = json.dumps(data) kwargs.update({'data': data}) return self._http_request('post', url, kwargs)
[ "def", "_http_post", "(", "self", ",", "url", ",", "data", ",", "*", "*", "kwargs", ")", ":", "if", "not", "kwargs", ".", "get", "(", "'file_upload'", ",", "False", ")", ":", "data", "=", "json", ".", "dumps", "(", "data", ")", "kwargs", ".", "up...
25.181818
13.909091
def run(self, *args, **kwargs):
    """Update the cache of all DNS entries and perform checks

    Args:
        *args: Optional list of arguments
        **kwargs: Optional list of keyword arguments

    Returns:
        None
    """
    try:
        # Snapshot all resources the auditors need. Bucket/CNAME keys are
        # lower-cased for case-insensitive matching against DNS records.
        zones = list(DNSZone.get_all().values())
        buckets = {k.lower(): v for k, v in S3Bucket.get_all().items()}
        dists = list(CloudFrontDist.get_all().values())
        ec2_public_ips = [x.public_ip for x in EC2Instance.get_all().values() if x.public_ip]
        beanstalks = {x.cname.lower(): x for x in BeanStalk.get_all().values()}
        existing_issues = DomainHijackIssue.get_all()
        issues = []

        # List of different types of domain audits
        auditors = [
            ElasticBeanstalkAudit(beanstalks),
            S3Audit(buckets),
            S3WithoutEndpointAudit(buckets),
            EC2PublicDns(ec2_public_ips),
        ]

        # region Build list of active issues
        for zone in zones:
            for record in zone.records:
                for auditor in auditors:
                    if auditor.match(record):
                        issues.extend(auditor.audit(record, zone))

        # CloudFront origins pointing at S3 buckets that no longer exist
        # are hijackable; flag them as issues too.
        for dist in dists:
            for org in dist.origins:
                if org['type'] == 's3':
                    bucket = self.return_resource_name(org['source'], 's3')

                    if bucket not in buckets:
                        key = '{} ({})'.format(bucket, dist.type)
                        # NOTE(review): the message literal below contains a
                        # literal line break in the original source.
                        issues.append({
                            'key': key,
                            'value': 'S3Bucket {} doesnt exist on any known account. 
Referenced by {} on {}'.format(
                                bucket,
                                dist.domain_name,
                                dist.account,
                            )
                        })
        # endregion

        # region Process new, old, fixed issue lists
        old_issues = {}
        new_issues = {}
        fixed_issues = []

        for data in issues:
            # Deterministic id so re-detections map onto the same row.
            issue_id = get_resource_id('dhi', ['{}={}'.format(k, v) for k, v in data.items()])

            if issue_id in existing_issues:
                issue = existing_issues[issue_id]

                if issue.update({'state': 'EXISTING', 'end': None}):
                    db.session.add(issue.issue)

                old_issues[issue_id] = issue
            else:
                properties = {
                    'issue_hash': issue_id,
                    'state': 'NEW',
                    'start': datetime.now(),
                    'end': None,
                    'source': data['key'],
                    'description': data['value']
                }
                new_issues[issue_id] = DomainHijackIssue.create(issue_id, properties=properties)
        db.session.commit()

        # Anything previously recorded but no longer detected is fixed.
        for issue in list(existing_issues.values()):
            if issue.id not in new_issues and issue.id not in old_issues:
                fixed_issues.append(issue.to_json())
                db.session.delete(issue.issue)
        # endregion

        # Only alert if its been more than a day since the last alert
        alert_cutoff = datetime.now() - timedelta(hours=self.alert_frequency)
        old_alerts = []
        for issue_id, issue in old_issues.items():
            if issue.last_alert and issue.last_alert < alert_cutoff:
                if issue.update({'last_alert': datetime.now()}):
                    db.session.add(issue.issue)

                old_alerts.append(issue)
        db.session.commit()

        self.notify(
            [x.to_json() for x in new_issues.values()],
            [x.to_json() for x in old_alerts],
            fixed_issues
        )
    finally:
        # Discard any uncommitted state; commits above are already durable.
        db.session.rollback()
[ "def", "run", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "try", ":", "zones", "=", "list", "(", "DNSZone", ".", "get_all", "(", ")", ".", "values", "(", ")", ")", "buckets", "=", "{", "k", ".", "lower", "(", ")", ":", ...
38.180952
20.733333
def edit_block(object):
    """
    Handles edit blocks undo states.

    :param object: Object to decorate.
    :type object: object
    :return: Object.
    :rtype: object
    """

    @functools.wraps(object)
    def edit_block_wrapper(*args, **kwargs):
        """
        Handles edit blocks undo states.

        :param \*args: Arguments.
        :type \*args: \*
        :param \*\*kwargs: Keywords arguments.
        :type \*\*kwargs: \*\*
        :return: Object.
        :rtype: object
        """

        if args:
            # First positional argument is expected to be a QTextEdit-like
            # widget; group the wrapped call into one Qt undo step.
            cursor = foundations.common.get_first_item(args).textCursor()
            cursor.beginEditBlock()
        value = None
        try:
            value = object(*args, **kwargs)
        finally:
            # Always close the undo block, even when the call raised.
            if args:
                cursor.endEditBlock()
        return value

    return edit_block_wrapper
[ "def", "edit_block", "(", "object", ")", ":", "@", "functools", ".", "wraps", "(", "object", ")", "def", "edit_block_wrapper", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "\"\"\"\n Handles edit blocks undo states.\n\n :param \\*args: Arguments.\...
23.171429
15.914286
def vtas2cas(tas, h):
    """ tas2cas conversion both m/s """
    p, rho, T = vatmos(h)
    # Impact (dynamic) pressure from the compressible Bernoulli relation.
    ratio = rho * tas * tas / (7. * p)
    qdyn = p * ((1. + ratio) ** 3.5 - 1.)
    # Invert the same relation at sea-level conditions to obtain CAS.
    cas = np.sqrt(7. * p0 / rho0 * ((qdyn / p0 + 1.) ** (2. / 7.) - 1.))

    # cope with negative speed: the formula loses the sign, restore it
    return np.where(tas < 0, -cas, cas)
[ "def", "vtas2cas", "(", "tas", ",", "h", ")", ":", "p", ",", "rho", ",", "T", "=", "vatmos", "(", "h", ")", "qdyn", "=", "p", "*", "(", "(", "1.", "+", "rho", "*", "tas", "*", "tas", "/", "(", "7.", "*", "p", ")", ")", "**", "3.5", "-",...
29.888889
14.888889
def list_(env=None, user=None):
    """
    List the installed packages on an environment

    Returns
    -------
    Dictionary: {package: {version: 1.0.0, build: 1 } ... }
    """
    cmd = _create_conda_cmd('list', args=['--json'], env=env, user=user)
    ret = _execcmd(cmd, user=user)
    if ret['retcode'] != 0:
        # Propagate the raw command result so callers can inspect stderr.
        return ret

    packages = {}
    for entry in json.loads(ret['stdout']):
        # "name-version-build": the name itself may contain dashes, so
        # everything before the last two fields is the package name.
        parts = entry.split('-')
        name = '-'.join(parts[:-2])
        packages[name] = {'version': parts[-2], 'build': parts[-1]}
    return packages
[ "def", "list_", "(", "env", "=", "None", ",", "user", "=", "None", ")", ":", "cmd", "=", "_create_conda_cmd", "(", "'list'", ",", "args", "=", "[", "'--json'", "]", ",", "env", "=", "env", ",", "user", "=", "user", ")", "ret", "=", "_execcmd", "(...
32.15
18.45
def getBinding(self):
    """Return the Binding object that is referenced by this port."""
    # ``self.binding`` is a key into the owning WSDL's binding table.
    return self.getService().getWSDL().bindings[self.binding]
[ "def", "getBinding", "(", "self", ")", ":", "wsdl", "=", "self", ".", "getService", "(", ")", ".", "getWSDL", "(", ")", "return", "wsdl", ".", "bindings", "[", "self", ".", "binding", "]" ]
44.25
5.75
def _space_in_headerblock(relative_path, contents, linter_options):
    """Check for space between the filename in a header block and description.

    like such:

    # /path/to/filename
    #
    # Description
    """
    del relative_path
    del linter_options

    # 0-based index of the line that must be an empty comment; shifted by
    # one when the file opens with a shebang.
    check_index = 1

    if len(contents) > 0:
        if _line_is_shebang(contents[0]):
            check_index = 2

    if len(contents) < check_index + 1:
        description = ("""Document cannot have less """
                       """than {0} lines""").format(check_index + 1)
        return LinterFailure(description, 1, replacement=None)

    candidate = contents[check_index]

    if not _match_space_at_line(candidate):
        description = """The second line must be an empty comment"""
        # Suggested fix: insert a bare comment line (marker without its
        # trailing space) above the offending line.
        return LinterFailure(description, check_index + 1,
                             _comment_type_from_line(candidate)[:-1] + "\n" +
                             candidate)
[ "def", "_space_in_headerblock", "(", "relative_path", ",", "contents", ",", "linter_options", ")", ":", "del", "relative_path", "del", "linter_options", "check_index", "=", "1", "if", "len", "(", "contents", ")", ">", "0", ":", "if", "_line_is_shebang", "(", "...
31.448276
18.62069
def year(self, value=None):
    """
    We do *NOT* know for what year we are converting so lets assume
    the year has 365 days.
    """
    if value is None:
        # Getter: express the stored duration in 365-day years.
        return self.day() / 365
    # Setter: store the duration as days (in-place mutator, returns None).
    self.millisecond(self.day(value * 365))
[ "def", "year", "(", "self", ",", "value", "=", "None", ")", ":", "if", "value", "is", "None", ":", "return", "self", ".", "day", "(", ")", "/", "365", "else", ":", "self", ".", "millisecond", "(", "self", ".", "day", "(", "value", "*", "365", "...
30.444444
13.333333
def _build_config(self):
    '''
    Build the config of the napalm syslog parser.

    Populates ``self.config_dict`` and merges ``self.extension_config_dict``
    into it when extension configuration is available.
    '''
    if not self.config_dict:
        if not self.config_path:
            # No custom config path requested
            # Read the native config files
            self.config_path = os.path.join(
                os.path.dirname(os.path.realpath(__file__)),
                'config'
            )
        log.info('Reading the configuration from %s', self.config_path)
        self.config_dict = self._load_config(self.config_path)
    if not self.extension_config_dict and\
        self.extension_config_path and\
        os.path.normpath(self.extension_config_path) != os.path.normpath(self.config_path):  # same path?
        # When extension config is not sent as dict
        # But `extension_config_path` is specified
        log.info('Reading extension configuration from %s', self.extension_config_path)
        self.extension_config_dict = self._load_config(self.extension_config_path)
    if self.extension_config_dict:
        # Extension entries override/augment the base config in place.
        napalm_logs.utils.dictupdate(self.config_dict, self.extension_config_dict)
[ "def", "_build_config", "(", "self", ")", ":", "if", "not", "self", ".", "config_dict", ":", "if", "not", "self", ".", "config_path", ":", "# No custom config path requested", "# Read the native config files", "self", ".", "config_path", "=", "os", ".", "path", ...
50.695652
21.391304
def get_formset(self, request, obj=None, **kwargs):
    """
    Return the regular inline formset when ``obj`` already has a related
    staff member; otherwise return an empty formset (zero extra rows,
    zero max) so no new rows are offered.
    """
    if obj is not None and self.model.objects.filter(user=obj).count():
        return super(StaffMemberAdmin, self).get_formset(
            request, obj, **kwargs
        )
    # No existing staff member: build a formset that renders no forms.
    defaults = {
        "exclude": None,
        "extra": 0,
        "max_num": 0,
    }
    return inlineformset_factory(self.parent_model, self.model, **defaults)
[ "def", "get_formset", "(", "self", ",", "request", ",", "obj", "=", "None", ",", "*", "*", "kwargs", ")", ":", "if", "obj", "is", "not", "None", "and", "self", ".", "model", ".", "objects", ".", "filter", "(", "user", "=", "obj", ")", ".", "count...
31.722222
20.055556
def is_distributed(partition_column, lower_bound, upper_bound):
    """
    Check if is possible distribute a query given that args

    Args:
        partition_column: column used to share the data between the workers
        lower_bound: the minimum value to be requested from the partition_column
        upper_bound: the maximum value to be requested from the partition_column

    Returns:
        True for distributed or False if not
    """
    params = (partition_column, lower_bound, upper_bound)
    # All three supplied -> distributed (bounds must be a valid range).
    if all(arg is not None for arg in params):
        if upper_bound > lower_bound:
            return True
        raise InvalidArguments("upper_bound must be greater than lower_bound.")
    # None supplied -> standard pandas.
    if all(arg is None for arg in params):
        return False
    # Partial combinations are ambiguous and rejected outright.
    raise InvalidArguments(
        "Invalid combination of partition_column, lower_bound, upper_bound."
        "All these arguments should be passed (distributed) or none of them (standard pandas)."
    )
[ "def", "is_distributed", "(", "partition_column", ",", "lower_bound", ",", "upper_bound", ")", ":", "if", "(", "(", "partition_column", "is", "not", "None", ")", "and", "(", "lower_bound", "is", "not", "None", ")", "and", "(", "upper_bound", "is", "not", "...
38.925926
25.296296
async def get_protocol_version(self):
    """
    This method returns the major and minor values for the protocol
    version, i.e. 2.4

    :returns: Firmata protocol version
    """
    # Only issue the query once; an empty cache entry means "not asked yet".
    if self.query_reply_data.get(PrivateConstants.REPORT_VERSION) == '':
        await self._send_command([PrivateConstants.REPORT_VERSION])
        # Poll until the reply handler fills in the cached version string.
        while self.query_reply_data.get(
                PrivateConstants.REPORT_VERSION) == '':
            await asyncio.sleep(self.sleep_tune)
    return self.query_reply_data.get(PrivateConstants.REPORT_VERSION)
[ "async", "def", "get_protocol_version", "(", "self", ")", ":", "if", "self", ".", "query_reply_data", ".", "get", "(", "PrivateConstants", ".", "REPORT_VERSION", ")", "==", "''", ":", "await", "self", ".", "_send_command", "(", "[", "PrivateConstants", ".", ...
44
17.384615
def prepend_multi(self, keys, format=None, persist_to=0, replicate_to=0):
    """Prepend to multiple keys. Multi variant of :meth:`prepend`

    .. seealso:: :meth:`prepend`, :meth:`upsert_multi`, :meth:`upsert`
    """
    # Thin delegation to the C-implemented base class.
    return _Base.prepend_multi(
        self, keys,
        format=format,
        persist_to=persist_to,
        replicate_to=replicate_to)
[ "def", "prepend_multi", "(", "self", ",", "keys", ",", "format", "=", "None", ",", "persist_to", "=", "0", ",", "replicate_to", "=", "0", ")", ":", "return", "_Base", ".", "prepend_multi", "(", "self", ",", "keys", ",", "format", "=", "format", ",", ...
50.75
20.75
def is_empty(self):
    """
    A group of modules is considered empty if it has no children or if
    all its children are empty.

    >>> from admin_tools.dashboard.modules import DashboardModule, LinkList
    >>> mod = Group()
    >>> mod.is_empty()
    True
    >>> mod.children.append(DashboardModule())
    >>> mod.is_empty()
    True
    >>> mod.children.append(LinkList('links', children=[
    ...     {'title': 'example1', 'url': 'http://example.com'},
    ...     {'title': 'example2', 'url': 'http://example.com'},
    ... ]))
    >>> mod.is_empty()
    False
    """
    if super(Group, self).is_empty():
        return True
    # Non-empty only when at least one child has content.
    return all(child.is_empty() for child in self.children)
[ "def", "is_empty", "(", "self", ")", ":", "if", "super", "(", "Group", ",", "self", ")", ".", "is_empty", "(", ")", ":", "return", "True", "for", "child", "in", "self", ".", "children", ":", "if", "not", "child", ".", "is_empty", "(", ")", ":", "...
32.32
17.84
def create_file_from_path(self, share_name, directory_name, file_name, 
                          local_file_path, content_settings=None,
                          metadata=None, validate_content=False, progress_callback=None,
                          max_connections=2, timeout=None):
    '''
    Creates a new azure file from a local file path, or updates the content of an
    existing file, with automatic chunking and progress notifications.

    :param str share_name:
        Name of existing share.
    :param str directory_name:
        The path to the directory.
    :param str file_name:
        Name of file to create or update.
    :param str local_file_path:
        Path of the local file to upload as the file content.
    :param ~azure.storage.file.models.ContentSettings content_settings:
        ContentSettings object used for setting file properties.
    :param metadata:
        Name-value pairs associated with the file as metadata.
    :type metadata: dict(str, str)
    :param bool validate_content:
        If true, calculates an MD5 hash for each range of the file. The storage 
        service checks the hash of the content that has arrived with the hash 
        that was sent. This is primarily valuable for detecting bitflips on 
        the wire if using http instead of https as https (the default) will 
        already validate. Note that this MD5 hash is not stored with the 
        file.
    :param progress_callback:
        Callback for progress with signature function(current, total) where
        current is the number of bytes transfered so far and total is the
        size of the file, or None if the total size is unknown.
    :type progress_callback: func(current, total)
    :param int max_connections:
        Maximum number of parallel connections to use.
    :param int timeout:
        The timeout parameter is expressed in seconds. This method may make 
        multiple calls to the Azure service and the timeout will apply to 
        each call individually.
    '''
    _validate_not_none('share_name', share_name)
    _validate_not_none('file_name', file_name)
    _validate_not_none('local_file_path', local_file_path)

    # Size is needed up front so the stream uploader can plan its chunks.
    count = path.getsize(local_file_path)
    with open(local_file_path, 'rb') as stream:
        # Delegate to the stream-based uploader, which implements the
        # chunking, parallelism, and progress notifications.
        self.create_file_from_stream(
            share_name, directory_name, file_name, stream,
            count, content_settings, metadata, validate_content,
            progress_callback, max_connections, timeout)
[ "def", "create_file_from_path", "(", "self", ",", "share_name", ",", "directory_name", ",", "file_name", ",", "local_file_path", ",", "content_settings", "=", "None", ",", "metadata", "=", "None", ",", "validate_content", "=", "False", ",", "progress_callback", "=...
52.54
22.58
def next(self):
    """
    Handles the next debug event.

    @see: L{cont}, L{dispatch}, L{wait}, L{stop}

    @raise WindowsError: Raises an exception on error.

        If the wait operation causes an error, debugging is stopped
        (meaning all debugees are either killed or detached from).

        If the event dispatching causes an error, the event is still
        continued before returning. This may happen, for example, if the
        event handler raises an exception nobody catches.
    """
    try:
        event = self.wait()
    except Exception:
        # Waiting failed: tear the debugging session down before
        # propagating the error to the caller.
        self.stop()
        raise
    try:
        self.dispatch()
    finally:
        # Always resume the debuggee, even when an event handler raised.
        self.cont()
[ "def", "next", "(", "self", ")", ":", "try", ":", "event", "=", "self", ".", "wait", "(", ")", "except", "Exception", ":", "self", ".", "stop", "(", ")", "raise", "try", ":", "self", ".", "dispatch", "(", ")", "finally", ":", "self", ".", "cont",...
30
22.583333
def _build_menu(self, context_menu: QMenu):
    """Build the tray icon context menu in place.

    Model-item actions come first, followed by the fixed application
    actions (error view, monitoring toggle, hide, config window, quit).
    """
    logger.debug("Show tray icon enabled in settings: {}".format(cm.ConfigManager.SETTINGS[cm.SHOW_TRAY_ICON]))
    # Items selected for display are shown on top
    self._fill_context_menu_with_model_item_actions(context_menu)

    # The static actions are added at the bottom
    context_menu.addAction(self.action_view_script_error)
    context_menu.addAction(self.action_enable_monitoring)
    context_menu.addAction(self.action_hide_icon)
    context_menu.addAction(self.action_show_config_window)
    context_menu.addAction(self.action_quit)
[ "def", "_build_menu", "(", "self", ",", "context_menu", ":", "QMenu", ")", ":", "logger", ".", "debug", "(", "\"Show tray icon enabled in settings: {}\"", ".", "format", "(", "cm", ".", "ConfigManager", ".", "SETTINGS", "[", "cm", ".", "SHOW_TRAY_ICON", "]", "...
59.454545
19.727273
def copy_files(src_dir, dst_dir, filespec='*', recursive=False):
    """
    Copies any files matching filespec from src_dir into dst_dir.
    If `recursive` is `True`, also copies any matching directories.
    """
    import os
    from .modules import copyfiles

    if src_dir == dst_dir:
        raise RuntimeError('copy_files() src and dst directories must be different.')
    if not os.path.isdir(src_dir):
        raise RuntimeError('copy_files() src directory "{}" does not exist.'.format(src_dir))

    # Declarative build-step description consumed by the build runner.
    return {
        'dependencies_fn': copyfiles.list_files,
        'compiler_fn': copyfiles.copy_files,
        'input': src_dir,
        'output': dst_dir,
        'kwargs': {
            'filespec': filespec,
            'recursive': recursive,
        },
    }
[ "def", "copy_files", "(", "src_dir", ",", "dst_dir", ",", "filespec", "=", "'*'", ",", "recursive", "=", "False", ")", ":", "import", "os", "from", ".", "modules", "import", "copyfiles", "if", "src_dir", "==", "dst_dir", ":", "raise", "RuntimeError", "(", ...
30.08
22.16
def _set_element_text(self, prop_name, value):
    """Set string value of *prop_name* property to *value*."""
    if not is_string(value):
        value = str(value)

    # Core document properties are capped at 255 characters.
    if len(value) > 255:
        raise ValueError(
            "exceeded 255 char limit for property, got:\n\n'%s'" % value
        )
    self._get_or_add(prop_name).text = value
[ "def", "_set_element_text", "(", "self", ",", "prop_name", ",", "value", ")", ":", "if", "not", "is_string", "(", "value", ")", ":", "value", "=", "str", "(", "value", ")", "if", "len", "(", "value", ")", ">", "255", ":", "tmpl", "=", "(", "\"excee...
34.416667
14.166667
def map_from_config(cls, config, context_names, section_key="scoring_contexts"):
    """
    Loads a whole set of ScoringContext's from a configuration file
    while maintaining a cache of model names.  This aids in better
    memory management and allows model aliases to be implemented at the
    configuration level.

    :Returns:
        A map of context_names and ScoringContext's where models are
        loaded once and reused cross contexts.
    """
    # Cache of already-loaded models keyed by their config key, so the
    # same model object is shared by every context (and alias) using it.
    model_key_map = {}
    context_map = {}
    for context_name in context_names:
        section = config[section_key][context_name]

        model_map = {}
        for model_name, key in section['scorer_models'].items():
            if key in model_key_map:
                scorer_model = model_key_map[key]
            else:
                # First reference to this key: load and memoize the model.
                scorer_model = Model.from_config(config, key)
                model_key_map[key] = scorer_model

            model_map[model_name] = scorer_model

        extractor = Extractor.from_config(config, section['extractor'])

        context_map[context_name] = cls(
            context_name, model_map=model_map, extractor=extractor)

    return context_map
[ "def", "map_from_config", "(", "cls", ",", "config", ",", "context_names", ",", "section_key", "=", "\"scoring_contexts\"", ")", ":", "model_key_map", "=", "{", "}", "context_map", "=", "{", "}", "for", "context_name", "in", "context_names", ":", "section", "=...
40.290323
19.709677
def set_errors(self, errors):
    """Set parameter error estimate (``None`` clears any stored errors)."""
    if errors is None:
        self.__errors__ = None
    else:
        # Coerce numpy scalars to plain Python scalars before storing.
        self.__errors__ = [asscalar(e) for e in errors]
[ "def", "set_errors", "(", "self", ",", "errors", ")", ":", "if", "errors", "is", "None", ":", "self", ".", "__errors__", "=", "None", "return", "self", ".", "__errors__", "=", "[", "asscalar", "(", "e", ")", "for", "e", "in", "errors", "]" ]
34.166667
11.333333
def time_report(self, source=None, **kwargs):
    """Generate a time-table report for the selected api_calls.

    :param source: int (index), str (key), slice, list of api_calls,
        an ApiCall instance, or None for the most recent call
    :return: str -- the individual reports joined by blank lines
    """
    if source is None:
        api_calls = [self[-1]]
    elif isinstance(source, list):
        api_calls = source
    elif isinstance(source, ApiCall):
        api_calls = [source]
    else:
        # int / str / slice lookups delegate to our own __getitem__.
        api_calls = self[source]
    # (The original's second ``elif source is None`` branch was dead code
    # -- the first branch already handles None -- and has been removed.)

    if not isinstance(api_calls, list):
        api_calls = [api_calls]

    # BUGFIX: repr() used to wrap the whole generator expression, so the
    # join produced "<generator object ...>" instead of one report per call.
    return '\n\n'.join(
        repr(api_call.time_report(**kwargs)) for api_call in api_calls)
[ "def", "time_report", "(", "self", ",", "source", "=", "None", ",", "*", "*", "kwargs", ")", ":", "if", "source", "is", "None", ":", "api_calls", "=", "[", "self", "[", "-", "1", "]", "]", "elif", "isinstance", "(", "source", ",", "list", ")", ":...
37.857143
9.952381
def _call_method_from_namespace(obj, method_name, namespace): """Call the method, retrieved from obj, with the correct arguments via the namespace Args: obj: any kind of object method_name: method to be called namespace: an argparse.Namespace object containing parsed command line arguments """ method = getattr(obj, method_name) method_parser = method.parser arg_names = _get_args_name_from_parser(method_parser) if method_name == "__init__": return _call(obj, arg_names, namespace) return _call(method, arg_names, namespace)
[ "def", "_call_method_from_namespace", "(", "obj", ",", "method_name", ",", "namespace", ")", ":", "method", "=", "getattr", "(", "obj", ",", "method_name", ")", "method_parser", "=", "method", ".", "parser", "arg_names", "=", "_get_args_name_from_parser", "(", "...
36.75
13.8125
def regexNamer(regex, usePageUrl=False): """Get name from regular expression.""" @classmethod def _namer(cls, imageUrl, pageUrl): """Get first regular expression group.""" url = pageUrl if usePageUrl else imageUrl mo = regex.search(url) if mo: return mo.group(1) return _namer
[ "def", "regexNamer", "(", "regex", ",", "usePageUrl", "=", "False", ")", ":", "@", "classmethod", "def", "_namer", "(", "cls", ",", "imageUrl", ",", "pageUrl", ")", ":", "\"\"\"Get first regular expression group.\"\"\"", "url", "=", "pageUrl", "if", "usePageUrl"...
32.7
10.3
def get_manifest(self, repo_name, digest=None, version="v1"): ''' get_manifest should return an image manifest for a particular repo and tag. The image details are extracted when the client is generated. Parameters ========== repo_name: reference to the <username>/<repository>:<tag> to obtain digest: a tag or shasum version version: one of v1, v2, and config (for image config) ''' accepts = {'config': "application/vnd.docker.container.image.v1+json", 'v1': "application/vnd.docker.distribution.manifest.v1+json", 'v2': "application/vnd.docker.distribution.manifest.v2+json" } url = self._get_manifest_selfLink(repo_name, digest) bot.verbose("Obtaining manifest: %s %s" % (url, version)) headers = {'Accept': accepts[version] } try: manifest = self._get(url, headers=headers, quiet=True) manifest['selfLink'] = url except: manifest = None return manifest
[ "def", "get_manifest", "(", "self", ",", "repo_name", ",", "digest", "=", "None", ",", "version", "=", "\"v1\"", ")", ":", "accepts", "=", "{", "'config'", ":", "\"application/vnd.docker.container.image.v1+json\"", ",", "'v1'", ":", "\"application/vnd.docker.distrib...
32.733333
26
def update_gradients_full(self, dL_dK, X, X2=None): #def dK_dtheta(self, dL_dK, X, X2, target): """derivative of the covariance matrix with respect to the parameters.""" X,slices = X[:,:-1],index_to_slices(X[:,-1]) if X2 is None: X2,slices2 = X,slices K = np.zeros((X.shape[0], X.shape[0])) else: X2,slices2 = X2[:,:-1],index_to_slices(X2[:,-1]) vyt = self.variance_Yt vyx = self.variance_Yx lyt = 1./(2*self.lengthscale_Yt) lyx = 1./(2*self.lengthscale_Yx) a = self.a b = self.b c = self.c tdist = (X[:,0][:,None] - X2[:,0][None,:])**2 xdist = (X[:,1][:,None] - X2[:,1][None,:])**2 #rdist = [tdist,xdist] ttdist = (X[:,0][:,None] - X2[:,0][None,:]) rd=tdist.shape[0] dka = np.zeros([rd,rd]) dkb = np.zeros([rd,rd]) dkc = np.zeros([rd,rd]) dkYdvart = np.zeros([rd,rd]) dkYdvarx = np.zeros([rd,rd]) dkYdlent = np.zeros([rd,rd]) dkYdlenx = np.zeros([rd,rd]) kyy = lambda tdist,xdist: np.exp(-lyt*(tdist) -lyx*(xdist)) #k1 = lambda tdist: (lyt - lyt**2 * (tdist) ) #k2 = lambda xdist: ( lyx**2 * (xdist) - lyx ) #k3 = lambda xdist: ( 3*lyx**2 - 6*xdist*lyx**3 + xdist**2*lyx**4 ) #k4 = lambda tdist: -lyt*np.sqrt(tdist) k1 = lambda tdist: (2*lyt - 4*lyt**2 * (tdist) ) k2 = lambda xdist: ( 4*lyx**2 * (xdist) - 2*lyx ) k3 = lambda xdist: ( 3*4*lyx**2 - 6*8*xdist*lyx**3 + 16*xdist**2*lyx**4 ) k4 = lambda ttdist: 2*lyt*(ttdist) dkyydlyx = lambda tdist,xdist: kyy(tdist,xdist)*(-xdist) dkyydlyt = lambda tdist,xdist: kyy(tdist,xdist)*(-tdist) dk1dlyt = lambda tdist: 2. - 4*2.*lyt*tdist dk2dlyx = lambda xdist: (4.*2.*lyx*xdist -2.) 
dk3dlyx = lambda xdist: (6.*4.*lyx - 18.*8*xdist*lyx**2 + 4*16*xdist**2*lyx**3) dk4dlyt = lambda ttdist: 2*(ttdist) for i, s1 in enumerate(slices): for j, s2 in enumerate(slices2): for ss1 in s1: for ss2 in s2: if i==0 and j==0: dka[ss1,ss2] = 0 dkb[ss1,ss2] = 0 dkc[ss1,ss2] = 0 dkYdvart[ss1,ss2] = vyx*kyy(tdist[ss1,ss2],xdist[ss1,ss2]) dkYdvarx[ss1,ss2] = vyt*kyy(tdist[ss1,ss2],xdist[ss1,ss2]) dkYdlenx[ss1,ss2] = vyt*vyx*dkyydlyx(tdist[ss1,ss2],xdist[ss1,ss2]) dkYdlent[ss1,ss2] = vyt*vyx*dkyydlyt(tdist[ss1,ss2],xdist[ss1,ss2]) elif i==0 and j==1: dka[ss1,ss2] = -k2(xdist[ss1,ss2])*vyt*vyx*kyy(tdist[ss1,ss2],xdist[ss1,ss2]) dkb[ss1,ss2] = k4(ttdist[ss1,ss2])*vyt*vyx*kyy(tdist[ss1,ss2],xdist[ss1,ss2]) dkc[ss1,ss2] = vyt*vyx*kyy(tdist[ss1,ss2],xdist[ss1,ss2]) #dkYdvart[ss1,ss2] = 0 #dkYdvarx[ss1,ss2] = 0 #dkYdlent[ss1,ss2] = 0 #dkYdlenx[ss1,ss2] = 0 dkYdvart[ss1,ss2] = (-a*k2(xdist[ss1,ss2])+b*k4(ttdist[ss1,ss2])+c)*vyx*kyy(tdist[ss1,ss2],xdist[ss1,ss2]) dkYdvarx[ss1,ss2] = (-a*k2(xdist[ss1,ss2])+b*k4(ttdist[ss1,ss2])+c)*vyt*kyy(tdist[ss1,ss2],xdist[ss1,ss2]) dkYdlent[ss1,ss2] = vyt*vyx*dkyydlyt(tdist[ss1,ss2],xdist[ss1,ss2])* (-a*k2(xdist[ss1,ss2])+b*k4(ttdist[ss1,ss2])+c)+\ vyt*vyx*kyy(tdist[ss1,ss2],xdist[ss1,ss2])*b*dk4dlyt(ttdist[ss1,ss2]) dkYdlenx[ss1,ss2] = vyt*vyx*dkyydlyx(tdist[ss1,ss2],xdist[ss1,ss2])*(-a*k2(xdist[ss1,ss2])+b*k4(ttdist[ss1,ss2])+c)+\ vyt*vyx*kyy(tdist[ss1,ss2],xdist[ss1,ss2])*(-a*dk2dlyx(xdist[ss1,ss2])) elif i==1 and j==1: dka[ss1,ss2] = (2*a*k3(xdist[ss1,ss2]) - 2*c*k2(xdist[ss1,ss2]))*vyt*vyx* kyy(tdist[ss1,ss2],xdist[ss1,ss2]) dkb[ss1,ss2] = 2*b*k1(tdist[ss1,ss2])*vyt*vyx* kyy(tdist[ss1,ss2],xdist[ss1,ss2]) dkc[ss1,ss2] = (-2*a*k2(xdist[ss1,ss2]) + 2*c )*vyt*vyx* kyy(tdist[ss1,ss2],xdist[ss1,ss2]) dkYdvart[ss1,ss2] = ( b**2*k1(tdist[ss1,ss2]) - 2*a*c*k2(xdist[ss1,ss2]) + a**2*k3(xdist[ss1,ss2]) + c**2 )*vyx* kyy(tdist[ss1,ss2],xdist[ss1,ss2]) dkYdvarx[ss1,ss2] = ( b**2*k1(tdist[ss1,ss2]) - 2*a*c*k2(xdist[ss1,ss2]) + 
a**2*k3(xdist[ss1,ss2]) + c**2 )*vyt* kyy(tdist[ss1,ss2],xdist[ss1,ss2]) dkYdlent[ss1,ss2] = vyt*vyx*dkyydlyt(tdist[ss1,ss2],xdist[ss1,ss2])*( b**2*k1(tdist[ss1,ss2]) - 2*a*c*k2(xdist[ss1,ss2]) + a**2*k3(xdist[ss1,ss2]) + c**2 ) +\ vyx*vyt*kyy(tdist[ss1,ss2],xdist[ss1,ss2])*b**2*dk1dlyt(tdist[ss1,ss2]) dkYdlenx[ss1,ss2] = vyt*vyx*dkyydlyx(tdist[ss1,ss2],xdist[ss1,ss2])*( b**2*k1(tdist[ss1,ss2]) - 2*a*c*k2(xdist[ss1,ss2]) + a**2*k3(xdist[ss1,ss2]) + c**2 ) +\ vyx*vyt*kyy(tdist[ss1,ss2],xdist[ss1,ss2])* (-2*a*c*dk2dlyx(xdist[ss1,ss2]) + a**2*dk3dlyx(xdist[ss1,ss2]) ) else: dka[ss1,ss2] = -k2(xdist[ss1,ss2])*vyt*vyx*kyy(tdist[ss1,ss2],xdist[ss1,ss2]) dkb[ss1,ss2] = -k4(ttdist[ss1,ss2])*vyt*vyx*kyy(tdist[ss1,ss2],xdist[ss1,ss2]) dkc[ss1,ss2] = vyt*vyx*kyy(tdist[ss1,ss2],xdist[ss1,ss2]) #dkYdvart[ss1,ss2] = 0 #dkYdvarx[ss1,ss2] = 0 #dkYdlent[ss1,ss2] = 0 #dkYdlenx[ss1,ss2] = 0 dkYdvart[ss1,ss2] = (-a*k2(xdist[ss1,ss2])-b*k4(ttdist[ss1,ss2])+c)*vyx*kyy(tdist[ss1,ss2],xdist[ss1,ss2]) dkYdvarx[ss1,ss2] = (-a*k2(xdist[ss1,ss2])-b*k4(ttdist[ss1,ss2])+c)*vyt*kyy(tdist[ss1,ss2],xdist[ss1,ss2]) dkYdlent[ss1,ss2] = vyt*vyx*dkyydlyt(tdist[ss1,ss2],xdist[ss1,ss2])* (-a*k2(xdist[ss1,ss2])-b*k4(ttdist[ss1,ss2])+c)+\ vyt*vyx*kyy(tdist[ss1,ss2],xdist[ss1,ss2])*(-1)*b*dk4dlyt(ttdist[ss1,ss2]) dkYdlenx[ss1,ss2] = vyt*vyx*dkyydlyx(tdist[ss1,ss2],xdist[ss1,ss2])*(-a*k2(xdist[ss1,ss2])-b*k4(ttdist[ss1,ss2])+c)+\ vyt*vyx*kyy(tdist[ss1,ss2],xdist[ss1,ss2])*(-a*dk2dlyx(xdist[ss1,ss2])) self.a.gradient = np.sum(dka * dL_dK) self.b.gradient = np.sum(dkb * dL_dK) self.c.gradient = np.sum(dkc * dL_dK) self.variance_Yt.gradient = np.sum(dkYdvart * dL_dK) # Vy self.variance_Yx.gradient = np.sum(dkYdvarx * dL_dK) self.lengthscale_Yt.gradient = np.sum(dkYdlent*(-0.5*self.lengthscale_Yt**(-2)) * dL_dK) #ly np.sum(dktheta2*(-self.lengthscale_Y**(-2)) * dL_dK) self.lengthscale_Yx.gradient = np.sum(dkYdlenx*(-0.5*self.lengthscale_Yx**(-2)) * dL_dK)
[ "def", "update_gradients_full", "(", "self", ",", "dL_dK", ",", "X", ",", "X2", "=", "None", ")", ":", "#def dK_dtheta(self, dL_dK, X, X2, target):", "X", ",", "slices", "=", "X", "[", ":", ",", ":", "-", "1", "]", ",", "index_to_slices", "(", "X", "[", ...
57.314516
35.983871
def main(): """ Default entry point """ importer = ExchangeRatesImporter() print("####################################") latest_rates_json = importer.get_latest_rates() # translate into an array of PriceModels # TODO mapper = currencyrates.FixerioModelMapper() mapper = None rates = mapper.map_to_model(latest_rates_json) print("####################################") print("importing rates into gnucash...") # For writing, use True below. with BookAggregate(for_writing=False) as svc: svc.currencies.import_fx_rates(rates) print("####################################") print("displaying rates from gnucash...") importer.display_gnucash_rates()
[ "def", "main", "(", ")", ":", "importer", "=", "ExchangeRatesImporter", "(", ")", "print", "(", "\"####################################\"", ")", "latest_rates_json", "=", "importer", ".", "get_latest_rates", "(", ")", "# translate into an array of PriceModels", "# TODO ma...
30.521739
14.347826
def get_query_info(sql, con, partition_column): """ Return a columns name list and the query string Args: sql: SQL query or table name con: database connection or url string partition_column: column used to share the data between the workers Returns: Columns name list and query string """ engine = create_engine(con) if is_table(engine, sql): table_metadata = get_table_metadata(engine, sql) query = build_query_from_table(sql) cols = get_table_columns(table_metadata) else: check_query(sql) query = sql.replace(";", "") cols = get_query_columns(engine, query) # TODO allow validation that takes into account edge cases of pandas e.g. "[index]" # check_partition_column(partition_column, cols) cols_names = list(cols.keys()) return cols_names, query
[ "def", "get_query_info", "(", "sql", ",", "con", ",", "partition_column", ")", ":", "engine", "=", "create_engine", "(", "con", ")", "if", "is_table", "(", "engine", ",", "sql", ")", ":", "table_metadata", "=", "get_table_metadata", "(", "engine", ",", "sq...
35.541667
15.625
def quick_ratio(self): """Return an upper bound on ratio() relatively quickly. This isn't defined beyond that it is an upper bound on .ratio(), and is faster to compute. """ # viewing a and b as multisets, set matches to the cardinality # of their intersection; this counts the number of matches # without regard to order, so is clearly an upper bound if self.fullbcount is None: self.fullbcount = fullbcount = {} for elt in self.b: fullbcount[elt] = fullbcount.get(elt, 0) + 1 fullbcount = self.fullbcount # avail[x] is the number of times x appears in 'b' less the # number of times we've seen it in 'a' so far ... kinda avail = {} availhas, matches = avail.__contains__, 0 for elt in self.a: if availhas(elt): numb = avail[elt] else: numb = fullbcount.get(elt, 0) avail[elt] = numb - 1 if numb > 0: matches = matches + 1 return _calculate_ratio(matches, len(self.a) + len(self.b))
[ "def", "quick_ratio", "(", "self", ")", ":", "# viewing a and b as multisets, set matches to the cardinality", "# of their intersection; this counts the number of matches", "# without regard to order, so is clearly an upper bound", "if", "self", ".", "fullbcount", "is", "None", ":", ...
39.785714
16.5
def process_default(self, event): """ Writes event string representation to file object provided to my_init(). @param event: Event to be processed. Can be of any type of events but IN_Q_OVERFLOW events (see method process_IN_Q_OVERFLOW). @type event: Event instance """ self._out.write(str(event)) self._out.write('\n') self._out.flush()
[ "def", "process_default", "(", "self", ",", "event", ")", ":", "self", ".", "_out", ".", "write", "(", "str", "(", "event", ")", ")", "self", ".", "_out", ".", "write", "(", "'\\n'", ")", "self", ".", "_out", ".", "flush", "(", ")" ]
35.083333
17.416667
def apply_mask(img, mask): """Return the image with the given `mask` applied.""" from .mask import apply_mask vol, _ = apply_mask(img, mask) return vector_to_volume(vol, read_img(mask).get_data().astype(bool))
[ "def", "apply_mask", "(", "img", ",", "mask", ")", ":", "from", ".", "mask", "import", "apply_mask", "vol", ",", "_", "=", "apply_mask", "(", "img", ",", "mask", ")", "return", "vector_to_volume", "(", "vol", ",", "read_img", "(", "mask", ")", ".", "...
36.833333
16.666667
def separation_from(self, another_icrf): """Return the angle between this position and another. >>> print(ICRF([1,0,0]).separation_from(ICRF([1,1,0]))) 45deg 00' 00.0" You can also compute separations across an array of positions. >>> directions = ICRF([[1,0,-1,0], [0,1,0,-1], [0,0,0,0]]) >>> directions.separation_from(ICRF([0,1,0])).degrees array([ 90., 0., 90., 180.]) """ p1 = self.position.au p2 = another_icrf.position.au u1 = p1 / length_of(p1) u2 = p2 / length_of(p2) if u2.ndim > 1: if u1.ndim == 1: u1 = u1[:,None] elif u1.ndim > 1: u2 = u2[:,None] c = dots(u1, u2) return Angle(radians=arccos(clip(c, -1.0, 1.0)))
[ "def", "separation_from", "(", "self", ",", "another_icrf", ")", ":", "p1", "=", "self", ".", "position", ".", "au", "p2", "=", "another_icrf", ".", "position", ".", "au", "u1", "=", "p1", "/", "length_of", "(", "p1", ")", "u2", "=", "p2", "/", "le...
32.375
17
def get_namespace( self, namespace_id, include_history=True ): """ Given a namespace ID, get the ready namespace op for it. Return the dict with the parameters on success. Return None if the namespace has not yet been revealed. """ cur = self.db.cursor() return namedb_get_namespace_ready( cur, namespace_id, include_history=include_history )
[ "def", "get_namespace", "(", "self", ",", "namespace_id", ",", "include_history", "=", "True", ")", ":", "cur", "=", "self", ".", "db", ".", "cursor", "(", ")", "return", "namedb_get_namespace_ready", "(", "cur", ",", "namespace_id", ",", "include_history", ...
39.1
22.9
def _create_stable_task_type(superclass, options_scope): """Creates a singleton (via `memoized`) subclass instance for the given superclass and scope. Currently we need to support registering the same task type multiple times in different scopes. However we still want to have each task class know the options scope it was registered in. So we create a synthetic subclass here. TODO(benjy): Revisit this when we revisit the task lifecycle. We probably want to have a task *instance* know its scope, but this means converting option registration from a class method to an instance method, and instantiating the task much sooner in the lifecycle. """ subclass_name = '{0}_{1}'.format(superclass.__name__, options_scope.replace('.', '_').replace('-', '_')) if PY2: subclass_name = subclass_name.encode('utf-8') return type(subclass_name, (superclass,), { '__doc__': superclass.__doc__, '__module__': superclass.__module__, 'options_scope': options_scope, '_stable_name': superclass.stable_name() })
[ "def", "_create_stable_task_type", "(", "superclass", ",", "options_scope", ")", ":", "subclass_name", "=", "'{0}_{1}'", ".", "format", "(", "superclass", ".", "__name__", ",", "options_scope", ".", "replace", "(", "'.'", ",", "'_'", ")", ".", "replace", "(", ...
48
24
def delete_record(self, record): """ Permanently removes record from table. """ try: self.session.delete(record) self.session.commit() except Exception as e: self.session.rollback() raise ProgrammingError(e) finally: self.session.close()
[ "def", "delete_record", "(", "self", ",", "record", ")", ":", "try", ":", "self", ".", "session", ".", "delete", "(", "record", ")", "self", ".", "session", ".", "commit", "(", ")", "except", "Exception", "as", "e", ":", "self", ".", "session", ".", ...
27.833333
8.333333
def get_folders(cls, session, mailbox_or_id): """List the folders for the mailbox. Args: mailbox_or_id (helpscout.models.Mailbox or int): Mailbox or the ID of the mailbox to get the folders for. Returns: RequestPaginator(output_type=helpscout.models.Folder): Folders iterator. """ if isinstance(mailbox_or_id, Mailbox): mailbox_or_id = mailbox_or_id.id return cls( '/mailboxes/%d/folders.json' % mailbox_or_id, session=session, out_type=Folder, )
[ "def", "get_folders", "(", "cls", ",", "session", ",", "mailbox_or_id", ")", ":", "if", "isinstance", "(", "mailbox_or_id", ",", "Mailbox", ")", ":", "mailbox_or_id", "=", "mailbox_or_id", ".", "id", "return", "cls", "(", "'/mailboxes/%d/folders.json'", "%", "...
32.833333
18.888889
def convert(self, imtls, idx=0): """ Convert a probability curve into a record of dtype `imtls.dt`. :param imtls: DictArray instance :param idx: extract the data corresponding to the given inner index """ curve = numpy.zeros(1, imtls.dt) for imt in imtls: curve[imt] = self.array[imtls(imt), idx] return curve[0]
[ "def", "convert", "(", "self", ",", "imtls", ",", "idx", "=", "0", ")", ":", "curve", "=", "numpy", ".", "zeros", "(", "1", ",", "imtls", ".", "dt", ")", "for", "imt", "in", "imtls", ":", "curve", "[", "imt", "]", "=", "self", ".", "array", "...
34.454545
14.272727
def prop_samples(self,prop,return_values=True,conf=0.683): """Returns samples of given property, based on MCMC sampling :param prop: Name of desired property. Must be column of ``self.samples``. :param return_values: (optional) If ``True`` (default), then also return (median, lo_err, hi_err) corresponding to desired credible interval. :param conf: (optional) Desired quantile for credible interval. Default = 0.683. :return: :class:`np.ndarray` of desired samples :return: Optionally also return summary statistics (median, lo_err, hi_err), if ``returns_values == True`` (this is default behavior) """ samples = self.samples[prop].values if return_values: sorted = np.sort(samples) med = np.median(samples) n = len(samples) lo_ind = int(n*(0.5 - conf/2)) hi_ind = int(n*(0.5 + conf/2)) lo = med - sorted[lo_ind] hi = sorted[hi_ind] - med return samples, (med,lo,hi) else: return samples
[ "def", "prop_samples", "(", "self", ",", "prop", ",", "return_values", "=", "True", ",", "conf", "=", "0.683", ")", ":", "samples", "=", "self", ".", "samples", "[", "prop", "]", ".", "values", "if", "return_values", ":", "sorted", "=", "np", ".", "s...
33.911765
18.764706
def terms(self): """Iterator over the terms of the sum Yield from the (possibly) infinite list of terms of the indexed sum, if the sum was written out explicitly. Each yielded term in an instance of :class:`.Expression` """ from qnet.algebra.core.scalar_algebra import ScalarValue for mapping in yield_from_ranges(self.ranges): term = self.term.substitute(mapping) if isinstance(term, ScalarValue._val_types): term = ScalarValue.create(term) assert isinstance(term, Expression) yield term
[ "def", "terms", "(", "self", ")", ":", "from", "qnet", ".", "algebra", ".", "core", ".", "scalar_algebra", "import", "ScalarValue", "for", "mapping", "in", "yield_from_ranges", "(", "self", ".", "ranges", ")", ":", "term", "=", "self", ".", "term", ".", ...
42.571429
17.714286
def search(self, remote_path, keyword, recurrent='0', **kwargs): """按文件名搜索文件(不支持查找目录). :param remote_path: 需要检索的目录路径,路径必须以 /apps/ 开头。 .. warning:: * 路径长度限制为1000; * 径中不能包含以下字符:``\\\\ ? | " > < : *``; * 文件名或路径名开头结尾不能是 ``.`` 或空白字符,空白字符包括: ``\\r, \\n, \\t, 空格, \\0, \\x0B`` 。 :type remote_path: str :param keyword: 关键词 :type keyword: str :param recurrent: 是否递归。 * "0"表示不递归 * "1"表示递归 :type recurrent: str :return: Response 对象 """ params = { 'path': remote_path, 'wd': keyword, 're': recurrent, } return self._request('file', 'search', extra_params=params, **kwargs)
[ "def", "search", "(", "self", ",", "remote_path", ",", "keyword", ",", "recurrent", "=", "'0'", ",", "*", "*", "kwargs", ")", ":", "params", "=", "{", "'path'", ":", "remote_path", ",", "'wd'", ":", "keyword", ",", "'re'", ":", "recurrent", ",", "}",...
32.607143
17.321429
def list_time_ranges(self, filter=market_filter(), granularity='DAYS', session=None, lightweight=None): """ Returns a list of time ranges in the granularity specified in the request (i.e. 3PM to 4PM, Aug 14th to Aug 15th) associated with the markets selected by the MarketFilter. :param dict filter: The filter to select desired markets :param str granularity: The granularity of time periods that correspond to markets selected by the market filter :param requests.session session: Requests session object :param bool lightweight: If True will return dict not a resource :rtype: list[resources.TimeRangeResult] """ params = clean_locals(locals()) method = '%s%s' % (self.URI, 'listTimeRanges') (response, elapsed_time) = self.request(method, params, session) return self.process_response(response, resources.TimeRangeResult, elapsed_time, lightweight)
[ "def", "list_time_ranges", "(", "self", ",", "filter", "=", "market_filter", "(", ")", ",", "granularity", "=", "'DAYS'", ",", "session", "=", "None", ",", "lightweight", "=", "None", ")", ":", "params", "=", "clean_locals", "(", "locals", "(", ")", ")",...
53.166667
25.388889
def printrdf(wflow, ctx, style): # type: (Process, ContextType, Text) -> Text """Serialize the CWL document into a string, ready for printing.""" rdf = gather(wflow, ctx).serialize(format=style, encoding='utf-8') if not rdf: return u"" return rdf.decode('utf-8')
[ "def", "printrdf", "(", "wflow", ",", "ctx", ",", "style", ")", ":", "# type: (Process, ContextType, Text) -> Text", "rdf", "=", "gather", "(", "wflow", ",", "ctx", ")", ".", "serialize", "(", "format", "=", "style", ",", "encoding", "=", "'utf-8'", ")", "...
47
20.833333
def get_default_is_active(): """ Stormpath user is active by default if e-mail verification is disabled. """ directory = APPLICATION.default_account_store_mapping.account_store verif_email = directory.account_creation_policy.verification_email_status return verif_email == AccountCreationPolicy.EMAIL_STATUS_DISABLED
[ "def", "get_default_is_active", "(", ")", ":", "directory", "=", "APPLICATION", ".", "default_account_store_mapping", ".", "account_store", "verif_email", "=", "directory", ".", "account_creation_policy", ".", "verification_email_status", "return", "verif_email", "==", "A...
42.125
20.125
def add_callback(instance, prop, callback, echo_old=False, priority=0): """ Attach a callback function to a property in an instance Parameters ---------- instance The instance to add the callback to prop : str Name of callback property in `instance` callback : func The callback function to add echo_old : bool, optional If `True`, the callback function will be invoked with both the old and new values of the property, as ``func(old, new)``. If `False` (the default), will be invoked as ``func(new)`` priority : int, optional This can optionally be used to force a certain order of execution of callbacks (larger values indicate a higher priority). Examples -------- :: class Foo: bar = CallbackProperty(0) def callback(value): pass f = Foo() add_callback(f, 'bar', callback) """ p = getattr(type(instance), prop) if not isinstance(p, CallbackProperty): raise TypeError("%s is not a CallbackProperty" % prop) p.add_callback(instance, callback, echo_old=echo_old, priority=priority)
[ "def", "add_callback", "(", "instance", ",", "prop", ",", "callback", ",", "echo_old", "=", "False", ",", "priority", "=", "0", ")", ":", "p", "=", "getattr", "(", "type", "(", "instance", ")", ",", "prop", ")", "if", "not", "isinstance", "(", "p", ...
29.307692
22.333333
def Pitzer(T, Tc, omega): r'''Calculates enthalpy of vaporization at arbitrary temperatures using a fit by [2]_ to the work of Pitzer [1]_; requires a chemical's critical temperature and acentric factor. The enthalpy of vaporization is given by: .. math:: \frac{\Delta_{vap} H}{RT_c}=7.08(1-T_r)^{0.354}+10.95\omega(1-T_r)^{0.456} Parameters ---------- T : float Temperature of fluid [K] Tc : float Critical temperature of fluid [K] omega : float Acentric factor [-] Returns ------- Hvap : float Enthalpy of vaporization, [J/mol] Notes ----- This equation is listed in [3]_, page 2-487 as method #2 for estimating Hvap. This cites [2]_. The recommended range is 0.6 to 1 Tr. Users should expect up to 5% error. T must be under Tc, or an exception is raised. The original article has been reviewed and found to have a set of tabulated values which could be used instead of the fit function to provide additional accuracy. Examples -------- Example as in [3]_, p2-487; exp: 37.51 kJ/mol >>> Pitzer(452, 645.6, 0.35017) 36696.736640106414 References ---------- .. [1] Pitzer, Kenneth S. "The Volumetric and Thermodynamic Properties of Fluids. I. Theoretical Basis and Virial Coefficients." Journal of the American Chemical Society 77, no. 13 (July 1, 1955): 3427-33. doi:10.1021/ja01618a001 .. [2] Poling, Bruce E. The Properties of Gases and Liquids. 5th edition. New York: McGraw-Hill Professional, 2000. .. [3] Green, Don, and Robert Perry. Perry's Chemical Engineers' Handbook, Eighth Edition. McGraw-Hill Professional, 2007. ''' Tr = T/Tc return R*Tc * (7.08*(1. - Tr)**0.354 + 10.95*omega*(1. - Tr)**0.456)
[ "def", "Pitzer", "(", "T", ",", "Tc", ",", "omega", ")", ":", "Tr", "=", "T", "/", "Tc", "return", "R", "*", "Tc", "*", "(", "7.08", "*", "(", "1.", "-", "Tr", ")", "**", "0.354", "+", "10.95", "*", "omega", "*", "(", "1.", "-", "Tr", ")"...
31.767857
26.482143
def _describe_fields(cls): """ Return a dictionary for the class fields description. Fields should NOT be wrapped by _precomputed_field, if necessary """ dispatch_table = { 'ShortestPathModel': 'sssp', 'GraphColoringModel': 'graph_coloring', 'PagerankModel': 'pagerank', 'ConnectedComponentsModel': 'connected_components', 'TriangleCountingModel': 'triangle_counting', 'KcoreModel': 'kcore', 'DegreeCountingModel': 'degree_count', 'LabelPropagationModel': 'label_propagation' } try: toolkit_name = dispatch_table[cls.__name__] toolkit = _tc.extensions._toolkits.graph.__dict__[toolkit_name] return toolkit.get_model_fields({}) except: raise RuntimeError('Model %s does not have fields description' % cls.__name__)
[ "def", "_describe_fields", "(", "cls", ")", ":", "dispatch_table", "=", "{", "'ShortestPathModel'", ":", "'sssp'", ",", "'GraphColoringModel'", ":", "'graph_coloring'", ",", "'PagerankModel'", ":", "'pagerank'", ",", "'ConnectedComponentsModel'", ":", "'connected_compon...
42.904762
16.904762
def case(context, case_id, case_name, institute, collaborator, vcf, vcf_sv, vcf_cancer, vcf_research, vcf_sv_research, vcf_cancer_research, peddy_ped, reupload_sv, rankscore_treshold, rankmodel_version): """ Update a case in the database """ adapter = context.obj['adapter'] if not case_id: if not (case_name and institute): LOG.info("Please specify which case to update.") context.abort case_id = "{0}-{1}".format(institute, case_name) # Check if the case exists case_obj = adapter.case(case_id) if not case_obj: LOG.warning("Case %s could not be found", case_id) context.abort() case_changed = False if collaborator: if not adapter.institute(collaborator): LOG.warning("Institute %s could not be found", collaborator) context.abort() if not collaborator in case_obj['collaborators']: case_changed = True case_obj['collaborators'].append(collaborator) LOG.info("Adding collaborator %s", collaborator) if vcf: LOG.info("Updating 'vcf_snv' to %s", vcf) case_obj['vcf_files']['vcf_snv'] = vcf case_changed = True if vcf_sv: LOG.info("Updating 'vcf_sv' to %s", vcf_sv) case_obj['vcf_files']['vcf_sv'] = vcf_sv case_changed = True if vcf_cancer: LOG.info("Updating 'vcf_cancer' to %s", vcf_cancer) case_obj['vcf_files']['vcf_cancer'] = vcf_cancer case_changed = True if vcf_research: LOG.info("Updating 'vcf_research' to %s", vcf_research) case_obj['vcf_files']['vcf_research'] = vcf_research case_changed = True if vcf_sv_research: LOG.info("Updating 'vcf_sv_research' to %s", vcf_sv_research) case_obj['vcf_files']['vcf_sv_research'] = vcf_sv_research case_changed = True if vcf_cancer_research: LOG.info("Updating 'vcf_cancer_research' to %s", vcf_cancer_research) case_obj['vcf_files']['vcf_cancer_research'] = vcf_cancer_research case_changed = True if case_changed: adapter.update_case(case_obj) if reupload_sv: LOG.info("Set needs_check to True for case %s", case_id) updates = {'needs_check': True} if rankscore_treshold: updates['sv_rank_model_version'] = rankmodel_version if vcf_sv: 
updates['vcf_files.vcf_sv'] = vcf_sv if vcf_sv: updates['vcf_files.vcf_sv_research'] = vcf_sv_research updated_case = adapter.case_collection.find_one_and_update( {'_id':case_id}, {'$set': updates }, return_document=pymongo.ReturnDocument.AFTER ) rankscore_treshold = rankscore_treshold or updated_case.get("rank_score_threshold", 5) # Delete and reload the clinical SV variants if updated_case['vcf_files'].get('vcf_sv'): adapter.delete_variants(case_id, variant_type='clinical', category='sv') adapter.load_variants(updated_case, variant_type='clinical', category='sv', rank_threshold=rankscore_treshold) # Delete and reload research SV variants if updated_case['vcf_files'].get('vcf_sv_research'): adapter.delete_variants(case_id, variant_type='research', category='sv') if updated_case.get('is_research'): adapter.load_variants(updated_case, variant_type='research', category='sv', rank_threshold=rankscore_treshold)
[ "def", "case", "(", "context", ",", "case_id", ",", "case_name", ",", "institute", ",", "collaborator", ",", "vcf", ",", "vcf_sv", ",", "vcf_cancer", ",", "vcf_research", ",", "vcf_sv_research", ",", "vcf_cancer_research", ",", "peddy_ped", ",", "reupload_sv", ...
41.847059
20.223529
def tagdict(self): """return a dict converted from this string interpreted as a tag-string .. code-block:: py >>> from pprint import pprint >>> dict_ = IrcString('aaa=bbb;ccc;example.com/ddd=eee').tagdict >>> pprint({str(k): str(v) for k, v in dict_.items()}) {'aaa': 'bbb', 'ccc': 'None', 'example.com/ddd': 'eee'} """ tagdict = getattr(self, '_tagdict', None) if tagdict is None: try: self._tagdict = tags.decode(self) except ValueError: self._tagdict = {} return self._tagdict
[ "def", "tagdict", "(", "self", ")", ":", "tagdict", "=", "getattr", "(", "self", ",", "'_tagdict'", ",", "None", ")", "if", "tagdict", "is", "None", ":", "try", ":", "self", ".", "_tagdict", "=", "tags", ".", "decode", "(", "self", ")", "except", "...
36.294118
17
def get_suppliers_per_page(self, per_page=1000, page=1, params=None): """ Get suppliers per page :param per_page: How many objects per page. Default: 1000 :param page: Which page. Default: 1 :param params: Search parameters. Default: {} :return: list """ return self._get_resource_per_page(resource=SUPPLIERS, per_page=per_page, page=page, params=params)
[ "def", "get_suppliers_per_page", "(", "self", ",", "per_page", "=", "1000", ",", "page", "=", "1", ",", "params", "=", "None", ")", ":", "return", "self", ".", "_get_resource_per_page", "(", "resource", "=", "SUPPLIERS", ",", "per_page", "=", "per_page", "...
41
20.6