text
stringlengths
89
104k
code_tokens
list
avg_line_len
float64
7.91
980
score
float64
0
630
def is_present(self, host=None):
    """
    Returns true if the given host exists on the network.
    Returns false otherwise.

    The check is two-phase: first the host name is resolved with
    ``getent hosts``; if that yields an IP, a single ping is sent to
    confirm the address actually answers.

    :param host: host name to check; defaults to ``self.genv.host_string``.
    :return: True when the host resolves and responds to ping, False otherwise.
    """
    r = self.local_renderer
    r.env.host = host or self.genv.host_string
    # Resolve the host name to an IP via the local name service switch.
    ret = r._local("getent hosts {host} | awk '{{ print $1 }}'", capture=True) or ''
    if self.verbose:
        print('ret:', ret)
    ret = ret.strip()
    if self.verbose:
        print('Host %s %s present.' % (r.env.host, 'IS' if bool(ret) else 'IS NOT'))
    ip = ret
    ret = bool(ret)
    if not ret:
        # Name did not resolve at all: no point pinging.
        return False
    r.env.ip = ip
    # warn_only: a failed ping must not abort, we inspect its output instead.
    with settings(warn_only=True):
        ret = r._local('ping -c 1 {ip}', capture=True) or ''
    packet_loss = re.findall(r'([0-9]+)% packet loss', ret)
    # print('packet_loss:',packet_loss)
    # Accessible means ping reported anything below 100% packet loss.
    ip_accessible = packet_loss and int(packet_loss[0]) < 100
    if self.verbose:
        print('IP %s accessible: %s' % (ip, ip_accessible))
    return bool(ip_accessible)
[ "def", "is_present", "(", "self", ",", "host", "=", "None", ")", ":", "r", "=", "self", ".", "local_renderer", "r", ".", "env", ".", "host", "=", "host", "or", "self", ".", "genv", ".", "host_string", "ret", "=", "r", ".", "_local", "(", "\"getent ...
37.185185
17.333333
def groupedby(collection, fn):
    """Group items of *collection* by the key computed with *fn*.

    Works like :func:`itertools.groupby` but does not require the input
    to be pre-sorted, because items are bucketed into a dict.

    :param collection: iterable of items to group.
    :param fn: key function applied to each item.
    :returns: generator of ``(key, list_of_items)`` pairs.
    :note: pairs are yielded in first-occurrence order of each key
        (dict insertion order).
    """
    # defaultdict replaces the original try/except-KeyError bucketing.
    from collections import defaultdict

    buckets = defaultdict(list)
    for item in collection:
        buckets[fn(item)].append(item)
    yield from buckets.items()
[ "def", "groupedby", "(", "collection", ",", "fn", ")", ":", "d", "=", "{", "}", "for", "item", "in", "collection", ":", "k", "=", "fn", "(", "item", ")", "try", ":", "arr", "=", "d", "[", "k", "]", "except", "KeyError", ":", "arr", "=", "[", ...
21.578947
20.210526
def subtract_days(self, days: int) -> datetime:
    """Shift ``self.value`` back by *days* days and return the new value.

    :param days: number of days to subtract.
    :return: the updated ``self.value`` datetime.
    """
    self.value -= relativedelta(days=days)
    return self.value
[ "def", "subtract_days", "(", "self", ",", "days", ":", "int", ")", "->", "datetime", ":", "self", ".", "value", "=", "self", ".", "value", "-", "relativedelta", "(", "days", "=", "days", ")", "return", "self", ".", "value" ]
45.5
10
def updated_by(self):
    """
    | Comment: Resolve the user who last updated the translation.

    Returns the user object looked up through the API, or ``None`` when
    either no API client is attached or no updater id is recorded.
    """
    if not (self.api and self.updated_by_id):
        return None
    return self.api._get_user(self.updated_by_id)
[ "def", "updated_by", "(", "self", ")", ":", "if", "self", ".", "api", "and", "self", ".", "updated_by_id", ":", "return", "self", ".", "api", ".", "_get_user", "(", "self", ".", "updated_by_id", ")" ]
35.666667
11.666667
def queue_callback(self, session, block_id, data):
    """
    Enqueue a callback event for a session with the given payload data.
    Blocks while the underlying queue is full.

    :param session: the session with a defined callback function to call.
    :param block_id: the block_id of the message received.
    :param data: the data payload of the message received.
    """
    event = (session, block_id, data)
    self._queue.put(event)
[ "def", "queue_callback", "(", "self", ",", "session", ",", "block_id", ",", "data", ")", ":", "self", ".", "_queue", ".", "put", "(", "(", "session", ",", "block_id", ",", "data", ")", ")" ]
45
18.8
def get_variant_id(variant):
    """Get a variant id on the format chrom_pos_ref_alt"""
    fields = (variant.CHROM, variant.POS, variant.REF, variant.ALT[0])
    return '_'.join(str(field) for field in fields)
[ "def", "get_variant_id", "(", "variant", ")", ":", "variant_id", "=", "'_'", ".", "join", "(", "[", "str", "(", "variant", ".", "CHROM", ")", ",", "str", "(", "variant", ".", "POS", ")", ",", "str", "(", "variant", ".", "REF", ")", ",", "str", "(...
26.8
15
def extract_ace (archive, compression, cmd, verbosity, interactive, outdir):
    """Build the command list used to extract an ACE archive."""
    # The extractor requires the output directory to end with a slash.
    if not outdir.endswith('/'):
        outdir += '/'
    return [cmd, 'x', archive, outdir]
[ "def", "extract_ace", "(", "archive", ",", "compression", ",", "cmd", ",", "verbosity", ",", "interactive", ",", "outdir", ")", ":", "cmdlist", "=", "[", "cmd", ",", "'x'", "]", "if", "not", "outdir", ".", "endswith", "(", "'/'", ")", ":", "outdir", ...
34.428571
14.857143
def namedb_get_version(con):
    """
    Get the db version.

    :param con: open database connection.
    :return: the version string stored in the ``db_version`` table, or
        ``'0.0.0.0'`` when it cannot be read (i.e. no version defined yet).
    """
    sql = 'SELECT version FROM db_version;'
    args = ()
    try:
        rowdata = namedb_query_execute(con, sql, args, abort=False)
        row = rowdata.fetchone()
        return row['version']
    except Exception:
        # No version defined (missing table/row/column): fall back to the
        # default. Was a bare ``except:``, which also swallowed
        # KeyboardInterrupt/SystemExit; ``except Exception`` lets those
        # propagate while keeping the best-effort fallback behavior.
        return '0.0.0.0'
[ "def", "namedb_get_version", "(", "con", ")", ":", "sql", "=", "'SELECT version FROM db_version;'", "args", "=", "(", ")", "try", ":", "rowdata", "=", "namedb_query_execute", "(", "con", ",", "sql", ",", "args", ",", "abort", "=", "False", ")", "row", "=",...
22.785714
16.785714
def new_scansock (self):
    """Return a connected socket for sending scan data to it.

    Asks the clamd daemon for a streaming port via the STREAM command,
    parses the advertised port out of the reply, then opens and returns
    a fresh TCP socket connected to that port.

    :raises ClamavError: when no PORT line is received within 60 reads.
    :raises socket.error: on any socket failure (the relevant socket is
        closed before re-raising).
    """
    port = None
    try:
        self.sock.sendall("STREAM")
        port = None
        # Read up to 60 chunks looking for the daemon's "PORT <n>" reply.
        for dummy in range(60):
            data = self.sock.recv(self.sock_rcvbuf)
            i = data.find("PORT")
            if i != -1:
                # Skip "PORT " (5 characters) to get the number itself.
                port = int(data[i+5:])
                break
    except socket.error:
        # Command socket is unusable: close it and let the caller see why.
        self.sock.close()
        raise
    if port is None:
        raise ClamavError(_("clamd is not ready for stream scanning"))
    sockinfo = get_sockinfo(self.host, port=port)
    wsock = create_socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        wsock.connect(sockinfo[0][4])
    except socket.error:
        # Don't leak the write socket if the connect fails.
        wsock.close()
        raise
    return wsock
[ "def", "new_scansock", "(", "self", ")", ":", "port", "=", "None", "try", ":", "self", ".", "sock", ".", "sendall", "(", "\"STREAM\"", ")", "port", "=", "None", "for", "dummy", "in", "range", "(", "60", ")", ":", "data", "=", "self", ".", "sock", ...
33.56
14.76
def href_for(self, operation, qs=None, **kwargs):
    """
    Construct a full href for an operation against a resource.

    :param qs: the query string dictionary, if any
    :param kwargs: additional arguments for path expansion
    """
    url = urljoin(request.url_root, self.url_for(operation, **kwargs))
    if not qs:
        return url
    # Append with '&' when the expanded URL already carries a query string.
    separator = "&" if "?" in url else "?"
    return "{}{}{}".format(url, separator, urlencode(qs))
[ "def", "href_for", "(", "self", ",", "operation", ",", "qs", "=", "None", ",", "*", "*", "kwargs", ")", ":", "url", "=", "urljoin", "(", "request", ".", "url_root", ",", "self", ".", "url_for", "(", "operation", ",", "*", "*", "kwargs", ")", ")", ...
33.866667
22.533333
def _ParseCachedEntry8(self, value_data, cached_entry_offset):
    """Parses a Windows 8.0 or 8.1 cached entry.

    Args:
      value_data (bytes): value data.
      cached_entry_offset (int): offset of the first cached entry data
          relative to the start of the value data.

    Returns:
      AppCompatCacheCachedEntry: cached entry.

    Raises:
      ParseError: if the value data could not be parsed.
    """
    # First parse the common entry header to learn the signature.
    try:
        cached_entry = self._ReadStructureFromByteStream(
            value_data[cached_entry_offset:], cached_entry_offset,
            self._cached_entry_data_type_map)
    except (ValueError, errors.ParseError) as exception:
        raise errors.ParseError(
            'Unable to parse cached entry value with error: {0!s}'.format(
                exception))

    if cached_entry.signature not in (
        self._CACHED_ENTRY_SIGNATURE_8_0, self._CACHED_ENTRY_SIGNATURE_8_1):
        raise errors.ParseError('Unsupported cache entry signature')

    cached_entry_data = value_data[cached_entry_offset:]

    # Pick the body layout that matches the signature; the signature was
    # validated above, so exactly one branch is taken.
    if cached_entry.signature == self._CACHED_ENTRY_SIGNATURE_8_0:
        data_type_map_name = 'appcompatcache_cached_entry_body_8_0'
    elif cached_entry.signature == self._CACHED_ENTRY_SIGNATURE_8_1:
        data_type_map_name = 'appcompatcache_cached_entry_body_8_1'

    data_type_map = self._GetDataTypeMap(data_type_map_name)
    context = dtfabric_data_maps.DataTypeMapContext()

    # The body starts 12 bytes in (after the common entry header).
    try:
        cached_entry_body = self._ReadStructureFromByteStream(
            cached_entry_data[12:], cached_entry_offset + 12,
            data_type_map, context=context)
    except (ValueError, errors.ParseError) as exception:
        raise errors.ParseError(
            'Unable to parse cached entry body with error: {0!s}'.format(
                exception))

    data_offset = context.byte_size
    data_size = cached_entry_body.data_size

    cached_entry_object = AppCompatCacheCachedEntry()
    cached_entry_object.cached_entry_size = (
        12 + cached_entry.cached_entry_data_size)
    cached_entry_object.insertion_flags = cached_entry_body.insertion_flags
    cached_entry_object.last_modification_time = (
        cached_entry_body.last_modification_time)
    cached_entry_object.path = cached_entry_body.path
    cached_entry_object.shim_flags = cached_entry_body.shim_flags

    # Optional trailing data blob follows the parsed body.
    if data_size > 0:
        cached_entry_object.data = cached_entry_data[
            data_offset:data_offset + data_size]

    return cached_entry_object
[ "def", "_ParseCachedEntry8", "(", "self", ",", "value_data", ",", "cached_entry_offset", ")", ":", "try", ":", "cached_entry", "=", "self", ".", "_ReadStructureFromByteStream", "(", "value_data", "[", "cached_entry_offset", ":", "]", ",", "cached_entry_offset", ",",...
37.920635
21.206349
def lazyload(reference: str, *args, **kw):
    """Lazily load and cache an object reference upon dereferencing.

    Assign the result of calling this function with either an object
    reference passed in positionally:

        class MyClass:
            debug = lazyload('logging:debug')

    Or the attribute path to traverse (using `marrow.package.loader:traverse`)
    prefixed by a period.

        class AnotherClass:
            target = 'logging:info'
            log = lazyload('.target')

    Additional arguments are passed to the eventual call to `load()`.
    """
    assert check_argument_types()

    # NOTE(review): the inner function's name may be reflected by `lazy`
    # (e.g. as the property name) — keep it stable.
    def lazily_load_reference(self):
        ref = reference
        if ref.startswith('.'):
            # A leading period means the reference is stored on the
            # instance itself: resolve that attribute path first.
            ref = traverse(self, ref[1:])
        return load(ref, *args, **kw)

    return lazy(lazily_load_reference)
[ "def", "lazyload", "(", "reference", ":", "str", ",", "*", "args", ",", "*", "*", "kw", ")", ":", "assert", "check_argument_types", "(", ")", "def", "lazily_load_reference", "(", "self", ")", ":", "ref", "=", "reference", "if", "ref", ".", "startswith", ...
25.535714
24.357143
async def get_reviews(self, **params):
    """Receives all reviews by cid.

    Accepts:
        - cid: review id to filter on (required)
        - coinid: name of the coin database to query
        - message: optional JSON string carrying the parameters above

    Returns a list of per-review dicts (only the ``confirmed`` field is
    kept), or an error dict with keys ``error`` and ``reason``.
    """
    # Parameters may arrive wrapped in a JSON "message" envelope.
    if params.get("message"):
        params = json.loads(params.get("message", "{}"))
    if not params:
        return {"error":400, "reason":"Missed required fields"}
    cid = params.get("cid", 0)
    coinid = params.get("coinid")
    if not cid and not coinid:
        return {"error":400, "reason":"Missed cid"}
    reviews = []
    # presumably `client` is a module-level async Mongo client — verify.
    database = client[coinid]
    collection = database[settings.REVIEW]
    # Only unconfirmed reviews for this cid are collected.
    async for document in collection.find({"confirmed":None, "cid":int(cid)}):
        reviews.append({i:document[i] for i in document if i == "confirmed"})
    return reviews
[ "async", "def", "get_reviews", "(", "self", ",", "*", "*", "params", ")", ":", "if", "params", ".", "get", "(", "\"message\"", ")", ":", "params", "=", "json", ".", "loads", "(", "params", ".", "get", "(", "\"message\"", ",", "\"{}\"", ")", ")", "i...
26.416667
20.375
def GetParserObjectByName(cls, parser_name):
    """Retrieves a specific parser object by its name.

    Args:
      parser_name (str): name of the parser.

    Returns:
      BaseParser: parser object or None if no parser is registered
          under that name.
    """
    parser_class = cls._parser_classes.get(parser_name, None)
    if parser_class is None:
        return None
    return parser_class()
[ "def", "GetParserObjectByName", "(", "cls", ",", "parser_name", ")", ":", "parser_class", "=", "cls", ".", "_parser_classes", ".", "get", "(", "parser_name", ",", "None", ")", "if", "parser_class", ":", "return", "parser_class", "(", ")", "return", "None" ]
25.615385
17.384615
def filter(self, fn, skip_na=True, seed=None):
    """
    Filter this SArray by a function.

    Returns a new SArray filtered by this SArray.  If `fn` evaluates an
    element to true, this element is copied to the new SArray. If not, it
    isn't. Throws an exception if the return type of `fn` is not castable
    to a boolean value.

    Parameters
    ----------
    fn : function
        Function that filters the SArray. Must evaluate to bool or int.

    skip_na : bool, optional
        If True, will not apply fn to any undefined values.

    seed : int, optional
        Used as the seed if a random number generator is included in fn.

    Returns
    -------
    out : SArray
        The SArray filtered by fn. Each element of the SArray is of
        type int.

    Examples
    --------
    >>> sa = turicreate.SArray([1,2,3])
    >>> sa.filter(lambda x: x < 3)
    dtype: int
    Rows: 2
    [1, 2]
    """
    assert callable(fn), "Input must be callable"
    if seed is None:
        # Derive a deterministic-per-call 31-bit seed from the current time.
        seed = abs(hash("%0.20f" % time.time())) % (2 ** 31)

    # The actual filtering happens in the native proxy layer.
    with cython_context():
        return SArray(_proxy=self.__proxy__.filter(fn, skip_na, seed))
[ "def", "filter", "(", "self", ",", "fn", ",", "skip_na", "=", "True", ",", "seed", "=", "None", ")", ":", "assert", "callable", "(", "fn", ")", ",", "\"Input must be callable\"", "if", "seed", "is", "None", ":", "seed", "=", "abs", "(", "hash", "(", ...
30.317073
23.634146
def sortByNamespacePrefix(urisList, nsList):
    """
    Given an ordered list of namespaces prefixes, order a list of uris
    based on that. URIs are first sorted by name, then grouped by the
    first matching namespace prefix (in nsList order); URIs matching no
    prefix are appended at the end, preserving their sorted order.

    Eg

    In [7]: ll
    Out[7]:
    [rdflib.term.URIRef(u'http://www.w3.org/1999/02/22-rdf-syntax-ns#type'),
     rdflib.term.URIRef(u'printGenericTreeorg/2000/01/rdf-schema#comment'),
     rdflib.term.URIRef(u'http://www.w3.org/2000/01/rdf-schema#label'),
     rdflib.term.URIRef(u'http://www.w3.org/2002/07/owl#equivalentClass')]
    In [8]: sortByNamespacePrefix(ll, [OWL.OWLNS, RDFS])
    Out[8]:
    [rdflib.term.URIRef(u'http://www.w3.org/2002/07/owl#equivalentClass'),
     rdflib.term.URIRef(u'http://www.w3.org/2000/01/rdf-schema#comment'),
     rdflib.term.URIRef(u'http://www.w3.org/2000/01/rdf-schema#label'),
     rdflib.term.URIRef(u'http://www.w3.org/1999/02/22-rdf-syntax-ns#type')]
    """
    # Renamed the accumulator from ``exit`` (shadowed the builtin) to
    # ``ordered``; same behavior otherwise.
    ordered = []
    urisList = sort_uri_list_by_name(urisList)
    for ns in nsList:
        # URIs matching this prefix, keeping the name-sorted order.
        ordered += [uri for uri in urisList if str(uri).startswith(str(ns))]
    # add remaining uris (if any)
    for uri in urisList:
        if uri not in ordered:
            ordered += [uri]
    return ordered
[ "def", "sortByNamespacePrefix", "(", "urisList", ",", "nsList", ")", ":", "exit", "=", "[", "]", "urisList", "=", "sort_uri_list_by_name", "(", "urisList", ")", "for", "ns", "in", "nsList", ":", "innerexit", "=", "[", "]", "for", "uri", "in", "urisList", ...
35.542857
24.571429
def free_symbols(self):
    """Set of free SymPy symbols contained within the equation."""
    def _side_symbols(eqn, side):
        # A side lacking a ``free_symbols`` attribute contributes nothing.
        try:
            return getattr(eqn, side).free_symbols
        except AttributeError:
            return set()

    return _side_symbols(self, 'lhs') | _side_symbols(self, 'rhs')
[ "def", "free_symbols", "(", "self", ")", ":", "try", ":", "lhs_syms", "=", "self", ".", "lhs", ".", "free_symbols", "except", "AttributeError", ":", "lhs_syms", "=", "set", "(", ")", "try", ":", "rhs_syms", "=", "self", ".", "rhs", ".", "free_symbols", ...
32.272727
11.909091
def get(self, name):
    """Get a device model property.

    Args:
        name (str): The name of the property to get

    Raises:
        ArgumentError: If ``name`` is not a known property.
    """
    key = str(name)
    if key in self._properties:
        return self._properties[key]
    raise ArgumentError("Unknown property in DeviceModel", name=key)
[ "def", "get", "(", "self", ",", "name", ")", ":", "name", "=", "str", "(", "name", ")", "if", "name", "not", "in", "self", ".", "_properties", ":", "raise", "ArgumentError", "(", "\"Unknown property in DeviceModel\"", ",", "name", "=", "name", ")", "retu...
26.333333
19.833333
def ConsultarCTG(self, numero_carta_de_porte=None, numero_ctg=None,
                 patente=None, cuit_solicitante=None, cuit_destino=None,
                 fecha_emision_desde=None, fecha_emision_hasta=None):
    "Operation that queries CTGs matching the given criteria."
    # Build the SOAP request: auth block plus the optional search filters.
    ret = self.client.consultarCTG(request=dict(
        auth={
            'token': self.Token, 'sign': self.Sign,
            'cuitRepresentado': self.Cuit, },
        consultarCTGDatos=dict(
            cartaPorte=numero_carta_de_porte,
            ctg=numero_ctg,
            patente=patente,
            cuitSolicitante=cuit_solicitante,
            cuitDestino=cuit_destino,
            fechaEmisionDesde=fecha_emision_desde,
            fechaEmisionHasta=fecha_emision_hasta,
        )))['response']
    # Record any service-level errors on the instance.
    self.__analizar_errores(ret)
    datos = ret.get('arrayDatosConsultarCTG')
    if datos:
        # Cache results and position the cursor on the first record.
        self.DatosCTG = datos
        self.LeerDatosCTG(pop=False)
        return True
    else:
        # NOTE(review): empty result returns '' (not False) — callers may
        # rely on this; confirm before changing.
        self.DatosCTG = []
        return ''
[ "def", "ConsultarCTG", "(", "self", ",", "numero_carta_de_porte", "=", "None", ",", "numero_ctg", "=", "None", ",", "patente", "=", "None", ",", "cuit_solicitante", "=", "None", ",", "cuit_destino", "=", "None", ",", "fecha_emision_desde", "=", "None", ",", ...
47.961538
17.269231
def init(self):
    """Extract some info from chunks"""
    for chunk_type, chunk_data in self.chunks:
        if chunk_type == "IHDR":
            self.hdr = chunk_data
        elif chunk_type == "IEND":
            self.end = chunk_data
    if self.hdr:
        # grab w, h info: big-endian width/height at offset 8 of the
        # stored header blob
        self.width, self.height = struct.unpack("!II", self.hdr[8:16])
[ "def", "init", "(", "self", ")", ":", "for", "type_", ",", "data", "in", "self", ".", "chunks", ":", "if", "type_", "==", "\"IHDR\"", ":", "self", ".", "hdr", "=", "data", "elif", "type_", "==", "\"IEND\"", ":", "self", ".", "end", "=", "data", "...
24.636364
19.636364
def execute_pool_txns(self, three_pc_batch) -> List:
    """
    Execute a transaction that involves consensus pool management, like
    adding a node, client or a steward.

    :param three_pc_batch: batch of requests to commit
    :return: list of committed transactions
    """
    committed = self.default_executer(three_pc_batch)
    # Notify the pool manager about every committed membership change.
    for transaction in committed:
        self.poolManager.onPoolMembershipChange(transaction)
    return committed
[ "def", "execute_pool_txns", "(", "self", ",", "three_pc_batch", ")", "->", "List", ":", "committed_txns", "=", "self", ".", "default_executer", "(", "three_pc_batch", ")", "for", "txn", "in", "committed_txns", ":", "self", ".", "poolManager", ".", "onPoolMembers...
39.5
13.833333
async def get_form(self, request):
    """Base point load resource.

    Builds the resource form from the POSTed request data, or returns
    ``None`` when no form class is configured.
    """
    if not self.form:
        return None
    posted = await request.post()
    return self.form(posted, obj=self.resource)
[ "async", "def", "get_form", "(", "self", ",", "request", ")", ":", "if", "not", "self", ".", "form", ":", "return", "None", "formdata", "=", "await", "request", ".", "post", "(", ")", "return", "self", ".", "form", "(", "formdata", ",", "obj", "=", ...
35.5
8.666667
def parse(self, data, extent, desc_tag):
    # type: (bytes, int, UDFTag) -> None
    '''
    Parse the passed in data into a UDF Anchor Volume Structure.

    Parameters:
     data - The data to parse.
     extent - The extent that this descriptor currently lives at.
     desc_tag - A UDFTag object that represents the Descriptor Tag.
    Returns:
     Nothing.
    '''
    if self._initialized:
        raise pycdlibexception.PyCdlibInternalError('Anchor Volume Structure already initialized')

    # The leading tag field is discarded; desc_tag is taken from the caller.
    unpacked = struct.unpack_from(self.FMT, data, 0)
    (_, self.main_vd_length, self.main_vd_extent,
     self.reserve_vd_length, self.reserve_vd_extent) = unpacked

    self.desc_tag = desc_tag
    self.orig_extent_loc = extent
    self._initialized = True
[ "def", "parse", "(", "self", ",", "data", ",", "extent", ",", "desc_tag", ")", ":", "# type: (bytes, int, UDFTag) -> None", "if", "self", ".", "_initialized", ":", "raise", "pycdlibexception", ".", "PyCdlibInternalError", "(", "'Anchor Volume Structure already initializ...
33.25
24.083333
def add_flags(self, *flags):
    """Adds one or more flags to the query.

    For example: current-patch-set -> --current-patch-set

    :param flags: flag names without the leading ``--``.
    :return: self, so calls can be chained.
    """
    # ``*flags`` always arrives as a tuple, so the previous
    # ``isinstance(flags, (list, tuple))`` guard could never fire and was
    # removed as dead code.
    self.extend(["--%s" % f for f in flags])
    return self
[ "def", "add_flags", "(", "self", ",", "*", "flags", ")", ":", "if", "not", "isinstance", "(", "flags", ",", "(", "list", ",", "tuple", ")", ")", ":", "flags", "=", "[", "str", "(", "flags", ")", "]", "self", ".", "extend", "(", "[", "\"--%s\"", ...
27.727273
15.363636
def try_read(self, address, size):
    """Try to read memory content at specified address.

    If any location in the range was not written before, returns the
    tuple ``(False, None)``. Otherwise returns ``(True, value)`` where
    the byte at the lowest address is least significant.
    """
    value = 0x0
    for offset in range(size):
        byte_addr = address + offset
        if byte_addr not in self._memory:
            return False, None
        value |= self._read_byte(byte_addr) << (offset * 8)
    return True, value
[ "def", "try_read", "(", "self", ",", "address", ",", "size", ")", ":", "value", "=", "0x0", "for", "i", "in", "range", "(", "0", ",", "size", ")", ":", "addr", "=", "address", "+", "i", "if", "addr", "in", "self", ".", "_memory", ":", "value", ...
27.166667
20.166667
def _update_version_data(self, result, info):
    """
    Update a result dictionary (the final result from _get_project)
    with a dictionary for a specific version, which typically holds
    information gleaned from a filename or URL for an archive for
    the distribution.
    """
    # ``info`` is consumed destructively: name/version are popped so the
    # remainder can be used for digest extraction.
    name = info.pop('name')
    version = info.pop('version')
    if version in result:
        # Reuse the existing distribution for this version.
        dist = result[version]
        md = dist.metadata
    else:
        dist = make_dist(name, version, scheme=self.scheme)
        md = dist.metadata
    dist.digest = self._get_digest(info)
    # Prefer between the already-known source URL and the new one.
    if md.source_url != info['url']:
        md.source_url = self.prefer_url(md.source_url, info['url'])
    dist.locator = self
    result[version] = dist
[ "def", "_update_version_data", "(", "self", ",", "result", ",", "info", ")", ":", "name", "=", "info", ".", "pop", "(", "'name'", ")", "version", "=", "info", ".", "pop", "(", "'version'", ")", "if", "version", "in", "result", ":", "dist", "=", "resu...
40.736842
14.526316
def runner(parallel, config):
    """Run functions, provided by string name, on multiple cores on the
    current machine.

    Returns a ``run_parallel(fn_name, items)`` closure that resolves the
    function, optionally wraps the work items, and dispatches them via
    ``run_multicore``.
    """
    def run_parallel(fn_name, items):
        # Drop empty work items up front; nothing to do for an empty batch.
        items = [x for x in items if x is not None]
        if len(items) == 0:
            return []
        items = diagnostics.track_parallel(items, fn_name)
        # fn_name may already be a callable or a string to resolve.
        fn, fn_name = (fn_name, fn_name.__name__) if callable(fn_name) else (get_fn(fn_name, parallel), fn_name)
        logger.info("multiprocessing: %s" % fn_name)
        if "wrapper" in parallel:
            # Prepend wrapper metadata to each work item for checkpointed runs.
            wrap_parallel = {k: v for k, v in parallel.items() if k in set(["fresources", "checkpointed"])}
            items = [[fn_name] + parallel.get("wrapper_args", []) + [wrap_parallel] + list(x) for x in items]
        return run_multicore(fn, items, config, parallel=parallel)
    return run_parallel
[ "def", "runner", "(", "parallel", ",", "config", ")", ":", "def", "run_parallel", "(", "fn_name", ",", "items", ")", ":", "items", "=", "[", "x", "for", "x", "in", "items", "if", "x", "is", "not", "None", "]", "if", "len", "(", "items", ")", "=="...
54.666667
23
def __surname_triplet(input_string):
    """__surname_triplet(input_string) -> string

    Split the surname into consonants and vowels, then build the
    standard triplet from them.
    """
    consonants, vowels = __consonants_and_vowels(input_string)
    triplet = __common_triplet(input_string, consonants, vowels)
    return triplet
[ "def", "__surname_triplet", "(", "input_string", ")", ":", "consonants", ",", "vowels", "=", "__consonants_and_vowels", "(", "input_string", ")", "return", "__common_triplet", "(", "input_string", ",", "consonants", ",", "vowels", ")" ]
42
17.4
def establish_connection(self):
    """Establish connection to the AMQP broker."""
    conninfo = self.connection
    if not conninfo.port:
        conninfo.port = self.default_port
    credentials = pika.PlainCredentials(conninfo.userid, conninfo.password)
    params = pika.ConnectionParameters(
        conninfo.hostname,
        port=conninfo.port,
        virtual_host=conninfo.virtual_host,
        credentials=credentials)
    return self._connection_cls(params)
[ "def", "establish_connection", "(", "self", ")", ":", "conninfo", "=", "self", ".", "connection", "if", "not", "conninfo", ".", "port", ":", "conninfo", ".", "port", "=", "self", ".", "default_port", "credentials", "=", "pika", ".", "PlainCredentials", "(", ...
53.833333
17
def arrays2wcxf(C):
    """Convert a dictionary with Wilson coefficient names as keys and
    numbers or numpy arrays as values to a dictionary with a Wilson
    coefficient name followed by underscore and numeric indices as keys
    and numbers as values. This is needed for the output in WCxf format.

    Scalar entries (shape ``()`` or ``(1,)``) are kept as-is; array
    entries expand to one entry per element named ``<key>_<ij...>``
    with 1-based indices.
    """
    flat = {}
    for key, value in C.items():
        if np.shape(value) in ((), (1,)):
            flat[key] = value
            continue
        for idx in np.ndindex(value.shape):
            suffix = ''.join(str(axis_index + 1) for axis_index in idx)
            flat['{}_{}'.format(key, suffix)] = value[idx]
    return flat
[ "def", "arrays2wcxf", "(", "C", ")", ":", "d", "=", "{", "}", "for", "k", ",", "v", "in", "C", ".", "items", "(", ")", ":", "if", "np", ".", "shape", "(", "v", ")", "==", "(", ")", "or", "np", ".", "shape", "(", "v", ")", "==", "(", "1"...
41.933333
19.733333
def iter_genotypes(self):
    """Iterates on available markers.

    Returns:
        Genotypes instances.

    One Genotypes object is yielded per coded allele, so multiallelic
    VCF records yield multiple items.
    """
    for v in self.get_vcf():
        # All observed alleles: the reference plus every alternate.
        alleles = {v.REF} | set(v.ALT)
        if self.quality_field:
            # Attach the configured per-variant quality value.
            variant = ImputedVariant(v.ID, v.CHROM, v.POS, alleles, getattr(v, self.quality_field))
        else:
            variant = Variant(v.ID, v.CHROM, v.POS, alleles)
        for coded_allele, g in self._make_genotypes(v.ALT, v.genotypes):
            yield Genotypes(variant, g, v.REF, coded_allele, multiallelic=len(v.ALT) > 1)
[ "def", "iter_genotypes", "(", "self", ")", ":", "for", "v", "in", "self", ".", "get_vcf", "(", ")", ":", "alleles", "=", "{", "v", ".", "REF", "}", "|", "set", "(", "v", ".", "ALT", ")", "if", "self", ".", "quality_field", ":", "variant", "=", ...
34.578947
21.736842
def dac(self, expanded=False, return_res=64, inplace=False):
    """
    Performs the digital to analogue conversion of the signal stored
    in `d_signal` if expanded is False, or `e_d_signal` if expanded
    is True.

    The d_signal/e_d_signal, fmt, gain, and baseline fields must all be
    valid.

    If inplace is True, the dac will be performed inplace on the
    variable, the p_signal/e_p_signal attribute will be set, and the
    d_signal/e_d_signal field will be set to None.

    Parameters
    ----------
    expanded : bool, optional
        Whether to transform the `e_d_signal attribute` (True) or
        the `d_signal` attribute (False).
    return_res : int, optional
        Resolution of the returned float values: 64, 32, or anything
        else for 16-bit floats.
    inplace : bool, optional
        Whether to automatically set the object's corresponding
        physical signal attribute and set the digital signal
        attribute to None (True), or to return the converted signal
        as a separate variable without changing the original digital
        signal attribute (False).

    Returns
    -------
    p_signal : numpy array, optional
        The physical conversion of the signal. Either a 2d numpy
        array or a list of 1d numpy arrays.

    Examples
    --------
    >>> import wfdb
    >>> record = wfdb.rdsamp('sample-data/100', physical=False)
    >>> p_signal = record.dac()
    >>> record.dac(inplace=True)
    >>> record.adc(inplace=True)
    """
    # The digital nan values for each channel
    d_nans = _digi_nan(self.fmt)

    # Get the appropriate float dtype
    if return_res == 64:
        floatdtype = 'float64'
    elif return_res == 32:
        floatdtype = 'float32'
    else:
        floatdtype = 'float16'

    # Do inplace conversion and set relevant variables.
    if inplace:
        if expanded:
            for ch in range(self.n_sig):
                # nan locations for the channel
                ch_nanlocs = self.e_d_signal[ch] == d_nans[ch]
                self.e_d_signal[ch] = self.e_d_signal[ch].astype(floatdtype, copy=False)
                # In-place (value - baseline) / gain per channel.
                np.subtract(self.e_d_signal[ch], self.baseline[ch], self.e_d_signal[ch])
                np.divide(self.e_d_signal[ch], self.adc_gain[ch], self.e_d_signal[ch])
                self.e_d_signal[ch][ch_nanlocs] = np.nan
            self.e_p_signal = self.e_d_signal
            self.e_d_signal = None
        else:
            nanlocs = self.d_signal == d_nans
            # Do float conversion immediately to avoid potential under/overflow
            # of efficient int dtype
            self.d_signal = self.d_signal.astype(floatdtype, copy=False)
            np.subtract(self.d_signal, self.baseline, self.d_signal)
            np.divide(self.d_signal, self.adc_gain, self.d_signal)
            self.d_signal[nanlocs] = np.nan
            self.p_signal = self.d_signal
            self.d_signal = None
    # Return the variable
    else:
        if expanded:
            p_signal = []
            for ch in range(self.n_sig):
                # nan locations for the channel
                ch_nanlocs = self.e_d_signal[ch] == d_nans[ch]
                # NOTE(review): copy=False returns the SAME array when the
                # dtype already matches, so the in-place ops below could
                # mutate e_d_signal in that case — confirm upstream intent.
                ch_p_signal = self.e_d_signal[ch].astype(floatdtype, copy=False)
                np.subtract(ch_p_signal, self.baseline[ch], ch_p_signal)
                np.divide(ch_p_signal, self.adc_gain[ch], ch_p_signal)
                ch_p_signal[ch_nanlocs] = np.nan
                p_signal.append(ch_p_signal)
        else:
            nanlocs = self.d_signal == d_nans
            p_signal = self.d_signal.astype(floatdtype, copy=False)
            np.subtract(p_signal, self.baseline, p_signal)
            np.divide(p_signal, self.adc_gain, p_signal)
            p_signal[nanlocs] = np.nan
        return p_signal
[ "def", "dac", "(", "self", ",", "expanded", "=", "False", ",", "return_res", "=", "64", ",", "inplace", "=", "False", ")", ":", "# The digital nan values for each channel", "d_nans", "=", "_digi_nan", "(", "self", ".", "fmt", ")", "# Get the appropriate float dt...
41.052632
20.926316
def send_transaction(self, fn_name, fn_args, transact=None):
    """Calls a smart contract function using either
    `personal_sendTransaction` (if passphrase is available) or
    `ether_sendTransaction`.

    :param fn_name: str the smart contract function name
    :param fn_args: tuple arguments to pass to function above
    :param transact: dict arguments for the transaction such as from, gas, etc.
    :return:
    """
    bound_fn = getattr(self.contract.functions, fn_name)(*fn_args)
    wrapper = SquidContractFunction(bound_fn)
    return wrapper.transact(transact)
[ "def", "send_transaction", "(", "self", ",", "fn_name", ",", "fn_args", ",", "transact", "=", "None", ")", ":", "contract_fn", "=", "getattr", "(", "self", ".", "contract", ".", "functions", ",", "fn_name", ")", "(", "*", "fn_args", ")", "contract_function...
46.214286
21
def diff(full, dataset_uri, reference_dataset_uri):
    """Report the difference between two datasets.

    1. Checks that the identifiers are identicial
    2. Checks that the sizes are identical
    3. Checks that the hashes are identical, if the '--full' option is used

    If a differences is detected in step 1, steps 2 and 3 will not be carried
    out. Similarly if a difference is detected in step 2, step 3 will not be
    carried out.

    When checking that the hashes are identical the hashes for the first
    dataset are recalculated using the hashing algorithm of the reference
    dataset.

    Exits with status 1 (identifiers differ), 2 (sizes differ) or
    3 (content differs).
    """
    def echo_header(desc, ds_name, ref_ds_name, prop):
        # Red banner followed by a column legend for the diff rows.
        click.secho("Different {}".format(desc), fg="red")
        click.secho("ID, {} in '{}', {} in '{}'".format(
            prop, ds_name, prop, ref_ds_name))

    def echo_diff(diff):
        # One "id, value, reference value" line per differing item.
        for d in diff:
            line = "{}, {}, {}".format(d[0], d[1], d[2])
            click.secho(line)

    ds = dtoolcore.DataSet.from_uri(dataset_uri)
    ref_ds = dtoolcore.DataSet.from_uri(reference_dataset_uri)

    num_items = len(list(ref_ds.identifiers))

    # Step 1: identifier sets must match before anything else.
    ids_diff = diff_identifiers(ds, ref_ds)
    if len(ids_diff) > 0:
        echo_header("identifiers", ds.name, ref_ds.name, "present")
        echo_diff(ids_diff)
        sys.exit(1)

    # Step 2: compare item sizes (cheap), with a progress bar.
    with click.progressbar(length=num_items,
                           label="Comparing sizes") as progressbar:
        sizes_diff = diff_sizes(ds, ref_ds, progressbar)
    if len(sizes_diff) > 0:
        echo_header("sizes", ds.name, ref_ds.name, "size")
        echo_diff(sizes_diff)
        sys.exit(2)

    # Step 3 (optional, expensive): compare content hashes.
    if full:
        with click.progressbar(length=num_items,
                               label="Comparing hashes") as progressbar:
            content_diff = diff_content(ds, ref_ds, progressbar)
        if len(content_diff) > 0:
            echo_header("content", ds.name, ref_ds.name, "hash")
            echo_diff(content_diff)
            sys.exit(3)
[ "def", "diff", "(", "full", ",", "dataset_uri", ",", "reference_dataset_uri", ")", ":", "def", "echo_header", "(", "desc", ",", "ds_name", ",", "ref_ds_name", ",", "prop", ")", ":", "click", ".", "secho", "(", "\"Different {}\"", ".", "format", "(", "desc"...
36.45283
21.339623
def normalize_uri(cls, uri):
    """
    Normalize the given URI (removes extra slashes)

    :param uri: uri to normalize
    :return: str
    """
    uri = WWebRoute.multiple_slashes_re.sub("/", uri)
    # remove last slash, but keep the root "/" intact
    if len(uri) > 1 and uri.endswith('/'):
        uri = uri[:-1]
    return uri
[ "def", "normalize_uri", "(", "cls", ",", "uri", ")", ":", "uri", "=", "WWebRoute", ".", "multiple_slashes_re", ".", "sub", "(", "\"/\"", ",", "uri", ")", "# remove last slash", "if", "len", "(", "uri", ")", ">", "1", ":", "if", "uri", "[", "-", "1", ...
19.357143
20.642857
def _try_instantiate(self, ipopo, factory, component):
    # type: (Any, str, str) -> None
    """
    Tries to instantiate a component from the queue. Hides all exceptions.

    Looks up the component's properties in the internal queue (under the
    lock) and asks iPOPO to instantiate it; failures are logged or
    silently deferred, never raised.

    :param ipopo: The iPOPO service
    :param factory: Component factory
    :param component: Component name
    """
    try:
        # Get component properties
        with self.__lock:
            properties = self.__queue[factory][component]
    except KeyError:
        # Component not in queue
        return
    else:
        try:
            # Try instantiation
            ipopo.instantiate(factory, component, properties)
        except TypeError:
            # Unknown factory: try later
            pass
        except ValueError as ex:
            # Already known component
            _logger.error("Component already running: %s", ex)
        except Exception as ex:
            # Other error
            _logger.exception("Error instantiating component: %s", ex)
[ "def", "_try_instantiate", "(", "self", ",", "ipopo", ",", "factory", ",", "component", ")", ":", "# type: (Any, str, str) -> None", "try", ":", "# Get component properties", "with", "self", ".", "__lock", ":", "properties", "=", "self", ".", "__queue", "[", "fa...
35.655172
13.655172
async def wait_stream(aiterable):
    """Wait for an asynchronous iterable to finish and return the last item.

    The iterable is executed within a safe stream context.

    A StreamEmpty exception is raised if the sequence is empty.
    """
    # Use an explicit sentinel instead of the original trick of reading a
    # possibly-unbound local and catching NameError: the sentinel makes the
    # "no items yielded" case explicit and also works when the stream
    # legitimately yields None.
    sentinel = object()
    last_item = sentinel
    async with streamcontext(aiterable) as streamer:
        async for last_item in streamer:
            pass
    if last_item is sentinel:
        raise StreamEmpty()
    return last_item
[ "async", "def", "wait_stream", "(", "aiterable", ")", ":", "async", "with", "streamcontext", "(", "aiterable", ")", "as", "streamer", ":", "async", "for", "item", "in", "streamer", ":", "item", "try", ":", "return", "item", "except", "NameError", ":", "rai...
33.153846
15.230769
def _convert_agg_to_wx_image(agg, bbox):
    """
    Convert the region of the agg buffer bounded by bbox to a wx.Image.  If
    bbox is None, the entire buffer is converted.

    Note: agg must be a backend_agg.RendererAgg instance.
    """
    if bbox is not None:
        # agg => rgba buffer -> bitmap => clipped bitmap => image
        return wx.ImageFromBitmap(_WX28_clipped_agg_as_bitmap(agg, bbox))
    # No clipping requested: agg => rgb -> image over the whole buffer.
    image = wx.EmptyImage(int(agg.width), int(agg.height))
    image.SetData(agg.tostring_rgb())
    return image
[ "def", "_convert_agg_to_wx_image", "(", "agg", ",", "bbox", ")", ":", "if", "bbox", "is", "None", ":", "# agg => rgb -> image", "image", "=", "wx", ".", "EmptyImage", "(", "int", "(", "agg", ".", "width", ")", ",", "int", "(", "agg", ".", "height", ")"...
36.933333
17.6
def save(self, trial, storage=Checkpoint.DISK):
    """Saves the trial's state to a checkpoint.

    Args:
        trial: Trial whose runner state is persisted. Its ``_checkpoint``
            attribute is mutated in place.
        storage: ``Checkpoint.MEMORY`` keeps the checkpoint as an
            unresolved remote object handle; anything else (default
            ``Checkpoint.DISK``) saves via the runner's remote ``save``.

    Returns:
        The value stored in ``trial._checkpoint.value``.
    """
    # Record where and under which result this checkpoint was taken.
    trial._checkpoint.storage = storage
    trial._checkpoint.last_result = trial.last_result
    if storage == Checkpoint.MEMORY:
        # In-memory checkpoint: keep the (unresolved) remote object handle.
        trial._checkpoint.value = trial.runner.save_to_object.remote()
    else:
        # Keeps only highest performing checkpoints if enabled
        if trial.keep_checkpoints_num:
            try:
                last_attr_val = trial.last_result[
                    trial.checkpoint_score_attr]
                # Only checkpoint when the new score beats the best so far
                # and is a real number (NaN never counts as an improvement).
                if (trial.compare_checkpoints(last_attr_val)
                        and not math.isnan(last_attr_val)):
                    trial.best_checkpoint_attr_value = last_attr_val
                    # NOTE(review): _checkpoint_and_erase presumably updates
                    # trial._checkpoint.value as a side effect -- confirm.
                    self._checkpoint_and_erase(trial)
            except KeyError:
                # Score attribute missing from the result dict: warn, and
                # fall through without taking a checkpoint this round.
                logger.warning(
                    "Result dict has no key: {}. keep"
                    "_checkpoints_num flag will not work".format(
                        trial.checkpoint_score_attr))
        else:
            # Plain disk checkpoint; warn_if_slow flags slow saves.
            with warn_if_slow("save_to_disk"):
                trial._checkpoint.value = ray.get(
                    trial.runner.save.remote())

    return trial._checkpoint.value
[ "def", "save", "(", "self", ",", "trial", ",", "storage", "=", "Checkpoint", ".", "DISK", ")", ":", "trial", ".", "_checkpoint", ".", "storage", "=", "storage", "trial", ".", "_checkpoint", ".", "last_result", "=", "trial", ".", "last_result", "if", "sto...
47.37037
16.111111
def to_dict(self):
    """Returns a dict with the representation of this task configuration object.

    Keys are the property names discovered on this object's class; values
    are the current attribute values read from the instance.
    """
    return {
        prop_name: self.__getattribute__(prop_name)
        for prop_name, _ in find_class_properties(self.__class__)
    }
[ "def", "to_dict", "(", "self", ")", ":", "properties", "=", "find_class_properties", "(", "self", ".", "__class__", ")", "config", "=", "{", "name", ":", "self", ".", "__getattribute__", "(", "name", ")", "for", "name", ",", "_", "in", "properties", "}",...
35.25
22.875
def jumpTo(self, bytes):
    """Look for the next sequence of bytes matching a given sequence. If
    a match is found advance the position to the last byte of the match"""
    offset = self[self.position:].find(bytes)
    if offset < 0:
        # No match ahead of the current position: signal end of iteration.
        raise StopIteration
    # XXX: This is ugly, but I can't see a nicer way to fix this.
    # A fresh buffer starts at the -1 sentinel; normalise before advancing.
    if self._position == -1:
        self._position = 0
    # Land on the last byte of the matched sequence.
    self._position += offset + len(bytes) - 1
    return True
[ "def", "jumpTo", "(", "self", ",", "bytes", ")", ":", "newPosition", "=", "self", "[", "self", ".", "position", ":", "]", ".", "find", "(", "bytes", ")", "if", "newPosition", ">", "-", "1", ":", "# XXX: This is ugly, but I can't see a nicer way to fix this.", ...
44.166667
13.166667
def plot_line_loading(
        network,
        timesteps=range(1,2),
        filename=None,
        boundaries=[],
        arrows=False):
    """
    Plots line loading as a colored heatmap.

    Line loading is displayed as relative to nominal capacity in %.

    Parameters
    ----------
    network : PyPSA network container
        Holds topology of grid including results from powerflow analysis
    timesteps : range
        Defines which timesteps are considered. If more than one, an
        average line loading is calculated.
    filename : str
        Specify filename
        If not given, figure will be show directly
    boundaries : list
        If given, the colorbar is fixed to a given min and max value
    arrows : bool
        If True, the direction of the power flows is displayed as
        arrows.
    """
    # TODO: replace p0 by max(p0,p1) and analogously for q0
    # TODO: implement for all given snapshots

    # calculate relative line loading as S/S_nom
    # with S = sqrt(P^2 + Q^2)
    cmap = plt.cm.jet

    # Parallel index arrays used to build a (component, name) MultiIndex
    # for lines and links respectively.
    array_line = [['Line'] * len(network.lines), network.lines.index]
    array_link = [['Link'] * len(network.links), network.links.index]

    # NOTE(review): ``.data`` on the computed pandas object and, further
    # below, ``DataFrame.set_value`` / ``Series.append`` are removed in
    # modern pandas -- this code targets an older pandas version.
    if network.lines_t.q0.empty:
        # No reactive power results: loading is |P| summed over the
        # selected (weighted) snapshots, relative to s_nom_opt.
        loading_lines = pd.Series((network.lines_t.p0.mul(
            network.snapshot_weightings, axis=0).loc[network.snapshots[
                timesteps]].abs().sum() / (network.lines.s_nom_opt)).data,
            index=array_line)
    else:
        # Reactive power available: loading is sqrt(P^2 + Q^2) of the
        # weighted sums, relative to s_nom_opt.
        loading_lines = pd.Series(((network.lines_t.p0.mul(
            network.snapshot_weightings, axis=0)\
            .loc[network.snapshots[timesteps]].abs().sum() ** 2 +\
            network.lines_t.q0.mul(
                network.snapshot_weightings, axis=0)\
                .loc[network.snapshots[timesteps]].abs().sum() ** 2).\
            apply(sqrt) / (network.lines.s_nom_opt)).data, index = array_line)

    # Avoid covering of bidirectional links: pair up links that connect the
    # same two buses in opposite directions (and have the same length), so
    # each pair is later plotted once with the larger of the two flows.
    network.links['linked_to'] = 0
    for i, row in network.links.iterrows():
        if not (network.links.index[(network.links.bus0 == row['bus1']) &
                                    (network.links.bus1 == row['bus0']) &
                                    (network.links.length == row['length']
                                     )]).empty:
            l = network.links.index[(network.links.bus0 == row['bus1']) &
                                    (network.links.bus1 == row['bus0']) &
                                    (network.links.length == row['length'])]
            network.links.set_value(i, 'linked_to',l.values[0])

    network.links.linked_to = network.links.linked_to.astype(str)
    # Start with the flows of all unpaired links ('0' marks "no partner").
    link_load = network.links_t.p0[network.links.index[
        network.links.linked_to == '0']]

    # For each paired link, take the elementwise max of |p0| of the two
    # directions as the representative loading.
    for i, row in network.links[network.links.linked_to != '0'].iterrows():
        load = pd.DataFrame(index = network.links_t.p0.index,
                            columns = ['to', 'from'])
        load['to'] = network.links_t.p0[row['linked_to']]
        load['from'] = network.links_t.p0[i]
        link_load[i] = load.abs().max(axis = 1)

    loading_links = pd.Series((link_load.mul(
        network.snapshot_weightings, axis=0).loc[network.snapshots[
            timesteps]].abs().sum()[network.links.index] / (
                network.links.p_nom_opt)).data, index=array_link).dropna()

    # Normalise by the total snapshot weighting to obtain an average
    # loading over the selected timesteps, expressed in percent.
    load_links_rel = (loading_links/ network.snapshot_weightings\
                      [network.snapshots[timesteps]].sum())* 100
    load_lines_rel = (loading_lines / network.snapshot_weightings\
                      [network.snapshots[timesteps]].sum()) * 100
    loading = load_lines_rel.append(load_links_rel)

    ll = network.plot(line_colors=loading, line_cmap=cmap,
                      title="Line loading", line_widths=0.55)
    # add colorbar, note mappable sliced from ll by [1]
    if not boundaries:
        # Auto-scale colorbar to the observed loading range.
        v = np.linspace(min(loading), max(loading), 101)
        boundaries = [min(loading), max(loading)]
    else:
        v = np.linspace(boundaries[0], boundaries[1], 101)
    cb = plt.colorbar(ll[1], boundaries=v,
                      ticks=v[0:101:10])
    cb_Link = plt.colorbar(ll[2], boundaries=v,
                           ticks=v[0:101:10])
    cb.set_clim(vmin=boundaries[0], vmax=boundaries[1])
    cb_Link.set_clim(vmin=boundaries[0], vmax=boundaries[1])
    # The link colorbar is only created to clamp the link mappable's
    # colour limits; it is removed so a single colorbar is shown.
    cb_Link.remove()
    cb.set_label('Line loading in %')

    if arrows:
        # Annotate each line with an arrow indicating flow direction,
        # coloured to match the heatmap.
        ax = plt.axes()
        path = ll[1].get_segments()
        x_coords_lines = np.zeros([len(path)])
        cmap = cmap  # no-op; kept as-is
        colors = cmap(ll[1].get_array() / 100)
        for i in range(0, len(path)):
            # x-coordinate of the line's bus0, used to decide arrow
            # orientation relative to the drawn segment.
            x_coords_lines[i] = network.buses.loc[str(
                network.lines.iloc[i, 2]), 'x']
            color = colors[i]
            if (x_coords_lines[i] == path[i][0][0] and
                    load_lines_rel[i] >= 0):
                arrowprops = dict(arrowstyle="->", color=color)
            else:
                arrowprops = dict(arrowstyle="<-", color=color)
            # Place the arrow near the segment midpoint (between the 49%
            # and 51% points of the segment).
            ax.annotate(
                "",
                xy=abs(
                    (path[i][0] - path[i][1]) * 0.51 - path[i][0]),
                xytext=abs(
                    (path[i][0] - path[i][1]) * 0.49 - path[i][0]),
                arrowprops=arrowprops,
                size=10)

    if filename is None:
        plt.show()
    else:
        plt.savefig(filename)
        plt.close()
[ "def", "plot_line_loading", "(", "network", ",", "timesteps", "=", "range", "(", "1", ",", "2", ")", ",", "filename", "=", "None", ",", "boundaries", "=", "[", "]", ",", "arrows", "=", "False", ")", ":", "# TODO: replace p0 by max(p0,p1) and analogously for q0...
37.243056
21.215278
def beta(C, HIGHSCALE, newphys=True): """Return the beta functions of all SM parameters and SMEFT Wilson coefficients.""" g = C["g"] gp = C["gp"] gs = C["gs"] m2 = C["m2"] Lambda = C["Lambda"] Gu = C["Gu"] Gd = C["Gd"] Ge = C["Ge"] Eta1 = (3*np.trace(C["uphi"] @ Gu.conj().T) \ + 3*np.trace(C["dphi"] @ Gd.conj().T) \ + np.trace(C["ephi"] @ Ge.conj().T) \ + 3*np.conj(np.trace(C["uphi"] @ Gu.conj().T)) \ + 3*np.conj(np.trace(C["dphi"] @ Gd.conj().T)) \ + np.conj(np.trace(C["ephi"] @ Ge.conj().T)))/2 Eta2 = -6*np.trace(C["phiq3"] @ Gu @ Gu.conj().T) \ - 6*np.trace(C["phiq3"] @ Gd @ Gd.conj().T) \ - 2*np.trace(C["phil3"] @ Ge @ Ge.conj().T) \ + 3*(np.trace(C["phiud"] @ Gd.conj().T @ Gu) \ + np.conj(np.trace(C["phiud"] @ Gd.conj().T @ Gu))) Eta3 = 3*np.trace(C["phiq1"] @ Gd @ Gd.conj().T) \ - 3*np.trace(C["phiq1"] @ Gu @ Gu.conj().T) \ + 9*np.trace(C["phiq3"] @ Gd @ Gd.conj().T) \ + 9*np.trace(C["phiq3"] @ Gu @ Gu.conj().T) \ + 3*np.trace(C["phiu"] @ Gu.conj().T @ Gu) \ - 3*np.trace(C["phid"] @ Gd.conj().T @ Gd) \ - 3*(np.trace(C["phiud"] @ Gd.conj().T @ Gu) \ + np.conj(np.trace(C["phiud"] @ Gd.conj().T @ Gu))) \ + np.trace(C["phil1"] @ Ge @ Ge.conj().T) \ + 3*np.trace(C["phil3"] @ Ge @ Ge.conj().T) \ - np.trace(C["phie"] @ Ge.conj().T @ Ge) Eta4 = 12*np.trace(C["phiq1"] @ Gd @ Gd.conj().T) \ - 12*np.trace(C["phiq1"] @ Gu @ Gu.conj().T) \ + 12*np.trace(C["phiu"] @ Gu.conj().T @ Gu) \ - 12*np.trace(C["phid"] @ Gd.conj().T @ Gd) \ + 6*(np.trace(C["phiud"] @ Gd.conj().T @ Gu) \ + np.conj(np.trace(C["phiud"] @ Gd.conj().T @ Gu))) \ + 4*np.trace(C["phil1"] @ Ge @ Ge.conj().T) \ - 4*np.trace(C["phie"] @ Ge.conj().T @ Ge) Eta5 = 1j*3/2*(np.trace(Gd @ C["dphi"].conj().T) \ - np.conj(np.trace(Gd @ C["dphi"].conj().T))) \ - 1j*3/2*(np.trace(Gu @ C["uphi"].conj().T) \ - np.conj(np.trace(Gu @ C["uphi"].conj().T))) \ + 1j*1/2*(np.trace(Ge @ C["ephi"].conj().T) \ - np.conj(np.trace(Ge @ C["ephi"].conj().T))) GammaH = np.trace(3*Gu @ Gu.conj().T + 3*Gd @ Gd.conj().T + Ge @ Ge.conj().T) 
Gammaq = 1/2*(Gu @ Gu.conj().T + Gd @ Gd.conj().T) Gammau = Gu.conj().T @ Gu Gammad = Gd.conj().T @ Gd Gammal = 1/2*Ge @ Ge.conj().T Gammae = Ge.conj().T @ Ge Beta = OrderedDict() Beta["g"] = -19/6*g**3 - 8*g*m2/HIGHSCALE**2*C["phiW"] Beta["gp"] = 41/6*gp**3 - 8*gp*m2/HIGHSCALE**2*C["phiB"] Beta["gs"] = -7*gs**3 - 8*gs*m2/HIGHSCALE**2*C["phiG"] Beta["Lambda"] = 12*Lambda**2 \ + 3/4*gp**4 + 3/2*g**2*gp**2 + 9/4*g**4 - 3*(gp**2 + 3*g**2)*Lambda \ + 4*Lambda*GammaH \ - 4*(3*np.trace(Gd @ Gd.conj().T @ Gd @ Gd.conj().T) \ + 3*np.trace(Gu @ Gu.conj().T @ Gu @ Gu.conj().T) \ + np.trace(Ge @ Ge.conj().T @ Ge @ Ge.conj().T)) \ + 4*m2/HIGHSCALE**2*(12*C["phi"] \ + (-16*Lambda + 10/3*g**2)*C["phiBox"] \ + (6*Lambda + 3/2*(gp**2 - g**2))*C["phiD"] \ + 2*(Eta1 + Eta2) \ + 9*g**2*C["phiW"] \ + 3*gp**2*C["phiB"] \ + 3*g*gp*C["phiWB"] \ + 4/3*g**2*(np.trace(C["phil3"]) \ + 3*np.trace(C["phiq3"]))) Beta["m2"] = m2*(6*Lambda - 9/2*g**2 - 3/2*gp**2 \ + 2*GammaH + 4*m2/HIGHSCALE**2*(C["phiD"] \ - 2*C["phiBox"])) Beta["Gu"] = 3/2*(Gu @ Gu.conj().T @ Gu - Gd @ Gd.conj().T @ Gu) \ + (GammaH - 9/4*g**2 - 17/12*gp**2 - 8*gs**2)*Gu \ + 2*m2/HIGHSCALE**2*(3*C["uphi"] \ + 1/2*(C["phiD"] - 2*C["phiBox"])*Gu \ - C["phiq1"].conj().T @ Gu \ + 3*C["phiq3"].conj().T @ Gu \ + Gu @ C["phiu"].conj().T \ - Gd @ C["phiud"].conj().T \ - 2*(my_einsum("rpts,pt", C["qu1"], Gu) \ + 4/3*my_einsum("rpts,pt", C["qu8"], Gu)) \ - my_einsum("ptrs,pt", C["lequ1"], np.conj(Ge)) \ + 3*my_einsum("rspt,pt", C["quqd1"], np.conj(Gd)) \ + 1/2*(my_einsum("psrt,pt", C["quqd1"], np.conj(Gd)) \ + 4/3*my_einsum("psrt,pt", C["quqd8"], np.conj(Gd)))) Beta["Gd"] = 3/2*(Gd @ Gd.conj().T @ Gd - Gu @ Gu.conj().T @ Gd) \ + (GammaH - 9/4*g**2 - 5/12*gp**2 - 8*gs**2)*Gd \ + 2*m2/HIGHSCALE**2*(3*C["dphi"] + 1/2*(C["phiD"] \ - 2*C["phiBox"])*Gd \ + C["phiq1"].conj().T @ Gd \ + 3*C["phiq3"].conj().T @ Gd \ - Gd @ C["phid"].conj().T \ - Gu @ C["phiud"] \ - 2*(my_einsum("rpts,pt", C["qd1"], Gd) \ + 4/3*my_einsum("rpts,pt", C["qd8"], Gd)) \ 
+ my_einsum("ptsr,pt", np.conj(C["ledq"]), Ge) \ + 3*my_einsum("ptrs,pt", C["quqd1"], np.conj(Gu)) \ + 1/2*(my_einsum("rpts,tp", C["quqd1"], np.conj(Gu)) \ + 4/3*my_einsum("rpts,tp", C["quqd8"], np.conj(Gu)))) Beta["Ge"] = 3/2*Ge @ Ge.conj().T @ Ge + (GammaH \ - 3/4*(3*g**2 + 5*gp**2))*Ge + 2*m2/HIGHSCALE**2*(3*C["ephi"] \ + 1/2*(C["phiD"] - 2*C["phiBox"])*Ge \ + C["phil1"].conj().T @ Ge \ + 3*C["phil3"].conj().T @ Ge \ - Ge @ C["phie"].conj().T \ - 2*my_einsum("rpts,pt", C["le"], Ge) \ + 3*my_einsum("rspt,tp", C["ledq"], Gd) \ - 3*my_einsum("rspt,pt", C["lequ1"], np.conj(Gu))) Beta["Theta"] = -128*np.pi**2/g**2*m2/HIGHSCALE**2*C["phiWtilde"] Beta["Thetap"] = -128*np.pi**2/gp**2*m2/HIGHSCALE**2*C["phiBtilde"] Beta["Thetas"] = -128*np.pi**2/gs**2*m2/HIGHSCALE**2*C["phiGtilde"] if not newphys: # if there is no new physics, generate a dictionary with zero # Wilson coefficients (i.e. zero beta functions) BetaSM = C_array2dict(np.zeros(5000)) BetaSM.update(Beta) return BetaSM XiB = 2/3*(C["phiBox"] + C["phiD"]) \ + 8/3*( - np.trace(C["phil1"]) + np.trace(C["phiq1"]) \ - np.trace(C["phie"]) \ + 2*np.trace(C["phiu"]) - np.trace(C["phid"])) Xie = 2*my_einsum("prst,rs", C["le"], Ge) \ - 3*my_einsum("ptsr,rs", C["ledq"], Gd) \ + 3*my_einsum("ptsr,sr", C["lequ1"], np.conj(Gu)) Xid = 2*(my_einsum("prst,rs", C["qd1"], Gd) \ + 4/3*my_einsum("prst,rs", C["qd8"], Gd)) \ - (3*my_einsum("srpt,sr", C["quqd1"], np.conj(Gu)) \ + 1/2*(my_einsum("prst,sr", C["quqd1"], np.conj(Gu)) \ + 4/3*my_einsum("prst,sr", C["quqd8"], np.conj(Gu)))) \ - my_einsum("srtp,sr", np.conj(C["ledq"]), Ge) Xiu = 2*(my_einsum("prst,rs", C["qu1"], Gu) \ + 4/3*my_einsum("prst,rs", C["qu8"], Gu)) \ - (3*my_einsum("ptsr,sr", C["quqd1"], np.conj(Gd)) \ + 1/2*(my_einsum("stpr,sr", C["quqd1"], np.conj(Gd)) \ + 4/3*my_einsum("stpr,sr", C["quqd8"], np.conj(Gd)))) \ + my_einsum("srpt,sr", C["lequ1"], np.conj(Ge)) Beta["G"] = 15*gs**2*C["G"] Beta["Gtilde"] = 15*gs**2*C["Gtilde"] Beta["W"] = 29/2*g**2*C["W"] Beta["Wtilde"] 
= 29/2*g**2*C["Wtilde"] #c.c. Beta["phi"] = -9/2*(3*g**2 \ + gp**2)*C["phi"] \ + Lambda*(20/3*g**2*C["phiBox"] \ + 3*(gp**2 \ - g**2)*C["phiD"]) \ - 3/4*(g**2 \ + gp**2)**2*C["phiD"] \ + 6*Lambda*(3*g**2*C["phiW"] \ + gp**2*C["phiB"] \ + g*gp*C["phiWB"]) \ - 3*(g**2*gp**2 \ + 3*g**4)*C["phiW"] \ - 3*(gp**4 \ + g**2*gp**2)*C["phiB"] \ - 3*(g*gp**3 \ + g**3*gp)*C["phiWB"] \ + 8/3*Lambda*g**2*(np.trace(C["phil3"]) \ + 3*np.trace(C["phiq3"])) \ + 54*Lambda*C["phi"] \ - 40*Lambda**2*C["phiBox"] \ + 12*Lambda**2*C["phiD"] \ + 4*Lambda*(Eta1 \ + Eta2) \ - 4*(3*np.trace(C["uphi"] @ Gu.conj().T @ Gu @ Gu.conj().T) \ + 3*np.trace(C["dphi"] @ Gd.conj().T @ Gd @ Gd.conj().T) \ + np.trace(C["ephi"] @ Ge.conj().T @ Ge @ Ge.conj().T) \ + 3*np.conj(np.trace(C["uphi"] @ Gu.conj().T @ Gu @ Gu.conj().T)) \ + 3*np.conj(np.trace(C["dphi"] @ Gd.conj().T @ Gd @ Gd.conj().T)) \ + np.conj(np.trace(C["ephi"] @ Ge.conj().T @ Ge @ Ge.conj().T))) \ + 6*GammaH*C["phi"] Beta["phiBox"] = -(4*g**2 \ + 4/3*gp**2)*C["phiBox"] \ + 5/3*gp**2*C["phiD"] \ + 2*g**2*(np.trace(C["phil3"]) \ + 3*np.trace(C["phiq3"])) \ + 2/3*gp**2*(2*np.trace(C["phiu"]) \ - np.trace(C["phid"]) \ - np.trace(C["phie"]) \ + np.trace(C["phiq1"]) \ - np.trace(C["phil1"])) \ + 12*Lambda*C["phiBox"] \ - 2*Eta3 \ + 4*GammaH*C["phiBox"] Beta["phiD"] = 20/3*gp**2*C["phiBox"] \ + (9/2*g**2 \ - 5/6*gp**2)*C["phiD"] \ + 8/3*gp**2*(2*np.trace(C["phiu"]) \ - np.trace(C["phid"]) \ - np.trace(C["phie"]) \ + np.trace(C["phiq1"]) \ - np.trace(C["phil1"])) \ + 6*Lambda*C["phiD"] \ - 2*Eta4 \ + 4*GammaH*C["phiD"] #c.c. Beta["phiG"] = (-3/2*gp**2 \ - 9/2*g**2 \ - 14*gs**2)*C["phiG"] \ + 6*Lambda*C["phiG"] \ - 2*gs*(np.trace(C["uG"] @ Gu.conj().T) \ + np.trace(C["dG"] @ Gd.conj().T) \ + np.conj(np.trace(C["uG"] @ Gu.conj().T)) \ + np.conj(np.trace(C["dG"] @ Gd.conj().T))) \ + 2*GammaH*C["phiG"] #c.c. 
Beta["phiB"] = (85/6*gp**2 \ - 9/2*g**2)*C["phiB"] \ + 3*g*gp*C["phiWB"] \ + 6*Lambda*C["phiB"] \ + gp*( \ - 5*np.trace(C["uB"] @ Gu.conj().T) \ + np.trace(C["dB"] @ Gd.conj().T) \ + 3*np.trace(C["eB"] @ Ge.conj().T) \ - 5*np.conj(np.trace(C["uB"] @ Gu.conj().T)) \ + np.conj(np.trace(C["dB"] @ Gd.conj().T)) \ + 3*np.conj(np.trace(C["eB"] @ Ge.conj().T))) \ + 2*GammaH*C["phiB"] #c.c. Beta["phiW"] = (-3/2*gp**2 \ - 53/6*g**2)*C["phiW"] \ + g*gp*C["phiWB"] \ - 15*g**3*C["W"] \ + 6*Lambda*C["phiW"] \ - g*(3*np.trace(C["uW"] @ Gu.conj().T) \ + 3*np.trace(C["dW"] @ Gd.conj().T) \ + np.trace(C["eW"] @ Ge.conj().T) \ + 3*np.conj(np.trace(C["uW"] @ Gu.conj().T)) \ + 3*np.conj(np.trace(C["dW"] @ Gd.conj().T)) \ + np.conj(np.trace(C["eW"] @ Ge.conj().T))) \ + 2*GammaH*C["phiW"] #c.c. Beta["phiWB"] = (19/3*gp**2 \ + 4/3*g**2)*C["phiWB"] \ + 2*g*gp*(C["phiB"] \ + C["phiW"]) \ + 3*g**2*gp*C["W"] \ + 2*Lambda*C["phiWB"] \ + g*(3*np.trace(C["uB"] @ Gu.conj().T) \ - 3*np.trace(C["dB"] @ Gd.conj().T) \ - np.trace(C["eB"] @ Ge.conj().T) \ + 3*np.conj(np.trace(C["uB"] @ Gu.conj().T)) \ - 3*np.conj(np.trace(C["dB"] @ Gd.conj().T)) \ - np.conj(np.trace(C["eB"] @ Ge.conj().T))) \ + gp*(5*np.trace(C["uW"] @ Gu.conj().T) \ + np.trace(C["dW"] @ Gd.conj().T) \ + 3*np.trace(C["eW"] @ Ge.conj().T) \ + 5*np.conj(np.trace(C["uW"] @ Gu.conj().T)) \ + np.conj(np.trace(C["dW"] @ Gd.conj().T)) \ + 3*np.conj(np.trace(C["eW"] @ Ge.conj().T))) \ + 2*GammaH*C["phiWB"] #problem with i as I*iCPV Beta["phiGtilde"] = (-3/2*gp**2 \ - 9/2*g**2 \ - 14*gs**2)*C["phiGtilde"] \ + 6*Lambda*C["phiGtilde"] \ + 2j*gs*(np.trace(C["uG"] @ Gu.conj().T) \ + np.trace(C["dG"] @ Gd.conj().T) \ - np.conj(np.trace(C["uG"] @ Gu.conj().T)) \ - np.conj(np.trace(C["dG"] @ Gd.conj().T))) \ + 2*GammaH*C["phiGtilde"] #i Beta["phiBtilde"] = (85/6*gp**2 \ - 9/2*g**2)*C["phiBtilde"] \ + 3*g*gp*C["phiWtildeB"] \ + 6*Lambda*C["phiBtilde"] \ - 1j*gp*( \ - 5*np.trace(C["uB"] @ Gu.conj().T) \ + np.trace(C["dB"] @ Gd.conj().T) \ + 
3*np.trace(C["eB"] @ Ge.conj().T) \ + 5*np.conj(np.trace(C["uB"] @ Gu.conj().T)) \ - np.conj(np.trace(C["dB"] @ Gd.conj().T)) \ - 3*np.conj(np.trace(C["eB"] @ Ge.conj().T))) \ + 2*GammaH*C["phiBtilde"] #i Beta["phiWtilde"] = (-3/2*gp**2 \ - 53/6*g**2)*C["phiWtilde"] \ + g*gp*C["phiWtildeB"] \ - 15*g**3*C["Wtilde"] \ + 6*Lambda*C["phiWtilde"] \ + 1j*g*(3*np.trace(C["uW"] @ Gu.conj().T) \ + 3*np.trace(C["dW"] @ Gd.conj().T) \ + np.trace(C["eW"] @ Ge.conj().T) \ - 3*np.conj(np.trace(C["uW"] @ Gu.conj().T)) \ - 3*np.conj(np.trace(C["dW"] @ Gd.conj().T)) \ - np.conj(np.trace(C["eW"] @ Ge.conj().T))) \ + 2*GammaH*C["phiWtilde"] #i Beta["phiWtildeB"] = (19/3*gp**2 \ + 4/3*g**2)*C["phiWtildeB"] \ + 2*g*gp*(C["phiBtilde"] \ + C["phiWtilde"]) \ + 3*g**2*gp*C["Wtilde"] \ + 2*Lambda*C["phiWtildeB"] \ - 1j*g*(3*np.trace(C["uB"] @ Gu.conj().T) \ - 3*np.trace(C["dB"] @ Gd.conj().T) \ - np.trace(C["eB"] @ Ge.conj().T) \ - 3*np.conj(np.trace(C["uB"] @ Gu.conj().T)) \ + 3*np.conj(np.trace(C["dB"] @ Gd.conj().T)) \ + np.conj(np.trace(C["eB"] @ Ge.conj().T))) \ - 1j*gp*(5*np.trace(C["uW"] @ Gu.conj().T) \ + np.trace(C["dW"] @ Gd.conj().T) \ + 3*np.trace(C["eW"] @ Ge.conj().T) \ - 5*np.conj(np.trace(C["uW"] @ Gu.conj().T)) \ - np.conj(np.trace(C["dW"] @ Gd.conj().T)) \ - 3*np.conj(np.trace(C["eW"] @ Ge.conj().T))) \ + 2*GammaH*C["phiWtildeB"] """(3,3)""" #i #the coefficients of Eta5 is not equal Beta["uphi"] = (10/3*g**2*C["phiBox"] \ + 3/2*(gp**2 \ - g**2)*C["phiD"] \ + 32*gs**2*(C["phiG"] \ + 1j*C["phiGtilde"]) \ + 9*g**2*(C["phiW"] \ + 1j*C["phiWtilde"]) \ + 17/3*gp**2*(C["phiB"] \ + 1j*C["phiBtilde"]) \ - g*gp*(C["phiWB"] \ + 1j*C["phiWtildeB"]) \ + 4/3*g**2*(np.trace(C["phil3"]) \ + 3*np.trace(C["phiq3"])))*Gu \ - (35/12*gp**2 \ + 27/4*g**2 \ + 8*gs**2)*C["uphi"] \ - gp*(5*gp**2 \ - 3*g**2)*C["uB"] \ + g*(5*gp**2 \ - 9*g**2)*C["uW"] \ - (3*g**2 \ - gp**2)*Gu @ C["phiu"] \ + 3*g**2*Gd @ C["phiud"].conj().T \ + 4*gp**2*C["phiq1"] @ Gu \ - 4*gp**2*C["phiq3"] @ Gu \ - 5*gp*(C["uB"] @ 
Gu.conj().T @ Gu \ + Gu @ Gu.conj().T @ C["uB"]) \ - 3*g*(C["uW"] @ Gu.conj().T @ Gu \ - Gu @ Gu.conj().T @ C["uW"]) \ - 16*gs*(C["uG"] @ Gu.conj().T @ Gu \ + Gu @ Gu.conj().T @ C["uG"]) \ - 12*g*Gd @ Gd.conj().T @ C["uW"] \ - 6*g*C["dW"] @ Gd.conj().T @ Gu \ + Lambda*(12*C["uphi"] \ - 2*C["phiq1"] @ Gu \ + 6*C["phiq3"] @ Gu \ + 2*Gu @ C["phiu"] \ - 2*Gd @ C["phiud"].conj().T \ - 2*C["phiBox"]*Gu \ + C["phiD"]*Gu \ - 4*my_einsum("rpts,pt", C["qu1"], Gu) \ - 16/3*my_einsum("rpts,pt", C["qu8"], Gu) \ - 2*my_einsum("ptrs,pt", C["lequ1"], np.conj(Ge)) \ + 6*my_einsum("rspt,pt", C["quqd1"], np.conj(Gd)) \ + my_einsum("psrt,pt", C["quqd1"], np.conj(Gd)) \ + 4/3*my_einsum("psrt,pt", C["quqd8"], np.conj(Gd))) \ + 2*(Eta1 \ + Eta2 \ - 1j*Eta5)*Gu \ + (C["phiD"] \ - 6*C["phiBox"])*Gu @ Gu.conj().T @ Gu \ - 2*C["phiq1"] @ Gu @ Gu.conj().T @ Gu \ + 6*C["phiq3"] @ Gd @ Gd.conj().T @ Gu \ + 2*Gu @ Gu.conj().T @ Gu @ C["phiu"] \ - 2*Gd @ Gd.conj().T @ Gd @ C["phiud"].conj().T \ + 8*(my_einsum("rpts,pt", C["qu1"], Gu @ Gu.conj().T @ Gu) \ + 4/3*my_einsum("rpts,pt", C["qu8"], Gu @ Gu.conj().T @ Gu)) \ - 2*(my_einsum("tsrp,pt", C["quqd1"], Gd.conj().T @ Gd @ Gd.conj().T) \ + 4/3*my_einsum("tsrp,pt", C["quqd8"], Gd.conj().T @ Gd @ Gd.conj().T)) \ - 12*my_einsum("rstp,pt", C["quqd1"], Gd.conj().T @ Gd @ Gd.conj().T) \ + 4*my_einsum("tprs,pt", C["lequ1"], Ge.conj().T @ Ge @ Ge.conj().T) \ + 4*C["uphi"] @ Gu.conj().T @ Gu \ + 5*Gu @ Gu.conj().T @ C["uphi"] \ - 2*Gd @ C["dphi"].conj().T @ Gu \ - C["dphi"] @ Gd.conj().T @ Gu \ - 2*Gd @ Gd.conj().T @ C["uphi"] \ + 3*GammaH*C["uphi"] \ + Gammaq @ C["uphi"] \ + C["uphi"] @ Gammau #i #Eta5 Beta["dphi"] = (10/3*g**2*C["phiBox"] \ + 3/2*(gp**2 \ - g**2)*C["phiD"] \ + 32*gs**2*(C["phiG"] \ + 1j*C["phiGtilde"]) \ + 9*g**2*(C["phiW"] \ + 1j*C["phiWtilde"]) \ + 5/3*gp**2*(C["phiB"] \ + 1j*C["phiBtilde"]) \ + g*gp*(C["phiWB"] \ + 1j*C["phiWtildeB"]) \ + 4/3*g**2*(np.trace(C["phil3"]) \ + 3*np.trace(C["phiq3"])))*Gd \ - (23/12*gp**2 \ + 27/4*g**2 \ + 
8*gs**2)*C["dphi"] \ - gp*(3*g**2 \ - gp**2)*C["dB"] \ - g*(9*g**2 \ - gp**2)*C["dW"] \ + (3*g**2 \ + gp**2)*Gd @ C["phid"] \ + 3*g**2*Gu @ C["phiud"] \ - 2*gp**2*C["phiq1"] @ Gd \ - 2*gp**2*C["phiq3"] @ Gd \ + gp*(C["dB"] @ Gd.conj().T @ Gd \ + Gd @ Gd.conj().T @ C["dB"]) \ - 3*g*(C["dW"] @ Gd.conj().T @ Gd \ - Gd @ Gd.conj().T @ C["dW"]) \ - 16*gs*(C["dG"] @ Gd.conj().T @ Gd \ + Gd @ Gd.conj().T @ C["dG"]) \ - 12*g*Gu @ Gu.conj().T @ C["dW"] \ - 6*g*C["uW"] @ Gu.conj().T @ Gd \ + Lambda*(12*C["dphi"] \ + 2*C["phiq1"] @ Gd \ + 6*C["phiq3"] @ Gd \ - 2*Gd @ C["phid"] \ - 2*Gu @ C["phiud"] \ - 2*C["phiBox"]*Gd \ + C["phiD"]*Gd \ - 4*my_einsum("rpts,pt", C["qd1"], Gd) \ - 16/3*my_einsum("rpts,pt", C["qd8"], Gd) \ + 2*my_einsum("ptsr,pt", np.conj(C["ledq"]), Ge) \ + 6*my_einsum("ptrs,pt", C["quqd1"], np.conj(Gu)) \ + my_einsum("rtps,pt", C["quqd1"], np.conj(Gu)) \ + 4/3*my_einsum("rtps,pt", C["quqd8"], np.conj(Gu))) \ + 2*(Eta1 \ + Eta2 \ + 1j*Eta5)*Gd \ + (C["phiD"] \ - 6*C["phiBox"])*Gd @ Gd.conj().T @ Gd \ + 2*C["phiq1"] @ Gd @ Gd.conj().T @ Gd \ + 6*C["phiq3"] @ Gu @ Gu.conj().T @ Gd \ - 2*Gd @ Gd.conj().T @ Gd @ C["phid"] \ - 2*Gu @ Gu.conj().T @ Gu @ C["phiud"] \ + 8*(my_einsum("rpts,pt", C["qd1"], Gd @ Gd.conj().T @ Gd) \ + 4/3*my_einsum("rpts,pt", C["qd8"], Gd @ Gd.conj().T @ Gd)) \ - 2*(my_einsum("rpts,pt", C["quqd1"], Gu.conj().T @ Gu @ Gu.conj().T) \ + 4/3*my_einsum("rpts,pt", C["quqd8"], Gu.conj().T @ Gu @ Gu.conj().T)) \ - 12*my_einsum("tprs,pt", C["quqd1"], Gu @ Gu.conj().T @ Gu) \ - 4*my_einsum("ptsr,pt", np.conj(C["ledq"]), Ge @ Ge.conj().T @ Ge) \ + 4*C["dphi"] @ Gd.conj().T @ Gd \ + 5*Gd @ Gd.conj().T @ C["dphi"] \ - 2*Gu @ C["uphi"].conj().T @ Gd \ - C["uphi"] @ Gu.conj().T @ Gd \ - 2*Gu @ Gu.conj().T @ C["dphi"] \ + 3*GammaH*C["dphi"] \ + Gammaq @ C["dphi"] \ + C["dphi"] @ Gammad #i Beta["ephi"] = (10/3*g**2*C["phiBox"] \ + 3/2*(gp**2 \ - g**2)*C["phiD"] \ + 9*g**2*(C["phiW"] \ + 1j*C["phiWtilde"]) \ + 15*gp**2*(C["phiB"] \ + 1j*C["phiBtilde"]) \ - 
3*g*gp*(C["phiWB"] \ + 1j*C["phiWtildeB"]) \ + 4/3*g**2*(np.trace(C["phil3"]) \ + 3*np.trace(C["phiq3"])))*Ge \ - 3/4*(7*gp**2 \ + 9*g**2)*C["ephi"] \ - 3*gp*(g**2 \ - 3*gp**2)*C["eB"] \ - 9*g*(g**2 \ - gp**2)*C["eW"] \ + 3*(g**2 \ - gp**2)*Ge @ C["phie"] \ - 6*gp**2*C["phil1"] @ Ge \ - 6*gp**2*C["phil3"] @ Ge \ + 9*gp*(C["eB"] @ Ge.conj().T @ Ge \ + Ge @ Ge.conj().T @ C["eB"]) \ - 3*g*(C["eW"] @ Ge.conj().T @ Ge \ - Ge @ Ge.conj().T @ C["eW"]) \ + Lambda*(12*C["ephi"] \ + 2*C["phil1"] @ Ge \ + 6*C["phil3"] @ Ge \ - 2*Ge @ C["phie"] \ - 2*C["phiBox"]*Ge \ + C["phiD"]*Ge \ - 4*my_einsum("rpts,pt", C["le"], Ge) \ + 6*my_einsum("rspt,tp", C["ledq"], Gd) \ - 6*my_einsum("rspt,pt", C["lequ1"], np.conj(Gu))) \ + 2*(Eta1 \ + Eta2 \ + 1j*Eta5)*Ge \ + (C["phiD"] \ - 6*C["phiBox"])*Ge @ Ge.conj().T @ Ge \ + 2*C["phil1"] @ Ge @ Ge.conj().T @ Ge \ - 2*Ge @ Ge.conj().T @ Ge @ C["phie"] \ + 8*my_einsum("rpts,pt", C["le"], Ge @ Ge.conj().T @ Ge) \ - 12*my_einsum("rspt,tp", C["ledq"], Gd @ Gd.conj().T @ Gd) \ + 12*my_einsum("rstp,pt", C["lequ1"], Gu.conj().T @ Gu @ Gu.conj().T) \ + 4*C["ephi"] @ Ge.conj().T @ Ge \ + 5*Ge @ Ge.conj().T @ C["ephi"] \ + 3*GammaH*C["ephi"] \ + Gammal @ C["ephi"] \ + C["ephi"] @ Gammae #i Beta["eW"] = 1/12*(3*gp**2 \ - 11*g**2)*C["eW"] \ - 1/2*g*gp*C["eB"] \ - (g*(C["phiW"] \ + 1j*C["phiWtilde"]) \ - 3/2*gp*(C["phiWB"] \ + 1j*C["phiWtildeB"]))*Ge \ - 6*g*my_einsum("rspt,pt", C["lequ3"], np.conj(Gu)) \ + C["eW"] @ Ge.conj().T @ Ge \ + GammaH*C["eW"] \ + Gammal @ C["eW"] \ + C["eW"] @ Gammae #i Beta["eB"] = 1/4*(151/3*gp**2 \ - 9*g**2)*C["eB"] \ - 3/2*g*gp*C["eW"] \ - (3/2*g*(C["phiWB"] \ + 1j*C["phiWtildeB"]) \ - 3*gp*(C["phiB"] \ + 1j*C["phiBtilde"]))*Ge \ + 10*gp*my_einsum("rspt,pt", C["lequ3"], np.conj(Gu)) \ + C["eB"] @ Ge.conj().T @ Ge \ + 2*Ge @ Ge.conj().T @ C["eB"] \ + GammaH*C["eB"] \ + Gammal @ C["eB"] \ + C["eB"] @ Gammae #i Beta["uG"] = -1/36*(81*g**2 \ + 19*gp**2 \ + 204*gs**2)*C["uG"] \ + 6*g*gs*C["uW"] \ + 10/3*gp*gs*C["uB"] \ - 
gs*(4*(C["phiG"] \ + 1j*C["phiGtilde"]) \ - 9*gs*(C["G"] \ + 1j*C["Gtilde"]))*Gu \ - gs*(my_einsum("psrt,pt", C["quqd1"], np.conj(Gd)) \ - 1/6*my_einsum("psrt,pt", C["quqd8"], np.conj(Gd))) \ + 2*Gu @ Gu.conj().T @ C["uG"] \ - 2*Gd @ Gd.conj().T @ C["uG"] \ - C["dG"] @ Gd.conj().T @ Gu \ + C["uG"] @ Gu.conj().T @ Gu \ + GammaH*C["uG"] \ + Gammaq @ C["uG"] \ + C["uG"] @ Gammau #i Beta["uW"] = -1/36*(33*g**2 \ + 19*gp**2 \ - 96*gs**2)*C["uW"] \ + 8/3*g*gs*C["uG"] \ - 1/6*g*gp*C["uB"] \ - (g*(C["phiW"] \ + 1j*C["phiWtilde"]) \ - 5/6*gp*(C["phiWB"] \ + 1j*C["phiWtildeB"]))*Gu \ + g/4*(my_einsum("psrt,pt", C["quqd1"], np.conj(Gd)) \ + 4/3*my_einsum("psrt,pt", C["quqd8"], np.conj(Gd))) \ - 2*g*my_einsum("ptrs,pt", C["lequ3"], np.conj(Ge)) \ + 2*Gd @ Gd.conj().T @ C["uW"] \ - C["dW"] @ Gd.conj().T @ Gu \ + C["uW"] @ Gu.conj().T @ Gu \ + GammaH*C["uW"] \ + Gammaq @ C["uW"] \ + C["uW"] @ Gammau #i Beta["uB"] = -1/36*(81*g**2 \ - 313*gp**2 \ - 96*gs**2)*C["uB"] \ + 40/9*gp*gs*C["uG"] \ - 1/2*g*gp*C["uW"] \ - (-3/2*g*(C["phiWB"] \ + 1j*C["phiWtildeB"]) \ + 5/3*gp*(C["phiB"] \ + 1j*C["phiBtilde"]))*Gu \ + gp/12*(my_einsum("psrt,pt", C["quqd1"], np.conj(Gd)) \ + 4/3*my_einsum("psrt,pt", C["quqd8"], np.conj(Gd))) \ - 6*gp*my_einsum("ptrs,pt", C["lequ3"], np.conj(Ge)) \ + 2*Gu @ Gu.conj().T @ C["uB"] \ - 2*Gd @ Gd.conj().T @ C["uB"] \ - C["dB"] @ Gd.conj().T @ Gu \ + C["uB"] @ Gu.conj().T @ Gu \ + GammaH*C["uB"] \ + Gammaq @ C["uB"] \ + C["uB"] @ Gammau #i Beta["dG"] = -1/36*(81*g**2 \ + 31*gp**2 \ + 204*gs**2)*C["dG"] \ + 6*g*gs*C["dW"] \ - 2/3*gp*gs*C["dB"] \ - gs*(4*(C["phiG"] \ + 1j*C["phiGtilde"]) \ - 9*gs*(C["G"] \ + 1j*C["Gtilde"]))*Gd \ - gs*(my_einsum("rtps,pt", C["quqd1"], np.conj(Gu)) \ - 1/6*my_einsum("rtps,pt", C["quqd8"], np.conj(Gu))) \ - 2*Gu @ Gu.conj().T @ C["dG"] \ + 2*Gd @ Gd.conj().T @ C["dG"] \ - C["uG"] @ Gu.conj().T @ Gd \ + C["dG"] @ Gd.conj().T @ Gd \ + GammaH*C["dG"] \ + Gammaq @ C["dG"] \ + C["dG"] @ Gammad #i Beta["dW"] = -1/36*(33*g**2 \ + 31*gp**2 \ 
- 96*gs**2)*C["dW"] \ + 8/3*g*gs*C["dG"] \ + 5/6*g*gp*C["dB"] \ - (g*(C["phiW"] \ + 1j*C["phiWtilde"]) \ - gp/6*(C["phiWB"] \ + 1j*C["phiWtildeB"]))*Gd \ + g/4*(my_einsum("rtps,pt", C["quqd1"], np.conj(Gu)) \ + 4/3*my_einsum("rtps,pt", C["quqd8"], np.conj(Gu))) \ + 2*Gu @ Gu.conj().T @ C["dW"] \ - C["uW"] @ Gu.conj().T @ Gd \ + C["dW"] @ Gd.conj().T @ Gd \ + GammaH*C["dW"] \ + Gammaq @ C["dW"] \ + C["dW"] @ Gammad #i Beta["dB"] = -1/36*(81*g**2 \ - 253*gp**2 \ - 96*gs**2)*C["dB"] \ - 8/9*gp*gs*C["dG"] \ + 5/2*g*gp*C["dW"] \ - (3/2*g*(C["phiWB"] \ + 1j*C["phiWtildeB"]) \ - gp/3*(C["phiB"] \ + 1j*C["phiBtilde"]))*Gd \ - 5/12*gp*(my_einsum("rtps,pt", C["quqd1"], np.conj(Gu)) \ + 4/3*my_einsum("rtps,pt", C["quqd8"], np.conj(Gu))) \ - 2*Gu @ Gu.conj().T @ C["dB"] \ + 2*Gd @ Gd.conj().T @ C["dB"] \ - C["uB"] @ Gu.conj().T @ Gd \ + C["dB"] @ Gd.conj().T @ Gd \ + GammaH*C["dB"] \ + Gammaq @ C["dB"] \ + C["dB"] @ Gammad #I3 #coefficient not equal with manual!!!!!! Beta["phil1"] = -1/4*XiB*gp**2*I3 \ + 1/3*gp**2*C["phil1"] \ - 2/3*gp**2*(my_einsum("rstt", C["ld"]) \ + my_einsum("rstt", C["le"]) \ + 2*my_einsum("rstt", C["ll"]) \ + my_einsum("rtts", C["ll"]) \ - my_einsum("rstt", C["lq1"]) \ - 2*my_einsum("rstt", C["lu"])) \ - 1/2*(C["phiBox"] \ + C["phiD"])*Ge @ Ge.conj().T \ - Ge @ C["phie"] @ Ge.conj().T \ + 3/2*(Ge @ Ge.conj().T @ C["phil1"] \ + C["phil1"] @ Ge @ Ge.conj().T \ + 3*Ge @ Ge.conj().T @ C["phil3"] \ + 3*C["phil3"] @ Ge @ Ge.conj().T) \ + 2*my_einsum("rspt,tp", C["le"], Ge.conj().T @ Ge) \ - 2*(2*my_einsum("rspt,tp", C["ll"], Ge @ Ge.conj().T) \ + my_einsum("rtps,tp", C["ll"], Ge @ Ge.conj().T)) \ - 6*my_einsum("rspt,tp", C["lq1"], Gd @ Gd.conj().T) \ + 6*my_einsum("rspt,tp", C["lq1"], Gu @ Gu.conj().T) \ - 6*my_einsum("rspt,tp", C["lu"], Gu.conj().T @ Gu) \ + 6*my_einsum("rspt,tp", C["ld"], Gd.conj().T @ Gd) \ + 2*GammaH*C["phil1"] \ + Gammal @ C["phil1"] \ + C["phil1"] @ Gammal #I3 #coefficient Beta["phil3"] = 2/3*g**2*(1/4*C["phiBox"] \ + 
np.trace(C["phil3"]) \ + 3*np.trace(C["phiq3"]))*I3 \ - 17/3*g**2*C["phil3"] \ + 2/3*g**2*my_einsum("rtts", C["ll"]) \ + 2*g**2*my_einsum("rstt", C["lq3"]) \ - 1/2*C["phiBox"]*Ge @ Ge.conj().T \ + 1/2*(3*Ge @ Ge.conj().T @ C["phil1"] \ + 3*C["phil1"] @ Ge @ Ge.conj().T \ + Ge @ Ge.conj().T @ C["phil3"] \ + C["phil3"] @ Ge @ Ge.conj().T) \ - 2*(my_einsum("rtps,tp", C["ll"], Ge @ Ge.conj().T)) \ - 6*my_einsum("rspt,tp", C["lq3"], Gd @ Gd.conj().T) \ - 6*my_einsum("rspt,tp", C["lq3"], Gu @ Gu.conj().T) \ + 2*GammaH*C["phil3"] \ + Gammal @ C["phil3"] \ + C["phil3"] @ Gammal #I3 #coefficient even terms not equal... Beta["phie"] = -1/2*XiB*gp**2*I3 \ + 1/3*gp**2*C["phie"] \ - 2/3*gp**2*(my_einsum("rstt", C["ed"]) \ + 4*my_einsum("rstt", C["ee"]) \ - 2*my_einsum("rstt", C["eu"]) \ + my_einsum("ttrs", C["le"]) \ - my_einsum("ttrs", C["qe"])) \ + (C["phiBox"] \ + C["phiD"])*Ge.conj().T @ Ge \ - 2*Ge.conj().T @ C["phil1"] @ Ge \ + 3*(Ge.conj().T @ Ge @ C["phie"] \ + C["phie"] @ Ge.conj().T @ Ge) \ - 2*my_einsum("ptrs,tp", C["le"], Ge @ Ge.conj().T) \ + 8*my_einsum("rspt,tp", C["ee"], Ge.conj().T @ Ge) \ - 6*my_einsum("rspt,tp", C["eu"], Gu.conj().T @ Gu) \ + 6*my_einsum("rspt,tp", C["ed"], Gd.conj().T @ Gd) \ - 6*my_einsum("ptrs,tp", C["qe"], Gd @ Gd.conj().T) \ + 6*my_einsum("ptrs,tp", C["qe"], Gu @ Gu.conj().T) \ + 2*GammaH*C["phie"] \ + Gammae @ C["phie"] \ + C["phie"] @ Gammae #I3 #coefficient??? 
Beta["phiq1"] = 1/12*XiB*gp**2*I3 \ + 1/3*gp**2*C["phiq1"] \ - 2/3*gp**2*(my_einsum("ttrs", C["lq1"]) \ + my_einsum("rstt", C["qd1"]) \ - 2*my_einsum("rstt", C["qu1"]) \ + my_einsum("rstt", C["qe"]) \ - 2*my_einsum("rstt", C["qq1"]) \ - 1/3*my_einsum("rtts", C["qq1"]) \ - my_einsum("rtts", C["qq3"])) \ + 1/2*(C["phiBox"] \ + C["phiD"])*(Gu @ Gu.conj().T \ - Gd @ Gd.conj().T) \ - Gu @ C["phiu"] @ Gu.conj().T \ - Gd @ C["phid"] @ Gd.conj().T \ + 2*my_einsum("rspt,tp", C["qe"], Ge.conj().T @ Ge) \ - 2*my_einsum("ptrs,tp", C["lq1"], Ge @ Ge.conj().T) \ + 3/2*(Gd @ Gd.conj().T @ C["phiq1"] \ + Gu @ Gu.conj().T @ C["phiq1"] \ + C["phiq1"] @ Gd @ Gd.conj().T \ + C["phiq1"] @ Gu @ Gu.conj().T \ + 3*Gd @ Gd.conj().T @ C["phiq3"] \ - 3*Gu @ Gu.conj().T @ C["phiq3"] \ + 3*C["phiq3"] @ Gd @ Gd.conj().T \ - 3*C["phiq3"] @ Gu @ Gu.conj().T) \ - 2*(6*my_einsum("ptrs,tp", C["qq1"], Gd @ Gd.conj().T) \ + my_einsum("psrt,tp", C["qq1"], Gd @ Gd.conj().T) \ + 3*my_einsum("psrt,tp", C["qq3"], Gd @ Gd.conj().T) \ - 6*my_einsum("ptrs,tp", C["qq1"], Gu @ Gu.conj().T) \ - my_einsum("psrt,tp", C["qq1"], Gu @ Gu.conj().T) \ - 3*my_einsum("psrt,tp", C["qq3"], Gu @ Gu.conj().T)) \ - 6*my_einsum("rspt,tp", C["qu1"], Gu.conj().T @ Gu) \ + 6*my_einsum("rspt,tp", C["qd1"], Gd.conj().T @ Gd) \ + 2*GammaH*C["phiq1"] \ + Gammaq @ C["phiq1"] \ + C["phiq1"] @ Gammaq #I3 #co Beta["phiq3"] = 2/3*g**2*(1/4*C["phiBox"] \ + np.trace(C["phil3"]) \ + 3*np.trace(C["phiq3"]))*I3 \ - 17/3*g**2*C["phiq3"] \ + 2/3*g**2*(my_einsum("ttrs", C["lq3"]) \ + my_einsum("rtts", C["qq1"]) \ + 6*my_einsum("rstt", C["qq3"]) \ - my_einsum("rtts", C["qq3"])) \ - 1/2*C["phiBox"]*(Gu @ Gu.conj().T \ + Gd @ Gd.conj().T) \ + 1/2*(3*Gd @ Gd.conj().T @ C["phiq1"] \ - 3*Gu @ Gu.conj().T @ C["phiq1"] \ + 3*C["phiq1"] @ Gd @ Gd.conj().T \ - 3*C["phiq1"] @ Gu @ Gu.conj().T \ + Gd @ Gd.conj().T @ C["phiq3"] \ + Gu @ Gu.conj().T @ C["phiq3"] \ + C["phiq3"] @ Gd @ Gd.conj().T \ + C["phiq3"] @ Gu @ Gu.conj().T) \ - 2*(6*my_einsum("rspt,tp", 
C["qq3"], Gd @ Gd.conj().T) \ + my_einsum("rtps,tp", C["qq1"], Gd @ Gd.conj().T) \ - my_einsum("rtps,tp", C["qq3"], Gd @ Gd.conj().T) \ + 6*my_einsum("rspt,tp", C["qq3"], Gu @ Gu.conj().T) \ + my_einsum("rtps,tp", C["qq1"], Gu @ Gu.conj().T) \ - my_einsum("rtps,tp", C["qq3"], Gu @ Gu.conj().T)) \ - 2*my_einsum("ptrs,tp", C["lq3"], Ge @ Ge.conj().T) \ + 2*GammaH*C["phiq3"] \ + Gammaq @ C["phiq3"] \ + C["phiq3"] @ Gammaq #I3 #co Beta["phiu"] = 1/3*XiB*gp**2*I3 \ + 1/3*gp**2*C["phiu"] \ - 2/3*gp**2*(my_einsum("ttrs", C["eu"]) \ + my_einsum("ttrs", C["lu"]) \ - my_einsum("ttrs", C["qu1"]) \ + my_einsum("rstt", C["ud1"]) \ - 4*my_einsum("rstt", C["uu"]) \ - 4/3*my_einsum("rtts", C["uu"])) \ - (C["phiBox"] \ + C["phiD"])*Gu.conj().T @ Gu \ - 2*Gu.conj().T @ C["phiq1"] @ Gu \ + 3*(Gu.conj().T @ Gu @ C["phiu"] \ + C["phiu"] @ Gu.conj().T @ Gu) \ + Gu.conj().T @ Gd @ C["phiud"].conj().T \ + C["phiud"] @ Gd.conj().T @ Gu \ - 4*(3*my_einsum("rspt,tp", C["uu"], Gu.conj().T @ Gu) \ + my_einsum("rtps,tp", C["uu"], Gu.conj().T @ Gu)) \ + 2*my_einsum("ptrs,tp", C["eu"], Ge.conj().T @ Ge) \ - 2*my_einsum("ptrs,tp", C["lu"], Ge @ Ge.conj().T) \ + 6*my_einsum("rspt,tp", C["ud1"], Gd.conj().T @ Gd) \ - 6*my_einsum("ptrs,tp", C["qu1"], Gd @ Gd.conj().T) \ + 6*my_einsum("ptrs,tp", C["qu1"], Gu @ Gu.conj().T) \ + 2*GammaH*C["phiu"] \ + Gammau @ C["phiu"] \ + C["phiu"] @ Gammau #I3 #co Beta["phid"] = -1/6*XiB*gp**2*I3 \ + 1/3*gp**2*C["phid"] \ - 2/3*gp**2*(2*my_einsum("rstt", C["dd"]) \ + 2/3*my_einsum("rtts", C["dd"]) \ + my_einsum("ttrs", C["ed"]) \ + my_einsum("ttrs", C["ld"]) \ - my_einsum("ttrs", C["qd1"]) \ - 2*my_einsum("ttrs", C["ud1"])) \ + (C["phiBox"] \ + C["phiD"])*Gd.conj().T @ Gd \ - 2*Gd.conj().T @ C["phiq1"] @ Gd \ + 3*(Gd.conj().T @ Gd @ C["phid"] \ + C["phid"] @ Gd.conj().T @ Gd) \ - Gd.conj().T @ Gu @ C["phiud"] \ - C["phiud"].conj().T @ Gu.conj().T @ Gd \ + 4*(3*my_einsum("rspt,tp", C["dd"], Gd.conj().T @ Gd) \ + my_einsum("rtps,tp", C["dd"], Gd.conj().T @ Gd)) \ + 
2*my_einsum("ptrs,tp", C["ed"], Ge.conj().T @ Ge) \ - 2*my_einsum("ptrs,tp", C["ld"], Ge @ Ge.conj().T) \ - 6*my_einsum("ptrs,tp", C["ud1"], Gu.conj().T @ Gu) \ - 6*my_einsum("ptrs,tp", C["qd1"], Gd @ Gd.conj().T) \ + 6*my_einsum("ptrs,tp", C["qd1"], Gu @ Gu.conj().T) \ + 2*GammaH*C["phid"] \ + Gammad @ C["phid"] \ + C["phid"] @ Gammad #co Beta["phiud"] = -3*gp**2*C["phiud"] \ + (2*C["phiBox"] \ - C["phiD"])*Gu.conj().T @ Gd \ - 2*Gu.conj().T @ Gd @ C["phid"] \ + 2*C["phiu"] @ Gu.conj().T @ Gd \ + 4*(my_einsum("rtps,tp", C["ud1"], Gu.conj().T @ Gd) \ + 4/3*my_einsum("rtps,tp", C["ud8"], Gu.conj().T @ Gd)) \ + 2*Gu.conj().T @ Gu @ C["phiud"] \ + 2*C["phiud"] @ Gd.conj().T @ Gd \ + 2*GammaH*C["phiud"] \ + Gammau @ C["phiud"] \ + C["phiud"] @ Gammad """Dimension-5""" Beta["llphiphi"] = (2*Lambda \ - 3*g**2 \ + 2*GammaH)*C["llphiphi"]-3/2*(C["llphiphi"] @ Ge @ Ge.conj().T \ + Ge.conj() @ Ge.T @ C["llphiphi"]) """(3,3,3,3)""" # the einsum function is strong Beta["ll"] = -1/6*gp**2*my_einsum("st,pr", C["phil1"], I3) \ - 1/6*g**2*(my_einsum("st,pr", C["phil3"], I3) \ - 2*my_einsum("sr,pt", C["phil3"], I3)) \ + 1/3*gp**2*(2*my_einsum("prww,st", C["ll"], I3) \ + my_einsum("pwwr,st", C["ll"], I3)) \ - 1/3*g**2*my_einsum("pwwr,st", C["ll"], I3) \ + 2/3*g**2*my_einsum("swwr,pt", C["ll"], I3) \ - 1/3*gp**2*my_einsum("prww,st", C["lq1"], I3) \ - g**2*my_einsum("prww,st", C["lq3"], I3) \ + 2*g**2*my_einsum("ptww,rs", C["lq3"], I3) \ + 1/3*gp**2*( \ - 2*my_einsum("prww,st", C["lu"], I3) \ + my_einsum("prww,st", C["ld"], I3) \ + my_einsum("prww,st", C["le"], I3)) \ - 1/2*(my_einsum("pr,st", Ge @ Ge.conj().T, C["phil1"]) \ - my_einsum("pr,st", Ge @ Ge.conj().T, C["phil3"])) \ - my_einsum("pt,sr", Ge @ Ge.conj().T, C["phil3"]) \ - 1/2*my_einsum("sv,tw,prvw", Ge, np.conj(Ge), C["le"]) \ + my_einsum("pv,vrst", Gammal, C["ll"]) \ + my_einsum("pvst,vr", C["ll"], Gammal) \ - 1/6*gp**2*my_einsum("pr,st", C["phil1"], I3) \ - 1/6*g**2*(my_einsum("pr,st", C["phil3"], I3) \ - 
2*my_einsum("pt,sr", C["phil3"], I3)) \ + 1/3*gp**2*(2*my_einsum("stww,pr", C["ll"], I3) \ + my_einsum("swwt,pr", C["ll"], I3)) \ - 1/3*g**2*my_einsum("swwt,pr", C["ll"], I3) \ + 2/3*g**2*my_einsum("pwwt,sr", C["ll"], I3) \ - 1/3*gp**2*my_einsum("stww,pr", C["lq1"], I3) \ - g**2*my_einsum("stww,pr", C["lq3"], I3) \ + 2*g**2*my_einsum("srww,tp", C["lq3"], I3) \ + 1/3*gp**2*( \ - 2*my_einsum("stww,pr", C["lu"], I3) \ + my_einsum("stww,pr", C["ld"], I3) \ + my_einsum("stww,pr", C["le"], I3)) \ - 1/2*(my_einsum("st,pr", Ge @ Ge.conj().T, C["phil1"]) \ - my_einsum("st,pr", Ge @ Ge.conj().T, C["phil3"])) \ - my_einsum("sr,pt", Ge @ Ge.conj().T, C["phil3"]) \ - 1/2*my_einsum("pv,rw,stvw", Ge, np.conj(Ge), C["le"]) \ + my_einsum("sv,vtpr", Gammal, C["ll"]) \ + my_einsum("svpr,vt", C["ll"], Gammal) \ + 6*g**2*my_einsum("ptsr", C["ll"]) \ + 3*(gp**2 \ - g**2)*my_einsum("prst", C["ll"]) Beta["qq1"] = 1/18*gp**2*my_einsum("st,pr", C["phiq1"], I3) \ - 1/9*gp**2*my_einsum("wwst,pr", C["lq1"], I3) \ + 1/9*gp**2*(2*my_einsum("prww,st", C["qq1"], I3) \ + 1/3*(my_einsum("pwwr,st", C["qq1"], I3) \ + 3*my_einsum("pwwr,st", C["qq3"], I3))) \ + 1/3*gs**2*(my_einsum("swwr,pt", C["qq1"], I3) \ + 3*my_einsum("swwr,pt", C["qq3"], I3)) \ - 2/9*gs**2*(my_einsum("pwwr,st", C["qq1"], I3) \ + 3*my_einsum("pwwr,st", C["qq3"], I3)) \ + 2/9*gp**2*my_einsum("prww,st", C["qu1"], I3) \ - 1/9*gp**2*my_einsum("prww,st", C["qd1"], I3) \ + 1/12*gs**2*(my_einsum("srww,pt", C["qu8"], I3) \ + my_einsum("srww,pt", C["qd8"], I3)) \ - 1/18*gs**2*(my_einsum("prww,st", C["qu8"], I3) \ + my_einsum("prww,st", C["qd8"], I3)) \ - 1/9*gp**2*my_einsum("prww,st", C["qe"], I3) \ + 1/2*(my_einsum("pr,st", Gu @ Gu.conj().T, C["phiq1"]) \ - my_einsum("pr,st", Gd @ Gd.conj().T, C["phiq1"])) \ - 1/2*(my_einsum("pv,rw,stvw", Gu, np.conj(Gu), C["qu1"]) \ - 1/6*my_einsum("pv,rw,stvw", Gu, np.conj(Gu), C["qu8"])) \ - 1/2*(my_einsum("pv,rw,stvw", Gd, np.conj(Gd), C["qd1"]) \ - 1/6*my_einsum("pv,rw,stvw", Gd, np.conj(Gd), 
C["qd8"])) \ - 1/8*(my_einsum("pv,tw,srvw", Gu, np.conj(Gu), C["qu8"]) \ + my_einsum("pv,tw,srvw", Gd, np.conj(Gd), C["qd8"])) \ - 1/8*(my_einsum("tw,rv,pvsw", np.conj(Gd), np.conj(Gu), C["quqd1"]) \ - 1/6*my_einsum("tw,rv,pvsw", np.conj(Gd), np.conj(Gu), C["quqd8"])) \ - 1/8*(my_einsum("sw,pv,rvtw", Gd, Gu, np.conj(C["quqd1"])) \ - 1/6*my_einsum("sw,pv,rvtw", Gd, Gu, np.conj(C["quqd8"]))) \ + 1/16*(my_einsum("tw,rv,svpw", np.conj(Gd), np.conj(Gu), C["quqd8"]) \ + my_einsum("sw,pv,tvrw", Gd, Gu, np.conj(C["quqd8"]))) \ + my_einsum("pv,vrst", Gammaq, C["qq1"]) \ + my_einsum("pvst,vr", C["qq1"], Gammaq) \ + 1/18*gp**2*my_einsum("pr,st", C["phiq1"], I3) \ - 1/9*gp**2*my_einsum("wwpr,st", C["lq1"], I3) \ + 1/9*gp**2*(2*my_einsum("stww,pr", C["qq1"], I3) \ + 1/3*(my_einsum("swwt,pr", C["qq1"], I3) \ + 3*my_einsum("swwt,pr", C["qq3"], I3))) \ + 1/3*gs**2*(my_einsum("pwwt,sr", C["qq1"], I3) \ + 3*my_einsum("pwwt,sr", C["qq3"], I3)) \ - 2/9*gs**2*(my_einsum("swwt,pr", C["qq1"], I3) \ + 3*my_einsum("swwt,pr", C["qq3"], I3)) \ + 2/9*gp**2*my_einsum("stww,pr", C["qu1"], I3) \ - 1/9*gp**2*my_einsum("stww,pr", C["qd1"], I3) \ + 1/12*gs**2*(my_einsum("ptww,sr", C["qu8"], I3) \ + my_einsum("ptww,sr", C["qd8"], I3)) \ - 1/18*gs**2*(my_einsum("stww,pr", C["qu8"], I3) \ + my_einsum("stww,pr", C["qd8"], I3)) \ - 1/9*gp**2*my_einsum("stww,pr", C["qe"], I3) \ + 1/2*(my_einsum("st,pr", Gu @ Gu.conj().T, C["phiq1"]) \ - my_einsum("st,pr", Gd @ Gd.conj().T, C["phiq1"])) \ - 1/2*(my_einsum("sv,tw,prvw", Gu, np.conj(Gu), C["qu1"]) \ - 1/6*my_einsum("sv,tw,prvw", Gu, np.conj(Gu), C["qu8"])) \ - 1/2*(my_einsum("sv,tw,prvw", Gd, np.conj(Gd), C["qd1"]) \ - 1/6*my_einsum("sv,tw,prvw", Gd, np.conj(Gd), C["qd8"])) \ - 1/8*(my_einsum("sv,rw,ptvw", Gu, np.conj(Gu), C["qu8"]) \ + my_einsum("sv,rw,ptvw", Gd, np.conj(Gd), C["qd8"])) \ - 1/8*(my_einsum("rw,tv,svpw", np.conj(Gd), np.conj(Gu), C["quqd1"]) \ - 1/6*my_einsum("rw,tv,svpw", np.conj(Gd), np.conj(Gu), C["quqd8"])) \ - 
1/8*(my_einsum("pw,sv,tvrw", Gd, Gu, np.conj(C["quqd1"])) \ - 1/6*my_einsum("pw,sv,tvrw", Gd, Gu, np.conj(C["quqd8"]))) \ + 1/16*(my_einsum("rw,tv,pvsw", np.conj(Gd), np.conj(Gu), C["quqd8"]) \ + my_einsum("pw,sv,rvtw", Gd, Gu, np.conj(C["quqd8"]))) \ + my_einsum("sv,vtpr", Gammaq, C["qq1"]) \ + my_einsum("svpr,vt", C["qq1"], Gammaq) \ + 9*g**2*my_einsum("prst", C["qq3"]) \ - 2*(gs**2 \ - 1/6*gp**2)*my_einsum("prst", C["qq1"]) \ + 3*gs**2*(my_einsum("ptsr", C["qq1"]) \ + 3*my_einsum("ptsr", C["qq3"])) Beta["qq3"] = 1/6*g**2*my_einsum("st,pr", C["phiq3"], I3) \ + 1/3*g**2*my_einsum("wwst,pr", C["lq3"], I3) \ + 1/3*g**2*(my_einsum("pwwr,st", C["qq1"], I3) \ - my_einsum("pwwr,st", C["qq3"], I3)) \ + 2*g**2*my_einsum("prww,st", C["qq3"], I3) \ + 1/3*gs**2*(my_einsum("swwr,pt", C["qq1"], I3) \ + 3*my_einsum("swwr,pt", C["qq3"], I3)) \ + 1/12*gs**2*(my_einsum("srww,pt", C["qu8"], I3) \ + my_einsum("srww,pt", C["qd8"], I3)) \ - 1/2*(my_einsum("pr,st", Gu @ Gu.conj().T, C["phiq3"]) \ + my_einsum("pr,st", Gd @ Gd.conj().T, C["phiq3"])) \ - 1/8*(my_einsum("pv,tw,srvw", Gu, np.conj(Gu), C["qu8"]) \ + my_einsum("pv,tw,srvw", Gd, np.conj(Gd), C["qd8"])) \ + 1/8*(my_einsum("tw,rv,pvsw", np.conj(Gd), np.conj(Gu), C["quqd1"]) \ - 1/6*my_einsum("tw,rv,pvsw", np.conj(Gd), np.conj(Gu), C["quqd8"])) \ + 1/8*(my_einsum("sw,pv,rvtw", Gd, Gu, np.conj(C["quqd1"])) \ - 1/6*my_einsum("sw,pv,rvtw", Gd, Gu, np.conj(C["quqd8"]))) \ - 1/16*(my_einsum("tw,rv,svpw", np.conj(Gd), np.conj(Gu), C["quqd8"]) \ + my_einsum("sw,pv,tvrw", Gd, Gu, np.conj(C["quqd8"]))) \ + my_einsum("pv,vrst", Gammaq, C["qq3"]) \ + my_einsum("pvst,vr", C["qq3"], Gammaq) \ + 1/6*g**2*my_einsum("pr,st", C["phiq3"], I3) \ + 1/3*g**2*my_einsum("wwpr,st", C["lq3"], I3) \ + 1/3*g**2*(my_einsum("swwt,pr", C["qq1"], I3) \ - my_einsum("swwt,pr", C["qq3"], I3)) \ + 2*g**2*my_einsum("stww,pr", C["qq3"], I3) \ + 1/3*gs**2*(my_einsum("pwwt,sr", C["qq1"], I3) \ + 3*my_einsum("pwwt,sr", C["qq3"], I3)) \ + 
1/12*gs**2*(my_einsum("ptww,sr", C["qu8"], I3) \ + my_einsum("ptww,sr", C["qd8"], I3)) \ - 1/2*(my_einsum("st,pr", Gu @ Gu.conj().T, C["phiq3"]) \ + my_einsum("st,pr", Gd @ Gd.conj().T, C["phiq3"])) \ - 1/8*(my_einsum("sv,rw,ptvw", Gu, np.conj(Gu), C["qu8"]) \ + my_einsum("sv,rw,ptvw", Gd, np.conj(Gd), C["qd8"])) \ + 1/8*(my_einsum("rw,tv,svpw", np.conj(Gd), np.conj(Gu), C["quqd1"]) \ - 1/6*my_einsum("rw,tv,svpw", np.conj(Gd), np.conj(Gu), C["quqd8"])) \ + 1/8*(my_einsum("pw,sv,tvrw", Gd, Gu, np.conj(C["quqd1"])) \ - 1/6*my_einsum("pw,sv,tvrw", Gd, Gu, np.conj(C["quqd8"]))) \ - 1/16*(my_einsum("rw,tv,pvsw", np.conj(Gd), np.conj(Gu), C["quqd8"]) \ + my_einsum("pw,sv,rvtw", Gd, Gu, np.conj(C["quqd8"]))) \ + my_einsum("sv,vtpr", Gammaq, C["qq3"]) \ + my_einsum("svpr,vt", C["qq3"], Gammaq) \ + 3*gs**2*(my_einsum("ptsr", C["qq1"]) \ - my_einsum("ptsr", C["qq3"])) \ - 2*(gs**2 \ + 3*g**2 \ - 1/6*gp**2)*my_einsum("prst", C["qq3"]) \ + 3*g**2*my_einsum("prst", C["qq1"]) #the terms are equal, but the order is not. 
No wonder if you check some differences inside Beta["lq1"] = -1/3*gp**2*my_einsum("st,pr", C["phiq1"], I3) \ + 1/9*gp**2*my_einsum("pr,st", C["phil1"], I3) \ - 2/9*gp**2*(2*my_einsum("prww,st", C["ll"], I3) \ + my_einsum("pwwr,st", C["ll"], I3)) \ + 2/9*gp**2*my_einsum("prww,st", C["lq1"], I3) \ + 2/3*gp**2*my_einsum("wwst,pr", C["lq1"], I3) \ - 2/9*gp**2*(6*my_einsum("stww,pr", C["qq1"], I3) \ + my_einsum("swwt,pr", C["qq1"], I3) \ + 3*my_einsum("swwt,pr", C["qq3"], I3)) \ - 2/3*gp**2*(2*my_einsum("stww,pr", C["qu1"], I3) \ - my_einsum("stww,pr", C["qd1"], I3) \ - my_einsum("stww,pr", C["qe"], I3)) \ + 2/9*gp**2*(2*my_einsum("prww,st", C["lu"], I3) \ - my_einsum("prww,st", C["ld"], I3) \ - my_einsum("prww,st", C["le"], I3)) \ - gp**2*my_einsum("prst", C["lq1"]) \ + 9*g**2*my_einsum("prst", C["lq3"]) \ - my_einsum("pr,st", Ge @ Ge.conj().T, C["phiq1"]) \ + my_einsum("st,pr", Gu @ Gu.conj().T, C["phil1"]) \ - my_einsum("st,pr", Gd @ Gd.conj().T, C["phil1"]) \ + 1/4*(my_einsum("tw,rv,pvsw", np.conj(Gu), np.conj(Ge), C["lequ1"]) \ - 12*my_einsum("tw,rv,pvsw", np.conj(Gu), np.conj(Ge), C["lequ3"]) \ + my_einsum("sw,pv,rvtw", Gu, Ge, np.conj(C["lequ1"])) \ - 12*my_einsum("sw,pv,rvtw", Gu, Ge, np.conj(C["lequ3"]))) \ - my_einsum("sv,tw,prvw", Gu, np.conj(Gu), C["lu"]) \ - my_einsum("sv,tw,prvw", Gd, np.conj(Gd), C["ld"]) \ - my_einsum("pv,rw,stvw", Ge, np.conj(Ge), C["qe"]) \ + 1/4*(my_einsum("sw,rv,pvwt", Gd, np.conj(Ge), C["ledq"]) \ + my_einsum("pv,tw,rvws", Ge, np.conj(Gd), np.conj(C["ledq"]))) \ + my_einsum("pv,vrst", Gammal, C["lq1"]) \ + my_einsum("sv,prvt", Gammaq, C["lq1"]) \ + my_einsum("pvst,vr", C["lq1"], Gammal) \ + my_einsum("prsv,vt", C["lq1"], Gammaq) Beta["lq3"] = 1/3*g**2*(my_einsum("st,pr", C["phiq3"], I3) \ + my_einsum("pr,st", C["phil3"], I3)) \ + 2/3*g**2*(3*my_einsum("prww,st", C["lq3"], I3) \ + my_einsum("wwst,pr", C["lq3"], I3)) \ + 2/3*g**2*(6*my_einsum("stww,pr", C["qq3"], I3) \ + my_einsum("swwt,pr", C["qq1"], I3) \ - my_einsum("swwt,pr", 
C["qq3"], I3)) \ + 2/3*g**2*my_einsum("pwwr,st", C["ll"], I3) \ + 3*g**2*my_einsum("prst", C["lq1"]) \ - (6*g**2 \ + gp**2)*my_einsum("prst", C["lq3"]) \ - my_einsum("pr,st", Ge @ Ge.conj().T, C["phiq3"]) \ - my_einsum("st,pr", Gu @ Gu.conj().T, C["phil3"]) \ - my_einsum("st,pr", Gd @ Gd.conj().T, C["phil3"]) \ - 1/4*(my_einsum("tw,rv,pvsw", np.conj(Gu), np.conj(Ge), C["lequ1"]) \ - 12*my_einsum("tw,rv,pvsw", np.conj(Gu), np.conj(Ge), C["lequ3"]) \ + my_einsum("sw,pv,rvtw", Gu, Ge, np.conj(C["lequ1"])) \ - 12*my_einsum("sw,pv,rvtw", Gu, Ge, np.conj(C["lequ3"]))) \ + 1/4*(my_einsum("sw,rv,pvwt", Gd, np.conj(Ge), C["ledq"]) \ + my_einsum("pv,tw,rvws", Ge, np.conj(Gd), np.conj(C["ledq"]))) \ + my_einsum("pv,vrst", Gammal, C["lq3"]) \ + my_einsum("sv,prvt", Gammaq, C["lq3"]) \ + my_einsum("pvst,vr", C["lq3"], Gammal) \ + my_einsum("prsv,vt", C["lq3"], Gammaq) #order Beta["ee"] = -1/3*gp**2*my_einsum("st,pr", C["phie"], I3) \ + 2/3*gp**2*(my_einsum("wwpr,st", C["le"], I3) \ - my_einsum("wwpr,st", C["qe"], I3) \ - 2*my_einsum("prww,st", C["eu"], I3) \ + my_einsum("prww,st", C["ed"], I3) \ + 4*my_einsum("prww,st", C["ee"], I3)) \ + my_einsum("pr,st", Ge.conj().T @ Ge, C["phie"]) \ - my_einsum("wr,vp,vwst", Ge, np.conj(Ge), C["le"]) \ + my_einsum("pv,vrst", Gammae, C["ee"]) \ + my_einsum("pvst,vr", C["ee"], Gammae) \ - 1/3*gp**2*my_einsum("pr,st", C["phie"], I3) \ + 2/3*gp**2*(my_einsum("wwst,pr", C["le"], I3) \ - my_einsum("wwst,pr", C["qe"], I3) \ - 2*my_einsum("stww,pr", C["eu"], I3) \ + my_einsum("stww,pr", C["ed"], I3) \ + 4*my_einsum("wwst,pr", C["ee"], I3)) \ + my_einsum("st,pr", Ge.conj().T @ Ge, C["phie"]) \ - my_einsum("wt,vs,vwpr", Ge, np.conj(Ge), C["le"]) \ + my_einsum("sv,vtpr", Gammae, C["ee"]) \ + my_einsum("svpr,vt", C["ee"], Gammae) \ + 12*gp**2*my_einsum("prst", C["ee"]) #order Beta["uu"] = 2/9*gp**2*my_einsum("st,pr", C["phiu"], I3) \ - 4/9*gp**2*(my_einsum("wwst,pr", C["eu"], I3) \ + my_einsum("wwst,pr", C["lu"], I3) \ - my_einsum("wwst,pr", C["qu1"], 
I3) \ - 4*my_einsum("wwst,pr", C["uu"], I3) \ - 4/3*my_einsum("swwt,pr", C["uu"], I3)) \ - 1/9*gs**2*(my_einsum("wwst,pr", C["qu8"], I3) \ - 3*my_einsum("wwsr,pt", C["qu8"], I3)) \ + 2/3*gs**2*my_einsum("pwwt,rs", C["uu"], I3) \ - 2/9*gs**2*my_einsum("swwt,pr", C["uu"], I3) \ - 4/9*gp**2*my_einsum("stww,pr", C["ud1"], I3) \ - 1/18*gs**2*(my_einsum("stww,pr", C["ud8"], I3) \ - 3*my_einsum("srww,pt", C["ud8"], I3)) \ - my_einsum("pr,st", Gu.conj().T @ Gu, C["phiu"]) \ - (my_einsum("wr,vp,vwst", Gu, np.conj(Gu), C["qu1"]) \ - 1/6*my_einsum("wr,vp,vwst", Gu, np.conj(Gu), C["qu8"])) \ - 1/2*my_einsum("wr,vs,vwpt", Gu, np.conj(Gu), C["qu8"]) \ + my_einsum("pv,vrst", Gammau, C["uu"]) \ + my_einsum("pvst,vr", C["uu"], Gammau) \ + 2/9*gp**2*my_einsum("pr,st", C["phiu"], I3) \ - 4/9*gp**2*(my_einsum("wwpr,st", C["eu"], I3) \ + my_einsum("wwpr,st", C["lu"], I3) \ - my_einsum("wwpr,st", C["qu1"], I3) \ - 4*my_einsum("wwpr,st", C["uu"], I3) \ - 4/3*my_einsum("pwwr,st", C["uu"], I3)) \ - 1/9*gs**2*(my_einsum("wwpr,st", C["qu8"], I3) \ - 3*my_einsum("wwpt,sr", C["qu8"], I3)) \ + 2/3*gs**2*my_einsum("swwr,tp", C["uu"], I3) \ - 2/9*gs**2*my_einsum("pwwr,st", C["uu"], I3) \ - 4/9*gp**2*my_einsum("prww,st", C["ud1"], I3) \ - 1/18*gs**2*(my_einsum("prww,st", C["ud8"], I3) \ - 3*my_einsum("ptww,sr", C["ud8"], I3)) \ - my_einsum("st,pr", Gu.conj().T @ Gu, C["phiu"]) \ - (my_einsum("wt,vs,vwpr", Gu, np.conj(Gu), C["qu1"]) \ - 1/6*my_einsum("wt,vs,vwpr", Gu, np.conj(Gu), C["qu8"])) \ - 1/2*my_einsum("wt,vp,vwsr", Gu, np.conj(Gu), C["qu8"]) \ + my_einsum("sv,vtpr", Gammau, C["uu"]) \ + my_einsum("svpr,vt", C["uu"], Gammau) \ + 2*(8/3*gp**2 \ - gs**2)*my_einsum("prst", C["uu"]) \ + 6*gs**2*my_einsum("ptsr", C["uu"]) #order Beta["dd"] = -1/9*gp**2*my_einsum("st,pr", C["phid"], I3) \ + 2/9*gp**2*(my_einsum("wwst,pr", C["ed"], I3) \ + my_einsum("wwst,pr", C["ld"], I3) \ - my_einsum("wwst,pr", C["qd1"], I3) \ + 2*my_einsum("wwst,pr", C["dd"], I3) \ + 2/3*my_einsum("swwt,pr", C["dd"], I3)) \ - 
1/9*gs**2*(my_einsum("wwst,pr", C["qd8"], I3) \ - 3*my_einsum("wwsr,pt", C["qd8"], I3)) \ + 2/3*gs**2*my_einsum("pwwt,rs", C["dd"], I3) \ - 2/9*gs**2*my_einsum("swwt,pr", C["dd"], I3) \ - 4/9*gp**2*my_einsum("wwst,pr", C["ud1"], I3) \ - 1/18*gs**2*(my_einsum("wwst,pr", C["ud8"], I3) \ - 3*my_einsum("wwsr,pt", C["ud8"], I3)) \ + my_einsum("pr,st", Gd.conj().T @ Gd, C["phid"]) \ - (my_einsum("wr,vp,vwst", Gd, np.conj(Gd), C["qd1"]) \ - 1/6*my_einsum("wr,vp,vwst", Gd, np.conj(Gd), C["qd8"])) \ - 1/2*my_einsum("wr,vs,vwpt", Gd, np.conj(Gd), C["qd8"]) \ + my_einsum("pv,vrst", Gammad, C["dd"]) \ + my_einsum("pvst,vr", C["dd"], Gammad) \ - 1/9*gp**2*my_einsum("pr,st", C["phid"], I3) \ + 2/9*gp**2*(my_einsum("wwpr,st", C["ed"], I3) \ + my_einsum("wwpr,st", C["ld"], I3) \ - my_einsum("wwpr,st", C["qd1"], I3) \ + 2*my_einsum("wwpr,st", C["dd"], I3) \ + 2/3*my_einsum("pwwr,st", C["dd"], I3)) \ - 1/9*gs**2*(my_einsum("wwpr,st", C["qd8"], I3) \ - 3*my_einsum("wwpt,sr", C["qd8"], I3)) \ + 2/3*gs**2*my_einsum("swwr,tp", C["dd"], I3) \ - 2/9*gs**2*my_einsum("pwwr,st", C["dd"], I3) \ - 4/9*gp**2*my_einsum("wwpr,st", C["ud1"], I3) \ - 1/18*gs**2*(my_einsum("wwpr,st", C["ud8"], I3) \ - 3*my_einsum("wwpt,sr", C["ud8"], I3)) \ + my_einsum("st,pr", Gd.conj().T @ Gd, C["phid"]) \ - (my_einsum("wt,vs,vwpr", Gd, np.conj(Gd), C["qd1"]) \ - 1/6*my_einsum("wt,vs,vwpr", Gd, np.conj(Gd), C["qd8"])) \ - 1/2*my_einsum("wt,vp,vwsr", Gd, np.conj(Gd), C["qd8"]) \ + my_einsum("sv,vtpr", Gammad, C["dd"]) \ + my_einsum("svpr,vt", C["dd"], Gammad) \ + 2*(2/3*gp**2 \ - gs**2)*my_einsum("prst", C["dd"]) \ + 6*gs**2*my_einsum("ptsr", C["dd"]) Beta["eu"] = -2/3*gp**2*(my_einsum("st,pr", C["phiu"], I3) \ + 2*(my_einsum("wwst,pr", C["qu1"], I3) \ - my_einsum("wwst,pr", C["lu"], I3) \ + 4*my_einsum("wwst,pr", C["uu"], I3) \ - my_einsum("wwst,pr", C["eu"], I3) \ - my_einsum("stww,pr", C["ud1"], I3)) \ + 8/3*my_einsum("swwt,pr", C["uu"], I3)) \ + 4/9*gp**2*(my_einsum("pr,st", C["phie"], I3) \ + 
2*(my_einsum("wwpr,st", C["qe"], I3) \ - my_einsum("wwpr,st", C["le"], I3) \ - 4*my_einsum("prww,st", C["ee"], I3) \ + 2*my_einsum("prww,st", C["eu"], I3) \ - my_einsum("prww,st", C["ed"], I3))) \ - 8*gp**2*my_einsum("prst", C["eu"]) \ + 2*my_einsum("pr,st", Ge.conj().T @ Ge, C["phiu"]) \ - 2*my_einsum("st,pr", Gu.conj().T @ Gu, C["phie"]) \ + my_einsum("vp,ws,vrwt", np.conj(Ge), np.conj(Gu), C["lequ1"]) \ - 12*my_einsum("vp,ws,vrwt", np.conj(Ge), np.conj(Gu), C["lequ3"]) \ + my_einsum("vr,wt,vpws", Ge, Gu, np.conj(C["lequ1"])) \ - 12*my_einsum("vr,wt,vpws", Ge, Gu, np.conj(C["lequ3"])) \ - 2*my_einsum("vp,wr,vwst", np.conj(Ge), Ge, C["lu"]) \ - 2*my_einsum("vs,wt,vwpr", np.conj(Gu), Gu, C["qe"]) \ + my_einsum("pv,vrst", Gammae, C["eu"]) \ + my_einsum("sv,prvt", Gammau, C["eu"]) \ + my_einsum("pvst,vr", C["eu"], Gammae) \ + my_einsum("prsv,vt", C["eu"], Gammau) Beta["ed"] = -2/3*gp**2*(my_einsum("st,pr", C["phid"], I3) \ + 2*(my_einsum("wwst,pr", C["qd1"], I3) \ - my_einsum("wwst,pr", C["ld"], I3) \ - 2*my_einsum("wwst,pr", C["dd"], I3) \ - my_einsum("wwst,pr", C["ed"], I3) \ + 2*my_einsum("wwst,pr", C["ud1"], I3)) \ - 4/3*my_einsum("swwt,pr", C["dd"], I3)) \ - 2/9*gp**2*(my_einsum("pr,st", C["phie"], I3) \ + 2*(my_einsum("wwpr,st", C["qe"], I3) \ - my_einsum("wwpr,st", C["le"], I3) \ - 4*my_einsum("prww,st", C["ee"], I3) \ - my_einsum("prww,st", C["ed"], I3) \ + 2*my_einsum("prww,st", C["eu"], I3))) \ + 4*gp**2*my_einsum("prst", C["ed"]) \ + 2*my_einsum("pr,st", Ge.conj().T @ Ge, C["phid"]) \ + 2*my_einsum("st,pr", Gd.conj().T @ Gd, C["phie"]) \ - 2*my_einsum("vp,wr,vwst", np.conj(Ge), Ge, C["ld"]) \ - 2*my_einsum("vs,wt,vwpr", np.conj(Gd), Gd, C["qe"]) \ + my_einsum("vp,wt,vrsw", np.conj(Ge), Gd, C["ledq"]) \ + my_einsum("vr,ws,vptw", Ge, np.conj(Gd), np.conj(C["ledq"])) \ + my_einsum("pv,vrst", Gammae, C["ed"]) \ + my_einsum("sv,prvt", Gammad, C["ed"]) \ + my_einsum("pvst,vr", C["ed"], Gammae) \ + my_einsum("prsv,vt", C["ed"], Gammad) #order Beta["ud1"] = 
4/9*gp**2*(my_einsum("st,pr", C["phid"], I3) \ + 2*(my_einsum("wwst,pr", C["qd1"], I3) \ - my_einsum("wwst,pr", C["ld"], I3) \ - 2*my_einsum("wwst,pr", C["dd"], I3) \ + 2*my_einsum("wwst,pr", C["ud1"], I3) \ - my_einsum("wwst,pr", C["ed"], I3)) \ - 4/3*my_einsum("swwt,pr", C["dd"], I3)) \ - 2/9*gp**2*(my_einsum("pr,st", C["phiu"], I3) \ + 2*(my_einsum("wwpr,st", C["qu1"], I3) \ - my_einsum("wwpr,st", C["lu"], I3) \ + 4*my_einsum("wwpr,st", C["uu"], I3) \ - my_einsum("prww,st", C["ud1"], I3) \ - my_einsum("wwpr,st", C["eu"], I3)) \ + 8/3*my_einsum("pwwr,st", C["uu"], I3)) \ - 8/3*(gp**2*my_einsum("prst", C["ud1"]) \ - gs**2*my_einsum("prst", C["ud8"])) \ - 2*my_einsum("pr,st", Gu.conj().T @ Gu, C["phid"]) \ + 2*my_einsum("st,pr", Gd.conj().T @ Gd, C["phiu"]) \ + 2/3*my_einsum("sr,pt", Gd.conj().T @ Gu, C["phiud"]) \ + 2/3*my_einsum("pt,rs", Gu.conj().T @ Gd, np.conj(C["phiud"])) \ + 1/3*(my_einsum("vs,wp,vrwt", np.conj(Gd), np.conj(Gu), C["quqd1"]) \ + 4/3*my_einsum("vs,wp,vrwt", np.conj(Gd), np.conj(Gu), C["quqd8"]) \ + my_einsum("vt,wr,vpws", Gd, Gu, np.conj(C["quqd1"])) \ + 4/3*my_einsum("vt,wr,vpws", Gd, Gu, np.conj(C["quqd8"]))) \ - my_einsum("ws,vp,vrwt", np.conj(Gd), np.conj(Gu), C["quqd1"]) \ - my_einsum("wt,vr,vpws", Gd, Gu, np.conj(C["quqd1"])) \ - 2*my_einsum("vp,wr,vwst", np.conj(Gu), Gu, C["qd1"]) \ - 2*my_einsum("vs,wt,vwpr", np.conj(Gd), Gd, C["qu1"]) \ + my_einsum("pv,vrst", Gammau, C["ud1"]) \ + my_einsum("sv,prvt", Gammad, C["ud1"]) \ + my_einsum("pvst,vr", C["ud1"], Gammau) \ + my_einsum("prsv,vt", C["ud1"], Gammad) #order Beta["ud8"] = 8/3*gs**2*my_einsum("pwwr,st", C["uu"], I3) \ + 8/3*gs**2*my_einsum("swwt,pr", C["dd"], I3) \ + 4/3*gs**2*my_einsum("wwpr,st", C["qu8"], I3) \ + 4/3*gs**2*my_einsum("wwst,pr", C["qd8"], I3) \ + 2/3*gs**2*my_einsum("prww,st", C["ud8"], I3) \ + 2/3*gs**2*my_einsum("wwst,pr", C["ud8"], I3) \ - 4*(2/3*gp**2 \ + gs**2)*my_einsum("prst", C["ud8"]) \ + 12*gs**2*my_einsum("prst", C["ud1"]) \ + 4*my_einsum("sr,pt", 
Gd.conj().T @ Gu, C["phiud"]) \ + 4*my_einsum("pt,rs", Gu.conj().T @ Gd, np.conj(C["phiud"])) \ + 2*(my_einsum("vs,wp,vrwt", np.conj(Gd), np.conj(Gu), C["quqd1"]) \ - 1/6*my_einsum("vs,wp,vrwt", np.conj(Gd), np.conj(Gu), C["quqd8"]) \ + my_einsum("vt,wr,vpws", Gd, Gu, np.conj(C["quqd1"])) \ - 1/6*my_einsum("vt,wr,vpws", Gd, Gu, np.conj(C["quqd8"]))) \ - 2*my_einsum("vp,wr,vwst", np.conj(Gu), Gu, C["qd8"]) \ - 2*my_einsum("vs,wt,vwpr", np.conj(Gd), Gd, C["qu8"]) \ - (my_einsum("ws,vp,vrwt", np.conj(Gd), np.conj(Gu), C["quqd8"]) \ + my_einsum("wt,vr,vpws", Gd, Gu, np.conj(C["quqd8"]))) \ + my_einsum("pv,vrst", Gammau, C["ud8"]) \ + my_einsum("sv,prvt", Gammad, C["ud8"]) \ + my_einsum("pvst,vr", C["ud8"], Gammau) \ + my_einsum("prsv,vt", C["ud8"], Gammad) Beta["le"] = -1/3*gp**2*my_einsum("st,pr", C["phie"], I3) \ - 2/3*gp**2*my_einsum("pr,st", C["phil1"], I3) \ + 8/3*gp**2*my_einsum("prww,st", C["ll"], I3) \ + 4/3*gp**2*my_einsum("pwwr,st", C["ll"], I3) \ - 4/3*gp**2*my_einsum("prww,st", C["lq1"], I3) \ - 2/3*gp**2*my_einsum("wwst,pr", C["qe"], I3) \ + 4/3*gp**2*my_einsum("prww,st", C["le"], I3) \ + 2/3*gp**2*my_einsum("wwst,pr", C["le"], I3) \ - 8/3*gp**2*my_einsum("prww,st", C["lu"], I3) \ + 4/3*gp**2*my_einsum("prww,st", C["ld"], I3) \ - 4/3*gp**2*my_einsum("stww,pr", C["eu"], I3) \ + 2/3*gp**2*my_einsum("stww,pr", C["ed"], I3) \ + 8/3*gp**2*my_einsum("wwst,pr", C["ee"], I3) \ - 6*gp**2*my_einsum("prst", C["le"]) \ + my_einsum("rs,pt", np.conj(Ge), Xie) \ + my_einsum("pt,rs", Ge, np.conj(Xie)) \ - my_einsum("pr,st", Ge @ Ge.conj().T, C["phie"]) \ + 2*my_einsum("st,pr", Ge.conj().T @ Ge, C["phil1"]) \ - 4*my_einsum("pv,rw,vtsw", Ge, np.conj(Ge), C["ee"]) \ + my_einsum("pw,vs,vrwt", Ge, np.conj(Ge), C["le"]) \ - 2*my_einsum("wt,vs,pwvr", Ge, np.conj(Ge), C["ll"]) \ - 4*my_einsum("wt,vs,prvw", Ge, np.conj(Ge), C["ll"]) \ + my_einsum("vt,rw,pvsw", Ge, np.conj(Ge), C["le"]) \ + my_einsum("pv,vrst", Gammal, C["le"]) \ + my_einsum("sv,prvt", Gammae, C["le"]) \ + 
my_einsum("pvst,vr", C["le"], Gammal) \ + my_einsum("prsv,vt", C["le"], Gammae) #order Beta["lu"] = -1/3*gp**2*my_einsum("st,pr", C["phiu"], I3) \ + 4/9*gp**2*my_einsum("pr,st", C["phil1"], I3) \ - 16/9*gp**2*my_einsum("prww,st", C["ll"], I3) \ - 8/9*gp**2*my_einsum("pwwr,st", C["ll"], I3) \ + 8/9*gp**2*my_einsum("prww,st", C["lq1"], I3) \ - 2/3*gp**2*my_einsum("wwst,pr", C["qu1"], I3) \ + 16/9*gp**2*my_einsum("prww,st", C["lu"], I3) \ + 2/3*gp**2*my_einsum("wwst,pr", C["lu"], I3) \ - 8/9*gp**2*my_einsum("prww,st", C["ld"], I3) \ - 8/9*gp**2*my_einsum("prww,st", C["le"], I3) \ + 2/3*gp**2*my_einsum("stww,pr", C["ud1"], I3) \ + 2/3*gp**2*my_einsum("wwst,pr", C["eu"], I3) \ - 8/3*gp**2*my_einsum("stww,pr", C["uu"], I3) \ - 8/9*gp**2*my_einsum("swwt,pr", C["uu"], I3) \ + 4*gp**2*my_einsum("prst", C["lu"]) \ - my_einsum("pr,st", Ge @ Ge.conj().T, C["phiu"]) \ - 2*my_einsum("st,pr", Gu.conj().T @ Gu, C["phil1"]) \ - 1/2*(my_einsum("rv,ws,pvwt", np.conj(Ge), np.conj(Gu), C["lequ1"]) \ + 12*my_einsum("rv,ws,pvwt", np.conj(Ge), np.conj(Gu), C["lequ3"])) \ - 1/2*(my_einsum("pv,wt,rvws", Ge, Gu, np.conj(C["lequ1"])) \ + 12*my_einsum("pv,wt,rvws", Ge, Gu, np.conj(C["lequ3"]))) \ - 2*my_einsum("vs,wt,prvw", np.conj(Gu), Gu, C["lq1"]) \ - my_einsum("rw,pv,vwst", np.conj(Ge), Ge, C["eu"]) \ + my_einsum("pv,vrst", Gammal, C["lu"]) \ + my_einsum("sv,prvt", Gammau, C["lu"]) \ + my_einsum("pvst,vr", C["lu"], Gammal) \ + my_einsum("prsv,vt", C["lu"], Gammau) Beta["ld"] = -1/3*gp**2*my_einsum("st,pr", C["phid"], I3) \ - 2/9*gp**2*my_einsum("pr,st", C["phil1"], I3) \ + 8/9*gp**2*my_einsum("prww,st", C["ll"], I3) \ + 4/9*gp**2*my_einsum("pwwr,st", C["ll"], I3) \ - 4/9*gp**2*my_einsum("prww,st", C["lq1"], I3) \ - 2/3*gp**2*my_einsum("wwst,pr", C["qd1"], I3) \ + 4/9*gp**2*my_einsum("prww,st", C["ld"], I3) \ + 2/3*gp**2*my_einsum("wwst,pr", C["ld"], I3) \ - 8/9*gp**2*my_einsum("prww,st", C["lu"], I3) \ + 4/9*gp**2*my_einsum("prww,st", C["le"], I3) \ - 4/3*gp**2*my_einsum("wwst,pr", 
C["ud1"], I3) \ + 2/3*gp**2*my_einsum("wwst,pr", C["ed"], I3) \ + 4/3*gp**2*my_einsum("stww,pr", C["dd"], I3) \ + 4/9*gp**2*my_einsum("swwt,pr", C["dd"], I3) \ - 2*gp**2*my_einsum("prst", C["ld"]) \ - my_einsum("pr,st", Ge @ Ge.conj().T, C["phid"]) \ + 2*my_einsum("st,pr", Gd.conj().T @ Gd, C["phil1"]) \ - 1/2*my_einsum("rv,wt,pvsw", np.conj(Ge), Gd, C["ledq"]) \ - 1/2*my_einsum("pv,ws,rvtw", Ge, np.conj(Gd), np.conj(C["ledq"])) \ - 2*my_einsum("vs,wt,prvw", np.conj(Gd), Gd, C["lq1"]) \ - my_einsum("rw,pv,vwst", np.conj(Ge), Ge, C["ed"]) \ + my_einsum("pv,vrst", Gammal, C["ld"]) \ + my_einsum("sv,prvt", Gammad, C["ld"]) \ + my_einsum("pvst,vr", C["ld"], Gammal) \ + my_einsum("prsv,vt", C["ld"], Gammad) Beta["qe"] = 1/9*gp**2*my_einsum("st,pr", C["phie"], I3) \ - 2/3*gp**2*my_einsum("pr,st", C["phiq1"], I3) \ - 8/3*gp**2*my_einsum("prww,st", C["qq1"], I3) \ - 4/9*gp**2*(my_einsum("pwwr,st", C["qq1"], I3) \ + 3*my_einsum("pwwr,st", C["qq3"], I3)) \ + 4/3*gp**2*my_einsum("wwpr,st", C["lq1"], I3) \ - 2/9*gp**2*my_einsum("wwst,pr", C["le"], I3) \ + 4/3*gp**2*my_einsum("prww,st", C["qe"], I3) \ + 2/9*gp**2*my_einsum("wwst,pr", C["qe"], I3) \ - 8/3*gp**2*my_einsum("prww,st", C["qu1"], I3) \ + 4/3*gp**2*my_einsum("prww,st", C["qd1"], I3) \ + 4/9*gp**2*my_einsum("stww,pr", C["eu"], I3) \ - 2/9*gp**2*my_einsum("stww,pr", C["ed"], I3) \ - 8/9*gp**2*my_einsum("wwst,pr", C["ee"], I3) \ + 2*gp**2*my_einsum("prst", C["qe"]) \ + my_einsum("pr,st", Gu @ Gu.conj().T, C["phie"]) \ - my_einsum("pr,st", Gd @ Gd.conj().T, C["phie"]) \ + 2*my_einsum("st,pr", Ge.conj().T @ Ge, C["phiq1"]) \ - 1/2*my_einsum("pw,vs,vtwr", Gd, np.conj(Ge), C["ledq"]) \ - 1/2*my_einsum("vt,rw,vswp", Ge, np.conj(Gd), np.conj(C["ledq"])) \ - 2*my_einsum("vs,wt,vwpr", np.conj(Ge), Ge, C["lq1"]) \ - 1/2*(my_einsum("rw,vs,vtpw", np.conj(Gu), np.conj(Ge), C["lequ1"]) \ + 12*my_einsum("rw,vs,vtpw", np.conj(Gu), np.conj(Ge), C["lequ3"])) \ - 1/2*(my_einsum("pw,vt,vsrw", Gu, Ge, np.conj(C["lequ1"])) \ + 
12*my_einsum("pw,vt,vsrw", Gu, Ge, np.conj(C["lequ3"]))) \ - my_einsum("rw,pv,stvw", np.conj(Gd), Gd, C["ed"]) \ - my_einsum("rw,pv,stvw", np.conj(Gu), Gu, C["eu"]) \ + my_einsum("pv,vrst", Gammaq, C["qe"]) \ + my_einsum("sv,prvt", Gammae, C["qe"]) \ + my_einsum("pvst,vr", C["qe"], Gammaq) \ + my_einsum("prsv,vt", C["qe"], Gammae) Beta["qu1"] = 1/9*gp**2*my_einsum("st,pr", C["phiu"], I3) \ + 4/9*gp**2*my_einsum("pr,st", C["phiq1"], I3) \ + 16/9*gp**2*my_einsum("prww,st", C["qq1"], I3) \ + 8/27*gp**2*(my_einsum("pwwr,st", C["qq1"], I3) \ + 3*my_einsum("pwwr,st", C["qq3"], I3)) \ - 8/9*gp**2*my_einsum("wwpr,st", C["lq1"], I3) \ - 8/9*gp**2*my_einsum("prww,st", C["qe"], I3) \ - 8/9*gp**2*my_einsum("prww,st", C["qd1"], I3) \ + 16/9*gp**2*my_einsum("prww,st", C["qu1"], I3) \ + 2/9*gp**2*my_einsum("wwst,pr", C["qu1"], I3) \ - 2/9*gp**2*my_einsum("wwst,pr", C["lu"], I3) \ - 2/9*gp**2*my_einsum("wwst,pr", C["eu"], I3) \ - 2/9*gp**2*my_einsum("stww,pr", C["ud1"], I3) \ + 8/9*gp**2*my_einsum("stww,pr", C["uu"], I3) \ + 8/27*gp**2*my_einsum("swwt,pr", C["uu"], I3) \ - 4/3*gp**2*my_einsum("prst", C["qu1"]) \ - 8/3*gs**2*my_einsum("prst", C["qu8"]) \ + 1/3*my_einsum("rs,pt", np.conj(Gu), Xiu) \ + 1/3*my_einsum("pt,rs", Gu, np.conj(Xiu)) \ + my_einsum("pr,st", Gu @ Gu.conj().T, C["phiu"]) \ - my_einsum("pr,st", Gd @ Gd.conj().T, C["phiu"]) \ - 2*my_einsum("st,pr", Gu.conj().T @ Gu, C["phiq1"]) \ + 1/3*(my_einsum("pw,vs,vrwt", Gu, np.conj(Gu), C["qu1"]) \ + 4/3*my_einsum("pw,vs,vrwt", Gu, np.conj(Gu), C["qu8"])) \ + 1/3*(my_einsum("vt,rw,pvsw", Gu, np.conj(Gu), C["qu1"]) \ + 4/3*my_einsum("vt,rw,pvsw", Gu, np.conj(Gu), C["qu8"])) \ + 1/3*(my_einsum("rw,vs,ptvw", np.conj(Gd), np.conj(Gu), C["quqd1"]) \ + 4/3*my_einsum("rw,vs,ptvw", np.conj(Gd), np.conj(Gu), C["quqd8"])) \ + 1/3*(my_einsum("pw,vt,rsvw", Gd, Gu, np.conj(C["quqd1"])) \ + 4/3*my_einsum("pw,vt,rsvw", Gd, Gu, np.conj(C["quqd8"]))) \ + 1/2*my_einsum("rw,vs,vtpw", np.conj(Gd), np.conj(Gu), C["quqd1"]) \ + 
1/2*my_einsum("pw,vt,vsrw", Gd, Gu, np.conj(C["quqd1"])) \ - 2/3*(my_einsum("vt,ws,pvwr", Gu, np.conj(Gu), C["qq1"]) \ + 3*my_einsum("vt,ws,pvwr", Gu, np.conj(Gu), C["qq3"])) \ - 4*my_einsum("wt,vs,prvw", Gu, np.conj(Gu), C["qq1"]) \ - 2/3*my_einsum("pv,rw,vtsw", Gu, np.conj(Gu), C["uu"]) \ - 2*my_einsum("pv,rw,vwst", Gu, np.conj(Gu), C["uu"]) \ - my_einsum("pv,rw,stvw", Gd, np.conj(Gd), C["ud1"]) \ + my_einsum("pv,vrst", Gammaq, C["qu1"]) \ + my_einsum("sv,prvt", Gammau, C["qu1"]) \ + my_einsum("pvst,vr", C["qu1"], Gammaq) \ + my_einsum("prsv,vt", C["qu1"], Gammau) Beta["qd1"] = 1/9*gp**2*my_einsum("st,pr", C["phid"], I3) \ - 2/9*gp**2*my_einsum("pr,st", C["phiq1"], I3) \ - 8/9*gp**2*my_einsum("prww,st", C["qq1"], I3) \ - 4/27*gp**2*(my_einsum("pwwr,st", C["qq1"], I3) \ + 3*my_einsum("pwwr,st", C["qq3"], I3)) \ + 4/9*gp**2*my_einsum("wwpr,st", C["lq1"], I3) \ + 4/9*gp**2*my_einsum("prww,st", C["qe"], I3) \ - 8/9*gp**2*my_einsum("prww,st", C["qu1"], I3) \ + 4/9*gp**2*my_einsum("prww,st", C["qd1"], I3) \ + 2/9*gp**2*my_einsum("wwst,pr", C["qd1"], I3) \ - 2/9*gp**2*my_einsum("wwst,pr", C["ld"], I3) \ - 2/9*gp**2*my_einsum("wwst,pr", C["ed"], I3) \ + 4/9*gp**2*my_einsum("wwst,pr", C["ud1"], I3) \ - 4/9*gp**2*my_einsum("stww,pr", C["dd"], I3) \ - 4/27*gp**2*my_einsum("swwt,pr", C["dd"], I3) \ + 2/3*gp**2*my_einsum("prst", C["qd1"]) \ - 8/3*gs**2*my_einsum("prst", C["qd8"]) \ + 1/3*my_einsum("rs,pt", np.conj(Gd), Xid) \ + 1/3*my_einsum("pt,rs", Gd, np.conj(Xid)) \ + my_einsum("pr,st", Gu @ Gu.conj().T, C["phid"]) \ - my_einsum("pr,st", Gd @ Gd.conj().T, C["phid"]) \ + 2*my_einsum("st,pr", Gd.conj().T @ Gd, C["phiq1"]) \ + 1/3*(my_einsum("pw,vs,vrwt", Gd, np.conj(Gd), C["qd1"]) \ + 4/3*my_einsum("pw,vs,vrwt", Gd, np.conj(Gd), C["qd8"])) \ + 1/3*(my_einsum("vt,rw,pvsw", Gd, np.conj(Gd), C["qd1"]) \ + 4/3*my_einsum("vt,rw,pvsw", Gd, np.conj(Gd), C["qd8"])) \ + 1/3*(my_einsum("rw,vs,vwpt", np.conj(Gu), np.conj(Gd), C["quqd1"]) \ + 4/3*my_einsum("rw,vs,vwpt", np.conj(Gu), 
np.conj(Gd), C["quqd8"])) \ + 1/3*(my_einsum("pw,vt,vwrs", Gu, Gd, np.conj(C["quqd1"])) \ + 4/3*my_einsum("pw,vt,vwrs", Gu, Gd, np.conj(C["quqd8"]))) \ + 1/2*my_einsum("ws,rv,pvwt", np.conj(Gd), np.conj(Gu), C["quqd1"]) \ + 1/2*my_einsum("pv,wt,rvws", Gu, Gd, np.conj(C["quqd1"])) \ - 2/3*(my_einsum("vt,ws,pvwr", Gd, np.conj(Gd), C["qq1"]) \ + 3*my_einsum("vt,ws,pvwr", Gd, np.conj(Gd), C["qq3"])) \ - 4*my_einsum("wt,vs,prvw", Gd, np.conj(Gd), C["qq1"]) \ - 2/3*my_einsum("pv,rw,vtsw", Gd, np.conj(Gd), C["dd"]) \ - 2*my_einsum("pv,rw,vwst", Gd, np.conj(Gd), C["dd"]) \ - my_einsum("pv,rw,vwst", Gu, np.conj(Gu), C["ud1"]) \ + my_einsum("pv,vrst", Gammaq, C["qd1"]) \ + my_einsum("sv,prvt", Gammad, C["qd1"]) \ + my_einsum("pvst,vr", C["qd1"], Gammaq) \ + my_einsum("prsv,vt", C["qd1"], Gammad) Beta["qu8"] = 8/3*gs**2*(my_einsum("pwwr,st", C["qq1"], I3) \ + 3*my_einsum("pwwr,st", C["qq3"], I3)) \ + 2/3*gs**2*my_einsum("prww,st", C["qu8"], I3) \ + 2/3*gs**2*my_einsum("prww,st", C["qd8"], I3) \ + 4/3*gs**2*my_einsum("wwst,pr", C["qu8"], I3) \ + 2/3*gs**2*my_einsum("stww,pr", C["ud8"], I3) \ + 8/3*gs**2*my_einsum("swwt,pr", C["uu"], I3) \ - (4/3*gp**2 \ + 14*gs**2)*my_einsum("prst", C["qu8"]) \ - 12*gs**2*my_einsum("prst", C["qu1"]) \ + 2*my_einsum("rs,pt", np.conj(Gu), Xiu) \ + 2*my_einsum("pt,rs", Gu, np.conj(Xiu)) \ + 2*(my_einsum("pw,vs,vrwt", Gu, np.conj(Gu), C["qu1"]) \ - 1/6*my_einsum("pw,vs,vrwt", Gu, np.conj(Gu), C["qu8"])) \ + 2*(my_einsum("vt,rw,pvsw", Gu, np.conj(Gu), C["qu1"]) \ - 1/6*my_einsum("vt,rw,pvsw", Gu, np.conj(Gu), C["qu8"])) \ + 2*(my_einsum("rw,vs,ptvw", np.conj(Gd), np.conj(Gu), C["quqd1"]) \ - 1/6*my_einsum("rw,vs,ptvw", np.conj(Gd), np.conj(Gu), C["quqd8"])) \ + 2*(my_einsum("pw,vt,rsvw", Gd, Gu, np.conj(C["quqd1"])) \ - 1/6*my_einsum("pw,vt,rsvw", Gd, Gu, np.conj(C["quqd8"]))) \ + 1/2*my_einsum("vs,rw,vtpw", np.conj(Gu), np.conj(Gd), C["quqd8"]) \ + 1/2*my_einsum("vt,pw,vsrw", Gu, Gd, np.conj(C["quqd8"])) \ - 4*(my_einsum("vt,ws,pvwr", Gu, 
np.conj(Gu), C["qq1"]) \ + 3*my_einsum("vt,ws,pvwr", Gu, np.conj(Gu), C["qq3"])) \ - 4*my_einsum("pv,rw,vtsw", Gu, np.conj(Gu), C["uu"]) \ - my_einsum("pv,rw,stvw", Gd, np.conj(Gd), C["ud8"]) \ + my_einsum("pv,vrst", Gammaq, C["qu8"]) \ + my_einsum("sv,prvt", Gammau, C["qu8"]) \ + my_einsum("pvst,vr", C["qu8"], Gammaq) \ + my_einsum("prsv,vt", C["qu8"], Gammau) Beta["qd8"] = 8/3*gs**2*(my_einsum("pwwr,st", C["qq1"], I3) \ + 3*my_einsum("pwwr,st", C["qq3"], I3)) \ + 2/3*gs**2*my_einsum("prww,st", C["qu8"], I3) \ + 2/3*gs**2*my_einsum("prww,st", C["qd8"], I3) \ + 4/3*gs**2*my_einsum("wwst,pr", C["qd8"], I3) \ + 2/3*gs**2*my_einsum("wwst,pr", C["ud8"], I3) \ + 8/3*gs**2*my_einsum("swwt,pr", C["dd"], I3) \ - (-2/3*gp**2 \ + 14*gs**2)*my_einsum("prst", C["qd8"]) \ - 12*gs**2*my_einsum("prst", C["qd1"]) \ + 2*my_einsum("rs,pt", np.conj(Gd), Xid) \ + 2*my_einsum("pt,rs", Gd, np.conj(Xid)) \ + 2*(my_einsum("pw,vs,vrwt", Gd, np.conj(Gd), C["qd1"]) \ - 1/6*my_einsum("pw,vs,vrwt", Gd, np.conj(Gd), C["qd8"])) \ + 2*(my_einsum("vt,rw,pvsw", Gd, np.conj(Gd), C["qd1"]) \ - 1/6*my_einsum("vt,rw,pvsw", Gd, np.conj(Gd), C["qd8"])) \ + 2*(my_einsum("rw,vs,vwpt", np.conj(Gu), np.conj(Gd), C["quqd1"]) \ - 1/6*my_einsum("rw,vs,vwpt", np.conj(Gu), np.conj(Gd), C["quqd8"])) \ + 2*(my_einsum("pw,vt,vwrs", Gu, Gd, np.conj(C["quqd1"])) \ - 1/6*my_einsum("pw,vt,vwrs", Gu, Gd, np.conj(C["quqd8"]))) \ + 1/2*my_einsum("vs,rw,pwvt", np.conj(Gd), np.conj(Gu), C["quqd8"]) \ + 1/2*my_einsum("vt,pw,rwvs", Gd, Gu, np.conj(C["quqd8"])) \ - 4*(my_einsum("vt,ws,pvwr", Gd, np.conj(Gd), C["qq1"]) \ + 3*my_einsum("vt,ws,pvwr", Gd, np.conj(Gd), C["qq3"])) \ - 4*my_einsum("pv,rw,vtsw", Gd, np.conj(Gd), C["dd"]) \ - my_einsum("pv,rw,vwst", Gu, np.conj(Gu), C["ud8"]) \ + my_einsum("pv,vrst", Gammaq, C["qd8"]) \ + my_einsum("sv,prvt", Gammad, C["qd8"]) \ + my_einsum("pvst,vr", C["qd8"], Gammaq) \ + my_einsum("prsv,vt", C["qd8"], Gammad) Beta["ledq"] = -(8/3*gp**2 \ + 8*gs**2)*my_einsum("prst", C["ledq"]) \ - 
2*my_einsum("ts,pr", np.conj(Gd), Xie) \ - 2*my_einsum("pr,ts", Ge, np.conj(Xid)) \ + 2*my_einsum("pv,tw,vrsw", Ge, np.conj(Gd), C["ed"]) \ - 2*my_einsum("vr,tw,pvsw", Ge, np.conj(Gd), C["ld"]) \ + 2*my_einsum("vr,ws,pvwt", Ge, np.conj(Gd), C["lq1"]) \ + 6*my_einsum("vr,ws,pvwt", Ge, np.conj(Gd), C["lq3"]) \ - 2*my_einsum("pw,vs,vtwr", Ge, np.conj(Gd), C["qe"]) \ + 2*my_einsum("vs,tw,prvw", np.conj(Gd), np.conj(Gu), C["lequ1"]) \ + my_einsum("pv,vrst", Gammal, C["ledq"]) \ + my_einsum("sv,prvt", Gammad, C["ledq"]) \ + my_einsum("pvst,vr", C["ledq"], Gammae) \ + my_einsum("prsv,vt", C["ledq"], Gammaq) Beta["quqd1"] = 10/3*gp*my_einsum("st,pr", C["dB"], Gu) \ - 6*g*my_einsum("st,pr", C["dW"], Gu) \ - 20/9*gp*my_einsum("pt,sr", C["dB"], Gu) \ + 4*g*my_einsum("pt,sr", C["dW"], Gu) \ - 64/9*gs*my_einsum("pt,sr", C["dG"], Gu) \ - 2/3*gp*my_einsum("pr,st", C["uB"], Gd) \ - 6*g*my_einsum("pr,st", C["uW"], Gd) \ + 4/9*gp*my_einsum("sr,pt", C["uB"], Gd) \ + 4*g*my_einsum("sr,pt", C["uW"], Gd) \ - 64/9*gs*my_einsum("sr,pt", C["uG"], Gd) \ - 1/2*(11/9*gp**2 + 3*g**2 + 32*gs**2)*my_einsum("prst", C["quqd1"]) \ - 1/3*( - 5/9*gp**2 - 3*g**2 + 64/3*gs**2)*my_einsum("srpt", C["quqd1"]) \ - 4/9*( - 5/9*gp**2 - 3*g**2 + 28/3*gs**2)*my_einsum("srpt", C["quqd8"]) \ + 16/9*gs**2*my_einsum("prst", C["quqd8"]) \ - 2*my_einsum("pr,st", Gu, Xid) \ - 2*my_einsum("st,pr", Gd, Xiu) \ + 4/3*(my_einsum("vr,pw,svwt", Gu, Gd, C["qd1"]) \ + 4/3*my_einsum("vr,pw,svwt", Gu, Gd, C["qd8"]) \ + my_einsum("vt,sw,pvwr", Gd, Gu, C["qu1"]) \ + 4/3*my_einsum("vt,sw,pvwr", Gd, Gu, C["qu8"]) \ + my_einsum("pw,sv,vrwt", Gd, Gu, C["ud1"]) \ + 4/3*my_einsum("pw,sv,vrwt", Gd, Gu, C["ud8"])) \ + 8/3*(my_einsum("wt,vr,svpw", Gd, Gu, C["qq1"]) \ - 3*my_einsum("wt,vr,svpw", Gd, Gu, C["qq3"]) \ - 3*my_einsum("wt,vr,swpv", Gd, Gu, C["qq1"]) \ + 9*my_einsum("wt,vr,swpv", Gd, Gu, C["qq3"])) \ - 4*my_einsum("sw,pv,vrwt", Gd, Gu, C["ud1"]) \ + my_einsum("pv,vrst", Gammaq, C["quqd1"]) \ + my_einsum("sv,prvt", Gammaq, 
C["quqd1"]) \ + my_einsum("pvst,vr", C["quqd1"], Gammau) \ + my_einsum("prsv,vt", C["quqd1"], Gammad) Beta["quqd8"] = 8*gs*my_einsum("st,pr", C["dG"], Gu) \ - 40/3*gp*my_einsum("pt,sr", C["dB"], Gu) \ + 24*g*my_einsum("pt,sr", C["dW"], Gu) \ + 16/3*gs*my_einsum("pt,sr", C["dG"], Gu) \ + 8*gs*my_einsum("pr,st", C["uG"], Gd) \ + 8/3*gp*my_einsum("sr,pt", C["uB"], Gd) \ + 24*g*my_einsum("sr,pt", C["uW"], Gd) \ + 16/3*gs*my_einsum("sr,pt", C["uG"], Gd) \ + 8*gs**2*my_einsum("prst", C["quqd1"]) \ + (10/9*gp**2 + 6*g**2 + 16/3*gs**2)*my_einsum("srpt", C["quqd1"]) \ + (-11/18*gp**2 - 3/2*g**2 + 16/3*gs**2)*my_einsum("prst", C["quqd8"]) \ - 1/3*(5/9*gp**2 + 3*g**2 \ + 44/3*gs**2)*my_einsum("srpt", C["quqd8"]) \ + 8*(my_einsum("vr,pw,svwt", Gu, Gd, C["qd1"]) \ - 1/6*my_einsum("vr,pw,svwt", Gu, Gd, C["qd8"]) \ + my_einsum("vt,sw,pvwr", Gd, Gu, C["qu1"]) \ - 1/6*my_einsum("vt,sw,pvwr", Gd, Gu, C["qu8"]) \ + my_einsum("pw,sv,vrwt", Gd, Gu, C["ud1"]) \ - 1/6*my_einsum("pw,sv,vrwt", Gd, Gu, C["ud8"])) \ + 16*(my_einsum("wt,vr,svpw", Gd, Gu, C["qq1"]) \ - 3*my_einsum("wt,vr,svpw", Gd, Gu, C["qq3"])) \ - 4*my_einsum("sw,pv,vrwt", Gd, Gu, C["ud8"]) \ + my_einsum("pv,vrst", Gammaq, C["quqd8"]) \ + my_einsum("sv,prvt", Gammaq, C["quqd8"]) \ + my_einsum("pvst,vr", C["quqd8"], Gammau) \ + my_einsum("prsv,vt", C["quqd8"], Gammad) Beta["lequ1"] = -(11/3*gp**2 + 8*gs**2)*my_einsum("prst", C["lequ1"]) \ + (30*gp**2 + 18*g**2)*my_einsum("prst", C["lequ3"]) \ + 2*my_einsum("st,pr", Gu, Xie) \ + 2*my_einsum("pr,st", Ge, Xiu) \ + 2*my_einsum("sv,wt,prvw", Gd, Gu, C["ledq"]) \ + 2*my_einsum("pv,sw,vrwt", Ge, Gu, C["eu"]) \ + 2*my_einsum("vr,wt,pvsw", Ge, Gu, C["lq1"]) \ - 6*my_einsum("vr,wt,pvsw", Ge, Gu, C["lq3"]) \ - 2*my_einsum("vr,sw,pvwt", Ge, Gu, C["lu"]) \ - 2*my_einsum("pw,vt,svwr", Ge, Gu, C["qe"]) \ + my_einsum("pv,vrst", Gammal, C["lequ1"]) \ + my_einsum("sv,prvt", Gammaq, C["lequ1"]) \ + my_einsum("pvst,vr", C["lequ1"], Gammae) \ + my_einsum("prsv,vt", C["lequ1"], Gammau) 
Beta["lequ3"] = 5/6*gp*my_einsum("pr,st", C["eB"], Gu) \ - 3/2*g*my_einsum("st,pr", C["uW"], Ge) \ - 3/2*gp*my_einsum("st,pr", C["uB"], Ge) \ - 3/2*g*my_einsum("pr,st", C["eW"], Gu) \ + (2/9*gp**2 - 3*g**2 + 8/3*gs**2)*my_einsum("prst", C["lequ3"]) \ + 1/8*(5*gp**2 + 3*g**2)*my_einsum("prst", C["lequ1"]) \ - 1/2*my_einsum("sw,pv,vrwt", Gu, Ge, C["eu"]) \ - 1/2*my_einsum("vr,wt,pvsw", Ge, Gu, C["lq1"]) \ + 3/2*my_einsum("vr,wt,pvsw", Ge, Gu, C["lq3"]) \ - 1/2*my_einsum("vr,sw,pvwt", Ge, Gu, C["lu"]) \ - 1/2*my_einsum("pw,vt,svwr", Ge, Gu, C["qe"]) \ + my_einsum("pv,vrst", Gammal, C["lequ3"]) \ + my_einsum("sv,prvt", Gammaq, C["lequ3"]) \ + my_einsum("pvst,vr", C["lequ3"], Gammae) \ + my_einsum("prsv,vt", C["lequ3"], Gammau) Beta["duql"] = -(9/2*g**2 \ + 11/6*gp**2 \ + 4*gs**2)*my_einsum("prst", C["duql"]) \ - my_einsum("sv,wp,vrwt", np.conj(Gd), Gd, C["duql"]) \ - my_einsum("sv,wr,pvwt", np.conj(Gu), Gu, C["duql"]) \ + 2*my_einsum("tv,sw,prwv", np.conj(Ge), np.conj(Gu), C["duue"]) \ + my_einsum("tv,sw,pwrv", np.conj(Ge), np.conj(Gu), C["duue"]) \ + 4*my_einsum("vp,wr,vwst", Gd, Gu, C["qqql"]) \ + 4*my_einsum("vp,wr,wvst", Gd, Gu, C["qqql"]) \ - my_einsum("vp,wr,vswt", Gd, Gu, C["qqql"]) \ - my_einsum("vp,wr,wsvt", Gd, Gu, C["qqql"]) \ + 2*my_einsum("wp,tv,wsrv", Gd, np.conj(Ge), C["qque"]) \ + my_einsum("vp,vrst", Gd.conj().T @ Gd, C["duql"]) \ + my_einsum("vr,pvst", Gu.conj().T @ Gu, C["duql"]) \ + 1/2*(my_einsum("vs,prvt", Gu @ Gu.conj().T, C["duql"]) \ + my_einsum("vs,prvt", Gd @ Gd.conj().T, C["duql"])) \ + 1/2*my_einsum("vt,prsv", Ge @ Ge.conj().T, C["duql"]) Beta["qque"] = -(9/2*g**2 \ + 23/6*gp**2 + 4*gs**2)*my_einsum("prst", C["qque"]) \ - my_einsum("rv,ws,pwvt", np.conj(Gu), Gu, C["qque"]) \ + 1/2*my_einsum("wt,rv,vspw", Ge, np.conj(Gd), C["duql"]) \ - 1/2*(2*my_einsum("pv,rw,vwst", np.conj(Gd), np.conj(Gu), C["duue"]) \ + my_einsum("pv,rw,vswt", np.conj(Gd), np.conj(Gu), C["duue"])) \ + 1/2*( \ - 2*my_einsum("ws,vt,prwv", Gu, Ge, C["qqql"]) \ + 
my_einsum("ws,vt,pwrv", Gu, Ge, C["qqql"]) \ - 2*my_einsum("ws,vt,wprv", Gu, Ge, C["qqql"])) \ + 1/2*(my_einsum("vp,vrst", Gu @ Gu.conj().T, C["qque"]) \ + my_einsum("vp,vrst", Gd @ Gd.conj().T, C["qque"])) \ - my_einsum("pv,ws,rwvt", np.conj(Gu), Gu, C["qque"]) \ + 1/2*my_einsum("wt,pv,vsrw", Ge, np.conj(Gd), C["duql"]) \ - 1/2*(2*my_einsum("rv,pw,vwst", np.conj(Gd), np.conj(Gu), C["duue"]) \ + my_einsum("rv,pw,vswt", np.conj(Gd), np.conj(Gu), C["duue"])) \ + 1/2*( \ - 2*my_einsum("ws,vt,rpwv", Gu, Ge, C["qqql"]) \ + my_einsum("ws,vt,rwpv", Gu, Ge, C["qqql"]) \ - 2*my_einsum("ws,vt,wrpv", Gu, Ge, C["qqql"])) \ + 1/2*(my_einsum("vr,vpst", Gu @ Gu.conj().T, C["qque"]) \ + my_einsum("vr,vpst", Gd @ Gd.conj().T, C["qque"])) \ + my_einsum("vs,prvt", Gu.conj().T @ Gu, C["qque"]) \ + my_einsum("vt,prsv", Ge.conj().T @ Ge, C["qque"]) Beta["qqql"] = -(3*g**2 \ + 1/3*gp**2 + 4*gs**2)*my_einsum("prst", C["qqql"]) \ - 4*g**2*(my_einsum("rpst", C["qqql"]) \ + my_einsum("srpt", C["qqql"]) \ + my_einsum("psrt", C["qqql"])) \ - 4*my_einsum("tv,sw,prwv", np.conj(Ge), np.conj(Gu), C["qque"]) \ + 2*(my_einsum("pv,rw,vwst", np.conj(Gd), np.conj(Gu), C["duql"]) \ + my_einsum("rv,pw,vwst", np.conj(Gd), np.conj(Gu), C["duql"])) \ + 1/2*(my_einsum("vp,vrst", Gu @ Gu.conj().T, C["qqql"]) \ + my_einsum("vp,vrst", Gd @ Gd.conj().T, C["qqql"])) \ + 1/2*(my_einsum("vr,pvst", Gu @ Gu.conj().T, C["qqql"]) \ + my_einsum("vr,pvst", Gd @ Gd.conj().T, C["qqql"])) \ + 1/2*(my_einsum("vs,prvt", Gu @ Gu.conj().T, C["qqql"]) \ + my_einsum("vs,prvt", Gd @ Gd.conj().T, C["qqql"])) \ + 1/2*my_einsum("vt,prsv", Ge @ Ge.conj().T, C["qqql"]) Beta["duue"] = -(2*gp**2 + 4*gs**2)*my_einsum("prst", C["duue"]) \ - 20/3*gp**2*my_einsum("psrt", C["duue"]) \ + 4*my_einsum("ws,vt,prwv", Gu, Ge, C["duql"]) \ - 8*my_einsum("vp,wr,vwst", Gd, Gu, C["qque"]) \ + my_einsum("vp,vrst", Gd.conj().T @ Gd, C["duue"]) \ + my_einsum("vr,pvst", Gu.conj().T @ Gu, C["duue"]) \ + my_einsum("vs,prvt", Gu.conj().T @ Gu, C["duue"]) \ + 
my_einsum("vt,prsv", Ge.conj().T @ Ge, C["duue"]) Beta["llphiphi"] = (2*Lambda \ - 3*g**2 \ + 2*GammaH)*C["llphiphi"]-3/2*(C["llphiphi"] @ Ge @ Ge.conj().T \ + Ge.conj() @ Ge.T @ C["llphiphi"]) return Beta
[ "def", "beta", "(", "C", ",", "HIGHSCALE", ",", "newphys", "=", "True", ")", ":", "g", "=", "C", "[", "\"g\"", "]", "gp", "=", "C", "[", "\"gp\"", "]", "gs", "=", "C", "[", "\"gs\"", "]", "m2", "=", "C", "[", "\"m2\"", "]", "Lambda", "=", "...
43.926092
14.986002
def djfrontend_normalize(version=None):
    """Return the ``<link>`` tag for the Normalize CSS stylesheet.

    Normalize is the reset stylesheet included in HTML5 Boilerplate.
    When *version* is not given, it falls back to the
    ``DJFRONTEND_NORMALIZE`` Django setting, or to the package default.
    """
    resolved = version
    if resolved is None:
        resolved = getattr(settings, 'DJFRONTEND_NORMALIZE',
                           DJFRONTEND_NORMALIZE_DEFAULT)
    template = '<link rel="stylesheet" href="{0}djfrontend/css/normalize/{1}/normalize.css">'
    return format_html(template, _static_url, resolved)
[ "def", "djfrontend_normalize", "(", "version", "=", "None", ")", ":", "if", "version", "is", "None", ":", "version", "=", "getattr", "(", "settings", ",", "'DJFRONTEND_NORMALIZE'", ",", "DJFRONTEND_NORMALIZE_DEFAULT", ")", "return", "format_html", "(", "'<link rel...
33.545455
17.909091
def add_quantity_modifier(self, quantity, modifier, overwrite=False):
    """Register a modifier for a derived quantity.

    Consider using the high-level function `add_derived_quantity` instead!

    Parameters
    ----------
    quantity : str
        Name of the derived quantity to add.
    modifier : None or str or tuple
        If a tuple of length >= 2 whose first element is a callable, it is
        interpreted as ``(callable, native quantity 1, native quantity 2,
        ...)`` and evaluated as ``callable(native quantity 1, ...)``.
        If None, `quantity` itself is used as the native quantity name.
        Otherwise, the value is used directly as a native quantity name.
    overwrite : bool, optional
        When False (the default), raise ValueError if `quantity` is
        already registered in ``_quantity_modifiers``.
    """
    already_registered = quantity in self._quantity_modifiers
    if already_registered and not overwrite:
        raise ValueError('quantity `{}` already exists'.format(quantity))
    self._quantity_modifiers[quantity] = modifier
    # Warn (but do not raise) if the referenced native quantities are absent.
    self._check_quantities_exist([quantity], raise_exception=False)
[ "def", "add_quantity_modifier", "(", "self", ",", "quantity", ",", "modifier", ",", "overwrite", "=", "False", ")", ":", "if", "quantity", "in", "self", ".", "_quantity_modifiers", "and", "not", "overwrite", ":", "raise", "ValueError", "(", "'quantity `{}` alrea...
50.875
29.708333
def map_to(self, attrname, tablename=None, selectable=None,
           schema=None, base=None, mapper_args=util.immutabledict()):
    """Configure a mapping to the given attrname.

    This is the "master" method that can be used to create any
    configuration.

    :param attrname: String attribute name which will be
      established as an attribute on this :class:.`.SQLSoup`
      instance.
    :param base: a Python class which will be used as the
      base for the mapped class. If ``None``, the "base"
      argument specified by this :class:`.SQLSoup`
      instance's constructor will be used, which defaults to
      ``object``.
    :param mapper_args: Dictionary of arguments which will
      be passed directly to :func:`.orm.mapper`.
    :param tablename: String name of a :class:`.Table` to be
      reflected. If a :class:`.Table` is already available,
      use the ``selectable`` argument.  This argument is
      mutually exclusive versus the ``selectable`` argument.
    :param selectable: a :class:`.Table`, :class:`.Join`, or
      :class:`.Select` object which will be mapped.  This
      argument is mutually exclusive versus the ``tablename``
      argument.
    :param schema: String schema name to use if the
      ``tablename`` argument is present.
    """
    # Refuse to silently re-map an attribute that is already configured.
    if attrname in self._cache:
        raise SQLSoupError(
            "Attribute '%s' is already mapped to '%s'" % (
                attrname,
                class_mapper(self._cache[attrname]).mapped_table
            ))

    if tablename is not None:
        # NOTE(review): `basestring` is a Python 2 builtin -- this module
        # presumably targets Python 2 or defines a compatibility alias;
        # confirm before porting.
        if not isinstance(tablename, basestring):
            raise ArgumentError("'tablename' argument must be a string.")
        if selectable is not None:
            raise ArgumentError("'tablename' and 'selectable' "
                                "arguments are mutually exclusive")

        # Reflect the table definition from the database.
        selectable = Table(tablename,
                           self._metadata,
                           autoload=True,
                           autoload_with=self.bind,
                           schema=schema or self.schema)
    elif schema:
        raise ArgumentError("'tablename' argument is required when "
                            "using 'schema'.")
    elif selectable is not None:
        if not isinstance(selectable, expression.FromClause):
            raise ArgumentError("'selectable' argument must be a "
                                "table, select, join, or other "
                                "selectable construct.")
    else:
        raise ArgumentError("'tablename' or 'selectable' argument is "
                            "required.")

    # The ORM needs a way to identify rows: either the selectable has a
    # primary key, or the caller supplies one via mapper_args.
    if not selectable.primary_key.columns and not \
            'primary_key' in mapper_args:
        if tablename:
            raise SQLSoupError(
                "table '%s' does not have a primary "
                "key defined" % tablename)
        else:
            raise SQLSoupError(
                "selectable '%s' does not have a primary "
                "key defined" % selectable)

    # Build the mapped class and cache it under the attribute name so
    # subsequent attribute access reuses it.
    mapped_cls = _class_for_table(
        self.session,
        self.engine,
        selectable,
        base or self.base,
        mapper_args
    )
    self._cache[attrname] = mapped_cls
    return mapped_cls
[ "def", "map_to", "(", "self", ",", "attrname", ",", "tablename", "=", "None", ",", "selectable", "=", "None", ",", "schema", "=", "None", ",", "base", "=", "None", ",", "mapper_args", "=", "util", ".", "immutabledict", "(", ")", ")", ":", "if", "attr...
43.195122
20.170732
def modified_lines(filename, extra_data, commit=None):
    """Return the line numbers that were modified in *filename*.

    Args:
      filename: the file to check.
      extra_data: the per-file status returned by modified_files; None
        means the file was not modified at all.
      commit: the complete sha1 (40 chars) of the commit. Note that
        specifying this value will only work (100%) when commit ==
        last_commit (with respect to the currently checked out revision),
        otherwise, we could miss some lines.

    Returns:
      a list of modified line numbers, or None when every line is new.
    """
    if extra_data is None:
        return []
    if extra_data != 'M':
        return None

    command = ['hg', 'diff', '-U', '0']
    if commit:
        command.append('--change=%s' % commit)
    command.append(filename)

    # Split as bytes, as the output may have some non unicode characters.
    raw_diff = subprocess.check_output(command)
    diff_lines = raw_diff.split(os.linesep.encode('utf-8'))

    # Pull (start, count) pairs out of the unified-diff hunk headers.
    hunk_ranges = utils.filter_lines(
        diff_lines,
        br'@@ -\d+,\d+ \+(?P<start_line>\d+),(?P<lines>\d+) @@',
        groups=('start_line', 'lines'))

    result = []
    for first, count in hunk_ranges:
        first = int(first)
        result.extend(range(first, first + int(count)))
    return result
[ "def", "modified_lines", "(", "filename", ",", "extra_data", ",", "commit", "=", "None", ")", ":", "if", "extra_data", "is", "None", ":", "return", "[", "]", "if", "extra_data", "!=", "'M'", ":", "return", "None", "command", "=", "[", "'hg'", ",", "'di...
36.692308
20.717949
def _RunAndWaitForVFSFileUpdate(self, path):
    """Runs a flow on the client, and waits for it to finish."""
    client_id = rdf_client.GetClientURNFromPath(path)
    # Paths outside any client directory have nothing to refresh.
    if client_id is None:
        return

    flow_utils.UpdateVFSFileAndWait(
        client_id,
        token=self.token,
        vfs_file_urn=self.root.Add(path),
        timeout=self.timeout)
[ "def", "_RunAndWaitForVFSFileUpdate", "(", "self", ",", "path", ")", ":", "client_id", "=", "rdf_client", ".", "GetClientURNFromPath", "(", "path", ")", "# If we're not actually in a directory on a client, no need to run a flow.", "if", "client_id", "is", "None", ":", "re...
30.357143
19.357143
def on_path(self, new):
    """Handle a change of the file path.

    Keeps the displayed name in sync with the path's final component
    and reloads the graph from the editor input.
    """
    self.name = basename(new)
    self.graph = self.editor_input.load()
[ "def", "on_path", "(", "self", ",", "new", ")", ":", "self", ".", "name", "=", "basename", "(", "new", ")", "self", ".", "graph", "=", "self", ".", "editor_input", ".", "load", "(", ")" ]
30.8
5.8
def query_job_status(self, submissionid):
    """Check whether a vmray job/submission has finished.

    :param submissionid: ID of the job/submission
    :type submissionid: int
    :returns: True if job finished, false if not
    :rtype: bool
    :raises UnknownSubmissionIdError: when the server answer is not HTTP 200
    """
    endpoint = '{}{}{}'.format(self.url, '/rest/submission/', submissionid)
    response = self.session.get(endpoint)
    if response.status_code != 200:
        raise UnknownSubmissionIdError('Submission id seems invalid, response was not HTTP 200.')
    submission_info = json.loads(response.text)
    # 'submission_finished' flags completion in the response payload.
    if submission_info.get('data', {}).get('submission_finished', False):
        return True
    return False
[ "def", "query_job_status", "(", "self", ",", "submissionid", ")", ":", "apiurl", "=", "'/rest/submission/'", "result", "=", "self", ".", "session", ".", "get", "(", "'{}{}{}'", ".", "format", "(", "self", ".", "url", ",", "apiurl", ",", "submissionid", ")"...
39.315789
20.052632
def T(a, half=False, cuda=True):
    """Convert a numpy array (or array-like) into a pytorch tensor.

    Integer inputs become LongTensor, float inputs become FloatTensor
    (or half precision when ``half`` is set). When ``cuda`` is True the
    resulting tensor is moved to the GPU via ``to_gpu``.
    """
    if not torch.is_tensor(a):
        arr = np.array(np.ascontiguousarray(a))
        if arr.dtype in (np.int8, np.int16, np.int32, np.int64):
            a = torch.LongTensor(arr.astype(np.int64))
        elif arr.dtype in (np.float32, np.float64):
            a = to_half(arr) if half else torch.FloatTensor(arr)
        else:
            raise NotImplementedError(arr.dtype)
    return to_gpu(a) if cuda else a
[ "def", "T", "(", "a", ",", "half", "=", "False", ",", "cuda", "=", "True", ")", ":", "if", "not", "torch", ".", "is_tensor", "(", "a", ")", ":", "a", "=", "np", ".", "array", "(", "np", ".", "ascontiguousarray", "(", "a", ")", ")", "if", "a",...
39.285714
12.571429
def _wait_for_files(path): """ Retry with backoff up to 1 second to delete files from a directory. :param str path: The path to crawl to delete files from :return: A list of remaining paths or None :rtype: Optional[List[str]] """ timeout = 0.001 remaining = [] while timeout < 1.0: remaining = [] if os.path.isdir(path): L = os.listdir(path) for target in L: _remaining = _wait_for_files(target) if _remaining: remaining.extend(_remaining) continue try: os.unlink(path) except FileNotFoundError as e: if e.errno == errno.ENOENT: return except (OSError, IOError, PermissionError): time.sleep(timeout) timeout *= 2 remaining.append(path) else: return return remaining
[ "def", "_wait_for_files", "(", "path", ")", ":", "timeout", "=", "0.001", "remaining", "=", "[", "]", "while", "timeout", "<", "1.0", ":", "remaining", "=", "[", "]", "if", "os", ".", "path", ".", "isdir", "(", "path", ")", ":", "L", "=", "os", "...
29.032258
14.451613
def parse(text):
    """Try to parse *text* into a number.

    Return: the number (int or float) if successful; otherwise None.
    Non-finite values (NaN, +/-inf) are rejected.
    """
    try:
        return int(text)
    except ValueError:
        try:
            amount = float(text)
        except ValueError:
            return None
        # Explicit check instead of `assert`: assertions are stripped
        # under `python -O`, which would otherwise let NaN/inf through.
        if isnan(amount) or isinf(amount):
            return None
        return amount
[ "def", "parse", "(", "text", ")", ":", "try", ":", "return", "int", "(", "text", ")", "except", "ValueError", ":", "try", ":", "amount", "=", "float", "(", "text", ")", "assert", "not", "isnan", "(", "amount", ")", "and", "not", "isinf", "(", "amou...
28.8
16.866667
def to_xdr_object(self):
    """Creates an XDR Operation object that represents this
    :class:`AllowTrust`.
    """
    trustor = account_xdr_object(self.trustor)
    length = len(self.asset_code)
    # Asset codes are at most 12 characters long.
    assert length <= 12
    # Codes up to 4 chars use the ALPHANUM4 variant, longer ones
    # ALPHANUM12; either way the code is NUL-padded to the full width.
    pad_length = 4 - length if length <= 4 else 12 - length
    asset_code = bytearray(self.asset_code, 'ascii') + b'\x00' * pad_length
    asset = Xdr.nullclass()
    if len(asset_code) == 4:
        asset.type = Xdr.const.ASSET_TYPE_CREDIT_ALPHANUM4
        asset.assetCode4 = asset_code
    else:
        asset.type = Xdr.const.ASSET_TYPE_CREDIT_ALPHANUM12
        asset.assetCode12 = asset_code
    allow_trust_op = Xdr.types.AllowTrustOp(trustor, asset, self.authorize)
    # Fill in the operation body and delegate envelope assembly to the
    # base Operation class.
    self.body.type = Xdr.const.ALLOW_TRUST
    self.body.allowTrustOp = allow_trust_op
    return super(AllowTrust, self).to_xdr_object()
[ "def", "to_xdr_object", "(", "self", ")", ":", "trustor", "=", "account_xdr_object", "(", "self", ".", "trustor", ")", "length", "=", "len", "(", "self", ".", "asset_code", ")", "assert", "length", "<=", "12", "pad_length", "=", "4", "-", "length", "if",...
42.25
16.5
def openflow_controller_connection_address_connection_port(self, **kwargs):
    """Auto Generated Code

    Builds the netconf ``config`` element selecting an OpenFlow
    controller by name and setting its connection port, then passes the
    element to the callback (``kwargs['callback']`` or the default
    ``self._callback``).
    """
    root = ET.Element("config")
    controller = ET.SubElement(
        root, "openflow-controller",
        xmlns="urn:brocade.com:mgmt:brocade-openflow")
    name_key = ET.SubElement(controller, "controller-name")
    name_key.text = kwargs.pop('controller_name')
    address = ET.SubElement(controller, "connection-address")
    port = ET.SubElement(address, "connection-port")
    port.text = kwargs.pop('connection_port')

    callback = kwargs.pop('callback', self._callback)
    return callback(root)
[ "def", "openflow_controller_connection_address_connection_port", "(", "self", ",", "*", "*", "kwargs", ")", ":", "config", "=", "ET", ".", "Element", "(", "\"config\"", ")", "openflow_controller", "=", "ET", ".", "SubElement", "(", "config", ",", "\"openflow-contr...
56.307692
27.307692
def checkout_deploy_branch(deploy_branch, canpush=True):
    """Check out the deploy branch, creating it if it doesn't exist.

    Returns ``canpush`` unchanged so callers can keep threading the
    push permission through.
    """
    # Make sure the (possibly new, .nojekyll-only) deploy branch exists.
    create_deploy_branch(deploy_branch, push=canpush)
    tracked = "doctr_remote/{}".format(deploy_branch)
    print("Checking out doctr working branch tracking", tracked)
    clear_working_branch()
    # Only pass --track when the remote branch actually exists; when we
    # cannot push, create_deploy_branch() above will not have created it
    # and --track would fail.
    if run(['git', 'rev-parse', '--verify', tracked], exit=False) == 0:
        checkout_args = ['--track', tracked]
    else:
        checkout_args = []
    run(['git', 'checkout', '-b', DOCTR_WORKING_BRANCH] + checkout_args)
    print("Done")
    return canpush
[ "def", "checkout_deploy_branch", "(", "deploy_branch", ",", "canpush", "=", "True", ")", ":", "# Create an empty branch with .nojekyll if it doesn't already exist", "create_deploy_branch", "(", "deploy_branch", ",", "push", "=", "canpush", ")", "remote_branch", "=", "\"doct...
43.368421
22.315789
def add_beads_stats(beads_table, beads_samples, mef_outputs=None):
    """
    Add stats fields to beads table.

    The following information is added to each row:

    - Notes (warnings, errors) resulting from the analysis
    - Number of Events
    - Acquisition Time (s)

    The following information is added for each row, for each channel in
    which MEF values have been specified:

    - Detector voltage (gain)
    - Amplification type
    - Bead model fitted parameters

    Parameters
    ----------
    beads_table : DataFrame
        Table specifying bead samples to analyze. For more information
        about the fields required in this table, please consult the
        module's documentation.
    beads_samples : list
        FCSData objects from which to calculate statistics.
        ``beads_samples[i]`` should correspond to ``beads_table.iloc[i]``.
    mef_outputs : list, optional
        A list with the intermediate results of the generation of the MEF
        transformation functions, as given by ``mef.get_transform_fxn()``.
        This is used to populate the fields ``<channel> Beads Model``,
        ``<channel> Beads Params. Names``, and
        ``<channel> Beads Params. Values``. If specified,
        ``mef_outputs[i]`` should correspond to ``beads_table.iloc[i]``.

    """
    # The index name is not preserved if beads_table is empty.
    # Save the index name for later
    beads_table_index_name = beads_table.index.name

    # Add per-row info
    notes = []
    n_events = []
    acq_time = []
    for beads_sample in beads_samples:
        # Check if sample is an exception, otherwise assume it's an FCSData
        if isinstance(beads_sample, ExcelUIException):
            # Print error message
            notes.append("ERROR: {}".format(str(beads_sample)))
            n_events.append(np.nan)
            acq_time.append(np.nan)
        else:
            notes.append('')
            n_events.append(beads_sample.shape[0])
            acq_time.append(beads_sample.acquisition_time)

    beads_table['Analysis Notes'] = notes
    beads_table['Number of Events'] = n_events
    beads_table['Acquisition Time (s)'] = acq_time

    # List of channels that require stats columns
    headers = list(beads_table.columns)
    stats_headers = [h for h in headers if re_mef_values.match(h)]
    stats_channels = [re_mef_values.match(h).group(1) for h in stats_headers]

    # Iterate through channels
    for header, channel in zip(stats_headers, stats_channels):
        # Add empty columns to table
        beads_table[channel + ' Detector Volt.'] = np.nan
        beads_table[channel + ' Amp. Type'] = ""
        if mef_outputs:
            beads_table[channel + ' Beads Model'] = ""
            beads_table[channel + ' Beads Params. Names'] = ""
            beads_table[channel + ' Beads Params. Values'] = ""
        # Iterate
        for i, row_id in enumerate(beads_table.index):
            # If error, skip
            if isinstance(beads_samples[i], ExcelUIException):
                continue
            # If MEF values are specified, calculate stats. If not, leave
            # empty.
            if pd.notnull(beads_table[header][row_id]):
                # Detector voltage
                # Dataframes, such as beads_table, are modified differently
                # depending on pandas' version.
                # NOTE(review): `set_value` was removed in pandas 1.0; the
                # `< 0.21` branch only matters for very old pandas.
                if packaging.version.parse(pd.__version__) \
                        < packaging.version.parse('0.21'):
                    beads_table.set_value(
                        row_id,
                        channel + ' Detector Volt.',
                        beads_samples[i].detector_voltage(channel))
                else:
                    beads_table.at[row_id, channel + ' Detector Volt.'] = \
                        beads_samples[i].detector_voltage(channel)

                # Amplification type
                if beads_samples[i].amplification_type(channel)[0]:
                    amplification_type = "Log"
                else:
                    amplification_type = "Linear"
                # Dataframes, such as beads_table, are modified differently
                # depending on pandas' version.
                if packaging.version.parse(pd.__version__) \
                        < packaging.version.parse('0.21'):
                    beads_table.set_value(row_id,
                                          channel + ' Amp. Type',
                                          amplification_type)
                else:
                    beads_table.at[row_id, channel + ' Amp. Type'] = \
                        amplification_type

                # Bead model and parameters
                # Only populate if mef_outputs has been provided
                if mef_outputs:
                    # Try to find the current channel among the mef'd
                    # channels. If successful, extract bead fitted
                    # parameters.
                    try:
                        mef_channel_index = mef_outputs[i]. \
                            mef_channels.index(channel)
                    except ValueError:
                        pass
                    else:
                        # Bead model
                        beads_model_str = mef_outputs[i]. \
                            fitting['beads_model_str'][mef_channel_index]
                        # Dataframes, such as beads_table, are modified
                        # differently depending on pandas' version.
                        if packaging.version.parse(pd.__version__) \
                                < packaging.version.parse('0.21'):
                            beads_table.set_value(row_id,
                                                  channel + ' Beads Model',
                                                  beads_model_str)
                        else:
                            beads_table.at[row_id,
                                           channel + ' Beads Model'] = \
                                beads_model_str

                        # Bead parameter names
                        params_names = mef_outputs[i]. \
                            fitting['beads_params_names'][mef_channel_index]
                        params_names_str = ", ".join([str(p)
                                                      for p in params_names])
                        # Dataframes, such as beads_table, are modified
                        # differently depending on pandas' version.
                        if packaging.version.parse(pd.__version__) \
                                < packaging.version.parse('0.21'):
                            beads_table.set_value(
                                row_id,
                                channel + ' Beads Params. Names',
                                params_names_str)
                        else:
                            beads_table.at[
                                row_id,
                                channel + ' Beads Params. Names'] = \
                                params_names_str

                        # Bead parameter values
                        params = mef_outputs[i]. \
                            fitting['beads_params'][mef_channel_index]
                        params_str = ", ".join([str(p) for p in params])
                        # Dataframes, such as beads_table, are modified
                        # differently depending on pandas' version.
                        if packaging.version.parse(pd.__version__) \
                                < packaging.version.parse('0.21'):
                            beads_table.set_value(
                                row_id,
                                channel + ' Beads Params. Values',
                                params_str)
                        else:
                            beads_table.at[
                                row_id,
                                channel + ' Beads Params. Values'] = \
                                params_str

    # Restore index name if table is empty
    if len(beads_table) == 0:
        beads_table.index.name = beads_table_index_name
[ "def", "add_beads_stats", "(", "beads_table", ",", "beads_samples", ",", "mef_outputs", "=", "None", ")", ":", "# The index name is not preserved if beads_table is empty.", "# Save the index name for later", "beads_table_index_name", "=", "beads_table", ".", "index", ".", "na...
44.455056
19.713483
def push(self):
    """Binds the app context to the current context."""
    # Reference count supports nested pushes of the same context; the
    # matching pop only tears down once the count reaches zero.
    self._refcnt += 1
    # Make this context the active one on the application context stack.
    _app_ctx_stack.push(self)
    # Notify signal subscribers that an app context was pushed.
    appcontext_pushed.send(self.app)
[ "def", "push", "(", "self", ")", ":", "self", ".", "_refcnt", "+=", "1", "_app_ctx_stack", ".", "push", "(", "self", ")", "appcontext_pushed", ".", "send", "(", "self", ".", "app", ")" ]
34.4
9.4
def save(self):
    """Flush the currently recorded observations to disk.

    Updates the SourceReading information for the recorded observations,
    writes them out, and closes the writer.

    @return: mpc_filename of the resulting save.
    """
    self.get_writer().flush()
    filename = self.get_writer().get_filename()
    self.get_writer().close()
    # Drop the writer so a fresh one is created on next use.
    self._writer = None
    return filename
[ "def", "save", "(", "self", ")", ":", "self", ".", "get_writer", "(", ")", ".", "flush", "(", ")", "mpc_filename", "=", "self", ".", "get_writer", "(", ")", ".", "get_filename", "(", ")", "self", ".", "get_writer", "(", ")", ".", "close", "(", ")",...
37.9
16.7
def run_file(self, debug=False):
    """Run script inside current interpreter or in a new one.

    Saves the current editor file first; aborts silently if saving
    fails.  When no stored run configuration exists for the file, a
    RunConfigOneDialog may be shown to collect one.  The resolved
    execution parameters are stored in ``self.__last_ec_exec`` and the
    actual execution is delegated to ``self.re_run_file()``.
    """
    editorstack = self.get_current_editorstack()
    if editorstack.save():
        editor = self.get_current_editor()
        fname = osp.abspath(self.get_current_filename())

        # Get fname's dirname before we escape the single and double
        # quotes (Fixes Issue #6771)
        dirname = osp.dirname(fname)

        # Escape single and double quotes in fname and dirname
        # (Fixes Issue #2158)
        fname = fname.replace("'", r"\'").replace('"', r'\"')
        dirname = dirname.replace("'", r"\'").replace('"', r'\"')

        runconf = get_run_configuration(fname)
        if runconf is None:
            # No stored configuration: build one, possibly interactively.
            dialog = RunConfigOneDialog(self)
            dialog.size_change.connect(lambda s: self.set_dialog_size(s))
            if self.dialog_size is not None:
                dialog.resize(self.dialog_size)
            dialog.setup(fname)
            if CONF.get('run', 'open_at_least_once',
                        not running_under_pytest()):
                # Open Run Config dialog at least once: the first time
                # a script is ever run in Spyder, so that the user may
                # see it at least once and be conscious that it exists
                show_dlg = True
                CONF.set('run', 'open_at_least_once', False)
            else:
                # Open Run Config dialog only
                # if ALWAYS_OPEN_FIRST_RUN_OPTION option is enabled
                show_dlg = CONF.get('run', ALWAYS_OPEN_FIRST_RUN_OPTION)
            if show_dlg and not dialog.exec_():
                # User cancelled the dialog: do not run anything.
                return
            runconf = dialog.get_configuration()

        # Unpack the execution parameters from the run configuration.
        args = runconf.get_arguments()
        python_args = runconf.get_python_arguments()
        interact = runconf.interact
        post_mortem = runconf.post_mortem
        current = runconf.current
        systerm = runconf.systerm
        clear_namespace = runconf.clear_namespace

        # Resolve the working directory according to the configuration.
        if runconf.file_dir:
            wdir = dirname
        elif runconf.cw_dir:
            wdir = ''
        elif osp.isdir(runconf.dir):
            wdir = runconf.dir
        else:
            wdir = ''

        python = True # Note: in the future, it may be useful to run
        # something in a terminal instead of a Python interp.
        self.__last_ec_exec = (fname, wdir, args, interact, debug,
                               python, python_args, current, systerm,
                               post_mortem, clear_namespace)
        self.re_run_file()
        if not interact and not debug:
            # If external console dockwidget is hidden, it will be
            # raised in top-level and so focus will be given to the
            # current external shell automatically
            # (see SpyderPluginWidget.visibility_changed method)
            editor.setFocus()
[ "def", "run_file", "(", "self", ",", "debug", "=", "False", ")", ":", "editorstack", "=", "self", ".", "get_current_editorstack", "(", ")", "if", "editorstack", ".", "save", "(", ")", ":", "editor", "=", "self", ".", "get_current_editor", "(", ")", "fnam...
46.477612
18.029851
def format_message(self, message):
    """Format *message* with :class:`Look` and return the pretty text."""
    return Look(message).pretty(display=False)
[ "def", "format_message", "(", "self", ",", "message", ")", ":", "look", "=", "Look", "(", "message", ")", "return", "look", ".", "pretty", "(", "display", "=", "False", ")" ]
38.25
4.75
def simple_cmd():
    """
    ``Deprecated``: Not better than ``fire`` -> pip install fire

    Parses ``-f/--func_name``, ``-a/--args`` and ``-k/--kwargs`` from the
    command line, looks the function up in this module's globals, and
    calls it.  With ``-i/--show`` it only prints the resolved arguments
    and the function's source instead of calling it.
    """
    parser = argparse.ArgumentParser(
        prog="Simple command-line function toolkit.",
        description="""Input function name and args and kwargs.
    python xxx.py main -a 1 2 3 -k a=1,b=2,c=3""",
    )
    parser.add_argument("-f", "--func_name", default="main")
    parser.add_argument("-a", "--args", dest="args", nargs="*")
    parser.add_argument("-k", "--kwargs", dest="kwargs")
    parser.add_argument(
        "-i",
        "-s",
        "--info",
        "--show",
        "--status",
        dest="show",
        action="store_true",
        help="show the args, kwargs and function's source code.",
    )
    params = parser.parse_args()
    func_name = params.func_name
    # Look the target function up among this module's globals.
    func = globals().get(func_name)
    if not (callable(func)):
        Config.utils_logger.warning("invalid func_name: %s" % func_name)
        return
    args = params.args or []
    kwargs = params.kwargs or {}
    if kwargs:
        # Parse "a=1,b=2" / "a:1;b:2" style strings into a dict
        # (separators: ',' or ';' between pairs, '=' or ':' within).
        items = [re.split("[:=]", i) for i in re.split("[,;]+", kwargs)]
        kwargs = dict(items)
    if params.show:
        from inspect import getsource
        Config.utils_logger.info("args: %s; kwargs: %s" % (args, kwargs))
        Config.utils_logger.info(getsource(func))
        return
    func(*args, **kwargs)
[ "def", "simple_cmd", "(", ")", ":", "parser", "=", "argparse", ".", "ArgumentParser", "(", "prog", "=", "\"Simple command-line function toolkit.\"", ",", "description", "=", "\"\"\"Input function name and args and kwargs.\n python xxx.py main -a 1 2 3 -k a=1,b=2,c=3\"\"\"", ...
32.775
17.85
def ls(sess_id_or_alias, path):
    """
    List files in a path of a running container.

    \b
    SESSID: Session ID or its alias given when creating the session.
    PATH: Path inside container.
    """
    # NOTE(review): the \b above is presumably click's no-rewrap marker;
    # this function looks like a click command body (decorator not shown).
    with Session() as session:
        try:
            print_wait('Retrieving list of files in "{}"...'.format(path))
            kernel = session.Kernel(sess_id_or_alias)
            result = kernel.list_files(path)
            # Abort with a non-zero exit code on server-reported errors.
            if 'errors' in result and result['errors']:
                print_fail(result['errors'])
                sys.exit(1)
            files = json.loads(result['files'])
            table = []
            headers = ['file name', 'size', 'modified', 'mode']
            for file in files:
                # 'mtime' is a Unix timestamp; render it human-readably.
                mdt = datetime.fromtimestamp(file['mtime'])
                mtime = mdt.strftime('%b %d %Y %H:%M:%S')
                row = [file['filename'], file['size'], mtime, file['mode']]
                table.append(row)
            print_done('Retrived.')
            print('Path in container:', result['abspath'], end='')
            print(tabulate(table, headers=headers))
        except Exception as e:
            print_error(e)
            sys.exit(1)
[ "def", "ls", "(", "sess_id_or_alias", ",", "path", ")", ":", "with", "Session", "(", ")", "as", "session", ":", "try", ":", "print_wait", "(", "'Retrieving list of files in \"{}\"...'", ".", "format", "(", "path", ")", ")", "kernel", "=", "session", ".", "...
36.09375
17.09375
def load_extra_data(backend, details, response, uid, user, social_user=None,
                    *args, **kwargs):
    """
    Load extra data from provider and store it on current UserSocialAuth
    extra_data field.

    NOTE(review): this appears to be a social-auth pipeline step -- it
    receives the pipeline kwargs (``is_new``, ``original_email``) and
    returns a dict merged into the pipeline state.
    """
    social_user = social_user or \
                  UserSocialAuth.get_social_auth(backend.name, uid)

    # create verified email address
    if kwargs['is_new'] and EMAIL_CONFIRMATION:
        from ..models import EmailAddress
        # check if email exist before creating it
        # we might be associating an exisiting user
        if EmailAddress.objects.filter(email=user.email).count() < 1:
            EmailAddress.objects.create(user=user,
                                        email=user.email,
                                        verified=True,
                                        primary=True)

    if social_user:
        extra_data = backend.extra_data(user, uid, response, details)
        if kwargs.get('original_email') and 'email' not in extra_data:
            extra_data['email'] = kwargs.get('original_email')
        # update extra data if anything has changed
        if extra_data and social_user.extra_data != extra_data:
            if social_user.extra_data:
                social_user.extra_data.update(extra_data)
            else:
                social_user.extra_data = extra_data
            social_user.save()
        # fetch additional data from facebook on creation
        if backend.name == 'facebook' and kwargs['is_new']:
            # Synchronous call to the Facebook Graph API using the stored
            # access token.
            response = json.loads(
                requests.get('https://graph.facebook.com/%s?access_token=%s'
                             % (extra_data['id'],
                                extra_data['access_token'])).content)
            # Best-effort enrichment: missing/odd fields are ignored.
            try:
                user.city, user.country = \
                    response.get('hometown').get('name').split(', ')
            except (AttributeError, TypeError):
                pass
            try:
                user.birth_date = datetime.strptime(
                    response.get('birthday'), '%m/%d/%Y').date()
            except (AttributeError, TypeError):
                pass
            user.save()
    return {'social_user': social_user}
[ "def", "load_extra_data", "(", "backend", ",", "details", ",", "response", ",", "uid", ",", "user", ",", "social_user", "=", "None", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "social_user", "=", "social_user", "or", "UserSocialAuth", ".", "get...
50
21.95
def get_version(): """ Read version from __init__.py """ version_regex = re.compile( '__version__\\s*=\\s*(?P<q>[\'"])(?P<version>\\d+(\\.\\d+)*(-(alpha|beta|rc)(\\.\\d+)?)?)(?P=q)' ) here = path.abspath(path.dirname(__file__)) init_location = path.join(here, "CHAID/__init__.py") with open(init_location) as init_file: for line in init_file: match = version_regex.search(line) if not match: raise Exception( "Couldn't read version information from '{0}'".format(init_location) ) return match.group('version')
[ "def", "get_version", "(", ")", ":", "version_regex", "=", "re", ".", "compile", "(", "'__version__\\\\s*=\\\\s*(?P<q>[\\'\"])(?P<version>\\\\d+(\\\\.\\\\d+)*(-(alpha|beta|rc)(\\\\.\\\\d+)?)?)(?P=q)'", ")", "here", "=", "path", ".", "abspath", "(", "path", ".", "dirname", ...
29.45
20.75
def _broadcast_indexes(self, key): """Prepare an indexing key for an indexing operation. Parameters ----------- key: int, slice, array, dict or tuple of integer, slices and arrays Any valid input for indexing. Returns ------- dims: tuple Dimension of the resultant variable. indexers: IndexingTuple subclass Tuple of integer, array-like, or slices to use when indexing self._data. The type of this argument indicates the type of indexing to perform, either basic, outer or vectorized. new_order : Optional[Sequence[int]] Optional reordering to do on the result of indexing. If not None, the first len(new_order) indexing should be moved to these positions. """ key = self._item_key_to_tuple(key) # key is a tuple # key is a tuple of full size key = indexing.expanded_indexer(key, self.ndim) # Convert a scalar Variable to an integer key = tuple( k.data.item() if isinstance(k, Variable) and k.ndim == 0 else k for k in key) # Convert a 0d-array to an integer key = tuple( k.item() if isinstance(k, np.ndarray) and k.ndim == 0 else k for k in key) if all(isinstance(k, BASIC_INDEXING_TYPES) for k in key): return self._broadcast_indexes_basic(key) self._validate_indexers(key) # Detect it can be mapped as an outer indexer # If all key is unlabeled, or # key can be mapped as an OuterIndexer. if all(not isinstance(k, Variable) for k in key): return self._broadcast_indexes_outer(key) # If all key is 1-dimensional and there are no duplicate labels, # key can be mapped as an OuterIndexer. dims = [] for k, d in zip(key, self.dims): if isinstance(k, Variable): if len(k.dims) > 1: return self._broadcast_indexes_vectorized(key) dims.append(k.dims[0]) elif not isinstance(k, integer_types): dims.append(d) if len(set(dims)) == len(dims): return self._broadcast_indexes_outer(key) return self._broadcast_indexes_vectorized(key)
[ "def", "_broadcast_indexes", "(", "self", ",", "key", ")", ":", "key", "=", "self", ".", "_item_key_to_tuple", "(", "key", ")", "# key is a tuple", "# key is a tuple of full size", "key", "=", "indexing", ".", "expanded_indexer", "(", "key", ",", "self", ".", ...
39.982456
17.666667
def encode_varint(v, f): """Encode integer `v` to file `f`. Parameters ---------- v: int Integer v >= 0. f: file Object containing a write method. Returns ------- int Number of bytes written. """ assert v >= 0 num_bytes = 0 while True: b = v % 0x80 v = v // 0x80 if v > 0: b = b | 0x80 f.write(FIELD_U8.pack(b)) num_bytes += 1 if v == 0: break return num_bytes
[ "def", "encode_varint", "(", "v", ",", "f", ")", ":", "assert", "v", ">=", "0", "num_bytes", "=", "0", "while", "True", ":", "b", "=", "v", "%", "0x80", "v", "=", "v", "//", "0x80", "if", "v", ">", "0", ":", "b", "=", "b", "|", "0x80", "f",...
15.125
23.84375
def update(self, statement): """ Modifies an entry in the database. Creates an entry if one does not exist. """ Statement = self.get_model('statement') Tag = self.get_model('tag') if statement is not None: session = self.Session() record = None if hasattr(statement, 'id') and statement.id is not None: record = session.query(Statement).get(statement.id) else: record = session.query(Statement).filter( Statement.text == statement.text, Statement.conversation == statement.conversation, ).first() # Create a new statement entry if one does not already exist if not record: record = Statement( text=statement.text, conversation=statement.conversation, persona=statement.persona ) # Update the response value record.in_response_to = statement.in_response_to record.created_at = statement.created_at record.search_text = self.tagger.get_bigram_pair_string(statement.text) if statement.in_response_to: record.search_in_response_to = self.tagger.get_bigram_pair_string(statement.in_response_to) for tag_name in statement.get_tags(): tag = session.query(Tag).filter_by(name=tag_name).first() if not tag: # Create the record tag = Tag(name=tag_name) record.tags.append(tag) session.add(record) self._session_finish(session)
[ "def", "update", "(", "self", ",", "statement", ")", ":", "Statement", "=", "self", ".", "get_model", "(", "'statement'", ")", "Tag", "=", "self", ".", "get_model", "(", "'tag'", ")", "if", "statement", "is", "not", "None", ":", "session", "=", "self",...
34.3
20.1
def canberra_distance_numpy(object1, object2): """! @brief Calculate Canberra distance between two objects using numpy. @param[in] object1 (array_like): The first vector. @param[in] object2 (array_like): The second vector. @return (float) Canberra distance between two objects. """ with numpy.errstate(divide='ignore', invalid='ignore'): result = numpy.divide(numpy.abs(object1 - object2), numpy.abs(object1) + numpy.abs(object2)) if len(result.shape) > 1: return numpy.sum(numpy.nan_to_num(result), axis=1).T else: return numpy.sum(numpy.nan_to_num(result))
[ "def", "canberra_distance_numpy", "(", "object1", ",", "object2", ")", ":", "with", "numpy", ".", "errstate", "(", "divide", "=", "'ignore'", ",", "invalid", "=", "'ignore'", ")", ":", "result", "=", "numpy", ".", "divide", "(", "numpy", ".", "abs", "(",...
36.588235
23.352941
def global_include(self, pattern): """ Include all files anywhere in the current directory that match the pattern. This is very inefficient on large file trees. """ if self.allfiles is None: self.findall() match = translate_pattern(os.path.join('**', pattern)) found = [f for f in self.allfiles if match.match(f)] self.extend(found) return bool(found)
[ "def", "global_include", "(", "self", ",", "pattern", ")", ":", "if", "self", ".", "allfiles", "is", "None", ":", "self", ".", "findall", "(", ")", "match", "=", "translate_pattern", "(", "os", ".", "path", ".", "join", "(", "'**'", ",", "pattern", "...
38.636364
13.909091
def parse(cls, src, dist=None): """Parse a single entry point from string `src` Entry point syntax follows the form:: name = some.module:some.attr [extra1,extra2] The entry name and module name are required, but the ``:attrs`` and ``[extras]`` parts are optional """ try: attrs = extras = () name,value = src.split('=',1) if '[' in value: value,extras = value.split('[',1) req = Requirement.parse("x["+extras) if req.specs: raise ValueError extras = req.extras if ':' in value: value,attrs = value.split(':',1) if not MODULE(attrs.rstrip()): raise ValueError attrs = attrs.rstrip().split('.') except ValueError: raise ValueError( "EntryPoint must be in 'name=module:attrs [extras]' format", src ) else: return cls(name.strip(), value.strip(), attrs, extras, dist)
[ "def", "parse", "(", "cls", ",", "src", ",", "dist", "=", "None", ")", ":", "try", ":", "attrs", "=", "extras", "=", "(", ")", "name", ",", "value", "=", "src", ".", "split", "(", "'='", ",", "1", ")", "if", "'['", "in", "value", ":", "value"...
35.366667
15.833333
def func_args(func): '''Basic function which returns a tuple of arguments of a function or method. ''' try: return tuple(inspect.signature(func).parameters) except: return tuple(inspect.getargspec(func).args)
[ "def", "func_args", "(", "func", ")", ":", "try", ":", "return", "tuple", "(", "inspect", ".", "signature", "(", "func", ")", ".", "parameters", ")", "except", ":", "return", "tuple", "(", "inspect", ".", "getargspec", "(", "func", ")", ".", "args", ...
29.625
25.375
def _create_application_version_request(app_metadata, application_id, template): """ Construct the request body to create application version. :param app_metadata: Object containing app metadata :type app_metadata: ApplicationMetadata :param application_id: The Amazon Resource Name (ARN) of the application :type application_id: str :param template: A packaged YAML or JSON SAM template :type template: str :return: SAR CreateApplicationVersion request body :rtype: dict """ app_metadata.validate(['semantic_version']) request = { 'ApplicationId': application_id, 'SemanticVersion': app_metadata.semantic_version, 'SourceCodeUrl': app_metadata.source_code_url, 'TemplateBody': template } return {k: v for k, v in request.items() if v}
[ "def", "_create_application_version_request", "(", "app_metadata", ",", "application_id", ",", "template", ")", ":", "app_metadata", ".", "validate", "(", "[", "'semantic_version'", "]", ")", "request", "=", "{", "'ApplicationId'", ":", "application_id", ",", "'Sema...
38.47619
16.857143
def list_files(self, id=None, path="/"): """ List files in an allocation directory. https://www.nomadproject.io/docs/http/client-fs-ls.html arguments: - id - path returns: list raises: - nomad.api.exceptions.BaseNomadException - nomad.api.exceptions.URLNotFoundNomadException """ if id: return self.request(id, params={"path": path}, method="get").json() else: return self.request(params={"path": path}, method="get").json()
[ "def", "list_files", "(", "self", ",", "id", "=", "None", ",", "path", "=", "\"/\"", ")", ":", "if", "id", ":", "return", "self", ".", "request", "(", "id", ",", "params", "=", "{", "\"path\"", ":", "path", "}", ",", "method", "=", "\"get\"", ")"...
34.058824
20.941176
def _from_stream(cls, stream, blob, filename=None): """ Return an instance of the |Image| subclass corresponding to the format of the image in *stream*. """ image_header = _ImageHeaderFactory(stream) if filename is None: filename = 'image.%s' % image_header.default_ext return cls(blob, filename, image_header)
[ "def", "_from_stream", "(", "cls", ",", "stream", ",", "blob", ",", "filename", "=", "None", ")", ":", "image_header", "=", "_ImageHeaderFactory", "(", "stream", ")", "if", "filename", "is", "None", ":", "filename", "=", "'image.%s'", "%", "image_header", ...
41.111111
10.222222
def select_by_ids(selname, idlist, selection_exists=False, chunksize=20, restrict=None): """Selection with a large number of ids concatenated into a selection list can cause buffer overflow in PyMOL. This function takes a selection name and and list of IDs (list of integers) as input and makes a careful step-by-step selection (packages of 20 by default)""" idlist = list(set(idlist)) # Remove duplicates if not selection_exists: cmd.select(selname, 'None') # Empty selection first idchunks = [idlist[i:i+chunksize] for i in range(0, len(idlist), chunksize)] for idchunk in idchunks: cmd.select(selname, '%s or (id %s)' % (selname, '+'.join(map(str, idchunk)))) if restrict is not None: cmd.select(selname, '%s and %s' % (selname, restrict))
[ "def", "select_by_ids", "(", "selname", ",", "idlist", ",", "selection_exists", "=", "False", ",", "chunksize", "=", "20", ",", "restrict", "=", "None", ")", ":", "idlist", "=", "list", "(", "set", "(", "idlist", ")", ")", "# Remove duplicates", "if", "n...
60.923077
22.615385
def calcPosition(self,parent_circle): ''' Position the circle tangent to the parent circle with the line connecting the centers of the two circles meeting the x axis at angle theta. ''' if r not in self: raise AttributeError("radius must be calculated before position.") if theta not in self: raise AttributeError("theta must be set before position can be calculated.") x_offset = math.cos(t_radians) * (parent_circle.r + self.r) y_offset = math.sin(t_radians) * (parent_circle.r + self.r) self.x = parent_circle.x + x_offset self.y = parent_circle.y + y_offset
[ "def", "calcPosition", "(", "self", ",", "parent_circle", ")", ":", "if", "r", "not", "in", "self", ":", "raise", "AttributeError", "(", "\"radius must be calculated before position.\"", ")", "if", "theta", "not", "in", "self", ":", "raise", "AttributeError", "(...
63.2
29
def read(self): """We have been called to read! As a consumer, continue to read for the length of the packet and then pass to the callback. """ data = self.dev.read() if len(data) == 0: self.log.warning("READ : Nothing received") return if data == b'\x00': self.log.warning("READ : Empty packet (Got \\x00)") return pkt = bytearray(data) data = self.dev.read(pkt[0]) pkt.extend(bytearray(data)) self.log.info("READ : %s" % self.format_packet(pkt)) self.do_callback(pkt) return pkt
[ "def", "read", "(", "self", ")", ":", "data", "=", "self", ".", "dev", ".", "read", "(", ")", "if", "len", "(", "data", ")", "==", "0", ":", "self", ".", "log", ".", "warning", "(", "\"READ : Nothing received\"", ")", "return", "if", "data", "==", ...
27.636364
20
def _control_transfer(self, data): """ Send device a control request with standard parameters and <data> as payload. """ LOGGER.debug('Ctrl transfer: %r', data) self._device.ctrl_transfer(bmRequestType=0x21, bRequest=0x09, wValue=0x0200, wIndex=0x01, data_or_wLength=data, timeout=TIMEOUT)
[ "def", "_control_transfer", "(", "self", ",", "data", ")", ":", "LOGGER", ".", "debug", "(", "'Ctrl transfer: %r'", ",", "data", ")", "self", ".", "_device", ".", "ctrl_transfer", "(", "bmRequestType", "=", "0x21", ",", "bRequest", "=", "0x09", ",", "wValu...
42.75
17.5
def _count_values(self): """Return dict mapping relevance level to sample index""" indices = {yi: [i] for i, yi in enumerate(self.y) if self.status[i]} return indices
[ "def", "_count_values", "(", "self", ")", ":", "indices", "=", "{", "yi", ":", "[", "i", "]", "for", "i", ",", "yi", "in", "enumerate", "(", "self", ".", "y", ")", "if", "self", ".", "status", "[", "i", "]", "}", "return", "indices" ]
37.4
22
def as_coeff_unit(self): """Factor the coefficient multiplying a unit For units that are multiplied by a constant dimensionless coefficient, returns a tuple containing the coefficient and a new unit object for the unmultiplied unit. Example ------- >>> import unyt as u >>> unit = (u.m**2/u.cm).simplify() >>> unit 100*m >>> unit.as_coeff_unit() (100.0, m) """ coeff, mul = self.expr.as_coeff_Mul() coeff = float(coeff) ret = Unit( mul, self.base_value / coeff, self.base_offset, self.dimensions, self.registry, ) return coeff, ret
[ "def", "as_coeff_unit", "(", "self", ")", ":", "coeff", ",", "mul", "=", "self", ".", "expr", ".", "as_coeff_Mul", "(", ")", "coeff", "=", "float", "(", "coeff", ")", "ret", "=", "Unit", "(", "mul", ",", "self", ".", "base_value", "/", "coeff", ","...
26.37037
18.333333
def _file_size(self, field): """ Returns the file size for given file field. Args: field (str): File field Returns: int. File size """ size = 0 try: handle = open(self._files[field], "r") size = os.fstat(handle.fileno()).st_size handle.close() except: size = 0 self._file_lengths[field] = size return self._file_lengths[field]
[ "def", "_file_size", "(", "self", ",", "field", ")", ":", "size", "=", "0", "try", ":", "handle", "=", "open", "(", "self", ".", "_files", "[", "field", "]", ",", "\"r\"", ")", "size", "=", "os", ".", "fstat", "(", "handle", ".", "fileno", "(", ...
25.277778
16.388889
def add_ref(self, ref): """ Add a reference to a memory data object. :param CodeReference ref: The reference. :return: None """ self.refs[ref.insn_addr].append(ref) self.data_addr_to_ref[ref.memory_data.addr].append(ref)
[ "def", "add_ref", "(", "self", ",", "ref", ")", ":", "self", ".", "refs", "[", "ref", ".", "insn_addr", "]", ".", "append", "(", "ref", ")", "self", ".", "data_addr_to_ref", "[", "ref", ".", "memory_data", ".", "addr", "]", ".", "append", "(", "ref...
29
14.2
def delete_floating_ip(kwargs=None, call=None): ''' Delete a floating IP .. versionadded:: 2016.3.0 CLI Examples: .. code-block:: bash salt-cloud -f delete_floating_ip my-digitalocean-config floating_ip='45.55.96.47' ''' if call != 'function': log.error( 'The delete_floating_ip function must be called with -f or --function.' ) return False if not kwargs: kwargs = {} if 'floating_ip' not in kwargs: log.error('A floating IP is required.') return False floating_ip = kwargs['floating_ip'] log.debug('Floating ip is %s', kwargs['floating_ip']) result = query(method='floating_ips', command=floating_ip, http_method='delete') return result
[ "def", "delete_floating_ip", "(", "kwargs", "=", "None", ",", "call", "=", "None", ")", ":", "if", "call", "!=", "'function'", ":", "log", ".", "error", "(", "'The delete_floating_ip function must be called with -f or --function.'", ")", "return", "False", "if", "...
23.424242
24.090909
def dict(self): """A dict that holds key/values for all of the properties in the object. :return: """ SKIP_KEYS = ('_source_table', '_dest_table', 'd_vid', 't_vid', 'st_id', 'dataset', 'hash', 'process_records') return OrderedDict([(k, getattr(self, k)) for k in self.properties if k not in SKIP_KEYS])
[ "def", "dict", "(", "self", ")", ":", "SKIP_KEYS", "=", "(", "'_source_table'", ",", "'_dest_table'", ",", "'d_vid'", ",", "'t_vid'", ",", "'st_id'", ",", "'dataset'", ",", "'hash'", ",", "'process_records'", ")", "return", "OrderedDict", "(", "[", "(", "k...
36.4
26.9
def maxlen(max_length, strict=False # type: bool ): """ 'Maximum length' validation_function generator. Returns a validation_function to check that len(x) <= max_length (strict=False, default) or len(x) < max_length (strict=True) :param max_length: maximum length for x :param strict: Boolean flag to switch between len(x) <= max_length (strict=False) and len(x) < max_length (strict=True) :return: """ if strict: def maxlen_(x): if len(x) < max_length: return True else: # raise Failure('maxlen: len(x) < ' + str(max_length) + ' does not hold for x=' + str(x)) raise TooLong(wrong_value=x, max_length=max_length, strict=True) else: def maxlen_(x): if len(x) <= max_length: return True else: # raise Failure('maxlen: len(x) <= ' + str(max_length) + ' does not hold for x=' + str(x)) raise TooLong(wrong_value=x, max_length=max_length, strict=False) maxlen_.__name__ = 'length_{}lesser_than_{}'.format('strictly_' if strict else '', max_length) return maxlen_
[ "def", "maxlen", "(", "max_length", ",", "strict", "=", "False", "# type: bool", ")", ":", "if", "strict", ":", "def", "maxlen_", "(", "x", ")", ":", "if", "len", "(", "x", ")", "<", "max_length", ":", "return", "True", "else", ":", "# raise Failure('m...
40.103448
28.103448
def next(self): """Get the next row in the page.""" self._parse_block() if self._remaining > 0: self._remaining -= 1 return six.next(self._iter_rows)
[ "def", "next", "(", "self", ")", ":", "self", ".", "_parse_block", "(", ")", "if", "self", ".", "_remaining", ">", "0", ":", "self", ".", "_remaining", "-=", "1", "return", "six", ".", "next", "(", "self", ".", "_iter_rows", ")" ]
31.333333
9.166667
def copy2(src, dst, metadata=None, retry_params=None): """Copy the file content from src to dst. Args: src: /bucket/filename dst: /bucket/filename metadata: a dict of metadata for this copy. If None, old metadata is copied. For example, {'x-goog-meta-foo': 'bar'}. retry_params: An api_utils.RetryParams for this call to GCS. If None, the default one is used. Raises: errors.AuthorizationError: if authorization failed. errors.NotFoundError: if an object that's expected to exist doesn't. """ common.validate_file_path(src) common.validate_file_path(dst) if metadata is None: metadata = {} copy_meta = 'COPY' else: copy_meta = 'REPLACE' metadata.update({'x-goog-copy-source': src, 'x-goog-metadata-directive': copy_meta}) api = storage_api._get_storage_api(retry_params=retry_params) status, resp_headers, content = api.put_object( api_utils._quote_filename(dst), headers=metadata) errors.check_status(status, [200], src, metadata, resp_headers, body=content)
[ "def", "copy2", "(", "src", ",", "dst", ",", "metadata", "=", "None", ",", "retry_params", "=", "None", ")", ":", "common", ".", "validate_file_path", "(", "src", ")", "common", ".", "validate_file_path", "(", "dst", ")", "if", "metadata", "is", "None", ...
34.366667
21.233333
def run_batch(args: dict) -> int: """Runs a batch operation for the given arguments""" batcher.run_project( project_directory=args.get('project_directory'), log_path=args.get('logging_path'), output_directory=args.get('output_directory'), shared_data=load_shared_data(args.get('shared_data_path')) ) return 0
[ "def", "run_batch", "(", "args", ":", "dict", ")", "->", "int", ":", "batcher", ".", "run_project", "(", "project_directory", "=", "args", ".", "get", "(", "'project_directory'", ")", ",", "log_path", "=", "args", ".", "get", "(", "'logging_path'", ")", ...
34.8
18.4
def make_logging_handlers_and_tools(self, multiproc=False): """Creates logging handlers and redirects stdout.""" log_stdout = self.log_stdout if sys.stdout is self._stdout_to_logger: # If we already redirected stdout we don't neet to redo it again log_stdout = False if self.log_config: if multiproc: proc_log_config = self._mp_config else: proc_log_config = self._sp_config if proc_log_config: if isinstance(proc_log_config, dict): new_dict = self._handle_dict_config(proc_log_config) dictConfig(new_dict) else: parser = self._handle_config_parsing(proc_log_config) memory_file = self._parser_to_string_io(parser) fileConfig(memory_file, disable_existing_loggers=False) if log_stdout: # Create a logging mock for stdout std_name, std_level = self.log_stdout stdout = StdoutToLogger(std_name, log_level=std_level) stdout.start() self._tools.append(stdout)
[ "def", "make_logging_handlers_and_tools", "(", "self", ",", "multiproc", "=", "False", ")", ":", "log_stdout", "=", "self", ".", "log_stdout", "if", "sys", ".", "stdout", "is", "self", ".", "_stdout_to_logger", ":", "# If we already redirected stdout we don't neet to ...
38.533333
19.666667
def unsubscribe(self, request, *args, **kwargs): """ Performs the unsubscribe action. """ self.object = self.get_object() self.object.subscribers.remove(request.user) messages.success(self.request, self.success_message) return HttpResponseRedirect(self.get_success_url())
[ "def", "unsubscribe", "(", "self", ",", "request", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "self", ".", "object", "=", "self", ".", "get_object", "(", ")", "self", ".", "object", ".", "subscribers", ".", "remove", "(", "request", ".", ...
51
10
def records( self ): """ Returns the record set for the current settings of this browser. :return <orb.RecordSet> """ if ( self.isGroupingActive() ): self._records.setGroupBy(self.currentGrouping()) else: self._records.setGroupBy(None) return self._records
[ "def", "records", "(", "self", ")", ":", "if", "(", "self", ".", "isGroupingActive", "(", ")", ")", ":", "self", ".", "_records", ".", "setGroupBy", "(", "self", ".", "currentGrouping", "(", ")", ")", "else", ":", "self", ".", "_records", ".", "setGr...
31.727273
13.545455
def to_ufos( font, include_instances=False, family_name=None, propagate_anchors=True, ufo_module=defcon, minimize_glyphs_diffs=False, generate_GDEF=True, store_editor_state=True, ): """Take a GSFont object and convert it into one UFO per master. Takes in data as Glyphs.app-compatible classes, as documented at https://docu.glyphsapp.com/ If include_instances is True, also returns the parsed instance data. If family_name is provided, the master UFOs will be given this name and only instances with this name will be returned. If generate_GDEF is True, write a `table GDEF {...}` statement in the UFO's features.fea, containing GlyphClassDef and LigatureCaretByPos. """ builder = UFOBuilder( font, ufo_module=ufo_module, family_name=family_name, propagate_anchors=propagate_anchors, minimize_glyphs_diffs=minimize_glyphs_diffs, generate_GDEF=generate_GDEF, store_editor_state=store_editor_state, ) result = list(builder.masters) if include_instances: return result, builder.instance_data return result
[ "def", "to_ufos", "(", "font", ",", "include_instances", "=", "False", ",", "family_name", "=", "None", ",", "propagate_anchors", "=", "True", ",", "ufo_module", "=", "defcon", ",", "minimize_glyphs_diffs", "=", "False", ",", "generate_GDEF", "=", "True", ",",...
29.552632
20.657895
def pretty_memory_info(): ''' Pretty format memory info. Returns ------- str Memory info. Examples -------- >>> pretty_memory_info() '5MB memory usage' ''' process = psutil.Process(os.getpid()) return '{}MB memory usage'.format(int(process.memory_info().rss / 2**20))
[ "def", "pretty_memory_info", "(", ")", ":", "process", "=", "psutil", ".", "Process", "(", "os", ".", "getpid", "(", ")", ")", "return", "'{}MB memory usage'", ".", "format", "(", "int", "(", "process", ".", "memory_info", "(", ")", ".", "rss", "/", "2...
19.375
25.375
def hashify_targets(targets: list, build_context) -> list: """Return sorted hashes of `targets`.""" return sorted(build_context.targets[target_name].hash(build_context) for target_name in listify(targets))
[ "def", "hashify_targets", "(", "targets", ":", "list", ",", "build_context", ")", "->", "list", ":", "return", "sorted", "(", "build_context", ".", "targets", "[", "target_name", "]", ".", "hash", "(", "build_context", ")", "for", "target_name", "in", "listi...
57
16
def build_variables(self, variable_placeholders): """ :param variables: The list of vertices/edges to return :return: a dict where the keys are the names of the variables to return, the values are the JSON of the properties of these variables """ variables = self.__substitute_names_in_list(variable_placeholders) attributes = {} for i, variable in enumerate(variables): placeholder_name = variable_placeholders[i] try: vertices = self.g.vs.select(name=variable) attributes[placeholder_name] = vertices[0].attributes() except: pass for i, variable in enumerate(variables): placeholder_name = variable_placeholders[i] try: edges = self.g.es.select(name=variable) edge_attr = edges[0].attributes() attributes[placeholder_name] = edge_attr except: pass for i, variable in enumerate(variables): placeholder_name = variable_placeholders[i] try: attributes[placeholder_name] = self.match_info[placeholder_name] except: pass return attributes
[ "def", "build_variables", "(", "self", ",", "variable_placeholders", ")", ":", "variables", "=", "self", ".", "__substitute_names_in_list", "(", "variable_placeholders", ")", "attributes", "=", "{", "}", "for", "i", ",", "variable", "in", "enumerate", "(", "vari...
41.833333
18.9
def read_from_list_with_ids(self, lines): """ Read text fragments from a given list of tuples:: [(id_1, text_1), (id_2, text_2), ..., (id_n, text_n)]. :param list lines: the list of ``[id, text]`` fragments (see above) """ self.log(u"Reading text fragments from list with ids") self._create_text_fragments([(line[0], [line[1]]) for line in lines])
[ "def", "read_from_list_with_ids", "(", "self", ",", "lines", ")", ":", "self", ".", "log", "(", "u\"Reading text fragments from list with ids\"", ")", "self", ".", "_create_text_fragments", "(", "[", "(", "line", "[", "0", "]", ",", "[", "line", "[", "1", "]...
40
21.8
def _update_example(self, request): """Updates the specified example. Args: request: A request that should contain 'index' and 'example'. Returns: An empty response. """ if request.method != 'POST': return http_util.Respond(request, {'error': 'invalid non-POST request'}, 'application/json', code=405) example_json = request.form['example'] index = int(request.form['index']) if index >= len(self.examples): return http_util.Respond(request, {'error': 'invalid index provided'}, 'application/json', code=400) new_example = self.example_class() json_format.Parse(example_json, new_example) self.examples[index] = new_example self.updated_example_indices.add(index) self.generate_sprite([ex.SerializeToString() for ex in self.examples]) return http_util.Respond(request, {}, 'application/json')
[ "def", "_update_example", "(", "self", ",", "request", ")", ":", "if", "request", ".", "method", "!=", "'POST'", ":", "return", "http_util", ".", "Respond", "(", "request", ",", "{", "'error'", ":", "'invalid non-POST request'", "}", ",", "'application/json'",...
39.73913
16.869565
def _validate(self): """ Ensure that our percentile bounds are well-formed. """ if not 0.0 <= self._min_percentile < self._max_percentile <= 100.0: raise BadPercentileBounds( min_percentile=self._min_percentile, max_percentile=self._max_percentile, upper_bound=100.0 ) return super(PercentileFilter, self)._validate()
[ "def", "_validate", "(", "self", ")", ":", "if", "not", "0.0", "<=", "self", ".", "_min_percentile", "<", "self", ".", "_max_percentile", "<=", "100.0", ":", "raise", "BadPercentileBounds", "(", "min_percentile", "=", "self", ".", "_min_percentile", ",", "ma...
38.090909
13.545455
def atlas_node_add_callback(atlas_state, callback_name, callback): """ Add a callback to the initialized atlas state """ if callback_name == 'store_zonefile': atlas_state['zonefile_crawler'].set_store_zonefile_callback(callback) else: raise ValueError("Unrecognized callback {}".format(callback_name))
[ "def", "atlas_node_add_callback", "(", "atlas_state", ",", "callback_name", ",", "callback", ")", ":", "if", "callback_name", "==", "'store_zonefile'", ":", "atlas_state", "[", "'zonefile_crawler'", "]", ".", "set_store_zonefile_callback", "(", "callback", ")", "else"...
36.666667
19.777778