repository_name
stringlengths
5
67
func_path_in_repository
stringlengths
4
234
func_name
stringlengths
0
314
whole_func_string
stringlengths
52
3.87M
language
stringclasses
6 values
func_code_string
stringlengths
52
3.87M
func_code_tokens
listlengths
15
672k
func_documentation_string
stringlengths
1
47.2k
func_documentation_tokens
listlengths
1
3.92k
split_name
stringclasses
1 value
func_code_url
stringlengths
85
339
taddeus/wspy
python_digest.py
calculate_request_digest
def calculate_request_digest(method, partial_digest, digest_response=None,
                             uri=None, nonce=None, nonce_count=None,
                             client_nonce=None):
    '''
    Calculate the 'response' value of a client Digest authentication request.

    Requires the 'partial_digest' calculated from the realm, username, and
    password. Either call it with a digest_response to use the values from an
    authentication request, or pass the individual parameters (i.e. to
    generate an authentication request) -- never both.

    :raises Exception: if both a digest_response and one or more individual
        parameters were supplied, or if neither a digest_response nor the
        complete set of individual parameters was supplied.
    :returns: hex-encoded MD5 request digest string (RFC 2617, qop='auth')
    '''
    # hashlib replaces the long-deprecated 'md5' module (removed in Python 3);
    # hashlib.md5 is available since Python 2.5 and produces identical output.
    import hashlib
    if digest_response:
        if uri or nonce or nonce_count or client_nonce:
            raise Exception("Both digest_response and one or more "
                            "individual parameters were sent.")
        uri = digest_response.uri
        nonce = digest_response.nonce
        nonce_count = digest_response.nc
        client_nonce = digest_response.cnonce
    # nonce_count may legitimately be 0 (falsy), so compare against None.
    elif not (uri and nonce and (nonce_count is not None) and client_nonce):
        raise Exception("Neither digest_response nor all individual "
                        "parameters were sent.")
    # HA2 per RFC 2617 for qop='auth': MD5(method:digest-uri).
    ha2 = hashlib.md5(("%s:%s" % (method, uri)).encode('utf-8')).hexdigest()
    data = "%s:%s:%s:%s:%s" % (nonce, "%08x" % nonce_count, client_nonce,
                               'auth', ha2)
    # KD = MD5(HA1:nonce:nc:cnonce:qop:HA2), where HA1 is partial_digest.
    return hashlib.md5(
        ("%s:%s" % (partial_digest, data)).encode('utf-8')).hexdigest()
python
def calculate_request_digest(method, partial_digest, digest_response=None, uri=None, nonce=None, nonce_count=None, client_nonce=None): ''' Calculates a value for the 'response' value of the client authentication request. Requires the 'partial_digest' calculated from the realm, username, and password. Either call it with a digest_response to use the values from an authentication request, or pass the individual parameters (i.e. to generate an authentication request). ''' if digest_response: if uri or nonce or nonce_count or client_nonce: raise Exception("Both digest_response and one or more " "individual parameters were sent.") uri = digest_response.uri nonce = digest_response.nonce nonce_count = digest_response.nc client_nonce=digest_response.cnonce elif not (uri and nonce and (nonce_count != None) and client_nonce): raise Exception("Neither digest_response nor all individual parameters were sent.") ha2 = md5.md5("%s:%s" % (method, uri)).hexdigest() data = "%s:%s:%s:%s:%s" % (nonce, "%08x" % nonce_count, client_nonce, 'auth', ha2) kd = md5.md5("%s:%s" % (partial_digest, data)).hexdigest() return kd
[ "def", "calculate_request_digest", "(", "method", ",", "partial_digest", ",", "digest_response", "=", "None", ",", "uri", "=", "None", ",", "nonce", "=", "None", ",", "nonce_count", "=", "None", ",", "client_nonce", "=", "None", ")", ":", "if", "digest_respo...
Calculates a value for the 'response' value of the client authentication request. Requires the 'partial_digest' calculated from the realm, username, and password. Either call it with a digest_response to use the values from an authentication request, or pass the individual parameters (i.e. to generate an authentication request).
[ "Calculates", "a", "value", "for", "the", "response", "value", "of", "the", "client", "authentication", "request", ".", "Requires", "the", "partial_digest", "calculated", "from", "the", "realm", "username", "and", "password", "." ]
train
https://github.com/taddeus/wspy/blob/13f054a72442bb8dcc37b0ac011cab6025830d66/python_digest.py#L339-L362
taddeus/wspy
python_digest.py
get_nonce_timestamp
def get_nonce_timestamp(nonce):
    '''
    Extract the timestamp from a nonce.

    To be sure the timestamp was generated by this site, make sure you
    validate the nonce using validate_nonce().

    :returns: the timestamp as a float, or None when the nonce does not have
        three colon-separated fields or its first field is not numeric
    '''
    fields = nonce.split(':', 2)
    if len(fields) != 3:
        return None
    try:
        return float(fields[0])
    except ValueError:
        return None
python
def get_nonce_timestamp(nonce): ''' Extract the timestamp from a Nonce. To be sure the timestamp was generated by this site, make sure you validate the nonce using validate_nonce(). ''' components = nonce.split(':',2) if not len(components) == 3: return None try: return float(components[0]) except ValueError: return None
[ "def", "get_nonce_timestamp", "(", "nonce", ")", ":", "components", "=", "nonce", ".", "split", "(", "':'", ",", "2", ")", "if", "not", "len", "(", "components", ")", "==", "3", ":", "return", "None", "try", ":", "return", "float", "(", "components", ...
Extract the timestamp from a Nonce. To be sure the timestamp was generated by this site, make sure you validate the nonce using validate_nonce().
[ "Extract", "the", "timestamp", "from", "a", "Nonce", ".", "To", "be", "sure", "the", "timestamp", "was", "generated", "by", "this", "site", "make", "sure", "you", "validate", "the", "nonce", "using", "validate_nonce", "()", "." ]
train
https://github.com/taddeus/wspy/blob/13f054a72442bb8dcc37b0ac011cab6025830d66/python_digest.py#L364-L376
taddeus/wspy
python_digest.py
calculate_nonce
def calculate_nonce(timestamp, secret, salt=None):
    '''
    Generate a nonce using the provided timestamp, secret, and salt.

    If the salt is not provided (and one should only be provided when
    validating a nonce), a random 4-character hex salt is generated in order
    to ensure that two simultaneous requests do not generate identical nonces.

    :returns: "timestamp:salt:md5(timestamp:salt:secret)" string
    '''
    # hashlib replaces the long-deprecated 'md5' module (removed in Python 3).
    import hashlib
    if not salt:
        # NOTE(review): random is not cryptographically strong; the secret,
        # not the salt, carries the security here, so this looks acceptable.
        salt = ''.join([random.choice('0123456789ABCDEF') for _ in range(4)])
    digest = hashlib.md5(
        ("%s:%s:%s" % (timestamp, salt, secret)).encode('utf-8')).hexdigest()
    return "%s:%s:%s" % (timestamp, salt, digest)
python
def calculate_nonce(timestamp, secret, salt=None): ''' Generate a nonce using the provided timestamp, secret, and salt. If the salt is not provided, (and one should only be provided when validating a nonce) one will be generated randomly in order to ensure that two simultaneous requests do not generate identical nonces. ''' if not salt: salt = ''.join([random.choice('0123456789ABCDEF') for x in range(4)]) return "%s:%s:%s" % (timestamp, salt, md5.md5("%s:%s:%s" % (timestamp, salt, secret)).hexdigest())
[ "def", "calculate_nonce", "(", "timestamp", ",", "secret", ",", "salt", "=", "None", ")", ":", "if", "not", "salt", ":", "salt", "=", "''", ".", "join", "(", "[", "random", ".", "choice", "(", "'0123456789ABCDEF'", ")", "for", "x", "in", "range", "("...
Generate a nonce using the provided timestamp, secret, and salt. If the salt is not provided, (and one should only be provided when validating a nonce) one will be generated randomly in order to ensure that two simultaneous requests do not generate identical nonces.
[ "Generate", "a", "nonce", "using", "the", "provided", "timestamp", "secret", "and", "salt", ".", "If", "the", "salt", "is", "not", "provided", "(", "and", "one", "should", "only", "be", "provided", "when", "validating", "a", "nonce", ")", "one", "will", ...
train
https://github.com/taddeus/wspy/blob/13f054a72442bb8dcc37b0ac011cab6025830d66/python_digest.py#L378-L387
taddeus/wspy
python_digest.py
build_authorization_request
def build_authorization_request(username, method, uri, nonce_count,
                                digest_challenge=None, realm=None, nonce=None,
                                opaque=None, password=None,
                                request_digest=None, client_nonce=None):
    '''
    Build an authorization request that may be sent as the value of the
    'Authorization' header in an HTTP request.

    Either a digest_challenge object (as returned from
    parse_digest_challenge), a raw challenge header string, or the required
    component parameters (realm, nonce, opaque) must be provided. The
    nonce_count should be the last used nonce_count plus one. Either the
    password or the request_digest should be provided - if provided, the
    password will be used to generate a request digest. The client_nonce is
    optional - if not provided, a random value will be generated.

    :raises Exception: on conflicting or incomplete parameter combinations,
        or when a challenge header string cannot be parsed.
    :returns: 'Digest ...' header value string
    '''
    if not client_nonce:
        client_nonce = ''.join([random.choice('0123456789ABCDEF')
                                for x in range(32)])
    if digest_challenge and (realm or nonce or opaque):
        # BUG FIX: the two adjacent literals previously concatenated to
        # "...opaquewere sent." (missing space between them).
        raise Exception("Both digest_challenge and one or more of realm, "
                        "nonce, and opaque were sent.")
    if digest_challenge:
        # Accept either an already-parsed challenge object or the raw
        # 'WWW-Authenticate' header string.
        if isinstance(digest_challenge, types.StringType):
            digest_challenge_header = digest_challenge
            digest_challenge = parse_digest_challenge(digest_challenge_header)
            if not digest_challenge:
                raise Exception(
                    "The provided digest challenge header could not be "
                    "parsed: %s" % digest_challenge_header)
        realm = digest_challenge.realm
        nonce = digest_challenge.nonce
        opaque = digest_challenge.opaque
    elif not (realm and nonce and opaque):
        raise Exception("Either digest_challenge or realm, nonce, and opaque "
                        "must be sent.")
    if password and request_digest:
        raise Exception("Both password and calculated request_digest were sent.")
    elif not request_digest:
        if not password:
            raise Exception("Either password or calculated request_digest "
                            "must be provided.")
        partial_digest = calculate_partial_digest(username, realm, password)
        request_digest = calculate_request_digest(
            method, partial_digest, uri=uri, nonce=nonce,
            nonce_count=nonce_count, client_nonce=client_nonce)
    return 'Digest %s' % format_parts(
        username=username, realm=realm, nonce=nonce, uri=uri,
        response=request_digest, algorithm='MD5', opaque=opaque, qop='auth',
        nc='%08x' % nonce_count, cnonce=client_nonce)
python
def build_authorization_request(username, method, uri, nonce_count, digest_challenge=None, realm=None, nonce=None, opaque=None, password=None, request_digest=None, client_nonce=None): ''' Builds an authorization request that may be sent as the value of the 'Authorization' header in an HTTP request. Either a digest_challenge object (as returned from parse_digest_challenge) or its required component parameters (nonce, realm, opaque) must be provided. The nonce_count should be the last used nonce_count plus one. Either the password or the request_digest should be provided - if provided, the password will be used to generate a request digest. The client_nonce is optional - if not provided, a random value will be generated. ''' if not client_nonce: client_nonce = ''.join([random.choice('0123456789ABCDEF') for x in range(32)]) if digest_challenge and (realm or nonce or opaque): raise Exception("Both digest_challenge and one or more of realm, nonce, and opaque" "were sent.") if digest_challenge: if isinstance(digest_challenge, types.StringType): digest_challenge_header = digest_challenge digest_challenge = parse_digest_challenge(digest_challenge_header) if not digest_challenge: raise Exception("The provided digest challenge header could not be parsed: %s" % digest_challenge_header) realm = digest_challenge.realm nonce = digest_challenge.nonce opaque = digest_challenge.opaque elif not (realm and nonce and opaque): raise Exception("Either digest_challenge or realm, nonce, and opaque must be sent.") if password and request_digest: raise Exception("Both password and calculated request_digest were sent.") elif not request_digest: if not password: raise Exception("Either password or calculated request_digest must be provided.") partial_digest = calculate_partial_digest(username, realm, password) request_digest = calculate_request_digest(method, partial_digest, uri=uri, nonce=nonce, nonce_count=nonce_count, client_nonce=client_nonce) return 'Digest %s' % 
format_parts(username=username, realm=realm, nonce=nonce, uri=uri, response=request_digest, algorithm='MD5', opaque=opaque, qop='auth', nc='%08x' % nonce_count, cnonce=client_nonce)
[ "def", "build_authorization_request", "(", "username", ",", "method", ",", "uri", ",", "nonce_count", ",", "digest_challenge", "=", "None", ",", "realm", "=", "None", ",", "nonce", "=", "None", ",", "opaque", "=", "None", ",", "password", "=", "None", ",",...
Builds an authorization request that may be sent as the value of the 'Authorization' header in an HTTP request. Either a digest_challenge object (as returned from parse_digest_challenge) or its required component parameters (nonce, realm, opaque) must be provided. The nonce_count should be the last used nonce_count plus one. Either the password or the request_digest should be provided - if provided, the password will be used to generate a request digest. The client_nonce is optional - if not provided, a random value will be generated.
[ "Builds", "an", "authorization", "request", "that", "may", "be", "sent", "as", "the", "value", "of", "the", "Authorization", "header", "in", "an", "HTTP", "request", "." ]
train
https://github.com/taddeus/wspy/blob/13f054a72442bb8dcc37b0ac011cab6025830d66/python_digest.py#L389-L438
taddeus/wspy
python_digest.py
parse_digest_response
def parse_digest_response(digest_response_string):
    '''
    Parse the parameters of a Digest response.

    The input is a comma separated list of token=(token|quoted-string). See
    RFCs 2616 and 2617 for details. Known issue: this implementation will
    fail if there are commas embedded in quoted-strings.

    :returns: parsed response object, or None when the input is invalid or
        does not use the MD5 algorithm with qop='auth'
    '''
    parts = parse_parts(digest_response_string, defaults={'algorithm': 'MD5'})
    if not _check_required_parts(parts, _REQUIRED_DIGEST_RESPONSE_PARTS):
        return None
    nc = parts['nc']
    # nc must be a non-empty string of hex digits before conversion.
    if not nc or not all(c in '0123456789abcdefABCDEF' for c in nc):
        return None
    parts['nc'] = int(nc, 16)
    digest_response = _build_object_from_parts(
        parts, _REQUIRED_DIGEST_RESPONSE_PARTS)
    if (digest_response.algorithm, digest_response.qop) != ('MD5', 'auth'):
        return None
    return digest_response
python
def parse_digest_response(digest_response_string): ''' Parse the parameters of a Digest response. The input is a comma separated list of token=(token|quoted-string). See RFCs 2616 and 2617 for details. Known issue: this implementation will fail if there are commas embedded in quoted-strings. ''' parts = parse_parts(digest_response_string, defaults={'algorithm': 'MD5'}) if not _check_required_parts(parts, _REQUIRED_DIGEST_RESPONSE_PARTS): return None if not parts['nc'] or [c for c in parts['nc'] if not c in '0123456789abcdefABCDEF']: return None parts['nc'] = int(parts['nc'], 16) digest_response = _build_object_from_parts(parts, _REQUIRED_DIGEST_RESPONSE_PARTS) if ('MD5', 'auth') != (digest_response.algorithm, digest_response.qop): return None return digest_response
[ "def", "parse_digest_response", "(", "digest_response_string", ")", ":", "parts", "=", "parse_parts", "(", "digest_response_string", ",", "defaults", "=", "{", "'algorithm'", ":", "'MD5'", "}", ")", "if", "not", "_check_required_parts", "(", "parts", ",", "_REQUIR...
Parse the parameters of a Digest response. The input is a comma separated list of token=(token|quoted-string). See RFCs 2616 and 2617 for details. Known issue: this implementation will fail if there are commas embedded in quoted-strings.
[ "Parse", "the", "parameters", "of", "a", "Digest", "response", ".", "The", "input", "is", "a", "comma", "separated", "list", "of", "token", "=", "(", "token|quoted", "-", "string", ")", ".", "See", "RFCs", "2616", "and", "2617", "for", "details", "." ]
train
https://github.com/taddeus/wspy/blob/13f054a72442bb8dcc37b0ac011cab6025830d66/python_digest.py#L456-L476
taddeus/wspy
python_digest.py
parse_digest_challenge
def parse_digest_challenge(authentication_header):
    '''
    Parse the value of a 'WWW-Authenticate' header.

    :returns: an object with properties corresponding to each of the
        recognized parameters in the header, or None when the header is not
        a valid MD5/auth digest challenge
    '''
    if not is_digest_challenge(authentication_header):
        return None
    # Skip the leading 'Digest ' prefix (7 characters) before parsing.
    parts = parse_parts(authentication_header[7:],
                        defaults={'algorithm': 'MD5', 'stale': 'false'})
    if not _check_required_parts(parts, _REQUIRED_DIGEST_CHALLENGE_PARTS):
        return None
    parts['stale'] = parts['stale'].lower() == 'true'
    digest_challenge = _build_object_from_parts(
        parts, _REQUIRED_DIGEST_CHALLENGE_PARTS)
    if (digest_challenge.algorithm, digest_challenge.qop) != ('MD5', 'auth'):
        return None
    return digest_challenge
python
def parse_digest_challenge(authentication_header): ''' Parses the value of a 'WWW-Authenticate' header. Returns an object with properties corresponding to each of the recognized parameters in the header. ''' if not is_digest_challenge(authentication_header): return None parts = parse_parts(authentication_header[7:], defaults={'algorithm': 'MD5', 'stale': 'false'}) if not _check_required_parts(parts, _REQUIRED_DIGEST_CHALLENGE_PARTS): return None parts['stale'] = parts['stale'].lower() == 'true' digest_challenge = _build_object_from_parts(parts, _REQUIRED_DIGEST_CHALLENGE_PARTS) if ('MD5', 'auth') != (digest_challenge.algorithm, digest_challenge.qop): return None return digest_challenge
[ "def", "parse_digest_challenge", "(", "authentication_header", ")", ":", "if", "not", "is_digest_challenge", "(", "authentication_header", ")", ":", "return", "None", "parts", "=", "parse_parts", "(", "authentication_header", "[", "7", ":", "]", ",", "defaults", "...
Parses the value of a 'WWW-Authenticate' header. Returns an object with properties corresponding to each of the recognized parameters in the header.
[ "Parses", "the", "value", "of", "a", "WWW", "-", "Authenticate", "header", ".", "Returns", "an", "object", "with", "properties", "corresponding", "to", "each", "of", "the", "recognized", "parameters", "in", "the", "header", "." ]
train
https://github.com/taddeus/wspy/blob/13f054a72442bb8dcc37b0ac011cab6025830d66/python_digest.py#L502-L521
aganezov/bg
bg/breakpoint_graph.py
BreakpointGraph.__edges
def __edges(self, nbunch=None, keys=False):
    """ Iterates over edges in current :class:`BreakpointGraph` instance.

    Returns a generator over the edges in current :class:`BreakpointGraph`
    instance producing :class:`bg.edge.BGEdge` instances wrapping around
    information in the underlying MultiGraph object.

    :param nbunch: a vertex to iterate over edges outgoing from; if not
        provided, iteration over all edges is performed.
    :type nbunch: any hashable python object
    :param keys: a flag to indicate if information about unique edge's ids
        has to be returned alongside with edge
    :type keys: ``Boolean``
    :return: generator over edges in current :class:`BreakpointGraph`
    :rtype: ``generator``
    """
    for tail, head, edge_key, attrs in self.bg.edges(nbunch=nbunch,
                                                     data=True, keys=True):
        wrapped = BGEdge(vertex1=tail, vertex2=head,
                         multicolor=attrs["attr_dict"]["multicolor"],
                         data=attrs["attr_dict"]["data"])
        if keys:
            yield wrapped, edge_key
        else:
            yield wrapped
python
def __edges(self, nbunch=None, keys=False): """ Iterates over edges in current :class:`BreakpointGraph` instance. Returns a generator over the edges in current :class:`BreakpointGraph` instance producing instances of :class:`bg.edge.BGEdge` instances wrapping around information in underlying MultiGraph object. :param nbunch: a vertex to iterate over edges outgoing from, if not provided,iteration over all edges is performed. :type nbuch: any hashable python object :param keys: a flag to indicate if information about unique edge's ids has to be returned alongside with edge :type keys: ``Boolean`` :return: generator over edges in current :class:`BreakpointGraph` :rtype: ``generator`` """ for v1, v2, key, data in self.bg.edges(nbunch=nbunch, data=True, keys=True): bgedge = BGEdge(vertex1=v1, vertex2=v2, multicolor=data["attr_dict"]["multicolor"], data=data["attr_dict"]["data"]) if not keys: yield bgedge else: yield bgedge, key
[ "def", "__edges", "(", "self", ",", "nbunch", "=", "None", ",", "keys", "=", "False", ")", ":", "for", "v1", ",", "v2", ",", "key", ",", "data", "in", "self", ".", "bg", ".", "edges", "(", "nbunch", "=", "nbunch", ",", "data", "=", "True", ",",...
Iterates over edges in current :class:`BreakpointGraph` instance. Returns a generator over the edges in current :class:`BreakpointGraph` instance producing instances of :class:`bg.edge.BGEdge` instances wrapping around information in underlying MultiGraph object. :param nbunch: a vertex to iterate over edges outgoing from, if not provided,iteration over all edges is performed. :type nbuch: any hashable python object :param keys: a flag to indicate if information about unique edge's ids has to be returned alongside with edge :type keys: ``Boolean`` :return: generator over edges in current :class:`BreakpointGraph` :rtype: ``generator``
[ "Iterates", "over", "edges", "in", "current", ":", "class", ":", "BreakpointGraph", "instance", "." ]
train
https://github.com/aganezov/bg/blob/1ec758193441e49e7b34e0da09571480f4c24455/bg/breakpoint_graph.py#L80-L98
aganezov/bg
bg/breakpoint_graph.py
BreakpointGraph.edges
def edges(self, nbunch=None, keys=False):
    """ Iterates over edges in current :class:`BreakpointGraph` instance.

    Proxies a call to :meth:`BreakpointGraph._BreakpointGraph__edges`.

    :param nbunch: a vertex to iterate over edges outgoing from; if not
        provided, iteration over all edges is performed.
    :type nbunch: any hashable python object
    :param keys: a flag to indicate if information about unique edge's ids
        has to be returned alongside with edge
    :type keys: ``Boolean``
    :return: generator over edges in current :class:`BreakpointGraph`
    :rtype: ``generator``
    """
    edge_iterator = self.__edges(nbunch=nbunch, keys=keys)
    for item in edge_iterator:
        yield item
python
def edges(self, nbunch=None, keys=False): """ Iterates over edges in current :class:`BreakpointGraph` instance. Proxies a call to :meth:`BreakpointGraph._BreakpointGraph__edges`. :param nbunch: a vertex to iterate over edges outgoing from, if not provided,iteration over all edges is performed. :type nbuch: any hashable python object :param keys: a flag to indicate if information about unique edge's ids has to be returned alongside with edge :type keys: ``Boolean`` :return: generator over edges in current :class:`BreakpointGraph` :rtype: ``generator`` """ for entry in self.__edges(nbunch=nbunch, keys=keys): yield entry
[ "def", "edges", "(", "self", ",", "nbunch", "=", "None", ",", "keys", "=", "False", ")", ":", "for", "entry", "in", "self", ".", "__edges", "(", "nbunch", "=", "nbunch", ",", "keys", "=", "keys", ")", ":", "yield", "entry" ]
Iterates over edges in current :class:`BreakpointGraph` instance. Proxies a call to :meth:`BreakpointGraph._BreakpointGraph__edges`. :param nbunch: a vertex to iterate over edges outgoing from, if not provided,iteration over all edges is performed. :type nbuch: any hashable python object :param keys: a flag to indicate if information about unique edge's ids has to be returned alongside with edge :type keys: ``Boolean`` :return: generator over edges in current :class:`BreakpointGraph` :rtype: ``generator``
[ "Iterates", "over", "edges", "in", "current", ":", "class", ":", "BreakpointGraph", "instance", "." ]
train
https://github.com/aganezov/bg/blob/1ec758193441e49e7b34e0da09571480f4c24455/bg/breakpoint_graph.py#L100-L113
aganezov/bg
bg/breakpoint_graph.py
BreakpointGraph.add_edge
def add_edge(self, vertex1, vertex2, multicolor, merge=True, data=None):
    """ Creates a new :class:`bg.edge.BGEdge` object from supplied
    information and adds it to current instance of :class:`BreakpointGraph`.

    Proxies a call to :meth:`BreakpointGraph._BreakpointGraph__add_bgedge`.

    :param vertex1: first vertex instance out of two in current
        :class:`BreakpointGraph`
    :type vertex1: any hashable object
    :param vertex2: second vertex instance out of two in current
        :class:`BreakpointGraph`
    :type vertex2: any hashable object
    :param multicolor: an information about multi-colors of added edge
    :type multicolor: :class:`bg.multicolor.Multicolor`
    :param merge: a flag to merge supplied information from multi-color
        perspective into a first existing edge between two supplied vertices
    :type merge: ``Boolean``
    :return: ``None``, performs inplace changes
    """
    new_edge = BGEdge(vertex1=vertex1, vertex2=vertex2,
                      multicolor=multicolor, data=data)
    self.__add_bgedge(new_edge, merge=merge)
python
def add_edge(self, vertex1, vertex2, multicolor, merge=True, data=None): """ Creates a new :class:`bg.edge.BGEdge` object from supplied information and adds it to current instance of :class:`BreakpointGraph`. Proxies a call to :meth:`BreakpointGraph._BreakpointGraph__add_bgedge` method. :param vertex1: first vertex instance out of two in current :class:`BreakpointGraph` :type vertex1: any hashable object :param vertex2: second vertex instance out of two in current :class:`BreakpointGraph` :type vertex2: any hashable object :param multicolor: an information about multi-colors of added edge :type multicolor: :class:`bg.multicolor.Multicolor` :param merge: a flag to merge supplied information from multi-color perspective into a first existing edge between two supplied vertices :type merge: ``Boolean`` :return: ``None``, performs inplace changes """ self.__add_bgedge(BGEdge(vertex1=vertex1, vertex2=vertex2, multicolor=multicolor, data=data), merge=merge)
[ "def", "add_edge", "(", "self", ",", "vertex1", ",", "vertex2", ",", "multicolor", ",", "merge", "=", "True", ",", "data", "=", "None", ")", ":", "self", ".", "__add_bgedge", "(", "BGEdge", "(", "vertex1", "=", "vertex1", ",", "vertex2", "=", "vertex2"...
Creates a new :class:`bg.edge.BGEdge` object from supplied information and adds it to current instance of :class:`BreakpointGraph`. Proxies a call to :meth:`BreakpointGraph._BreakpointGraph__add_bgedge` method. :param vertex1: first vertex instance out of two in current :class:`BreakpointGraph` :type vertex1: any hashable object :param vertex2: second vertex instance out of two in current :class:`BreakpointGraph` :type vertex2: any hashable object :param multicolor: an information about multi-colors of added edge :type multicolor: :class:`bg.multicolor.Multicolor` :param merge: a flag to merge supplied information from multi-color perspective into a first existing edge between two supplied vertices :type merge: ``Boolean`` :return: ``None``, performs inplace changes
[ "Creates", "a", "new", ":", "class", ":", "bg", ".", "edge", ".", "BGEdge", "object", "from", "supplied", "information", "and", "adds", "it", "to", "current", "instance", "of", ":", "class", ":", "BreakpointGraph", "." ]
train
https://github.com/aganezov/bg/blob/1ec758193441e49e7b34e0da09571480f4c24455/bg/breakpoint_graph.py#L124-L139
aganezov/bg
bg/breakpoint_graph.py
BreakpointGraph.__add_bgedge
def __add_bgedge(self, bgedge, merge=True):
    """ Adds supplied :class:`bg.edge.BGEdge` object to current instance of
    :class:`BreakpointGraph`.

    Checks that vertices in supplied :class:`bg.edge.BGEdge` instance
    actually are present in current :class:`BreakpointGraph` if **merge**
    option is provided. Otherwise a new edge is added to the current
    :class:`BreakpointGraph`.

    :param bgedge: instance of :class:`bg.edge.BGEdge` information from
        which is to be added to current :class:`BreakpointGraph`
    :type bgedge: :class:`bg.edge.BGEdge`
    :param merge: a flag to merge supplied information from multi-color
        perspective into a first existing edge between two supplied vertices
    :type merge: ``Boolean``
    :return: ``None``, performs inplace changes
    """
    if bgedge.vertex1 in self.bg and bgedge.vertex2 in self.bg[bgedge.vertex1] and merge:
        # Merge into the existing parallel edge with the smallest key.
        key = min(self.bg[bgedge.vertex1][bgedge.vertex2].keys())
        self.bg[bgedge.vertex1][bgedge.vertex2][key]["attr_dict"]["multicolor"] += bgedge.multicolor
        # NOTE(review): merging resets the stored edge data to {} instead of
        # keeping or combining it with bgedge.data -- confirm this is intended.
        self.bg[bgedge.vertex1][bgedge.vertex2][key]["attr_dict"]["data"] = {}
    else:
        # New edge: deepcopy the multicolor so later in-place += merges on
        # the graph's copy cannot mutate the caller's Multicolor object.
        self.bg.add_edge(bgedge.vertex1, bgedge.vertex2, attr_dict={"multicolor": deepcopy(bgedge.multicolor), "data": bgedge.data})
    # Any edge addition/merge invalidates the cached overall set of colors.
    self.cache_valid["overall_set_of_colors"] = False
python
def __add_bgedge(self, bgedge, merge=True): """ Adds supplied :class:`bg.edge.BGEdge` object to current instance of :class:`BreakpointGraph`. Checks that vertices in supplied :class:`bg.edge.BGEdge` instance actually are present in current :class:`BreakpointGraph` if **merge** option of provided. Otherwise a new edge is added to the current :class:`BreakpointGraph`. :param bgedge: instance of :class:`bg.edge.BGEdge` infromation form which is to be added to current :class:`BreakpointGraph` :type bgedge: :class:`bg.edge.BGEdge` :param merge: a flag to merge supplied information from multi-color perspective into a first existing edge between two supplied vertices :type merge: ``Boolean`` :return: ``None``, performs inplace changes """ if bgedge.vertex1 in self.bg and bgedge.vertex2 in self.bg[bgedge.vertex1] and merge: key = min(self.bg[bgedge.vertex1][bgedge.vertex2].keys()) self.bg[bgedge.vertex1][bgedge.vertex2][key]["attr_dict"]["multicolor"] += bgedge.multicolor self.bg[bgedge.vertex1][bgedge.vertex2][key]["attr_dict"]["data"] = {} else: self.bg.add_edge(bgedge.vertex1, bgedge.vertex2, attr_dict={"multicolor": deepcopy(bgedge.multicolor), "data": bgedge.data}) self.cache_valid["overall_set_of_colors"] = False
[ "def", "__add_bgedge", "(", "self", ",", "bgedge", ",", "merge", "=", "True", ")", ":", "if", "bgedge", ".", "vertex1", "in", "self", ".", "bg", "and", "bgedge", ".", "vertex2", "in", "self", ".", "bg", "[", "bgedge", ".", "vertex1", "]", "and", "m...
Adds supplied :class:`bg.edge.BGEdge` object to current instance of :class:`BreakpointGraph`. Checks that vertices in supplied :class:`bg.edge.BGEdge` instance actually are present in current :class:`BreakpointGraph` if **merge** option of provided. Otherwise a new edge is added to the current :class:`BreakpointGraph`. :param bgedge: instance of :class:`bg.edge.BGEdge` infromation form which is to be added to current :class:`BreakpointGraph` :type bgedge: :class:`bg.edge.BGEdge` :param merge: a flag to merge supplied information from multi-color perspective into a first existing edge between two supplied vertices :type merge: ``Boolean`` :return: ``None``, performs inplace changes
[ "Adds", "supplied", ":", "class", ":", "bg", ".", "edge", ".", "BGEdge", "object", "to", "current", "instance", "of", ":", "class", ":", "BreakpointGraph", "." ]
train
https://github.com/aganezov/bg/blob/1ec758193441e49e7b34e0da09571480f4c24455/bg/breakpoint_graph.py#L141-L159
aganezov/bg
bg/breakpoint_graph.py
BreakpointGraph.add_bgedge
def add_bgedge(self, bgedge, merge=True):
    """ Adds supplied :class:`bg.edge.BGEdge` object to current instance of
    :class:`BreakpointGraph`.

    Proxies a call to :meth:`BreakpointGraph._BreakpointGraph__add_bgedge`.

    :param bgedge: instance of :class:`bg.edge.BGEdge` information from
        which is to be added to current :class:`BreakpointGraph`
    :type bgedge: :class:`bg.edge.BGEdge`
    :param merge: a flag to merge supplied information from multi-color
        perspective into a first existing edge between two supplied vertices
    :type merge: ``Boolean``
    :return: ``None``, performs inplace changes
    """
    self.__add_bgedge(bgedge=bgedge, merge=merge)
python
def add_bgedge(self, bgedge, merge=True): """ Adds supplied :class:`bg.edge.BGEdge` object to current instance of :class:`BreakpointGraph`. Proxies a call to :meth:`BreakpointGraph._BreakpointGraph__add_bgedge` method. :param bgedge: instance of :class:`bg.edge.BGEdge` infromation form which is to be added to current :class:`BreakpointGraph` :type bgedge: :class:`bg.edge.BGEdge` :param merge: a flag to merge supplied information from multi-color perspective into a first existing edge between two supplied vertices :type merge: ``Boolean`` :return: ``None``, performs inplace changes """ self.__add_bgedge(bgedge=bgedge, merge=merge)
[ "def", "add_bgedge", "(", "self", ",", "bgedge", ",", "merge", "=", "True", ")", ":", "self", ".", "__add_bgedge", "(", "bgedge", "=", "bgedge", ",", "merge", "=", "merge", ")" ]
Adds supplied :class:`bg.edge.BGEdge` object to current instance of :class:`BreakpointGraph`. Proxies a call to :meth:`BreakpointGraph._BreakpointGraph__add_bgedge` method. :param bgedge: instance of :class:`bg.edge.BGEdge` infromation form which is to be added to current :class:`BreakpointGraph` :type bgedge: :class:`bg.edge.BGEdge` :param merge: a flag to merge supplied information from multi-color perspective into a first existing edge between two supplied vertices :type merge: ``Boolean`` :return: ``None``, performs inplace changes
[ "Adds", "supplied", ":", "class", ":", "bg", ".", "edge", ".", "BGEdge", "object", "to", "current", "instance", "of", ":", "class", ":", "BreakpointGraph", "." ]
train
https://github.com/aganezov/bg/blob/1ec758193441e49e7b34e0da09571480f4c24455/bg/breakpoint_graph.py#L161-L172
aganezov/bg
bg/breakpoint_graph.py
BreakpointGraph.__get_vertex_by_name
def __get_vertex_by_name(self, vertex_name): """ Obtains a vertex object by supplied label Returns a :class:`bg.vertex.BGVertex` or its subclass instance :param vertex_name: a vertex label it is identified by. :type vertex_name: any hashable python object. ``str`` expected. :return: vertex with supplied label if present in current :class:`BreakpointGraph`, ``None`` otherwise """ vertex_class = BGVertex.get_vertex_class_from_vertex_name(vertex_name) data = vertex_name.split(BlockVertex.NAME_SEPARATOR) root_name, data = data[0], data[1:] if issubclass(vertex_class, TaggedVertex): tags = [entry.split(TaggedVertex.TAG_SEPARATOR) for entry in data] for tag_entry in tags: if len(tag_entry) == 1: tag_entry.append(None) elif len(tag_entry) > 2: tag_entry[1:] = [TaggedVertex.TAG_SEPARATOR.join(tag_entry[1:])] result = vertex_class(root_name) for tag, value in tags: if tag == InfinityVertex.NAME_SUFFIX and issubclass(vertex_class, InfinityVertex): continue result.add_tag(tag, value) else: result = vertex_class(root_name) if result in self.bg: adjacencies = self.bg[result] for key, _ in adjacencies.items(): for ref_key, values in self.bg[key].items(): if ref_key == result: return ref_key return list(self.bg[result].keys())[0] return None
python
def __get_vertex_by_name(self, vertex_name): """ Obtains a vertex object by supplied label Returns a :class:`bg.vertex.BGVertex` or its subclass instance :param vertex_name: a vertex label it is identified by. :type vertex_name: any hashable python object. ``str`` expected. :return: vertex with supplied label if present in current :class:`BreakpointGraph`, ``None`` otherwise """ vertex_class = BGVertex.get_vertex_class_from_vertex_name(vertex_name) data = vertex_name.split(BlockVertex.NAME_SEPARATOR) root_name, data = data[0], data[1:] if issubclass(vertex_class, TaggedVertex): tags = [entry.split(TaggedVertex.TAG_SEPARATOR) for entry in data] for tag_entry in tags: if len(tag_entry) == 1: tag_entry.append(None) elif len(tag_entry) > 2: tag_entry[1:] = [TaggedVertex.TAG_SEPARATOR.join(tag_entry[1:])] result = vertex_class(root_name) for tag, value in tags: if tag == InfinityVertex.NAME_SUFFIX and issubclass(vertex_class, InfinityVertex): continue result.add_tag(tag, value) else: result = vertex_class(root_name) if result in self.bg: adjacencies = self.bg[result] for key, _ in adjacencies.items(): for ref_key, values in self.bg[key].items(): if ref_key == result: return ref_key return list(self.bg[result].keys())[0] return None
[ "def", "__get_vertex_by_name", "(", "self", ",", "vertex_name", ")", ":", "vertex_class", "=", "BGVertex", ".", "get_vertex_class_from_vertex_name", "(", "vertex_name", ")", "data", "=", "vertex_name", ".", "split", "(", "BlockVertex", ".", "NAME_SEPARATOR", ")", ...
Obtains a vertex object by supplied label Returns a :class:`bg.vertex.BGVertex` or its subclass instance :param vertex_name: a vertex label it is identified by. :type vertex_name: any hashable python object. ``str`` expected. :return: vertex with supplied label if present in current :class:`BreakpointGraph`, ``None`` otherwise
[ "Obtains", "a", "vertex", "object", "by", "supplied", "label" ]
train
https://github.com/aganezov/bg/blob/1ec758193441e49e7b34e0da09571480f4c24455/bg/breakpoint_graph.py#L174-L208
aganezov/bg
bg/breakpoint_graph.py
BreakpointGraph.__get_edge_by_two_vertices
def __get_edge_by_two_vertices(self, vertex1, vertex2, key=None): """ Returns an instance of :class:`bg.edge.BBGEdge` edge between to supplied vertices (if ``key`` is supplied, returns a :class:`bg.edge.BBGEdge` instance about specified edge). Checks that both specified vertices are in current :class:`BreakpointGraph` and then depending on ``key`` argument, creates a new :class:`bg.edge.BBGEdge` instance and incorporates respective multi-color information into it. :param vertex1: first vertex instance out of two in current :class:`BreakpointGraph` :type vertex1: any hashable object :param vertex2: second vertex instance out of two in current :class:`BreakpointGraph` :type vertex2: any hashable object :param key: unique identifier of edge of interested to be retrieved from current :class:`BreakpointGraph` :type key: any python object. ``None`` or ``int`` is expected :return: edge between two specified edges respecting a ``key`` argument. :rtype: :class:`bg.edge.BGEdge` """ if vertex1 in self.bg and vertex2 in self.bg[vertex1]: if key is None: key = min(self.bg[vertex1][vertex2]) return BGEdge(vertex1=vertex1, vertex2=vertex2, multicolor=self.bg[vertex1][vertex2][key]["attr_dict"]["multicolor"], data=self.bg[vertex1][vertex2][key]["attr_dict"]["data"]) return None
python
def __get_edge_by_two_vertices(self, vertex1, vertex2, key=None): """ Returns an instance of :class:`bg.edge.BBGEdge` edge between to supplied vertices (if ``key`` is supplied, returns a :class:`bg.edge.BBGEdge` instance about specified edge). Checks that both specified vertices are in current :class:`BreakpointGraph` and then depending on ``key`` argument, creates a new :class:`bg.edge.BBGEdge` instance and incorporates respective multi-color information into it. :param vertex1: first vertex instance out of two in current :class:`BreakpointGraph` :type vertex1: any hashable object :param vertex2: second vertex instance out of two in current :class:`BreakpointGraph` :type vertex2: any hashable object :param key: unique identifier of edge of interested to be retrieved from current :class:`BreakpointGraph` :type key: any python object. ``None`` or ``int`` is expected :return: edge between two specified edges respecting a ``key`` argument. :rtype: :class:`bg.edge.BGEdge` """ if vertex1 in self.bg and vertex2 in self.bg[vertex1]: if key is None: key = min(self.bg[vertex1][vertex2]) return BGEdge(vertex1=vertex1, vertex2=vertex2, multicolor=self.bg[vertex1][vertex2][key]["attr_dict"]["multicolor"], data=self.bg[vertex1][vertex2][key]["attr_dict"]["data"]) return None
[ "def", "__get_edge_by_two_vertices", "(", "self", ",", "vertex1", ",", "vertex2", ",", "key", "=", "None", ")", ":", "if", "vertex1", "in", "self", ".", "bg", "and", "vertex2", "in", "self", ".", "bg", "[", "vertex1", "]", ":", "if", "key", "is", "No...
Returns an instance of :class:`bg.edge.BBGEdge` edge between to supplied vertices (if ``key`` is supplied, returns a :class:`bg.edge.BBGEdge` instance about specified edge). Checks that both specified vertices are in current :class:`BreakpointGraph` and then depending on ``key`` argument, creates a new :class:`bg.edge.BBGEdge` instance and incorporates respective multi-color information into it. :param vertex1: first vertex instance out of two in current :class:`BreakpointGraph` :type vertex1: any hashable object :param vertex2: second vertex instance out of two in current :class:`BreakpointGraph` :type vertex2: any hashable object :param key: unique identifier of edge of interested to be retrieved from current :class:`BreakpointGraph` :type key: any python object. ``None`` or ``int`` is expected :return: edge between two specified edges respecting a ``key`` argument. :rtype: :class:`bg.edge.BGEdge`
[ "Returns", "an", "instance", "of", ":", "class", ":", "bg", ".", "edge", ".", "BBGEdge", "edge", "between", "to", "supplied", "vertices", "(", "if", "key", "is", "supplied", "returns", "a", ":", "class", ":", "bg", ".", "edge", ".", "BBGEdge", "instanc...
train
https://github.com/aganezov/bg/blob/1ec758193441e49e7b34e0da09571480f4c24455/bg/breakpoint_graph.py#L222-L242
aganezov/bg
bg/breakpoint_graph.py
BreakpointGraph.get_edge_by_two_vertices
def get_edge_by_two_vertices(self, vertex1, vertex2, key=None): """ Returns an instance of :class:`bg.edge.BBGEdge` edge between to supplied vertices (if ``key`` is supplied, returns a :class:`bg.edge.BBGEdge` instance about specified edge). Proxies a call to :meth:`BreakpointGraph._BreakpointGraph__get_edge_by_two_vertices`. :param vertex1: first vertex instance out of two in current :class:`BreakpointGraph` :type vertex1: any hashable object :param vertex2: second vertex instance out of two in current :class:`BreakpointGraph` :type vertex2: any hashable object :param key: unique identifier of edge of interested to be retrieved from current :class:`BreakpointGraph` :type key: any python object. ``None`` or ``int`` is expected :return: edge between two specified edges respecting a ``key`` argument. :rtype: :class:`bg.edge.BGEdge` """ return self.__get_edge_by_two_vertices(vertex1=vertex1, vertex2=vertex2, key=key)
python
def get_edge_by_two_vertices(self, vertex1, vertex2, key=None): """ Returns an instance of :class:`bg.edge.BBGEdge` edge between to supplied vertices (if ``key`` is supplied, returns a :class:`bg.edge.BBGEdge` instance about specified edge). Proxies a call to :meth:`BreakpointGraph._BreakpointGraph__get_edge_by_two_vertices`. :param vertex1: first vertex instance out of two in current :class:`BreakpointGraph` :type vertex1: any hashable object :param vertex2: second vertex instance out of two in current :class:`BreakpointGraph` :type vertex2: any hashable object :param key: unique identifier of edge of interested to be retrieved from current :class:`BreakpointGraph` :type key: any python object. ``None`` or ``int`` is expected :return: edge between two specified edges respecting a ``key`` argument. :rtype: :class:`bg.edge.BGEdge` """ return self.__get_edge_by_two_vertices(vertex1=vertex1, vertex2=vertex2, key=key)
[ "def", "get_edge_by_two_vertices", "(", "self", ",", "vertex1", ",", "vertex2", ",", "key", "=", "None", ")", ":", "return", "self", ".", "__get_edge_by_two_vertices", "(", "vertex1", "=", "vertex1", ",", "vertex2", "=", "vertex2", ",", "key", "=", "key", ...
Returns an instance of :class:`bg.edge.BBGEdge` edge between to supplied vertices (if ``key`` is supplied, returns a :class:`bg.edge.BBGEdge` instance about specified edge). Proxies a call to :meth:`BreakpointGraph._BreakpointGraph__get_edge_by_two_vertices`. :param vertex1: first vertex instance out of two in current :class:`BreakpointGraph` :type vertex1: any hashable object :param vertex2: second vertex instance out of two in current :class:`BreakpointGraph` :type vertex2: any hashable object :param key: unique identifier of edge of interested to be retrieved from current :class:`BreakpointGraph` :type key: any python object. ``None`` or ``int`` is expected :return: edge between two specified edges respecting a ``key`` argument. :rtype: :class:`bg.edge.BGEdge`
[ "Returns", "an", "instance", "of", ":", "class", ":", "bg", ".", "edge", ".", "BBGEdge", "edge", "between", "to", "supplied", "vertices", "(", "if", "key", "is", "supplied", "returns", "a", ":", "class", ":", "bg", ".", "edge", ".", "BBGEdge", "instanc...
train
https://github.com/aganezov/bg/blob/1ec758193441e49e7b34e0da09571480f4c24455/bg/breakpoint_graph.py#L244-L258
aganezov/bg
bg/breakpoint_graph.py
BreakpointGraph.__get_edges_by_vertex
def __get_edges_by_vertex(self, vertex, keys=False): """ Iterates over edges that are incident to supplied vertex argument in current :class:`BreakpointGraph` Checks that the supplied vertex argument exists in underlying MultiGraph object as a vertex, then iterates over all edges that are incident to it. Wraps each yielded object into :class:`bg.edge.BGEdge` object. :param vertex: a vertex object in current :class:`BreakpointGraph` object :type vertex: any hashable object. :class:`bg.vertex.BGVertex` object is expected. :param keys: a flag to indicate if information about unique edge's ids has to be returned alongside with edge :type keys: ``Boolean`` :return: generator over edges (tuples ``edge, edge_id`` if keys specified) in current :class:`BreakpointGraph` wrapped in :class:`bg.vertex.BGEVertex` :rtype: ``generator`` """ if vertex in self.bg: for vertex2, edges in self.bg[vertex].items(): for key, data in self.bg[vertex][vertex2].items(): bg_edge = BGEdge(vertex1=vertex, vertex2=vertex2, multicolor=data["attr_dict"]["multicolor"], data=data["attr_dict"]["data"]) if keys: yield bg_edge, key else: yield bg_edge
python
def __get_edges_by_vertex(self, vertex, keys=False): """ Iterates over edges that are incident to supplied vertex argument in current :class:`BreakpointGraph` Checks that the supplied vertex argument exists in underlying MultiGraph object as a vertex, then iterates over all edges that are incident to it. Wraps each yielded object into :class:`bg.edge.BGEdge` object. :param vertex: a vertex object in current :class:`BreakpointGraph` object :type vertex: any hashable object. :class:`bg.vertex.BGVertex` object is expected. :param keys: a flag to indicate if information about unique edge's ids has to be returned alongside with edge :type keys: ``Boolean`` :return: generator over edges (tuples ``edge, edge_id`` if keys specified) in current :class:`BreakpointGraph` wrapped in :class:`bg.vertex.BGEVertex` :rtype: ``generator`` """ if vertex in self.bg: for vertex2, edges in self.bg[vertex].items(): for key, data in self.bg[vertex][vertex2].items(): bg_edge = BGEdge(vertex1=vertex, vertex2=vertex2, multicolor=data["attr_dict"]["multicolor"], data=data["attr_dict"]["data"]) if keys: yield bg_edge, key else: yield bg_edge
[ "def", "__get_edges_by_vertex", "(", "self", ",", "vertex", ",", "keys", "=", "False", ")", ":", "if", "vertex", "in", "self", ".", "bg", ":", "for", "vertex2", ",", "edges", "in", "self", ".", "bg", "[", "vertex", "]", ".", "items", "(", ")", ":",...
Iterates over edges that are incident to supplied vertex argument in current :class:`BreakpointGraph` Checks that the supplied vertex argument exists in underlying MultiGraph object as a vertex, then iterates over all edges that are incident to it. Wraps each yielded object into :class:`bg.edge.BGEdge` object. :param vertex: a vertex object in current :class:`BreakpointGraph` object :type vertex: any hashable object. :class:`bg.vertex.BGVertex` object is expected. :param keys: a flag to indicate if information about unique edge's ids has to be returned alongside with edge :type keys: ``Boolean`` :return: generator over edges (tuples ``edge, edge_id`` if keys specified) in current :class:`BreakpointGraph` wrapped in :class:`bg.vertex.BGEVertex` :rtype: ``generator``
[ "Iterates", "over", "edges", "that", "are", "incident", "to", "supplied", "vertex", "argument", "in", "current", ":", "class", ":", "BreakpointGraph" ]
train
https://github.com/aganezov/bg/blob/1ec758193441e49e7b34e0da09571480f4c24455/bg/breakpoint_graph.py#L260-L280
aganezov/bg
bg/breakpoint_graph.py
BreakpointGraph.get_edges_by_vertex
def get_edges_by_vertex(self, vertex, keys=False): """ Iterates over edges that are incident to supplied vertex argument in current :class:`BreakpointGraph` Proxies a call to :meth:`Breakpoint._Breakpoint__get_edges_by_vertex` method. :param vertex: a vertex object in current :class:`BreakpointGraph` object :type vertex: any hashable object. :class:`bg.vertex.BGVertex` object is expected. :param keys: a flag to indicate if information about unique edge's ids has to be returned alongside with edge :type keys: ``Boolean`` :return: generator over edges (tuples ``edge, edge_id`` if keys specified) in current :class:`BreakpointGraph` wrapped in :class:`bg.vertex.BGEVertex` :rtype: ``generator`` """ for entry in self.__get_edges_by_vertex(vertex=vertex, keys=keys): yield entry
python
def get_edges_by_vertex(self, vertex, keys=False): """ Iterates over edges that are incident to supplied vertex argument in current :class:`BreakpointGraph` Proxies a call to :meth:`Breakpoint._Breakpoint__get_edges_by_vertex` method. :param vertex: a vertex object in current :class:`BreakpointGraph` object :type vertex: any hashable object. :class:`bg.vertex.BGVertex` object is expected. :param keys: a flag to indicate if information about unique edge's ids has to be returned alongside with edge :type keys: ``Boolean`` :return: generator over edges (tuples ``edge, edge_id`` if keys specified) in current :class:`BreakpointGraph` wrapped in :class:`bg.vertex.BGEVertex` :rtype: ``generator`` """ for entry in self.__get_edges_by_vertex(vertex=vertex, keys=keys): yield entry
[ "def", "get_edges_by_vertex", "(", "self", ",", "vertex", ",", "keys", "=", "False", ")", ":", "for", "entry", "in", "self", ".", "__get_edges_by_vertex", "(", "vertex", "=", "vertex", ",", "keys", "=", "keys", ")", ":", "yield", "entry" ]
Iterates over edges that are incident to supplied vertex argument in current :class:`BreakpointGraph` Proxies a call to :meth:`Breakpoint._Breakpoint__get_edges_by_vertex` method. :param vertex: a vertex object in current :class:`BreakpointGraph` object :type vertex: any hashable object. :class:`bg.vertex.BGVertex` object is expected. :param keys: a flag to indicate if information about unique edge's ids has to be returned alongside with edge :type keys: ``Boolean`` :return: generator over edges (tuples ``edge, edge_id`` if keys specified) in current :class:`BreakpointGraph` wrapped in :class:`bg.vertex.BGEVertex` :rtype: ``generator``
[ "Iterates", "over", "edges", "that", "are", "incident", "to", "supplied", "vertex", "argument", "in", "current", ":", "class", ":", "BreakpointGraph" ]
train
https://github.com/aganezov/bg/blob/1ec758193441e49e7b34e0da09571480f4c24455/bg/breakpoint_graph.py#L282-L295
aganezov/bg
bg/breakpoint_graph.py
BreakpointGraph.__edges_between_two_vertices
def __edges_between_two_vertices(self, vertex1, vertex2, keys=False): """ Iterates over edges between two supplied vertices in current :class:`BreakpointGraph` Checks that both supplied vertices are present in current breakpoint graph and then yield all edges that are located between two supplied vertices. If keys option is specified, then not just edges are yielded, but rather pairs (edge, edge_id) are yielded :param vertex1: a first vertex out of two, edges of interest are incident to :type vertex1: any hashable object, :class:`bg.vertex.BGVertex` is expected :param vertex2: a second vertex out of two, edges of interest are incident to :type vertex2: any hashable object, :class:`bg.vertex.BGVertex` is expected :param keys: a flag to indicate if information about unique edge's ids has to be returned alongside with edge :type keys: ``Boolean`` :return: generator over edges (tuples ``edge, edge_id`` if keys specified) between two supplied vertices in current :class:`BreakpointGraph` wrapped in :class:`bg.vertex.BGVertex` :rtype: ``generator`` """ for vertex in vertex1, vertex2: if vertex not in self.bg: raise ValueError("Supplied vertex ({vertex_name}) is not present in current BreakpointGraph" "".format(vertex_name=str(vertex.name))) for bgedge, key in self.__get_edges_by_vertex(vertex=vertex1, keys=True): if bgedge.vertex2 == vertex2: if keys: yield bgedge, key else: yield bgedge
python
def __edges_between_two_vertices(self, vertex1, vertex2, keys=False): """ Iterates over edges between two supplied vertices in current :class:`BreakpointGraph` Checks that both supplied vertices are present in current breakpoint graph and then yield all edges that are located between two supplied vertices. If keys option is specified, then not just edges are yielded, but rather pairs (edge, edge_id) are yielded :param vertex1: a first vertex out of two, edges of interest are incident to :type vertex1: any hashable object, :class:`bg.vertex.BGVertex` is expected :param vertex2: a second vertex out of two, edges of interest are incident to :type vertex2: any hashable object, :class:`bg.vertex.BGVertex` is expected :param keys: a flag to indicate if information about unique edge's ids has to be returned alongside with edge :type keys: ``Boolean`` :return: generator over edges (tuples ``edge, edge_id`` if keys specified) between two supplied vertices in current :class:`BreakpointGraph` wrapped in :class:`bg.vertex.BGVertex` :rtype: ``generator`` """ for vertex in vertex1, vertex2: if vertex not in self.bg: raise ValueError("Supplied vertex ({vertex_name}) is not present in current BreakpointGraph" "".format(vertex_name=str(vertex.name))) for bgedge, key in self.__get_edges_by_vertex(vertex=vertex1, keys=True): if bgedge.vertex2 == vertex2: if keys: yield bgedge, key else: yield bgedge
[ "def", "__edges_between_two_vertices", "(", "self", ",", "vertex1", ",", "vertex2", ",", "keys", "=", "False", ")", ":", "for", "vertex", "in", "vertex1", ",", "vertex2", ":", "if", "vertex", "not", "in", "self", ".", "bg", ":", "raise", "ValueError", "(...
Iterates over edges between two supplied vertices in current :class:`BreakpointGraph` Checks that both supplied vertices are present in current breakpoint graph and then yield all edges that are located between two supplied vertices. If keys option is specified, then not just edges are yielded, but rather pairs (edge, edge_id) are yielded :param vertex1: a first vertex out of two, edges of interest are incident to :type vertex1: any hashable object, :class:`bg.vertex.BGVertex` is expected :param vertex2: a second vertex out of two, edges of interest are incident to :type vertex2: any hashable object, :class:`bg.vertex.BGVertex` is expected :param keys: a flag to indicate if information about unique edge's ids has to be returned alongside with edge :type keys: ``Boolean`` :return: generator over edges (tuples ``edge, edge_id`` if keys specified) between two supplied vertices in current :class:`BreakpointGraph` wrapped in :class:`bg.vertex.BGVertex` :rtype: ``generator``
[ "Iterates", "over", "edges", "between", "two", "supplied", "vertices", "in", "current", ":", "class", ":", "BreakpointGraph" ]
train
https://github.com/aganezov/bg/blob/1ec758193441e49e7b34e0da09571480f4c24455/bg/breakpoint_graph.py#L297-L321
aganezov/bg
bg/breakpoint_graph.py
BreakpointGraph.edges_between_two_vertices
def edges_between_two_vertices(self, vertex1, vertex2, keys=False): """ Iterates over edges between two supplied vertices in current :class:`BreakpointGraph` Proxies a call to :meth:`Breakpoint._Breakpoint__edges_between_two_vertices` method. :param vertex1: a first vertex out of two, edges of interest are incident to :type vertex1: any hashable object, :class:`bg.vertex.BGVertex` is expected :param vertex2: a second vertex out of two, edges of interest are incident to :type vertex2: any hashable object, :class:`bg.vertex.BGVertex` is expected :param keys: a flag to indicate if information about unique edge's ids has to be returned alongside with edge :type keys: ``Boolean`` :return: generator over edges (tuples ``edge, edge_id`` if keys specified) between two supplied vertices in current :class:`BreakpointGraph` wrapped in :class:`bg.vertex.BGVertex` :rtype: ``generator`` """ for entry in self.__edges_between_two_vertices(vertex1=vertex1, vertex2=vertex2, keys=keys): yield entry
python
def edges_between_two_vertices(self, vertex1, vertex2, keys=False): """ Iterates over edges between two supplied vertices in current :class:`BreakpointGraph` Proxies a call to :meth:`Breakpoint._Breakpoint__edges_between_two_vertices` method. :param vertex1: a first vertex out of two, edges of interest are incident to :type vertex1: any hashable object, :class:`bg.vertex.BGVertex` is expected :param vertex2: a second vertex out of two, edges of interest are incident to :type vertex2: any hashable object, :class:`bg.vertex.BGVertex` is expected :param keys: a flag to indicate if information about unique edge's ids has to be returned alongside with edge :type keys: ``Boolean`` :return: generator over edges (tuples ``edge, edge_id`` if keys specified) between two supplied vertices in current :class:`BreakpointGraph` wrapped in :class:`bg.vertex.BGVertex` :rtype: ``generator`` """ for entry in self.__edges_between_two_vertices(vertex1=vertex1, vertex2=vertex2, keys=keys): yield entry
[ "def", "edges_between_two_vertices", "(", "self", ",", "vertex1", ",", "vertex2", ",", "keys", "=", "False", ")", ":", "for", "entry", "in", "self", ".", "__edges_between_two_vertices", "(", "vertex1", "=", "vertex1", ",", "vertex2", "=", "vertex2", ",", "ke...
Iterates over edges between two supplied vertices in current :class:`BreakpointGraph` Proxies a call to :meth:`Breakpoint._Breakpoint__edges_between_two_vertices` method. :param vertex1: a first vertex out of two, edges of interest are incident to :type vertex1: any hashable object, :class:`bg.vertex.BGVertex` is expected :param vertex2: a second vertex out of two, edges of interest are incident to :type vertex2: any hashable object, :class:`bg.vertex.BGVertex` is expected :param keys: a flag to indicate if information about unique edge's ids has to be returned alongside with edge :type keys: ``Boolean`` :return: generator over edges (tuples ``edge, edge_id`` if keys specified) between two supplied vertices in current :class:`BreakpointGraph` wrapped in :class:`bg.vertex.BGVertex` :rtype: ``generator``
[ "Iterates", "over", "edges", "between", "two", "supplied", "vertices", "in", "current", ":", "class", ":", "BreakpointGraph" ]
train
https://github.com/aganezov/bg/blob/1ec758193441e49e7b34e0da09571480f4c24455/bg/breakpoint_graph.py#L323-L338
aganezov/bg
bg/breakpoint_graph.py
BreakpointGraph.connected_components_subgraphs
def connected_components_subgraphs(self, copy=True): """ Iterates over connected components in current :class:`BreakpointGraph` object, and yields new instances of :class:`BreakpointGraph` with respective information deep-copied by default (week reference is possible of specified in method call). :param copy: a flag to signal if graph information has to be deep copied while producing new :class:`BreakpointGraph` instances, of just reference to respective data has to be made. :type copy: ``Boolean`` :return: generator over connected components in current :class:`BreakpointGraph` wrapping respective connected components into new :class:`BreakpointGraph` objects. :rtype: ``generator`` """ for component in nx.connected_components(self.bg): component = self.bg.subgraph(component) if copy: component.copy() yield BreakpointGraph(component)
python
def connected_components_subgraphs(self, copy=True): """ Iterates over connected components in current :class:`BreakpointGraph` object, and yields new instances of :class:`BreakpointGraph` with respective information deep-copied by default (week reference is possible of specified in method call). :param copy: a flag to signal if graph information has to be deep copied while producing new :class:`BreakpointGraph` instances, of just reference to respective data has to be made. :type copy: ``Boolean`` :return: generator over connected components in current :class:`BreakpointGraph` wrapping respective connected components into new :class:`BreakpointGraph` objects. :rtype: ``generator`` """ for component in nx.connected_components(self.bg): component = self.bg.subgraph(component) if copy: component.copy() yield BreakpointGraph(component)
[ "def", "connected_components_subgraphs", "(", "self", ",", "copy", "=", "True", ")", ":", "for", "component", "in", "nx", ".", "connected_components", "(", "self", ".", "bg", ")", ":", "component", "=", "self", ".", "bg", ".", "subgraph", "(", "component",...
Iterates over connected components in current :class:`BreakpointGraph` object, and yields new instances of :class:`BreakpointGraph` with respective information deep-copied by default (week reference is possible of specified in method call). :param copy: a flag to signal if graph information has to be deep copied while producing new :class:`BreakpointGraph` instances, of just reference to respective data has to be made. :type copy: ``Boolean`` :return: generator over connected components in current :class:`BreakpointGraph` wrapping respective connected components into new :class:`BreakpointGraph` objects. :rtype: ``generator``
[ "Iterates", "over", "connected", "components", "in", "current", ":", "class", ":", "BreakpointGraph", "object", "and", "yields", "new", "instances", "of", ":", "class", ":", "BreakpointGraph", "with", "respective", "information", "deep", "-", "copied", "by", "de...
train
https://github.com/aganezov/bg/blob/1ec758193441e49e7b34e0da09571480f4c24455/bg/breakpoint_graph.py#L340-L352
aganezov/bg
bg/breakpoint_graph.py
BreakpointGraph.__delete_bgedge
def __delete_bgedge(self, bgedge, key=None, keep_vertices=False): """ Deletes a supplied :class:`bg.edge.BGEdge` from a perspective of multi-color substitution. If unique identifier ``key`` is not provided, most similar (from perspective of :meth:`bg.multicolor.Multicolor.similarity_score` result) edge between respective vertices is chosen for change. If no unique identifier for edge to be changed is specified, edge to be updated is determined by iterating over all edges between vertices in supplied :class:`bg.edge.BGEdge` instance and the edge with most similarity score to supplied one is chosen. Once the edge to be substituted from is determined, substitution if performed form a perspective of :class:`bg.multicolor.Multicolor` substitution. If after substitution the remaining multicolor of respective edge is empty, such edge is deleted form a perspective of MultiGraph edge deletion. :param bgedge: an edge to be deleted from a perspective of multi-color substitution :type bgedge: :class:`bg.edge.BGEdge` :param key: unique identifier of existing edges in current :class:`BreakpointGraph` instance to be changed :type: any python object. ``int`` is expected. :return: ``None``, performed inplace changes. 
""" ############################################################################################################ # # determines which edge to delete # candidate edges setup # ############################################################################################################ if key is not None: ############################################################################################################ # # is an edge specific key is provided, only edge with that key can undergo multicolor deletion # even if that edge is not the most suited to the edge to be deleted # ############################################################################################################ self.bg[bgedge.vertex1][bgedge.vertex2][key]["attr_dict"]["multicolor"] -= bgedge.multicolor if len(self.bg[bgedge.vertex1][bgedge.vertex2][key]["attr_dict"]["multicolor"].multicolors) == 0: ############################################################################################################ # # since edge deletion correspond to multicolor substitution one must make sure # that no edges with empty multicolor are left in the graph # ############################################################################################################ self.bg.remove_edge(v=bgedge.vertex1, u=bgedge.vertex2, key=key) if keep_vertices: self.bg.add_node(bgedge.vertex1) self.bg.add_node(bgedge.vertex2) else: candidate_data, candidate_id, candidate_score = self.__determine_most_suitable_edge_for_deletion(bgedge) if candidate_data is not None: candidate_data["attr_dict"]["multicolor"] -= bgedge.multicolor if len(self.bg[bgedge.vertex1][bgedge.vertex2][candidate_id]["attr_dict"][ "multicolor"].multicolors) == 0: self.bg.remove_edge(v=bgedge.vertex1, u=bgedge.vertex2, key=candidate_id) if keep_vertices: self.bg.add_node(bgedge.vertex1) self.bg.add_node(bgedge.vertex2) self.cache_valid["overall_set_of_colors"] = False
python
def __delete_bgedge(self, bgedge, key=None, keep_vertices=False): """ Deletes a supplied :class:`bg.edge.BGEdge` from a perspective of multi-color substitution. If unique identifier ``key`` is not provided, most similar (from perspective of :meth:`bg.multicolor.Multicolor.similarity_score` result) edge between respective vertices is chosen for change. If no unique identifier for edge to be changed is specified, edge to be updated is determined by iterating over all edges between vertices in supplied :class:`bg.edge.BGEdge` instance and the edge with most similarity score to supplied one is chosen. Once the edge to be substituted from is determined, substitution if performed form a perspective of :class:`bg.multicolor.Multicolor` substitution. If after substitution the remaining multicolor of respective edge is empty, such edge is deleted form a perspective of MultiGraph edge deletion. :param bgedge: an edge to be deleted from a perspective of multi-color substitution :type bgedge: :class:`bg.edge.BGEdge` :param key: unique identifier of existing edges in current :class:`BreakpointGraph` instance to be changed :type: any python object. ``int`` is expected. :return: ``None``, performed inplace changes. 
""" ############################################################################################################ # # determines which edge to delete # candidate edges setup # ############################################################################################################ if key is not None: ############################################################################################################ # # is an edge specific key is provided, only edge with that key can undergo multicolor deletion # even if that edge is not the most suited to the edge to be deleted # ############################################################################################################ self.bg[bgedge.vertex1][bgedge.vertex2][key]["attr_dict"]["multicolor"] -= bgedge.multicolor if len(self.bg[bgedge.vertex1][bgedge.vertex2][key]["attr_dict"]["multicolor"].multicolors) == 0: ############################################################################################################ # # since edge deletion correspond to multicolor substitution one must make sure # that no edges with empty multicolor are left in the graph # ############################################################################################################ self.bg.remove_edge(v=bgedge.vertex1, u=bgedge.vertex2, key=key) if keep_vertices: self.bg.add_node(bgedge.vertex1) self.bg.add_node(bgedge.vertex2) else: candidate_data, candidate_id, candidate_score = self.__determine_most_suitable_edge_for_deletion(bgedge) if candidate_data is not None: candidate_data["attr_dict"]["multicolor"] -= bgedge.multicolor if len(self.bg[bgedge.vertex1][bgedge.vertex2][candidate_id]["attr_dict"][ "multicolor"].multicolors) == 0: self.bg.remove_edge(v=bgedge.vertex1, u=bgedge.vertex2, key=candidate_id) if keep_vertices: self.bg.add_node(bgedge.vertex1) self.bg.add_node(bgedge.vertex2) self.cache_valid["overall_set_of_colors"] = False
[ "def", "__delete_bgedge", "(", "self", ",", "bgedge", ",", "key", "=", "None", ",", "keep_vertices", "=", "False", ")", ":", "############################################################################################################", "#", "# determines which edge to delete", ...
Deletes a supplied :class:`bg.edge.BGEdge` from a perspective of multi-color substitution. If unique identifier ``key`` is not provided, most similar (from perspective of :meth:`bg.multicolor.Multicolor.similarity_score` result) edge between respective vertices is chosen for change. If no unique identifier for edge to be changed is specified, edge to be updated is determined by iterating over all edges between vertices in supplied :class:`bg.edge.BGEdge` instance and the edge with most similarity score to supplied one is chosen. Once the edge to be substituted from is determined, substitution if performed form a perspective of :class:`bg.multicolor.Multicolor` substitution. If after substitution the remaining multicolor of respective edge is empty, such edge is deleted form a perspective of MultiGraph edge deletion. :param bgedge: an edge to be deleted from a perspective of multi-color substitution :type bgedge: :class:`bg.edge.BGEdge` :param key: unique identifier of existing edges in current :class:`BreakpointGraph` instance to be changed :type: any python object. ``int`` is expected. :return: ``None``, performed inplace changes.
[ "Deletes", "a", "supplied", ":", "class", ":", "bg", ".", "edge", ".", "BGEdge", "from", "a", "perspective", "of", "multi", "-", "color", "substitution", ".", "If", "unique", "identifier", "key", "is", "not", "provided", "most", "similar", "(", "from", "...
train
https://github.com/aganezov/bg/blob/1ec758193441e49e7b34e0da09571480f4c24455/bg/breakpoint_graph.py#L354-L403
aganezov/bg
bg/breakpoint_graph.py
BreakpointGraph.delete_edge
def delete_edge(self, vertex1, vertex2, multicolor, key=None): """ Creates a new :class:`bg.edge.BGEdge` instance from supplied information and deletes it from a perspective of multi-color substitution. If unique identifier ``key`` is not provided, most similar (from perspective of :meth:`bg.multicolor.Multicolor.similarity_score` result) edge between respective vertices is chosen for change. Proxies a call to :math:`BreakpointGraph._BreakpointGraph__delete_bgedge` method. :param vertex1: a first vertex out of two the edge to be deleted is incident to :type vertex1: any python hashable object. :class:`bg.vertex.BGVertex` is expected :param vertex2: a second vertex out of two the edge to be deleted is incident to :type vertex2: any python hashable object. :class:`bg.vertex.BGVertex` is expected :param multicolor: a multi-color to find most suitable edge to be deleted :type multicolor: :class:`bg.multicolor.Multicolor` :param key: unique identifier of existing edges in current :class:`BreakpointGraph` instance to be changed :type: any python object. ``int`` is expected. :return: ``None``, performed inplace changes. """ self.__delete_bgedge(bgedge=BGEdge(vertex1=vertex1, vertex2=vertex2, multicolor=multicolor), key=key)
python
def delete_edge(self, vertex1, vertex2, multicolor, key=None): """ Creates a new :class:`bg.edge.BGEdge` instance from supplied information and deletes it from a perspective of multi-color substitution. If unique identifier ``key`` is not provided, most similar (from perspective of :meth:`bg.multicolor.Multicolor.similarity_score` result) edge between respective vertices is chosen for change. Proxies a call to :math:`BreakpointGraph._BreakpointGraph__delete_bgedge` method. :param vertex1: a first vertex out of two the edge to be deleted is incident to :type vertex1: any python hashable object. :class:`bg.vertex.BGVertex` is expected :param vertex2: a second vertex out of two the edge to be deleted is incident to :type vertex2: any python hashable object. :class:`bg.vertex.BGVertex` is expected :param multicolor: a multi-color to find most suitable edge to be deleted :type multicolor: :class:`bg.multicolor.Multicolor` :param key: unique identifier of existing edges in current :class:`BreakpointGraph` instance to be changed :type: any python object. ``int`` is expected. :return: ``None``, performed inplace changes. """ self.__delete_bgedge(bgedge=BGEdge(vertex1=vertex1, vertex2=vertex2, multicolor=multicolor), key=key)
[ "def", "delete_edge", "(", "self", ",", "vertex1", ",", "vertex2", ",", "multicolor", ",", "key", "=", "None", ")", ":", "self", ".", "__delete_bgedge", "(", "bgedge", "=", "BGEdge", "(", "vertex1", "=", "vertex1", ",", "vertex2", "=", "vertex2", ",", ...
Creates a new :class:`bg.edge.BGEdge` instance from supplied information and deletes it from a perspective of multi-color substitution. If unique identifier ``key`` is not provided, most similar (from perspective of :meth:`bg.multicolor.Multicolor.similarity_score` result) edge between respective vertices is chosen for change. Proxies a call to :math:`BreakpointGraph._BreakpointGraph__delete_bgedge` method. :param vertex1: a first vertex out of two the edge to be deleted is incident to :type vertex1: any python hashable object. :class:`bg.vertex.BGVertex` is expected :param vertex2: a second vertex out of two the edge to be deleted is incident to :type vertex2: any python hashable object. :class:`bg.vertex.BGVertex` is expected :param multicolor: a multi-color to find most suitable edge to be deleted :type multicolor: :class:`bg.multicolor.Multicolor` :param key: unique identifier of existing edges in current :class:`BreakpointGraph` instance to be changed :type: any python object. ``int`` is expected. :return: ``None``, performed inplace changes.
[ "Creates", "a", "new", ":", "class", ":", "bg", ".", "edge", ".", "BGEdge", "instance", "from", "supplied", "information", "and", "deletes", "it", "from", "a", "perspective", "of", "multi", "-", "color", "substitution", ".", "If", "unique", "identifier", "...
train
https://github.com/aganezov/bg/blob/1ec758193441e49e7b34e0da09571480f4c24455/bg/breakpoint_graph.py#L423-L438
aganezov/bg
bg/breakpoint_graph.py
BreakpointGraph.delete_bgedge
def delete_bgedge(self, bgedge, key=None): """ Deletes a supplied :class:`bg.edge.BGEdge` from a perspective of multi-color substitution. If unique identifier ``key`` is not provided, most similar (from perspective of :meth:`bg.multicolor.Multicolor.similarity_score` result) edge between respective vertices is chosen for change. Proxies a call to :math:`BreakpointGraph._BreakpointGraph__delete_bgedge` method. :param bgedge: an edge to be deleted from a perspective of multi-color substitution :type bgedge: :class:`bg.edge.BGEdge` :param key: unique identifier of existing edges in current :class:`BreakpointGraph` instance to be changed :type: any python object. ``int`` is expected. :return: ``None``, performed inplace changes. """ self.__delete_bgedge(bgedge=bgedge, key=key)
python
def delete_bgedge(self, bgedge, key=None): """ Deletes a supplied :class:`bg.edge.BGEdge` from a perspective of multi-color substitution. If unique identifier ``key`` is not provided, most similar (from perspective of :meth:`bg.multicolor.Multicolor.similarity_score` result) edge between respective vertices is chosen for change. Proxies a call to :math:`BreakpointGraph._BreakpointGraph__delete_bgedge` method. :param bgedge: an edge to be deleted from a perspective of multi-color substitution :type bgedge: :class:`bg.edge.BGEdge` :param key: unique identifier of existing edges in current :class:`BreakpointGraph` instance to be changed :type: any python object. ``int`` is expected. :return: ``None``, performed inplace changes. """ self.__delete_bgedge(bgedge=bgedge, key=key)
[ "def", "delete_bgedge", "(", "self", ",", "bgedge", ",", "key", "=", "None", ")", ":", "self", ".", "__delete_bgedge", "(", "bgedge", "=", "bgedge", ",", "key", "=", "key", ")" ]
Deletes a supplied :class:`bg.edge.BGEdge` from a perspective of multi-color substitution. If unique identifier ``key`` is not provided, most similar (from perspective of :meth:`bg.multicolor.Multicolor.similarity_score` result) edge between respective vertices is chosen for change. Proxies a call to :math:`BreakpointGraph._BreakpointGraph__delete_bgedge` method. :param bgedge: an edge to be deleted from a perspective of multi-color substitution :type bgedge: :class:`bg.edge.BGEdge` :param key: unique identifier of existing edges in current :class:`BreakpointGraph` instance to be changed :type: any python object. ``int`` is expected. :return: ``None``, performed inplace changes.
[ "Deletes", "a", "supplied", ":", "class", ":", "bg", ".", "edge", ".", "BGEdge", "from", "a", "perspective", "of", "multi", "-", "color", "substitution", ".", "If", "unique", "identifier", "key", "is", "not", "provided", "most", "similar", "(", "from", "...
train
https://github.com/aganezov/bg/blob/1ec758193441e49e7b34e0da09571480f4c24455/bg/breakpoint_graph.py#L440-L451
aganezov/bg
bg/breakpoint_graph.py
BreakpointGraph.__split_bgedge
def __split_bgedge(self, bgedge, guidance=None, sorted_guidance=False, account_for_colors_multiplicity_in_guidance=True, key=None): """ Splits a :class:`bg.edge.BGEdge` in current :class:`BreakpointGraph` most similar to supplied one (if no unique identifier ``key`` is provided) with respect to supplied guidance. If no unique identifier for edge to be changed is specified, edge to be split is determined by iterating over all edges between vertices in supplied :class:`bg.edge.BGEdge` instance and the edge with most similarity score to supplied one is chosen. Once the edge to be split is determined, split if performed form a perspective of :class:`bg.multicolor.Multicolor` split. The originally detected edge is deleted, and new edges containing information about multi-colors after splitting, are added to the current :class:`BreakpointGraph`. :param bgedge: an edge to find most "similar to" among existing edges for a split :type bgedge: :class:`bg.edge.BGEdge` :param guidance: a guidance for underlying :class:`bg.multicolor.Multicolor` object to be split :type guidance: iterable where each entry is iterable with colors entries :param duplication_splitting: flag (**not** currently implemented) for a splitting of color-based splitting to take into account multiplicity of respective colors :type duplication_splitting: ``Boolean`` :param key: unique identifier of edge to be split :type key: any python object. 
``int`` is expected :return: ``None``, performs inplace changes """ candidate_id = None candidate_score = 0 candidate_data = None if key is not None: new_multicolors = Multicolor.split_colors( multicolor=self.bg[bgedge.vertex1][bgedge.vertex2][key]["attr_dict"]["multicolor"], guidance=guidance, sorted_guidance=sorted_guidance, account_for_color_multiplicity_in_guidance=account_for_colors_multiplicity_in_guidance) self.__delete_bgedge(bgedge=BGEdge(vertex1=bgedge.vertex1, vertex2=bgedge.vertex2, multicolor=self.bg[bgedge.vertex1][bgedge.vertex2][key]["attr_dict"]["multicolor"]), key=key) for multicolor in new_multicolors: self.__add_bgedge(BGEdge(vertex1=bgedge.vertex1, vertex2=bgedge.vertex2, multicolor=multicolor), merge=False) else: for v1, v2, key, data in self.bg.edges(nbunch=bgedge.vertex1, data=True, keys=True): if v2 == bgedge.vertex2: score = Multicolor.similarity_score(bgedge.multicolor, data["attr_dict"]["multicolor"]) if score > candidate_score: candidate_id = key candidate_data = data candidate_score = score if candidate_data is not None: new_multicolors = Multicolor.split_colors(multicolor=candidate_data["attr_dict"]["multicolor"], guidance=guidance, sorted_guidance=sorted_guidance, account_for_color_multiplicity_in_guidance=account_for_colors_multiplicity_in_guidance) self.__delete_bgedge(bgedge=BGEdge(vertex1=bgedge.vertex1, vertex2=bgedge.vertex2, multicolor=candidate_data["attr_dict"]["multicolor"]), key=candidate_id) for multicolor in new_multicolors: self.__add_bgedge(BGEdge(vertex1=bgedge.vertex1, vertex2=bgedge.vertex2, multicolor=multicolor), merge=False)
python
def __split_bgedge(self, bgedge, guidance=None, sorted_guidance=False, account_for_colors_multiplicity_in_guidance=True, key=None): """ Splits a :class:`bg.edge.BGEdge` in current :class:`BreakpointGraph` most similar to supplied one (if no unique identifier ``key`` is provided) with respect to supplied guidance. If no unique identifier for edge to be changed is specified, edge to be split is determined by iterating over all edges between vertices in supplied :class:`bg.edge.BGEdge` instance and the edge with most similarity score to supplied one is chosen. Once the edge to be split is determined, split if performed form a perspective of :class:`bg.multicolor.Multicolor` split. The originally detected edge is deleted, and new edges containing information about multi-colors after splitting, are added to the current :class:`BreakpointGraph`. :param bgedge: an edge to find most "similar to" among existing edges for a split :type bgedge: :class:`bg.edge.BGEdge` :param guidance: a guidance for underlying :class:`bg.multicolor.Multicolor` object to be split :type guidance: iterable where each entry is iterable with colors entries :param duplication_splitting: flag (**not** currently implemented) for a splitting of color-based splitting to take into account multiplicity of respective colors :type duplication_splitting: ``Boolean`` :param key: unique identifier of edge to be split :type key: any python object. 
``int`` is expected :return: ``None``, performs inplace changes """ candidate_id = None candidate_score = 0 candidate_data = None if key is not None: new_multicolors = Multicolor.split_colors( multicolor=self.bg[bgedge.vertex1][bgedge.vertex2][key]["attr_dict"]["multicolor"], guidance=guidance, sorted_guidance=sorted_guidance, account_for_color_multiplicity_in_guidance=account_for_colors_multiplicity_in_guidance) self.__delete_bgedge(bgedge=BGEdge(vertex1=bgedge.vertex1, vertex2=bgedge.vertex2, multicolor=self.bg[bgedge.vertex1][bgedge.vertex2][key]["attr_dict"]["multicolor"]), key=key) for multicolor in new_multicolors: self.__add_bgedge(BGEdge(vertex1=bgedge.vertex1, vertex2=bgedge.vertex2, multicolor=multicolor), merge=False) else: for v1, v2, key, data in self.bg.edges(nbunch=bgedge.vertex1, data=True, keys=True): if v2 == bgedge.vertex2: score = Multicolor.similarity_score(bgedge.multicolor, data["attr_dict"]["multicolor"]) if score > candidate_score: candidate_id = key candidate_data = data candidate_score = score if candidate_data is not None: new_multicolors = Multicolor.split_colors(multicolor=candidate_data["attr_dict"]["multicolor"], guidance=guidance, sorted_guidance=sorted_guidance, account_for_color_multiplicity_in_guidance=account_for_colors_multiplicity_in_guidance) self.__delete_bgedge(bgedge=BGEdge(vertex1=bgedge.vertex1, vertex2=bgedge.vertex2, multicolor=candidate_data["attr_dict"]["multicolor"]), key=candidate_id) for multicolor in new_multicolors: self.__add_bgedge(BGEdge(vertex1=bgedge.vertex1, vertex2=bgedge.vertex2, multicolor=multicolor), merge=False)
[ "def", "__split_bgedge", "(", "self", ",", "bgedge", ",", "guidance", "=", "None", ",", "sorted_guidance", "=", "False", ",", "account_for_colors_multiplicity_in_guidance", "=", "True", ",", "key", "=", "None", ")", ":", "candidate_id", "=", "None", "candidate_s...
Splits a :class:`bg.edge.BGEdge` in current :class:`BreakpointGraph` most similar to supplied one (if no unique identifier ``key`` is provided) with respect to supplied guidance. If no unique identifier for edge to be changed is specified, edge to be split is determined by iterating over all edges between vertices in supplied :class:`bg.edge.BGEdge` instance and the edge with most similarity score to supplied one is chosen. Once the edge to be split is determined, split if performed form a perspective of :class:`bg.multicolor.Multicolor` split. The originally detected edge is deleted, and new edges containing information about multi-colors after splitting, are added to the current :class:`BreakpointGraph`. :param bgedge: an edge to find most "similar to" among existing edges for a split :type bgedge: :class:`bg.edge.BGEdge` :param guidance: a guidance for underlying :class:`bg.multicolor.Multicolor` object to be split :type guidance: iterable where each entry is iterable with colors entries :param duplication_splitting: flag (**not** currently implemented) for a splitting of color-based splitting to take into account multiplicity of respective colors :type duplication_splitting: ``Boolean`` :param key: unique identifier of edge to be split :type key: any python object. ``int`` is expected :return: ``None``, performs inplace changes
[ "Splits", "a", ":", "class", ":", "bg", ".", "edge", ".", "BGEdge", "in", "current", ":", "class", ":", "BreakpointGraph", "most", "similar", "to", "supplied", "one", "(", "if", "no", "unique", "identifier", "key", "is", "provided", ")", "with", "respect...
train
https://github.com/aganezov/bg/blob/1ec758193441e49e7b34e0da09571480f4c24455/bg/breakpoint_graph.py#L453-L503
aganezov/bg
bg/breakpoint_graph.py
BreakpointGraph.split_edge
def split_edge(self, vertex1, vertex2, multicolor, guidance=None, sorted_guidance=False, account_for_colors_multiplicity_in_guidance=True, key=None): """ Splits an edge in current :class:`BreakpointGraph` most similar to supplied data (if no unique identifier ``key`` is provided) with respect to supplied guidance. Proxies a call to :meth:`BreakpointGraph._BreakpointGraph__split_bgedge` method. :param vertex1: a first vertex out of two the edge to be split is incident to :type vertex1: any python hashable object. :class:`bg.vertex.BGVertex` is expected :param vertex2: a second vertex out of two the edge to be split is incident to :type vertex2: any python hashable object. :class:`bg.vertex.BGVertex` is expected :param multicolor: a multi-color to find most suitable edge to be split :type multicolor: :class:`bg.multicolor.Multicolor` :param duplication_splitting: flag (**not** currently implemented) for a splitting of color-based splitting to take into account multiplicity of respective colors :type duplication_splitting: ``Boolean`` :param key: unique identifier of edge to be split :type key: any python object. ``int`` is expected :return: ``None``, performs inplace changes """ self.__split_bgedge(bgedge=BGEdge(vertex1=vertex1, vertex2=vertex2, multicolor=multicolor), guidance=guidance, sorted_guidance=sorted_guidance, account_for_colors_multiplicity_in_guidance=account_for_colors_multiplicity_in_guidance, key=key)
python
def split_edge(self, vertex1, vertex2, multicolor, guidance=None, sorted_guidance=False, account_for_colors_multiplicity_in_guidance=True, key=None): """ Splits an edge in current :class:`BreakpointGraph` most similar to supplied data (if no unique identifier ``key`` is provided) with respect to supplied guidance. Proxies a call to :meth:`BreakpointGraph._BreakpointGraph__split_bgedge` method. :param vertex1: a first vertex out of two the edge to be split is incident to :type vertex1: any python hashable object. :class:`bg.vertex.BGVertex` is expected :param vertex2: a second vertex out of two the edge to be split is incident to :type vertex2: any python hashable object. :class:`bg.vertex.BGVertex` is expected :param multicolor: a multi-color to find most suitable edge to be split :type multicolor: :class:`bg.multicolor.Multicolor` :param duplication_splitting: flag (**not** currently implemented) for a splitting of color-based splitting to take into account multiplicity of respective colors :type duplication_splitting: ``Boolean`` :param key: unique identifier of edge to be split :type key: any python object. ``int`` is expected :return: ``None``, performs inplace changes """ self.__split_bgedge(bgedge=BGEdge(vertex1=vertex1, vertex2=vertex2, multicolor=multicolor), guidance=guidance, sorted_guidance=sorted_guidance, account_for_colors_multiplicity_in_guidance=account_for_colors_multiplicity_in_guidance, key=key)
[ "def", "split_edge", "(", "self", ",", "vertex1", ",", "vertex2", ",", "multicolor", ",", "guidance", "=", "None", ",", "sorted_guidance", "=", "False", ",", "account_for_colors_multiplicity_in_guidance", "=", "True", ",", "key", "=", "None", ")", ":", "self",...
Splits an edge in current :class:`BreakpointGraph` most similar to supplied data (if no unique identifier ``key`` is provided) with respect to supplied guidance. Proxies a call to :meth:`BreakpointGraph._BreakpointGraph__split_bgedge` method. :param vertex1: a first vertex out of two the edge to be split is incident to :type vertex1: any python hashable object. :class:`bg.vertex.BGVertex` is expected :param vertex2: a second vertex out of two the edge to be split is incident to :type vertex2: any python hashable object. :class:`bg.vertex.BGVertex` is expected :param multicolor: a multi-color to find most suitable edge to be split :type multicolor: :class:`bg.multicolor.Multicolor` :param duplication_splitting: flag (**not** currently implemented) for a splitting of color-based splitting to take into account multiplicity of respective colors :type duplication_splitting: ``Boolean`` :param key: unique identifier of edge to be split :type key: any python object. ``int`` is expected :return: ``None``, performs inplace changes
[ "Splits", "an", "edge", "in", "current", ":", "class", ":", "BreakpointGraph", "most", "similar", "to", "supplied", "data", "(", "if", "no", "unique", "identifier", "key", "is", "provided", ")", "with", "respect", "to", "supplied", "guidance", "." ]
train
https://github.com/aganezov/bg/blob/1ec758193441e49e7b34e0da09571480f4c24455/bg/breakpoint_graph.py#L505-L526
aganezov/bg
bg/breakpoint_graph.py
BreakpointGraph.split_bgedge
def split_bgedge(self, bgedge, guidance=None, sorted_guidance=False, account_for_colors_multiplicity_in_guidance=True, key=None): """ Splits a :class:`bg.edge.BGEdge` in current :class:`BreakpointGraph` most similar to supplied one (if no unique identifier ``key`` is provided) with respect to supplied guidance. Proxies a call to :meth:`BreakpointGraph._BreakpointGraph__split_bgedge` method. :param bgedge: an edge to find most "similar to" among existing edges for a split :type bgedge: :class:`bg.edge.BGEdge` :param guidance: a guidance for underlying :class:`bg.multicolor.Multicolor` object to be split :type guidance: iterable where each entry is iterable with colors entries :param duplication_splitting: flag (**not** currently implemented) for a splitting of color-based splitting to take into account multiplicity of respective colors :type duplication_splitting: ``Boolean`` :param key: unique identifier of edge to be split :type key: any python object. ``int`` is expected :return: ``None``, performs inplace changes """ self.__split_bgedge(bgedge=bgedge, guidance=guidance, sorted_guidance=sorted_guidance, account_for_colors_multiplicity_in_guidance=account_for_colors_multiplicity_in_guidance, key=key)
python
def split_bgedge(self, bgedge, guidance=None, sorted_guidance=False, account_for_colors_multiplicity_in_guidance=True, key=None): """ Splits a :class:`bg.edge.BGEdge` in current :class:`BreakpointGraph` most similar to supplied one (if no unique identifier ``key`` is provided) with respect to supplied guidance. Proxies a call to :meth:`BreakpointGraph._BreakpointGraph__split_bgedge` method. :param bgedge: an edge to find most "similar to" among existing edges for a split :type bgedge: :class:`bg.edge.BGEdge` :param guidance: a guidance for underlying :class:`bg.multicolor.Multicolor` object to be split :type guidance: iterable where each entry is iterable with colors entries :param duplication_splitting: flag (**not** currently implemented) for a splitting of color-based splitting to take into account multiplicity of respective colors :type duplication_splitting: ``Boolean`` :param key: unique identifier of edge to be split :type key: any python object. ``int`` is expected :return: ``None``, performs inplace changes """ self.__split_bgedge(bgedge=bgedge, guidance=guidance, sorted_guidance=sorted_guidance, account_for_colors_multiplicity_in_guidance=account_for_colors_multiplicity_in_guidance, key=key)
[ "def", "split_bgedge", "(", "self", ",", "bgedge", ",", "guidance", "=", "None", ",", "sorted_guidance", "=", "False", ",", "account_for_colors_multiplicity_in_guidance", "=", "True", ",", "key", "=", "None", ")", ":", "self", ".", "__split_bgedge", "(", "bged...
Splits a :class:`bg.edge.BGEdge` in current :class:`BreakpointGraph` most similar to supplied one (if no unique identifier ``key`` is provided) with respect to supplied guidance. Proxies a call to :meth:`BreakpointGraph._BreakpointGraph__split_bgedge` method. :param bgedge: an edge to find most "similar to" among existing edges for a split :type bgedge: :class:`bg.edge.BGEdge` :param guidance: a guidance for underlying :class:`bg.multicolor.Multicolor` object to be split :type guidance: iterable where each entry is iterable with colors entries :param duplication_splitting: flag (**not** currently implemented) for a splitting of color-based splitting to take into account multiplicity of respective colors :type duplication_splitting: ``Boolean`` :param key: unique identifier of edge to be split :type key: any python object. ``int`` is expected :return: ``None``, performs inplace changes
[ "Splits", "a", ":", "class", ":", "bg", ".", "edge", ".", "BGEdge", "in", "current", ":", "class", ":", "BreakpointGraph", "most", "similar", "to", "supplied", "one", "(", "if", "no", "unique", "identifier", "key", "is", "provided", ")", "with", "respect...
train
https://github.com/aganezov/bg/blob/1ec758193441e49e7b34e0da09571480f4c24455/bg/breakpoint_graph.py#L528-L547
aganezov/bg
bg/breakpoint_graph.py
BreakpointGraph.__split_all_edges_between_two_vertices
def __split_all_edges_between_two_vertices(self, vertex1, vertex2, guidance=None, sorted_guidance=False, account_for_colors_multiplicity_in_guidance=True): """ Splits all edges between two supplied vertices in current :class:`BreakpointGraph` instance with respect to the provided guidance. Iterates over all edges between two supplied vertices and splits each one of them with respect to the guidance. :param vertex1: a first out of two vertices edges between which are to be split :type vertex1: any python hashable object. :class:`bg.vertex.BGVertex` is expected :param vertex2: a second out of two vertices edges between which are to be split :type vertex2: any python hashable object. :class:`bg.vertex.BGVertex` is expected :param guidance: a guidance for underlying :class:`bg.multicolor.Multicolor` objects to be split :type guidance: iterable where each entry is iterable with colors entries :return: ``None``, performs inplace changes """ edges_to_be_split_keys = [key for v1, v2, key in self.bg.edges(nbunch=vertex1, keys=True) if v2 == vertex2] for key in edges_to_be_split_keys: self.__split_bgedge(BGEdge(vertex1=vertex1, vertex2=vertex2, multicolor=None), guidance=guidance, sorted_guidance=sorted_guidance, account_for_colors_multiplicity_in_guidance=account_for_colors_multiplicity_in_guidance, key=key)
python
def __split_all_edges_between_two_vertices(self, vertex1, vertex2, guidance=None, sorted_guidance=False, account_for_colors_multiplicity_in_guidance=True): """ Splits all edges between two supplied vertices in current :class:`BreakpointGraph` instance with respect to the provided guidance. Iterates over all edges between two supplied vertices and splits each one of them with respect to the guidance. :param vertex1: a first out of two vertices edges between which are to be split :type vertex1: any python hashable object. :class:`bg.vertex.BGVertex` is expected :param vertex2: a second out of two vertices edges between which are to be split :type vertex2: any python hashable object. :class:`bg.vertex.BGVertex` is expected :param guidance: a guidance for underlying :class:`bg.multicolor.Multicolor` objects to be split :type guidance: iterable where each entry is iterable with colors entries :return: ``None``, performs inplace changes """ edges_to_be_split_keys = [key for v1, v2, key in self.bg.edges(nbunch=vertex1, keys=True) if v2 == vertex2] for key in edges_to_be_split_keys: self.__split_bgedge(BGEdge(vertex1=vertex1, vertex2=vertex2, multicolor=None), guidance=guidance, sorted_guidance=sorted_guidance, account_for_colors_multiplicity_in_guidance=account_for_colors_multiplicity_in_guidance, key=key)
[ "def", "__split_all_edges_between_two_vertices", "(", "self", ",", "vertex1", ",", "vertex2", ",", "guidance", "=", "None", ",", "sorted_guidance", "=", "False", ",", "account_for_colors_multiplicity_in_guidance", "=", "True", ")", ":", "edges_to_be_split_keys", "=", ...
Splits all edges between two supplied vertices in current :class:`BreakpointGraph` instance with respect to the provided guidance. Iterates over all edges between two supplied vertices and splits each one of them with respect to the guidance. :param vertex1: a first out of two vertices edges between which are to be split :type vertex1: any python hashable object. :class:`bg.vertex.BGVertex` is expected :param vertex2: a second out of two vertices edges between which are to be split :type vertex2: any python hashable object. :class:`bg.vertex.BGVertex` is expected :param guidance: a guidance for underlying :class:`bg.multicolor.Multicolor` objects to be split :type guidance: iterable where each entry is iterable with colors entries :return: ``None``, performs inplace changes
[ "Splits", "all", "edges", "between", "two", "supplied", "vertices", "in", "current", ":", "class", ":", "BreakpointGraph", "instance", "with", "respect", "to", "the", "provided", "guidance", "." ]
train
https://github.com/aganezov/bg/blob/1ec758193441e49e7b34e0da09571480f4c24455/bg/breakpoint_graph.py#L549-L568
aganezov/bg
bg/breakpoint_graph.py
BreakpointGraph.split_all_edges_between_two_vertices
def split_all_edges_between_two_vertices(self, vertex1, vertex2, guidance=None, sorted_guidance=False, account_for_colors_multiplicity_in_guidance=True): """ Splits all edges between two supplied vertices in current :class:`BreakpointGraph` instance with respect to the provided guidance. Proxies a call to :meth:`BreakpointGraph._BreakpointGraph__split_all_edges_between_two_vertices` method. :param vertex1: a first out of two vertices edges between which are to be split :type vertex1: any python hashable object. :class:`bg.vertex.BGVertex` is expected :param vertex2: a second out of two vertices edges between which are to be split :type vertex2: any python hashable object. :class:`bg.vertex.BGVertex` is expected :param guidance: a guidance for underlying :class:`bg.multicolor.Multicolor` objects to be split :type guidance: iterable where each entry is iterable with colors entries :return: ``None``, performs inplace changes """ self.__split_all_edges_between_two_vertices(vertex1=vertex1, vertex2=vertex2, guidance=guidance, sorted_guidance=sorted_guidance, account_for_colors_multiplicity_in_guidance=account_for_colors_multiplicity_in_guidance)
python
def split_all_edges_between_two_vertices(self, vertex1, vertex2, guidance=None, sorted_guidance=False, account_for_colors_multiplicity_in_guidance=True): """ Splits all edges between two supplied vertices in current :class:`BreakpointGraph` instance with respect to the provided guidance. Proxies a call to :meth:`BreakpointGraph._BreakpointGraph__split_all_edges_between_two_vertices` method. :param vertex1: a first out of two vertices edges between which are to be split :type vertex1: any python hashable object. :class:`bg.vertex.BGVertex` is expected :param vertex2: a second out of two vertices edges between which are to be split :type vertex2: any python hashable object. :class:`bg.vertex.BGVertex` is expected :param guidance: a guidance for underlying :class:`bg.multicolor.Multicolor` objects to be split :type guidance: iterable where each entry is iterable with colors entries :return: ``None``, performs inplace changes """ self.__split_all_edges_between_two_vertices(vertex1=vertex1, vertex2=vertex2, guidance=guidance, sorted_guidance=sorted_guidance, account_for_colors_multiplicity_in_guidance=account_for_colors_multiplicity_in_guidance)
[ "def", "split_all_edges_between_two_vertices", "(", "self", ",", "vertex1", ",", "vertex2", ",", "guidance", "=", "None", ",", "sorted_guidance", "=", "False", ",", "account_for_colors_multiplicity_in_guidance", "=", "True", ")", ":", "self", ".", "__split_all_edges_b...
Splits all edges between two supplied vertices in current :class:`BreakpointGraph` instance with respect to the provided guidance. Proxies a call to :meth:`BreakpointGraph._BreakpointGraph__split_all_edges_between_two_vertices` method. :param vertex1: a first out of two vertices edges between which are to be split :type vertex1: any python hashable object. :class:`bg.vertex.BGVertex` is expected :param vertex2: a second out of two vertices edges between which are to be split :type vertex2: any python hashable object. :class:`bg.vertex.BGVertex` is expected :param guidance: a guidance for underlying :class:`bg.multicolor.Multicolor` objects to be split :type guidance: iterable where each entry is iterable with colors entries :return: ``None``, performs inplace changes
[ "Splits", "all", "edges", "between", "two", "supplied", "vertices", "in", "current", ":", "class", ":", "BreakpointGraph", "instance", "with", "respect", "to", "the", "provided", "guidance", "." ]
train
https://github.com/aganezov/bg/blob/1ec758193441e49e7b34e0da09571480f4c24455/bg/breakpoint_graph.py#L570-L586
aganezov/bg
bg/breakpoint_graph.py
BreakpointGraph.split_all_edges
def split_all_edges(self, guidance=None, sorted_guidance=False, account_for_colors_multiplicity_in_guidance=True): """ Splits all edge in current :class:`BreakpointGraph` instance with respect to the provided guidance. Iterate over all possible distinct pairs of vertices in current :class:`BreakpointGraph` instance and splits all edges between such pairs with respect to provided guidance. :param guidance: a guidance for underlying :class:`bg.multicolor.Multicolor` objects to be split :type guidance: iterable where each entry is iterable with colors entries :return: ``None``, performs inplace changes """ vertex_pairs = [(edge.vertex1, edge.vertex2) for edge in self.edges()] for v1, v2 in vertex_pairs: self.__split_all_edges_between_two_vertices(vertex1=v1, vertex2=v2, guidance=guidance, sorted_guidance=sorted_guidance, account_for_colors_multiplicity_in_guidance=account_for_colors_multiplicity_in_guidance)
python
def split_all_edges(self, guidance=None, sorted_guidance=False, account_for_colors_multiplicity_in_guidance=True): """ Splits all edge in current :class:`BreakpointGraph` instance with respect to the provided guidance. Iterate over all possible distinct pairs of vertices in current :class:`BreakpointGraph` instance and splits all edges between such pairs with respect to provided guidance. :param guidance: a guidance for underlying :class:`bg.multicolor.Multicolor` objects to be split :type guidance: iterable where each entry is iterable with colors entries :return: ``None``, performs inplace changes """ vertex_pairs = [(edge.vertex1, edge.vertex2) for edge in self.edges()] for v1, v2 in vertex_pairs: self.__split_all_edges_between_two_vertices(vertex1=v1, vertex2=v2, guidance=guidance, sorted_guidance=sorted_guidance, account_for_colors_multiplicity_in_guidance=account_for_colors_multiplicity_in_guidance)
[ "def", "split_all_edges", "(", "self", ",", "guidance", "=", "None", ",", "sorted_guidance", "=", "False", ",", "account_for_colors_multiplicity_in_guidance", "=", "True", ")", ":", "vertex_pairs", "=", "[", "(", "edge", ".", "vertex1", ",", "edge", ".", "vert...
Splits all edge in current :class:`BreakpointGraph` instance with respect to the provided guidance. Iterate over all possible distinct pairs of vertices in current :class:`BreakpointGraph` instance and splits all edges between such pairs with respect to provided guidance. :param guidance: a guidance for underlying :class:`bg.multicolor.Multicolor` objects to be split :type guidance: iterable where each entry is iterable with colors entries :return: ``None``, performs inplace changes
[ "Splits", "all", "edge", "in", "current", ":", "class", ":", "BreakpointGraph", "instance", "with", "respect", "to", "the", "provided", "guidance", "." ]
train
https://github.com/aganezov/bg/blob/1ec758193441e49e7b34e0da09571480f4c24455/bg/breakpoint_graph.py#L588-L601
aganezov/bg
bg/breakpoint_graph.py
BreakpointGraph.__delete_all_bgedges_between_two_vertices
def __delete_all_bgedges_between_two_vertices(self, vertex1, vertex2): """ Deletes all edges between two supplied vertices :param vertex1: a first out of two vertices edges between which are to be deleted :type vertex1: any python hashable object. :class:`bg.vertex.BGVertex` is expected :param vertex2: a second out of two vertices edges between which are to be deleted :type vertex2: any python hashable object. :class:`bg.vertex.BGVertex` is expected :return: ``None``, performs inplace changes """ edges_to_be_deleted_with_keys = [(key, data) for v1, v2, key, data in self.bg.edges(nbunch=vertex1, keys=True, data=True) if v2 == vertex2] for key, data in edges_to_be_deleted_with_keys: self.__delete_bgedge(BGEdge(vertex1=vertex1, vertex2=vertex2, multicolor=data["attr_dict"]["multicolor"]), key=key)
python
def __delete_all_bgedges_between_two_vertices(self, vertex1, vertex2): """ Deletes all edges between two supplied vertices :param vertex1: a first out of two vertices edges between which are to be deleted :type vertex1: any python hashable object. :class:`bg.vertex.BGVertex` is expected :param vertex2: a second out of two vertices edges between which are to be deleted :type vertex2: any python hashable object. :class:`bg.vertex.BGVertex` is expected :return: ``None``, performs inplace changes """ edges_to_be_deleted_with_keys = [(key, data) for v1, v2, key, data in self.bg.edges(nbunch=vertex1, keys=True, data=True) if v2 == vertex2] for key, data in edges_to_be_deleted_with_keys: self.__delete_bgedge(BGEdge(vertex1=vertex1, vertex2=vertex2, multicolor=data["attr_dict"]["multicolor"]), key=key)
[ "def", "__delete_all_bgedges_between_two_vertices", "(", "self", ",", "vertex1", ",", "vertex2", ")", ":", "edges_to_be_deleted_with_keys", "=", "[", "(", "key", ",", "data", ")", "for", "v1", ",", "v2", ",", "key", ",", "data", "in", "self", ".", "bg", "....
Deletes all edges between two supplied vertices :param vertex1: a first out of two vertices edges between which are to be deleted :type vertex1: any python hashable object. :class:`bg.vertex.BGVertex` is expected :param vertex2: a second out of two vertices edges between which are to be deleted :type vertex2: any python hashable object. :class:`bg.vertex.BGVertex` is expected :return: ``None``, performs inplace changes
[ "Deletes", "all", "edges", "between", "two", "supplied", "vertices" ]
train
https://github.com/aganezov/bg/blob/1ec758193441e49e7b34e0da09571480f4c24455/bg/breakpoint_graph.py#L603-L616
aganezov/bg
bg/breakpoint_graph.py
BreakpointGraph.delete_all_edges_between_two_vertices
def delete_all_edges_between_two_vertices(self, vertex1, vertex2): """ Deletes all edges between two supplied vertices Proxies a call to :meth:`BreakpointGraph._BreakpointGraph__delete_all_bgedges_between_two_vertices` method. :param vertex1: a first out of two vertices edges between which are to be deleted :type vertex1: any python hashable object. :class:`bg.vertex.BGVertex` is expected :param vertex2: a second out of two vertices edges between which are to be deleted :type vertex2: any python hashable object. :class:`bg.vertex.BGVertex` is expected :return: ``None``, performs inplace changes """ self.__delete_all_bgedges_between_two_vertices(vertex1=vertex1, vertex2=vertex2)
python
def delete_all_edges_between_two_vertices(self, vertex1, vertex2): """ Deletes all edges between two supplied vertices Proxies a call to :meth:`BreakpointGraph._BreakpointGraph__delete_all_bgedges_between_two_vertices` method. :param vertex1: a first out of two vertices edges between which are to be deleted :type vertex1: any python hashable object. :class:`bg.vertex.BGVertex` is expected :param vertex2: a second out of two vertices edges between which are to be deleted :type vertex2: any python hashable object. :class:`bg.vertex.BGVertex` is expected :return: ``None``, performs inplace changes """ self.__delete_all_bgedges_between_two_vertices(vertex1=vertex1, vertex2=vertex2)
[ "def", "delete_all_edges_between_two_vertices", "(", "self", ",", "vertex1", ",", "vertex2", ")", ":", "self", ".", "__delete_all_bgedges_between_two_vertices", "(", "vertex1", "=", "vertex1", ",", "vertex2", "=", "vertex2", ")" ]
Deletes all edges between two supplied vertices Proxies a call to :meth:`BreakpointGraph._BreakpointGraph__delete_all_bgedges_between_two_vertices` method. :param vertex1: a first out of two vertices edges between which are to be deleted :type vertex1: any python hashable object. :class:`bg.vertex.BGVertex` is expected :param vertex2: a second out of two vertices edges between which are to be deleted :type vertex2: any python hashable object. :class:`bg.vertex.BGVertex` is expected :return: ``None``, performs inplace changes
[ "Deletes", "all", "edges", "between", "two", "supplied", "vertices" ]
train
https://github.com/aganezov/bg/blob/1ec758193441e49e7b34e0da09571480f4c24455/bg/breakpoint_graph.py#L618-L629
aganezov/bg
bg/breakpoint_graph.py
BreakpointGraph.__merge_all_bgedges_between_two_vertices
def __merge_all_bgedges_between_two_vertices(self, vertex1, vertex2): """ Merges all edge between two supplied vertices into a single edge from a perspective of multi-color merging. :param vertex1: a first out of two vertices edges between which are to be merged together :type vertex1: any python hashable object. :class:`bg.vertex.BGVertex` is expected :param vertex2: a second out of two vertices edges between which are to be merged together :type vertex2: any python hashable object. :class:`bg.vertex.BGVertex` is expected :return: ``None``, performs inplace changes """ ############################################################################################################ # # no actual merging is performed, but rather all edges between two vertices are deleted # and then added with a merge argument set to true # ############################################################################################################ edges_multicolors = [deepcopy(data["attr_dict"]["multicolor"]) for v1, v2, data in self.bg.edges(nbunch=vertex1, data=True) if v2 == vertex2] self.__delete_all_bgedges_between_two_vertices(vertex1=vertex1, vertex2=vertex2) for multicolor in edges_multicolors: self.__add_bgedge(BGEdge(vertex1=vertex1, vertex2=vertex2, multicolor=multicolor), merge=True)
python
def __merge_all_bgedges_between_two_vertices(self, vertex1, vertex2): """ Merges all edge between two supplied vertices into a single edge from a perspective of multi-color merging. :param vertex1: a first out of two vertices edges between which are to be merged together :type vertex1: any python hashable object. :class:`bg.vertex.BGVertex` is expected :param vertex2: a second out of two vertices edges between which are to be merged together :type vertex2: any python hashable object. :class:`bg.vertex.BGVertex` is expected :return: ``None``, performs inplace changes """ ############################################################################################################ # # no actual merging is performed, but rather all edges between two vertices are deleted # and then added with a merge argument set to true # ############################################################################################################ edges_multicolors = [deepcopy(data["attr_dict"]["multicolor"]) for v1, v2, data in self.bg.edges(nbunch=vertex1, data=True) if v2 == vertex2] self.__delete_all_bgedges_between_two_vertices(vertex1=vertex1, vertex2=vertex2) for multicolor in edges_multicolors: self.__add_bgedge(BGEdge(vertex1=vertex1, vertex2=vertex2, multicolor=multicolor), merge=True)
[ "def", "__merge_all_bgedges_between_two_vertices", "(", "self", ",", "vertex1", ",", "vertex2", ")", ":", "############################################################################################################", "#", "# no actual merging is performed, but rather all edges between two v...
Merges all edge between two supplied vertices into a single edge from a perspective of multi-color merging. :param vertex1: a first out of two vertices edges between which are to be merged together :type vertex1: any python hashable object. :class:`bg.vertex.BGVertex` is expected :param vertex2: a second out of two vertices edges between which are to be merged together :type vertex2: any python hashable object. :class:`bg.vertex.BGVertex` is expected :return: ``None``, performs inplace changes
[ "Merges", "all", "edge", "between", "two", "supplied", "vertices", "into", "a", "single", "edge", "from", "a", "perspective", "of", "multi", "-", "color", "merging", "." ]
train
https://github.com/aganezov/bg/blob/1ec758193441e49e7b34e0da09571480f4c24455/bg/breakpoint_graph.py#L631-L650
aganezov/bg
bg/breakpoint_graph.py
BreakpointGraph.merge_all_edges_between_two_vertices
def merge_all_edges_between_two_vertices(self, vertex1, vertex2): """ Merges all edge between two supplied vertices into a single edge from a perspective of multi-color merging. Proxies a call to :meth:`BreakpointGraph._BreakpointGraph__merge_all_bgedges_between_two_vertices` :param vertex1: a first out of two vertices edges between which are to be merged together :type vertex1: any python hashable object. :class:`bg.vertex.BGVertex` is expected :param vertex2: a second out of two vertices edges between which are to be merged together :type vertex2: any python hashable object. :class:`bg.vertex.BGVertex` is expected :return: ``None``, performs inplace changes """ self.__merge_all_bgedges_between_two_vertices(vertex1=vertex1, vertex2=vertex2)
python
def merge_all_edges_between_two_vertices(self, vertex1, vertex2): """ Merges all edge between two supplied vertices into a single edge from a perspective of multi-color merging. Proxies a call to :meth:`BreakpointGraph._BreakpointGraph__merge_all_bgedges_between_two_vertices` :param vertex1: a first out of two vertices edges between which are to be merged together :type vertex1: any python hashable object. :class:`bg.vertex.BGVertex` is expected :param vertex2: a second out of two vertices edges between which are to be merged together :type vertex2: any python hashable object. :class:`bg.vertex.BGVertex` is expected :return: ``None``, performs inplace changes """ self.__merge_all_bgedges_between_two_vertices(vertex1=vertex1, vertex2=vertex2)
[ "def", "merge_all_edges_between_two_vertices", "(", "self", ",", "vertex1", ",", "vertex2", ")", ":", "self", ".", "__merge_all_bgedges_between_two_vertices", "(", "vertex1", "=", "vertex1", ",", "vertex2", "=", "vertex2", ")" ]
Merges all edge between two supplied vertices into a single edge from a perspective of multi-color merging. Proxies a call to :meth:`BreakpointGraph._BreakpointGraph__merge_all_bgedges_between_two_vertices` :param vertex1: a first out of two vertices edges between which are to be merged together :type vertex1: any python hashable object. :class:`bg.vertex.BGVertex` is expected :param vertex2: a second out of two vertices edges between which are to be merged together :type vertex2: any python hashable object. :class:`bg.vertex.BGVertex` is expected :return: ``None``, performs inplace changes
[ "Merges", "all", "edge", "between", "two", "supplied", "vertices", "into", "a", "single", "edge", "from", "a", "perspective", "of", "multi", "-", "color", "merging", "." ]
train
https://github.com/aganezov/bg/blob/1ec758193441e49e7b34e0da09571480f4c24455/bg/breakpoint_graph.py#L652-L663
aganezov/bg
bg/breakpoint_graph.py
BreakpointGraph.merge_all_edges
def merge_all_edges(self): """ Merges all edges in a current :class`BreakpointGraph` instance between same pairs of vertices into a single edge from a perspective of multi-color merging. Iterates over all possible pairs of vertices in current :class:`BreakpointGraph` and merges all edges between respective pairs. :return: ``None``, performs inplace changes """ pairs_of_vetices = [(edge.vertex1, edge.vertex2) for edge in self.edges()] for v1, v2 in pairs_of_vetices: ############################################################################################################ # # we iterate over all pairs of vertices in the given graph and merge edges between them # ############################################################################################################ self.__merge_all_bgedges_between_two_vertices(vertex1=v1, vertex2=v2)
python
def merge_all_edges(self): """ Merges all edges in a current :class`BreakpointGraph` instance between same pairs of vertices into a single edge from a perspective of multi-color merging. Iterates over all possible pairs of vertices in current :class:`BreakpointGraph` and merges all edges between respective pairs. :return: ``None``, performs inplace changes """ pairs_of_vetices = [(edge.vertex1, edge.vertex2) for edge in self.edges()] for v1, v2 in pairs_of_vetices: ############################################################################################################ # # we iterate over all pairs of vertices in the given graph and merge edges between them # ############################################################################################################ self.__merge_all_bgedges_between_two_vertices(vertex1=v1, vertex2=v2)
[ "def", "merge_all_edges", "(", "self", ")", ":", "pairs_of_vetices", "=", "[", "(", "edge", ".", "vertex1", ",", "edge", ".", "vertex2", ")", "for", "edge", "in", "self", ".", "edges", "(", ")", "]", "for", "v1", ",", "v2", "in", "pairs_of_vetices", ...
Merges all edges in a current :class`BreakpointGraph` instance between same pairs of vertices into a single edge from a perspective of multi-color merging. Iterates over all possible pairs of vertices in current :class:`BreakpointGraph` and merges all edges between respective pairs. :return: ``None``, performs inplace changes
[ "Merges", "all", "edges", "in", "a", "current", ":", "class", "BreakpointGraph", "instance", "between", "same", "pairs", "of", "vertices", "into", "a", "single", "edge", "from", "a", "perspective", "of", "multi", "-", "color", "merging", "." ]
train
https://github.com/aganezov/bg/blob/1ec758193441e49e7b34e0da09571480f4c24455/bg/breakpoint_graph.py#L665-L679
aganezov/bg
bg/breakpoint_graph.py
BreakpointGraph.merge
def merge(cls, breakpoint_graph1, breakpoint_graph2, merge_edges=False): """ Merges two given instances of :class`BreakpointGraph` into a new one, that gather all available information from both supplied objects. Depending of a ``merge_edges`` flag, while merging of two dat structures is occurring, edges between similar vertices can be merged during the creation of a result :class`BreakpointGraph` obejct. Accounts for subclassing. :param breakpoint_graph1: a first out of two :class`BreakpointGraph` instances to gather information from :type breakpoint_graph1: :class`BreakpointGraph` :param breakpoint_graph2: a second out of two :class`BreakpointGraph` instances to gather information from :type breakpoint_graph2: :class`BreakpointGraph` :param merge_edges: flag to indicate if edges in a new merged :class`BreakpointGraph` object has to be merged between same vertices, or if splitting from supplied graphs shall be preserved. :type merge_edges: ``Boolean`` :return: a new breakpoint graph object that contains all information gathered from both supplied breakpoint graphs :rtype: :class`BreakpointGraph` """ result = cls() for bgedge in breakpoint_graph1.edges(): result.__add_bgedge(bgedge=bgedge, merge=merge_edges) for bgedge in breakpoint_graph2.edges(): result.__add_bgedge(bgedge=bgedge, merge=merge_edges) return result
python
def merge(cls, breakpoint_graph1, breakpoint_graph2, merge_edges=False): """ Merges two given instances of :class`BreakpointGraph` into a new one, that gather all available information from both supplied objects. Depending of a ``merge_edges`` flag, while merging of two dat structures is occurring, edges between similar vertices can be merged during the creation of a result :class`BreakpointGraph` obejct. Accounts for subclassing. :param breakpoint_graph1: a first out of two :class`BreakpointGraph` instances to gather information from :type breakpoint_graph1: :class`BreakpointGraph` :param breakpoint_graph2: a second out of two :class`BreakpointGraph` instances to gather information from :type breakpoint_graph2: :class`BreakpointGraph` :param merge_edges: flag to indicate if edges in a new merged :class`BreakpointGraph` object has to be merged between same vertices, or if splitting from supplied graphs shall be preserved. :type merge_edges: ``Boolean`` :return: a new breakpoint graph object that contains all information gathered from both supplied breakpoint graphs :rtype: :class`BreakpointGraph` """ result = cls() for bgedge in breakpoint_graph1.edges(): result.__add_bgedge(bgedge=bgedge, merge=merge_edges) for bgedge in breakpoint_graph2.edges(): result.__add_bgedge(bgedge=bgedge, merge=merge_edges) return result
[ "def", "merge", "(", "cls", ",", "breakpoint_graph1", ",", "breakpoint_graph2", ",", "merge_edges", "=", "False", ")", ":", "result", "=", "cls", "(", ")", "for", "bgedge", "in", "breakpoint_graph1", ".", "edges", "(", ")", ":", "result", ".", "__add_bgedg...
Merges two given instances of :class`BreakpointGraph` into a new one, that gather all available information from both supplied objects. Depending of a ``merge_edges`` flag, while merging of two dat structures is occurring, edges between similar vertices can be merged during the creation of a result :class`BreakpointGraph` obejct. Accounts for subclassing. :param breakpoint_graph1: a first out of two :class`BreakpointGraph` instances to gather information from :type breakpoint_graph1: :class`BreakpointGraph` :param breakpoint_graph2: a second out of two :class`BreakpointGraph` instances to gather information from :type breakpoint_graph2: :class`BreakpointGraph` :param merge_edges: flag to indicate if edges in a new merged :class`BreakpointGraph` object has to be merged between same vertices, or if splitting from supplied graphs shall be preserved. :type merge_edges: ``Boolean`` :return: a new breakpoint graph object that contains all information gathered from both supplied breakpoint graphs :rtype: :class`BreakpointGraph`
[ "Merges", "two", "given", "instances", "of", ":", "class", "BreakpointGraph", "into", "a", "new", "one", "that", "gather", "all", "available", "information", "from", "both", "supplied", "objects", "." ]
train
https://github.com/aganezov/bg/blob/1ec758193441e49e7b34e0da09571480f4c24455/bg/breakpoint_graph.py#L682-L703
aganezov/bg
bg/breakpoint_graph.py
BreakpointGraph.__update
def __update(self, breakpoint_graph, merge_edges=False): """ Updates a current :class`BreakpointGraph` object with information from a supplied :class`BreakpointGraph` instance. Depending of a ``merge_edges`` flag, while updating of a current :class`BreakpointGraph` object is occuring, edges between similar vertices can be merged to already existing ones. :param breakpoint_graph: a breakpoint graph to extract information from, which will be then added to the current :type breakpoint_graph: :class`BreakpointGraph` :param merge_edges: flag to indicate if edges to be added to current :class`BreakpointGraph` object are to be merged to already existing ones :type merge_edges: ``Boolean`` :return: ``None``, performs inplace changes """ for bgedge in breakpoint_graph.edges(): self.__add_bgedge(bgedge=deepcopy(bgedge), merge=merge_edges)
python
def __update(self, breakpoint_graph, merge_edges=False): """ Updates a current :class`BreakpointGraph` object with information from a supplied :class`BreakpointGraph` instance. Depending of a ``merge_edges`` flag, while updating of a current :class`BreakpointGraph` object is occuring, edges between similar vertices can be merged to already existing ones. :param breakpoint_graph: a breakpoint graph to extract information from, which will be then added to the current :type breakpoint_graph: :class`BreakpointGraph` :param merge_edges: flag to indicate if edges to be added to current :class`BreakpointGraph` object are to be merged to already existing ones :type merge_edges: ``Boolean`` :return: ``None``, performs inplace changes """ for bgedge in breakpoint_graph.edges(): self.__add_bgedge(bgedge=deepcopy(bgedge), merge=merge_edges)
[ "def", "__update", "(", "self", ",", "breakpoint_graph", ",", "merge_edges", "=", "False", ")", ":", "for", "bgedge", "in", "breakpoint_graph", ".", "edges", "(", ")", ":", "self", ".", "__add_bgedge", "(", "bgedge", "=", "deepcopy", "(", "bgedge", ")", ...
Updates a current :class`BreakpointGraph` object with information from a supplied :class`BreakpointGraph` instance. Depending of a ``merge_edges`` flag, while updating of a current :class`BreakpointGraph` object is occuring, edges between similar vertices can be merged to already existing ones. :param breakpoint_graph: a breakpoint graph to extract information from, which will be then added to the current :type breakpoint_graph: :class`BreakpointGraph` :param merge_edges: flag to indicate if edges to be added to current :class`BreakpointGraph` object are to be merged to already existing ones :type merge_edges: ``Boolean`` :return: ``None``, performs inplace changes
[ "Updates", "a", "current", ":", "class", "BreakpointGraph", "object", "with", "information", "from", "a", "supplied", ":", "class", "BreakpointGraph", "instance", "." ]
train
https://github.com/aganezov/bg/blob/1ec758193441e49e7b34e0da09571480f4c24455/bg/breakpoint_graph.py#L705-L717
aganezov/bg
bg/breakpoint_graph.py
BreakpointGraph.update
def update(self, breakpoint_graph, merge_edges=False): """ Updates a current :class`BreakpointGraph` object with information from a supplied :class`BreakpointGraph` instance. Proxoes a call to :meth:`BreakpointGraph._BreakpointGraph__update` method. :param breakpoint_graph: a breakpoint graph to extract information from, which will be then added to the current :type breakpoint_graph: :class:`BreakpointGraph` :param merge_edges: flag to indicate if edges to be added to current :class`BreakpointGraph` object are to be merged to already existing ones :type merge_edges: ``Boolean`` :return: ``None``, performs inplace changes """ self.__update(breakpoint_graph=breakpoint_graph, merge_edges=merge_edges)
python
def update(self, breakpoint_graph, merge_edges=False): """ Updates a current :class`BreakpointGraph` object with information from a supplied :class`BreakpointGraph` instance. Proxoes a call to :meth:`BreakpointGraph._BreakpointGraph__update` method. :param breakpoint_graph: a breakpoint graph to extract information from, which will be then added to the current :type breakpoint_graph: :class:`BreakpointGraph` :param merge_edges: flag to indicate if edges to be added to current :class`BreakpointGraph` object are to be merged to already existing ones :type merge_edges: ``Boolean`` :return: ``None``, performs inplace changes """ self.__update(breakpoint_graph=breakpoint_graph, merge_edges=merge_edges)
[ "def", "update", "(", "self", ",", "breakpoint_graph", ",", "merge_edges", "=", "False", ")", ":", "self", ".", "__update", "(", "breakpoint_graph", "=", "breakpoint_graph", ",", "merge_edges", "=", "merge_edges", ")" ]
Updates a current :class`BreakpointGraph` object with information from a supplied :class`BreakpointGraph` instance. Proxoes a call to :meth:`BreakpointGraph._BreakpointGraph__update` method. :param breakpoint_graph: a breakpoint graph to extract information from, which will be then added to the current :type breakpoint_graph: :class:`BreakpointGraph` :param merge_edges: flag to indicate if edges to be added to current :class`BreakpointGraph` object are to be merged to already existing ones :type merge_edges: ``Boolean`` :return: ``None``, performs inplace changes
[ "Updates", "a", "current", ":", "class", "BreakpointGraph", "object", "with", "information", "from", "a", "supplied", ":", "class", "BreakpointGraph", "instance", "." ]
train
https://github.com/aganezov/bg/blob/1ec758193441e49e7b34e0da09571480f4c24455/bg/breakpoint_graph.py#L719-L731
aganezov/bg
bg/breakpoint_graph.py
BreakpointGraph.apply_kbreak
def apply_kbreak(self, kbreak, merge=True): """ Check validity of supplied k-break and then applies it to current :class:`BreakpointGraph` Only :class:`bg.kbreak.KBreak` (or its heirs) instances are allowed as ``kbreak`` argument. KBreak must correspond to the valid kbreak and, since some changes to its internals might have been done since its creation, a validity check in terms of starting/resulting edges is performed. All vertices in supplied KBreak (except for paired infinity vertices) must be present in current :class:`BreakpointGraph`. For all supplied pairs of vertices (except for paired infinity vertices), there must be edges between such pairs of vertices, at least one of which must contain a multicolor matching a multicolor of supplied kbreak. Edges of specified in kbreak multicolor are deleted between supplied pairs of vertices in kbreak.start_edges (except for paired infinity vertices). New edges of specified in kbreak multicolor are added between all pairs of vertices in kbreak.result_edges (except for paired infinity vertices). If after the kbreak application there is an infinity vertex, that now has no edges incident to it, it is deleted form the current :class:`BreakpointGraph`. 
:param kbreak: a k-break to be applied to current :class:`BreakpointGraph` :type kbreak: `bg.kbreak.KBreak` :param merge: a flag to indicate on how edges, that will be created by a k-break, will be added to current :class:`BreakpointGraph` :type merge: ``Boolean`` :return: nothing, performs inplace changes :rtype: ``None`` :raises: ``ValueError``, ``TypeError`` """ ############################################################################################################ # # k-break must ba valid to be applied # ############################################################################################################ vertices = {} edge_data = {} if not isinstance(kbreak, KBreak): raise TypeError("Only KBreak and derivatives are allowed as kbreak argument") if not KBreak.valid_kbreak_matchings(kbreak.start_edges, kbreak.result_edges): raise ValueError("Supplied KBreak is not valid form perspective of starting/resulting sets of vertices") for vertex1, vertex2 in kbreak.start_edges: if vertex1.is_infinity_vertex and vertex2.is_infinity_vertex: ############################################################################################################ # # when we encounter a fully infinity edge (both vertices are infinity vertices) # we shall not check if they are present in the current graph, because hat portion of a kbreak is artificial # ############################################################################################################ continue if vertex1 not in self.bg or vertex2 not in self.bg: raise ValueError("Supplied KBreak targets vertices (`{v1}` and `{v2}`) at least one of which " "does not exist in current BreakpointGraph" "".format(v1=vertex1.name, v2=vertex2.name)) for vertex1, vertex2 in kbreak.start_edges: if vertex1.is_infinity_vertex and vertex2.is_infinity_vertex: continue for bgedge in self.__edges_between_two_vertices(vertex1=vertex1, vertex2=vertex2): 
############################################################################################################ # # at least one edge between supplied pair of vertices must contain a multicolor that is specified for the kbreak # ############################################################################################################ if kbreak.multicolor <= bgedge.multicolor: break else: raise ValueError("Some targeted by kbreak edge with specified multicolor does not exists") for vertex1, vertex2 in kbreak.start_edges: if vertex1.is_infinity_vertex and vertex2.is_infinity_vertex: continue v1 = self.__get_vertex_by_name(vertex_name=vertex1.name) vertices[v1] = v1 v2 = self.__get_vertex_by_name(vertex_name=vertex2.name) vertices[v2] = v2 bgedge = BGEdge(vertex1=v1, vertex2=v2, multicolor=kbreak.multicolor) candidate_data, candidate_id, candidate_score = self.__determine_most_suitable_edge_for_deletion( bgedge=bgedge) data = candidate_data["attr_dict"]["data"] edge_data[v1] = data edge_data[v2] = data self.__delete_bgedge(bgedge=bgedge, keep_vertices=True) for vertex_set in kbreak.start_edges: for vertex in vertex_set: if vertex.is_infinity_vertex and vertex in self.bg: ############################################################################################################ # # after the first portion of a kbreak is performed one must make sure we don't leave any infinity vertices # that have edges going to them, as infinity vertex is a special artificial vertex # and it has meaning only if there are edges going to / from it # ############################################################################################################ if len(list(self.get_edges_by_vertex(vertex=vertex))) == 0: self.bg.remove_node(vertex) for vertex1, vertex2 in kbreak.result_edges: if vertex1.is_infinity_vertex and vertex2.is_infinity_vertex: ############################################################################################################ # # if we encounter a pair of 
infinity vertices in result edges set, we shall not add them # as at least a part of kbreak corresponded to fusion # and those infinity edges on their own won't have any meaning # ############################################################################################################ continue origin = kbreak.data.get("origin", None) v1 = vertices.get(vertex1, vertex1) v2 = vertices.get(vertex2, vertex2) bg_edge = BGEdge(vertex1=v1, vertex2=v2, multicolor=kbreak.multicolor) if "origin" in bg_edge.data: bg_edge.data["origin"] = origin if kbreak.is_a_fusion: edge1_data = edge_data[v1] edge2_data = edge_data[v2] merged_edge_fragment_data = merge_fragment_edge_data(edge1_data["fragment"], edge2_data["fragment"]) result_edge_data = {} recursive_dict_update(result_edge_data, edge1_data) recursive_dict_update(result_edge_data, edge2_data) recursive_dict_update(result_edge_data, {"fragment": merged_edge_fragment_data}) recursive_dict_update(bg_edge.data, result_edge_data) self.__add_bgedge(bg_edge, merge=merge)
python
def apply_kbreak(self, kbreak, merge=True): """ Check validity of supplied k-break and then applies it to current :class:`BreakpointGraph` Only :class:`bg.kbreak.KBreak` (or its heirs) instances are allowed as ``kbreak`` argument. KBreak must correspond to the valid kbreak and, since some changes to its internals might have been done since its creation, a validity check in terms of starting/resulting edges is performed. All vertices in supplied KBreak (except for paired infinity vertices) must be present in current :class:`BreakpointGraph`. For all supplied pairs of vertices (except for paired infinity vertices), there must be edges between such pairs of vertices, at least one of which must contain a multicolor matching a multicolor of supplied kbreak. Edges of specified in kbreak multicolor are deleted between supplied pairs of vertices in kbreak.start_edges (except for paired infinity vertices). New edges of specified in kbreak multicolor are added between all pairs of vertices in kbreak.result_edges (except for paired infinity vertices). If after the kbreak application there is an infinity vertex, that now has no edges incident to it, it is deleted form the current :class:`BreakpointGraph`. 
:param kbreak: a k-break to be applied to current :class:`BreakpointGraph` :type kbreak: `bg.kbreak.KBreak` :param merge: a flag to indicate on how edges, that will be created by a k-break, will be added to current :class:`BreakpointGraph` :type merge: ``Boolean`` :return: nothing, performs inplace changes :rtype: ``None`` :raises: ``ValueError``, ``TypeError`` """ ############################################################################################################ # # k-break must ba valid to be applied # ############################################################################################################ vertices = {} edge_data = {} if not isinstance(kbreak, KBreak): raise TypeError("Only KBreak and derivatives are allowed as kbreak argument") if not KBreak.valid_kbreak_matchings(kbreak.start_edges, kbreak.result_edges): raise ValueError("Supplied KBreak is not valid form perspective of starting/resulting sets of vertices") for vertex1, vertex2 in kbreak.start_edges: if vertex1.is_infinity_vertex and vertex2.is_infinity_vertex: ############################################################################################################ # # when we encounter a fully infinity edge (both vertices are infinity vertices) # we shall not check if they are present in the current graph, because hat portion of a kbreak is artificial # ############################################################################################################ continue if vertex1 not in self.bg or vertex2 not in self.bg: raise ValueError("Supplied KBreak targets vertices (`{v1}` and `{v2}`) at least one of which " "does not exist in current BreakpointGraph" "".format(v1=vertex1.name, v2=vertex2.name)) for vertex1, vertex2 in kbreak.start_edges: if vertex1.is_infinity_vertex and vertex2.is_infinity_vertex: continue for bgedge in self.__edges_between_two_vertices(vertex1=vertex1, vertex2=vertex2): 
############################################################################################################ # # at least one edge between supplied pair of vertices must contain a multicolor that is specified for the kbreak # ############################################################################################################ if kbreak.multicolor <= bgedge.multicolor: break else: raise ValueError("Some targeted by kbreak edge with specified multicolor does not exists") for vertex1, vertex2 in kbreak.start_edges: if vertex1.is_infinity_vertex and vertex2.is_infinity_vertex: continue v1 = self.__get_vertex_by_name(vertex_name=vertex1.name) vertices[v1] = v1 v2 = self.__get_vertex_by_name(vertex_name=vertex2.name) vertices[v2] = v2 bgedge = BGEdge(vertex1=v1, vertex2=v2, multicolor=kbreak.multicolor) candidate_data, candidate_id, candidate_score = self.__determine_most_suitable_edge_for_deletion( bgedge=bgedge) data = candidate_data["attr_dict"]["data"] edge_data[v1] = data edge_data[v2] = data self.__delete_bgedge(bgedge=bgedge, keep_vertices=True) for vertex_set in kbreak.start_edges: for vertex in vertex_set: if vertex.is_infinity_vertex and vertex in self.bg: ############################################################################################################ # # after the first portion of a kbreak is performed one must make sure we don't leave any infinity vertices # that have edges going to them, as infinity vertex is a special artificial vertex # and it has meaning only if there are edges going to / from it # ############################################################################################################ if len(list(self.get_edges_by_vertex(vertex=vertex))) == 0: self.bg.remove_node(vertex) for vertex1, vertex2 in kbreak.result_edges: if vertex1.is_infinity_vertex and vertex2.is_infinity_vertex: ############################################################################################################ # # if we encounter a pair of 
infinity vertices in result edges set, we shall not add them # as at least a part of kbreak corresponded to fusion # and those infinity edges on their own won't have any meaning # ############################################################################################################ continue origin = kbreak.data.get("origin", None) v1 = vertices.get(vertex1, vertex1) v2 = vertices.get(vertex2, vertex2) bg_edge = BGEdge(vertex1=v1, vertex2=v2, multicolor=kbreak.multicolor) if "origin" in bg_edge.data: bg_edge.data["origin"] = origin if kbreak.is_a_fusion: edge1_data = edge_data[v1] edge2_data = edge_data[v2] merged_edge_fragment_data = merge_fragment_edge_data(edge1_data["fragment"], edge2_data["fragment"]) result_edge_data = {} recursive_dict_update(result_edge_data, edge1_data) recursive_dict_update(result_edge_data, edge2_data) recursive_dict_update(result_edge_data, {"fragment": merged_edge_fragment_data}) recursive_dict_update(bg_edge.data, result_edge_data) self.__add_bgedge(bg_edge, merge=merge)
[ "def", "apply_kbreak", "(", "self", ",", "kbreak", ",", "merge", "=", "True", ")", ":", "############################################################################################################", "#", "# k-break must ba valid to be applied", "#", "################################...
Check validity of supplied k-break and then applies it to current :class:`BreakpointGraph` Only :class:`bg.kbreak.KBreak` (or its heirs) instances are allowed as ``kbreak`` argument. KBreak must correspond to the valid kbreak and, since some changes to its internals might have been done since its creation, a validity check in terms of starting/resulting edges is performed. All vertices in supplied KBreak (except for paired infinity vertices) must be present in current :class:`BreakpointGraph`. For all supplied pairs of vertices (except for paired infinity vertices), there must be edges between such pairs of vertices, at least one of which must contain a multicolor matching a multicolor of supplied kbreak. Edges of specified in kbreak multicolor are deleted between supplied pairs of vertices in kbreak.start_edges (except for paired infinity vertices). New edges of specified in kbreak multicolor are added between all pairs of vertices in kbreak.result_edges (except for paired infinity vertices). If after the kbreak application there is an infinity vertex, that now has no edges incident to it, it is deleted form the current :class:`BreakpointGraph`. :param kbreak: a k-break to be applied to current :class:`BreakpointGraph` :type kbreak: `bg.kbreak.KBreak` :param merge: a flag to indicate on how edges, that will be created by a k-break, will be added to current :class:`BreakpointGraph` :type merge: ``Boolean`` :return: nothing, performs inplace changes :rtype: ``None`` :raises: ``ValueError``, ``TypeError``
[ "Check", "validity", "of", "supplied", "k", "-", "break", "and", "then", "applies", "it", "to", "current", ":", "class", ":", "BreakpointGraph" ]
train
https://github.com/aganezov/bg/blob/1ec758193441e49e7b34e0da09571480f4c24455/bg/breakpoint_graph.py#L733-L842
aganezov/bg
bg/breakpoint_graph.py
BreakpointGraph.to_json
def to_json(self, schema_info=True): """ JSON serialization method that account for all information-wise important part of breakpoint graph """ genomes = set() result = {} result["edges"] = [] for bgedge in self.edges(): genomes |= bgedge.multicolor.colors result["edges"].append(bgedge.to_json(schema_info=schema_info)) result["vertices"] = [bgvertex.to_json(schema_info=schema_info) for bgvertex in self.nodes()] result["genomes"] = [bggenome.to_json(schema_info=schema_info) for bggenome in genomes] return result
python
def to_json(self, schema_info=True): """ JSON serialization method that account for all information-wise important part of breakpoint graph """ genomes = set() result = {} result["edges"] = [] for bgedge in self.edges(): genomes |= bgedge.multicolor.colors result["edges"].append(bgedge.to_json(schema_info=schema_info)) result["vertices"] = [bgvertex.to_json(schema_info=schema_info) for bgvertex in self.nodes()] result["genomes"] = [bggenome.to_json(schema_info=schema_info) for bggenome in genomes] return result
[ "def", "to_json", "(", "self", ",", "schema_info", "=", "True", ")", ":", "genomes", "=", "set", "(", ")", "result", "=", "{", "}", "result", "[", "\"edges\"", "]", "=", "[", "]", "for", "bgedge", "in", "self", ".", "edges", "(", ")", ":", "genom...
JSON serialization method that account for all information-wise important part of breakpoint graph
[ "JSON", "serialization", "method", "that", "account", "for", "all", "information", "-", "wise", "important", "part", "of", "breakpoint", "graph" ]
train
https://github.com/aganezov/bg/blob/1ec758193441e49e7b34e0da09571480f4c24455/bg/breakpoint_graph.py#L844-L855
aganezov/bg
bg/breakpoint_graph.py
BreakpointGraph.from_json
def from_json(cls, data, genomes_data=None, genomes_deserialization_required=True, merge=False): """ A JSON deserialization operation, that recovers a breakpoint graph from its JSON representation as information about genomes, that are encoded in breakpoint graph might be available somewhere else, but not the json object, there is an option to provide it and omit encoding information about genomes. """ result = cls() merge = merge vertices_dict = {} genomes_dict = genomes_data if genomes_data is not None and not genomes_deserialization_required else None if genomes_dict is None: ############################################################################################################ # # if we need to recover genomes information from breakpoint graph json object # we are happy to do that # ############################################################################################################ genomes_dict = {} try: source = genomes_data if genomes_data is not None and genomes_deserialization_required else data[ "genomes"] except KeyError as exc: raise ValueError("Error during breakpoint graph deserialization. 
No \"genomes\" information found") for g_dict in source: ############################################################################################################ # # if explicitly specified in genome json object, it can be decoded using provided schema name, # of course a decoding breakpoint graph object shall be aware of such scheme # (it has to be specified in the `genomes_json_schemas` class wide dict) # ############################################################################################################ schema_name = g_dict.get(BGGenome_JSON_SCHEMA_JSON_KEY, None) schema_class = None if schema_name is None else cls.genomes_json_schemas.get(schema_name, None) genomes_dict[g_dict["g_id"]] = BGGenome.from_json(data=g_dict, json_schema_class=schema_class) if "vertices" not in data: ############################################################################################################ # # breakpoint graph can not be decoded without having information about vertices explicitly # as vertices are referenced in edges object, rather than explicitly provided # ############################################################################################################ raise ValueError( "Error during breakpoint graph deserialization. 
\"vertices\" key is not present in json object") for vertex_dict in data["vertices"]: ############################################################################################################ # # if explicitly specified in vertex json object, it can be decoded using provided schema name, # of course a decoding breakpoint graph object shall be aware of such scheme # (it has to be specified in the `vertices_json_schemas` class wide dict) # ############################################################################################################ schema_name = vertex_dict.get(BGVertex_JSON_SCHEMA_JSON_KEY, None) schema_class = None if schema_name is None else cls.vertices_json_schemas.get(schema_name, None) try: ############################################################################################################ # # we try to recover a specific vertex class based on its name. # it does not overwrite the schema based behaviour # but provides a correct default schema for a specific vertex type # ############################################################################################################ vertex_class = BGVertex.get_vertex_class_from_vertex_name(vertex_dict["name"]) except KeyError: vertex_class = BGVertex vertices_dict[vertex_dict["v_id"]] = vertex_class.from_json(data=vertex_dict, json_schema_class=schema_class) for edge_dict in data["edges"]: ############################################################################################################ # # if explicitly specified in edge json object, it can be decoded using provided schema name, # of course a decoding breakpoint graph object shall be aware of such scheme # (it has to be specified in the `edges_json_schemas` class wide dict) # ############################################################################################################ schema_name = edge_dict.get(BGEdge_JSON_SCHEMA_JSON_KEY, None) schema = None if schema_name is None else cls.edges_json_schemas.get(schema_name, 
None) edge = BGEdge.from_json(data=edge_dict, json_schema_class=schema) try: edge.vertex1 = vertices_dict[edge.vertex1] edge.vertex2 = vertices_dict[edge.vertex2] except KeyError: ############################################################################################################ # # as edge references a pair of vertices, we must be sure respective vertices were decoded # ############################################################################################################ raise ValueError( "Error during breakpoint graph deserialization. Deserialized edge references non-present vertex") if len(edge.multicolor) == 0: ############################################################################################################ # # edges with empty multicolor are not permitted in breakpoint graphs # ############################################################################################################ raise ValueError( "Error during breakpoint graph deserialization. Empty multicolor for deserialized edge") try: edge.multicolor = Multicolor(*[genomes_dict[g_id] for g_id in edge.multicolor]) except KeyError: raise ValueError( "Error during breakpoint graph deserialization. Deserialized edge reference non-present " "genome in its multicolor") result.__add_bgedge(edge, merge=merge) return result
python
def from_json(cls, data, genomes_data=None, genomes_deserialization_required=True, merge=False): """ A JSON deserialization operation, that recovers a breakpoint graph from its JSON representation as information about genomes, that are encoded in breakpoint graph might be available somewhere else, but not the json object, there is an option to provide it and omit encoding information about genomes. """ result = cls() merge = merge vertices_dict = {} genomes_dict = genomes_data if genomes_data is not None and not genomes_deserialization_required else None if genomes_dict is None: ############################################################################################################ # # if we need to recover genomes information from breakpoint graph json object # we are happy to do that # ############################################################################################################ genomes_dict = {} try: source = genomes_data if genomes_data is not None and genomes_deserialization_required else data[ "genomes"] except KeyError as exc: raise ValueError("Error during breakpoint graph deserialization. 
No \"genomes\" information found") for g_dict in source: ############################################################################################################ # # if explicitly specified in genome json object, it can be decoded using provided schema name, # of course a decoding breakpoint graph object shall be aware of such scheme # (it has to be specified in the `genomes_json_schemas` class wide dict) # ############################################################################################################ schema_name = g_dict.get(BGGenome_JSON_SCHEMA_JSON_KEY, None) schema_class = None if schema_name is None else cls.genomes_json_schemas.get(schema_name, None) genomes_dict[g_dict["g_id"]] = BGGenome.from_json(data=g_dict, json_schema_class=schema_class) if "vertices" not in data: ############################################################################################################ # # breakpoint graph can not be decoded without having information about vertices explicitly # as vertices are referenced in edges object, rather than explicitly provided # ############################################################################################################ raise ValueError( "Error during breakpoint graph deserialization. 
\"vertices\" key is not present in json object") for vertex_dict in data["vertices"]: ############################################################################################################ # # if explicitly specified in vertex json object, it can be decoded using provided schema name, # of course a decoding breakpoint graph object shall be aware of such scheme # (it has to be specified in the `vertices_json_schemas` class wide dict) # ############################################################################################################ schema_name = vertex_dict.get(BGVertex_JSON_SCHEMA_JSON_KEY, None) schema_class = None if schema_name is None else cls.vertices_json_schemas.get(schema_name, None) try: ############################################################################################################ # # we try to recover a specific vertex class based on its name. # it does not overwrite the schema based behaviour # but provides a correct default schema for a specific vertex type # ############################################################################################################ vertex_class = BGVertex.get_vertex_class_from_vertex_name(vertex_dict["name"]) except KeyError: vertex_class = BGVertex vertices_dict[vertex_dict["v_id"]] = vertex_class.from_json(data=vertex_dict, json_schema_class=schema_class) for edge_dict in data["edges"]: ############################################################################################################ # # if explicitly specified in edge json object, it can be decoded using provided schema name, # of course a decoding breakpoint graph object shall be aware of such scheme # (it has to be specified in the `edges_json_schemas` class wide dict) # ############################################################################################################ schema_name = edge_dict.get(BGEdge_JSON_SCHEMA_JSON_KEY, None) schema = None if schema_name is None else cls.edges_json_schemas.get(schema_name, 
None) edge = BGEdge.from_json(data=edge_dict, json_schema_class=schema) try: edge.vertex1 = vertices_dict[edge.vertex1] edge.vertex2 = vertices_dict[edge.vertex2] except KeyError: ############################################################################################################ # # as edge references a pair of vertices, we must be sure respective vertices were decoded # ############################################################################################################ raise ValueError( "Error during breakpoint graph deserialization. Deserialized edge references non-present vertex") if len(edge.multicolor) == 0: ############################################################################################################ # # edges with empty multicolor are not permitted in breakpoint graphs # ############################################################################################################ raise ValueError( "Error during breakpoint graph deserialization. Empty multicolor for deserialized edge") try: edge.multicolor = Multicolor(*[genomes_dict[g_id] for g_id in edge.multicolor]) except KeyError: raise ValueError( "Error during breakpoint graph deserialization. Deserialized edge reference non-present " "genome in its multicolor") result.__add_bgedge(edge, merge=merge) return result
[ "def", "from_json", "(", "cls", ",", "data", ",", "genomes_data", "=", "None", ",", "genomes_deserialization_required", "=", "True", ",", "merge", "=", "False", ")", ":", "result", "=", "cls", "(", ")", "merge", "=", "merge", "vertices_dict", "=", "{", "...
A JSON deserialization operation, that recovers a breakpoint graph from its JSON representation as information about genomes, that are encoded in breakpoint graph might be available somewhere else, but not the json object, there is an option to provide it and omit encoding information about genomes.
[ "A", "JSON", "deserialization", "operation", "that", "recovers", "a", "breakpoint", "graph", "from", "its", "JSON", "representation" ]
train
https://github.com/aganezov/bg/blob/1ec758193441e49e7b34e0da09571480f4c24455/bg/breakpoint_graph.py#L858-L961
theiviaxx/Frog
frog/views/tag.py
get
def get(request, obj_id=None): """Lists all tags :returns: json """ res = Result() if obj_id: if obj_id == '0': obj = { 'id': 0, 'name': 'TAGLESS', 'artist': False, } else: obj = get_object_or_404(Tag, pk=obj_id).json() res.append(obj) return JsonResponse(res.asDict()) else: if request.GET.get('count'): itags = Tag.objects.all().annotate(icount=Count('image')) vtags = Tag.objects.all().annotate(vcount=Count('video')) for i, tag in enumerate(itags): tag.count = itags[i].icount + vtags[i].vcount res.append(tag.json()) else: for tag in Tag.objects.all(): res.append(tag.json()) return JsonResponse(res.asDict())
python
def get(request, obj_id=None): """Lists all tags :returns: json """ res = Result() if obj_id: if obj_id == '0': obj = { 'id': 0, 'name': 'TAGLESS', 'artist': False, } else: obj = get_object_or_404(Tag, pk=obj_id).json() res.append(obj) return JsonResponse(res.asDict()) else: if request.GET.get('count'): itags = Tag.objects.all().annotate(icount=Count('image')) vtags = Tag.objects.all().annotate(vcount=Count('video')) for i, tag in enumerate(itags): tag.count = itags[i].icount + vtags[i].vcount res.append(tag.json()) else: for tag in Tag.objects.all(): res.append(tag.json()) return JsonResponse(res.asDict())
[ "def", "get", "(", "request", ",", "obj_id", "=", "None", ")", ":", "res", "=", "Result", "(", ")", "if", "obj_id", ":", "if", "obj_id", "==", "'0'", ":", "obj", "=", "{", "'id'", ":", "0", ",", "'name'", ":", "'TAGLESS'", ",", "'artist'", ":", ...
Lists all tags :returns: json
[ "Lists", "all", "tags" ]
train
https://github.com/theiviaxx/Frog/blob/a9475463a8eed1323fe3ef5d51f9751fb1dc9edd/frog/views/tag.py#L63-L93
theiviaxx/Frog
frog/views/tag.py
post
def post(request): """Creates a tag object :param name: Name for tag :type name: str :returns: json """ res = Result() data = request.POST or json.loads(request.body)['body'] name = data.get('name', None) if not name: res.isError = True res.message = "No name given" return JsonResponse(res.asDict()) tag = Tag.objects.get_or_create(name=name.lower())[0] res.append(tag.json()) return JsonResponse(res.asDict())
python
def post(request): """Creates a tag object :param name: Name for tag :type name: str :returns: json """ res = Result() data = request.POST or json.loads(request.body)['body'] name = data.get('name', None) if not name: res.isError = True res.message = "No name given" return JsonResponse(res.asDict()) tag = Tag.objects.get_or_create(name=name.lower())[0] res.append(tag.json()) return JsonResponse(res.asDict())
[ "def", "post", "(", "request", ")", ":", "res", "=", "Result", "(", ")", "data", "=", "request", ".", "POST", "or", "json", ".", "loads", "(", "request", ".", "body", ")", "[", "'body'", "]", "name", "=", "data", ".", "get", "(", "'name'", ",", ...
Creates a tag object :param name: Name for tag :type name: str :returns: json
[ "Creates", "a", "tag", "object" ]
train
https://github.com/theiviaxx/Frog/blob/a9475463a8eed1323fe3ef5d51f9751fb1dc9edd/frog/views/tag.py#L97-L118
theiviaxx/Frog
frog/views/tag.py
put
def put(request, obj_id=None): """Adds tags from objects resolved from guids :param tags: Tags to add :type tags: list :param guids: Guids to add tags from :type guids: list :returns: json """ res = Result() data = request.PUT or json.loads(request.body)['body'] if obj_id: # -- Edit the tag tag = Tag.objects.get(pk=obj_id) tag.name = data.get('name', tag.name) tag.artist = data.get('artist', tag.artist) tag.save() else: tags = [_ for _ in data.get('tags', '').split(',') if _] guids = [_ for _ in data.get('guids', '').split(',') if _] _manageTags(tags, guids) return JsonResponse(res.asDict())
python
def put(request, obj_id=None): """Adds tags from objects resolved from guids :param tags: Tags to add :type tags: list :param guids: Guids to add tags from :type guids: list :returns: json """ res = Result() data = request.PUT or json.loads(request.body)['body'] if obj_id: # -- Edit the tag tag = Tag.objects.get(pk=obj_id) tag.name = data.get('name', tag.name) tag.artist = data.get('artist', tag.artist) tag.save() else: tags = [_ for _ in data.get('tags', '').split(',') if _] guids = [_ for _ in data.get('guids', '').split(',') if _] _manageTags(tags, guids) return JsonResponse(res.asDict())
[ "def", "put", "(", "request", ",", "obj_id", "=", "None", ")", ":", "res", "=", "Result", "(", ")", "data", "=", "request", ".", "PUT", "or", "json", ".", "loads", "(", "request", ".", "body", ")", "[", "'body'", "]", "if", "obj_id", ":", "# -- E...
Adds tags from objects resolved from guids :param tags: Tags to add :type tags: list :param guids: Guids to add tags from :type guids: list :returns: json
[ "Adds", "tags", "from", "objects", "resolved", "from", "guids" ]
train
https://github.com/theiviaxx/Frog/blob/a9475463a8eed1323fe3ef5d51f9751fb1dc9edd/frog/views/tag.py#L122-L145
theiviaxx/Frog
frog/views/tag.py
delete
def delete(request, obj_id=None): """Removes tags from objects resolved from guids :param tags: Tags to remove :type tags: list :param guids: Guids to remove tags from :type guids: list :returns: json """ res = Result() if obj_id: # -- Delete the tag itself tag = Tag.objects.get(pk=obj_id) guids = [] images = Image.objects.filter(tags__id=obj_id) guids += [_.guid for _ in images] videos = Video.objects.filter(tags__id=obj_id) guids += [_.guid for _ in videos] # -- Remove all tags from objects _manageTags([tag.id], guids, add=False) # -- Delete old tags tag.delete() else: tags = [_ for _ in request.DELETE.get('tags', '').split(',') if _] guids = [_ for _ in request.DELETE.get('guids', '').split(',') if _] _manageTags(tags, guids, add=False) return JsonResponse(res.asDict())
python
def delete(request, obj_id=None): """Removes tags from objects resolved from guids :param tags: Tags to remove :type tags: list :param guids: Guids to remove tags from :type guids: list :returns: json """ res = Result() if obj_id: # -- Delete the tag itself tag = Tag.objects.get(pk=obj_id) guids = [] images = Image.objects.filter(tags__id=obj_id) guids += [_.guid for _ in images] videos = Video.objects.filter(tags__id=obj_id) guids += [_.guid for _ in videos] # -- Remove all tags from objects _manageTags([tag.id], guids, add=False) # -- Delete old tags tag.delete() else: tags = [_ for _ in request.DELETE.get('tags', '').split(',') if _] guids = [_ for _ in request.DELETE.get('guids', '').split(',') if _] _manageTags(tags, guids, add=False) return JsonResponse(res.asDict())
[ "def", "delete", "(", "request", ",", "obj_id", "=", "None", ")", ":", "res", "=", "Result", "(", ")", "if", "obj_id", ":", "# -- Delete the tag itself", "tag", "=", "Tag", ".", "objects", ".", "get", "(", "pk", "=", "obj_id", ")", "guids", "=", "[",...
Removes tags from objects resolved from guids :param tags: Tags to remove :type tags: list :param guids: Guids to remove tags from :type guids: list :returns: json
[ "Removes", "tags", "from", "objects", "resolved", "from", "guids" ]
train
https://github.com/theiviaxx/Frog/blob/a9475463a8eed1323fe3ef5d51f9751fb1dc9edd/frog/views/tag.py#L149-L178
theiviaxx/Frog
frog/views/tag.py
search
def search(request): """ Search for Tag objects and returns a Result object with a list of searialize Tag objects. :param search: Append a "Search for" tag :type search: bool :param zero: Exclude Tags with no items :type zero: bool :param artist: Exclude artist tags :type artist: bool :returns: json """ q = request.GET.get('q', '') includeSearch = request.GET.get('search', False) nonZero = request.GET.get('zero', False) excludeArtist = request.GET.get('artist', False) if includeSearch: l = [{'id': 0, 'name': 'Search for: %s' % q}] else: l = [] query = Tag.objects.filter(name__icontains=q) if excludeArtist: query = query.exclude(artist=True) if nonZero: l += [t.json() for t in query if t.count() > 0] else: l += [t.json() for t in query] return JsonResponse(l, safe=False)
python
def search(request): """ Search for Tag objects and returns a Result object with a list of searialize Tag objects. :param search: Append a "Search for" tag :type search: bool :param zero: Exclude Tags with no items :type zero: bool :param artist: Exclude artist tags :type artist: bool :returns: json """ q = request.GET.get('q', '') includeSearch = request.GET.get('search', False) nonZero = request.GET.get('zero', False) excludeArtist = request.GET.get('artist', False) if includeSearch: l = [{'id': 0, 'name': 'Search for: %s' % q}] else: l = [] query = Tag.objects.filter(name__icontains=q) if excludeArtist: query = query.exclude(artist=True) if nonZero: l += [t.json() for t in query if t.count() > 0] else: l += [t.json() for t in query] return JsonResponse(l, safe=False)
[ "def", "search", "(", "request", ")", ":", "q", "=", "request", ".", "GET", ".", "get", "(", "'q'", ",", "''", ")", "includeSearch", "=", "request", ".", "GET", ".", "get", "(", "'search'", ",", "False", ")", "nonZero", "=", "request", ".", "GET", ...
Search for Tag objects and returns a Result object with a list of searialize Tag objects. :param search: Append a "Search for" tag :type search: bool :param zero: Exclude Tags with no items :type zero: bool :param artist: Exclude artist tags :type artist: bool :returns: json
[ "Search", "for", "Tag", "objects", "and", "returns", "a", "Result", "object", "with", "a", "list", "of", "searialize", "Tag", "objects", "." ]
train
https://github.com/theiviaxx/Frog/blob/a9475463a8eed1323fe3ef5d51f9751fb1dc9edd/frog/views/tag.py#L198-L231
theiviaxx/Frog
frog/views/tag.py
merge
def merge(request, obj_id): """Merges multiple tags into a single tag and all related objects are reassigned""" res = Result() if request.POST: tags = json.loads(request.POST['tags']) else: tags = json.loads(request.body)['body']['tags'] guids = [] images = Image.objects.filter(tags__id__in=tags) guids += [_.guid for _ in images] videos = Video.objects.filter(tags__id__in=tags) guids += [_.guid for _ in videos] # -- Remove all tags from objects _manageTags(tags, guids, add=False) # -- Add merged tag to all objects _manageTags([obj_id], guids, add=True) # -- Delete old tags Tag.objects.filter(pk__in=tags).delete() return JsonResponse(res.asDict())
python
def merge(request, obj_id): """Merges multiple tags into a single tag and all related objects are reassigned""" res = Result() if request.POST: tags = json.loads(request.POST['tags']) else: tags = json.loads(request.body)['body']['tags'] guids = [] images = Image.objects.filter(tags__id__in=tags) guids += [_.guid for _ in images] videos = Video.objects.filter(tags__id__in=tags) guids += [_.guid for _ in videos] # -- Remove all tags from objects _manageTags(tags, guids, add=False) # -- Add merged tag to all objects _manageTags([obj_id], guids, add=True) # -- Delete old tags Tag.objects.filter(pk__in=tags).delete() return JsonResponse(res.asDict())
[ "def", "merge", "(", "request", ",", "obj_id", ")", ":", "res", "=", "Result", "(", ")", "if", "request", ".", "POST", ":", "tags", "=", "json", ".", "loads", "(", "request", ".", "POST", "[", "'tags'", "]", ")", "else", ":", "tags", "=", "json",...
Merges multiple tags into a single tag and all related objects are reassigned
[ "Merges", "multiple", "tags", "into", "a", "single", "tag", "and", "all", "related", "objects", "are", "reassigned" ]
train
https://github.com/theiviaxx/Frog/blob/a9475463a8eed1323fe3ef5d51f9751fb1dc9edd/frog/views/tag.py#L294-L314
theiviaxx/Frog
frog/views/tag.py
_manageTags
def _manageTags(tagList, guids, add=True): """ Adds or Removes Guids from Tags """ objects = getObjectsFromGuids(guids) tags = [] for tag in tagList: try: t = Tag.objects.get(pk=int(tag)) except ValueError: t = Tag.objects.get_or_create(name=tag.lower())[0] tags.append(t) if add: return _addTags(tags, objects) else: return _removeTags(tags, objects)
python
def _manageTags(tagList, guids, add=True): """ Adds or Removes Guids from Tags """ objects = getObjectsFromGuids(guids) tags = [] for tag in tagList: try: t = Tag.objects.get(pk=int(tag)) except ValueError: t = Tag.objects.get_or_create(name=tag.lower())[0] tags.append(t) if add: return _addTags(tags, objects) else: return _removeTags(tags, objects)
[ "def", "_manageTags", "(", "tagList", ",", "guids", ",", "add", "=", "True", ")", ":", "objects", "=", "getObjectsFromGuids", "(", "guids", ")", "tags", "=", "[", "]", "for", "tag", "in", "tagList", ":", "try", ":", "t", "=", "Tag", ".", "objects", ...
Adds or Removes Guids from Tags
[ "Adds", "or", "Removes", "Guids", "from", "Tags" ]
train
https://github.com/theiviaxx/Frog/blob/a9475463a8eed1323fe3ef5d51f9751fb1dc9edd/frog/views/tag.py#L317-L331
theiviaxx/Frog
frog/views/tag.py
_addTags
def _addTags(tags, objects): """ Adds tags to objects """ for t in tags: for o in objects: o.tags.add(t) return True
python
def _addTags(tags, objects): """ Adds tags to objects """ for t in tags: for o in objects: o.tags.add(t) return True
[ "def", "_addTags", "(", "tags", ",", "objects", ")", ":", "for", "t", "in", "tags", ":", "for", "o", "in", "objects", ":", "o", ".", "tags", ".", "add", "(", "t", ")", "return", "True" ]
Adds tags to objects
[ "Adds", "tags", "to", "objects" ]
train
https://github.com/theiviaxx/Frog/blob/a9475463a8eed1323fe3ef5d51f9751fb1dc9edd/frog/views/tag.py#L334-L340
theiviaxx/Frog
frog/views/tag.py
_removeTags
def _removeTags(tags, objects): """ Removes tags from objects """ for t in tags: for o in objects: o.tags.remove(t) return True
python
def _removeTags(tags, objects): """ Removes tags from objects """ for t in tags: for o in objects: o.tags.remove(t) return True
[ "def", "_removeTags", "(", "tags", ",", "objects", ")", ":", "for", "t", "in", "tags", ":", "for", "o", "in", "objects", ":", "o", ".", "tags", ".", "remove", "(", "t", ")", "return", "True" ]
Removes tags from objects
[ "Removes", "tags", "from", "objects" ]
train
https://github.com/theiviaxx/Frog/blob/a9475463a8eed1323fe3ef5d51f9751fb1dc9edd/frog/views/tag.py#L343-L349
backbohne/docx-xslt
docxxslt/xsl.py
XslCommand.parse
def parse(self, xsl='text'): """ TODO: add double-quoted string literals that allows for escaped double quotes https://gist.github.com/prathe/2439752 or http://www.metaltoad.com/blog/regex-quoted-string-escapable-quotes """ try: cmd_text, option_text = xsl.split(None, 1) except ValueError: cmd_text = xsl option_text = '' try: context, cmd = cmd_text.strip().lower().split(':', 1) except ValueError: cmd = cmd_text.lower() context = None if not cmd in DEFAULT_CMD_TO_CONTEXT_MAPPING: raise ParseError("unknown command %s" % cmd) if context and not context in CONTEXTS: raise ParseError("unknown context %s" % context) self.context = context self.cmd = cmd self.text = None self.meta_commands = [] self.options = {} try: if cmd in ('choose', 'text', 'meta'): raise ValueError() option_name, expr = option_text.split('=', 1) option_name = option_name.strip().lower() expr = unescape(expr).strip("'").strip('"').strip() self.options = {option_name: expr} except ValueError: text = unescape(option_text) if cmd == 'meta': for mc in filter(lambda c: c, map(lambda c: c.strip(), text.lower().split(';'))): if mc in META_COMMANDS: # store in stack order self.meta_commands = [mc] + self.meta_commands else: raise ParseError("unknown meta command %s" % self.text) else: self.text = text
python
def parse(self, xsl='text'): """ TODO: add double-quoted string literals that allows for escaped double quotes https://gist.github.com/prathe/2439752 or http://www.metaltoad.com/blog/regex-quoted-string-escapable-quotes """ try: cmd_text, option_text = xsl.split(None, 1) except ValueError: cmd_text = xsl option_text = '' try: context, cmd = cmd_text.strip().lower().split(':', 1) except ValueError: cmd = cmd_text.lower() context = None if not cmd in DEFAULT_CMD_TO_CONTEXT_MAPPING: raise ParseError("unknown command %s" % cmd) if context and not context in CONTEXTS: raise ParseError("unknown context %s" % context) self.context = context self.cmd = cmd self.text = None self.meta_commands = [] self.options = {} try: if cmd in ('choose', 'text', 'meta'): raise ValueError() option_name, expr = option_text.split('=', 1) option_name = option_name.strip().lower() expr = unescape(expr).strip("'").strip('"').strip() self.options = {option_name: expr} except ValueError: text = unescape(option_text) if cmd == 'meta': for mc in filter(lambda c: c, map(lambda c: c.strip(), text.lower().split(';'))): if mc in META_COMMANDS: # store in stack order self.meta_commands = [mc] + self.meta_commands else: raise ParseError("unknown meta command %s" % self.text) else: self.text = text
[ "def", "parse", "(", "self", ",", "xsl", "=", "'text'", ")", ":", "try", ":", "cmd_text", ",", "option_text", "=", "xsl", ".", "split", "(", "None", ",", "1", ")", "except", "ValueError", ":", "cmd_text", "=", "xsl", "option_text", "=", "''", "try", ...
TODO: add double-quoted string literals that allows for escaped double quotes https://gist.github.com/prathe/2439752 or http://www.metaltoad.com/blog/regex-quoted-string-escapable-quotes
[ "TODO", ":", "add", "double", "-", "quoted", "string", "literals", "that", "allows", "for", "escaped", "double", "quotes", "https", ":", "//", "gist", ".", "github", ".", "com", "/", "prathe", "/", "2439752", "or", "http", ":", "//", "www", ".", "metal...
train
https://github.com/backbohne/docx-xslt/blob/d4cc76776a75b8213660c3c1717d42afe5189e15/docxxslt/xsl.py#L45-L96
backbohne/docx-xslt
docxxslt/xsl.py
XslElement.p0
def p0(self, e): """Returns top level w:p node""" body = self.body(e) p = self.p(e) if body == p.getparent(): return p else: raise ElementNotFound("could not find top level w:p element in %s" % e)
python
def p0(self, e): """Returns top level w:p node""" body = self.body(e) p = self.p(e) if body == p.getparent(): return p else: raise ElementNotFound("could not find top level w:p element in %s" % e)
[ "def", "p0", "(", "self", ",", "e", ")", ":", "body", "=", "self", ".", "body", "(", "e", ")", "p", "=", "self", ".", "p", "(", "e", ")", "if", "body", "==", "p", ".", "getparent", "(", ")", ":", "return", "p", "else", ":", "raise", "Elemen...
Returns top level w:p node
[ "Returns", "top", "level", "w", ":", "p", "node" ]
train
https://github.com/backbohne/docx-xslt/blob/d4cc76776a75b8213660c3c1717d42afe5189e15/docxxslt/xsl.py#L173-L180
mk-fg/feedjack
feedjack/management/commands/feedjack_purge.py
_short_ts_regexp
def _short_ts_regexp(): '''Generates regexp for parsing of shortened relative timestamps, as shown in the table.''' ts_re = ['^'] for k in it.chain(_short_ts_days, _short_ts_s): ts_re.append(r'(?P<{0}>\d+{0}\s*)?'.format(k)) return re.compile(''.join(ts_re), re.I | re.U)
python
def _short_ts_regexp(): '''Generates regexp for parsing of shortened relative timestamps, as shown in the table.''' ts_re = ['^'] for k in it.chain(_short_ts_days, _short_ts_s): ts_re.append(r'(?P<{0}>\d+{0}\s*)?'.format(k)) return re.compile(''.join(ts_re), re.I | re.U)
[ "def", "_short_ts_regexp", "(", ")", ":", "ts_re", "=", "[", "'^'", "]", "for", "k", "in", "it", ".", "chain", "(", "_short_ts_days", ",", "_short_ts_s", ")", ":", "ts_re", ".", "append", "(", "r'(?P<{0}>\\d+{0}\\s*)?'", ".", "format", "(", "k", ")", "...
Generates regexp for parsing of shortened relative timestamps, as shown in the table.
[ "Generates", "regexp", "for", "parsing", "of", "shortened", "relative", "timestamps", "as", "shown", "in", "the", "table", "." ]
train
https://github.com/mk-fg/feedjack/blob/3fe65c0f66dc2cfdf45834aaa7235ec9f81b3ca3/feedjack/management/commands/feedjack_purge.py#L19-L25
mk-fg/feedjack
feedjack/management/commands/feedjack_purge.py
parse_timestamp
def parse_timestamp(ts_str): '''Match time either in human-readable format (as accepted by dateutil), or same time-offset format, as used in the table (e.g. "NdMh ago", or just "NdMh").''' assert isinstance(ts_str, bytes), [type(ts_str), repr(ts_str)] ts_str = ts_str.replace('_', ' ') # Try to parse time offset in short format match = _short_ts_regexp.search(ts_str) if match and any(match.groups()): delta = list() parse_int = lambda v: int(''.join(c for c in v if c.isdigit())) for units in [_short_ts_days, _short_ts_s]: val = 0 for k, v in units.iteritems(): try: if not match.group(k): continue n = parse_int(match.group(k)) except IndexError: continue val += n * v delta.append(val) return timezone.localtime(timezone.now()) - timedelta(*delta) # Fallback to other generic formats ts = None if not ts: match = re.search( # common BE format r'^(?P<date>(?:\d{2}|(?P<Y>\d{4}))-\d{2}-\d{2})' r'(?:[ T](?P<time>\d{2}(?::\d{2}(?::\d{2})?)?)?)?$', ts_str ) if match: tpl = 'y' if not match.group('Y') else 'Y' tpl, ts_str = '%{}-%m-%d'.format(tpl), match.group('date') if match.group('time'): tpl_time = ['%H', '%M', '%S'] ts_str_time = match.group('time').split(':') ts_str += ' ' + ':'.join(ts_str_time) tpl += ' ' + ':'.join(tpl_time[:len(ts_str_time)]) try: ts = timezone.make_aware(datetime.strptime(ts_str, tpl)) except ValueError: pass if not ts: # coreutils' "date" parses virtually everything, but is more expensive to use with open(os.devnull, 'w') as devnull: proc = subprocess.Popen( ['date', '+%s', '-d', ts_str], stdout=subprocess.PIPE, stderr=devnull, close_fds=True ) val = proc.stdout.read() if not proc.wait(): ts = timezone.make_aware(datetime.fromtimestamp(int(val.strip()))) if ts: return ts raise ValueError('Unable to parse date/time string: {0}'.format(ts_str))
python
def parse_timestamp(ts_str): '''Match time either in human-readable format (as accepted by dateutil), or same time-offset format, as used in the table (e.g. "NdMh ago", or just "NdMh").''' assert isinstance(ts_str, bytes), [type(ts_str), repr(ts_str)] ts_str = ts_str.replace('_', ' ') # Try to parse time offset in short format match = _short_ts_regexp.search(ts_str) if match and any(match.groups()): delta = list() parse_int = lambda v: int(''.join(c for c in v if c.isdigit())) for units in [_short_ts_days, _short_ts_s]: val = 0 for k, v in units.iteritems(): try: if not match.group(k): continue n = parse_int(match.group(k)) except IndexError: continue val += n * v delta.append(val) return timezone.localtime(timezone.now()) - timedelta(*delta) # Fallback to other generic formats ts = None if not ts: match = re.search( # common BE format r'^(?P<date>(?:\d{2}|(?P<Y>\d{4}))-\d{2}-\d{2})' r'(?:[ T](?P<time>\d{2}(?::\d{2}(?::\d{2})?)?)?)?$', ts_str ) if match: tpl = 'y' if not match.group('Y') else 'Y' tpl, ts_str = '%{}-%m-%d'.format(tpl), match.group('date') if match.group('time'): tpl_time = ['%H', '%M', '%S'] ts_str_time = match.group('time').split(':') ts_str += ' ' + ':'.join(ts_str_time) tpl += ' ' + ':'.join(tpl_time[:len(ts_str_time)]) try: ts = timezone.make_aware(datetime.strptime(ts_str, tpl)) except ValueError: pass if not ts: # coreutils' "date" parses virtually everything, but is more expensive to use with open(os.devnull, 'w') as devnull: proc = subprocess.Popen( ['date', '+%s', '-d', ts_str], stdout=subprocess.PIPE, stderr=devnull, close_fds=True ) val = proc.stdout.read() if not proc.wait(): ts = timezone.make_aware(datetime.fromtimestamp(int(val.strip()))) if ts: return ts raise ValueError('Unable to parse date/time string: {0}'.format(ts_str))
[ "def", "parse_timestamp", "(", "ts_str", ")", ":", "assert", "isinstance", "(", "ts_str", ",", "bytes", ")", ",", "[", "type", "(", "ts_str", ")", ",", "repr", "(", "ts_str", ")", "]", "ts_str", "=", "ts_str", ".", "replace", "(", "'_'", ",", "' '", ...
Match time either in human-readable format (as accepted by dateutil), or same time-offset format, as used in the table (e.g. "NdMh ago", or just "NdMh").
[ "Match", "time", "either", "in", "human", "-", "readable", "format", "(", "as", "accepted", "by", "dateutil", ")", "or", "same", "time", "-", "offset", "format", "as", "used", "in", "the", "table", "(", "e", ".", "g", ".", "NdMh", "ago", "or", "just"...
train
https://github.com/mk-fg/feedjack/blob/3fe65c0f66dc2cfdf45834aaa7235ec9f81b3ca3/feedjack/management/commands/feedjack_purge.py#L28-L77
smitchell556/cuttlepool
cuttlepool.py
CuttlePool._get
def _get(self, timeout): """ Get a resource from the pool. If timeout is ``None`` waits indefinitely. :param timeout: Time in seconds to wait for a resource. :type timeout: int :return: A resource. :rtype: :class:`_ResourceTracker` :raises PoolEmptyError: When timeout has elapsed and unable to retrieve resource. """ with self._lock: if timeout is None: while self.empty(): self._not_empty.wait() else: time_end = time.time() + timeout while self.empty(): time_left = time_end - time.time() if time_left < 0: raise PoolEmptyError self._not_empty.wait(time_left) rtracker = self._reference_queue[self._resource_start] self._resource_start = (self._resource_start + 1) % self.maxsize self._available -= 1 return rtracker
python
def _get(self, timeout): """ Get a resource from the pool. If timeout is ``None`` waits indefinitely. :param timeout: Time in seconds to wait for a resource. :type timeout: int :return: A resource. :rtype: :class:`_ResourceTracker` :raises PoolEmptyError: When timeout has elapsed and unable to retrieve resource. """ with self._lock: if timeout is None: while self.empty(): self._not_empty.wait() else: time_end = time.time() + timeout while self.empty(): time_left = time_end - time.time() if time_left < 0: raise PoolEmptyError self._not_empty.wait(time_left) rtracker = self._reference_queue[self._resource_start] self._resource_start = (self._resource_start + 1) % self.maxsize self._available -= 1 return rtracker
[ "def", "_get", "(", "self", ",", "timeout", ")", ":", "with", "self", ".", "_lock", ":", "if", "timeout", "is", "None", ":", "while", "self", ".", "empty", "(", ")", ":", "self", ".", "_not_empty", ".", "wait", "(", ")", "else", ":", "time_end", ...
Get a resource from the pool. If timeout is ``None`` waits indefinitely. :param timeout: Time in seconds to wait for a resource. :type timeout: int :return: A resource. :rtype: :class:`_ResourceTracker` :raises PoolEmptyError: When timeout has elapsed and unable to retrieve resource.
[ "Get", "a", "resource", "from", "the", "pool", ".", "If", "timeout", "is", "None", "waits", "indefinitely", "." ]
train
https://github.com/smitchell556/cuttlepool/blob/21b74bc61d11c80d3b40da0321485ec6c2f9db31/cuttlepool.py#L145-L174
smitchell556/cuttlepool
cuttlepool.py
CuttlePool._get_tracker
def _get_tracker(self, resource): """ Return the resource tracker that is tracking ``resource``. :param resource: A resource. :return: A resource tracker. :rtype: :class:`_ResourceTracker` """ with self._lock: for rt in self._reference_queue: if rt is not None and resource is rt.resource: return rt raise UnknownResourceError('Resource not created by pool')
python
def _get_tracker(self, resource): """ Return the resource tracker that is tracking ``resource``. :param resource: A resource. :return: A resource tracker. :rtype: :class:`_ResourceTracker` """ with self._lock: for rt in self._reference_queue: if rt is not None and resource is rt.resource: return rt raise UnknownResourceError('Resource not created by pool')
[ "def", "_get_tracker", "(", "self", ",", "resource", ")", ":", "with", "self", ".", "_lock", ":", "for", "rt", "in", "self", ".", "_reference_queue", ":", "if", "rt", "is", "not", "None", "and", "resource", "is", "rt", ".", "resource", ":", "return", ...
Return the resource tracker that is tracking ``resource``. :param resource: A resource. :return: A resource tracker. :rtype: :class:`_ResourceTracker`
[ "Return", "the", "resource", "tracker", "that", "is", "tracking", "resource", "." ]
train
https://github.com/smitchell556/cuttlepool/blob/21b74bc61d11c80d3b40da0321485ec6c2f9db31/cuttlepool.py#L176-L189
smitchell556/cuttlepool
cuttlepool.py
CuttlePool._harvest_lost_resources
def _harvest_lost_resources(self): """Return lost resources to pool.""" with self._lock: for i in self._unavailable_range(): rtracker = self._reference_queue[i] if rtracker is not None and rtracker.available(): self.put_resource(rtracker.resource)
python
def _harvest_lost_resources(self): """Return lost resources to pool.""" with self._lock: for i in self._unavailable_range(): rtracker = self._reference_queue[i] if rtracker is not None and rtracker.available(): self.put_resource(rtracker.resource)
[ "def", "_harvest_lost_resources", "(", "self", ")", ":", "with", "self", ".", "_lock", ":", "for", "i", "in", "self", ".", "_unavailable_range", "(", ")", ":", "rtracker", "=", "self", ".", "_reference_queue", "[", "i", "]", "if", "rtracker", "is", "not"...
Return lost resources to pool.
[ "Return", "lost", "resources", "to", "pool", "." ]
train
https://github.com/smitchell556/cuttlepool/blob/21b74bc61d11c80d3b40da0321485ec6c2f9db31/cuttlepool.py#L191-L197
smitchell556/cuttlepool
cuttlepool.py
CuttlePool._make_resource
def _make_resource(self): """ Returns a resource instance. """ with self._lock: for i in self._unavailable_range(): if self._reference_queue[i] is None: rtracker = _ResourceTracker( self._factory(**self._factory_arguments)) self._reference_queue[i] = rtracker self._size += 1 return rtracker raise PoolFullError
python
def _make_resource(self): """ Returns a resource instance. """ with self._lock: for i in self._unavailable_range(): if self._reference_queue[i] is None: rtracker = _ResourceTracker( self._factory(**self._factory_arguments)) self._reference_queue[i] = rtracker self._size += 1 return rtracker raise PoolFullError
[ "def", "_make_resource", "(", "self", ")", ":", "with", "self", ".", "_lock", ":", "for", "i", "in", "self", ".", "_unavailable_range", "(", ")", ":", "if", "self", ".", "_reference_queue", "[", "i", "]", "is", "None", ":", "rtracker", "=", "_ResourceT...
Returns a resource instance.
[ "Returns", "a", "resource", "instance", "." ]
train
https://github.com/smitchell556/cuttlepool/blob/21b74bc61d11c80d3b40da0321485ec6c2f9db31/cuttlepool.py#L199-L214
smitchell556/cuttlepool
cuttlepool.py
CuttlePool._put
def _put(self, rtracker): """ Put a resource back in the queue. :param rtracker: A resource. :type rtracker: :class:`_ResourceTracker` :raises PoolFullError: If pool is full. :raises UnknownResourceError: If resource can't be found. """ with self._lock: if self._available < self.capacity: for i in self._unavailable_range(): if self._reference_queue[i] is rtracker: # i retains its value and will be used to swap with # first "empty" space in queue. break else: raise UnknownResourceError j = self._resource_end rq = self._reference_queue rq[i], rq[j] = rq[j], rq[i] self._resource_end = (self._resource_end + 1) % self.maxsize self._available += 1 self._not_empty.notify() else: raise PoolFullError
python
def _put(self, rtracker): """ Put a resource back in the queue. :param rtracker: A resource. :type rtracker: :class:`_ResourceTracker` :raises PoolFullError: If pool is full. :raises UnknownResourceError: If resource can't be found. """ with self._lock: if self._available < self.capacity: for i in self._unavailable_range(): if self._reference_queue[i] is rtracker: # i retains its value and will be used to swap with # first "empty" space in queue. break else: raise UnknownResourceError j = self._resource_end rq = self._reference_queue rq[i], rq[j] = rq[j], rq[i] self._resource_end = (self._resource_end + 1) % self.maxsize self._available += 1 self._not_empty.notify() else: raise PoolFullError
[ "def", "_put", "(", "self", ",", "rtracker", ")", ":", "with", "self", ".", "_lock", ":", "if", "self", ".", "_available", "<", "self", ".", "capacity", ":", "for", "i", "in", "self", ".", "_unavailable_range", "(", ")", ":", "if", "self", ".", "_r...
Put a resource back in the queue. :param rtracker: A resource. :type rtracker: :class:`_ResourceTracker` :raises PoolFullError: If pool is full. :raises UnknownResourceError: If resource can't be found.
[ "Put", "a", "resource", "back", "in", "the", "queue", "." ]
train
https://github.com/smitchell556/cuttlepool/blob/21b74bc61d11c80d3b40da0321485ec6c2f9db31/cuttlepool.py#L216-L245
smitchell556/cuttlepool
cuttlepool.py
CuttlePool._remove
def _remove(self, rtracker): """ Remove a resource from the pool. :param rtracker: A resource. :type rtracker: :class:`_ResourceTracker` """ with self._lock: i = self._reference_queue.index(rtracker) self._reference_queue[i] = None self._size -= 1
python
def _remove(self, rtracker): """ Remove a resource from the pool. :param rtracker: A resource. :type rtracker: :class:`_ResourceTracker` """ with self._lock: i = self._reference_queue.index(rtracker) self._reference_queue[i] = None self._size -= 1
[ "def", "_remove", "(", "self", ",", "rtracker", ")", ":", "with", "self", ".", "_lock", ":", "i", "=", "self", ".", "_reference_queue", ".", "index", "(", "rtracker", ")", "self", ".", "_reference_queue", "[", "i", "]", "=", "None", "self", ".", "_si...
Remove a resource from the pool. :param rtracker: A resource. :type rtracker: :class:`_ResourceTracker`
[ "Remove", "a", "resource", "from", "the", "pool", "." ]
train
https://github.com/smitchell556/cuttlepool/blob/21b74bc61d11c80d3b40da0321485ec6c2f9db31/cuttlepool.py#L247-L257
smitchell556/cuttlepool
cuttlepool.py
CuttlePool._unavailable_range
def _unavailable_range(self): """ Return a generator for the indices of the unavailable region of ``_reference_queue``. """ with self._lock: i = self._resource_end j = self._resource_start if j < i or self.empty(): j += self.maxsize for k in range(i, j): yield k % self.maxsize
python
def _unavailable_range(self): """ Return a generator for the indices of the unavailable region of ``_reference_queue``. """ with self._lock: i = self._resource_end j = self._resource_start if j < i or self.empty(): j += self.maxsize for k in range(i, j): yield k % self.maxsize
[ "def", "_unavailable_range", "(", "self", ")", ":", "with", "self", ".", "_lock", ":", "i", "=", "self", ".", "_resource_end", "j", "=", "self", ".", "_resource_start", "if", "j", "<", "i", "or", "self", ".", "empty", "(", ")", ":", "j", "+=", "sel...
Return a generator for the indices of the unavailable region of ``_reference_queue``.
[ "Return", "a", "generator", "for", "the", "indices", "of", "the", "unavailable", "region", "of", "_reference_queue", "." ]
train
https://github.com/smitchell556/cuttlepool/blob/21b74bc61d11c80d3b40da0321485ec6c2f9db31/cuttlepool.py#L259-L271
smitchell556/cuttlepool
cuttlepool.py
CuttlePool.get_resource
def get_resource(self, resource_wrapper=None): """ Returns a ``Resource`` instance. :param resource_wrapper: A Resource subclass. :return: A ``Resource`` instance. :raises PoolEmptyError: If attempt to get resource fails or times out. """ rtracker = None if resource_wrapper is None: resource_wrapper = self._resource_wrapper if self.empty(): self._harvest_lost_resources() try: rtracker = self._get(0) except PoolEmptyError: pass if rtracker is None: # Could not find resource, try to make one. try: rtracker = self._make_resource() except PoolFullError: pass if rtracker is None: # Could not find or make resource, so must wait for a resource # to be returned to the pool. try: rtracker = self._get(timeout=self._timeout) except PoolEmptyError: pass if rtracker is None: raise PoolEmptyError # Ensure resource is active. if not self.ping(rtracker.resource): # Lock here to prevent another thread creating a resource in the # index that will have this resource removed. This ensures there # will be space for _make_resource() to place a newly created # resource. with self._lock: self._remove(rtracker) rtracker = self._make_resource() # Ensure all resources leave pool with same attributes. # normalize_connection() is used since it calls # normalize_resource(), so if a user implements either one, the # resource will still be normalized. This will be changed in 1.0 to # call normalize_resource() when normalize_connection() is # removed. self.normalize_connection(rtracker.resource) return rtracker.wrap_resource(self, resource_wrapper)
python
def get_resource(self, resource_wrapper=None): """ Returns a ``Resource`` instance. :param resource_wrapper: A Resource subclass. :return: A ``Resource`` instance. :raises PoolEmptyError: If attempt to get resource fails or times out. """ rtracker = None if resource_wrapper is None: resource_wrapper = self._resource_wrapper if self.empty(): self._harvest_lost_resources() try: rtracker = self._get(0) except PoolEmptyError: pass if rtracker is None: # Could not find resource, try to make one. try: rtracker = self._make_resource() except PoolFullError: pass if rtracker is None: # Could not find or make resource, so must wait for a resource # to be returned to the pool. try: rtracker = self._get(timeout=self._timeout) except PoolEmptyError: pass if rtracker is None: raise PoolEmptyError # Ensure resource is active. if not self.ping(rtracker.resource): # Lock here to prevent another thread creating a resource in the # index that will have this resource removed. This ensures there # will be space for _make_resource() to place a newly created # resource. with self._lock: self._remove(rtracker) rtracker = self._make_resource() # Ensure all resources leave pool with same attributes. # normalize_connection() is used since it calls # normalize_resource(), so if a user implements either one, the # resource will still be normalized. This will be changed in 1.0 to # call normalize_resource() when normalize_connection() is # removed. self.normalize_connection(rtracker.resource) return rtracker.wrap_resource(self, resource_wrapper)
[ "def", "get_resource", "(", "self", ",", "resource_wrapper", "=", "None", ")", ":", "rtracker", "=", "None", "if", "resource_wrapper", "is", "None", ":", "resource_wrapper", "=", "self", ".", "_resource_wrapper", "if", "self", ".", "empty", "(", ")", ":", ...
Returns a ``Resource`` instance. :param resource_wrapper: A Resource subclass. :return: A ``Resource`` instance. :raises PoolEmptyError: If attempt to get resource fails or times out.
[ "Returns", "a", "Resource", "instance", "." ]
train
https://github.com/smitchell556/cuttlepool/blob/21b74bc61d11c80d3b40da0321485ec6c2f9db31/cuttlepool.py#L285-L344
smitchell556/cuttlepool
cuttlepool.py
CuttlePool.put_resource
def put_resource(self, resource): """ Adds a resource back to the pool or discards it if the pool is full. :param resource: A resource object. :raises UnknownResourceError: If resource was not made by the pool. """ rtracker = self._get_tracker(resource) try: self._put(rtracker) except PoolFullError: self._remove(rtracker)
python
def put_resource(self, resource): """ Adds a resource back to the pool or discards it if the pool is full. :param resource: A resource object. :raises UnknownResourceError: If resource was not made by the pool. """ rtracker = self._get_tracker(resource) try: self._put(rtracker) except PoolFullError: self._remove(rtracker)
[ "def", "put_resource", "(", "self", ",", "resource", ")", ":", "rtracker", "=", "self", ".", "_get_tracker", "(", "resource", ")", "try", ":", "self", ".", "_put", "(", "rtracker", ")", "except", "PoolFullError", ":", "self", ".", "_remove", "(", "rtrack...
Adds a resource back to the pool or discards it if the pool is full. :param resource: A resource object. :raises UnknownResourceError: If resource was not made by the pool.
[ "Adds", "a", "resource", "back", "to", "the", "pool", "or", "discards", "it", "if", "the", "pool", "is", "full", "." ]
train
https://github.com/smitchell556/cuttlepool/blob/21b74bc61d11c80d3b40da0321485ec6c2f9db31/cuttlepool.py#L386-L400
smitchell556/cuttlepool
cuttlepool.py
_ResourceTracker.wrap_resource
def wrap_resource(self, pool, resource_wrapper): """ Return a resource wrapped in ``resource_wrapper``. :param pool: A pool instance. :type pool: :class:`CuttlePool` :param resource_wrapper: A wrapper class for the resource. :type resource_wrapper: :class:`Resource` :return: A wrapped resource. :rtype: :class:`Resource` """ resource = resource_wrapper(self.resource, pool) self._weakref = weakref.ref(resource) return resource
python
def wrap_resource(self, pool, resource_wrapper): """ Return a resource wrapped in ``resource_wrapper``. :param pool: A pool instance. :type pool: :class:`CuttlePool` :param resource_wrapper: A wrapper class for the resource. :type resource_wrapper: :class:`Resource` :return: A wrapped resource. :rtype: :class:`Resource` """ resource = resource_wrapper(self.resource, pool) self._weakref = weakref.ref(resource) return resource
[ "def", "wrap_resource", "(", "self", ",", "pool", ",", "resource_wrapper", ")", ":", "resource", "=", "resource_wrapper", "(", "self", ".", "resource", ",", "pool", ")", "self", ".", "_weakref", "=", "weakref", ".", "ref", "(", "resource", ")", "return", ...
Return a resource wrapped in ``resource_wrapper``. :param pool: A pool instance. :type pool: :class:`CuttlePool` :param resource_wrapper: A wrapper class for the resource. :type resource_wrapper: :class:`Resource` :return: A wrapped resource. :rtype: :class:`Resource`
[ "Return", "a", "resource", "wrapped", "in", "resource_wrapper", "." ]
train
https://github.com/smitchell556/cuttlepool/blob/21b74bc61d11c80d3b40da0321485ec6c2f9db31/cuttlepool.py#L418-L431
smitchell556/cuttlepool
cuttlepool.py
Resource.close
def close(self): """ Returns the resource to the resource pool. """ if self._resource is not None: self._pool.put_resource(self._resource) self._resource = None self._pool = None
python
def close(self): """ Returns the resource to the resource pool. """ if self._resource is not None: self._pool.put_resource(self._resource) self._resource = None self._pool = None
[ "def", "close", "(", "self", ")", ":", "if", "self", ".", "_resource", "is", "not", "None", ":", "self", ".", "_pool", ".", "put_resource", "(", "self", ".", "_resource", ")", "self", ".", "_resource", "=", "None", "self", ".", "_pool", "=", "None" ]
Returns the resource to the resource pool.
[ "Returns", "the", "resource", "to", "the", "resource", "pool", "." ]
train
https://github.com/smitchell556/cuttlepool/blob/21b74bc61d11c80d3b40da0321485ec6c2f9db31/cuttlepool.py#L465-L472
taddeus/wspy
connection.py
Connection.send
def send(self, message, fragment_size=None, mask=False): """ Send a message. If `fragment_size` is specified, the message is fragmented into multiple frames whose payload size does not extend `fragment_size`. """ for frame in self.message_to_frames(message, fragment_size, mask): self.send_frame(frame)
python
def send(self, message, fragment_size=None, mask=False): """ Send a message. If `fragment_size` is specified, the message is fragmented into multiple frames whose payload size does not extend `fragment_size`. """ for frame in self.message_to_frames(message, fragment_size, mask): self.send_frame(frame)
[ "def", "send", "(", "self", ",", "message", ",", "fragment_size", "=", "None", ",", "mask", "=", "False", ")", ":", "for", "frame", "in", "self", ".", "message_to_frames", "(", "message", ",", "fragment_size", ",", "mask", ")", ":", "self", ".", "send_...
Send a message. If `fragment_size` is specified, the message is fragmented into multiple frames whose payload size does not extend `fragment_size`.
[ "Send", "a", "message", ".", "If", "fragment_size", "is", "specified", "the", "message", "is", "fragmented", "into", "multiple", "frames", "whose", "payload", "size", "does", "not", "extend", "fragment_size", "." ]
train
https://github.com/taddeus/wspy/blob/13f054a72442bb8dcc37b0ac011cab6025830d66/connection.py#L65-L72
taddeus/wspy
connection.py
Connection.recv
def recv(self): """ Receive a message. A message may consist of multiple (ordered) data frames. A control frame may be delivered at any time, also when expecting the next continuation frame of a fragmented message. These control frames are handled immediately by handle_control_frame(). """ fragments = [] while not len(fragments) or not fragments[-1].final: frame = self.sock.recv() if isinstance(frame, ControlFrame): self.handle_control_frame(frame) elif len(fragments) > 0 and frame.opcode != OPCODE_CONTINUATION: raise ValueError('expected continuation/control frame, got %s ' 'instead' % frame) else: fragments.append(frame) return self.concat_fragments(fragments)
python
def recv(self): """ Receive a message. A message may consist of multiple (ordered) data frames. A control frame may be delivered at any time, also when expecting the next continuation frame of a fragmented message. These control frames are handled immediately by handle_control_frame(). """ fragments = [] while not len(fragments) or not fragments[-1].final: frame = self.sock.recv() if isinstance(frame, ControlFrame): self.handle_control_frame(frame) elif len(fragments) > 0 and frame.opcode != OPCODE_CONTINUATION: raise ValueError('expected continuation/control frame, got %s ' 'instead' % frame) else: fragments.append(frame) return self.concat_fragments(fragments)
[ "def", "recv", "(", "self", ")", ":", "fragments", "=", "[", "]", "while", "not", "len", "(", "fragments", ")", "or", "not", "fragments", "[", "-", "1", "]", ".", "final", ":", "frame", "=", "self", ".", "sock", ".", "recv", "(", ")", "if", "is...
Receive a message. A message may consist of multiple (ordered) data frames. A control frame may be delivered at any time, also when expecting the next continuation frame of a fragmented message. These control frames are handled immediately by handle_control_frame().
[ "Receive", "a", "message", ".", "A", "message", "may", "consist", "of", "multiple", "(", "ordered", ")", "data", "frames", ".", "A", "control", "frame", "may", "be", "delivered", "at", "any", "time", "also", "when", "expecting", "the", "next", "continuatio...
train
https://github.com/taddeus/wspy/blob/13f054a72442bb8dcc37b0ac011cab6025830d66/connection.py#L80-L100
taddeus/wspy
connection.py
Connection.handle_control_frame
def handle_control_frame(self, frame): """ Handle a control frame as defined by RFC 6455. """ if frame.opcode == OPCODE_CLOSE: self.close_frame_received = True code, reason = frame.unpack_close() if self.close_frame_sent: self.onclose(code, reason) self.sock.close() raise SocketClosed(True) else: self.close_params = (code, reason) self.send_close_frame(code, reason) elif frame.opcode == OPCODE_PING: # Respond with a pong message with identical payload self.send_frame(ControlFrame(OPCODE_PONG, frame.payload)) elif frame.opcode == OPCODE_PONG: # Assert that the PONG payload is identical to that of the PING if not self.ping_sent: raise PingError('received PONG while no PING was sent') self.ping_sent = False if frame.payload != self.ping_payload: raise PingError('received PONG with invalid payload') self.ping_payload = None self.onpong(frame.payload)
python
def handle_control_frame(self, frame): """ Handle a control frame as defined by RFC 6455. """ if frame.opcode == OPCODE_CLOSE: self.close_frame_received = True code, reason = frame.unpack_close() if self.close_frame_sent: self.onclose(code, reason) self.sock.close() raise SocketClosed(True) else: self.close_params = (code, reason) self.send_close_frame(code, reason) elif frame.opcode == OPCODE_PING: # Respond with a pong message with identical payload self.send_frame(ControlFrame(OPCODE_PONG, frame.payload)) elif frame.opcode == OPCODE_PONG: # Assert that the PONG payload is identical to that of the PING if not self.ping_sent: raise PingError('received PONG while no PING was sent') self.ping_sent = False if frame.payload != self.ping_payload: raise PingError('received PONG with invalid payload') self.ping_payload = None self.onpong(frame.payload)
[ "def", "handle_control_frame", "(", "self", ",", "frame", ")", ":", "if", "frame", ".", "opcode", "==", "OPCODE_CLOSE", ":", "self", ".", "close_frame_received", "=", "True", "code", ",", "reason", "=", "frame", ".", "unpack_close", "(", ")", "if", "self",...
Handle a control frame as defined by RFC 6455.
[ "Handle", "a", "control", "frame", "as", "defined", "by", "RFC", "6455", "." ]
train
https://github.com/taddeus/wspy/blob/13f054a72442bb8dcc37b0ac011cab6025830d66/connection.py#L112-L143
taddeus/wspy
connection.py
Connection.receive_forever
def receive_forever(self): """ Receive and handle messages in an endless loop. A message may consist of multiple data frames, but this is not visible for onmessage(). Control messages (or control frames) are handled automatically. """ while True: try: self.onmessage(self.recv()) except (KeyboardInterrupt, SystemExit, SocketClosed): break except Exception as e: self.onerror(e) self.onclose(None, 'error: %s' % e) try: self.sock.close() except socket.error: pass raise e
python
def receive_forever(self): """ Receive and handle messages in an endless loop. A message may consist of multiple data frames, but this is not visible for onmessage(). Control messages (or control frames) are handled automatically. """ while True: try: self.onmessage(self.recv()) except (KeyboardInterrupt, SystemExit, SocketClosed): break except Exception as e: self.onerror(e) self.onclose(None, 'error: %s' % e) try: self.sock.close() except socket.error: pass raise e
[ "def", "receive_forever", "(", "self", ")", ":", "while", "True", ":", "try", ":", "self", ".", "onmessage", "(", "self", ".", "recv", "(", ")", ")", "except", "(", "KeyboardInterrupt", ",", "SystemExit", ",", "SocketClosed", ")", ":", "break", "except",...
Receive and handle messages in an endless loop. A message may consist of multiple data frames, but this is not visible for onmessage(). Control messages (or control frames) are handled automatically.
[ "Receive", "and", "handle", "messages", "in", "an", "endless", "loop", ".", "A", "message", "may", "consist", "of", "multiple", "data", "frames", "but", "this", "is", "not", "visible", "for", "onmessage", "()", ".", "Control", "messages", "(", "or", "contr...
train
https://github.com/taddeus/wspy/blob/13f054a72442bb8dcc37b0ac011cab6025830d66/connection.py#L145-L165
taddeus/wspy
connection.py
Connection.send_ping
def send_ping(self, payload=''): """ Send a PING control frame with an optional payload. """ self.send_frame(ControlFrame(OPCODE_PING, payload), lambda: self.onping(payload)) self.ping_payload = payload self.ping_sent = True
python
def send_ping(self, payload=''): """ Send a PING control frame with an optional payload. """ self.send_frame(ControlFrame(OPCODE_PING, payload), lambda: self.onping(payload)) self.ping_payload = payload self.ping_sent = True
[ "def", "send_ping", "(", "self", ",", "payload", "=", "''", ")", ":", "self", ".", "send_frame", "(", "ControlFrame", "(", "OPCODE_PING", ",", "payload", ")", ",", "lambda", ":", "self", ".", "onping", "(", "payload", ")", ")", "self", ".", "ping_paylo...
Send a PING control frame with an optional payload.
[ "Send", "a", "PING", "control", "frame", "with", "an", "optional", "payload", "." ]
train
https://github.com/taddeus/wspy/blob/13f054a72442bb8dcc37b0ac011cab6025830d66/connection.py#L167-L174
taddeus/wspy
connection.py
Connection.close
def close(self, code=None, reason=''): """ Close the socket by sending a CLOSE frame and waiting for a response close message, unless such a message has already been received earlier (prior to calling this function, for example). The onclose() handler is called after the response has been received, but before the socket is actually closed. """ self.send_close_frame(code, reason) frame = self.sock.recv() if frame.opcode != OPCODE_CLOSE: raise ValueError('expected CLOSE frame, got %s' % frame) self.handle_control_frame(frame)
python
def close(self, code=None, reason=''): """ Close the socket by sending a CLOSE frame and waiting for a response close message, unless such a message has already been received earlier (prior to calling this function, for example). The onclose() handler is called after the response has been received, but before the socket is actually closed. """ self.send_close_frame(code, reason) frame = self.sock.recv() if frame.opcode != OPCODE_CLOSE: raise ValueError('expected CLOSE frame, got %s' % frame) self.handle_control_frame(frame)
[ "def", "close", "(", "self", ",", "code", "=", "None", ",", "reason", "=", "''", ")", ":", "self", ".", "send_close_frame", "(", "code", ",", "reason", ")", "frame", "=", "self", ".", "sock", ".", "recv", "(", ")", "if", "frame", ".", "opcode", "...
Close the socket by sending a CLOSE frame and waiting for a response close message, unless such a message has already been received earlier (prior to calling this function, for example). The onclose() handler is called after the response has been received, but before the socket is actually closed.
[ "Close", "the", "socket", "by", "sending", "a", "CLOSE", "frame", "and", "waiting", "for", "a", "response", "close", "message", "unless", "such", "a", "message", "has", "already", "been", "received", "earlier", "(", "prior", "to", "calling", "this", "functio...
train
https://github.com/taddeus/wspy/blob/13f054a72442bb8dcc37b0ac011cab6025830d66/connection.py#L189-L204
mk-fg/feedjack
feedjack/models.py
get_by_string
def get_by_string(cls, fields, query): '''Get object by numeric id or exact and unique part of specified attrs (name, title, etc).''' try: pk = int(query) except ValueError: pass else: return cls.objects.get(pk=pk) obj = list(cls.objects.filter(reduce( op.or_, list(Q(**{'{}__icontains'.format(f): query}) for f in fields) ))) if len(obj) > 1: raise cls.MultipleObjectsReturned(( u'Unable to uniquely identify {}' ' by provided criteria: {!r} (candidates: {})' )\ .format(cls.__name__, query, ', '.join(it.imap(unicode, obj))) ) elif not len(obj): raise cls.DoesNotExist( u'Unable to find site by provided criteria: {!r}'.format(query) ) return obj[0]
python
def get_by_string(cls, fields, query): '''Get object by numeric id or exact and unique part of specified attrs (name, title, etc).''' try: pk = int(query) except ValueError: pass else: return cls.objects.get(pk=pk) obj = list(cls.objects.filter(reduce( op.or_, list(Q(**{'{}__icontains'.format(f): query}) for f in fields) ))) if len(obj) > 1: raise cls.MultipleObjectsReturned(( u'Unable to uniquely identify {}' ' by provided criteria: {!r} (candidates: {})' )\ .format(cls.__name__, query, ', '.join(it.imap(unicode, obj))) ) elif not len(obj): raise cls.DoesNotExist( u'Unable to find site by provided criteria: {!r}'.format(query) ) return obj[0]
[ "def", "get_by_string", "(", "cls", ",", "fields", ",", "query", ")", ":", "try", ":", "pk", "=", "int", "(", "query", ")", "except", "ValueError", ":", "pass", "else", ":", "return", "cls", ".", "objects", ".", "get", "(", "pk", "=", "pk", ")", ...
Get object by numeric id or exact and unique part of specified attrs (name, title, etc).
[ "Get", "object", "by", "numeric", "id", "or", "exact", "and", "unique", "part", "of", "specified", "attrs", "(", "name", "title", "etc", ")", "." ]
train
https://github.com/mk-fg/feedjack/blob/3fe65c0f66dc2cfdf45834aaa7235ec9f81b3ca3/feedjack/models.py#L28-L44
mk-fg/feedjack
feedjack/models.py
ProcessingThingBase.handler
def handler(self): 'Handler function' from feedjack import filters # shouldn't be imported globally, as they may depend on models proc_func = getattr(filters, self.handler_name or self.name, None) if proc_func is None: if '.' not in self.handler_name: raise ImportError('Processing function not available: {0}'.format(self.handler_name)) proc_module, proc_func = it.imap(str, self.handler_name.rsplit('.', 1)) proc_func = getattr(__import__(proc_module, fromlist=[proc_func]), proc_func) return proc_func
python
def handler(self): 'Handler function' from feedjack import filters # shouldn't be imported globally, as they may depend on models proc_func = getattr(filters, self.handler_name or self.name, None) if proc_func is None: if '.' not in self.handler_name: raise ImportError('Processing function not available: {0}'.format(self.handler_name)) proc_module, proc_func = it.imap(str, self.handler_name.rsplit('.', 1)) proc_func = getattr(__import__(proc_module, fromlist=[proc_func]), proc_func) return proc_func
[ "def", "handler", "(", "self", ")", ":", "from", "feedjack", "import", "filters", "# shouldn't be imported globally, as they may depend on models", "proc_func", "=", "getattr", "(", "filters", ",", "self", ".", "handler_name", "or", "self", ".", "name", ",", "None",...
Handler function
[ "Handler", "function" ]
train
https://github.com/mk-fg/feedjack/blob/3fe65c0f66dc2cfdf45834aaa7235ec9f81b3ca3/feedjack/models.py#L178-L187
mk-fg/feedjack
feedjack/models.py
ProcessingThing.handler
def handler(self): 'Parametrized handler function' return ft.partial(self.base.handler, parameter=self.parameter)\ if self.parameter else self.base.handler
python
def handler(self): 'Parametrized handler function' return ft.partial(self.base.handler, parameter=self.parameter)\ if self.parameter else self.base.handler
[ "def", "handler", "(", "self", ")", ":", "return", "ft", ".", "partial", "(", "self", ".", "base", ".", "handler", ",", "parameter", "=", "self", ".", "parameter", ")", "if", "self", ".", "parameter", "else", "self", ".", "base", ".", "handler" ]
Parametrized handler function
[ "Parametrized", "handler", "function" ]
train
https://github.com/mk-fg/feedjack/blob/3fe65c0f66dc2cfdf45834aaa7235ec9f81b3ca3/feedjack/models.py#L213-L216
mk-fg/feedjack
feedjack/models.py
Feed.calculate_check_interval
def calculate_check_interval( self, max_interval, ewma_factor, max_days=None, max_updates=None, ewma=0, ewma_ts=None, add_partial=None ): '''Calculate interval for checks as average time (ewma) between updates for specified period.''' if not add_partial: posts_base = self.posts.only('date_modified').order_by('date_modified') if ewma_ts: posts_base = posts_base.filter(date_modified__gt=ewma_ts) posts = posts_base if max_days: posts = posts.filter(date_modified__gt=timezone.now() - timedelta(max_days)) if max_updates and max_updates > 0: posts = posts[:max_updates] if len(posts) < max_updates: posts = posts_base[:max_updates] timestamps = posts.values_list('date_modified', flat=True) else: timestamps = list() if add_partial: if not ewma_ts: try: ewma_ts = self.posts.only('date_modified')\ .order_by('-date_modified')[0].date_modified except (ObjectDoesNotExist, IndexError): return 0 # no previous timestamp available timestamps.append(add_partial) if (add_partial - ewma_ts).total_seconds() < ewma: # It doesn't make sense to lower interval due to frequent check attempts. return ewma for ts in timestamps: if ewma_ts is None: # first post ewma_ts = ts continue ewma_ts, interval = ts, (ts - ewma_ts).total_seconds() ewma = ewma_factor * interval + (1 - ewma_factor) * ewma return min(timedelta(max_interval).total_seconds(), ewma)
python
def calculate_check_interval( self, max_interval, ewma_factor, max_days=None, max_updates=None, ewma=0, ewma_ts=None, add_partial=None ): '''Calculate interval for checks as average time (ewma) between updates for specified period.''' if not add_partial: posts_base = self.posts.only('date_modified').order_by('date_modified') if ewma_ts: posts_base = posts_base.filter(date_modified__gt=ewma_ts) posts = posts_base if max_days: posts = posts.filter(date_modified__gt=timezone.now() - timedelta(max_days)) if max_updates and max_updates > 0: posts = posts[:max_updates] if len(posts) < max_updates: posts = posts_base[:max_updates] timestamps = posts.values_list('date_modified', flat=True) else: timestamps = list() if add_partial: if not ewma_ts: try: ewma_ts = self.posts.only('date_modified')\ .order_by('-date_modified')[0].date_modified except (ObjectDoesNotExist, IndexError): return 0 # no previous timestamp available timestamps.append(add_partial) if (add_partial - ewma_ts).total_seconds() < ewma: # It doesn't make sense to lower interval due to frequent check attempts. return ewma for ts in timestamps: if ewma_ts is None: # first post ewma_ts = ts continue ewma_ts, interval = ts, (ts - ewma_ts).total_seconds() ewma = ewma_factor * interval + (1 - ewma_factor) * ewma return min(timedelta(max_interval).total_seconds(), ewma)
[ "def", "calculate_check_interval", "(", "self", ",", "max_interval", ",", "ewma_factor", ",", "max_days", "=", "None", ",", "max_updates", "=", "None", ",", "ewma", "=", "0", ",", "ewma_ts", "=", "None", ",", "add_partial", "=", "None", ")", ":", "if", "...
Calculate interval for checks as average time (ewma) between updates for specified period.
[ "Calculate", "interval", "for", "checks", "as", "average", "time", "(", "ewma", ")", "between", "updates", "for", "specified", "period", "." ]
train
https://github.com/mk-fg/feedjack/blob/3fe65c0f66dc2cfdf45834aaa7235ec9f81b3ca3/feedjack/models.py#L445-L478
mk-fg/feedjack
feedjack/models.py
Feed.update_handler
def update_handler(feeds): '''Update all cross-referencing filters results for feeds and others, related to them. Intended to be called from non-Feed update hooks (like new Post saving).''' # Check if this call is a result of actions initiated from # one of the hooks in a higher frame (resulting in recursion). if Feed._filters_update_handler_lock: return return Feed._filters_update_handler(Feed, feeds, force=True)
python
def update_handler(feeds): '''Update all cross-referencing filters results for feeds and others, related to them. Intended to be called from non-Feed update hooks (like new Post saving).''' # Check if this call is a result of actions initiated from # one of the hooks in a higher frame (resulting in recursion). if Feed._filters_update_handler_lock: return return Feed._filters_update_handler(Feed, feeds, force=True)
[ "def", "update_handler", "(", "feeds", ")", ":", "# Check if this call is a result of actions initiated from", "# one of the hooks in a higher frame (resulting in recursion).", "if", "Feed", ".", "_filters_update_handler_lock", ":", "return", "return", "Feed", ".", "_filters_updat...
Update all cross-referencing filters results for feeds and others, related to them. Intended to be called from non-Feed update hooks (like new Post saving).
[ "Update", "all", "cross", "-", "referencing", "filters", "results", "for", "feeds", "and", "others", "related", "to", "them", ".", "Intended", "to", "be", "called", "from", "non", "-", "Feed", "update", "hooks", "(", "like", "new", "Post", "saving", ")", ...
train
https://github.com/mk-fg/feedjack/blob/3fe65c0f66dc2cfdf45834aaa7235ec9f81b3ca3/feedjack/models.py#L616-L622
mk-fg/feedjack
feedjack/models.py
PostQuerySet.similar
def similar(self, threshold, **criterias): '''Find text-based field matches with similarity (1-levenshtein/length) higher than specified threshold (0 to 1, 1 being an exact match)''' # XXX: use F from https://docs.djangoproject.com/en/1.8/ref/models/expressions/ meta = self.model._meta funcs, params = list(), list() for name,val in criterias.iteritems(): name = meta.get_field(name, many_to_many=False).column name = '.'.join(it.imap(connection.ops.quote_name, (meta.db_table, name))) # Alas, pg_trgm is for containment tests, not fuzzy matches, # but it can potentially be used to find closest results as well # funcs.append( 'similarity(CAST({0}.{1} as text), CAST(%s as text))'\ # Ok, these two are just to make sure levenshtein() won't crash # w/ "argument exceeds the maximum length of N bytes error" funcs.append('octet_length({0}) <= {1}'.format(name, self.levenshtein_limit)) funcs.append('octet_length(%s) <= {0}'.format(self.levenshtein_limit)) # Then there's a possibility of division by zero... funcs.append('length({0}) > 0'.format(name)) # And if everything else fits, the comparison itself funcs.append('levenshtein({0}, %s) / CAST(length({0}) AS numeric) < %s'.format(name)) params.extend((val, val, float(1 - threshold))) return self.extra(where=funcs, params=params)
python
def similar(self, threshold, **criterias): '''Find text-based field matches with similarity (1-levenshtein/length) higher than specified threshold (0 to 1, 1 being an exact match)''' # XXX: use F from https://docs.djangoproject.com/en/1.8/ref/models/expressions/ meta = self.model._meta funcs, params = list(), list() for name,val in criterias.iteritems(): name = meta.get_field(name, many_to_many=False).column name = '.'.join(it.imap(connection.ops.quote_name, (meta.db_table, name))) # Alas, pg_trgm is for containment tests, not fuzzy matches, # but it can potentially be used to find closest results as well # funcs.append( 'similarity(CAST({0}.{1} as text), CAST(%s as text))'\ # Ok, these two are just to make sure levenshtein() won't crash # w/ "argument exceeds the maximum length of N bytes error" funcs.append('octet_length({0}) <= {1}'.format(name, self.levenshtein_limit)) funcs.append('octet_length(%s) <= {0}'.format(self.levenshtein_limit)) # Then there's a possibility of division by zero... funcs.append('length({0}) > 0'.format(name)) # And if everything else fits, the comparison itself funcs.append('levenshtein({0}, %s) / CAST(length({0}) AS numeric) < %s'.format(name)) params.extend((val, val, float(1 - threshold))) return self.extra(where=funcs, params=params)
[ "def", "similar", "(", "self", ",", "threshold", ",", "*", "*", "criterias", ")", ":", "# XXX: use F from https://docs.djangoproject.com/en/1.8/ref/models/expressions/", "meta", "=", "self", ".", "model", ".", "_meta", "funcs", ",", "params", "=", "list", "(", ")"...
Find text-based field matches with similarity (1-levenshtein/length) higher than specified threshold (0 to 1, 1 being an exact match)
[ "Find", "text", "-", "based", "field", "matches", "with", "similarity", "(", "1", "-", "levenshtein", "/", "length", ")", "higher", "than", "specified", "threshold", "(", "0", "to", "1", "1", "being", "an", "exact", "match", ")" ]
train
https://github.com/mk-fg/feedjack/blob/3fe65c0f66dc2cfdf45834aaa7235ec9f81b3ca3/feedjack/models.py#L654-L675
mk-fg/feedjack
feedjack/models.py
Post._filtering_result_checked
def _filtering_result_checked(self, by_or): '''Check if post passes all / at_least_one (by_or parameter) filter(s). Filters are evaluated on only-if-necessary ("lazy") basis.''' filters, results = it.imap(set, ( self.feed.filters.all(), self.filtering_results.values_list('filter', flat=True) )) # Check if conclusion can already be made, based on cached results. if results.issubset(filters): # If at least one failed/passed test is already there, and/or outcome is defined. try: return self._filtering_result(by_or) except IndexError: # inconclusive until results are consistent if filters == results: return not by_or # Consistency check / update. if filters != results: # Drop obsolete (removed, unbound from feed) # filters' results (they WILL corrupt outcome). self.filtering_results.filter(filter__in=results.difference(filters)).delete() # One more try, now that results are only from feed filters' subset. try: return self._filtering_result(by_or) except IndexError: pass # Check if any filter-results are not cached yet, create them (perform actual filtering). # Note that independent filters applied first, since # crossrefs should be more resource-hungry in general. for filter_obj in sorted(filters.difference(results), key=op.attrgetter('base.crossref')): filter_op = FilterResult(filter=filter_obj, post=self, result=filter_obj.handler(self)) filter_op.save() if filter_op.result == by_or: return by_or # return as soon as first passed / failed # Final result try: return self._filtering_result(by_or) except IndexError: return not by_or
python
def _filtering_result_checked(self, by_or): '''Check if post passes all / at_least_one (by_or parameter) filter(s). Filters are evaluated on only-if-necessary ("lazy") basis.''' filters, results = it.imap(set, ( self.feed.filters.all(), self.filtering_results.values_list('filter', flat=True) )) # Check if conclusion can already be made, based on cached results. if results.issubset(filters): # If at least one failed/passed test is already there, and/or outcome is defined. try: return self._filtering_result(by_or) except IndexError: # inconclusive until results are consistent if filters == results: return not by_or # Consistency check / update. if filters != results: # Drop obsolete (removed, unbound from feed) # filters' results (they WILL corrupt outcome). self.filtering_results.filter(filter__in=results.difference(filters)).delete() # One more try, now that results are only from feed filters' subset. try: return self._filtering_result(by_or) except IndexError: pass # Check if any filter-results are not cached yet, create them (perform actual filtering). # Note that independent filters applied first, since # crossrefs should be more resource-hungry in general. for filter_obj in sorted(filters.difference(results), key=op.attrgetter('base.crossref')): filter_op = FilterResult(filter=filter_obj, post=self, result=filter_obj.handler(self)) filter_op.save() if filter_op.result == by_or: return by_or # return as soon as first passed / failed # Final result try: return self._filtering_result(by_or) except IndexError: return not by_or
[ "def", "_filtering_result_checked", "(", "self", ",", "by_or", ")", ":", "filters", ",", "results", "=", "it", ".", "imap", "(", "set", ",", "(", "self", ".", "feed", ".", "filters", ".", "all", "(", ")", ",", "self", ".", "filtering_results", ".", "...
Check if post passes all / at_least_one (by_or parameter) filter(s). Filters are evaluated on only-if-necessary ("lazy") basis.
[ "Check", "if", "post", "passes", "all", "/", "at_least_one", "(", "by_or", "parameter", ")", "filter", "(", "s", ")", ".", "Filters", "are", "evaluated", "on", "only", "-", "if", "-", "necessary", "(", "lazy", ")", "basis", "." ]
train
https://github.com/mk-fg/feedjack/blob/3fe65c0f66dc2cfdf45834aaa7235ec9f81b3ca3/feedjack/models.py#L783-L814
gmr/tredis
tredis/client.py
_Connection.connect
def connect(self): """Connect to the Redis server if necessary. :rtype: :class:`~tornado.concurrent.Future` :raises: :class:`~tredis.exceptions.ConnectError` :class:`~tredis.exceptinos.RedisError` """ future = concurrent.Future() if self.connected: raise exceptions.ConnectError('already connected') LOGGER.debug('%s connecting', self.name) self.io_loop.add_future( self._client.connect(self.host, self.port), lambda f: self._on_connected(f, future)) return future
python
def connect(self): """Connect to the Redis server if necessary. :rtype: :class:`~tornado.concurrent.Future` :raises: :class:`~tredis.exceptions.ConnectError` :class:`~tredis.exceptinos.RedisError` """ future = concurrent.Future() if self.connected: raise exceptions.ConnectError('already connected') LOGGER.debug('%s connecting', self.name) self.io_loop.add_future( self._client.connect(self.host, self.port), lambda f: self._on_connected(f, future)) return future
[ "def", "connect", "(", "self", ")", ":", "future", "=", "concurrent", ".", "Future", "(", ")", "if", "self", ".", "connected", ":", "raise", "exceptions", ".", "ConnectError", "(", "'already connected'", ")", "LOGGER", ".", "debug", "(", "'%s connecting'", ...
Connect to the Redis server if necessary. :rtype: :class:`~tornado.concurrent.Future` :raises: :class:`~tredis.exceptions.ConnectError` :class:`~tredis.exceptinos.RedisError`
[ "Connect", "to", "the", "Redis", "server", "if", "necessary", "." ]
train
https://github.com/gmr/tredis/blob/2e91c6a58a35460be0525c51ac6a98fde3b506ad/tredis/client.py#L104-L121
gmr/tredis
tredis/client.py
_Connection.execute
def execute(self, command, future): """Execute a command after connecting if necessary. :param bytes command: command to execute after the connection is established :param tornado.concurrent.Future future: future to resolve when the command's response is received. """ LOGGER.debug('execute(%r, %r)', command, future) if self.connected: self._write(command, future) else: def on_connected(cfuture): if cfuture.exception(): return future.set_exception(cfuture.exception()) self._write(command, future) self.io_loop.add_future(self.connect(), on_connected)
python
def execute(self, command, future): """Execute a command after connecting if necessary. :param bytes command: command to execute after the connection is established :param tornado.concurrent.Future future: future to resolve when the command's response is received. """ LOGGER.debug('execute(%r, %r)', command, future) if self.connected: self._write(command, future) else: def on_connected(cfuture): if cfuture.exception(): return future.set_exception(cfuture.exception()) self._write(command, future) self.io_loop.add_future(self.connect(), on_connected)
[ "def", "execute", "(", "self", ",", "command", ",", "future", ")", ":", "LOGGER", ".", "debug", "(", "'execute(%r, %r)'", ",", "command", ",", "future", ")", "if", "self", ".", "connected", ":", "self", ".", "_write", "(", "command", ",", "future", ")"...
Execute a command after connecting if necessary. :param bytes command: command to execute after the connection is established :param tornado.concurrent.Future future: future to resolve when the command's response is received.
[ "Execute", "a", "command", "after", "connecting", "if", "necessary", "." ]
train
https://github.com/gmr/tredis/blob/2e91c6a58a35460be0525c51ac6a98fde3b506ad/tredis/client.py#L123-L142
gmr/tredis
tredis/client.py
_Connection._on_closed
def _on_closed(self): """Invoked when the connection is closed""" LOGGER.error('Redis connection closed') self.connected = False self._on_close() self._stream = None
python
def _on_closed(self): """Invoked when the connection is closed""" LOGGER.error('Redis connection closed') self.connected = False self._on_close() self._stream = None
[ "def", "_on_closed", "(", "self", ")", ":", "LOGGER", ".", "error", "(", "'Redis connection closed'", ")", "self", ".", "connected", "=", "False", "self", ".", "_on_close", "(", ")", "self", ".", "_stream", "=", "None" ]
Invoked when the connection is closed
[ "Invoked", "when", "the", "connection", "is", "closed" ]
train
https://github.com/gmr/tredis/blob/2e91c6a58a35460be0525c51ac6a98fde3b506ad/tredis/client.py#L187-L192
gmr/tredis
tredis/client.py
_Connection._on_connected
def _on_connected(self, stream_future, connect_future): """Invoked when the socket stream has connected, setting up the stream callbacks and invoking the on connect callback if set. :param stream_future: The connection socket future :type stream_future: :class:`~tornado.concurrent.Future` :param stream_future: The connection response future :type stream_future: :class:`~tornado.concurrent.Future` :raises: :exc:`tredis.exceptions.ConnectError` """ if stream_future.exception(): connect_future.set_exception( exceptions.ConnectError(stream_future.exception())) else: self._stream = stream_future.result() self._stream.set_close_callback(self._on_closed) self.connected = True connect_future.set_result(self)
python
def _on_connected(self, stream_future, connect_future): """Invoked when the socket stream has connected, setting up the stream callbacks and invoking the on connect callback if set. :param stream_future: The connection socket future :type stream_future: :class:`~tornado.concurrent.Future` :param stream_future: The connection response future :type stream_future: :class:`~tornado.concurrent.Future` :raises: :exc:`tredis.exceptions.ConnectError` """ if stream_future.exception(): connect_future.set_exception( exceptions.ConnectError(stream_future.exception())) else: self._stream = stream_future.result() self._stream.set_close_callback(self._on_closed) self.connected = True connect_future.set_result(self)
[ "def", "_on_connected", "(", "self", ",", "stream_future", ",", "connect_future", ")", ":", "if", "stream_future", ".", "exception", "(", ")", ":", "connect_future", ".", "set_exception", "(", "exceptions", ".", "ConnectError", "(", "stream_future", ".", "except...
Invoked when the socket stream has connected, setting up the stream callbacks and invoking the on connect callback if set. :param stream_future: The connection socket future :type stream_future: :class:`~tornado.concurrent.Future` :param stream_future: The connection response future :type stream_future: :class:`~tornado.concurrent.Future` :raises: :exc:`tredis.exceptions.ConnectError`
[ "Invoked", "when", "the", "socket", "stream", "has", "connected", "setting", "up", "the", "stream", "callbacks", "and", "invoking", "the", "on", "connect", "callback", "if", "set", "." ]
train
https://github.com/gmr/tredis/blob/2e91c6a58a35460be0525c51ac6a98fde3b506ad/tredis/client.py#L194-L212
gmr/tredis
tredis/client.py
_Connection._write
def _write(self, command, future): """Write a command to the socket :param Command command: the Command data structure """ def on_written(): self._on_written(command, future) try: self._stream.write(command.command, callback=on_written) except iostream.StreamClosedError as error: future.set_exception(exceptions.ConnectionError(error)) except Exception as error: LOGGER.exception('unhandled write failure - %r', error) future.set_exception(exceptions.ConnectionError(error))
python
def _write(self, command, future): """Write a command to the socket :param Command command: the Command data structure """ def on_written(): self._on_written(command, future) try: self._stream.write(command.command, callback=on_written) except iostream.StreamClosedError as error: future.set_exception(exceptions.ConnectionError(error)) except Exception as error: LOGGER.exception('unhandled write failure - %r', error) future.set_exception(exceptions.ConnectionError(error))
[ "def", "_write", "(", "self", ",", "command", ",", "future", ")", ":", "def", "on_written", "(", ")", ":", "self", ".", "_on_written", "(", "command", ",", "future", ")", "try", ":", "self", ".", "_stream", ".", "write", "(", "command", ".", "command...
Write a command to the socket :param Command command: the Command data structure
[ "Write", "a", "command", "to", "the", "socket" ]
train
https://github.com/gmr/tredis/blob/2e91c6a58a35460be0525c51ac6a98fde3b506ad/tredis/client.py#L214-L230
gmr/tredis
tredis/client.py
Client.connect
def connect(self): """Connect to the Redis server or Cluster. :rtype: tornado.concurrent.Future """ LOGGER.debug('Creating a%s connection to %s:%s (db %s)', ' cluster node' if self._clustering else '', self._hosts[0]['host'], self._hosts[0]['port'], self._hosts[0].get( 'db', DEFAULT_DB)) self._connect_future = concurrent.Future() conn = _Connection( self._hosts[0]['host'], self._hosts[0]['port'], self._hosts[0].get('db', DEFAULT_DB), self._read, self._on_closed, self.io_loop, cluster_node=self._clustering) self.io_loop.add_future(conn.connect(), self._on_connected) return self._connect_future
python
def connect(self): """Connect to the Redis server or Cluster. :rtype: tornado.concurrent.Future """ LOGGER.debug('Creating a%s connection to %s:%s (db %s)', ' cluster node' if self._clustering else '', self._hosts[0]['host'], self._hosts[0]['port'], self._hosts[0].get( 'db', DEFAULT_DB)) self._connect_future = concurrent.Future() conn = _Connection( self._hosts[0]['host'], self._hosts[0]['port'], self._hosts[0].get('db', DEFAULT_DB), self._read, self._on_closed, self.io_loop, cluster_node=self._clustering) self.io_loop.add_future(conn.connect(), self._on_connected) return self._connect_future
[ "def", "connect", "(", "self", ")", ":", "LOGGER", ".", "debug", "(", "'Creating a%s connection to %s:%s (db %s)'", ",", "' cluster node'", "if", "self", ".", "_clustering", "else", "''", ",", "self", ".", "_hosts", "[", "0", "]", "[", "'host'", "]", ",", ...
Connect to the Redis server or Cluster. :rtype: tornado.concurrent.Future
[ "Connect", "to", "the", "Redis", "server", "or", "Cluster", "." ]
train
https://github.com/gmr/tredis/blob/2e91c6a58a35460be0525c51ac6a98fde3b506ad/tredis/client.py#L319-L340
gmr/tredis
tredis/client.py
Client.close
def close(self): """Close any open connections to Redis. :raises: :exc:`tredis.exceptions.ConnectionError` """ if not self._connected.is_set(): raise exceptions.ConnectionError('not connected') self._closing = True if self._clustering: for host in self._cluster.keys(): self._cluster[host].close() elif self._connection: self._connection.close()
python
def close(self): """Close any open connections to Redis. :raises: :exc:`tredis.exceptions.ConnectionError` """ if not self._connected.is_set(): raise exceptions.ConnectionError('not connected') self._closing = True if self._clustering: for host in self._cluster.keys(): self._cluster[host].close() elif self._connection: self._connection.close()
[ "def", "close", "(", "self", ")", ":", "if", "not", "self", ".", "_connected", ".", "is_set", "(", ")", ":", "raise", "exceptions", ".", "ConnectionError", "(", "'not connected'", ")", "self", ".", "_closing", "=", "True", "if", "self", ".", "_clustering...
Close any open connections to Redis. :raises: :exc:`tredis.exceptions.ConnectionError`
[ "Close", "any", "open", "connections", "to", "Redis", "." ]
train
https://github.com/gmr/tredis/blob/2e91c6a58a35460be0525c51ac6a98fde3b506ad/tredis/client.py#L342-L355
gmr/tredis
tredis/client.py
Client.ready
def ready(self): """Indicates that the client is connected to the Redis server or cluster and is ready for use. :rtype: bool """ if self._clustering: return (all([c.connected for c in self._cluster.values()]) and len(self._cluster)) return (self._connection and self._connection.connected)
python
def ready(self): """Indicates that the client is connected to the Redis server or cluster and is ready for use. :rtype: bool """ if self._clustering: return (all([c.connected for c in self._cluster.values()]) and len(self._cluster)) return (self._connection and self._connection.connected)
[ "def", "ready", "(", "self", ")", ":", "if", "self", ".", "_clustering", ":", "return", "(", "all", "(", "[", "c", ".", "connected", "for", "c", "in", "self", ".", "_cluster", ".", "values", "(", ")", "]", ")", "and", "len", "(", "self", ".", "...
Indicates that the client is connected to the Redis server or cluster and is ready for use. :rtype: bool
[ "Indicates", "that", "the", "client", "is", "connected", "to", "the", "Redis", "server", "or", "cluster", "and", "is", "ready", "for", "use", "." ]
train
https://github.com/gmr/tredis/blob/2e91c6a58a35460be0525c51ac6a98fde3b506ad/tredis/client.py#L358-L368
gmr/tredis
tredis/client.py
Client._create_cluster_connection
def _create_cluster_connection(self, node): """Create a connection to a Redis server. :param node: The node to connect to :type node: tredis.cluster.ClusterNode """ LOGGER.debug('Creating a cluster connection to %s:%s', node.ip, node.port) conn = _Connection( node.ip, node.port, 0, self._read, self._on_closed, self.io_loop, cluster_node=True, read_only='slave' in node.flags, slots=node.slots) self.io_loop.add_future(conn.connect(), self._on_connected)
python
def _create_cluster_connection(self, node): """Create a connection to a Redis server. :param node: The node to connect to :type node: tredis.cluster.ClusterNode """ LOGGER.debug('Creating a cluster connection to %s:%s', node.ip, node.port) conn = _Connection( node.ip, node.port, 0, self._read, self._on_closed, self.io_loop, cluster_node=True, read_only='slave' in node.flags, slots=node.slots) self.io_loop.add_future(conn.connect(), self._on_connected)
[ "def", "_create_cluster_connection", "(", "self", ",", "node", ")", ":", "LOGGER", ".", "debug", "(", "'Creating a cluster connection to %s:%s'", ",", "node", ".", "ip", ",", "node", ".", "port", ")", "conn", "=", "_Connection", "(", "node", ".", "ip", ",", ...
Create a connection to a Redis server. :param node: The node to connect to :type node: tredis.cluster.ClusterNode
[ "Create", "a", "connection", "to", "a", "Redis", "server", "." ]
train
https://github.com/gmr/tredis/blob/2e91c6a58a35460be0525c51ac6a98fde3b506ad/tredis/client.py#L379-L398
gmr/tredis
tredis/client.py
Client._encode_resp
def _encode_resp(self, value): """Dynamically build the RESP payload based upon the list provided. :param mixed value: The list of command parts to encode :rtype: bytes """ if isinstance(value, bytes): return b''.join( [b'$', ascii(len(value)).encode('ascii'), CRLF, value, CRLF]) elif isinstance(value, str): # pragma: nocover return self._encode_resp(value.encode('utf-8')) elif isinstance(value, int): return self._encode_resp(ascii(value).encode('ascii')) elif isinstance(value, float): return self._encode_resp(ascii(value).encode('ascii')) elif isinstance(value, list): output = [b'*', ascii(len(value)).encode('ascii'), CRLF] for item in value: output.append(self._encode_resp(item)) return b''.join(output) else: raise ValueError('Unsupported type: {0}'.format(type(value)))
python
def _encode_resp(self, value): """Dynamically build the RESP payload based upon the list provided. :param mixed value: The list of command parts to encode :rtype: bytes """ if isinstance(value, bytes): return b''.join( [b'$', ascii(len(value)).encode('ascii'), CRLF, value, CRLF]) elif isinstance(value, str): # pragma: nocover return self._encode_resp(value.encode('utf-8')) elif isinstance(value, int): return self._encode_resp(ascii(value).encode('ascii')) elif isinstance(value, float): return self._encode_resp(ascii(value).encode('ascii')) elif isinstance(value, list): output = [b'*', ascii(len(value)).encode('ascii'), CRLF] for item in value: output.append(self._encode_resp(item)) return b''.join(output) else: raise ValueError('Unsupported type: {0}'.format(type(value)))
[ "def", "_encode_resp", "(", "self", ",", "value", ")", ":", "if", "isinstance", "(", "value", ",", "bytes", ")", ":", "return", "b''", ".", "join", "(", "[", "b'$'", ",", "ascii", "(", "len", "(", "value", ")", ")", ".", "encode", "(", "'ascii'", ...
Dynamically build the RESP payload based upon the list provided. :param mixed value: The list of command parts to encode :rtype: bytes
[ "Dynamically", "build", "the", "RESP", "payload", "based", "upon", "the", "list", "provided", "." ]
train
https://github.com/gmr/tredis/blob/2e91c6a58a35460be0525c51ac6a98fde3b506ad/tredis/client.py#L400-L423
gmr/tredis
tredis/client.py
Client._eval_expectation
def _eval_expectation(command, response, future): """Evaluate the response from Redis to see if it matches the expected response. :param command: The command that is being evaluated :type command: tredis.client.Command :param bytes response: The response value to check :param future: The future representing the execution of the command :type future: tornado.concurrent.Future :return: """ if isinstance(command.expectation, int) and command.expectation > 1: future.set_result(response == command.expectation or response) else: future.set_result(response == command.expectation)
python
def _eval_expectation(command, response, future): """Evaluate the response from Redis to see if it matches the expected response. :param command: The command that is being evaluated :type command: tredis.client.Command :param bytes response: The response value to check :param future: The future representing the execution of the command :type future: tornado.concurrent.Future :return: """ if isinstance(command.expectation, int) and command.expectation > 1: future.set_result(response == command.expectation or response) else: future.set_result(response == command.expectation)
[ "def", "_eval_expectation", "(", "command", ",", "response", ",", "future", ")", ":", "if", "isinstance", "(", "command", ".", "expectation", ",", "int", ")", "and", "command", ".", "expectation", ">", "1", ":", "future", ".", "set_result", "(", "response"...
Evaluate the response from Redis to see if it matches the expected response. :param command: The command that is being evaluated :type command: tredis.client.Command :param bytes response: The response value to check :param future: The future representing the execution of the command :type future: tornado.concurrent.Future :return:
[ "Evaluate", "the", "response", "from", "Redis", "to", "see", "if", "it", "matches", "the", "expected", "response", "." ]
train
https://github.com/gmr/tredis/blob/2e91c6a58a35460be0525c51ac6a98fde3b506ad/tredis/client.py#L426-L440
gmr/tredis
tredis/client.py
Client._execute
def _execute(self, parts, expectation=None, format_callback=None): """Really execute a redis command :param list parts: The list of command parts :param mixed expectation: Optional response expectation :rtype: :class:`~tornado.concurrent.Future` :raises: :exc:`~tredis.exceptions.SubscribedError` """ future = concurrent.TracebackFuture() try: command = self._build_command(parts) except ValueError as error: future.set_exception(error) return future def on_locked(_): if self.ready: if self._clustering: cmd = Command(command, self._pick_cluster_host(parts), expectation, format_callback) else: LOGGER.debug('Connection: %r', self._connection) cmd = Command(command, self._connection, expectation, format_callback) LOGGER.debug('_execute(%r, %r, %r) on %s', cmd.command, expectation, format_callback, cmd.connection.name) cmd.connection.execute(cmd, future) else: LOGGER.critical('Lock released & not ready, aborting command') # Wait until the cluster is ready, letting cluster discovery through if not self.ready and not self._connected.is_set(): self.io_loop.add_future( self._connected.wait(), lambda f: self.io_loop.add_future(self._busy.acquire(), on_locked) ) else: self.io_loop.add_future(self._busy.acquire(), on_locked) # Release the lock when the future is complete self.io_loop.add_future(future, lambda r: self._busy.release()) return future
python
def _execute(self, parts, expectation=None, format_callback=None): """Really execute a redis command :param list parts: The list of command parts :param mixed expectation: Optional response expectation :rtype: :class:`~tornado.concurrent.Future` :raises: :exc:`~tredis.exceptions.SubscribedError` """ future = concurrent.TracebackFuture() try: command = self._build_command(parts) except ValueError as error: future.set_exception(error) return future def on_locked(_): if self.ready: if self._clustering: cmd = Command(command, self._pick_cluster_host(parts), expectation, format_callback) else: LOGGER.debug('Connection: %r', self._connection) cmd = Command(command, self._connection, expectation, format_callback) LOGGER.debug('_execute(%r, %r, %r) on %s', cmd.command, expectation, format_callback, cmd.connection.name) cmd.connection.execute(cmd, future) else: LOGGER.critical('Lock released & not ready, aborting command') # Wait until the cluster is ready, letting cluster discovery through if not self.ready and not self._connected.is_set(): self.io_loop.add_future( self._connected.wait(), lambda f: self.io_loop.add_future(self._busy.acquire(), on_locked) ) else: self.io_loop.add_future(self._busy.acquire(), on_locked) # Release the lock when the future is complete self.io_loop.add_future(future, lambda r: self._busy.release()) return future
[ "def", "_execute", "(", "self", ",", "parts", ",", "expectation", "=", "None", ",", "format_callback", "=", "None", ")", ":", "future", "=", "concurrent", ".", "TracebackFuture", "(", ")", "try", ":", "command", "=", "self", ".", "_build_command", "(", "...
Really execute a redis command :param list parts: The list of command parts :param mixed expectation: Optional response expectation :rtype: :class:`~tornado.concurrent.Future` :raises: :exc:`~tredis.exceptions.SubscribedError`
[ "Really", "execute", "a", "redis", "command" ]
train
https://github.com/gmr/tredis/blob/2e91c6a58a35460be0525c51ac6a98fde3b506ad/tredis/client.py#L442-L486
gmr/tredis
tredis/client.py
Client._on_cluster_discovery
def _on_cluster_discovery(self, future): """Invoked when the Redis server has responded to the ``CLUSTER_NODES`` command. :param future: The future containing the response from Redis :type future: tornado.concurrent.Future """ LOGGER.debug('_on_cluster_discovery(%r)', future) common.maybe_raise_exception(future) nodes = future.result() for node in nodes: name = '{}:{}'.format(node.ip, node.port) if name in self._cluster: LOGGER.debug('Updating cluster connection info for %s:%s', node.ip, node.port) self._cluster[name].set_slots(node.slots) self._cluster[name].set_read_only('slave' in node.flags) else: self._create_cluster_connection(node) self._discovery = True
python
def _on_cluster_discovery(self, future): """Invoked when the Redis server has responded to the ``CLUSTER_NODES`` command. :param future: The future containing the response from Redis :type future: tornado.concurrent.Future """ LOGGER.debug('_on_cluster_discovery(%r)', future) common.maybe_raise_exception(future) nodes = future.result() for node in nodes: name = '{}:{}'.format(node.ip, node.port) if name in self._cluster: LOGGER.debug('Updating cluster connection info for %s:%s', node.ip, node.port) self._cluster[name].set_slots(node.slots) self._cluster[name].set_read_only('slave' in node.flags) else: self._create_cluster_connection(node) self._discovery = True
[ "def", "_on_cluster_discovery", "(", "self", ",", "future", ")", ":", "LOGGER", ".", "debug", "(", "'_on_cluster_discovery(%r)'", ",", "future", ")", "common", ".", "maybe_raise_exception", "(", "future", ")", "nodes", "=", "future", ".", "result", "(", ")", ...
Invoked when the Redis server has responded to the ``CLUSTER_NODES`` command. :param future: The future containing the response from Redis :type future: tornado.concurrent.Future
[ "Invoked", "when", "the", "Redis", "server", "has", "responded", "to", "the", "CLUSTER_NODES", "command", "." ]
train
https://github.com/gmr/tredis/blob/2e91c6a58a35460be0525c51ac6a98fde3b506ad/tredis/client.py#L488-L508
gmr/tredis
tredis/client.py
Client._on_closed
def _on_closed(self): """Invoked by connections when they are closed.""" self._connected.clear() if not self._closing: if self._on_close_callback: self._on_close_callback() else: raise exceptions.ConnectionError('closed')
python
def _on_closed(self): """Invoked by connections when they are closed.""" self._connected.clear() if not self._closing: if self._on_close_callback: self._on_close_callback() else: raise exceptions.ConnectionError('closed')
[ "def", "_on_closed", "(", "self", ")", ":", "self", ".", "_connected", ".", "clear", "(", ")", "if", "not", "self", ".", "_closing", ":", "if", "self", ".", "_on_close_callback", ":", "self", ".", "_on_close_callback", "(", ")", "else", ":", "raise", "...
Invoked by connections when they are closed.
[ "Invoked", "by", "connections", "when", "they", "are", "closed", "." ]
train
https://github.com/gmr/tredis/blob/2e91c6a58a35460be0525c51ac6a98fde3b506ad/tredis/client.py#L510-L517
gmr/tredis
tredis/client.py
Client._on_cluster_data_moved
def _on_cluster_data_moved(self, response, command, future): """Process the ``MOVED`` response from a Redis cluster node. :param bytes response: The response from the Redis server :param command: The command that was being executed :type command: tredis.client.Command :param future: The execution future :type future: tornado.concurrent.Future """ LOGGER.debug('on_cluster_data_moved(%r, %r, %r)', response, command, future) parts = response.split(' ') name = '{}:{}'.format(*common.split_connection_host_port(parts[2])) LOGGER.debug('Moved to %r', name) if name not in self._cluster: raise exceptions.ConnectionError( '{} is not connected'.format(name)) self._cluster[name].execute( command._replace(connection=self._cluster[name]), future)
python
def _on_cluster_data_moved(self, response, command, future): """Process the ``MOVED`` response from a Redis cluster node. :param bytes response: The response from the Redis server :param command: The command that was being executed :type command: tredis.client.Command :param future: The execution future :type future: tornado.concurrent.Future """ LOGGER.debug('on_cluster_data_moved(%r, %r, %r)', response, command, future) parts = response.split(' ') name = '{}:{}'.format(*common.split_connection_host_port(parts[2])) LOGGER.debug('Moved to %r', name) if name not in self._cluster: raise exceptions.ConnectionError( '{} is not connected'.format(name)) self._cluster[name].execute( command._replace(connection=self._cluster[name]), future)
[ "def", "_on_cluster_data_moved", "(", "self", ",", "response", ",", "command", ",", "future", ")", ":", "LOGGER", ".", "debug", "(", "'on_cluster_data_moved(%r, %r, %r)'", ",", "response", ",", "command", ",", "future", ")", "parts", "=", "response", ".", "spl...
Process the ``MOVED`` response from a Redis cluster node. :param bytes response: The response from the Redis server :param command: The command that was being executed :type command: tredis.client.Command :param future: The execution future :type future: tornado.concurrent.Future
[ "Process", "the", "MOVED", "response", "from", "a", "Redis", "cluster", "node", "." ]
train
https://github.com/gmr/tredis/blob/2e91c6a58a35460be0525c51ac6a98fde3b506ad/tredis/client.py#L519-L538
gmr/tredis
tredis/client.py
Client._on_connected
def _on_connected(self, future): """Invoked when connections have been established. If the client is in clustering mode, it will kick of the discovery step if needed. If not, it will select the configured database. :param future: The connection future :type future: tornado.concurrent.Future """ if future.exception(): self._connect_future.set_exception(future.exception()) return conn = future.result() LOGGER.debug('Connected to %s (%r, %r, %r)', conn.name, self._clustering, self._discovery, self._connected) if self._clustering: self._cluster[conn.name] = conn if not self._discovery: self.io_loop.add_future(self.cluster_nodes(), self._on_cluster_discovery) elif self.ready: LOGGER.debug('Cluster nodes all connected') if not self._connect_future.done(): self._connect_future.set_result(True) self._connected.set() else: def on_selected(sfuture): LOGGER.debug('Initial setup and selection processed') if sfuture.exception(): self._connect_future.set_exception(sfuture.exception()) else: self._connect_future.set_result(True) self._connected.set() select_future = concurrent.Future() self.io_loop.add_future(select_future, on_selected) self._connection = conn cmd = Command( self._build_command(['SELECT', str(conn.database)]), self._connection, None, None) cmd.connection.execute(cmd, select_future)
python
def _on_connected(self, future): """Invoked when connections have been established. If the client is in clustering mode, it will kick of the discovery step if needed. If not, it will select the configured database. :param future: The connection future :type future: tornado.concurrent.Future """ if future.exception(): self._connect_future.set_exception(future.exception()) return conn = future.result() LOGGER.debug('Connected to %s (%r, %r, %r)', conn.name, self._clustering, self._discovery, self._connected) if self._clustering: self._cluster[conn.name] = conn if not self._discovery: self.io_loop.add_future(self.cluster_nodes(), self._on_cluster_discovery) elif self.ready: LOGGER.debug('Cluster nodes all connected') if not self._connect_future.done(): self._connect_future.set_result(True) self._connected.set() else: def on_selected(sfuture): LOGGER.debug('Initial setup and selection processed') if sfuture.exception(): self._connect_future.set_exception(sfuture.exception()) else: self._connect_future.set_result(True) self._connected.set() select_future = concurrent.Future() self.io_loop.add_future(select_future, on_selected) self._connection = conn cmd = Command( self._build_command(['SELECT', str(conn.database)]), self._connection, None, None) cmd.connection.execute(cmd, select_future)
[ "def", "_on_connected", "(", "self", ",", "future", ")", ":", "if", "future", ".", "exception", "(", ")", ":", "self", ".", "_connect_future", ".", "set_exception", "(", "future", ".", "exception", "(", ")", ")", "return", "conn", "=", "future", ".", "...
Invoked when connections have been established. If the client is in clustering mode, it will kick of the discovery step if needed. If not, it will select the configured database. :param future: The connection future :type future: tornado.concurrent.Future
[ "Invoked", "when", "connections", "have", "been", "established", ".", "If", "the", "client", "is", "in", "clustering", "mode", "it", "will", "kick", "of", "the", "discovery", "step", "if", "needed", ".", "If", "not", "it", "will", "select", "the", "configu...
train
https://github.com/gmr/tredis/blob/2e91c6a58a35460be0525c51ac6a98fde3b506ad/tredis/client.py#L540-L582
gmr/tredis
tredis/client.py
Client._on_read_only_error
def _on_read_only_error(self, command, future): """Invoked when a Redis node returns an error indicating it's in read-only mode. It will use the ``INFO REPLICATION`` command to attempt to find the master server and failover to that, reissuing the command to that server. :param command: The command that was being executed :type command: tredis.client.Command :param future: The execution future :type future: tornado.concurrent.Future """ failover_future = concurrent.TracebackFuture() def on_replication_info(_): common.maybe_raise_exception(failover_future) LOGGER.debug('Failover closing current read-only connection') self._closing = True database = self._connection.database self._connection.close() self._connected.clear() self._connect_future = concurrent.Future() info = failover_future.result() LOGGER.debug('Failover connecting to %s:%s', info['master_host'], info['master_port']) self._connection = _Connection( info['master_host'], info['master_port'], database, self._read, self._on_closed, self.io_loop, self._clustering) # When the connection is re-established, re-run the command self.io_loop.add_future( self._connect_future, lambda f: self._connection.execute( command._replace(connection=self._connection), future)) # Use the normal connection processing flow when connecting self.io_loop.add_future(self._connection.connect(), self._on_connected) if self._clustering: command.connection.set_readonly(True) LOGGER.debug('%s is read-only, need to failover to new master', command.connection.name) cmd = Command( self._build_command(['INFO', 'REPLICATION']), self._connection, None, common.format_info_response) self.io_loop.add_future(failover_future, on_replication_info) cmd.connection.execute(cmd, failover_future)
python
def _on_read_only_error(self, command, future): """Invoked when a Redis node returns an error indicating it's in read-only mode. It will use the ``INFO REPLICATION`` command to attempt to find the master server and failover to that, reissuing the command to that server. :param command: The command that was being executed :type command: tredis.client.Command :param future: The execution future :type future: tornado.concurrent.Future """ failover_future = concurrent.TracebackFuture() def on_replication_info(_): common.maybe_raise_exception(failover_future) LOGGER.debug('Failover closing current read-only connection') self._closing = True database = self._connection.database self._connection.close() self._connected.clear() self._connect_future = concurrent.Future() info = failover_future.result() LOGGER.debug('Failover connecting to %s:%s', info['master_host'], info['master_port']) self._connection = _Connection( info['master_host'], info['master_port'], database, self._read, self._on_closed, self.io_loop, self._clustering) # When the connection is re-established, re-run the command self.io_loop.add_future( self._connect_future, lambda f: self._connection.execute( command._replace(connection=self._connection), future)) # Use the normal connection processing flow when connecting self.io_loop.add_future(self._connection.connect(), self._on_connected) if self._clustering: command.connection.set_readonly(True) LOGGER.debug('%s is read-only, need to failover to new master', command.connection.name) cmd = Command( self._build_command(['INFO', 'REPLICATION']), self._connection, None, common.format_info_response) self.io_loop.add_future(failover_future, on_replication_info) cmd.connection.execute(cmd, failover_future)
[ "def", "_on_read_only_error", "(", "self", ",", "command", ",", "future", ")", ":", "failover_future", "=", "concurrent", ".", "TracebackFuture", "(", ")", "def", "on_replication_info", "(", "_", ")", ":", "common", ".", "maybe_raise_exception", "(", "failover_f...
Invoked when a Redis node returns an error indicating it's in read-only mode. It will use the ``INFO REPLICATION`` command to attempt to find the master server and failover to that, reissuing the command to that server. :param command: The command that was being executed :type command: tredis.client.Command :param future: The execution future :type future: tornado.concurrent.Future
[ "Invoked", "when", "a", "Redis", "node", "returns", "an", "error", "indicating", "it", "s", "in", "read", "-", "only", "mode", ".", "It", "will", "use", "the", "INFO", "REPLICATION", "command", "to", "attempt", "to", "find", "the", "master", "server", "a...
train
https://github.com/gmr/tredis/blob/2e91c6a58a35460be0525c51ac6a98fde3b506ad/tredis/client.py#L584-L635
gmr/tredis
tredis/client.py
Client._read
def _read(self, command, future): """Invoked when a command is executed to read and parse its results. It will loop on the IOLoop until the response is complete and then set the value of the response in the execution future. :param command: The command that was being executed :type command: tredis.client.Command :param future: The execution future :type future: tornado.concurrent.Future """ response = self._reader.gets() if response is not False: if isinstance(response, hiredis.ReplyError): if response.args[0].startswith('MOVED '): self._on_cluster_data_moved(response.args[0], command, future) elif response.args[0].startswith('READONLY '): self._on_read_only_error(command, future) else: future.set_exception(exceptions.RedisError(response)) elif command.callback is not None: future.set_result(command.callback(response)) elif command.expectation is not None: self._eval_expectation(command, response, future) else: future.set_result(response) else: def on_data(data): # LOGGER.debug('Read %r', data) self._reader.feed(data) self._read(command, future) command.connection.read(on_data)
python
def _read(self, command, future): """Invoked when a command is executed to read and parse its results. It will loop on the IOLoop until the response is complete and then set the value of the response in the execution future. :param command: The command that was being executed :type command: tredis.client.Command :param future: The execution future :type future: tornado.concurrent.Future """ response = self._reader.gets() if response is not False: if isinstance(response, hiredis.ReplyError): if response.args[0].startswith('MOVED '): self._on_cluster_data_moved(response.args[0], command, future) elif response.args[0].startswith('READONLY '): self._on_read_only_error(command, future) else: future.set_exception(exceptions.RedisError(response)) elif command.callback is not None: future.set_result(command.callback(response)) elif command.expectation is not None: self._eval_expectation(command, response, future) else: future.set_result(response) else: def on_data(data): # LOGGER.debug('Read %r', data) self._reader.feed(data) self._read(command, future) command.connection.read(on_data)
[ "def", "_read", "(", "self", ",", "command", ",", "future", ")", ":", "response", "=", "self", ".", "_reader", ".", "gets", "(", ")", "if", "response", "is", "not", "False", ":", "if", "isinstance", "(", "response", ",", "hiredis", ".", "ReplyError", ...
Invoked when a command is executed to read and parse its results. It will loop on the IOLoop until the response is complete and then set the value of the response in the execution future. :param command: The command that was being executed :type command: tredis.client.Command :param future: The execution future :type future: tornado.concurrent.Future
[ "Invoked", "when", "a", "command", "is", "executed", "to", "read", "and", "parse", "its", "results", ".", "It", "will", "loop", "on", "the", "IOLoop", "until", "the", "response", "is", "complete", "and", "then", "set", "the", "value", "of", "the", "respo...
train
https://github.com/gmr/tredis/blob/2e91c6a58a35460be0525c51ac6a98fde3b506ad/tredis/client.py#L637-L671
gmr/tredis
tredis/client.py
Client._pick_cluster_host
def _pick_cluster_host(self, value): """Selects the Redis cluster host for the specified value. :param mixed value: The value to use when looking for the host :rtype: tredis.client._Connection """ crc = crc16.crc16(self._encode_resp(value[1])) % HASH_SLOTS for host in self._cluster.keys(): for slot in self._cluster[host].slots: if slot[0] <= crc <= slot[1]: return self._cluster[host] LOGGER.debug('Host not found for %r, returning first connection', value) host_keys = sorted(list(self._cluster.keys())) return self._cluster[host_keys[0]]
python
def _pick_cluster_host(self, value): """Selects the Redis cluster host for the specified value. :param mixed value: The value to use when looking for the host :rtype: tredis.client._Connection """ crc = crc16.crc16(self._encode_resp(value[1])) % HASH_SLOTS for host in self._cluster.keys(): for slot in self._cluster[host].slots: if slot[0] <= crc <= slot[1]: return self._cluster[host] LOGGER.debug('Host not found for %r, returning first connection', value) host_keys = sorted(list(self._cluster.keys())) return self._cluster[host_keys[0]]
[ "def", "_pick_cluster_host", "(", "self", ",", "value", ")", ":", "crc", "=", "crc16", ".", "crc16", "(", "self", ".", "_encode_resp", "(", "value", "[", "1", "]", ")", ")", "%", "HASH_SLOTS", "for", "host", "in", "self", ".", "_cluster", ".", "keys"...
Selects the Redis cluster host for the specified value. :param mixed value: The value to use when looking for the host :rtype: tredis.client._Connection
[ "Selects", "the", "Redis", "cluster", "host", "for", "the", "specified", "value", "." ]
train
https://github.com/gmr/tredis/blob/2e91c6a58a35460be0525c51ac6a98fde3b506ad/tredis/client.py#L673-L688
kespindler/puffin
puf/cli_lib.py
parse_lines
def parse_lines(stream, separator=None): """ Takes each line of a stream, creating a generator that yields tuples of line, row - where row is the line split by separator (or by whitespace if separator is None. :param stream: :param separator: (optional) :return: generator """ separator = None if separator is None else unicode(separator) for line in stream: line = line.rstrip(u'\r\n') row = [interpret_segment(i) for i in line.split(separator)] yield line, row
python
def parse_lines(stream, separator=None): """ Takes each line of a stream, creating a generator that yields tuples of line, row - where row is the line split by separator (or by whitespace if separator is None. :param stream: :param separator: (optional) :return: generator """ separator = None if separator is None else unicode(separator) for line in stream: line = line.rstrip(u'\r\n') row = [interpret_segment(i) for i in line.split(separator)] yield line, row
[ "def", "parse_lines", "(", "stream", ",", "separator", "=", "None", ")", ":", "separator", "=", "None", "if", "separator", "is", "None", "else", "unicode", "(", "separator", ")", "for", "line", "in", "stream", ":", "line", "=", "line", ".", "rstrip", "...
Takes each line of a stream, creating a generator that yields tuples of line, row - where row is the line split by separator (or by whitespace if separator is None. :param stream: :param separator: (optional) :return: generator
[ "Takes", "each", "line", "of", "a", "stream", "creating", "a", "generator", "that", "yields", "tuples", "of", "line", "row", "-", "where", "row", "is", "the", "line", "split", "by", "separator", "(", "or", "by", "whitespace", "if", "separator", "is", "No...
train
https://github.com/kespindler/puffin/blob/baf4b70d43ef90a8df762796a03d2d919c0c6ecf/puf/cli_lib.py#L28-L42
kespindler/puffin
puf/cli_lib.py
parse_buffer
def parse_buffer(stream, separator=None): """ Returns a dictionary of the lines of a stream, an array of rows of the stream (split by separator), and an array of the columns of the stream (also split by separator) :param stream: :param separator: :return: dict """ rows = [] lines = [] for line, row in parse_lines(stream, separator): lines.append(line) rows.append(row) cols = zip(*rows) return { 'rows': rows, 'lines': lines, 'cols': cols, }
python
def parse_buffer(stream, separator=None): """ Returns a dictionary of the lines of a stream, an array of rows of the stream (split by separator), and an array of the columns of the stream (also split by separator) :param stream: :param separator: :return: dict """ rows = [] lines = [] for line, row in parse_lines(stream, separator): lines.append(line) rows.append(row) cols = zip(*rows) return { 'rows': rows, 'lines': lines, 'cols': cols, }
[ "def", "parse_buffer", "(", "stream", ",", "separator", "=", "None", ")", ":", "rows", "=", "[", "]", "lines", "=", "[", "]", "for", "line", ",", "row", "in", "parse_lines", "(", "stream", ",", "separator", ")", ":", "lines", ".", "append", "(", "l...
Returns a dictionary of the lines of a stream, an array of rows of the stream (split by separator), and an array of the columns of the stream (also split by separator) :param stream: :param separator: :return: dict
[ "Returns", "a", "dictionary", "of", "the", "lines", "of", "a", "stream", "an", "array", "of", "rows", "of", "the", "stream", "(", "split", "by", "separator", ")", "and", "an", "array", "of", "the", "columns", "of", "the", "stream", "(", "also", "split"...
train
https://github.com/kespindler/puffin/blob/baf4b70d43ef90a8df762796a03d2d919c0c6ecf/puf/cli_lib.py#L45-L65
kespindler/puffin
puf/cli_lib.py
display
def display(result, stream): """ Intelligently print the result (or pass if result is None). :param result: :return: None """ if result is None: return elif isinstance(result, basestring): pass elif isinstance(result, collections.Mapping): result = u'\n'.join(u'%s=%s' % (k, v) for k, v in result.iteritems() if v is not None) elif isinstance(result, collections.Iterable): result = u'\n'.join(unicode(x) for x in result if x is not None) else: result = unicode(result) stream.write(result.encode('utf8')) stream.write('\n')
python
def display(result, stream): """ Intelligently print the result (or pass if result is None). :param result: :return: None """ if result is None: return elif isinstance(result, basestring): pass elif isinstance(result, collections.Mapping): result = u'\n'.join(u'%s=%s' % (k, v) for k, v in result.iteritems() if v is not None) elif isinstance(result, collections.Iterable): result = u'\n'.join(unicode(x) for x in result if x is not None) else: result = unicode(result) stream.write(result.encode('utf8')) stream.write('\n')
[ "def", "display", "(", "result", ",", "stream", ")", ":", "if", "result", "is", "None", ":", "return", "elif", "isinstance", "(", "result", ",", "basestring", ")", ":", "pass", "elif", "isinstance", "(", "result", ",", "collections", ".", "Mapping", ")",...
Intelligently print the result (or pass if result is None). :param result: :return: None
[ "Intelligently", "print", "the", "result", "(", "or", "pass", "if", "result", "is", "None", ")", "." ]
train
https://github.com/kespindler/puffin/blob/baf4b70d43ef90a8df762796a03d2d919c0c6ecf/puf/cli_lib.py#L73-L92
kespindler/puffin
puf/cli_lib.py
safe_evaluate
def safe_evaluate(command, glob, local): """ Continue to attempt to execute the given command, importing objects which cause a NameError in the command :param command: command for eval :param glob: globals dict for eval :param local: locals dict for eval :return: command result """ while True: try: return eval(command, glob, local) except NameError as e: match = re.match("name '(.*)' is not defined", e.message) if not match: raise e try: exec ('import %s' % (match.group(1), )) in glob except ImportError: raise e
python
def safe_evaluate(command, glob, local): """ Continue to attempt to execute the given command, importing objects which cause a NameError in the command :param command: command for eval :param glob: globals dict for eval :param local: locals dict for eval :return: command result """ while True: try: return eval(command, glob, local) except NameError as e: match = re.match("name '(.*)' is not defined", e.message) if not match: raise e try: exec ('import %s' % (match.group(1), )) in glob except ImportError: raise e
[ "def", "safe_evaluate", "(", "command", ",", "glob", ",", "local", ")", ":", "while", "True", ":", "try", ":", "return", "eval", "(", "command", ",", "glob", ",", "local", ")", "except", "NameError", "as", "e", ":", "match", "=", "re", ".", "match", ...
Continue to attempt to execute the given command, importing objects which cause a NameError in the command :param command: command for eval :param glob: globals dict for eval :param local: locals dict for eval :return: command result
[ "Continue", "to", "attempt", "to", "execute", "the", "given", "command", "importing", "objects", "which", "cause", "a", "NameError", "in", "the", "command" ]
train
https://github.com/kespindler/puffin/blob/baf4b70d43ef90a8df762796a03d2d919c0c6ecf/puf/cli_lib.py#L95-L115
OriHoch/python-hebrew-numbers
hebrew_numbers/__init__.py
int_to_gematria
def int_to_gematria(num, gershayim=True): """convert integers between 1 an 999 to Hebrew numerals. - set gershayim flag to False to ommit gershayim """ # 1. Lookup in specials if num in specialnumbers['specials']: retval = specialnumbers['specials'][num] return _add_gershayim(retval) if gershayim else retval # 2. Generate numeral normally parts = [] rest = str(num) while rest: digit = int(rest[0]) rest = rest[1:] if digit == 0: continue power = 10 ** len(rest) parts.append(specialnumbers['numerals'][power * digit]) retval = ''.join(parts) # 3. Add gershayim return _add_gershayim(retval) if gershayim else retval
python
def int_to_gematria(num, gershayim=True): """convert integers between 1 an 999 to Hebrew numerals. - set gershayim flag to False to ommit gershayim """ # 1. Lookup in specials if num in specialnumbers['specials']: retval = specialnumbers['specials'][num] return _add_gershayim(retval) if gershayim else retval # 2. Generate numeral normally parts = [] rest = str(num) while rest: digit = int(rest[0]) rest = rest[1:] if digit == 0: continue power = 10 ** len(rest) parts.append(specialnumbers['numerals'][power * digit]) retval = ''.join(parts) # 3. Add gershayim return _add_gershayim(retval) if gershayim else retval
[ "def", "int_to_gematria", "(", "num", ",", "gershayim", "=", "True", ")", ":", "# 1. Lookup in specials", "if", "num", "in", "specialnumbers", "[", "'specials'", "]", ":", "retval", "=", "specialnumbers", "[", "'specials'", "]", "[", "num", "]", "return", "_...
convert integers between 1 an 999 to Hebrew numerals. - set gershayim flag to False to ommit gershayim
[ "convert", "integers", "between", "1", "an", "999", "to", "Hebrew", "numerals", "." ]
train
https://github.com/OriHoch/python-hebrew-numbers/blob/0bfa3d83b7bc03c2fc27e3db4a0630f6770cdbf0/hebrew_numbers/__init__.py#L56-L78