repository_name
stringlengths
5
67
func_path_in_repository
stringlengths
4
234
func_name
stringlengths
0
314
whole_func_string
stringlengths
52
3.87M
language
stringclasses
6 values
func_code_string
stringlengths
52
3.87M
func_code_tokens
listlengths
15
672k
func_documentation_string
stringlengths
1
47.2k
func_documentation_tokens
listlengths
1
3.92k
split_name
stringclasses
1 value
func_code_url
stringlengths
85
339
TAPPGuild/bitjws
bitjws/jws.py
_jws_header
def _jws_header(keyid, algorithm):
    """Build the base64url-encoded JWS header for this key and algorithm."""
    header = {
        'typ': 'JWT',
        'alg': algorithm.name,
        # 'kid' carries the serialized public part of the signing key.
        'kid': keyid,
    }
    raw = json.dumps(header, sort_keys=True).encode('utf8')
    return base64url_encode(raw)
python
def _jws_header(keyid, algorithm): """Produce a base64-encoded JWS header.""" data = { 'typ': 'JWT', 'alg': algorithm.name, # 'kid' is used to indicate the public part of the key # used during signing. 'kid': keyid } datajson = json.dumps(data, sort_keys=True).encode('utf8') return base64url_encode(datajson)
[ "def", "_jws_header", "(", "keyid", ",", "algorithm", ")", ":", "data", "=", "{", "'typ'", ":", "'JWT'", ",", "'alg'", ":", "algorithm", ".", "name", ",", "# 'kid' is used to indicate the public part of the key", "# used during signing.", "'kid'", ":", "keyid", "}...
Produce a base64-encoded JWS header.
[ "Produce", "a", "base64", "-", "encoded", "JWS", "header", "." ]
train
https://github.com/TAPPGuild/bitjws/blob/bcf943e0c60985da11fb7895a416525e63728c35/bitjws/jws.py#L70-L81
TAPPGuild/bitjws
bitjws/jws.py
_jws_payload
def _jws_payload(expire_at, requrl=None, **kwargs):
    """
    Build a base64url-encoded JWS payload.

    expire_at, if specified, must be a number indicating a timestamp
    after which the message must be rejected.

    requrl, if specified, is used as the "audience" according to the
    JWT spec.

    Any other keyword arguments are placed in the payload as-is (and
    take precedence over 'exp'/'aud' if they collide).
    """
    claims = {'exp': expire_at, 'aud': requrl}
    claims.update(kwargs)
    serialized = json.dumps(claims, sort_keys=True).encode('utf8')
    return base64url_encode(serialized)
python
def _jws_payload(expire_at, requrl=None, **kwargs): """ Produce a base64-encoded JWS payload. expire_at, if specified, must be a number that indicates a timestamp after which the message must be rejected. requrl, if specified, is used as the "audience" according to the JWT spec. Any other parameters are passed as is to the payload. """ data = { 'exp': expire_at, 'aud': requrl } data.update(kwargs) datajson = json.dumps(data, sort_keys=True).encode('utf8') return base64url_encode(datajson)
[ "def", "_jws_payload", "(", "expire_at", ",", "requrl", "=", "None", ",", "*", "*", "kwargs", ")", ":", "data", "=", "{", "'exp'", ":", "expire_at", ",", "'aud'", ":", "requrl", "}", "data", ".", "update", "(", "kwargs", ")", "datajson", "=", "json",...
Produce a base64-encoded JWS payload. expire_at, if specified, must be a number that indicates a timestamp after which the message must be rejected. requrl, if specified, is used as the "audience" according to the JWT spec. Any other parameters are passed as is to the payload.
[ "Produce", "a", "base64", "-", "encoded", "JWS", "payload", "." ]
train
https://github.com/TAPPGuild/bitjws/blob/bcf943e0c60985da11fb7895a416525e63728c35/bitjws/jws.py#L84-L103
TAPPGuild/bitjws
bitjws/jws.py
_jws_signature
def _jws_signature(signdata, privkey, algorithm):
    """
    Sign ``signdata`` with ``privkey`` using the given algorithm and
    return the signature base64url-encoded.
    """
    raw_signature = algorithm.sign(privkey, signdata)
    return base64url_encode(raw_signature)
python
def _jws_signature(signdata, privkey, algorithm): """ Produce a base64-encoded JWS signature based on the signdata specified, the privkey instance, and the algorithm passed. """ signature = algorithm.sign(privkey, signdata) return base64url_encode(signature)
[ "def", "_jws_signature", "(", "signdata", ",", "privkey", ",", "algorithm", ")", ":", "signature", "=", "algorithm", ".", "sign", "(", "privkey", ",", "signdata", ")", "return", "base64url_encode", "(", "signature", ")" ]
Produce a base64-encoded JWS signature based on the signdata specified, the privkey instance, and the algorithm passed.
[ "Produce", "a", "base64", "-", "encoded", "JWS", "signature", "based", "on", "the", "signdata", "specified", "the", "privkey", "instance", "and", "the", "algorithm", "passed", "." ]
train
https://github.com/TAPPGuild/bitjws/blob/bcf943e0c60985da11fb7895a416525e63728c35/bitjws/jws.py#L106-L112
TAPPGuild/bitjws
bitjws/jws.py
sign_serialize
def sign_serialize(privkey, expire_after=3600, requrl=None,
                   algorithm_name=DEFAULT_ALGO, **kwargs):
    """
    Produce a JWT compact serialization: header, payload, and signature
    generated with the given private key and algorithm.

    The privkey object must expose at least a ``pubkey`` member.

    expire_after is used by the server to reject the payload if it is
    received after current_time + expire_after; pass None to disable.

    requrl is optionally used by the server to reject the payload if it
    is not delivered to the proper place, e.g. if requrl is set to
    https://example.com/api/login but sent to a different server or path
    then the receiving server should reject it.

    Any other keyword arguments are passed as-is to the payload.
    """
    assert algorithm_name in ALGORITHM_AVAILABLE
    algo = ALGORITHM_AVAILABLE[algorithm_name]

    keyid = algo.pubkey_serialize(privkey.pubkey)
    header = _jws_header(keyid, algo).decode('utf8')
    payload = _build_payload(expire_after, requrl, **kwargs)

    signing_input = "{}.{}".format(header, payload)
    signature = _jws_signature(signing_input, privkey, algo).decode('utf8')
    return "{}.{}".format(signing_input, signature)
python
def sign_serialize(privkey, expire_after=3600, requrl=None, algorithm_name=DEFAULT_ALGO, **kwargs): """ Produce a JWT compact serialization by generating a header, payload, and signature using the privkey and algorithm specified. The privkey object must contain at least a member named pubkey. The parameter expire_after is used by the server to reject the payload if received after current_time + expire_after. Set it to None to disable its use. The parameter requrl is optionally used by the server to reject the payload if it is not delivered to the proper place, e.g. if requrl is set to https://example.com/api/login but sent to a different server or path then the receiving server should reject it. Any other parameters are passed as is to the payload. """ assert algorithm_name in ALGORITHM_AVAILABLE algo = ALGORITHM_AVAILABLE[algorithm_name] addy = algo.pubkey_serialize(privkey.pubkey) header = _jws_header(addy, algo).decode('utf8') payload = _build_payload(expire_after, requrl, **kwargs) signdata = "{}.{}".format(header, payload) signature = _jws_signature(signdata, privkey, algo).decode('utf8') return "{}.{}".format(signdata, signature)
[ "def", "sign_serialize", "(", "privkey", ",", "expire_after", "=", "3600", ",", "requrl", "=", "None", ",", "algorithm_name", "=", "DEFAULT_ALGO", ",", "*", "*", "kwargs", ")", ":", "assert", "algorithm_name", "in", "ALGORITHM_AVAILABLE", "algo", "=", "ALGORIT...
Produce a JWT compact serialization by generating a header, payload, and signature using the privkey and algorithm specified. The privkey object must contain at least a member named pubkey. The parameter expire_after is used by the server to reject the payload if received after current_time + expire_after. Set it to None to disable its use. The parameter requrl is optionally used by the server to reject the payload if it is not delivered to the proper place, e.g. if requrl is set to https://example.com/api/login but sent to a different server or path then the receiving server should reject it. Any other parameters are passed as is to the payload.
[ "Produce", "a", "JWT", "compact", "serialization", "by", "generating", "a", "header", "payload", "and", "signature", "using", "the", "privkey", "and", "algorithm", "specified", "." ]
train
https://github.com/TAPPGuild/bitjws/blob/bcf943e0c60985da11fb7895a416525e63728c35/bitjws/jws.py#L115-L144
TAPPGuild/bitjws
bitjws/jws.py
multisig_sign_serialize
def multisig_sign_serialize(privkeys, expire_after=3600, requrl=None,
                            algorithm_name=DEFAULT_ALGO, **kwargs):
    """
    Produce a general JSON serialization: one header/signature pair per
    private key in ``privkeys`` over a shared payload. All signatures
    are performed with the same algorithm.

    expire_after is used by the server to reject the payload if it is
    received after current_time + expire_after; pass None to disable.

    requrl is optionally used by the server to reject the payload if it
    is not delivered to the proper place.

    Any other keyword arguments are passed as-is to the payload.
    """
    assert algorithm_name in ALGORITHM_AVAILABLE
    algo = ALGORITHM_AVAILABLE[algorithm_name]

    payload = _build_payload(expire_after, requrl, **kwargs)

    signatures = []
    for key in privkeys:
        keyid = algo.pubkey_serialize(key.pubkey)
        header = _jws_header(keyid, algo).decode('utf8')
        signing_input = "{}.{}".format(header, payload)
        signature = _jws_signature(signing_input, key, algo).decode('utf8')
        signatures.append({
            "protected": header,
            "signature": signature,
        })

    return json.dumps({"payload": payload, "signatures": signatures})
python
def multisig_sign_serialize(privkeys, expire_after=3600, requrl=None, algorithm_name=DEFAULT_ALGO, **kwargs): """ Produce a general JSON serialization by generating a header, payload, and multiple signatures using the list of private keys specified. All the signatures will be performed using the same algorithm. The parameter expire_after is used by the server to reject the payload if received after current_time + expire_after. Set it to None to disable its use. The parameter requrl is optionally used by the server to reject the payload if it is not delivered to the proper place, e.g. if requrl is set to https://example.com/api/login but sent to a different server or path then the receiving server should reject it. Any other parameters are passed as is to the payload. """ assert algorithm_name in ALGORITHM_AVAILABLE payload = _build_payload(expire_after, requrl, **kwargs) result = {"payload": payload, "signatures": []} algo = ALGORITHM_AVAILABLE[algorithm_name] for pk in privkeys: addy = algo.pubkey_serialize(pk.pubkey) header = _jws_header(addy, algo).decode('utf8') signdata = "{}.{}".format(header, payload) signature = _jws_signature(signdata, pk, algo).decode('utf8') result["signatures"].append({ "protected": header, "signature": signature}) return json.dumps(result)
[ "def", "multisig_sign_serialize", "(", "privkeys", ",", "expire_after", "=", "3600", ",", "requrl", "=", "None", ",", "algorithm_name", "=", "DEFAULT_ALGO", ",", "*", "*", "kwargs", ")", ":", "assert", "algorithm_name", "in", "ALGORITHM_AVAILABLE", "payload", "=...
Produce a general JSON serialization by generating a header, payload, and multiple signatures using the list of private keys specified. All the signatures will be performed using the same algorithm. The parameter expire_after is used by the server to reject the payload if received after current_time + expire_after. Set it to None to disable its use. The parameter requrl is optionally used by the server to reject the payload if it is not delivered to the proper place, e.g. if requrl is set to https://example.com/api/login but sent to a different server or path then the receiving server should reject it. Any other parameters are passed as is to the payload.
[ "Produce", "a", "general", "JSON", "serialization", "by", "generating", "a", "header", "payload", "and", "multiple", "signatures", "using", "the", "list", "of", "private", "keys", "specified", ".", "All", "the", "signatures", "will", "be", "performed", "using", ...
train
https://github.com/TAPPGuild/bitjws/blob/bcf943e0c60985da11fb7895a416525e63728c35/bitjws/jws.py#L147-L180
TAPPGuild/bitjws
bitjws/jws.py
multisig_validate_deserialize
def multisig_validate_deserialize(rawmsg, requrl=None, check_expiration=True,
                                  decode_payload=True,
                                  algorithm_name=DEFAULT_ALGO):
    """
    Validate a general JSON serialization and return the headers and
    payload if all the signatures are good.

    If check_expiration is False, the payload will be accepted even if
    expired. If decode_payload is True this function will attempt to
    decode the payload as JSON, otherwise the raw payload is returned.
    Note that it is always decoded from base64url.
    """
    assert algorithm_name in ALGORITHM_AVAILABLE
    algo = ALGORITHM_AVAILABLE[algorithm_name]

    message = json.loads(rawmsg)
    payload64 = message.get('payload', None)
    signatures = message.get('signatures', None)

    # Structural checks on the general JWS JSON serialization.
    if payload64 is None or not isinstance(signatures, list):
        raise InvalidMessage('must contain "payload" and "signatures"')
    if not signatures:
        raise InvalidMessage('no signatures')

    try:
        payload, entries = _multisig_decode(payload64, signatures,
                                            decode_payload)
    except Exception as err:
        raise InvalidMessage(str(err))

    # Verify every signature; intentionally no short-circuit on failure.
    everything_valid = True
    try:
        for entry in entries:
            everything_valid = (_verify_signature(algorithm=algo, **entry)
                                and everything_valid)
    except Exception as err:
        raise InvalidMessage('failed to verify signature: {}'.format(err))

    if not everything_valid:
        return None, None

    if decode_payload:
        _verify_payload(payload, check_expiration, requrl)

    return [entry['header'] for entry in entries], payload
python
def multisig_validate_deserialize(rawmsg, requrl=None, check_expiration=True, decode_payload=True, algorithm_name=DEFAULT_ALGO): """ Validate a general JSON serialization and return the headers and payload if all the signatures are good. If check_expiration is False, the payload will be accepted even if expired. If decode_payload is True then this function will attempt to decode it as JSON, otherwise the raw payload will be returned. Note that it is always decoded from base64url. """ assert algorithm_name in ALGORITHM_AVAILABLE algo = ALGORITHM_AVAILABLE[algorithm_name] data = json.loads(rawmsg) payload64 = data.get('payload', None) signatures = data.get('signatures', None) if payload64 is None or not isinstance(signatures, list): raise InvalidMessage('must contain "payload" and "signatures"') if not len(signatures): raise InvalidMessage('no signatures') try: payload, sigs = _multisig_decode(payload64, signatures, decode_payload) except Exception as err: raise InvalidMessage(str(err)) all_valid = True try: for entry in sigs: valid = _verify_signature(algorithm=algo, **entry) all_valid = all_valid and valid except Exception as err: raise InvalidMessage('failed to verify signature: {}'.format(err)) if not all_valid: return None, None if decode_payload: _verify_payload(payload, check_expiration, requrl) return [entry['header'] for entry in sigs], payload
[ "def", "multisig_validate_deserialize", "(", "rawmsg", ",", "requrl", "=", "None", ",", "check_expiration", "=", "True", ",", "decode_payload", "=", "True", ",", "algorithm_name", "=", "DEFAULT_ALGO", ")", ":", "assert", "algorithm_name", "in", "ALGORITHM_AVAILABLE"...
Validate a general JSON serialization and return the headers and payload if all the signatures are good. If check_expiration is False, the payload will be accepted even if expired. If decode_payload is True then this function will attempt to decode it as JSON, otherwise the raw payload will be returned. Note that it is always decoded from base64url.
[ "Validate", "a", "general", "JSON", "serialization", "and", "return", "the", "headers", "and", "payload", "if", "all", "the", "signatures", "are", "good", "." ]
train
https://github.com/TAPPGuild/bitjws/blob/bcf943e0c60985da11fb7895a416525e63728c35/bitjws/jws.py#L183-L227
TAPPGuild/bitjws
bitjws/jws.py
validate_deserialize
def validate_deserialize(rawmsg, requrl=None, check_expiration=True,
                         decode_payload=True, algorithm_name=DEFAULT_ALGO):
    """
    Validate a JWT compact serialization and return the header and
    payload if the signature is good.

    If check_expiration is False, the payload will be accepted even if
    expired. If decode_payload is True this function will attempt to
    decode the payload as JSON, otherwise the raw payload is returned.
    Note that it is always decoded from base64url.
    """
    assert algorithm_name in ALGORITHM_AVAILABLE
    algo = ALGORITHM_AVAILABLE[algorithm_name]

    segments = rawmsg.split('.')
    if len(segments) != 3 or not all(segments):
        raise InvalidMessage('must contain 3 non-empty segments')
    header64, payload64, cryptoseg64 = segments

    try:
        signature = base64url_decode(cryptoseg64.encode('utf8'))
        payload_raw = base64url_decode(payload64.encode('utf8'))
        header_raw = base64url_decode(header64.encode('utf8'))
        header = json.loads(header_raw.decode('utf8'))
        payload = (json.loads(payload_raw.decode('utf8'))
                   if decode_payload else payload_raw)
    except Exception as err:
        raise InvalidMessage(str(err))

    signing_input = '{}.{}'.format(header64, payload64)
    try:
        valid = _verify_signature(signing_input, header, signature, algo)
    except Exception as err:
        raise InvalidMessage('failed to verify signature: {}'.format(err))

    if not valid:
        return None, None

    if decode_payload:
        _verify_payload(payload, check_expiration, requrl)

    return header, payload
python
def validate_deserialize(rawmsg, requrl=None, check_expiration=True, decode_payload=True, algorithm_name=DEFAULT_ALGO): """ Validate a JWT compact serialization and return the header and payload if the signature is good. If check_expiration is False, the payload will be accepted even if expired. If decode_payload is True then this function will attempt to decode it as JSON, otherwise the raw payload will be returned. Note that it is always decoded from base64url. """ assert algorithm_name in ALGORITHM_AVAILABLE algo = ALGORITHM_AVAILABLE[algorithm_name] segments = rawmsg.split('.') if len(segments) != 3 or not all(segments): raise InvalidMessage('must contain 3 non-empty segments') header64, payload64, cryptoseg64 = segments try: signature = base64url_decode(cryptoseg64.encode('utf8')) payload_data = base64url_decode(payload64.encode('utf8')) header_data = base64url_decode(header64.encode('utf8')) header = json.loads(header_data.decode('utf8')) if decode_payload: payload = json.loads(payload_data.decode('utf8')) else: payload = payload_data except Exception as err: raise InvalidMessage(str(err)) try: valid = _verify_signature( '{}.{}'.format(header64, payload64), header, signature, algo) except Exception as err: raise InvalidMessage('failed to verify signature: {}'.format(err)) if not valid: return None, None if decode_payload: _verify_payload(payload, check_expiration, requrl) return header, payload
[ "def", "validate_deserialize", "(", "rawmsg", ",", "requrl", "=", "None", ",", "check_expiration", "=", "True", ",", "decode_payload", "=", "True", ",", "algorithm_name", "=", "DEFAULT_ALGO", ")", ":", "assert", "algorithm_name", "in", "ALGORITHM_AVAILABLE", "algo...
Validate a JWT compact serialization and return the header and payload if the signature is good. If check_expiration is False, the payload will be accepted even if expired. If decode_payload is True then this function will attempt to decode it as JSON, otherwise the raw payload will be returned. Note that it is always decoded from base64url.
[ "Validate", "a", "JWT", "compact", "serialization", "and", "return", "the", "header", "and", "payload", "if", "the", "signature", "is", "good", "." ]
train
https://github.com/TAPPGuild/bitjws/blob/bcf943e0c60985da11fb7895a416525e63728c35/bitjws/jws.py#L230-L277
9seconds/pep3134
pep3134/py3.py
raise_
def raise_(type_, value=None, traceback=None):  # pylint: disable=W0613
    """
    Does the same as an ordinary ``raise`` with arguments does in
    Python 2, but works in Python 3 (>= 3.3) as well!

    Please check out the README on https://github.com/9seconds/pep3134
    to get an idea about possible pitfalls. The short story: be careful
    with tracebacks and, if possible, use sys.exc_info instead. In most
    cases it will work as you expect, though.
    """
    if type_.__traceback__ is traceback:
        raise type_
    raise type_.with_traceback(traceback)
python
def raise_(type_, value=None, traceback=None): # pylint: disable=W0613 """ Does the same as ordinary ``raise`` with arguments do in Python 2. But works in Python 3 (>= 3.3) also! Please checkout README on https://github.com/9seconds/pep3134 to get an idea about possible pitfals. But short story is: please be pretty carefull with tracebacks. If it is possible, use sys.exc_info instead. But in most cases it will work as you expect. """ if type_.__traceback__ is not traceback: raise type_.with_traceback(traceback) raise type_
[ "def", "raise_", "(", "type_", ",", "value", "=", "None", ",", "traceback", "=", "None", ")", ":", "# pylint: disable=W0613", "if", "type_", ".", "__traceback__", "is", "not", "traceback", ":", "raise", "type_", ".", "with_traceback", "(", "traceback", ")", ...
Does the same as an ordinary ``raise`` with arguments does in Python 2, but works in Python 3 (>= 3.3) as well! Please check out the README on https://github.com/9seconds/pep3134 to get an idea about possible pitfalls. The short story is: please be careful with tracebacks. If possible, use sys.exc_info instead. In most cases, though, it will work as you expect.
[ "Does", "the", "same", "as", "ordinary", "raise", "with", "arguments", "do", "in", "Python", "2", ".", "But", "works", "in", "Python", "3", "(", ">", "=", "3", ".", "3", ")", "also!" ]
train
https://github.com/9seconds/pep3134/blob/6b6fae903bb63cb2ac24004bb2c18ebc6a7d41d0/pep3134/py3.py#L22-L35
salesking/salesking_python_sdk
salesking/api.py
SalesKingApiBase.request
def request(self, url, method=u"get", data=None, headers=None, **kwargs):
    """
    Public entry point for performing a live request.

    Runs the pre-request hook, performs the actual request, runs the
    post-request hook, and finally maps error responses to the proper
    exceptions before returning the response.
    """
    url, method, data, headers, kwargs = self._pre_request(
        url, method=method, data=data, headers=headers, **kwargs)
    response = self._request(
        url, method=method, data=data, headers=headers, **kwargs)
    response = self._post_request(response)
    # _handle_response raises the appropriate exception on error codes.
    return self._handle_response(response)
python
def request(self, url, method = u"get", data = None, headers = None, **kwargs): """ public method for doing the live request """ url, method, data, headers, kwargs = self._pre_request(url, method=method, data=data, headers=headers, **kwargs) response = self._request(url, method=method, data=data, headers=headers, **kwargs) response = self._post_request(response) # raises the appropriate exceptions response = self._handle_response(response) return response
[ "def", "request", "(", "self", ",", "url", ",", "method", "=", "u\"get\"", ",", "data", "=", "None", ",", "headers", "=", "None", ",", "*", "*", "kwargs", ")", ":", "url", ",", "method", ",", "data", ",", "headers", ",", "kwargs", "=", "self", "....
public method for doing the live request
[ "public", "method", "for", "doing", "the", "live", "request" ]
train
https://github.com/salesking/salesking_python_sdk/blob/0d5a95c5ee4e16a85562ceaf67bb11b55e47ee4c/salesking/api.py#L60-L76
salesking/salesking_python_sdk
salesking/api.py
SalesKingApiBase._pre_request
def _pre_request(self, url, method = u"get", data = None, headers=None, **kwargs): """ hook for manipulating the _pre request data """ return (url, method, data, headers, kwargs)
python
def _pre_request(self, url, method = u"get", data = None, headers=None, **kwargs): """ hook for manipulating the _pre request data """ return (url, method, data, headers, kwargs)
[ "def", "_pre_request", "(", "self", ",", "url", ",", "method", "=", "u\"get\"", ",", "data", "=", "None", ",", "headers", "=", "None", ",", "*", "*", "kwargs", ")", ":", "return", "(", "url", ",", "method", ",", "data", ",", "headers", ",", "kwargs...
hook for manipulating the _pre request data
[ "hook", "for", "manipulating", "the", "_pre", "request", "data" ]
train
https://github.com/salesking/salesking_python_sdk/blob/0d5a95c5ee4e16a85562ceaf67bb11b55e47ee4c/salesking/api.py#L78-L82
salesking/salesking_python_sdk
salesking/api.py
APIClient._pre_request
def _pre_request(self, url, method = u"get", data = None, headers=None, **kwargs): """ hook for manipulating the _pre request data """ header = { u"Content-Type": u"application/json", u"User-Agent": u"salesking_api_py_v1", } if headers: headers.update(header) else: headers = header if url.find(self.base_url) !=0: url = u"%s%s" %(self.base_url, url) return (url, method, data, headers, kwargs)
python
def _pre_request(self, url, method = u"get", data = None, headers=None, **kwargs): """ hook for manipulating the _pre request data """ header = { u"Content-Type": u"application/json", u"User-Agent": u"salesking_api_py_v1", } if headers: headers.update(header) else: headers = header if url.find(self.base_url) !=0: url = u"%s%s" %(self.base_url, url) return (url, method, data, headers, kwargs)
[ "def", "_pre_request", "(", "self", ",", "url", ",", "method", "=", "u\"get\"", ",", "data", "=", "None", ",", "headers", "=", "None", ",", "*", "*", "kwargs", ")", ":", "header", "=", "{", "u\"Content-Type\"", ":", "u\"application/json\"", ",", "u\"User...
hook for manipulating the _pre request data
[ "hook", "for", "manipulating", "the", "_pre", "request", "data" ]
train
https://github.com/salesking/salesking_python_sdk/blob/0d5a95c5ee4e16a85562ceaf67bb11b55e47ee4c/salesking/api.py#L116-L130
salesking/salesking_python_sdk
salesking/api.py
APIClient._request
def _request(self, url, method=u"get", data=None, headers=None, **kwargs):
    """
    Perform the HTTP request via the ``requests`` library.

    OAuth bearer tokens are not implemented yet -- when ``use_oauth``
    is False, HTTP basic auth with ``sk_user``/``sk_pw`` is used.
    The underlying ``requests`` session is created lazily and reused
    across calls.
    """
    # The original built a debug message string here on every call and
    # never used it; that dead code has been removed.
    if not self.use_oauth:
        auth = (self.sk_user, self.sk_pw)
        if not self.client:
            self.client = requests.session()
        return self.client.request(method, url, headers=headers,
                                   data=data, auth=auth, **kwargs)
    # NOTE(review): relies on the legacy requests 'pre_request' hook API
    # and a module-level `oauth_hook`; modern requests versions removed
    # this hook -- confirm the pinned requests version before relying on
    # the OAuth path.
    if not self.client:
        self.client = requests.session(hooks={'pre_request': oauth_hook})
    return self.client.request(method, url, headers=headers,
                               data=data, **kwargs)
python
def _request(self, url, method = u"get", data = None, headers=None, **kwargs): """ does the request via requests - oauth not implemented yet - use basic auth please """ # if self.access_token: # auth_header = { # u"Authorization": "Bearer %s" % (self.access_token) # } # headers.update(auth_header) #basic auth msg = "method: %s url:%s\nheaders:%s\ndata:%s" % ( method, url, headers, data) #print msg if not self.use_oauth: auth = (self.sk_user, self.sk_pw) if not self.client: self.client = requests.session() r = self.client.request(method, url, headers=headers, data=data, auth=auth,**kwargs) else: if not self.client: self.client = requests.session(hooks={'pre_request': oauth_hook}) r = self.client.request(method, url, headers=headers, data=data,**kwargs) return r
[ "def", "_request", "(", "self", ",", "url", ",", "method", "=", "u\"get\"", ",", "data", "=", "None", ",", "headers", "=", "None", ",", "*", "*", "kwargs", ")", ":", "# if self.access_token:", "# auth_header = {", "# u\"Authorizati...
does the request via requests - oauth not implemented yet - use basic auth please
[ "does", "the", "request", "via", "requests", "-", "oauth", "not", "implemented", "yet", "-", "use", "basic", "auth", "please" ]
train
https://github.com/salesking/salesking_python_sdk/blob/0d5a95c5ee4e16a85562ceaf67bb11b55e47ee4c/salesking/api.py#L132-L156
salesking/salesking_python_sdk
salesking/api.py
APIClient._handle_response
def _handle_response(self, response): """ internal method to throw the correct exception if something went wrong """ status = response.status_code if status == 400: msg = u"bad request" raise exceptions.BadRequest(status, msg) elif status == 401: msg = u"authorization failed user:%s" % (self.sk_user) raise exceptions.Unauthorized(status, msg) elif status == 404: raise exceptions.NotFound() elif status == 422: msg = u"bad request" raise exceptions.BadRequest(status, msg) elif status in range(400, 500): msg = u"unexpected bad request" raise exceptions.BadRequest(status, msg) elif status in range(500, 600): raise exceptions.ServerError() return response
python
def _handle_response(self, response): """ internal method to throw the correct exception if something went wrong """ status = response.status_code if status == 400: msg = u"bad request" raise exceptions.BadRequest(status, msg) elif status == 401: msg = u"authorization failed user:%s" % (self.sk_user) raise exceptions.Unauthorized(status, msg) elif status == 404: raise exceptions.NotFound() elif status == 422: msg = u"bad request" raise exceptions.BadRequest(status, msg) elif status in range(400, 500): msg = u"unexpected bad request" raise exceptions.BadRequest(status, msg) elif status in range(500, 600): raise exceptions.ServerError() return response
[ "def", "_handle_response", "(", "self", ",", "response", ")", ":", "status", "=", "response", ".", "status_code", "if", "status", "==", "400", ":", "msg", "=", "u\"bad request\"", "raise", "exceptions", ".", "BadRequest", "(", "status", ",", "msg", ")", "e...
internal method to throw the correct exception if something went wrong
[ "internal", "method", "to", "throw", "the", "correct", "exception", "if", "something", "went", "wrong" ]
train
https://github.com/salesking/salesking_python_sdk/blob/0d5a95c5ee4e16a85562ceaf67bb11b55e47ee4c/salesking/api.py#L158-L179
rduplain/jeni-python
jeni.py
see_doc
def see_doc(obj_with_doc):
    """Decorator factory: copy *obj_with_doc*'s docstring onto the decorated callable."""
    def copy_doc(fn):
        fn.__doc__ = obj_with_doc.__doc__
        return fn
    return copy_doc
python
def see_doc(obj_with_doc): """Copy docstring from existing object to the decorated callable.""" def decorator(fn): fn.__doc__ = obj_with_doc.__doc__ return fn return decorator
[ "def", "see_doc", "(", "obj_with_doc", ")", ":", "def", "decorator", "(", "fn", ")", ":", "fn", ".", "__doc__", "=", "obj_with_doc", ".", "__doc__", "return", "fn", "return", "decorator" ]
Copy docstring from existing object to the decorated callable.
[ "Copy", "docstring", "from", "existing", "object", "to", "the", "decorated", "callable", "." ]
train
https://github.com/rduplain/jeni-python/blob/feca12ce5e4f0438ae5d7bec59d61826063594f1/jeni.py#L159-L164
rduplain/jeni-python
jeni.py
class_in_progress
def class_in_progress(stack=None):
    """True if currently inside a class definition, else False."""
    frames = inspect.stack() if stack is None else stack
    for frame_record in frames:
        # frame_record[4] is the source-context line list; may be None.
        context = frame_record[4]
        if context is None:
            continue
        if context[0].strip().startswith('class '):
            return True
    return False
python
def class_in_progress(stack=None): """True if currently inside a class definition, else False.""" if stack is None: stack = inspect.stack() for frame in stack: statement_list = frame[4] if statement_list is None: continue if statement_list[0].strip().startswith('class '): return True return False
[ "def", "class_in_progress", "(", "stack", "=", "None", ")", ":", "if", "stack", "is", "None", ":", "stack", "=", "inspect", ".", "stack", "(", ")", "for", "frame", "in", "stack", ":", "statement_list", "=", "frame", "[", "4", "]", "if", "statement_list...
True if currently inside a class definition, else False.
[ "True", "if", "currently", "inside", "a", "class", "definition", "else", "False", "." ]
train
https://github.com/rduplain/jeni-python/blob/feca12ce5e4f0438ae5d7bec59d61826063594f1/jeni.py#L850-L860
rduplain/jeni-python
jeni.py
GeneratorProvider.get
def get(self, name=None): """Get initial yield value, or result of send(name) if name given.""" if name is None: return self.init_value elif not self.support_name: msg = "generator does not support get-by-name: function {!r}" raise TypeError(msg.format(self.function)) try: value = self.generator.send(name) except StopIteration: msg = "generator didn't yield: function {!r}" raise RuntimeError(msg.format(self.function)) return value
python
def get(self, name=None): """Get initial yield value, or result of send(name) if name given.""" if name is None: return self.init_value elif not self.support_name: msg = "generator does not support get-by-name: function {!r}" raise TypeError(msg.format(self.function)) try: value = self.generator.send(name) except StopIteration: msg = "generator didn't yield: function {!r}" raise RuntimeError(msg.format(self.function)) return value
[ "def", "get", "(", "self", ",", "name", "=", "None", ")", ":", "if", "name", "is", "None", ":", "return", "self", ".", "init_value", "elif", "not", "self", ".", "support_name", ":", "msg", "=", "\"generator does not support get-by-name: function {!r}\"", "rais...
Get initial yield value, or result of send(name) if name given.
[ "Get", "initial", "yield", "value", "or", "result", "of", "send", "(", "name", ")", "if", "name", "given", "." ]
train
https://github.com/rduplain/jeni-python/blob/feca12ce5e4f0438ae5d7bec59d61826063594f1/jeni.py#L132-L144
rduplain/jeni-python
jeni.py
GeneratorProvider.close
def close(self): """Close the generator.""" if self.support_name: self.generator.close() try: next(self.generator) except StopIteration: return else: msg = "generator didn't stop: function {!r}" raise RuntimeError(msg.format(self.function))
python
def close(self): """Close the generator.""" if self.support_name: self.generator.close() try: next(self.generator) except StopIteration: return else: msg = "generator didn't stop: function {!r}" raise RuntimeError(msg.format(self.function))
[ "def", "close", "(", "self", ")", ":", "if", "self", ".", "support_name", ":", "self", ".", "generator", ".", "close", "(", ")", "try", ":", "next", "(", "self", ".", "generator", ")", "except", "StopIteration", ":", "return", "else", ":", "msg", "="...
Close the generator.
[ "Close", "the", "generator", "." ]
train
https://github.com/rduplain/jeni-python/blob/feca12ce5e4f0438ae5d7bec59d61826063594f1/jeni.py#L146-L156
rduplain/jeni-python
jeni.py
Annotator.get_annotations
def get_annotations(cls, __fn): """Get the annotations of a given callable.""" if hasattr(__fn, '__func__'): __fn = __fn.__func__ if hasattr(__fn, '__notes__'): return __fn.__notes__ raise AttributeError('{!r} does not have annotations'.format(__fn))
python
def get_annotations(cls, __fn): """Get the annotations of a given callable.""" if hasattr(__fn, '__func__'): __fn = __fn.__func__ if hasattr(__fn, '__notes__'): return __fn.__notes__ raise AttributeError('{!r} does not have annotations'.format(__fn))
[ "def", "get_annotations", "(", "cls", ",", "__fn", ")", ":", "if", "hasattr", "(", "__fn", ",", "'__func__'", ")", ":", "__fn", "=", "__fn", ".", "__func__", "if", "hasattr", "(", "__fn", ",", "'__notes__'", ")", ":", "return", "__fn", ".", "__notes__"...
Get the annotations of a given callable.
[ "Get", "the", "annotations", "of", "a", "given", "callable", "." ]
train
https://github.com/rduplain/jeni-python/blob/feca12ce5e4f0438ae5d7bec59d61826063594f1/jeni.py#L250-L256
rduplain/jeni-python
jeni.py
Annotator.set_annotations
def set_annotations(cls, __fn, *notes, **keyword_notes): """Set the annotations on the given callable.""" if hasattr(__fn, '__func__'): __fn = __fn.__func__ if hasattr(__fn, '__notes__'): msg = 'callable already has notes: {!r}' raise AttributeError(msg.format(__fn)) __fn.__notes__ = (notes, keyword_notes)
python
def set_annotations(cls, __fn, *notes, **keyword_notes): """Set the annotations on the given callable.""" if hasattr(__fn, '__func__'): __fn = __fn.__func__ if hasattr(__fn, '__notes__'): msg = 'callable already has notes: {!r}' raise AttributeError(msg.format(__fn)) __fn.__notes__ = (notes, keyword_notes)
[ "def", "set_annotations", "(", "cls", ",", "__fn", ",", "*", "notes", ",", "*", "*", "keyword_notes", ")", ":", "if", "hasattr", "(", "__fn", ",", "'__func__'", ")", ":", "__fn", "=", "__fn", ".", "__func__", "if", "hasattr", "(", "__fn", ",", "'__no...
Set the annotations on the given callable.
[ "Set", "the", "annotations", "on", "the", "given", "callable", "." ]
train
https://github.com/rduplain/jeni-python/blob/feca12ce5e4f0438ae5d7bec59d61826063594f1/jeni.py#L259-L266
rduplain/jeni-python
jeni.py
Annotator.wraps
def wraps(__fn, **kw): """Like ``functools.wraps``, with support for annotations.""" kw['assigned'] = kw.get('assigned', WRAPPER_ASSIGNMENTS) return functools.wraps(__fn, **kw)
python
def wraps(__fn, **kw): """Like ``functools.wraps``, with support for annotations.""" kw['assigned'] = kw.get('assigned', WRAPPER_ASSIGNMENTS) return functools.wraps(__fn, **kw)
[ "def", "wraps", "(", "__fn", ",", "*", "*", "kw", ")", ":", "kw", "[", "'assigned'", "]", "=", "kw", ".", "get", "(", "'assigned'", ",", "WRAPPER_ASSIGNMENTS", ")", "return", "functools", ".", "wraps", "(", "__fn", ",", "*", "*", "kw", ")" ]
Like ``functools.wraps``, with support for annotations.
[ "Like", "functools", ".", "wraps", "with", "support", "for", "annotations", "." ]
train
https://github.com/rduplain/jeni-python/blob/feca12ce5e4f0438ae5d7bec59d61826063594f1/jeni.py#L278-L281
rduplain/jeni-python
jeni.py
Annotator.partial
def partial(__fn, *a, **kw): """Wrap a note for injection of a partially applied function. This allows for annotated functions to be injected for composition:: from jeni import annotate @annotate('foo', bar=annotate.maybe('bar')) def foobar(foo, bar=None): return @annotate('foo', annotate.partial(foobar)) def bazquux(foo, fn): # fn: injector.partial(foobar) return Keyword arguments are treated as `maybe` when using partial, in order to allow partial application of only the notes which can be provided, where the caller could then apply arguments known to be unavailable in the injector. Note that with Python 3 function annotations, all annotations are injected as keyword arguments. Injections on the partial function are lazy and not applied until the injected partial function is called. See `eager_partial` to inject eagerly. """ return (PARTIAL, (__fn, a, tuple(kw.items())))
python
def partial(__fn, *a, **kw): """Wrap a note for injection of a partially applied function. This allows for annotated functions to be injected for composition:: from jeni import annotate @annotate('foo', bar=annotate.maybe('bar')) def foobar(foo, bar=None): return @annotate('foo', annotate.partial(foobar)) def bazquux(foo, fn): # fn: injector.partial(foobar) return Keyword arguments are treated as `maybe` when using partial, in order to allow partial application of only the notes which can be provided, where the caller could then apply arguments known to be unavailable in the injector. Note that with Python 3 function annotations, all annotations are injected as keyword arguments. Injections on the partial function are lazy and not applied until the injected partial function is called. See `eager_partial` to inject eagerly. """ return (PARTIAL, (__fn, a, tuple(kw.items())))
[ "def", "partial", "(", "__fn", ",", "*", "a", ",", "*", "*", "kw", ")", ":", "return", "(", "PARTIAL", ",", "(", "__fn", ",", "a", ",", "tuple", "(", "kw", ".", "items", "(", ")", ")", ")", ")" ]
Wrap a note for injection of a partially applied function. This allows for annotated functions to be injected for composition:: from jeni import annotate @annotate('foo', bar=annotate.maybe('bar')) def foobar(foo, bar=None): return @annotate('foo', annotate.partial(foobar)) def bazquux(foo, fn): # fn: injector.partial(foobar) return Keyword arguments are treated as `maybe` when using partial, in order to allow partial application of only the notes which can be provided, where the caller could then apply arguments known to be unavailable in the injector. Note that with Python 3 function annotations, all annotations are injected as keyword arguments. Injections on the partial function are lazy and not applied until the injected partial function is called. See `eager_partial` to inject eagerly.
[ "Wrap", "a", "note", "for", "injection", "of", "a", "partially", "applied", "function", "." ]
train
https://github.com/rduplain/jeni-python/blob/feca12ce5e4f0438ae5d7bec59d61826063594f1/jeni.py#L300-L326
rduplain/jeni-python
jeni.py
Annotator.partial_regardless
def partial_regardless(__fn, *a, **kw): """Wrap a note for injection of a partially applied function, or don't. Use this instead of `partial` when binding a callable that may or may not have annotations. """ return (PARTIAL_REGARDLESS, (__fn, a, tuple(kw.items())))
python
def partial_regardless(__fn, *a, **kw): """Wrap a note for injection of a partially applied function, or don't. Use this instead of `partial` when binding a callable that may or may not have annotations. """ return (PARTIAL_REGARDLESS, (__fn, a, tuple(kw.items())))
[ "def", "partial_regardless", "(", "__fn", ",", "*", "a", ",", "*", "*", "kw", ")", ":", "return", "(", "PARTIAL_REGARDLESS", ",", "(", "__fn", ",", "a", ",", "tuple", "(", "kw", ".", "items", "(", ")", ")", ")", ")" ]
Wrap a note for injection of a partially applied function, or don't. Use this instead of `partial` when binding a callable that may or may not have annotations.
[ "Wrap", "a", "note", "for", "injection", "of", "a", "partially", "applied", "function", "or", "don", "t", "." ]
train
https://github.com/rduplain/jeni-python/blob/feca12ce5e4f0438ae5d7bec59d61826063594f1/jeni.py#L329-L335
rduplain/jeni-python
jeni.py
Annotator.eager_partial
def eager_partial(__fn, *a, **kw): """Wrap a note for injection of an eagerly partially applied function. Use this instead of `partial` when eager injection is needed in place of lazy injection. """ return (EAGER_PARTIAL, (__fn, a, tuple(kw.items())))
python
def eager_partial(__fn, *a, **kw): """Wrap a note for injection of an eagerly partially applied function. Use this instead of `partial` when eager injection is needed in place of lazy injection. """ return (EAGER_PARTIAL, (__fn, a, tuple(kw.items())))
[ "def", "eager_partial", "(", "__fn", ",", "*", "a", ",", "*", "*", "kw", ")", ":", "return", "(", "EAGER_PARTIAL", ",", "(", "__fn", ",", "a", ",", "tuple", "(", "kw", ".", "items", "(", ")", ")", ")", ")" ]
Wrap a note for injection of an eagerly partially applied function. Use this instead of `partial` when eager injection is needed in place of lazy injection.
[ "Wrap", "a", "note", "for", "injection", "of", "an", "eagerly", "partially", "applied", "function", "." ]
train
https://github.com/rduplain/jeni-python/blob/feca12ce5e4f0438ae5d7bec59d61826063594f1/jeni.py#L338-L344
rduplain/jeni-python
jeni.py
Annotator.eager_partial_regardless
def eager_partial_regardless(__fn, *a, **kw): """Wrap a note for injection of an eagerly partially applied function, or don't. Use this instead of `eager_partial partial` when binding a callable that may or may not have annotations. """ return (EAGER_PARTIAL_REGARDLESS, (__fn, a, tuple(kw.items())))
python
def eager_partial_regardless(__fn, *a, **kw): """Wrap a note for injection of an eagerly partially applied function, or don't. Use this instead of `eager_partial partial` when binding a callable that may or may not have annotations. """ return (EAGER_PARTIAL_REGARDLESS, (__fn, a, tuple(kw.items())))
[ "def", "eager_partial_regardless", "(", "__fn", ",", "*", "a", ",", "*", "*", "kw", ")", ":", "return", "(", "EAGER_PARTIAL_REGARDLESS", ",", "(", "__fn", ",", "a", ",", "tuple", "(", "kw", ".", "items", "(", ")", ")", ")", ")" ]
Wrap a note for injection of an eagerly partially applied function, or don't. Use this instead of `eager_partial partial` when binding a callable that may or may not have annotations.
[ "Wrap", "a", "note", "for", "injection", "of", "an", "eagerly", "partially", "applied", "function", "or", "don", "t", "." ]
train
https://github.com/rduplain/jeni-python/blob/feca12ce5e4f0438ae5d7bec59d61826063594f1/jeni.py#L347-L353
rduplain/jeni-python
jeni.py
Injector.provider
def provider(cls, note, provider=None, name=False): """Register a provider, either a Provider class or a generator. Provider class:: from jeni import Injector as BaseInjector from jeni import Provider class Injector(BaseInjector): pass @Injector.provider('hello') class HelloProvider(Provider): def get(self, name=None): if name is None: name = 'world' return 'Hello, {}!'.format(name) Simple generator:: @Injector.provider('answer') def answer(): yield 42 If a generator supports get with a name argument:: @Injector.provider('spam', name=True) def spam(): count_str = yield 'spam' while True: count_str = yield 'spam' * int(count_str) Registration can be a decorator or a direct method call:: Injector.provider('hello', HelloProvider) """ def decorator(provider): if inspect.isgeneratorfunction(provider): # Automatically adapt generator functions provider = cls.generator_provider.bind( provider, support_name=name) return decorator(provider) cls.register(note, provider) return provider if provider is not None: decorator(provider) else: return decorator
python
def provider(cls, note, provider=None, name=False): """Register a provider, either a Provider class or a generator. Provider class:: from jeni import Injector as BaseInjector from jeni import Provider class Injector(BaseInjector): pass @Injector.provider('hello') class HelloProvider(Provider): def get(self, name=None): if name is None: name = 'world' return 'Hello, {}!'.format(name) Simple generator:: @Injector.provider('answer') def answer(): yield 42 If a generator supports get with a name argument:: @Injector.provider('spam', name=True) def spam(): count_str = yield 'spam' while True: count_str = yield 'spam' * int(count_str) Registration can be a decorator or a direct method call:: Injector.provider('hello', HelloProvider) """ def decorator(provider): if inspect.isgeneratorfunction(provider): # Automatically adapt generator functions provider = cls.generator_provider.bind( provider, support_name=name) return decorator(provider) cls.register(note, provider) return provider if provider is not None: decorator(provider) else: return decorator
[ "def", "provider", "(", "cls", ",", "note", ",", "provider", "=", "None", ",", "name", "=", "False", ")", ":", "def", "decorator", "(", "provider", ")", ":", "if", "inspect", ".", "isgeneratorfunction", "(", "provider", ")", ":", "# Automatically adapt gen...
Register a provider, either a Provider class or a generator. Provider class:: from jeni import Injector as BaseInjector from jeni import Provider class Injector(BaseInjector): pass @Injector.provider('hello') class HelloProvider(Provider): def get(self, name=None): if name is None: name = 'world' return 'Hello, {}!'.format(name) Simple generator:: @Injector.provider('answer') def answer(): yield 42 If a generator supports get with a name argument:: @Injector.provider('spam', name=True) def spam(): count_str = yield 'spam' while True: count_str = yield 'spam' * int(count_str) Registration can be a decorator or a direct method call:: Injector.provider('hello', HelloProvider)
[ "Register", "a", "provider", "either", "a", "Provider", "class", "or", "a", "generator", "." ]
train
https://github.com/rduplain/jeni-python/blob/feca12ce5e4f0438ae5d7bec59d61826063594f1/jeni.py#L427-L476
rduplain/jeni-python
jeni.py
Injector.factory
def factory(cls, note, fn=None): """Register a function as a provider. Function (name support is optional):: from jeni import Injector as BaseInjector from jeni import Provider class Injector(BaseInjector): pass @Injector.factory('echo') def echo(name=None): return name Registration can be a decorator or a direct method call:: Injector.factory('echo', echo) """ def decorator(f): provider = cls.factory_provider.bind(f) cls.register(note, provider) return f if fn is not None: decorator(fn) else: return decorator
python
def factory(cls, note, fn=None): """Register a function as a provider. Function (name support is optional):: from jeni import Injector as BaseInjector from jeni import Provider class Injector(BaseInjector): pass @Injector.factory('echo') def echo(name=None): return name Registration can be a decorator or a direct method call:: Injector.factory('echo', echo) """ def decorator(f): provider = cls.factory_provider.bind(f) cls.register(note, provider) return f if fn is not None: decorator(fn) else: return decorator
[ "def", "factory", "(", "cls", ",", "note", ",", "fn", "=", "None", ")", ":", "def", "decorator", "(", "f", ")", ":", "provider", "=", "cls", ".", "factory_provider", ".", "bind", "(", "f", ")", "cls", ".", "register", "(", "note", ",", "provider", ...
Register a function as a provider. Function (name support is optional):: from jeni import Injector as BaseInjector from jeni import Provider class Injector(BaseInjector): pass @Injector.factory('echo') def echo(name=None): return name Registration can be a decorator or a direct method call:: Injector.factory('echo', echo)
[ "Register", "a", "function", "as", "a", "provider", "." ]
train
https://github.com/rduplain/jeni-python/blob/feca12ce5e4f0438ae5d7bec59d61826063594f1/jeni.py#L479-L506
rduplain/jeni-python
jeni.py
Injector.apply
def apply(self, fn, *a, **kw): """Fully apply annotated callable, returning callable's result.""" args, kwargs = self.prepare_callable(fn) args += a; kwargs.update(kw) return fn(*args, **kwargs)
python
def apply(self, fn, *a, **kw): """Fully apply annotated callable, returning callable's result.""" args, kwargs = self.prepare_callable(fn) args += a; kwargs.update(kw) return fn(*args, **kwargs)
[ "def", "apply", "(", "self", ",", "fn", ",", "*", "a", ",", "*", "*", "kw", ")", ":", "args", ",", "kwargs", "=", "self", ".", "prepare_callable", "(", "fn", ")", "args", "+=", "a", "kwargs", ".", "update", "(", "kw", ")", "return", "fn", "(", ...
Fully apply annotated callable, returning callable's result.
[ "Fully", "apply", "annotated", "callable", "returning", "callable", "s", "result", "." ]
train
https://github.com/rduplain/jeni-python/blob/feca12ce5e4f0438ae5d7bec59d61826063594f1/jeni.py#L516-L520
rduplain/jeni-python
jeni.py
Injector.partial
def partial(self, fn, *user_args, **user_kwargs): """Return function with closure to lazily inject annotated callable. Repeat calls to the resulting function will reuse injections from the first call. Positional arguments are provided in this order: 1. positional arguments provided by injector 2. positional arguments provided in `partial_fn = partial(fn, *args)` 3. positional arguments provided in `partial_fn(*args)` Keyword arguments are resolved in this order (later override earlier): 1. keyword arguments provided by injector 2. keyword arguments provided in `partial_fn = partial(fn, **kwargs)` 3. keyword arguments provided in `partial_fn(**kargs)` Note that Python function annotations (in Python 3) are injected as keyword arguments, as documented in `annotate`, which affects the argument order here. `annotate.partial` accepts arguments in same manner as this `partial`. """ self.get_annotations(fn) # Assert has annotations. def lazy_injection_fn(*run_args, **run_kwargs): arg_pack = getattr(lazy_injection_fn, 'arg_pack', None) if arg_pack is not None: pack_args, pack_kwargs = arg_pack else: jeni_args, jeni_kwargs = self.prepare_callable(fn, partial=True) pack_args = jeni_args + user_args pack_kwargs = {} pack_kwargs.update(jeni_kwargs) pack_kwargs.update(user_kwargs) lazy_injection_fn.arg_pack = (pack_args, pack_kwargs) final_args = pack_args + run_args final_kwargs = {} final_kwargs.update(pack_kwargs) final_kwargs.update(run_kwargs) return fn(*final_args, **final_kwargs) return lazy_injection_fn
python
def partial(self, fn, *user_args, **user_kwargs): """Return function with closure to lazily inject annotated callable. Repeat calls to the resulting function will reuse injections from the first call. Positional arguments are provided in this order: 1. positional arguments provided by injector 2. positional arguments provided in `partial_fn = partial(fn, *args)` 3. positional arguments provided in `partial_fn(*args)` Keyword arguments are resolved in this order (later override earlier): 1. keyword arguments provided by injector 2. keyword arguments provided in `partial_fn = partial(fn, **kwargs)` 3. keyword arguments provided in `partial_fn(**kargs)` Note that Python function annotations (in Python 3) are injected as keyword arguments, as documented in `annotate`, which affects the argument order here. `annotate.partial` accepts arguments in same manner as this `partial`. """ self.get_annotations(fn) # Assert has annotations. def lazy_injection_fn(*run_args, **run_kwargs): arg_pack = getattr(lazy_injection_fn, 'arg_pack', None) if arg_pack is not None: pack_args, pack_kwargs = arg_pack else: jeni_args, jeni_kwargs = self.prepare_callable(fn, partial=True) pack_args = jeni_args + user_args pack_kwargs = {} pack_kwargs.update(jeni_kwargs) pack_kwargs.update(user_kwargs) lazy_injection_fn.arg_pack = (pack_args, pack_kwargs) final_args = pack_args + run_args final_kwargs = {} final_kwargs.update(pack_kwargs) final_kwargs.update(run_kwargs) return fn(*final_args, **final_kwargs) return lazy_injection_fn
[ "def", "partial", "(", "self", ",", "fn", ",", "*", "user_args", ",", "*", "*", "user_kwargs", ")", ":", "self", ".", "get_annotations", "(", "fn", ")", "# Assert has annotations.", "def", "lazy_injection_fn", "(", "*", "run_args", ",", "*", "*", "run_kwar...
Return function with closure to lazily inject annotated callable. Repeat calls to the resulting function will reuse injections from the first call. Positional arguments are provided in this order: 1. positional arguments provided by injector 2. positional arguments provided in `partial_fn = partial(fn, *args)` 3. positional arguments provided in `partial_fn(*args)` Keyword arguments are resolved in this order (later override earlier): 1. keyword arguments provided by injector 2. keyword arguments provided in `partial_fn = partial(fn, **kwargs)` 3. keyword arguments provided in `partial_fn(**kargs)` Note that Python function annotations (in Python 3) are injected as keyword arguments, as documented in `annotate`, which affects the argument order here. `annotate.partial` accepts arguments in same manner as this `partial`.
[ "Return", "function", "with", "closure", "to", "lazily", "inject", "annotated", "callable", "." ]
train
https://github.com/rduplain/jeni-python/blob/feca12ce5e4f0438ae5d7bec59d61826063594f1/jeni.py#L522-L563
rduplain/jeni-python
jeni.py
Injector.eager_partial
def eager_partial(self, fn, *a, **kw): """Partially apply annotated callable, returning a partial function. By default, `partial` is lazy so that injections only happen when they are needed. Use `eager_partial` in place of `partial` when a guarantee of injection is needed at the time the partially applied function is created. `eager_partial` resolves arguments similarly to `partial` but relies on `functools.partial` for argument resolution when calling the final partial function. """ args, kwargs = self.prepare_callable(fn, partial=True) args += a; kwargs.update(kw) return functools.partial(fn, *args, **kwargs)
python
def eager_partial(self, fn, *a, **kw): """Partially apply annotated callable, returning a partial function. By default, `partial` is lazy so that injections only happen when they are needed. Use `eager_partial` in place of `partial` when a guarantee of injection is needed at the time the partially applied function is created. `eager_partial` resolves arguments similarly to `partial` but relies on `functools.partial` for argument resolution when calling the final partial function. """ args, kwargs = self.prepare_callable(fn, partial=True) args += a; kwargs.update(kw) return functools.partial(fn, *args, **kwargs)
[ "def", "eager_partial", "(", "self", ",", "fn", ",", "*", "a", ",", "*", "*", "kw", ")", ":", "args", ",", "kwargs", "=", "self", ".", "prepare_callable", "(", "fn", ",", "partial", "=", "True", ")", "args", "+=", "a", "kwargs", ".", "update", "(...
Partially apply annotated callable, returning a partial function. By default, `partial` is lazy so that injections only happen when they are needed. Use `eager_partial` in place of `partial` when a guarantee of injection is needed at the time the partially applied function is created. `eager_partial` resolves arguments similarly to `partial` but relies on `functools.partial` for argument resolution when calling the final partial function.
[ "Partially", "apply", "annotated", "callable", "returning", "a", "partial", "function", "." ]
train
https://github.com/rduplain/jeni-python/blob/feca12ce5e4f0438ae5d7bec59d61826063594f1/jeni.py#L565-L579
rduplain/jeni-python
jeni.py
Injector.apply_regardless
def apply_regardless(self, fn, *a, **kw): """Like `apply`, but applies if callable is not annotated.""" if self.has_annotations(fn): return self.apply(fn, *a, **kw) return fn(*a, **kw)
python
def apply_regardless(self, fn, *a, **kw): """Like `apply`, but applies if callable is not annotated.""" if self.has_annotations(fn): return self.apply(fn, *a, **kw) return fn(*a, **kw)
[ "def", "apply_regardless", "(", "self", ",", "fn", ",", "*", "a", ",", "*", "*", "kw", ")", ":", "if", "self", ".", "has_annotations", "(", "fn", ")", ":", "return", "self", ".", "apply", "(", "fn", ",", "*", "a", ",", "*", "*", "kw", ")", "r...
Like `apply`, but applies if callable is not annotated.
[ "Like", "apply", "but", "applies", "if", "callable", "is", "not", "annotated", "." ]
train
https://github.com/rduplain/jeni-python/blob/feca12ce5e4f0438ae5d7bec59d61826063594f1/jeni.py#L581-L585
rduplain/jeni-python
jeni.py
Injector.partial_regardless
def partial_regardless(self, fn, *a, **kw): """Like `partial`, but applies if callable is not annotated.""" if self.has_annotations(fn): return self.partial(fn, *a, **kw) else: return functools.partial(fn, *a, **kw)
python
def partial_regardless(self, fn, *a, **kw): """Like `partial`, but applies if callable is not annotated.""" if self.has_annotations(fn): return self.partial(fn, *a, **kw) else: return functools.partial(fn, *a, **kw)
[ "def", "partial_regardless", "(", "self", ",", "fn", ",", "*", "a", ",", "*", "*", "kw", ")", ":", "if", "self", ".", "has_annotations", "(", "fn", ")", ":", "return", "self", ".", "partial", "(", "fn", ",", "*", "a", ",", "*", "*", "kw", ")", ...
Like `partial`, but applies if callable is not annotated.
[ "Like", "partial", "but", "applies", "if", "callable", "is", "not", "annotated", "." ]
train
https://github.com/rduplain/jeni-python/blob/feca12ce5e4f0438ae5d7bec59d61826063594f1/jeni.py#L587-L592
rduplain/jeni-python
jeni.py
Injector.eager_partial_regardless
def eager_partial_regardless(self, fn, *a, **kw): """Like `eager_partial`, but applies if callable is not annotated.""" if self.has_annotations(fn): return self.eager_partial(fn, *a, **kw) return functools.partial(fn, *a, **kw)
python
def eager_partial_regardless(self, fn, *a, **kw): """Like `eager_partial`, but applies if callable is not annotated.""" if self.has_annotations(fn): return self.eager_partial(fn, *a, **kw) return functools.partial(fn, *a, **kw)
[ "def", "eager_partial_regardless", "(", "self", ",", "fn", ",", "*", "a", ",", "*", "*", "kw", ")", ":", "if", "self", ".", "has_annotations", "(", "fn", ")", ":", "return", "self", ".", "eager_partial", "(", "fn", ",", "*", "a", ",", "*", "*", "...
Like `eager_partial`, but applies if callable is not annotated.
[ "Like", "eager_partial", "but", "applies", "if", "callable", "is", "not", "annotated", "." ]
train
https://github.com/rduplain/jeni-python/blob/feca12ce5e4f0438ae5d7bec59d61826063594f1/jeni.py#L594-L598
rduplain/jeni-python
jeni.py
Injector.get
def get(self, note): """Resolve a single note into an object.""" if self.closed: raise RuntimeError('{!r} already closed'.format(self)) # Record request for note even if it fails to resolve. self.stats[note] += 1 # Handle injection of partially applied annotated functions. if isinstance(note, tuple) and len(note) == 2: if note[0] == PARTIAL: fn, a, kw_items = note[1] return self.partial(fn, *a, **dict(kw_items)) elif note[0] == PARTIAL_REGARDLESS: fn, a, kw_items = note[1] return self.partial_regardless(fn, *a, **dict(kw_items)) elif note[0] == EAGER_PARTIAL: fn, a, kw_items = note[1] return self.eager_partial(fn, *a, **dict(kw_items)) elif note[0] == EAGER_PARTIAL_REGARDLESS: fn, a, kw_items = note[1] return self.eager_partial_regardless(fn, *a, **dict(kw_items)) basenote, name = self.parse_note(note) if name is None and basenote in self.values: return self.values[basenote] try: provider_factory = self.lookup(basenote) except LookupError: msg = "Unable to resolve '{}'" raise LookupError(msg.format(note)) self.instantiating.append((basenote, name)) try: if self.instantiating.count((basenote, name)) > 1: stack = ' <- '.join(repr(note) for note in self.instantiating) notes = tuple(self.instantiating) raise DependencyCycleError(stack, notes=notes) return self.handle_provider(provider_factory, note) finally: self.instantiating.pop()
python
def get(self, note): """Resolve a single note into an object.""" if self.closed: raise RuntimeError('{!r} already closed'.format(self)) # Record request for note even if it fails to resolve. self.stats[note] += 1 # Handle injection of partially applied annotated functions. if isinstance(note, tuple) and len(note) == 2: if note[0] == PARTIAL: fn, a, kw_items = note[1] return self.partial(fn, *a, **dict(kw_items)) elif note[0] == PARTIAL_REGARDLESS: fn, a, kw_items = note[1] return self.partial_regardless(fn, *a, **dict(kw_items)) elif note[0] == EAGER_PARTIAL: fn, a, kw_items = note[1] return self.eager_partial(fn, *a, **dict(kw_items)) elif note[0] == EAGER_PARTIAL_REGARDLESS: fn, a, kw_items = note[1] return self.eager_partial_regardless(fn, *a, **dict(kw_items)) basenote, name = self.parse_note(note) if name is None and basenote in self.values: return self.values[basenote] try: provider_factory = self.lookup(basenote) except LookupError: msg = "Unable to resolve '{}'" raise LookupError(msg.format(note)) self.instantiating.append((basenote, name)) try: if self.instantiating.count((basenote, name)) > 1: stack = ' <- '.join(repr(note) for note in self.instantiating) notes = tuple(self.instantiating) raise DependencyCycleError(stack, notes=notes) return self.handle_provider(provider_factory, note) finally: self.instantiating.pop()
[ "def", "get", "(", "self", ",", "note", ")", ":", "if", "self", ".", "closed", ":", "raise", "RuntimeError", "(", "'{!r} already closed'", ".", "format", "(", "self", ")", ")", "# Record request for note even if it fails to resolve.", "self", ".", "stats", "[", ...
Resolve a single note into an object.
[ "Resolve", "a", "single", "note", "into", "an", "object", "." ]
train
https://github.com/rduplain/jeni-python/blob/feca12ce5e4f0438ae5d7bec59d61826063594f1/jeni.py#L600-L641
rduplain/jeni-python
jeni.py
Injector.close
def close(self): """Close injector & injected Provider instances, including generators. Providers are closed in the reverse order in which they were opened, and each provider is only closed once. Providers are closed if accessed by the injector, even if a dependency is not successfully provided. As such, providers should determine whether or not anything needs to be done in the close method. """ if self.closed: raise RuntimeError('{!r} already closed'.format(self)) for finalizer in reversed(self.finalizers): # Note: Unable to apply injector on close method. finalizer() self.closed = True self.instances.clear() self.values.clear()
python
def close(self): """Close injector & injected Provider instances, including generators. Providers are closed in the reverse order in which they were opened, and each provider is only closed once. Providers are closed if accessed by the injector, even if a dependency is not successfully provided. As such, providers should determine whether or not anything needs to be done in the close method. """ if self.closed: raise RuntimeError('{!r} already closed'.format(self)) for finalizer in reversed(self.finalizers): # Note: Unable to apply injector on close method. finalizer() self.closed = True self.instances.clear() self.values.clear()
[ "def", "close", "(", "self", ")", ":", "if", "self", ".", "closed", ":", "raise", "RuntimeError", "(", "'{!r} already closed'", ".", "format", "(", "self", ")", ")", "for", "finalizer", "in", "reversed", "(", "self", ".", "finalizers", ")", ":", "# Note:...
Close injector & injected Provider instances, including generators. Providers are closed in the reverse order in which they were opened, and each provider is only closed once. Providers are closed if accessed by the injector, even if a dependency is not successfully provided. As such, providers should determine whether or not anything needs to be done in the close method.
[ "Close", "injector", "&", "injected", "Provider", "instances", "including", "generators", "." ]
train
https://github.com/rduplain/jeni-python/blob/feca12ce5e4f0438ae5d7bec59d61826063594f1/jeni.py#L643-L659
rduplain/jeni-python
jeni.py
Injector.prepare_callable
def prepare_callable(self, fn, partial=False): """Prepare arguments required to apply function.""" notes, keyword_notes = self.get_annotations(fn) return self.prepare_notes(*notes, __partial=partial, **keyword_notes)
python
def prepare_callable(self, fn, partial=False): """Prepare arguments required to apply function.""" notes, keyword_notes = self.get_annotations(fn) return self.prepare_notes(*notes, __partial=partial, **keyword_notes)
[ "def", "prepare_callable", "(", "self", ",", "fn", ",", "partial", "=", "False", ")", ":", "notes", ",", "keyword_notes", "=", "self", ".", "get_annotations", "(", "fn", ")", "return", "self", ".", "prepare_notes", "(", "*", "notes", ",", "__partial", "=...
Prepare arguments required to apply function.
[ "Prepare", "arguments", "required", "to", "apply", "function", "." ]
train
https://github.com/rduplain/jeni-python/blob/feca12ce5e4f0438ae5d7bec59d61826063594f1/jeni.py#L661-L664
rduplain/jeni-python
jeni.py
Injector.prepare_notes
def prepare_notes(self, *notes, **keyword_notes): """Get injection values for all given notes.""" __partial = keyword_notes.pop('__partial', False) args = tuple(self.get(note) for note in notes) kwargs = {} for arg in keyword_notes: note = keyword_notes[arg] if isinstance(note, tuple) and len(note) == 2 and note[0] == MAYBE: try: kwargs[arg] = self.get(note[1]) except LookupError: continue elif __partial: try: kwargs[arg] = self.get(note) except LookupError: continue else: kwargs[arg] = self.get(note) return args, kwargs
python
def prepare_notes(self, *notes, **keyword_notes): """Get injection values for all given notes.""" __partial = keyword_notes.pop('__partial', False) args = tuple(self.get(note) for note in notes) kwargs = {} for arg in keyword_notes: note = keyword_notes[arg] if isinstance(note, tuple) and len(note) == 2 and note[0] == MAYBE: try: kwargs[arg] = self.get(note[1]) except LookupError: continue elif __partial: try: kwargs[arg] = self.get(note) except LookupError: continue else: kwargs[arg] = self.get(note) return args, kwargs
[ "def", "prepare_notes", "(", "self", ",", "*", "notes", ",", "*", "*", "keyword_notes", ")", ":", "__partial", "=", "keyword_notes", ".", "pop", "(", "'__partial'", ",", "False", ")", "args", "=", "tuple", "(", "self", ".", "get", "(", "note", ")", "...
Get injection values for all given notes.
[ "Get", "injection", "values", "for", "all", "given", "notes", "." ]
train
https://github.com/rduplain/jeni-python/blob/feca12ce5e4f0438ae5d7bec59d61826063594f1/jeni.py#L666-L685
rduplain/jeni-python
jeni.py
Injector.parse_note
def parse_note(cls, note): """Parse string annotation into object reference with optional name.""" if isinstance(note, tuple): if len(note) != 2: raise ValueError('tuple annotations must be length 2') return note try: match = cls.re_note.match(note) except TypeError: # Note is not a string. Support any Python object as a note. return note, None return match.groups()
python
def parse_note(cls, note): """Parse string annotation into object reference with optional name.""" if isinstance(note, tuple): if len(note) != 2: raise ValueError('tuple annotations must be length 2') return note try: match = cls.re_note.match(note) except TypeError: # Note is not a string. Support any Python object as a note. return note, None return match.groups()
[ "def", "parse_note", "(", "cls", ",", "note", ")", ":", "if", "isinstance", "(", "note", ",", "tuple", ")", ":", "if", "len", "(", "note", ")", "!=", "2", ":", "raise", "ValueError", "(", "'tuple annotations must be length 2'", ")", "return", "note", "tr...
Parse string annotation into object reference with optional name.
[ "Parse", "string", "annotation", "into", "object", "reference", "with", "optional", "name", "." ]
train
https://github.com/rduplain/jeni-python/blob/feca12ce5e4f0438ae5d7bec59d61826063594f1/jeni.py#L688-L699
rduplain/jeni-python
jeni.py
Injector.handle_provider
def handle_provider(self, provider_factory, note): """Get value from provider as requested by note.""" # Implementation in separate method to support accurate book-keeping. basenote, name = self.parse_note(note) # _handle_provider could be even shorter if # Injector.apply() worked with classes, issue #9. if basenote not in self.instances: if (isinstance(provider_factory, type) and self.has_annotations(provider_factory.__init__)): args, kwargs = self.prepare_callable(provider_factory.__init__) self.instances[basenote] = provider_factory(*args, **kwargs) else: self.instances[basenote] = self.apply_regardless( provider_factory) provider = self.instances[basenote] if hasattr(provider, 'close'): self.finalizers.append(self.instances[basenote].close) provider = self.instances[basenote] get = self.partial_regardless(provider.get) try: if name is not None: return get(name=name) self.values[basenote] = get() return self.values[basenote] except UnsetError: # Use sys.exc_info to support both Python 2 and Python 3. exc_type, exc_value, tb = sys.exc_info() exc_msg = str(exc_value) if exc_msg: msg = '{}: {!r}'.format(exc_msg, note) else: msg = repr(note) six.reraise(exc_type, exc_type(msg, note=note), tb)
python
def handle_provider(self, provider_factory, note): """Get value from provider as requested by note.""" # Implementation in separate method to support accurate book-keeping. basenote, name = self.parse_note(note) # _handle_provider could be even shorter if # Injector.apply() worked with classes, issue #9. if basenote not in self.instances: if (isinstance(provider_factory, type) and self.has_annotations(provider_factory.__init__)): args, kwargs = self.prepare_callable(provider_factory.__init__) self.instances[basenote] = provider_factory(*args, **kwargs) else: self.instances[basenote] = self.apply_regardless( provider_factory) provider = self.instances[basenote] if hasattr(provider, 'close'): self.finalizers.append(self.instances[basenote].close) provider = self.instances[basenote] get = self.partial_regardless(provider.get) try: if name is not None: return get(name=name) self.values[basenote] = get() return self.values[basenote] except UnsetError: # Use sys.exc_info to support both Python 2 and Python 3. exc_type, exc_value, tb = sys.exc_info() exc_msg = str(exc_value) if exc_msg: msg = '{}: {!r}'.format(exc_msg, note) else: msg = repr(note) six.reraise(exc_type, exc_type(msg, note=note), tb)
[ "def", "handle_provider", "(", "self", ",", "provider_factory", ",", "note", ")", ":", "# Implementation in separate method to support accurate book-keeping.", "basenote", ",", "name", "=", "self", ".", "parse_note", "(", "note", ")", "# _handle_provider could be even short...
Get value from provider as requested by note.
[ "Get", "value", "from", "provider", "as", "requested", "by", "note", "." ]
train
https://github.com/rduplain/jeni-python/blob/feca12ce5e4f0438ae5d7bec59d61826063594f1/jeni.py#L701-L739
rduplain/jeni-python
jeni.py
Injector.register
def register(cls, note, provider): """Implementation to register provider via `provider` & `factory`.""" basenote, name = cls.parse_note(note) if 'provider_registry' not in vars(cls): cls.provider_registry = {} cls.provider_registry[basenote] = provider
python
def register(cls, note, provider): """Implementation to register provider via `provider` & `factory`.""" basenote, name = cls.parse_note(note) if 'provider_registry' not in vars(cls): cls.provider_registry = {} cls.provider_registry[basenote] = provider
[ "def", "register", "(", "cls", ",", "note", ",", "provider", ")", ":", "basenote", ",", "name", "=", "cls", ".", "parse_note", "(", "note", ")", "if", "'provider_registry'", "not", "in", "vars", "(", "cls", ")", ":", "cls", ".", "provider_registry", "=...
Implementation to register provider via `provider` & `factory`.
[ "Implementation", "to", "register", "provider", "via", "provider", "&", "factory", "." ]
train
https://github.com/rduplain/jeni-python/blob/feca12ce5e4f0438ae5d7bec59d61826063594f1/jeni.py#L742-L747
rduplain/jeni-python
jeni.py
Injector.lookup
def lookup(cls, basenote): """Look up note in registered annotations, walking class tree.""" # Walk method resolution order, which includes current class. for c in cls.mro(): if 'provider_registry' not in vars(c): # class is a mixin, super to base class, or never registered. continue if basenote in c.provider_registry: # note is in the registry. return c.provider_registry[basenote] raise LookupError(repr(basenote))
python
def lookup(cls, basenote): """Look up note in registered annotations, walking class tree.""" # Walk method resolution order, which includes current class. for c in cls.mro(): if 'provider_registry' not in vars(c): # class is a mixin, super to base class, or never registered. continue if basenote in c.provider_registry: # note is in the registry. return c.provider_registry[basenote] raise LookupError(repr(basenote))
[ "def", "lookup", "(", "cls", ",", "basenote", ")", ":", "# Walk method resolution order, which includes current class.", "for", "c", "in", "cls", ".", "mro", "(", ")", ":", "if", "'provider_registry'", "not", "in", "vars", "(", "c", ")", ":", "# class is a mixin...
Look up note in registered annotations, walking class tree.
[ "Look", "up", "note", "in", "registered", "annotations", "walking", "class", "tree", "." ]
train
https://github.com/rduplain/jeni-python/blob/feca12ce5e4f0438ae5d7bec59d61826063594f1/jeni.py#L750-L760
rduplain/jeni-python
jeni.py
Injector.sub
def sub(cls, *mixins_and_dicts, **values): """Create and instantiate a sub-injector. Mixins and local value dicts can be passed in as arguments. Local values can also be passed in as keyword arguments. """ class SubInjector(cls): pass mixins = [ x for x in mixins_and_dicts if isinstance(x, type) ] if mixins: SubInjector.__bases__ = tuple(mixins) + SubInjector.__bases__ dicts = [ x for x in mixins_and_dicts if not isinstance(x, type) ] for d in reversed(dicts): for k,v in d.items(): if k not in values: values[k] = v for k,v in values.items(): SubInjector.value(k, v) return SubInjector()
python
def sub(cls, *mixins_and_dicts, **values): """Create and instantiate a sub-injector. Mixins and local value dicts can be passed in as arguments. Local values can also be passed in as keyword arguments. """ class SubInjector(cls): pass mixins = [ x for x in mixins_and_dicts if isinstance(x, type) ] if mixins: SubInjector.__bases__ = tuple(mixins) + SubInjector.__bases__ dicts = [ x for x in mixins_and_dicts if not isinstance(x, type) ] for d in reversed(dicts): for k,v in d.items(): if k not in values: values[k] = v for k,v in values.items(): SubInjector.value(k, v) return SubInjector()
[ "def", "sub", "(", "cls", ",", "*", "mixins_and_dicts", ",", "*", "*", "values", ")", ":", "class", "SubInjector", "(", "cls", ")", ":", "pass", "mixins", "=", "[", "x", "for", "x", "in", "mixins_and_dicts", "if", "isinstance", "(", "x", ",", "type",...
Create and instantiate a sub-injector. Mixins and local value dicts can be passed in as arguments. Local values can also be passed in as keyword arguments.
[ "Create", "and", "instantiate", "a", "sub", "-", "injector", "." ]
train
https://github.com/rduplain/jeni-python/blob/feca12ce5e4f0438ae5d7bec59d61826063594f1/jeni.py#L790-L813
Laufire/ec
ec/modules/classes.py
_getFuncArgs
def _getFuncArgs(func): r"""Gives the details on the args of the given func. Args: func (function): The function to get details on. """ code = func.func_code Defaults = func.func_defaults nargs = code.co_argcount ArgNames = code.co_varnames[:nargs] Args = OrderedDict() argCount = len(ArgNames) defCount = len(Defaults) if Defaults else 0 diff = argCount - defCount for i in range(0, diff): Args[ArgNames[i]] = {} for i in range(diff, argCount): Args[ArgNames[i]] = {'default': Defaults[i - diff]} return Args
python
def _getFuncArgs(func): r"""Gives the details on the args of the given func. Args: func (function): The function to get details on. """ code = func.func_code Defaults = func.func_defaults nargs = code.co_argcount ArgNames = code.co_varnames[:nargs] Args = OrderedDict() argCount = len(ArgNames) defCount = len(Defaults) if Defaults else 0 diff = argCount - defCount for i in range(0, diff): Args[ArgNames[i]] = {} for i in range(diff, argCount): Args[ArgNames[i]] = {'default': Defaults[i - diff]} return Args
[ "def", "_getFuncArgs", "(", "func", ")", ":", "code", "=", "func", ".", "func_code", "Defaults", "=", "func", ".", "func_defaults", "nargs", "=", "code", ".", "co_argcount", "ArgNames", "=", "code", ".", "co_varnames", "[", ":", "nargs", "]", "Args", "="...
r"""Gives the details on the args of the given func. Args: func (function): The function to get details on.
[ "r", "Gives", "the", "details", "on", "the", "args", "of", "the", "given", "func", "." ]
train
https://github.com/Laufire/ec/blob/63e84a1daef9234487d7de538e5da233a7d13071/ec/modules/classes.py#L209-L232
clinicedc/edc-form-label
edc_form_label/form_label.py
FormLabel.get_form_label
def get_form_label(self, request=None, obj=None, model=None, form=None): """Returns a customized form label, if condition is met, otherwise returns the default form label. * condition is an instance of CustomLabelCondition. """ label = form.base_fields[self.field].label condition = self.condition_cls(request=request, obj=obj, model=model) if condition.check(): additional_opts = condition.get_additional_options( request=request, obj=obj, model=model ) visit_datetime = "" if obj: visit_datetime = getattr( obj, obj.visit_model_attr() ).report_datetime.strftime("%B %Y") try: label = self.custom_label.format( appointment=condition.appointment, previous_appointment=condition.previous_appointment, previous_obj=condition.previous_obj, previous_visit=condition.previous_visit, visit_datetime=visit_datetime, **additional_opts, ) except KeyError as e: raise CustomFormLabelError( f"Custom label template has invalid keys. See {label}. Got {e}." ) return label
python
def get_form_label(self, request=None, obj=None, model=None, form=None): """Returns a customized form label, if condition is met, otherwise returns the default form label. * condition is an instance of CustomLabelCondition. """ label = form.base_fields[self.field].label condition = self.condition_cls(request=request, obj=obj, model=model) if condition.check(): additional_opts = condition.get_additional_options( request=request, obj=obj, model=model ) visit_datetime = "" if obj: visit_datetime = getattr( obj, obj.visit_model_attr() ).report_datetime.strftime("%B %Y") try: label = self.custom_label.format( appointment=condition.appointment, previous_appointment=condition.previous_appointment, previous_obj=condition.previous_obj, previous_visit=condition.previous_visit, visit_datetime=visit_datetime, **additional_opts, ) except KeyError as e: raise CustomFormLabelError( f"Custom label template has invalid keys. See {label}. Got {e}." ) return label
[ "def", "get_form_label", "(", "self", ",", "request", "=", "None", ",", "obj", "=", "None", ",", "model", "=", "None", ",", "form", "=", "None", ")", ":", "label", "=", "form", ".", "base_fields", "[", "self", ".", "field", "]", ".", "label", "cond...
Returns a customized form label, if condition is met, otherwise returns the default form label. * condition is an instance of CustomLabelCondition.
[ "Returns", "a", "customized", "form", "label", "if", "condition", "is", "met", "otherwise", "returns", "the", "default", "form", "label", "." ]
train
https://github.com/clinicedc/edc-form-label/blob/9d90807ddf784045b3867d676bee6e384a8e9d71/edc_form_label/form_label.py#L10-L40
earlye/nephele
nephele/AwsRoot.py
AwsRoot.do_stack
def do_stack(self,args): """Go to the specified stack. stack -h for detailed help""" parser = CommandArgumentParser("stack") parser.add_argument(dest='stack',help='stack index or name'); parser.add_argument('-a','--asg',dest='asg',help='descend into specified asg'); args = vars(parser.parse_args(args)) try: index = int(args['stack']) if self.stackList == None: self.do_stacks('-s') stack = AwsConnectionFactory.instance.getCfResource().Stack(self.stackList[index]['StackName']) except ValueError: stack = AwsConnectionFactory.instance.getCfResource().Stack(args['stack']) if 'asg' in args: AwsProcessor.processorFactory.Stack(stack,stack.name,self).onecmd('asg {}'.format(args['asg'])) AwsProcessor.processorFactory.Stack(stack,stack.name,self).cmdloop()
python
def do_stack(self,args): """Go to the specified stack. stack -h for detailed help""" parser = CommandArgumentParser("stack") parser.add_argument(dest='stack',help='stack index or name'); parser.add_argument('-a','--asg',dest='asg',help='descend into specified asg'); args = vars(parser.parse_args(args)) try: index = int(args['stack']) if self.stackList == None: self.do_stacks('-s') stack = AwsConnectionFactory.instance.getCfResource().Stack(self.stackList[index]['StackName']) except ValueError: stack = AwsConnectionFactory.instance.getCfResource().Stack(args['stack']) if 'asg' in args: AwsProcessor.processorFactory.Stack(stack,stack.name,self).onecmd('asg {}'.format(args['asg'])) AwsProcessor.processorFactory.Stack(stack,stack.name,self).cmdloop()
[ "def", "do_stack", "(", "self", ",", "args", ")", ":", "parser", "=", "CommandArgumentParser", "(", "\"stack\"", ")", "parser", ".", "add_argument", "(", "dest", "=", "'stack'", ",", "help", "=", "'stack index or name'", ")", "parser", ".", "add_argument", "...
Go to the specified stack. stack -h for detailed help
[ "Go", "to", "the", "specified", "stack", ".", "stack", "-", "h", "for", "detailed", "help" ]
train
https://github.com/earlye/nephele/blob/a7dadc68f4124671457f09119419978c4d22013e/nephele/AwsRoot.py#L13-L31
earlye/nephele
nephele/AwsRoot.py
AwsRoot.do_delete_stack
def do_delete_stack(self,args): """Delete specified stack. delete_stack -h for detailed help.""" parser = CommandArgumentParser("delete_stack") parser.add_argument(dest='stack',help='stack index or name'); args = vars(parser.parse_args(args)) try: index = int(args['stack']) if self.stackList == None: self.do_stacks('-s') stack = AwsConnectionFactory.instance.getCfResource().Stack(self.stackList[index]['StackName']) except ValueError: stack = AwsConnectionFactory.instance.getCfResource().Stack(args['stack']) print "Here are the details of the stack you are about to delete:" print "Stack.name: {}".format(stack.name) print "Stack.stack_id: {}".format(stack.stack_id) print "Stack.creation_time: {}".format(stack.creation_time) confirmation = raw_input("If you are sure, enter the Stack.name here: ") if stack.name == confirmation: stack.delete() print "Stack deletion in progress" else: print "Stack deletion canceled: '{}' != '{}'".format(stack.name,confirmation)
python
def do_delete_stack(self,args): """Delete specified stack. delete_stack -h for detailed help.""" parser = CommandArgumentParser("delete_stack") parser.add_argument(dest='stack',help='stack index or name'); args = vars(parser.parse_args(args)) try: index = int(args['stack']) if self.stackList == None: self.do_stacks('-s') stack = AwsConnectionFactory.instance.getCfResource().Stack(self.stackList[index]['StackName']) except ValueError: stack = AwsConnectionFactory.instance.getCfResource().Stack(args['stack']) print "Here are the details of the stack you are about to delete:" print "Stack.name: {}".format(stack.name) print "Stack.stack_id: {}".format(stack.stack_id) print "Stack.creation_time: {}".format(stack.creation_time) confirmation = raw_input("If you are sure, enter the Stack.name here: ") if stack.name == confirmation: stack.delete() print "Stack deletion in progress" else: print "Stack deletion canceled: '{}' != '{}'".format(stack.name,confirmation)
[ "def", "do_delete_stack", "(", "self", ",", "args", ")", ":", "parser", "=", "CommandArgumentParser", "(", "\"delete_stack\"", ")", "parser", ".", "add_argument", "(", "dest", "=", "'stack'", ",", "help", "=", "'stack index or name'", ")", "args", "=", "vars",...
Delete specified stack. delete_stack -h for detailed help.
[ "Delete", "specified", "stack", ".", "delete_stack", "-", "h", "for", "detailed", "help", "." ]
train
https://github.com/earlye/nephele/blob/a7dadc68f4124671457f09119419978c4d22013e/nephele/AwsRoot.py#L33-L56
earlye/nephele
nephele/AwsRoot.py
AwsRoot.do_stacks
def do_stacks(self,args): """List available stacks. stacks -h for detailed help.""" parser = CommandArgumentParser() parser.add_argument('-s','--silent',dest='silent',action='store_true',help='Run silently') parser.add_argument('-i','--include',nargs='*',dest='includes',default=[],help='Add statuses') parser.add_argument('-e','--exclude',nargs='*',dest='excludes',default=[],help='Remove statuses') parser.add_argument('--summary',dest='summary',action='store_true',default=False,help='Show just a summary') parser.add_argument(dest='filters',nargs='*',default=["*"],help='Filter stacks') args = vars(parser.parse_args(args)) nextToken = None includes = args['includes'] excludes = args['excludes'] filters = args['filters'] global stackStatusFilter for i in includes: if not i in stackStatusFilter: stackStatusFilter.append(i) for e in excludes: stackStatusFilter.remove(e) complete = False; stackSummaries = [] while not complete: if None == nextToken: stacks = AwsConnectionFactory.getCfClient().list_stacks(StackStatusFilter=stackStatusFilter) else: stacks = AwsConnectionFactory.getCfClient().list_stacks(NextToken=nextToken,StackStatusFilter=stackStatusFilter) #pprint(stacks) if not 'NextToken' in stacks: complete = True; else: nextToken = stacks['NextToken'] if 'StackSummaries' in stacks: stackSummaries.extend(stacks['StackSummaries']) stackSummaries = filter( lambda x: fnmatches(x['StackName'],filters),stackSummaries) stackSummaries = sorted(stackSummaries, key= lambda entry: entry['StackName']) index = 0; stackSummariesByIndex = {} for summary in stackSummaries: summary['Index'] = index stackSummariesByIndex[index] = summary index += 1 self.stackList = stackSummariesByIndex if not (args['silent'] or args['summary']): for index,summary in stackSummariesByIndex.items(): print '{0:3d}: {2:20} {1:40} {3}'.format(summary['Index'],summary['StackName'],summary['StackStatus'],defaultifyDict(summary,'StackStatusReason','')) if args['summary'] and not args['silent']: print '{} 
stacks'.format(len(stackSummariesByIndex))
python
def do_stacks(self,args): """List available stacks. stacks -h for detailed help.""" parser = CommandArgumentParser() parser.add_argument('-s','--silent',dest='silent',action='store_true',help='Run silently') parser.add_argument('-i','--include',nargs='*',dest='includes',default=[],help='Add statuses') parser.add_argument('-e','--exclude',nargs='*',dest='excludes',default=[],help='Remove statuses') parser.add_argument('--summary',dest='summary',action='store_true',default=False,help='Show just a summary') parser.add_argument(dest='filters',nargs='*',default=["*"],help='Filter stacks') args = vars(parser.parse_args(args)) nextToken = None includes = args['includes'] excludes = args['excludes'] filters = args['filters'] global stackStatusFilter for i in includes: if not i in stackStatusFilter: stackStatusFilter.append(i) for e in excludes: stackStatusFilter.remove(e) complete = False; stackSummaries = [] while not complete: if None == nextToken: stacks = AwsConnectionFactory.getCfClient().list_stacks(StackStatusFilter=stackStatusFilter) else: stacks = AwsConnectionFactory.getCfClient().list_stacks(NextToken=nextToken,StackStatusFilter=stackStatusFilter) #pprint(stacks) if not 'NextToken' in stacks: complete = True; else: nextToken = stacks['NextToken'] if 'StackSummaries' in stacks: stackSummaries.extend(stacks['StackSummaries']) stackSummaries = filter( lambda x: fnmatches(x['StackName'],filters),stackSummaries) stackSummaries = sorted(stackSummaries, key= lambda entry: entry['StackName']) index = 0; stackSummariesByIndex = {} for summary in stackSummaries: summary['Index'] = index stackSummariesByIndex[index] = summary index += 1 self.stackList = stackSummariesByIndex if not (args['silent'] or args['summary']): for index,summary in stackSummariesByIndex.items(): print '{0:3d}: {2:20} {1:40} {3}'.format(summary['Index'],summary['StackName'],summary['StackStatus'],defaultifyDict(summary,'StackStatusReason','')) if args['summary'] and not args['silent']: print '{} 
stacks'.format(len(stackSummariesByIndex))
[ "def", "do_stacks", "(", "self", ",", "args", ")", ":", "parser", "=", "CommandArgumentParser", "(", ")", "parser", ".", "add_argument", "(", "'-s'", ",", "'--silent'", ",", "dest", "=", "'silent'", ",", "action", "=", "'store_true'", ",", "help", "=", "...
List available stacks. stacks -h for detailed help.
[ "List", "available", "stacks", ".", "stacks", "-", "h", "for", "detailed", "help", "." ]
train
https://github.com/earlye/nephele/blob/a7dadc68f4124671457f09119419978c4d22013e/nephele/AwsRoot.py#L58-L112
earlye/nephele
nephele/AwsRoot.py
AwsRoot.do_stack_resource
def do_stack_resource(self, args): """Use specified stack resource. stack_resource -h for detailed help.""" parser = CommandArgumentParser() parser.add_argument('-s','--stack-name',dest='stack-name',help='name of the stack resource'); parser.add_argument('-i','--logical-id',dest='logical-id',help='logical id of the child resource'); args = vars(parser.parse_args(args)) stackName = args['stack-name'] logicalId = args['logical-id'] self.stackResource(stackName,logicalId)
python
def do_stack_resource(self, args): """Use specified stack resource. stack_resource -h for detailed help.""" parser = CommandArgumentParser() parser.add_argument('-s','--stack-name',dest='stack-name',help='name of the stack resource'); parser.add_argument('-i','--logical-id',dest='logical-id',help='logical id of the child resource'); args = vars(parser.parse_args(args)) stackName = args['stack-name'] logicalId = args['logical-id'] self.stackResource(stackName,logicalId)
[ "def", "do_stack_resource", "(", "self", ",", "args", ")", ":", "parser", "=", "CommandArgumentParser", "(", ")", "parser", ".", "add_argument", "(", "'-s'", ",", "'--stack-name'", ",", "dest", "=", "'stack-name'", ",", "help", "=", "'name of the stack resource'...
Use specified stack resource. stack_resource -h for detailed help.
[ "Use", "specified", "stack", "resource", ".", "stack_resource", "-", "h", "for", "detailed", "help", "." ]
train
https://github.com/earlye/nephele/blob/a7dadc68f4124671457f09119419978c4d22013e/nephele/AwsRoot.py#L114-L124
ttm/socialLegacy
social/tw.py
Twitter.searchTag
def searchTag(self,HTAG="#python"): """Set Twitter search or stream criteria for the selection of tweets""" self.t = Twython(app_key =self.app_key , app_secret =self.app_secret , oauth_token =self.oauth_token , oauth_token_secret =self.oauth_token_secret) search =self.t.search(q=HTAG,count=100,result_type="recent") ss=search[:] search = self.t.search(q=HTAG,count=150,max_id=ss[-1]['id']-1,result_type="recent") #search = t.search(q=HTAG,count=150,since_id=ss[-1]['id'],result_type="recent") while seach: ss+=search[:] search = self.t.search(q=HTAG,count=150,max_id=ss[-1]['id']-1,result_type="recent") self.ss=ss
python
def searchTag(self,HTAG="#python"): """Set Twitter search or stream criteria for the selection of tweets""" self.t = Twython(app_key =self.app_key , app_secret =self.app_secret , oauth_token =self.oauth_token , oauth_token_secret =self.oauth_token_secret) search =self.t.search(q=HTAG,count=100,result_type="recent") ss=search[:] search = self.t.search(q=HTAG,count=150,max_id=ss[-1]['id']-1,result_type="recent") #search = t.search(q=HTAG,count=150,since_id=ss[-1]['id'],result_type="recent") while seach: ss+=search[:] search = self.t.search(q=HTAG,count=150,max_id=ss[-1]['id']-1,result_type="recent") self.ss=ss
[ "def", "searchTag", "(", "self", ",", "HTAG", "=", "\"#python\"", ")", ":", "self", ".", "t", "=", "Twython", "(", "app_key", "=", "self", ".", "app_key", ",", "app_secret", "=", "self", ".", "app_secret", ",", "oauth_token", "=", "self", ".", "oauth_t...
Set Twitter search or stream criteria for the selection of tweets
[ "Set", "Twitter", "search", "or", "stream", "criteria", "for", "the", "selection", "of", "tweets" ]
train
https://github.com/ttm/socialLegacy/blob/c0930cfe6e84392729449bf7c92569e1556fd109/social/tw.py#L293-L307
BD2KOnFHIR/i2b2model
i2b2model/data/i2b2observationfact.py
ObservationFact.delete_upload_id
def delete_upload_id(cls, tables: I2B2Tables, upload_id: int) -> int: """ Delete all observation_fact records with the supplied upload_id :param tables: i2b2 sql connection :param upload_id: upload identifier to remove :return: number or records that were deleted """ return cls._delete_upload_id(tables.crc_connection, tables.observation_fact, upload_id)
python
def delete_upload_id(cls, tables: I2B2Tables, upload_id: int) -> int: """ Delete all observation_fact records with the supplied upload_id :param tables: i2b2 sql connection :param upload_id: upload identifier to remove :return: number or records that were deleted """ return cls._delete_upload_id(tables.crc_connection, tables.observation_fact, upload_id)
[ "def", "delete_upload_id", "(", "cls", ",", "tables", ":", "I2B2Tables", ",", "upload_id", ":", "int", ")", "->", "int", ":", "return", "cls", ".", "_delete_upload_id", "(", "tables", ".", "crc_connection", ",", "tables", ".", "observation_fact", ",", "uploa...
Delete all observation_fact records with the supplied upload_id :param tables: i2b2 sql connection :param upload_id: upload identifier to remove :return: number or records that were deleted
[ "Delete", "all", "observation_fact", "records", "with", "the", "supplied", "upload_id", ":", "param", "tables", ":", "i2b2", "sql", "connection", ":", "param", "upload_id", ":", "upload", "identifier", "to", "remove", ":", "return", ":", "number", "or", "recor...
train
https://github.com/BD2KOnFHIR/i2b2model/blob/9d49bb53b0733dd83ab5b716014865e270a3c903/i2b2model/data/i2b2observationfact.py#L84-L91
BD2KOnFHIR/i2b2model
i2b2model/data/i2b2observationfact.py
ObservationFact.delete_sourcesystem_cd
def delete_sourcesystem_cd(cls, tables: I2B2Tables, sourcesystem_cd: str) -> int: """ Delete all records with the supplied sourcesystem_cd :param tables: i2b2 sql connection :param sourcesystem_cd: sourcesystem_cd to remove :return: number or records that were deleted """ return cls._delete_sourcesystem_cd(tables.crc_connection, tables.observation_fact, sourcesystem_cd)
python
def delete_sourcesystem_cd(cls, tables: I2B2Tables, sourcesystem_cd: str) -> int: """ Delete all records with the supplied sourcesystem_cd :param tables: i2b2 sql connection :param sourcesystem_cd: sourcesystem_cd to remove :return: number or records that were deleted """ return cls._delete_sourcesystem_cd(tables.crc_connection, tables.observation_fact, sourcesystem_cd)
[ "def", "delete_sourcesystem_cd", "(", "cls", ",", "tables", ":", "I2B2Tables", ",", "sourcesystem_cd", ":", "str", ")", "->", "int", ":", "return", "cls", ".", "_delete_sourcesystem_cd", "(", "tables", ".", "crc_connection", ",", "tables", ".", "observation_fact...
Delete all records with the supplied sourcesystem_cd :param tables: i2b2 sql connection :param sourcesystem_cd: sourcesystem_cd to remove :return: number or records that were deleted
[ "Delete", "all", "records", "with", "the", "supplied", "sourcesystem_cd", ":", "param", "tables", ":", "i2b2", "sql", "connection", ":", "param", "sourcesystem_cd", ":", "sourcesystem_cd", "to", "remove", ":", "return", ":", "number", "or", "records", "that", ...
train
https://github.com/BD2KOnFHIR/i2b2model/blob/9d49bb53b0733dd83ab5b716014865e270a3c903/i2b2model/data/i2b2observationfact.py#L94-L101
BD2KOnFHIR/i2b2model
i2b2model/data/i2b2observationfact.py
ObservationFact.add_or_update_records
def add_or_update_records(cls, tables: I2B2Tables, records: List["ObservationFact"]) -> Tuple[int, int]: """ Add or update the observation_fact table as needed to reflect the contents of records :param tables: i2b2 sql connection :param records: records to apply :return: number of records added / modified """ return cls._add_or_update_records(tables.crc_connection, tables.observation_fact, records)
python
def add_or_update_records(cls, tables: I2B2Tables, records: List["ObservationFact"]) -> Tuple[int, int]: """ Add or update the observation_fact table as needed to reflect the contents of records :param tables: i2b2 sql connection :param records: records to apply :return: number of records added / modified """ return cls._add_or_update_records(tables.crc_connection, tables.observation_fact, records)
[ "def", "add_or_update_records", "(", "cls", ",", "tables", ":", "I2B2Tables", ",", "records", ":", "List", "[", "\"ObservationFact\"", "]", ")", "->", "Tuple", "[", "int", ",", "int", "]", ":", "return", "cls", ".", "_add_or_update_records", "(", "tables", ...
Add or update the observation_fact table as needed to reflect the contents of records :param tables: i2b2 sql connection :param records: records to apply :return: number of records added / modified
[ "Add", "or", "update", "the", "observation_fact", "table", "as", "needed", "to", "reflect", "the", "contents", "of", "records", ":", "param", "tables", ":", "i2b2", "sql", "connection", ":", "param", "records", ":", "records", "to", "apply", ":", "return", ...
train
https://github.com/BD2KOnFHIR/i2b2model/blob/9d49bb53b0733dd83ab5b716014865e270a3c903/i2b2model/data/i2b2observationfact.py#L104-L111
BD2KOnFHIR/i2b2model
i2b2model/data/i2b2observationfact.py
ObservationFact._date_val
def _date_val(self, dt: datetime) -> None: """ Add a date value :param dt: datetime to add """ self._tval_char = dt.strftime('%Y-%m-%d %H:%M') self._nval_num = (dt.year * 10000) + (dt.month * 100) + dt.day + \ (((dt.hour / 100.0) + (dt.minute / 10000.0)) if isinstance(dt, datetime) else 0)
python
def _date_val(self, dt: datetime) -> None: """ Add a date value :param dt: datetime to add """ self._tval_char = dt.strftime('%Y-%m-%d %H:%M') self._nval_num = (dt.year * 10000) + (dt.month * 100) + dt.day + \ (((dt.hour / 100.0) + (dt.minute / 10000.0)) if isinstance(dt, datetime) else 0)
[ "def", "_date_val", "(", "self", ",", "dt", ":", "datetime", ")", "->", "None", ":", "self", ".", "_tval_char", "=", "dt", ".", "strftime", "(", "'%Y-%m-%d %H:%M'", ")", "self", ".", "_nval_num", "=", "(", "dt", ".", "year", "*", "10000", ")", "+", ...
Add a date value :param dt: datetime to add
[ "Add", "a", "date", "value", ":", "param", "dt", ":", "datetime", "to", "add" ]
train
https://github.com/BD2KOnFHIR/i2b2model/blob/9d49bb53b0733dd83ab5b716014865e270a3c903/i2b2model/data/i2b2observationfact.py#L113-L120
reflexsc/reflex
src/rfxmon/__init__.py
Monitor.configure
def configure(self, config): """ Configure Monitor, pull list of what to monitor, initialize threads """ self.config = config self.update_monitors() # initialize thread pools for profile in ('worker', 'result'): for _ in range(config['threads'][profile]['number']): worker = threading.Thread(target=config['threads'][profile]['function']) worker.daemon = True worker.start() # send a heartbeat right away self.heartbeat() # setup interval jobs self.refresh_stopper = set_interval(config['interval']['refresh']*1000, self.update_monitors) self.heartbeat_stopper = set_interval(config['interval']['heartbeat']*1000, self.heartbeat) self.reporting_stopper = set_interval(config['interval']['reporting']*1000, self.reporting) return self
python
def configure(self, config): """ Configure Monitor, pull list of what to monitor, initialize threads """ self.config = config self.update_monitors() # initialize thread pools for profile in ('worker', 'result'): for _ in range(config['threads'][profile]['number']): worker = threading.Thread(target=config['threads'][profile]['function']) worker.daemon = True worker.start() # send a heartbeat right away self.heartbeat() # setup interval jobs self.refresh_stopper = set_interval(config['interval']['refresh']*1000, self.update_monitors) self.heartbeat_stopper = set_interval(config['interval']['heartbeat']*1000, self.heartbeat) self.reporting_stopper = set_interval(config['interval']['reporting']*1000, self.reporting) return self
[ "def", "configure", "(", "self", ",", "config", ")", ":", "self", ".", "config", "=", "config", "self", ".", "update_monitors", "(", ")", "# initialize thread pools", "for", "profile", "in", "(", "'worker'", ",", "'result'", ")", ":", "for", "_", "in", "...
Configure Monitor, pull list of what to monitor, initialize threads
[ "Configure", "Monitor", "pull", "list", "of", "what", "to", "monitor", "initialize", "threads" ]
train
https://github.com/reflexsc/reflex/blob/cee6b0ccfef395ca5e157d644a2e3252cea9fe62/src/rfxmon/__init__.py#L94-L119
reflexsc/reflex
src/rfxmon/__init__.py
Monitor.feed_monitors
def feed_monitors(self): """ Pull from the cached monitors data and feed the workers queue. Run every interval (refresh:test). """ self.thread_debug("Filling worker queue...", module='feed_monitors') for mon in self.monitors: self.thread_debug(" Adding " + mon['title']) self.workers_queue.put(mon)
python
def feed_monitors(self): """ Pull from the cached monitors data and feed the workers queue. Run every interval (refresh:test). """ self.thread_debug("Filling worker queue...", module='feed_monitors') for mon in self.monitors: self.thread_debug(" Adding " + mon['title']) self.workers_queue.put(mon)
[ "def", "feed_monitors", "(", "self", ")", ":", "self", ".", "thread_debug", "(", "\"Filling worker queue...\"", ",", "module", "=", "'feed_monitors'", ")", "for", "mon", "in", "self", ".", "monitors", ":", "self", ".", "thread_debug", "(", "\" Adding \"", "...
Pull from the cached monitors data and feed the workers queue. Run every interval (refresh:test).
[ "Pull", "from", "the", "cached", "monitors", "data", "and", "feed", "the", "workers", "queue", ".", "Run", "every", "interval", "(", "refresh", ":", "test", ")", "." ]
train
https://github.com/reflexsc/reflex/blob/cee6b0ccfef395ca5e157d644a2e3252cea9fe62/src/rfxmon/__init__.py#L122-L130
reflexsc/reflex
src/rfxmon/__init__.py
Monitor.start
def start(self): """ The main loop, run forever. """ while True: self.thread_debug("Interval starting") for thr in threading.enumerate(): self.thread_debug(" " + str(thr)) self.feed_monitors() start = time.time() # wait fore queue to empty self.workers_queue.join() end = time.time() diff = self.config['interval']['test'] - (end - start) if diff <= 0: # alarm self.stats.procwin = -diff self.thread_debug("Cannot keep up with tests! {} seconds late" .format(abs(diff))) else: self.thread_debug("waiting {} seconds...".format(diff)) time.sleep(diff)
python
def start(self): """ The main loop, run forever. """ while True: self.thread_debug("Interval starting") for thr in threading.enumerate(): self.thread_debug(" " + str(thr)) self.feed_monitors() start = time.time() # wait fore queue to empty self.workers_queue.join() end = time.time() diff = self.config['interval']['test'] - (end - start) if diff <= 0: # alarm self.stats.procwin = -diff self.thread_debug("Cannot keep up with tests! {} seconds late" .format(abs(diff))) else: self.thread_debug("waiting {} seconds...".format(diff)) time.sleep(diff)
[ "def", "start", "(", "self", ")", ":", "while", "True", ":", "self", ".", "thread_debug", "(", "\"Interval starting\"", ")", "for", "thr", "in", "threading", ".", "enumerate", "(", ")", ":", "self", ".", "thread_debug", "(", "\" \"", "+", "str", "(", ...
The main loop, run forever.
[ "The", "main", "loop", "run", "forever", "." ]
train
https://github.com/reflexsc/reflex/blob/cee6b0ccfef395ca5e157d644a2e3252cea9fe62/src/rfxmon/__init__.py#L133-L154
reflexsc/reflex
src/rfxmon/__init__.py
Monitor.update_monitors
def update_monitors(self): """ Periodically check in with Reflex Engine and refresh the list of what to monitor """ self.thread_debug("Starting monitor refresh", module="update_monitors") # need to make a more efficient way of doing this via Reflex Engine monitors = [] self.rcs.cache_reset() svcs = self.rcs.cache_list('service', cols=['pipeline', 'name', 'active-instances']) for svc in svcs: try: pipeline = self.rcs.cache_get('pipeline', svc['pipeline']) for mon in pipeline.get('monitor', []): self.DEBUG("monitor {}".format(mon)) mon['service'] = svc['name'] mon['pipeline'] = svc['pipeline'] for inst_name in svc.get('active-instances', []): inst = self.rcs.cache_get('instance', inst_name) # todo: insert: macro flatten mymon = mon.copy() mymon['instance'] = inst_name mymon['target'] = inst['address'] mymon['title'] = svc['name'] + ": " + mon['name'] monitors.append(mymon) except KeyboardInterrupt: raise except: # pylint: disable=bare-except self.NOTIFY("Error in processing monitor:", err=traceback.format_exc()) self.NOTIFY("Refreshed monitors", total_monitors=len(monitors)) self.DEBUG("Monitors", monitors=monitors) # mutex / threadsafe? self.monitors = monitors cache = self.rcs._cache # pylint: disable=protected-access self.instances = cache['instance'] self.services = cache['service'] self.pipelines = cache['pipeline'] self.thread_debug("Refresh complete", module="update_monitors")
python
def update_monitors(self): """ Periodically check in with Reflex Engine and refresh the list of what to monitor """ self.thread_debug("Starting monitor refresh", module="update_monitors") # need to make a more efficient way of doing this via Reflex Engine monitors = [] self.rcs.cache_reset() svcs = self.rcs.cache_list('service', cols=['pipeline', 'name', 'active-instances']) for svc in svcs: try: pipeline = self.rcs.cache_get('pipeline', svc['pipeline']) for mon in pipeline.get('monitor', []): self.DEBUG("monitor {}".format(mon)) mon['service'] = svc['name'] mon['pipeline'] = svc['pipeline'] for inst_name in svc.get('active-instances', []): inst = self.rcs.cache_get('instance', inst_name) # todo: insert: macro flatten mymon = mon.copy() mymon['instance'] = inst_name mymon['target'] = inst['address'] mymon['title'] = svc['name'] + ": " + mon['name'] monitors.append(mymon) except KeyboardInterrupt: raise except: # pylint: disable=bare-except self.NOTIFY("Error in processing monitor:", err=traceback.format_exc()) self.NOTIFY("Refreshed monitors", total_monitors=len(monitors)) self.DEBUG("Monitors", monitors=monitors) # mutex / threadsafe? self.monitors = monitors cache = self.rcs._cache # pylint: disable=protected-access self.instances = cache['instance'] self.services = cache['service'] self.pipelines = cache['pipeline'] self.thread_debug("Refresh complete", module="update_monitors")
[ "def", "update_monitors", "(", "self", ")", ":", "self", ".", "thread_debug", "(", "\"Starting monitor refresh\"", ",", "module", "=", "\"update_monitors\"", ")", "# need to make a more efficient way of doing this via Reflex Engine", "monitors", "=", "[", "]", "self", "."...
Periodically check in with Reflex Engine and refresh the list of what to monitor
[ "Periodically", "check", "in", "with", "Reflex", "Engine", "and", "refresh", "the", "list", "of", "what", "to", "monitor" ]
train
https://github.com/reflexsc/reflex/blob/cee6b0ccfef395ca5e157d644a2e3252cea9fe62/src/rfxmon/__init__.py#L157-L201
reflexsc/reflex
src/rfxmon/__init__.py
Monitor.thread_debug
def thread_debug(self, *args, **kwargs): """ Wrap debug to include thread information """ if 'module' not in kwargs: kwargs['module'] = "Monitor" if kwargs['module'] != 'Monitor' and self.do_DEBUG(module='Monitor'): self.debug[kwargs['module']] = True if not self.do_DEBUG(module=kwargs['module']): return thread_id = threading.current_thread().name key = "[" + thread_id + "] " + kwargs['module'] if not self.debug.get(key): self.debug[key] = True kwargs['module'] = key self.DEBUG(*args, **kwargs)
python
def thread_debug(self, *args, **kwargs): """ Wrap debug to include thread information """ if 'module' not in kwargs: kwargs['module'] = "Monitor" if kwargs['module'] != 'Monitor' and self.do_DEBUG(module='Monitor'): self.debug[kwargs['module']] = True if not self.do_DEBUG(module=kwargs['module']): return thread_id = threading.current_thread().name key = "[" + thread_id + "] " + kwargs['module'] if not self.debug.get(key): self.debug[key] = True kwargs['module'] = key self.DEBUG(*args, **kwargs)
[ "def", "thread_debug", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "'module'", "not", "in", "kwargs", ":", "kwargs", "[", "'module'", "]", "=", "\"Monitor\"", "if", "kwargs", "[", "'module'", "]", "!=", "'Monitor'", "and", ...
Wrap debug to include thread information
[ "Wrap", "debug", "to", "include", "thread", "information" ]
train
https://github.com/reflexsc/reflex/blob/cee6b0ccfef395ca5e157d644a2e3252cea9fe62/src/rfxmon/__init__.py#L204-L219
reflexsc/reflex
src/rfxmon/__init__.py
Monitor.worker_thread
def worker_thread(self): """ The primary worker thread--this thread pulls from the monitor queue and runs the monitor, submitting the results to the handler queue. Calls a sub method based on type of monitor. """ self.thread_debug("Starting monitor thread") while not self.thread_stopper.is_set(): mon = self.workers_queue.get() self.thread_debug("Processing {type} Monitor: {title}".format(**mon)) result = getattr(self, "_worker_" + mon['type'])(mon) self.workers_queue.task_done() self.results_queue.put({'type':mon['type'], 'result':result})
python
def worker_thread(self): """ The primary worker thread--this thread pulls from the monitor queue and runs the monitor, submitting the results to the handler queue. Calls a sub method based on type of monitor. """ self.thread_debug("Starting monitor thread") while not self.thread_stopper.is_set(): mon = self.workers_queue.get() self.thread_debug("Processing {type} Monitor: {title}".format(**mon)) result = getattr(self, "_worker_" + mon['type'])(mon) self.workers_queue.task_done() self.results_queue.put({'type':mon['type'], 'result':result})
[ "def", "worker_thread", "(", "self", ")", ":", "self", ".", "thread_debug", "(", "\"Starting monitor thread\"", ")", "while", "not", "self", ".", "thread_stopper", ".", "is_set", "(", ")", ":", "mon", "=", "self", ".", "workers_queue", ".", "get", "(", ")"...
The primary worker thread--this thread pulls from the monitor queue and runs the monitor, submitting the results to the handler queue. Calls a sub method based on type of monitor.
[ "The", "primary", "worker", "thread", "--", "this", "thread", "pulls", "from", "the", "monitor", "queue", "and", "runs", "the", "monitor", "submitting", "the", "results", "to", "the", "handler", "queue", "." ]
train
https://github.com/reflexsc/reflex/blob/cee6b0ccfef395ca5e157d644a2e3252cea9fe62/src/rfxmon/__init__.py#L222-L235
reflexsc/reflex
src/rfxmon/__init__.py
Monitor._worker_http
def _worker_http(self, monitor): """ Process an http monitor. """ self.thread_debug("process_http", data=monitor, module='handler') query = monitor['query'] method = query['method'].lower() self.stats.http_run += 1 try: target = monitor['target'] url = 'http://{host}:{port}{path}'.format(path=query['path'], **target) response = { 'url': url, 'status': 'failed', 'result': {}, 'monitor': monitor, 'message': 'did not meet expected result or no expected result defined', 'elapsedms': monitor['timeout']*1000, 'code':0 } # not sed_env_dict -- we do not want to xref headers headers = query.get('headers', {}) for elem in headers: headers[elem] = self.sed_env(headers[elem], {}, '') res = response['result'] = getattr(requests, method)(url, headers=headers, timeout=monitor['timeout']) response['code'] = res.status_code response['elapsedms'] = res.elapsed.total_seconds() * 1000 if 'response-code' in monitor['expect']: if int(monitor['expect']['response-code']) == res.status_code: response['message'] = '' response['status'] = 'ok' else: # abort with failure, do not pass go return response if 'content' in monitor['expect']: if monitor['expect']['content'] in res.text: response['message'] = '' response['status'] = 'ok' else: # abort with failure, do not pass go return response if 'regex' in monitor['expect']: if re.search(monitor['expect']['regex'], res.text): response['message'] = '' response['status'] = 'ok' else: # abort with failure, do not pass go return response except requests.exceptions.Timeout: response['message'] = 'timeout' except requests.exceptions.ConnectionError: response['message'] = 'connect-failed' response['elapsedms'] = -1 return response
python
def _worker_http(self, monitor): """ Process an http monitor. """ self.thread_debug("process_http", data=monitor, module='handler') query = monitor['query'] method = query['method'].lower() self.stats.http_run += 1 try: target = monitor['target'] url = 'http://{host}:{port}{path}'.format(path=query['path'], **target) response = { 'url': url, 'status': 'failed', 'result': {}, 'monitor': monitor, 'message': 'did not meet expected result or no expected result defined', 'elapsedms': monitor['timeout']*1000, 'code':0 } # not sed_env_dict -- we do not want to xref headers headers = query.get('headers', {}) for elem in headers: headers[elem] = self.sed_env(headers[elem], {}, '') res = response['result'] = getattr(requests, method)(url, headers=headers, timeout=monitor['timeout']) response['code'] = res.status_code response['elapsedms'] = res.elapsed.total_seconds() * 1000 if 'response-code' in monitor['expect']: if int(monitor['expect']['response-code']) == res.status_code: response['message'] = '' response['status'] = 'ok' else: # abort with failure, do not pass go return response if 'content' in monitor['expect']: if monitor['expect']['content'] in res.text: response['message'] = '' response['status'] = 'ok' else: # abort with failure, do not pass go return response if 'regex' in monitor['expect']: if re.search(monitor['expect']['regex'], res.text): response['message'] = '' response['status'] = 'ok' else: # abort with failure, do not pass go return response except requests.exceptions.Timeout: response['message'] = 'timeout' except requests.exceptions.ConnectionError: response['message'] = 'connect-failed' response['elapsedms'] = -1 return response
[ "def", "_worker_http", "(", "self", ",", "monitor", ")", ":", "self", ".", "thread_debug", "(", "\"process_http\"", ",", "data", "=", "monitor", ",", "module", "=", "'handler'", ")", "query", "=", "monitor", "[", "'query'", "]", "method", "=", "query", "...
Process an http monitor.
[ "Process", "an", "http", "monitor", "." ]
train
https://github.com/reflexsc/reflex/blob/cee6b0ccfef395ca5e157d644a2e3252cea9fe62/src/rfxmon/__init__.py#L238-L295
reflexsc/reflex
src/rfxmon/__init__.py
Monitor.handler_thread
def handler_thread(self): """ A handler thread--this pulls results from the queue and processes them accordingly. Calls a sub method based on type of monitor. """ self.thread_debug("Starting handler thread") while not self.thread_stopper.is_set(): data = self.results_queue.get() self.thread_debug("Handling Result", module="handler") getattr(self, "_handler_" + data['type'])(data['result'])
python
def handler_thread(self): """ A handler thread--this pulls results from the queue and processes them accordingly. Calls a sub method based on type of monitor. """ self.thread_debug("Starting handler thread") while not self.thread_stopper.is_set(): data = self.results_queue.get() self.thread_debug("Handling Result", module="handler") getattr(self, "_handler_" + data['type'])(data['result'])
[ "def", "handler_thread", "(", "self", ")", ":", "self", ".", "thread_debug", "(", "\"Starting handler thread\"", ")", "while", "not", "self", ".", "thread_stopper", ".", "is_set", "(", ")", ":", "data", "=", "self", ".", "results_queue", ".", "get", "(", "...
A handler thread--this pulls results from the queue and processes them accordingly. Calls a sub method based on type of monitor.
[ "A", "handler", "thread", "--", "this", "pulls", "results", "from", "the", "queue", "and", "processes", "them", "accordingly", "." ]
train
https://github.com/reflexsc/reflex/blob/cee6b0ccfef395ca5e157d644a2e3252cea9fe62/src/rfxmon/__init__.py#L298-L309
reflexsc/reflex
src/rfxmon/__init__.py
Monitor._handler_http
def _handler_http(self, result): """ Handle the result of an http monitor """ monitor = result['monitor'] self.thread_debug("process_http", data=monitor, module='handler') self.stats.http_handled += 1 # splunk will pick this up logargs = { 'type':"metric", 'endpoint': result['url'], 'pipeline': monitor['pipeline'], 'service': monitor['service'], 'instance': monitor['instance'], 'status': result['status'], 'elapsed-ms': round(result['elapsedms'], 5), 'code': result['code'] } self.NOTIFY(result['message'], **logargs) # if our status has changed, also update Reflex Engine if result['status'] != self.instances[monitor['instance']]['status']: # do some retry/counter steps on failure? self.instances[monitor['instance']]['status'] = result['status'] self.rcs.patch('instance', monitor['instance'], {'status': result['status']})
python
def _handler_http(self, result): """ Handle the result of an http monitor """ monitor = result['monitor'] self.thread_debug("process_http", data=monitor, module='handler') self.stats.http_handled += 1 # splunk will pick this up logargs = { 'type':"metric", 'endpoint': result['url'], 'pipeline': monitor['pipeline'], 'service': monitor['service'], 'instance': monitor['instance'], 'status': result['status'], 'elapsed-ms': round(result['elapsedms'], 5), 'code': result['code'] } self.NOTIFY(result['message'], **logargs) # if our status has changed, also update Reflex Engine if result['status'] != self.instances[monitor['instance']]['status']: # do some retry/counter steps on failure? self.instances[monitor['instance']]['status'] = result['status'] self.rcs.patch('instance', monitor['instance'], {'status': result['status']})
[ "def", "_handler_http", "(", "self", ",", "result", ")", ":", "monitor", "=", "result", "[", "'monitor'", "]", "self", ".", "thread_debug", "(", "\"process_http\"", ",", "data", "=", "monitor", ",", "module", "=", "'handler'", ")", "self", ".", "stats", ...
Handle the result of an http monitor
[ "Handle", "the", "result", "of", "an", "http", "monitor" ]
train
https://github.com/reflexsc/reflex/blob/cee6b0ccfef395ca5e157d644a2e3252cea9fe62/src/rfxmon/__init__.py#L312-L339
reflexsc/reflex
src/rfxmon/__init__.py
Monitor.reporting
def reporting(self): """ report on consumption info """ self.thread_debug("reporting") res = resource.getrusage(resource.RUSAGE_SELF) self.NOTIFY("", type='internal-usage', maxrss=round(res.ru_maxrss/1024, 2), ixrss=round(res.ru_ixrss/1024, 2), idrss=round(res.ru_idrss/1024, 2), isrss=round(res.ru_isrss/1024, 2), threads=threading.active_count(), proctot=len(self.monitors), procwin=self.stats.procwin)
python
def reporting(self): """ report on consumption info """ self.thread_debug("reporting") res = resource.getrusage(resource.RUSAGE_SELF) self.NOTIFY("", type='internal-usage', maxrss=round(res.ru_maxrss/1024, 2), ixrss=round(res.ru_ixrss/1024, 2), idrss=round(res.ru_idrss/1024, 2), isrss=round(res.ru_isrss/1024, 2), threads=threading.active_count(), proctot=len(self.monitors), procwin=self.stats.procwin)
[ "def", "reporting", "(", "self", ")", ":", "self", ".", "thread_debug", "(", "\"reporting\"", ")", "res", "=", "resource", ".", "getrusage", "(", "resource", ".", "RUSAGE_SELF", ")", "self", ".", "NOTIFY", "(", "\"\"", ",", "type", "=", "'internal-usage'",...
report on consumption info
[ "report", "on", "consumption", "info" ]
train
https://github.com/reflexsc/reflex/blob/cee6b0ccfef395ca5e157d644a2e3252cea9fe62/src/rfxmon/__init__.py#L342-L356
reflexsc/reflex
src/rfxmon/__init__.py
Monitor.heartbeat
def heartbeat(self): """ Watch our counters--as long as things are incrementing, send a ping to statuscake sayin we are alive and okay. """ self.thread_debug("heartbeat") # check stats -- should be incrementing if self.last_stats: if self.stats.http_run <= self.last_stats.http_run: self.NOTIFY("No monitors run since last heartbeat!", service="heartbeat") return elif self.stats.http_handled <= self.last_stats.http_handled: self.NOTIFY("No monitor results handled since last heartbeat!", service="heartbeat") return # ping heartbeat as a webhook if self.config.get('heartbeat-hook'): result = requests.get(self.config.get('heartbeat-hook')) if result.status_code != 200: self.NOTIFY("Heartbeat ping to statuscake failed!", level="ERROR") # keep a static copy of the last run stats self.last_stats = self.stats.copy()
python
def heartbeat(self): """ Watch our counters--as long as things are incrementing, send a ping to statuscake sayin we are alive and okay. """ self.thread_debug("heartbeat") # check stats -- should be incrementing if self.last_stats: if self.stats.http_run <= self.last_stats.http_run: self.NOTIFY("No monitors run since last heartbeat!", service="heartbeat") return elif self.stats.http_handled <= self.last_stats.http_handled: self.NOTIFY("No monitor results handled since last heartbeat!", service="heartbeat") return # ping heartbeat as a webhook if self.config.get('heartbeat-hook'): result = requests.get(self.config.get('heartbeat-hook')) if result.status_code != 200: self.NOTIFY("Heartbeat ping to statuscake failed!", level="ERROR") # keep a static copy of the last run stats self.last_stats = self.stats.copy()
[ "def", "heartbeat", "(", "self", ")", ":", "self", ".", "thread_debug", "(", "\"heartbeat\"", ")", "# check stats -- should be incrementing", "if", "self", ".", "last_stats", ":", "if", "self", ".", "stats", ".", "http_run", "<=", "self", ".", "last_stats", "....
Watch our counters--as long as things are incrementing, send a ping to statuscake sayin we are alive and okay.
[ "Watch", "our", "counters", "--", "as", "long", "as", "things", "are", "incrementing", "send", "a", "ping", "to", "statuscake", "sayin", "we", "are", "alive", "and", "okay", "." ]
train
https://github.com/reflexsc/reflex/blob/cee6b0ccfef395ca5e157d644a2e3252cea9fe62/src/rfxmon/__init__.py#L359-L383
reflexsc/reflex
src/rfxmon/__init__.py
Monitor.start_agent
def start_agent(self, cfgin=True): """ CLI interface to start 12-factor service """ default_conf = { "threads": { "result": { "number": 0, "function": None }, "worker": { "number": 0, "function": None }, }, "interval": { "refresh": 900, "heartbeat": 300, "reporting": 300, "test": 60 }, "heartbeat-hook": False } indata = {} if cfgin: indata = json.load(sys.stdin) elif os.environ.get("REFLEX_MONITOR_CONFIG"): indata = os.environ.get("REFLEX_MONITOR_CONFIG") if indata[0] != "{": indata = base64.b64decode(indata) else: self.NOTIFY("Using default configuration") conf = dictlib.union(default_conf, indata) conf['threads']['result']['function'] = self.handler_thread conf['threads']['worker']['function'] = self.worker_thread self.NOTIFY("Starting monitor Agent") try: self.configure(conf).start() except KeyboardInterrupt: self.thread_stopper.set() if self.refresh_stopper: self.refresh_stopper.set() if self.heartbeat_stopper: self.heartbeat_stopper.set() if self.reporting_stopper: self.reporting_stopper.set()
python
def start_agent(self, cfgin=True): """ CLI interface to start 12-factor service """ default_conf = { "threads": { "result": { "number": 0, "function": None }, "worker": { "number": 0, "function": None }, }, "interval": { "refresh": 900, "heartbeat": 300, "reporting": 300, "test": 60 }, "heartbeat-hook": False } indata = {} if cfgin: indata = json.load(sys.stdin) elif os.environ.get("REFLEX_MONITOR_CONFIG"): indata = os.environ.get("REFLEX_MONITOR_CONFIG") if indata[0] != "{": indata = base64.b64decode(indata) else: self.NOTIFY("Using default configuration") conf = dictlib.union(default_conf, indata) conf['threads']['result']['function'] = self.handler_thread conf['threads']['worker']['function'] = self.worker_thread self.NOTIFY("Starting monitor Agent") try: self.configure(conf).start() except KeyboardInterrupt: self.thread_stopper.set() if self.refresh_stopper: self.refresh_stopper.set() if self.heartbeat_stopper: self.heartbeat_stopper.set() if self.reporting_stopper: self.reporting_stopper.set()
[ "def", "start_agent", "(", "self", ",", "cfgin", "=", "True", ")", ":", "default_conf", "=", "{", "\"threads\"", ":", "{", "\"result\"", ":", "{", "\"number\"", ":", "0", ",", "\"function\"", ":", "None", "}", ",", "\"worker\"", ":", "{", "\"number\"", ...
CLI interface to start 12-factor service
[ "CLI", "interface", "to", "start", "12", "-", "factor", "service" ]
train
https://github.com/reflexsc/reflex/blob/cee6b0ccfef395ca5e157d644a2e3252cea9fe62/src/rfxmon/__init__.py#L391-L440
Laufire/ec
ec/modules/core.py
start
def start(): r"""Starts ec. """ processPendingModules() if not state.main_module_name in ModuleMembers: # don't start the core when main is not Ec-ed return MainModule = sys.modules[state.main_module_name] if not MainModule.__ec_member__.Members: # there was some error while loading script(s) return global BaseGroup BaseGroup = MainModule.__ec_member__ Argv = sys.argv[1:] global mode mode = 'd' if Argv else 's' # dispatch / shell mode if mode == 's': import shell shell.init() else: import dispatch dispatch.init(Argv) processExitHooks()
python
def start(): r"""Starts ec. """ processPendingModules() if not state.main_module_name in ModuleMembers: # don't start the core when main is not Ec-ed return MainModule = sys.modules[state.main_module_name] if not MainModule.__ec_member__.Members: # there was some error while loading script(s) return global BaseGroup BaseGroup = MainModule.__ec_member__ Argv = sys.argv[1:] global mode mode = 'd' if Argv else 's' # dispatch / shell mode if mode == 's': import shell shell.init() else: import dispatch dispatch.init(Argv) processExitHooks()
[ "def", "start", "(", ")", ":", "processPendingModules", "(", ")", "if", "not", "state", ".", "main_module_name", "in", "ModuleMembers", ":", "# don't start the core when main is not Ec-ed\r", "return", "MainModule", "=", "sys", ".", "modules", "[", "state", ".", "...
r"""Starts ec.
[ "r", "Starts", "ec", "." ]
train
https://github.com/Laufire/ec/blob/63e84a1daef9234487d7de538e5da233a7d13071/ec/modules/core.py#L15-L43
Laufire/ec
ec/modules/core.py
execCommand
def execCommand(Argv, collect_missing): r"""Executes the given task with parameters. """ try: return _execCommand(Argv, collect_missing) except Exception as e: if Settings['errorHandler']: Settings['errorHandler'](e) if Settings['debug']: # #ToDo: Have an option to debug through stderr. The issue is, the way to make pdb.post_mortem, to use stderr, like pdb.set_trace is unknown. import pdb pdb.post_mortem(sys.exc_info()[2]) if not Settings['silent']: # Debug, then log the trace. import traceback etype, value, tb = sys.exc_info() tb = tb.tb_next.tb_next # remove the ec - calls from the traceback, to make it more understandable message = ''.join(traceback.format_exception(etype, value, tb))[:-1] else: if isinstance(e, HandledException): # let the modes handle the HandledException raise e message = str(e) # provide a succinct error message raise HandledException(message)
python
def execCommand(Argv, collect_missing): r"""Executes the given task with parameters. """ try: return _execCommand(Argv, collect_missing) except Exception as e: if Settings['errorHandler']: Settings['errorHandler'](e) if Settings['debug']: # #ToDo: Have an option to debug through stderr. The issue is, the way to make pdb.post_mortem, to use stderr, like pdb.set_trace is unknown. import pdb pdb.post_mortem(sys.exc_info()[2]) if not Settings['silent']: # Debug, then log the trace. import traceback etype, value, tb = sys.exc_info() tb = tb.tb_next.tb_next # remove the ec - calls from the traceback, to make it more understandable message = ''.join(traceback.format_exception(etype, value, tb))[:-1] else: if isinstance(e, HandledException): # let the modes handle the HandledException raise e message = str(e) # provide a succinct error message raise HandledException(message)
[ "def", "execCommand", "(", "Argv", ",", "collect_missing", ")", ":", "try", ":", "return", "_execCommand", "(", "Argv", ",", "collect_missing", ")", "except", "Exception", "as", "e", ":", "if", "Settings", "[", "'errorHandler'", "]", ":", "Settings", "[", ...
r"""Executes the given task with parameters.
[ "r", "Executes", "the", "given", "task", "with", "parameters", "." ]
train
https://github.com/Laufire/ec/blob/63e84a1daef9234487d7de538e5da233a7d13071/ec/modules/core.py#L45-L74
Laufire/ec
ec/modules/core.py
getDescendant
def getDescendant(Ancestor, RouteParts): r"""Resolves a descendant, of the given Ancestor, as pointed by the RouteParts. """ if not RouteParts: return Ancestor Resolved = Ancestor.Members.get(RouteParts.pop(0)) if isinstance(Resolved, Group): return getDescendant(Resolved, RouteParts) else: return Resolved
python
def getDescendant(Ancestor, RouteParts): r"""Resolves a descendant, of the given Ancestor, as pointed by the RouteParts. """ if not RouteParts: return Ancestor Resolved = Ancestor.Members.get(RouteParts.pop(0)) if isinstance(Resolved, Group): return getDescendant(Resolved, RouteParts) else: return Resolved
[ "def", "getDescendant", "(", "Ancestor", ",", "RouteParts", ")", ":", "if", "not", "RouteParts", ":", "return", "Ancestor", "Resolved", "=", "Ancestor", ".", "Members", ".", "get", "(", "RouteParts", ".", "pop", "(", "0", ")", ")", "if", "isinstance", "(...
r"""Resolves a descendant, of the given Ancestor, as pointed by the RouteParts.
[ "r", "Resolves", "a", "descendant", "of", "the", "given", "Ancestor", "as", "pointed", "by", "the", "RouteParts", "." ]
train
https://github.com/Laufire/ec/blob/63e84a1daef9234487d7de538e5da233a7d13071/ec/modules/core.py#L76-L88
Laufire/ec
ec/modules/core.py
setActiveModule
def setActiveModule(Module): r"""Helps with collecting the members of the imported modules. """ module_name = Module.__name__ if module_name not in ModuleMembers: ModuleMembers[module_name] = [] ModulesQ.append(module_name) Group(Module, {}) # brand the module with __ec_member__ state.ActiveModuleMemberQ = ModuleMembers[module_name]
python
def setActiveModule(Module): r"""Helps with collecting the members of the imported modules. """ module_name = Module.__name__ if module_name not in ModuleMembers: ModuleMembers[module_name] = [] ModulesQ.append(module_name) Group(Module, {}) # brand the module with __ec_member__ state.ActiveModuleMemberQ = ModuleMembers[module_name]
[ "def", "setActiveModule", "(", "Module", ")", ":", "module_name", "=", "Module", ".", "__name__", "if", "module_name", "not", "in", "ModuleMembers", ":", "ModuleMembers", "[", "module_name", "]", "=", "[", "]", "ModulesQ", ".", "append", "(", "module_name", ...
r"""Helps with collecting the members of the imported modules.
[ "r", "Helps", "with", "collecting", "the", "members", "of", "the", "imported", "modules", "." ]
train
https://github.com/Laufire/ec/blob/63e84a1daef9234487d7de538e5da233a7d13071/ec/modules/core.py#L90-L100
Laufire/ec
ec/modules/core.py
processModule
def processModule(module_name): r"""Builds a command tree out of the configured members of a module. """ Module = sys.modules[module_name] MembersTarget = [] ClassQ = [] Cls = None ClsGroup = None ClsGrpMembers = [] for Member in ModuleMembers[module_name]: Underlying = Member.Underlying member_name = Member.Config['name'] member_alias = Member.Config.get('alias', None) if ClassQ: ClsGroup = ClassQ[-1] Cls = ClsGroup.Underlying if getattr(Cls, Underlying.__name__, None) is Underlying: # we got a member tht is a child of the previous class if isclass(Underlying): ClassQ.append(Underlying.__ec_member__) elif not isunderlying(Underlying): continue if member_alias: ClsGrpMembers.insert(0, (member_alias, Member)) ClsGrpMembers.insert(0, (member_name, Member)) continue elif Cls: # we've finished adding children to the previous class ClsGroup.Members = OrderedDict(ClsGrpMembers) ClsGrpMembers = [] ClassQ.pop() Cls = None ClsGroup = None if isunderlying(Underlying): if member_alias: MembersTarget.insert(0, (member_alias, Member)) MembersTarget.insert(0, (member_name, Member)) if isclass(Underlying): ClassQ.append(Underlying.__ec_member__) if ClsGroup: ClsGroup.Members = OrderedDict(ClsGrpMembers) ModuleMembers[module_name] = [] # remove the existing members from the cache so that they won't be processed again if not hasattr(Module.__ec_member__, 'Members'): Module.__ec_member__.Members = OrderedDict(MembersTarget)
python
def processModule(module_name): r"""Builds a command tree out of the configured members of a module. """ Module = sys.modules[module_name] MembersTarget = [] ClassQ = [] Cls = None ClsGroup = None ClsGrpMembers = [] for Member in ModuleMembers[module_name]: Underlying = Member.Underlying member_name = Member.Config['name'] member_alias = Member.Config.get('alias', None) if ClassQ: ClsGroup = ClassQ[-1] Cls = ClsGroup.Underlying if getattr(Cls, Underlying.__name__, None) is Underlying: # we got a member tht is a child of the previous class if isclass(Underlying): ClassQ.append(Underlying.__ec_member__) elif not isunderlying(Underlying): continue if member_alias: ClsGrpMembers.insert(0, (member_alias, Member)) ClsGrpMembers.insert(0, (member_name, Member)) continue elif Cls: # we've finished adding children to the previous class ClsGroup.Members = OrderedDict(ClsGrpMembers) ClsGrpMembers = [] ClassQ.pop() Cls = None ClsGroup = None if isunderlying(Underlying): if member_alias: MembersTarget.insert(0, (member_alias, Member)) MembersTarget.insert(0, (member_name, Member)) if isclass(Underlying): ClassQ.append(Underlying.__ec_member__) if ClsGroup: ClsGroup.Members = OrderedDict(ClsGrpMembers) ModuleMembers[module_name] = [] # remove the existing members from the cache so that they won't be processed again if not hasattr(Module.__ec_member__, 'Members'): Module.__ec_member__.Members = OrderedDict(MembersTarget)
[ "def", "processModule", "(", "module_name", ")", ":", "Module", "=", "sys", ".", "modules", "[", "module_name", "]", "MembersTarget", "=", "[", "]", "ClassQ", "=", "[", "]", "Cls", "=", "None", "ClsGroup", "=", "None", "ClsGrpMembers", "=", "[", "]", "...
r"""Builds a command tree out of the configured members of a module.
[ "r", "Builds", "a", "command", "tree", "out", "of", "the", "configured", "members", "of", "a", "module", "." ]
train
https://github.com/Laufire/ec/blob/63e84a1daef9234487d7de538e5da233a7d13071/ec/modules/core.py#L116-L170
Laufire/ec
ec/modules/core.py
_execCommand
def _execCommand(Argv, collect_missing): r"""Worker of execCommand. """ if not Argv: raise HandledException('Please specify a command!') RouteParts = Argv[0].split('/') Args, KwArgs = getDigestableArgs(Argv[1:]) ResolvedMember = getDescendant(BaseGroup, RouteParts[:]) if isinstance(ResolvedMember, Group): raise HandledException('Please specify a task.', Member=ResolvedMember) if not isinstance(ResolvedMember, Task): raise HandledException('No such task.', Member=BaseGroup) return ResolvedMember.__collect_n_call__(*Args, **KwArgs) if collect_missing else ResolvedMember(*Args, **KwArgs)
python
def _execCommand(Argv, collect_missing): r"""Worker of execCommand. """ if not Argv: raise HandledException('Please specify a command!') RouteParts = Argv[0].split('/') Args, KwArgs = getDigestableArgs(Argv[1:]) ResolvedMember = getDescendant(BaseGroup, RouteParts[:]) if isinstance(ResolvedMember, Group): raise HandledException('Please specify a task.', Member=ResolvedMember) if not isinstance(ResolvedMember, Task): raise HandledException('No such task.', Member=BaseGroup) return ResolvedMember.__collect_n_call__(*Args, **KwArgs) if collect_missing else ResolvedMember(*Args, **KwArgs)
[ "def", "_execCommand", "(", "Argv", ",", "collect_missing", ")", ":", "if", "not", "Argv", ":", "raise", "HandledException", "(", "'Please specify a command!'", ")", "RouteParts", "=", "Argv", "[", "0", "]", ".", "split", "(", "'/'", ")", "Args", ",", "KwA...
r"""Worker of execCommand.
[ "r", "Worker", "of", "execCommand", "." ]
train
https://github.com/Laufire/ec/blob/63e84a1daef9234487d7de538e5da233a7d13071/ec/modules/core.py#L177-L194
jreese/systemwatch
systemwatch/common.py
memoize
def memoize(fn): '''Cache the results of a function that only takes positional arguments.''' cache = {} @wraps(fn) def wrapped_function(*args): if args in cache: return cache[args] else: result = fn(*args) cache[args] = result return result return wrapped_function
python
def memoize(fn): '''Cache the results of a function that only takes positional arguments.''' cache = {} @wraps(fn) def wrapped_function(*args): if args in cache: return cache[args] else: result = fn(*args) cache[args] = result return result return wrapped_function
[ "def", "memoize", "(", "fn", ")", ":", "cache", "=", "{", "}", "@", "wraps", "(", "fn", ")", "def", "wrapped_function", "(", "*", "args", ")", ":", "if", "args", "in", "cache", ":", "return", "cache", "[", "args", "]", "else", ":", "result", "=",...
Cache the results of a function that only takes positional arguments.
[ "Cache", "the", "results", "of", "a", "function", "that", "only", "takes", "positional", "arguments", "." ]
train
https://github.com/jreese/systemwatch/blob/285f38016eebd3b6cf667f809450e91cea2c0741/systemwatch/common.py#L23-L38
etcher-be/epab
epab/core/config.py
setup_config
def setup_config(epab_version: str): """ Set up elib_config package :param epab_version: installed version of EPAB as as string """ logger = logging.getLogger('EPAB') logger.debug('setting up config') elib_config.ELIBConfig.setup( app_name='EPAB', app_version=epab_version, config_file_path='pyproject.toml', config_sep_str='__', root_path=['tool', 'epab'] ) elib_config.write_example_config('pyproject.toml.example') if not pathlib.Path('pyproject.toml').exists(): raise FileNotFoundError('pyproject.toml') elib_config.validate_config()
python
def setup_config(epab_version: str): """ Set up elib_config package :param epab_version: installed version of EPAB as as string """ logger = logging.getLogger('EPAB') logger.debug('setting up config') elib_config.ELIBConfig.setup( app_name='EPAB', app_version=epab_version, config_file_path='pyproject.toml', config_sep_str='__', root_path=['tool', 'epab'] ) elib_config.write_example_config('pyproject.toml.example') if not pathlib.Path('pyproject.toml').exists(): raise FileNotFoundError('pyproject.toml') elib_config.validate_config()
[ "def", "setup_config", "(", "epab_version", ":", "str", ")", ":", "logger", "=", "logging", ".", "getLogger", "(", "'EPAB'", ")", "logger", ".", "debug", "(", "'setting up config'", ")", "elib_config", ".", "ELIBConfig", ".", "setup", "(", "app_name", "=", ...
Set up elib_config package :param epab_version: installed version of EPAB as as string
[ "Set", "up", "elib_config", "package" ]
train
https://github.com/etcher-be/epab/blob/024cde74d058281aa66e6e4b7b71dccbe803b1c1/epab/core/config.py#L100-L118
marteinn/AtomicPress
atomicpress/utils/date.py
get_months_apart
def get_months_apart(d1, d2): """ Get amount of months between dates http://stackoverflow.com/a/4040338 """ return (d1.year - d2.year)*12 + d1.month - d2.month
python
def get_months_apart(d1, d2): """ Get amount of months between dates http://stackoverflow.com/a/4040338 """ return (d1.year - d2.year)*12 + d1.month - d2.month
[ "def", "get_months_apart", "(", "d1", ",", "d2", ")", ":", "return", "(", "d1", ".", "year", "-", "d2", ".", "year", ")", "*", "12", "+", "d1", ".", "month", "-", "d2", ".", "month" ]
Get amount of months between dates http://stackoverflow.com/a/4040338
[ "Get", "amount", "of", "months", "between", "dates", "http", ":", "//", "stackoverflow", ".", "com", "/", "a", "/", "4040338" ]
train
https://github.com/marteinn/AtomicPress/blob/b8a0ca9c9c327f062833fc4a401a8ac0baccf6d1/atomicpress/utils/date.py#L10-L16
marteinn/AtomicPress
atomicpress/utils/date.py
get_month_list
def get_month_list(to_date, from_date): """ Generate a list containing year+month between two dates. Returns: [(2013, 11), (2013, 12), (2014, 1)] """ num_months = get_months_apart(to_date, from_date) month_offset = from_date.month month_list = [] for month in range(month_offset-1, month_offset+num_months): year = from_date.year+(month/12) real_month = (month % 12) + 1 month_list.append((year, real_month)) return month_list
python
def get_month_list(to_date, from_date): """ Generate a list containing year+month between two dates. Returns: [(2013, 11), (2013, 12), (2014, 1)] """ num_months = get_months_apart(to_date, from_date) month_offset = from_date.month month_list = [] for month in range(month_offset-1, month_offset+num_months): year = from_date.year+(month/12) real_month = (month % 12) + 1 month_list.append((year, real_month)) return month_list
[ "def", "get_month_list", "(", "to_date", ",", "from_date", ")", ":", "num_months", "=", "get_months_apart", "(", "to_date", ",", "from_date", ")", "month_offset", "=", "from_date", ".", "month", "month_list", "=", "[", "]", "for", "month", "in", "range", "("...
Generate a list containing year+month between two dates. Returns: [(2013, 11), (2013, 12), (2014, 1)]
[ "Generate", "a", "list", "containing", "year", "+", "month", "between", "two", "dates", "." ]
train
https://github.com/marteinn/AtomicPress/blob/b8a0ca9c9c327f062833fc4a401a8ac0baccf6d1/atomicpress/utils/date.py#L19-L35
ajyoon/blur
examples/waves/amplitude.py
find_amplitude
def find_amplitude(chunk): """ Calculate the 0-1 amplitude of an ndarray chunk of audio samples. Samples in the ndarray chunk are signed int16 values oscillating anywhere between -32768 and 32767. Find the amplitude between 0 and 1 by summing the absolute values of the minimum and maximum, and dividing by 32767. Args: chunk (numpy.ndarray): An array of int16 audio samples Returns: float: The amplitude of the sample between 0 and 1. Note that this is not a decibel representation of the amplitude. """ return (abs(int(chunk.max() - chunk.min())) / config.SAMPLE_RANGE)
python
def find_amplitude(chunk): """ Calculate the 0-1 amplitude of an ndarray chunk of audio samples. Samples in the ndarray chunk are signed int16 values oscillating anywhere between -32768 and 32767. Find the amplitude between 0 and 1 by summing the absolute values of the minimum and maximum, and dividing by 32767. Args: chunk (numpy.ndarray): An array of int16 audio samples Returns: float: The amplitude of the sample between 0 and 1. Note that this is not a decibel representation of the amplitude. """ return (abs(int(chunk.max() - chunk.min())) / config.SAMPLE_RANGE)
[ "def", "find_amplitude", "(", "chunk", ")", ":", "return", "(", "abs", "(", "int", "(", "chunk", ".", "max", "(", ")", "-", "chunk", ".", "min", "(", ")", ")", ")", "/", "config", ".", "SAMPLE_RANGE", ")" ]
Calculate the 0-1 amplitude of an ndarray chunk of audio samples. Samples in the ndarray chunk are signed int16 values oscillating anywhere between -32768 and 32767. Find the amplitude between 0 and 1 by summing the absolute values of the minimum and maximum, and dividing by 32767. Args: chunk (numpy.ndarray): An array of int16 audio samples Returns: float: The amplitude of the sample between 0 and 1. Note that this is not a decibel representation of the amplitude.
[ "Calculate", "the", "0", "-", "1", "amplitude", "of", "an", "ndarray", "chunk", "of", "audio", "samples", "." ]
train
https://github.com/ajyoon/blur/blob/25fcf083af112bb003956a7a7e1c6ff7d8fef279/examples/waves/amplitude.py#L10-L27
ajyoon/blur
examples/waves/amplitude.py
AmplitudeHandler.step_amp
def step_amp(self): """ Change the amplitude according to the change rate and drift target. Returns: None """ difference = self.drift_target - self._raw_value if abs(difference) < self.change_rate: self.value = self.drift_target else: delta = self.change_rate * numpy.sign(difference) self.value = self._raw_value + delta
python
def step_amp(self): """ Change the amplitude according to the change rate and drift target. Returns: None """ difference = self.drift_target - self._raw_value if abs(difference) < self.change_rate: self.value = self.drift_target else: delta = self.change_rate * numpy.sign(difference) self.value = self._raw_value + delta
[ "def", "step_amp", "(", "self", ")", ":", "difference", "=", "self", ".", "drift_target", "-", "self", ".", "_raw_value", "if", "abs", "(", "difference", ")", "<", "self", ".", "change_rate", ":", "self", ".", "value", "=", "self", ".", "drift_target", ...
Change the amplitude according to the change rate and drift target. Returns: None
[ "Change", "the", "amplitude", "according", "to", "the", "change", "rate", "and", "drift", "target", "." ]
train
https://github.com/ajyoon/blur/blob/25fcf083af112bb003956a7a7e1c6ff7d8fef279/examples/waves/amplitude.py#L90-L101
asmodehn/filefinder2
filefinder2/_fileloader2.py
get_supported_file_loaders_2
def get_supported_file_loaders_2(force=False): """Returns a list of file-based module loaders. Each item is a tuple (loader, suffixes). """ if force or (2, 7) <= sys.version_info < (3, 4): # valid until which py3 version ? import imp loaders = [] for suffix, mode, type in imp.get_suffixes(): if type == imp.PY_SOURCE: loaders.append((SourceFileLoader2, [suffix])) else: loaders.append((ImpFileLoader2, [suffix])) return loaders elif sys.version_info >= (3, 4): # valid from which py3 version ? from importlib.machinery import ( SOURCE_SUFFIXES, SourceFileLoader, BYTECODE_SUFFIXES, SourcelessFileLoader, EXTENSION_SUFFIXES, ExtensionFileLoader, ) # This is already defined in importlib._bootstrap_external # but is not exposed. extensions = ExtensionFileLoader, EXTENSION_SUFFIXES source = SourceFileLoader, SOURCE_SUFFIXES bytecode = SourcelessFileLoader, BYTECODE_SUFFIXES return [extensions, source, bytecode]
python
def get_supported_file_loaders_2(force=False): """Returns a list of file-based module loaders. Each item is a tuple (loader, suffixes). """ if force or (2, 7) <= sys.version_info < (3, 4): # valid until which py3 version ? import imp loaders = [] for suffix, mode, type in imp.get_suffixes(): if type == imp.PY_SOURCE: loaders.append((SourceFileLoader2, [suffix])) else: loaders.append((ImpFileLoader2, [suffix])) return loaders elif sys.version_info >= (3, 4): # valid from which py3 version ? from importlib.machinery import ( SOURCE_SUFFIXES, SourceFileLoader, BYTECODE_SUFFIXES, SourcelessFileLoader, EXTENSION_SUFFIXES, ExtensionFileLoader, ) # This is already defined in importlib._bootstrap_external # but is not exposed. extensions = ExtensionFileLoader, EXTENSION_SUFFIXES source = SourceFileLoader, SOURCE_SUFFIXES bytecode = SourcelessFileLoader, BYTECODE_SUFFIXES return [extensions, source, bytecode]
[ "def", "get_supported_file_loaders_2", "(", "force", "=", "False", ")", ":", "if", "force", "or", "(", "2", ",", "7", ")", "<=", "sys", ".", "version_info", "<", "(", "3", ",", "4", ")", ":", "# valid until which py3 version ?", "import", "imp", "loaders",...
Returns a list of file-based module loaders. Each item is a tuple (loader, suffixes).
[ "Returns", "a", "list", "of", "file", "-", "based", "module", "loaders", ".", "Each", "item", "is", "a", "tuple", "(", "loader", "suffixes", ")", "." ]
train
https://github.com/asmodehn/filefinder2/blob/3f0b211ce11a34562e2a2160e039ae5290b68d6b/filefinder2/_fileloader2.py#L43-L73
asmodehn/filefinder2
filefinder2/_fileloader2.py
_NamespacePath._find_parent_path_names
def _find_parent_path_names(self): """Returns a tuple of (parent-module-name, parent-path-attr-name)""" parent, dot, me = self._name.rpartition('.') if dot == '': # This is a top-level module. sys.path contains the parent path. return 'sys', 'path' # Not a top-level module. parent-module.__path__ contains the # parent path. return parent, '__path__'
python
def _find_parent_path_names(self): """Returns a tuple of (parent-module-name, parent-path-attr-name)""" parent, dot, me = self._name.rpartition('.') if dot == '': # This is a top-level module. sys.path contains the parent path. return 'sys', 'path' # Not a top-level module. parent-module.__path__ contains the # parent path. return parent, '__path__'
[ "def", "_find_parent_path_names", "(", "self", ")", ":", "parent", ",", "dot", ",", "me", "=", "self", ".", "_name", ".", "rpartition", "(", "'.'", ")", "if", "dot", "==", "''", ":", "# This is a top-level module. sys.path contains the parent path.", "return", "...
Returns a tuple of (parent-module-name, parent-path-attr-name)
[ "Returns", "a", "tuple", "of", "(", "parent", "-", "module", "-", "name", "parent", "-", "path", "-", "attr", "-", "name", ")" ]
train
https://github.com/asmodehn/filefinder2/blob/3f0b211ce11a34562e2a2160e039ae5290b68d6b/filefinder2/_fileloader2.py#L106-L114
asmodehn/filefinder2
filefinder2/_fileloader2.py
_LoaderBasics.is_package
def is_package(self, fullname): """Concrete implementation of InspectLoader.is_package by checking if the path returned by get_filename has a filename of '__init__.py'.""" filename = os.path.split(self.get_filename(fullname))[1] filename_base = filename.rsplit('.', 1)[0] tail_name = fullname.rpartition('.')[2] return filename_base == '__init__' and tail_name != '__init__'
python
def is_package(self, fullname): """Concrete implementation of InspectLoader.is_package by checking if the path returned by get_filename has a filename of '__init__.py'.""" filename = os.path.split(self.get_filename(fullname))[1] filename_base = filename.rsplit('.', 1)[0] tail_name = fullname.rpartition('.')[2] return filename_base == '__init__' and tail_name != '__init__'
[ "def", "is_package", "(", "self", ",", "fullname", ")", ":", "filename", "=", "os", ".", "path", ".", "split", "(", "self", ".", "get_filename", "(", "fullname", ")", ")", "[", "1", "]", "filename_base", "=", "filename", ".", "rsplit", "(", "'.'", ",...
Concrete implementation of InspectLoader.is_package by checking if the path returned by get_filename has a filename of '__init__.py'.
[ "Concrete", "implementation", "of", "InspectLoader", ".", "is_package", "by", "checking", "if", "the", "path", "returned", "by", "get_filename", "has", "a", "filename", "of", "__init__", ".", "py", "." ]
train
https://github.com/asmodehn/filefinder2/blob/3f0b211ce11a34562e2a2160e039ae5290b68d6b/filefinder2/_fileloader2.py#L154-L160
asmodehn/filefinder2
filefinder2/_fileloader2.py
_LoaderBasics.create_module
def create_module(self, spec): """Creates the module, and also insert it into sys.modules, adding this onto py2 import logic.""" mod = sys.modules.setdefault(spec.name, types.ModuleType(spec.name)) # we are using setdefault to satisfy https://docs.python.org/3/reference/import.html#loaders return mod
python
def create_module(self, spec): """Creates the module, and also insert it into sys.modules, adding this onto py2 import logic.""" mod = sys.modules.setdefault(spec.name, types.ModuleType(spec.name)) # we are using setdefault to satisfy https://docs.python.org/3/reference/import.html#loaders return mod
[ "def", "create_module", "(", "self", ",", "spec", ")", ":", "mod", "=", "sys", ".", "modules", ".", "setdefault", "(", "spec", ".", "name", ",", "types", ".", "ModuleType", "(", "spec", ".", "name", ")", ")", "# we are using setdefault to satisfy https://doc...
Creates the module, and also insert it into sys.modules, adding this onto py2 import logic.
[ "Creates", "the", "module", "and", "also", "insert", "it", "into", "sys", ".", "modules", "adding", "this", "onto", "py2", "import", "logic", "." ]
train
https://github.com/asmodehn/filefinder2/blob/3f0b211ce11a34562e2a2160e039ae5290b68d6b/filefinder2/_fileloader2.py#L162-L166
asmodehn/filefinder2
filefinder2/_fileloader2.py
_LoaderBasics.exec_module
def exec_module(self, module): """Execute the module.""" code = self.get_code(module.__name__) if code is None: raise ImportError('cannot load module {!r} when get_code() ' 'returns None'.format(module.__name__)) exec(code, module.__dict__)
python
def exec_module(self, module): """Execute the module.""" code = self.get_code(module.__name__) if code is None: raise ImportError('cannot load module {!r} when get_code() ' 'returns None'.format(module.__name__)) exec(code, module.__dict__)
[ "def", "exec_module", "(", "self", ",", "module", ")", ":", "code", "=", "self", ".", "get_code", "(", "module", ".", "__name__", ")", "if", "code", "is", "None", ":", "raise", "ImportError", "(", "'cannot load module {!r} when get_code() '", "'returns None'", ...
Execute the module.
[ "Execute", "the", "module", "." ]
train
https://github.com/asmodehn/filefinder2/blob/3f0b211ce11a34562e2a2160e039ae5290b68d6b/filefinder2/_fileloader2.py#L168-L175
asmodehn/filefinder2
filefinder2/_fileloader2.py
_LoaderBasics.load_module
def load_module(self, fullname): """Load the specified module into sys.modules and return it. This method is for python2 only, but implemented with backported py3 methods. """ if fullname in sys.modules: mod = sys.modules[fullname] self.exec_module(mod) # In this case we do not want to remove the module in case of error # Ref : https://docs.python.org/3/reference/import.html#loaders else: try: # Retrieving the spec to help creating module properly spec = spec_from_loader(fullname, self) # this will call create_module and also initialize the module properly (like for py3) mod = module_from_spec(spec) # as per https://docs.python.org/3/reference/import.html#loaders assert mod.__name__ in sys.modules self.exec_module(mod) # We don't ensure that the import-related module attributes get # set in the sys.modules replacement case. Such modules are on # their own. except Exception as exc: # TODO : log exception ! # as per https://docs.python.org/3/reference/import.html#loaders if fullname in sys.modules: del sys.modules[fullname] raise return sys.modules[fullname]
python
def load_module(self, fullname): """Load the specified module into sys.modules and return it. This method is for python2 only, but implemented with backported py3 methods. """ if fullname in sys.modules: mod = sys.modules[fullname] self.exec_module(mod) # In this case we do not want to remove the module in case of error # Ref : https://docs.python.org/3/reference/import.html#loaders else: try: # Retrieving the spec to help creating module properly spec = spec_from_loader(fullname, self) # this will call create_module and also initialize the module properly (like for py3) mod = module_from_spec(spec) # as per https://docs.python.org/3/reference/import.html#loaders assert mod.__name__ in sys.modules self.exec_module(mod) # We don't ensure that the import-related module attributes get # set in the sys.modules replacement case. Such modules are on # their own. except Exception as exc: # TODO : log exception ! # as per https://docs.python.org/3/reference/import.html#loaders if fullname in sys.modules: del sys.modules[fullname] raise return sys.modules[fullname]
[ "def", "load_module", "(", "self", ",", "fullname", ")", ":", "if", "fullname", "in", "sys", ".", "modules", ":", "mod", "=", "sys", ".", "modules", "[", "fullname", "]", "self", ".", "exec_module", "(", "mod", ")", "# In this case we do not want to remove t...
Load the specified module into sys.modules and return it. This method is for python2 only, but implemented with backported py3 methods.
[ "Load", "the", "specified", "module", "into", "sys", ".", "modules", "and", "return", "it", ".", "This", "method", "is", "for", "python2", "only", "but", "implemented", "with", "backported", "py3", "methods", "." ]
train
https://github.com/asmodehn/filefinder2/blob/3f0b211ce11a34562e2a2160e039ae5290b68d6b/filefinder2/_fileloader2.py#L177-L209
asmodehn/filefinder2
filefinder2/_fileloader2.py
NamespaceLoader2.create_module
def create_module(self, spec): """Improve python2 semantics for module creation.""" mod = super(NamespaceLoader2, self).create_module(spec) # Set a few properties required by PEP 302 # mod.__file__ = [p for p in self.path] # this will set mod.__repr__ to not builtin... shouldnt break anything in py2... # CAREFUL : get_filename present implies the module has ONE location, which is not true with namespaces return mod
python
def create_module(self, spec): """Improve python2 semantics for module creation.""" mod = super(NamespaceLoader2, self).create_module(spec) # Set a few properties required by PEP 302 # mod.__file__ = [p for p in self.path] # this will set mod.__repr__ to not builtin... shouldnt break anything in py2... # CAREFUL : get_filename present implies the module has ONE location, which is not true with namespaces return mod
[ "def", "create_module", "(", "self", ",", "spec", ")", ":", "mod", "=", "super", "(", "NamespaceLoader2", ",", "self", ")", ".", "create_module", "(", "spec", ")", "# Set a few properties required by PEP 302", "# mod.__file__ = [p for p in self.path]", "# this will set ...
Improve python2 semantics for module creation.
[ "Improve", "python2", "semantics", "for", "module", "creation", "." ]
train
https://github.com/asmodehn/filefinder2/blob/3f0b211ce11a34562e2a2160e039ae5290b68d6b/filefinder2/_fileloader2.py#L264-L271
asmodehn/filefinder2
filefinder2/_fileloader2.py
NamespaceLoader2.load_module
def load_module(self, name): """Load a namespace module as if coming from an empty file. """ _verbose_message('namespace module loaded with path {!r}', self.path) # Adjusting code from LoaderBasics if name in sys.modules: mod = sys.modules[name] self.exec_module(mod) # In this case we do not want to remove the module in case of error # Ref : https://docs.python.org/3/reference/import.html#loaders else: try: # Building custom spec and loading as in _LoaderBasics... spec = ModuleSpec(name, self, origin='namespace', is_package=True) spec.submodule_search_locations = self.path # this will call create_module and also initialize the module properly (like for py3) mod = module_from_spec(spec) # as per https://docs.python.org/3/reference/import.html#loaders assert mod.__name__ in sys.modules self.exec_module(mod) # We don't ensure that the import-related module attributes get # set in the sys.modules replacement case. Such modules are on # their own. except: # as per https://docs.python.org/3/reference/import.html#loaders if name in sys.modules: del sys.modules[name] raise return sys.modules[name]
python
def load_module(self, name): """Load a namespace module as if coming from an empty file. """ _verbose_message('namespace module loaded with path {!r}', self.path) # Adjusting code from LoaderBasics if name in sys.modules: mod = sys.modules[name] self.exec_module(mod) # In this case we do not want to remove the module in case of error # Ref : https://docs.python.org/3/reference/import.html#loaders else: try: # Building custom spec and loading as in _LoaderBasics... spec = ModuleSpec(name, self, origin='namespace', is_package=True) spec.submodule_search_locations = self.path # this will call create_module and also initialize the module properly (like for py3) mod = module_from_spec(spec) # as per https://docs.python.org/3/reference/import.html#loaders assert mod.__name__ in sys.modules self.exec_module(mod) # We don't ensure that the import-related module attributes get # set in the sys.modules replacement case. Such modules are on # their own. except: # as per https://docs.python.org/3/reference/import.html#loaders if name in sys.modules: del sys.modules[name] raise return sys.modules[name]
[ "def", "load_module", "(", "self", ",", "name", ")", ":", "_verbose_message", "(", "'namespace module loaded with path {!r}'", ",", "self", ".", "path", ")", "# Adjusting code from LoaderBasics", "if", "name", "in", "sys", ".", "modules", ":", "mod", "=", "sys", ...
Load a namespace module as if coming from an empty file.
[ "Load", "a", "namespace", "module", "as", "if", "coming", "from", "an", "empty", "file", "." ]
train
https://github.com/asmodehn/filefinder2/blob/3f0b211ce11a34562e2a2160e039ae5290b68d6b/filefinder2/_fileloader2.py#L273-L306
asmodehn/filefinder2
filefinder2/_fileloader2.py
SourceLoader.get_source
def get_source(self, name): """Concrete implementation of InspectLoader.get_source.""" path = self.get_filename(name) try: source_bytes = self.get_data(path) except OSError as exc: e = _ImportError('source not available through get_data()', name=name) e.__cause__ = exc raise e return decode_source(source_bytes)
python
def get_source(self, name): """Concrete implementation of InspectLoader.get_source.""" path = self.get_filename(name) try: source_bytes = self.get_data(path) except OSError as exc: e = _ImportError('source not available through get_data()', name=name) e.__cause__ = exc raise e return decode_source(source_bytes)
[ "def", "get_source", "(", "self", ",", "name", ")", ":", "path", "=", "self", ".", "get_filename", "(", "name", ")", "try", ":", "source_bytes", "=", "self", ".", "get_data", "(", "path", ")", "except", "OSError", "as", "exc", ":", "e", "=", "_Import...
Concrete implementation of InspectLoader.get_source.
[ "Concrete", "implementation", "of", "InspectLoader", ".", "get_source", "." ]
train
https://github.com/asmodehn/filefinder2/blob/3f0b211ce11a34562e2a2160e039ae5290b68d6b/filefinder2/_fileloader2.py#L326-L336
asmodehn/filefinder2
filefinder2/_fileloader2.py
ImpFileLoader2.exec_module
def exec_module(self, module): """Execute the module using the old imp.""" path = [os.path.dirname(module.__file__)] # file should have been resolved before (module creation) file = None try: file, pathname, description = imp.find_module(module.__name__.rpartition('.')[-1], path) module = imp.load_module(module.__name__, file, pathname, description) finally: if file: file.close()
python
def exec_module(self, module): """Execute the module using the old imp.""" path = [os.path.dirname(module.__file__)] # file should have been resolved before (module creation) file = None try: file, pathname, description = imp.find_module(module.__name__.rpartition('.')[-1], path) module = imp.load_module(module.__name__, file, pathname, description) finally: if file: file.close()
[ "def", "exec_module", "(", "self", ",", "module", ")", ":", "path", "=", "[", "os", ".", "path", ".", "dirname", "(", "module", ".", "__file__", ")", "]", "# file should have been resolved before (module creation)", "file", "=", "None", "try", ":", "file", "...
Execute the module using the old imp.
[ "Execute", "the", "module", "using", "the", "old", "imp", "." ]
train
https://github.com/asmodehn/filefinder2/blob/3f0b211ce11a34562e2a2160e039ae5290b68d6b/filefinder2/_fileloader2.py#L412-L421
asmodehn/filefinder2
filefinder2/_fileloader2.py
ImpFileLoader2.load_module
def load_module(self, name): """Load a module from a file. """ # Implementation inspired from pytest.rewrite and importlib # If there is an existing module object named 'name' in # sys.modules, the loader must use that existing module. (Otherwise, # the reload() builtin will not work correctly.) if name in sys.modules: return sys.modules[name] try: # we have already done the search, an gone through package layers # so we directly feed the latest module and correct path # to reuse the logic for choosing the proper loading behavior # TODO : double check maybe we do not need the loop here, already handled by finders in dir hierarchy # TODO : use exec_module (recent, more tested API) from here for name_idx, name_part in enumerate(name.split('.')): pkgname = ".".join(name.split('.')[:name_idx+1]) if pkgname not in sys.modules: if '.' in pkgname: # parent has to be in sys.modules. make sure it is a package, else fails if '__path__' in vars(sys.modules[pkgname.rpartition('.')[0]]): path = sys.modules[pkgname.rpartition('.')[0]].__path__ else: raise ImportError("{0} is not a package (no __path__ detected)".format(pkgname.rpartition('.')[0])) else: # using __file__ instead. should always be there. path = os.path.dirname(sys.modules[pkgname].__file__)if pkgname in sys.modules else None try: file, pathname, description = imp.find_module(pkgname.rpartition('.')[-1], path) sys.modules[pkgname] = imp.load_module(pkgname, file, pathname, description) finally: if file: file.close() except: # dont pollute the interpreter environment if we dont know what we are doing if name in sys.modules: del sys.modules[name] raise return sys.modules[name]
python
def load_module(self, name): """Load a module from a file. """ # Implementation inspired from pytest.rewrite and importlib # If there is an existing module object named 'name' in # sys.modules, the loader must use that existing module. (Otherwise, # the reload() builtin will not work correctly.) if name in sys.modules: return sys.modules[name] try: # we have already done the search, an gone through package layers # so we directly feed the latest module and correct path # to reuse the logic for choosing the proper loading behavior # TODO : double check maybe we do not need the loop here, already handled by finders in dir hierarchy # TODO : use exec_module (recent, more tested API) from here for name_idx, name_part in enumerate(name.split('.')): pkgname = ".".join(name.split('.')[:name_idx+1]) if pkgname not in sys.modules: if '.' in pkgname: # parent has to be in sys.modules. make sure it is a package, else fails if '__path__' in vars(sys.modules[pkgname.rpartition('.')[0]]): path = sys.modules[pkgname.rpartition('.')[0]].__path__ else: raise ImportError("{0} is not a package (no __path__ detected)".format(pkgname.rpartition('.')[0])) else: # using __file__ instead. should always be there. path = os.path.dirname(sys.modules[pkgname].__file__)if pkgname in sys.modules else None try: file, pathname, description = imp.find_module(pkgname.rpartition('.')[-1], path) sys.modules[pkgname] = imp.load_module(pkgname, file, pathname, description) finally: if file: file.close() except: # dont pollute the interpreter environment if we dont know what we are doing if name in sys.modules: del sys.modules[name] raise return sys.modules[name]
[ "def", "load_module", "(", "self", ",", "name", ")", ":", "# Implementation inspired from pytest.rewrite and importlib", "# If there is an existing module object named 'name' in", "# sys.modules, the loader must use that existing module. (Otherwise,", "# the reload() builtin will not work corr...
Load a module from a file.
[ "Load", "a", "module", "from", "a", "file", "." ]
train
https://github.com/asmodehn/filefinder2/blob/3f0b211ce11a34562e2a2160e039ae5290b68d6b/filefinder2/_fileloader2.py#L423-L462
scieloorg/processing
export/xml_rsps.py
analyze_xml
def analyze_xml(xml): """Analyzes `file` against packtools' XMLValidator. """ f = StringIO(xml) try: xml = packtools.XMLValidator.parse(f, sps_version='sps-1.4') except packtools.exceptions.PacktoolsError as e: logger.exception(e) summary = {} summary['dtd_is_valid'] = False summary['sps_is_valid'] = False summary['is_valid'] = False summary['parsing_error'] = True summary['dtd_errors'] = [] summary['sps_errors'] = [] return summary except XMLSyntaxError as e: logger.exception(e) summary = {} summary['dtd_is_valid'] = False summary['sps_is_valid'] = False summary['is_valid'] = False summary['parsing_error'] = True summary['dtd_errors'] = [e.message] summary['sps_errors'] = [] return summary else: summary = summarize(xml) return summary
python
def analyze_xml(xml): """Analyzes `file` against packtools' XMLValidator. """ f = StringIO(xml) try: xml = packtools.XMLValidator.parse(f, sps_version='sps-1.4') except packtools.exceptions.PacktoolsError as e: logger.exception(e) summary = {} summary['dtd_is_valid'] = False summary['sps_is_valid'] = False summary['is_valid'] = False summary['parsing_error'] = True summary['dtd_errors'] = [] summary['sps_errors'] = [] return summary except XMLSyntaxError as e: logger.exception(e) summary = {} summary['dtd_is_valid'] = False summary['sps_is_valid'] = False summary['is_valid'] = False summary['parsing_error'] = True summary['dtd_errors'] = [e.message] summary['sps_errors'] = [] return summary else: summary = summarize(xml) return summary
[ "def", "analyze_xml", "(", "xml", ")", ":", "f", "=", "StringIO", "(", "xml", ")", "try", ":", "xml", "=", "packtools", ".", "XMLValidator", ".", "parse", "(", "f", ",", "sps_version", "=", "'sps-1.4'", ")", "except", "packtools", ".", "exceptions", "....
Analyzes `file` against packtools' XMLValidator.
[ "Analyzes", "file", "against", "packtools", "XMLValidator", "." ]
train
https://github.com/scieloorg/processing/blob/629b50b45ba7a176651cd3bfcdb441dab6fddfcc/export/xml_rsps.py#L87-L118
zhexiao/ezhost
ezhost/ServerBase.py
ServerBase.init_host
def init_host(self): """ Initial host """ env.host_string = self.host_string env.user = self.host_user env.password = self.host_passwd env.key_filename = self.host_keyfile
python
def init_host(self): """ Initial host """ env.host_string = self.host_string env.user = self.host_user env.password = self.host_passwd env.key_filename = self.host_keyfile
[ "def", "init_host", "(", "self", ")", ":", "env", ".", "host_string", "=", "self", ".", "host_string", "env", ".", "user", "=", "self", ".", "host_user", "env", ".", "password", "=", "self", ".", "host_passwd", "env", ".", "key_filename", "=", "self", ...
Initial host
[ "Initial", "host" ]
train
https://github.com/zhexiao/ezhost/blob/4146bc0be14bb1bfe98ec19283d19fab420871b3/ezhost/ServerBase.py#L75-L82
zhexiao/ezhost
ezhost/ServerBase.py
ServerBase.install
def install(self): """ install the server """ try: if self.args.server is not None: server = ServerLists(self.server_type) DynamicImporter( 'ezhost', server.name, args=self.args, configure=self.configure ) else: ServerCommand(self.args) except Exception as e: raise e
python
def install(self): """ install the server """ try: if self.args.server is not None: server = ServerLists(self.server_type) DynamicImporter( 'ezhost', server.name, args=self.args, configure=self.configure ) else: ServerCommand(self.args) except Exception as e: raise e
[ "def", "install", "(", "self", ")", ":", "try", ":", "if", "self", ".", "args", ".", "server", "is", "not", "None", ":", "server", "=", "ServerLists", "(", "self", ".", "server_type", ")", "DynamicImporter", "(", "'ezhost'", ",", "server", ".", "name",...
install the server
[ "install", "the", "server" ]
train
https://github.com/zhexiao/ezhost/blob/4146bc0be14bb1bfe98ec19283d19fab420871b3/ezhost/ServerBase.py#L84-L100
MoseleyBioinformaticsLab/filehandles
filehandles/filehandles.py
filehandles
def filehandles(path, openers_list=openers, pattern='', verbose=False): """Main function that iterates over list of openers and decides which opener to use. :param str path: Path. :param list openers_list: List of openers. :param str pattern: Regular expression pattern. :param verbose: Print additional information. :type verbose: :py:obj:`True` or :py:obj:`False` :return: Filehandle(s). """ if not verbose: logging.disable(logging.VERBOSE) for opener in openers_list: try: for filehandle in opener(path=path, pattern=pattern, verbose=verbose): with closing(filehandle): yield filehandle break # use the first successful opener function except (zipfile.BadZipfile, tarfile.ReadError, GZValidationError, BZ2ValidationError, IOError, NotADirectoryError): continue else: logger.verbose('No opener found for path: "{}"'.format(path)) yield None
python
def filehandles(path, openers_list=openers, pattern='', verbose=False): """Main function that iterates over list of openers and decides which opener to use. :param str path: Path. :param list openers_list: List of openers. :param str pattern: Regular expression pattern. :param verbose: Print additional information. :type verbose: :py:obj:`True` or :py:obj:`False` :return: Filehandle(s). """ if not verbose: logging.disable(logging.VERBOSE) for opener in openers_list: try: for filehandle in opener(path=path, pattern=pattern, verbose=verbose): with closing(filehandle): yield filehandle break # use the first successful opener function except (zipfile.BadZipfile, tarfile.ReadError, GZValidationError, BZ2ValidationError, IOError, NotADirectoryError): continue else: logger.verbose('No opener found for path: "{}"'.format(path)) yield None
[ "def", "filehandles", "(", "path", ",", "openers_list", "=", "openers", ",", "pattern", "=", "''", ",", "verbose", "=", "False", ")", ":", "if", "not", "verbose", ":", "logging", ".", "disable", "(", "logging", ".", "VERBOSE", ")", "for", "opener", "in...
Main function that iterates over list of openers and decides which opener to use. :param str path: Path. :param list openers_list: List of openers. :param str pattern: Regular expression pattern. :param verbose: Print additional information. :type verbose: :py:obj:`True` or :py:obj:`False` :return: Filehandle(s).
[ "Main", "function", "that", "iterates", "over", "list", "of", "openers", "and", "decides", "which", "opener", "to", "use", "." ]
train
https://github.com/MoseleyBioinformaticsLab/filehandles/blob/dd09354a2f12c315fb5c6fa5d6919e1d7ae3e076/filehandles/filehandles.py#L80-L106
MoseleyBioinformaticsLab/filehandles
filehandles/filehandles.py
directory_opener
def directory_opener(path, pattern='', verbose=False): """Directory opener. :param str path: Path. :param str pattern: Regular expression pattern. :return: Filehandle(s). """ if not os.path.isdir(path): raise NotADirectoryError else: openers_list = [opener for opener in openers if not opener.__name__.startswith('directory')] # remove directory for root, dirlist, filelist in os.walk(path): for filename in filelist: if pattern and not re.match(pattern, filename): logger.verbose('Skipping file: {}, did not match regex pattern "{}"'.format(os.path.abspath(filename), pattern)) continue filename_path = os.path.abspath(os.path.join(root, filename)) for filehandle in filehandles(filename_path, openers_list=openers_list, pattern=pattern, verbose=verbose): yield filehandle
python
def directory_opener(path, pattern='', verbose=False): """Directory opener. :param str path: Path. :param str pattern: Regular expression pattern. :return: Filehandle(s). """ if not os.path.isdir(path): raise NotADirectoryError else: openers_list = [opener for opener in openers if not opener.__name__.startswith('directory')] # remove directory for root, dirlist, filelist in os.walk(path): for filename in filelist: if pattern and not re.match(pattern, filename): logger.verbose('Skipping file: {}, did not match regex pattern "{}"'.format(os.path.abspath(filename), pattern)) continue filename_path = os.path.abspath(os.path.join(root, filename)) for filehandle in filehandles(filename_path, openers_list=openers_list, pattern=pattern, verbose=verbose): yield filehandle
[ "def", "directory_opener", "(", "path", ",", "pattern", "=", "''", ",", "verbose", "=", "False", ")", ":", "if", "not", "os", ".", "path", ".", "isdir", "(", "path", ")", ":", "raise", "NotADirectoryError", "else", ":", "openers_list", "=", "[", "opene...
Directory opener. :param str path: Path. :param str pattern: Regular expression pattern. :return: Filehandle(s).
[ "Directory", "opener", "." ]
train
https://github.com/MoseleyBioinformaticsLab/filehandles/blob/dd09354a2f12c315fb5c6fa5d6919e1d7ae3e076/filehandles/filehandles.py#L110-L131
MoseleyBioinformaticsLab/filehandles
filehandles/filehandles.py
ziparchive_opener
def ziparchive_opener(path, pattern='', verbose=False): """Opener that opens files from zip archive.. :param str path: Path. :param str pattern: Regular expression pattern. :return: Filehandle(s). """ with zipfile.ZipFile(io.BytesIO(urlopen(path).read()), 'r') if is_url(path) else zipfile.ZipFile(path, 'r') as ziparchive: for zipinfo in ziparchive.infolist(): if not zipinfo.filename.endswith('/'): source = os.path.join(path, zipinfo.filename) if pattern and not re.match(pattern, zipinfo.filename): logger.verbose('Skipping file: {}, did not match regex pattern "{}"'.format(os.path.abspath(zipinfo.filename), pattern)) continue logger.verbose('Processing file: {}'.format(source)) filehandle = ziparchive.open(zipinfo) yield filehandle
python
def ziparchive_opener(path, pattern='', verbose=False): """Opener that opens files from zip archive.. :param str path: Path. :param str pattern: Regular expression pattern. :return: Filehandle(s). """ with zipfile.ZipFile(io.BytesIO(urlopen(path).read()), 'r') if is_url(path) else zipfile.ZipFile(path, 'r') as ziparchive: for zipinfo in ziparchive.infolist(): if not zipinfo.filename.endswith('/'): source = os.path.join(path, zipinfo.filename) if pattern and not re.match(pattern, zipinfo.filename): logger.verbose('Skipping file: {}, did not match regex pattern "{}"'.format(os.path.abspath(zipinfo.filename), pattern)) continue logger.verbose('Processing file: {}'.format(source)) filehandle = ziparchive.open(zipinfo) yield filehandle
[ "def", "ziparchive_opener", "(", "path", ",", "pattern", "=", "''", ",", "verbose", "=", "False", ")", ":", "with", "zipfile", ".", "ZipFile", "(", "io", ".", "BytesIO", "(", "urlopen", "(", "path", ")", ".", "read", "(", ")", ")", ",", "'r'", ")",...
Opener that opens files from zip archive.. :param str path: Path. :param str pattern: Regular expression pattern. :return: Filehandle(s).
[ "Opener", "that", "opens", "files", "from", "zip", "archive", ".." ]
train
https://github.com/MoseleyBioinformaticsLab/filehandles/blob/dd09354a2f12c315fb5c6fa5d6919e1d7ae3e076/filehandles/filehandles.py#L135-L153
MoseleyBioinformaticsLab/filehandles
filehandles/filehandles.py
tararchive_opener
def tararchive_opener(path, pattern='', verbose=False): """Opener that opens files from tar archive. :param str path: Path. :param str pattern: Regular expression pattern. :return: Filehandle(s). """ with tarfile.open(fileobj=io.BytesIO(urlopen(path).read())) if is_url(path) else tarfile.open(path) as tararchive: for tarinfo in tararchive: if tarinfo.isfile(): source = os.path.join(path, tarinfo.name) if pattern and not re.match(pattern, tarinfo.name): logger.verbose('Skipping file: {}, did not match regex pattern "{}"'.format(os.path.abspath(tarinfo.name), pattern)) continue logger.verbose('Processing file: {}'.format(source)) filehandle = tararchive.extractfile(tarinfo) yield filehandle
python
def tararchive_opener(path, pattern='', verbose=False): """Opener that opens files from tar archive. :param str path: Path. :param str pattern: Regular expression pattern. :return: Filehandle(s). """ with tarfile.open(fileobj=io.BytesIO(urlopen(path).read())) if is_url(path) else tarfile.open(path) as tararchive: for tarinfo in tararchive: if tarinfo.isfile(): source = os.path.join(path, tarinfo.name) if pattern and not re.match(pattern, tarinfo.name): logger.verbose('Skipping file: {}, did not match regex pattern "{}"'.format(os.path.abspath(tarinfo.name), pattern)) continue logger.verbose('Processing file: {}'.format(source)) filehandle = tararchive.extractfile(tarinfo) yield filehandle
[ "def", "tararchive_opener", "(", "path", ",", "pattern", "=", "''", ",", "verbose", "=", "False", ")", ":", "with", "tarfile", ".", "open", "(", "fileobj", "=", "io", ".", "BytesIO", "(", "urlopen", "(", "path", ")", ".", "read", "(", ")", ")", ")"...
Opener that opens files from tar archive. :param str path: Path. :param str pattern: Regular expression pattern. :return: Filehandle(s).
[ "Opener", "that", "opens", "files", "from", "tar", "archive", "." ]
train
https://github.com/MoseleyBioinformaticsLab/filehandles/blob/dd09354a2f12c315fb5c6fa5d6919e1d7ae3e076/filehandles/filehandles.py#L157-L175
MoseleyBioinformaticsLab/filehandles
filehandles/filehandles.py
gzip_opener
def gzip_opener(path, pattern='', verbose=False): """Opener that opens single gzip compressed file. :param str path: Path. :param str pattern: Regular expression pattern. :return: Filehandle(s). """ source = path if is_url(path) else os.path.abspath(path) filename = os.path.basename(path) if pattern and not re.match(pattern, filename): logger.verbose('Skipping file: {}, did not match regex pattern "{}"'.format(os.path.abspath(filename), pattern)) return try: filehandle = gzip.GzipFile(fileobj=io.BytesIO(urlopen(path).read())) if is_url(path) else gzip.open(path) filehandle.read(1) filehandle.seek(0) logger.verbose('Processing file: {}'.format(source)) yield filehandle except (OSError, IOError): raise GZValidationError
python
def gzip_opener(path, pattern='', verbose=False): """Opener that opens single gzip compressed file. :param str path: Path. :param str pattern: Regular expression pattern. :return: Filehandle(s). """ source = path if is_url(path) else os.path.abspath(path) filename = os.path.basename(path) if pattern and not re.match(pattern, filename): logger.verbose('Skipping file: {}, did not match regex pattern "{}"'.format(os.path.abspath(filename), pattern)) return try: filehandle = gzip.GzipFile(fileobj=io.BytesIO(urlopen(path).read())) if is_url(path) else gzip.open(path) filehandle.read(1) filehandle.seek(0) logger.verbose('Processing file: {}'.format(source)) yield filehandle except (OSError, IOError): raise GZValidationError
[ "def", "gzip_opener", "(", "path", ",", "pattern", "=", "''", ",", "verbose", "=", "False", ")", ":", "source", "=", "path", "if", "is_url", "(", "path", ")", "else", "os", ".", "path", ".", "abspath", "(", "path", ")", "filename", "=", "os", ".", ...
Opener that opens single gzip compressed file. :param str path: Path. :param str pattern: Regular expression pattern. :return: Filehandle(s).
[ "Opener", "that", "opens", "single", "gzip", "compressed", "file", "." ]
train
https://github.com/MoseleyBioinformaticsLab/filehandles/blob/dd09354a2f12c315fb5c6fa5d6919e1d7ae3e076/filehandles/filehandles.py#L179-L200
MoseleyBioinformaticsLab/filehandles
filehandles/filehandles.py
bz2_opener
def bz2_opener(path, pattern='', verbose=False): """Opener that opens single bz2 compressed file. :param str path: Path. :param str pattern: Regular expression pattern. :return: Filehandle(s). """ source = path if is_url(path) else os.path.abspath(path) filename = os.path.basename(path) if pattern and not re.match(pattern, filename): logger.verbose('Skipping file: {}, did not match regex pattern "{}"'.format(os.path.abspath(path), pattern)) return try: filehandle = bz2.open(io.BytesIO(urlopen(path).read())) if is_url(path) else bz2.open(path) filehandle.read(1) filehandle.seek(0) logger.verbose('Processing file: {}'.format(source)) yield filehandle except (OSError, IOError): raise BZ2ValidationError
python
def bz2_opener(path, pattern='', verbose=False): """Opener that opens single bz2 compressed file. :param str path: Path. :param str pattern: Regular expression pattern. :return: Filehandle(s). """ source = path if is_url(path) else os.path.abspath(path) filename = os.path.basename(path) if pattern and not re.match(pattern, filename): logger.verbose('Skipping file: {}, did not match regex pattern "{}"'.format(os.path.abspath(path), pattern)) return try: filehandle = bz2.open(io.BytesIO(urlopen(path).read())) if is_url(path) else bz2.open(path) filehandle.read(1) filehandle.seek(0) logger.verbose('Processing file: {}'.format(source)) yield filehandle except (OSError, IOError): raise BZ2ValidationError
[ "def", "bz2_opener", "(", "path", ",", "pattern", "=", "''", ",", "verbose", "=", "False", ")", ":", "source", "=", "path", "if", "is_url", "(", "path", ")", "else", "os", ".", "path", ".", "abspath", "(", "path", ")", "filename", "=", "os", ".", ...
Opener that opens single bz2 compressed file. :param str path: Path. :param str pattern: Regular expression pattern. :return: Filehandle(s).
[ "Opener", "that", "opens", "single", "bz2", "compressed", "file", "." ]
train
https://github.com/MoseleyBioinformaticsLab/filehandles/blob/dd09354a2f12c315fb5c6fa5d6919e1d7ae3e076/filehandles/filehandles.py#L204-L225
MoseleyBioinformaticsLab/filehandles
filehandles/filehandles.py
text_opener
def text_opener(path, pattern='', verbose=False): """Opener that opens single text file. :param str path: Path. :param str pattern: Regular expression pattern. :return: Filehandle(s). """ source = path if is_url(path) else os.path.abspath(path) filename = os.path.basename(path) if pattern and not re.match(pattern, filename): logger.verbose('Skipping file: {}, did not match regex pattern "{}"'.format(os.path.abspath(path), pattern)) return filehandle = urlopen(path) if is_url(path) else open(path) logger.verbose('Processing file: {}'.format(source)) yield filehandle
python
def text_opener(path, pattern='', verbose=False): """Opener that opens single text file. :param str path: Path. :param str pattern: Regular expression pattern. :return: Filehandle(s). """ source = path if is_url(path) else os.path.abspath(path) filename = os.path.basename(path) if pattern and not re.match(pattern, filename): logger.verbose('Skipping file: {}, did not match regex pattern "{}"'.format(os.path.abspath(path), pattern)) return filehandle = urlopen(path) if is_url(path) else open(path) logger.verbose('Processing file: {}'.format(source)) yield filehandle
[ "def", "text_opener", "(", "path", ",", "pattern", "=", "''", ",", "verbose", "=", "False", ")", ":", "source", "=", "path", "if", "is_url", "(", "path", ")", "else", "os", ".", "path", ".", "abspath", "(", "path", ")", "filename", "=", "os", ".", ...
Opener that opens single text file. :param str path: Path. :param str pattern: Regular expression pattern. :return: Filehandle(s).
[ "Opener", "that", "opens", "single", "text", "file", "." ]
train
https://github.com/MoseleyBioinformaticsLab/filehandles/blob/dd09354a2f12c315fb5c6fa5d6919e1d7ae3e076/filehandles/filehandles.py#L229-L245
cloudboss/friend
friend/strings.py
random_string
def random_string(length, charset): """ Return a random string of the given length from the given character set. :param int length: The length of string to return :param str charset: A string of characters to choose from :returns: A random string :rtype: str """ n = len(charset) return ''.join(charset[random.randrange(n)] for _ in range(length))
python
def random_string(length, charset): """ Return a random string of the given length from the given character set. :param int length: The length of string to return :param str charset: A string of characters to choose from :returns: A random string :rtype: str """ n = len(charset) return ''.join(charset[random.randrange(n)] for _ in range(length))
[ "def", "random_string", "(", "length", ",", "charset", ")", ":", "n", "=", "len", "(", "charset", ")", "return", "''", ".", "join", "(", "charset", "[", "random", ".", "randrange", "(", "n", ")", "]", "for", "_", "in", "range", "(", "length", ")", ...
Return a random string of the given length from the given character set. :param int length: The length of string to return :param str charset: A string of characters to choose from :returns: A random string :rtype: str
[ "Return", "a", "random", "string", "of", "the", "given", "length", "from", "the", "given", "character", "set", "." ]
train
https://github.com/cloudboss/friend/blob/3357e6ec849552e3ae9ed28017ff0926e4006e4e/friend/strings.py#L38-L49
cloudboss/friend
friend/strings.py
random_alphanum
def random_alphanum(length): """ Return a random string of ASCII letters and digits. :param int length: The length of string to return :returns: A random string :rtype: str """ charset = string.ascii_letters + string.digits return random_string(length, charset)
python
def random_alphanum(length): """ Return a random string of ASCII letters and digits. :param int length: The length of string to return :returns: A random string :rtype: str """ charset = string.ascii_letters + string.digits return random_string(length, charset)
[ "def", "random_alphanum", "(", "length", ")", ":", "charset", "=", "string", ".", "ascii_letters", "+", "string", ".", "digits", "return", "random_string", "(", "length", ",", "charset", ")" ]
Return a random string of ASCII letters and digits. :param int length: The length of string to return :returns: A random string :rtype: str
[ "Return", "a", "random", "string", "of", "ASCII", "letters", "and", "digits", "." ]
train
https://github.com/cloudboss/friend/blob/3357e6ec849552e3ae9ed28017ff0926e4006e4e/friend/strings.py#L52-L61
cloudboss/friend
friend/strings.py
random_hex
def random_hex(length): """ Return a random hex string. :param int length: The length of string to return :returns: A random string :rtype: str """ charset = ''.join(set(string.hexdigits.lower())) return random_string(length, charset)
python
def random_hex(length): """ Return a random hex string. :param int length: The length of string to return :returns: A random string :rtype: str """ charset = ''.join(set(string.hexdigits.lower())) return random_string(length, charset)
[ "def", "random_hex", "(", "length", ")", ":", "charset", "=", "''", ".", "join", "(", "set", "(", "string", ".", "hexdigits", ".", "lower", "(", ")", ")", ")", "return", "random_string", "(", "length", ",", "charset", ")" ]
Return a random hex string. :param int length: The length of string to return :returns: A random string :rtype: str
[ "Return", "a", "random", "hex", "string", "." ]
train
https://github.com/cloudboss/friend/blob/3357e6ec849552e3ae9ed28017ff0926e4006e4e/friend/strings.py#L64-L73