id
int32
0
252k
repo
stringlengths
7
55
path
stringlengths
4
127
func_name
stringlengths
1
88
original_string
stringlengths
75
19.8k
language
stringclasses
1 value
code
stringlengths
51
19.8k
code_tokens
list
docstring
stringlengths
3
17.3k
docstring_tokens
list
sha
stringlengths
40
40
url
stringlengths
87
242
228,600
googleapis/google-auth-library-python
google/oauth2/service_account.py
Credentials.from_service_account_info
def from_service_account_info(cls, info, **kwargs): """Creates a Credentials instance from parsed service account info. Args: info (Mapping[str, str]): The service account info in Google format. kwargs: Additional arguments to pass to the constructor. Returns: google.auth.service_account.Credentials: The constructed credentials. Raises: ValueError: If the info is not in the expected format. """ signer = _service_account_info.from_dict( info, require=['client_email', 'token_uri']) return cls._from_signer_and_info(signer, info, **kwargs)
python
def from_service_account_info(cls, info, **kwargs): signer = _service_account_info.from_dict( info, require=['client_email', 'token_uri']) return cls._from_signer_and_info(signer, info, **kwargs)
[ "def", "from_service_account_info", "(", "cls", ",", "info", ",", "*", "*", "kwargs", ")", ":", "signer", "=", "_service_account_info", ".", "from_dict", "(", "info", ",", "require", "=", "[", "'client_email'", ",", "'token_uri'", "]", ")", "return", "cls", ...
Creates a Credentials instance from parsed service account info. Args: info (Mapping[str, str]): The service account info in Google format. kwargs: Additional arguments to pass to the constructor. Returns: google.auth.service_account.Credentials: The constructed credentials. Raises: ValueError: If the info is not in the expected format.
[ "Creates", "a", "Credentials", "instance", "from", "parsed", "service", "account", "info", "." ]
2c6ad78917e936f38f87c946209c8031166dc96e
https://github.com/googleapis/google-auth-library-python/blob/2c6ad78917e936f38f87c946209c8031166dc96e/google/oauth2/service_account.py#L177-L194
228,601
googleapis/google-auth-library-python
google/oauth2/service_account.py
Credentials.from_service_account_file
def from_service_account_file(cls, filename, **kwargs): """Creates a Credentials instance from a service account json file. Args: filename (str): The path to the service account json file. kwargs: Additional arguments to pass to the constructor. Returns: google.auth.service_account.Credentials: The constructed credentials. """ info, signer = _service_account_info.from_filename( filename, require=['client_email', 'token_uri']) return cls._from_signer_and_info(signer, info, **kwargs)
python
def from_service_account_file(cls, filename, **kwargs): info, signer = _service_account_info.from_filename( filename, require=['client_email', 'token_uri']) return cls._from_signer_and_info(signer, info, **kwargs)
[ "def", "from_service_account_file", "(", "cls", ",", "filename", ",", "*", "*", "kwargs", ")", ":", "info", ",", "signer", "=", "_service_account_info", ".", "from_filename", "(", "filename", ",", "require", "=", "[", "'client_email'", ",", "'token_uri'", "]",...
Creates a Credentials instance from a service account json file. Args: filename (str): The path to the service account json file. kwargs: Additional arguments to pass to the constructor. Returns: google.auth.service_account.Credentials: The constructed credentials.
[ "Creates", "a", "Credentials", "instance", "from", "a", "service", "account", "json", "file", "." ]
2c6ad78917e936f38f87c946209c8031166dc96e
https://github.com/googleapis/google-auth-library-python/blob/2c6ad78917e936f38f87c946209c8031166dc96e/google/oauth2/service_account.py#L197-L210
228,602
googleapis/google-auth-library-python
google/oauth2/service_account.py
Credentials.with_subject
def with_subject(self, subject): """Create a copy of these credentials with the specified subject. Args: subject (str): The subject claim. Returns: google.auth.service_account.Credentials: A new credentials instance. """ return self.__class__( self._signer, service_account_email=self._service_account_email, scopes=self._scopes, token_uri=self._token_uri, subject=subject, project_id=self._project_id, additional_claims=self._additional_claims.copy())
python
def with_subject(self, subject): return self.__class__( self._signer, service_account_email=self._service_account_email, scopes=self._scopes, token_uri=self._token_uri, subject=subject, project_id=self._project_id, additional_claims=self._additional_claims.copy())
[ "def", "with_subject", "(", "self", ",", "subject", ")", ":", "return", "self", ".", "__class__", "(", "self", ".", "_signer", ",", "service_account_email", "=", "self", ".", "_service_account_email", ",", "scopes", "=", "self", ".", "_scopes", ",", "token_u...
Create a copy of these credentials with the specified subject. Args: subject (str): The subject claim. Returns: google.auth.service_account.Credentials: A new credentials instance.
[ "Create", "a", "copy", "of", "these", "credentials", "with", "the", "specified", "subject", "." ]
2c6ad78917e936f38f87c946209c8031166dc96e
https://github.com/googleapis/google-auth-library-python/blob/2c6ad78917e936f38f87c946209c8031166dc96e/google/oauth2/service_account.py#L242-L259
228,603
googleapis/google-auth-library-python
google/oauth2/service_account.py
IDTokenCredentials.with_target_audience
def with_target_audience(self, target_audience): """Create a copy of these credentials with the specified target audience. Args: target_audience (str): The intended audience for these credentials, used when requesting the ID Token. Returns: google.auth.service_account.IDTokenCredentials: A new credentials instance. """ return self.__class__( self._signer, service_account_email=self._service_account_email, token_uri=self._token_uri, target_audience=target_audience, additional_claims=self._additional_claims.copy())
python
def with_target_audience(self, target_audience): return self.__class__( self._signer, service_account_email=self._service_account_email, token_uri=self._token_uri, target_audience=target_audience, additional_claims=self._additional_claims.copy())
[ "def", "with_target_audience", "(", "self", ",", "target_audience", ")", ":", "return", "self", ".", "__class__", "(", "self", ".", "_signer", ",", "service_account_email", "=", "self", ".", "_service_account_email", ",", "token_uri", "=", "self", ".", "_token_u...
Create a copy of these credentials with the specified target audience. Args: target_audience (str): The intended audience for these credentials, used when requesting the ID Token. Returns: google.auth.service_account.IDTokenCredentials: A new credentials instance.
[ "Create", "a", "copy", "of", "these", "credentials", "with", "the", "specified", "target", "audience", "." ]
2c6ad78917e936f38f87c946209c8031166dc96e
https://github.com/googleapis/google-auth-library-python/blob/2c6ad78917e936f38f87c946209c8031166dc96e/google/oauth2/service_account.py#L467-L484
228,604
googleapis/google-auth-library-python
google/auth/transport/requests.py
AuthorizedSession.request
def request(self, method, url, data=None, headers=None, **kwargs): """Implementation of Requests' request.""" # pylint: disable=arguments-differ # Requests has a ton of arguments to request, but only two # (method, url) are required. We pass through all of the other # arguments to super, so no need to exhaustively list them here. # Use a kwarg for this instead of an attribute to maintain # thread-safety. _credential_refresh_attempt = kwargs.pop( '_credential_refresh_attempt', 0) # Make a copy of the headers. They will be modified by the credentials # and we want to pass the original headers if we recurse. request_headers = headers.copy() if headers is not None else {} self.credentials.before_request( self._auth_request, method, url, request_headers) response = super(AuthorizedSession, self).request( method, url, data=data, headers=request_headers, **kwargs) # If the response indicated that the credentials needed to be # refreshed, then refresh the credentials and re-attempt the # request. # A stored token may expire between the time it is retrieved and # the time the request is made, so we may need to try twice. if (response.status_code in self._refresh_status_codes and _credential_refresh_attempt < self._max_refresh_attempts): _LOGGER.info( 'Refreshing credentials due to a %s response. Attempt %s/%s.', response.status_code, _credential_refresh_attempt + 1, self._max_refresh_attempts) auth_request_with_timeout = functools.partial( self._auth_request, timeout=self._refresh_timeout) self.credentials.refresh(auth_request_with_timeout) # Recurse. Pass in the original headers, not our modified set. return self.request( method, url, data=data, headers=headers, _credential_refresh_attempt=_credential_refresh_attempt + 1, **kwargs) return response
python
def request(self, method, url, data=None, headers=None, **kwargs): # pylint: disable=arguments-differ # Requests has a ton of arguments to request, but only two # (method, url) are required. We pass through all of the other # arguments to super, so no need to exhaustively list them here. # Use a kwarg for this instead of an attribute to maintain # thread-safety. _credential_refresh_attempt = kwargs.pop( '_credential_refresh_attempt', 0) # Make a copy of the headers. They will be modified by the credentials # and we want to pass the original headers if we recurse. request_headers = headers.copy() if headers is not None else {} self.credentials.before_request( self._auth_request, method, url, request_headers) response = super(AuthorizedSession, self).request( method, url, data=data, headers=request_headers, **kwargs) # If the response indicated that the credentials needed to be # refreshed, then refresh the credentials and re-attempt the # request. # A stored token may expire between the time it is retrieved and # the time the request is made, so we may need to try twice. if (response.status_code in self._refresh_status_codes and _credential_refresh_attempt < self._max_refresh_attempts): _LOGGER.info( 'Refreshing credentials due to a %s response. Attempt %s/%s.', response.status_code, _credential_refresh_attempt + 1, self._max_refresh_attempts) auth_request_with_timeout = functools.partial( self._auth_request, timeout=self._refresh_timeout) self.credentials.refresh(auth_request_with_timeout) # Recurse. Pass in the original headers, not our modified set. return self.request( method, url, data=data, headers=headers, _credential_refresh_attempt=_credential_refresh_attempt + 1, **kwargs) return response
[ "def", "request", "(", "self", ",", "method", ",", "url", ",", "data", "=", "None", ",", "headers", "=", "None", ",", "*", "*", "kwargs", ")", ":", "# pylint: disable=arguments-differ", "# Requests has a ton of arguments to request, but only two", "# (method, url) are...
Implementation of Requests' request.
[ "Implementation", "of", "Requests", "request", "." ]
2c6ad78917e936f38f87c946209c8031166dc96e
https://github.com/googleapis/google-auth-library-python/blob/2c6ad78917e936f38f87c946209c8031166dc96e/google/auth/transport/requests.py#L188-L233
228,605
googleapis/google-auth-library-python
google/auth/credentials.py
with_scopes_if_required
def with_scopes_if_required(credentials, scopes): """Creates a copy of the credentials with scopes if scoping is required. This helper function is useful when you do not know (or care to know) the specific type of credentials you are using (such as when you use :func:`google.auth.default`). This function will call :meth:`Scoped.with_scopes` if the credentials are scoped credentials and if the credentials require scoping. Otherwise, it will return the credentials as-is. Args: credentials (google.auth.credentials.Credentials): The credentials to scope if necessary. scopes (Sequence[str]): The list of scopes to use. Returns: google.auth.credentials.Credentials: Either a new set of scoped credentials, or the passed in credentials instance if no scoping was required. """ if isinstance(credentials, Scoped) and credentials.requires_scopes: return credentials.with_scopes(scopes) else: return credentials
python
def with_scopes_if_required(credentials, scopes): if isinstance(credentials, Scoped) and credentials.requires_scopes: return credentials.with_scopes(scopes) else: return credentials
[ "def", "with_scopes_if_required", "(", "credentials", ",", "scopes", ")", ":", "if", "isinstance", "(", "credentials", ",", "Scoped", ")", "and", "credentials", ".", "requires_scopes", ":", "return", "credentials", ".", "with_scopes", "(", "scopes", ")", "else",...
Creates a copy of the credentials with scopes if scoping is required. This helper function is useful when you do not know (or care to know) the specific type of credentials you are using (such as when you use :func:`google.auth.default`). This function will call :meth:`Scoped.with_scopes` if the credentials are scoped credentials and if the credentials require scoping. Otherwise, it will return the credentials as-is. Args: credentials (google.auth.credentials.Credentials): The credentials to scope if necessary. scopes (Sequence[str]): The list of scopes to use. Returns: google.auth.credentials.Credentials: Either a new set of scoped credentials, or the passed in credentials instance if no scoping was required.
[ "Creates", "a", "copy", "of", "the", "credentials", "with", "scopes", "if", "scoping", "is", "required", "." ]
2c6ad78917e936f38f87c946209c8031166dc96e
https://github.com/googleapis/google-auth-library-python/blob/2c6ad78917e936f38f87c946209c8031166dc96e/google/auth/credentials.py#L266-L289
228,606
googleapis/google-auth-library-python
google/auth/crypt/_python_rsa.py
_bit_list_to_bytes
def _bit_list_to_bytes(bit_list): """Converts an iterable of 1s and 0s to bytes. Combines the list 8 at a time, treating each group of 8 bits as a single byte. Args: bit_list (Sequence): Sequence of 1s and 0s. Returns: bytes: The decoded bytes. """ num_bits = len(bit_list) byte_vals = bytearray() for start in six.moves.xrange(0, num_bits, 8): curr_bits = bit_list[start:start + 8] char_val = sum( val * digit for val, digit in six.moves.zip(_POW2, curr_bits)) byte_vals.append(char_val) return bytes(byte_vals)
python
def _bit_list_to_bytes(bit_list): num_bits = len(bit_list) byte_vals = bytearray() for start in six.moves.xrange(0, num_bits, 8): curr_bits = bit_list[start:start + 8] char_val = sum( val * digit for val, digit in six.moves.zip(_POW2, curr_bits)) byte_vals.append(char_val) return bytes(byte_vals)
[ "def", "_bit_list_to_bytes", "(", "bit_list", ")", ":", "num_bits", "=", "len", "(", "bit_list", ")", "byte_vals", "=", "bytearray", "(", ")", "for", "start", "in", "six", ".", "moves", ".", "xrange", "(", "0", ",", "num_bits", ",", "8", ")", ":", "c...
Converts an iterable of 1s and 0s to bytes. Combines the list 8 at a time, treating each group of 8 bits as a single byte. Args: bit_list (Sequence): Sequence of 1s and 0s. Returns: bytes: The decoded bytes.
[ "Converts", "an", "iterable", "of", "1s", "and", "0s", "to", "bytes", "." ]
2c6ad78917e936f38f87c946209c8031166dc96e
https://github.com/googleapis/google-auth-library-python/blob/2c6ad78917e936f38f87c946209c8031166dc96e/google/auth/crypt/_python_rsa.py#L43-L62
228,607
googleapis/google-auth-library-python
google/auth/crypt/_python_rsa.py
RSASigner.from_string
def from_string(cls, key, key_id=None): """Construct an Signer instance from a private key in PEM format. Args: key (str): Private key in PEM format. key_id (str): An optional key id used to identify the private key. Returns: google.auth.crypt.Signer: The constructed signer. Raises: ValueError: If the key cannot be parsed as PKCS#1 or PKCS#8 in PEM format. """ key = _helpers.from_bytes(key) # PEM expects str in Python 3 marker_id, key_bytes = pem.readPemBlocksFromFile( six.StringIO(key), _PKCS1_MARKER, _PKCS8_MARKER) # Key is in pkcs1 format. if marker_id == 0: private_key = rsa.key.PrivateKey.load_pkcs1( key_bytes, format='DER') # Key is in pkcs8. elif marker_id == 1: key_info, remaining = decoder.decode( key_bytes, asn1Spec=_PKCS8_SPEC) if remaining != b'': raise ValueError('Unused bytes', remaining) private_key_info = key_info.getComponentByName('privateKey') private_key = rsa.key.PrivateKey.load_pkcs1( private_key_info.asOctets(), format='DER') else: raise ValueError('No key could be detected.') return cls(private_key, key_id=key_id)
python
def from_string(cls, key, key_id=None): key = _helpers.from_bytes(key) # PEM expects str in Python 3 marker_id, key_bytes = pem.readPemBlocksFromFile( six.StringIO(key), _PKCS1_MARKER, _PKCS8_MARKER) # Key is in pkcs1 format. if marker_id == 0: private_key = rsa.key.PrivateKey.load_pkcs1( key_bytes, format='DER') # Key is in pkcs8. elif marker_id == 1: key_info, remaining = decoder.decode( key_bytes, asn1Spec=_PKCS8_SPEC) if remaining != b'': raise ValueError('Unused bytes', remaining) private_key_info = key_info.getComponentByName('privateKey') private_key = rsa.key.PrivateKey.load_pkcs1( private_key_info.asOctets(), format='DER') else: raise ValueError('No key could be detected.') return cls(private_key, key_id=key_id)
[ "def", "from_string", "(", "cls", ",", "key", ",", "key_id", "=", "None", ")", ":", "key", "=", "_helpers", ".", "from_bytes", "(", "key", ")", "# PEM expects str in Python 3", "marker_id", ",", "key_bytes", "=", "pem", ".", "readPemBlocksFromFile", "(", "si...
Construct an Signer instance from a private key in PEM format. Args: key (str): Private key in PEM format. key_id (str): An optional key id used to identify the private key. Returns: google.auth.crypt.Signer: The constructed signer. Raises: ValueError: If the key cannot be parsed as PKCS#1 or PKCS#8 in PEM format.
[ "Construct", "an", "Signer", "instance", "from", "a", "private", "key", "in", "PEM", "format", "." ]
2c6ad78917e936f38f87c946209c8031166dc96e
https://github.com/googleapis/google-auth-library-python/blob/2c6ad78917e936f38f87c946209c8031166dc96e/google/auth/crypt/_python_rsa.py#L142-L176
228,608
googleapis/google-auth-library-python
google/oauth2/_client.py
_handle_error_response
def _handle_error_response(response_body): """"Translates an error response into an exception. Args: response_body (str): The decoded response data. Raises: google.auth.exceptions.RefreshError """ try: error_data = json.loads(response_body) error_details = '{}: {}'.format( error_data['error'], error_data.get('error_description')) # If no details could be extracted, use the response data. except (KeyError, ValueError): error_details = response_body raise exceptions.RefreshError( error_details, response_body)
python
def _handle_error_response(response_body): try: error_data = json.loads(response_body) error_details = '{}: {}'.format( error_data['error'], error_data.get('error_description')) # If no details could be extracted, use the response data. except (KeyError, ValueError): error_details = response_body raise exceptions.RefreshError( error_details, response_body)
[ "def", "_handle_error_response", "(", "response_body", ")", ":", "try", ":", "error_data", "=", "json", ".", "loads", "(", "response_body", ")", "error_details", "=", "'{}: {}'", ".", "format", "(", "error_data", "[", "'error'", "]", ",", "error_data", ".", ...
Translates an error response into an exception. Args: response_body (str): The decoded response data. Raises: google.auth.exceptions.RefreshError
[ "Translates", "an", "error", "response", "into", "an", "exception", "." ]
2c6ad78917e936f38f87c946209c8031166dc96e
https://github.com/googleapis/google-auth-library-python/blob/2c6ad78917e936f38f87c946209c8031166dc96e/google/oauth2/_client.py#L42-L61
228,609
googleapis/google-auth-library-python
google/oauth2/_client.py
_parse_expiry
def _parse_expiry(response_data): """Parses the expiry field from a response into a datetime. Args: response_data (Mapping): The JSON-parsed response data. Returns: Optional[datetime]: The expiration or ``None`` if no expiration was specified. """ expires_in = response_data.get('expires_in', None) if expires_in is not None: return _helpers.utcnow() + datetime.timedelta( seconds=expires_in) else: return None
python
def _parse_expiry(response_data): expires_in = response_data.get('expires_in', None) if expires_in is not None: return _helpers.utcnow() + datetime.timedelta( seconds=expires_in) else: return None
[ "def", "_parse_expiry", "(", "response_data", ")", ":", "expires_in", "=", "response_data", ".", "get", "(", "'expires_in'", ",", "None", ")", "if", "expires_in", "is", "not", "None", ":", "return", "_helpers", ".", "utcnow", "(", ")", "+", "datetime", "."...
Parses the expiry field from a response into a datetime. Args: response_data (Mapping): The JSON-parsed response data. Returns: Optional[datetime]: The expiration or ``None`` if no expiration was specified.
[ "Parses", "the", "expiry", "field", "from", "a", "response", "into", "a", "datetime", "." ]
2c6ad78917e936f38f87c946209c8031166dc96e
https://github.com/googleapis/google-auth-library-python/blob/2c6ad78917e936f38f87c946209c8031166dc96e/google/oauth2/_client.py#L64-L80
228,610
googleapis/google-auth-library-python
google/oauth2/_client.py
_token_endpoint_request
def _token_endpoint_request(request, token_uri, body): """Makes a request to the OAuth 2.0 authorization server's token endpoint. Args: request (google.auth.transport.Request): A callable used to make HTTP requests. token_uri (str): The OAuth 2.0 authorizations server's token endpoint URI. body (Mapping[str, str]): The parameters to send in the request body. Returns: Mapping[str, str]: The JSON-decoded response data. Raises: google.auth.exceptions.RefreshError: If the token endpoint returned an error. """ body = urllib.parse.urlencode(body) headers = { 'content-type': _URLENCODED_CONTENT_TYPE, } response = request( method='POST', url=token_uri, headers=headers, body=body) response_body = response.data.decode('utf-8') if response.status != http_client.OK: _handle_error_response(response_body) response_data = json.loads(response_body) return response_data
python
def _token_endpoint_request(request, token_uri, body): body = urllib.parse.urlencode(body) headers = { 'content-type': _URLENCODED_CONTENT_TYPE, } response = request( method='POST', url=token_uri, headers=headers, body=body) response_body = response.data.decode('utf-8') if response.status != http_client.OK: _handle_error_response(response_body) response_data = json.loads(response_body) return response_data
[ "def", "_token_endpoint_request", "(", "request", ",", "token_uri", ",", "body", ")", ":", "body", "=", "urllib", ".", "parse", ".", "urlencode", "(", "body", ")", "headers", "=", "{", "'content-type'", ":", "_URLENCODED_CONTENT_TYPE", ",", "}", "response", ...
Makes a request to the OAuth 2.0 authorization server's token endpoint. Args: request (google.auth.transport.Request): A callable used to make HTTP requests. token_uri (str): The OAuth 2.0 authorizations server's token endpoint URI. body (Mapping[str, str]): The parameters to send in the request body. Returns: Mapping[str, str]: The JSON-decoded response data. Raises: google.auth.exceptions.RefreshError: If the token endpoint returned an error.
[ "Makes", "a", "request", "to", "the", "OAuth", "2", ".", "0", "authorization", "server", "s", "token", "endpoint", "." ]
2c6ad78917e936f38f87c946209c8031166dc96e
https://github.com/googleapis/google-auth-library-python/blob/2c6ad78917e936f38f87c946209c8031166dc96e/google/oauth2/_client.py#L83-L115
228,611
googleapis/google-auth-library-python
google/oauth2/_client.py
jwt_grant
def jwt_grant(request, token_uri, assertion): """Implements the JWT Profile for OAuth 2.0 Authorization Grants. For more details, see `rfc7523 section 4`_. Args: request (google.auth.transport.Request): A callable used to make HTTP requests. token_uri (str): The OAuth 2.0 authorizations server's token endpoint URI. assertion (str): The OAuth 2.0 assertion. Returns: Tuple[str, Optional[datetime], Mapping[str, str]]: The access token, expiration, and additional data returned by the token endpoint. Raises: google.auth.exceptions.RefreshError: If the token endpoint returned an error. .. _rfc7523 section 4: https://tools.ietf.org/html/rfc7523#section-4 """ body = { 'assertion': assertion, 'grant_type': _JWT_GRANT_TYPE, } response_data = _token_endpoint_request(request, token_uri, body) try: access_token = response_data['access_token'] except KeyError as caught_exc: new_exc = exceptions.RefreshError( 'No access token in response.', response_data) six.raise_from(new_exc, caught_exc) expiry = _parse_expiry(response_data) return access_token, expiry, response_data
python
def jwt_grant(request, token_uri, assertion): body = { 'assertion': assertion, 'grant_type': _JWT_GRANT_TYPE, } response_data = _token_endpoint_request(request, token_uri, body) try: access_token = response_data['access_token'] except KeyError as caught_exc: new_exc = exceptions.RefreshError( 'No access token in response.', response_data) six.raise_from(new_exc, caught_exc) expiry = _parse_expiry(response_data) return access_token, expiry, response_data
[ "def", "jwt_grant", "(", "request", ",", "token_uri", ",", "assertion", ")", ":", "body", "=", "{", "'assertion'", ":", "assertion", ",", "'grant_type'", ":", "_JWT_GRANT_TYPE", ",", "}", "response_data", "=", "_token_endpoint_request", "(", "request", ",", "t...
Implements the JWT Profile for OAuth 2.0 Authorization Grants. For more details, see `rfc7523 section 4`_. Args: request (google.auth.transport.Request): A callable used to make HTTP requests. token_uri (str): The OAuth 2.0 authorizations server's token endpoint URI. assertion (str): The OAuth 2.0 assertion. Returns: Tuple[str, Optional[datetime], Mapping[str, str]]: The access token, expiration, and additional data returned by the token endpoint. Raises: google.auth.exceptions.RefreshError: If the token endpoint returned an error. .. _rfc7523 section 4: https://tools.ietf.org/html/rfc7523#section-4
[ "Implements", "the", "JWT", "Profile", "for", "OAuth", "2", ".", "0", "Authorization", "Grants", "." ]
2c6ad78917e936f38f87c946209c8031166dc96e
https://github.com/googleapis/google-auth-library-python/blob/2c6ad78917e936f38f87c946209c8031166dc96e/google/oauth2/_client.py#L118-L156
228,612
googleapis/google-auth-library-python
google/oauth2/_client.py
id_token_jwt_grant
def id_token_jwt_grant(request, token_uri, assertion): """Implements the JWT Profile for OAuth 2.0 Authorization Grants, but requests an OpenID Connect ID Token instead of an access token. This is a variant on the standard JWT Profile that is currently unique to Google. This was added for the benefit of authenticating to services that require ID Tokens instead of access tokens or JWT bearer tokens. Args: request (google.auth.transport.Request): A callable used to make HTTP requests. token_uri (str): The OAuth 2.0 authorization server's token endpoint URI. assertion (str): JWT token signed by a service account. The token's payload must include a ``target_audience`` claim. Returns: Tuple[str, Optional[datetime], Mapping[str, str]]: The (encoded) Open ID Connect ID Token, expiration, and additional data returned by the endpoint. Raises: google.auth.exceptions.RefreshError: If the token endpoint returned an error. """ body = { 'assertion': assertion, 'grant_type': _JWT_GRANT_TYPE, } response_data = _token_endpoint_request(request, token_uri, body) try: id_token = response_data['id_token'] except KeyError as caught_exc: new_exc = exceptions.RefreshError( 'No ID token in response.', response_data) six.raise_from(new_exc, caught_exc) payload = jwt.decode(id_token, verify=False) expiry = datetime.datetime.utcfromtimestamp(payload['exp']) return id_token, expiry, response_data
python
def id_token_jwt_grant(request, token_uri, assertion): body = { 'assertion': assertion, 'grant_type': _JWT_GRANT_TYPE, } response_data = _token_endpoint_request(request, token_uri, body) try: id_token = response_data['id_token'] except KeyError as caught_exc: new_exc = exceptions.RefreshError( 'No ID token in response.', response_data) six.raise_from(new_exc, caught_exc) payload = jwt.decode(id_token, verify=False) expiry = datetime.datetime.utcfromtimestamp(payload['exp']) return id_token, expiry, response_data
[ "def", "id_token_jwt_grant", "(", "request", ",", "token_uri", ",", "assertion", ")", ":", "body", "=", "{", "'assertion'", ":", "assertion", ",", "'grant_type'", ":", "_JWT_GRANT_TYPE", ",", "}", "response_data", "=", "_token_endpoint_request", "(", "request", ...
Implements the JWT Profile for OAuth 2.0 Authorization Grants, but requests an OpenID Connect ID Token instead of an access token. This is a variant on the standard JWT Profile that is currently unique to Google. This was added for the benefit of authenticating to services that require ID Tokens instead of access tokens or JWT bearer tokens. Args: request (google.auth.transport.Request): A callable used to make HTTP requests. token_uri (str): The OAuth 2.0 authorization server's token endpoint URI. assertion (str): JWT token signed by a service account. The token's payload must include a ``target_audience`` claim. Returns: Tuple[str, Optional[datetime], Mapping[str, str]]: The (encoded) Open ID Connect ID Token, expiration, and additional data returned by the endpoint. Raises: google.auth.exceptions.RefreshError: If the token endpoint returned an error.
[ "Implements", "the", "JWT", "Profile", "for", "OAuth", "2", ".", "0", "Authorization", "Grants", "but", "requests", "an", "OpenID", "Connect", "ID", "Token", "instead", "of", "an", "access", "token", "." ]
2c6ad78917e936f38f87c946209c8031166dc96e
https://github.com/googleapis/google-auth-library-python/blob/2c6ad78917e936f38f87c946209c8031166dc96e/google/oauth2/_client.py#L159-L201
228,613
googleapis/google-auth-library-python
google/oauth2/_client.py
refresh_grant
def refresh_grant(request, token_uri, refresh_token, client_id, client_secret): """Implements the OAuth 2.0 refresh token grant. For more details, see `rfc678 section 6`_. Args: request (google.auth.transport.Request): A callable used to make HTTP requests. token_uri (str): The OAuth 2.0 authorizations server's token endpoint URI. refresh_token (str): The refresh token to use to get a new access token. client_id (str): The OAuth 2.0 application's client ID. client_secret (str): The Oauth 2.0 appliaction's client secret. Returns: Tuple[str, Optional[str], Optional[datetime], Mapping[str, str]]: The access token, new refresh token, expiration, and additional data returned by the token endpoint. Raises: google.auth.exceptions.RefreshError: If the token endpoint returned an error. .. _rfc6748 section 6: https://tools.ietf.org/html/rfc6749#section-6 """ body = { 'grant_type': _REFRESH_GRANT_TYPE, 'client_id': client_id, 'client_secret': client_secret, 'refresh_token': refresh_token, } response_data = _token_endpoint_request(request, token_uri, body) try: access_token = response_data['access_token'] except KeyError as caught_exc: new_exc = exceptions.RefreshError( 'No access token in response.', response_data) six.raise_from(new_exc, caught_exc) refresh_token = response_data.get('refresh_token', refresh_token) expiry = _parse_expiry(response_data) return access_token, refresh_token, expiry, response_data
python
def refresh_grant(request, token_uri, refresh_token, client_id, client_secret): body = { 'grant_type': _REFRESH_GRANT_TYPE, 'client_id': client_id, 'client_secret': client_secret, 'refresh_token': refresh_token, } response_data = _token_endpoint_request(request, token_uri, body) try: access_token = response_data['access_token'] except KeyError as caught_exc: new_exc = exceptions.RefreshError( 'No access token in response.', response_data) six.raise_from(new_exc, caught_exc) refresh_token = response_data.get('refresh_token', refresh_token) expiry = _parse_expiry(response_data) return access_token, refresh_token, expiry, response_data
[ "def", "refresh_grant", "(", "request", ",", "token_uri", ",", "refresh_token", ",", "client_id", ",", "client_secret", ")", ":", "body", "=", "{", "'grant_type'", ":", "_REFRESH_GRANT_TYPE", ",", "'client_id'", ":", "client_id", ",", "'client_secret'", ":", "cl...
Implements the OAuth 2.0 refresh token grant. For more details, see `rfc678 section 6`_. Args: request (google.auth.transport.Request): A callable used to make HTTP requests. token_uri (str): The OAuth 2.0 authorizations server's token endpoint URI. refresh_token (str): The refresh token to use to get a new access token. client_id (str): The OAuth 2.0 application's client ID. client_secret (str): The Oauth 2.0 appliaction's client secret. Returns: Tuple[str, Optional[str], Optional[datetime], Mapping[str, str]]: The access token, new refresh token, expiration, and additional data returned by the token endpoint. Raises: google.auth.exceptions.RefreshError: If the token endpoint returned an error. .. _rfc6748 section 6: https://tools.ietf.org/html/rfc6749#section-6
[ "Implements", "the", "OAuth", "2", ".", "0", "refresh", "token", "grant", "." ]
2c6ad78917e936f38f87c946209c8031166dc96e
https://github.com/googleapis/google-auth-library-python/blob/2c6ad78917e936f38f87c946209c8031166dc96e/google/oauth2/_client.py#L204-L249
228,614
googleapis/google-auth-library-python
google/auth/_cloud_sdk.py
get_config_path
def get_config_path(): """Returns the absolute path the the Cloud SDK's configuration directory. Returns: str: The Cloud SDK config path. """ # If the path is explicitly set, return that. try: return os.environ[environment_vars.CLOUD_SDK_CONFIG_DIR] except KeyError: pass # Non-windows systems store this at ~/.config/gcloud if os.name != 'nt': return os.path.join( os.path.expanduser('~'), '.config', _CONFIG_DIRECTORY) # Windows systems store config at %APPDATA%\gcloud else: try: return os.path.join( os.environ[_WINDOWS_CONFIG_ROOT_ENV_VAR], _CONFIG_DIRECTORY) except KeyError: # This should never happen unless someone is really # messing with things, but we'll cover the case anyway. drive = os.environ.get('SystemDrive', 'C:') return os.path.join( drive, '\\', _CONFIG_DIRECTORY)
python
def get_config_path(): # If the path is explicitly set, return that. try: return os.environ[environment_vars.CLOUD_SDK_CONFIG_DIR] except KeyError: pass # Non-windows systems store this at ~/.config/gcloud if os.name != 'nt': return os.path.join( os.path.expanduser('~'), '.config', _CONFIG_DIRECTORY) # Windows systems store config at %APPDATA%\gcloud else: try: return os.path.join( os.environ[_WINDOWS_CONFIG_ROOT_ENV_VAR], _CONFIG_DIRECTORY) except KeyError: # This should never happen unless someone is really # messing with things, but we'll cover the case anyway. drive = os.environ.get('SystemDrive', 'C:') return os.path.join( drive, '\\', _CONFIG_DIRECTORY)
[ "def", "get_config_path", "(", ")", ":", "# If the path is explicitly set, return that.", "try", ":", "return", "os", ".", "environ", "[", "environment_vars", ".", "CLOUD_SDK_CONFIG_DIR", "]", "except", "KeyError", ":", "pass", "# Non-windows systems store this at ~/.config...
Returns the absolute path the the Cloud SDK's configuration directory. Returns: str: The Cloud SDK config path.
[ "Returns", "the", "absolute", "path", "the", "the", "Cloud", "SDK", "s", "configuration", "directory", "." ]
2c6ad78917e936f38f87c946209c8031166dc96e
https://github.com/googleapis/google-auth-library-python/blob/2c6ad78917e936f38f87c946209c8031166dc96e/google/auth/_cloud_sdk.py#L42-L69
228,615
googleapis/google-auth-library-python
google/auth/_cloud_sdk.py
get_project_id
def get_project_id(): """Gets the project ID from the Cloud SDK. Returns: Optional[str]: The project ID. """ if os.name == 'nt': command = _CLOUD_SDK_WINDOWS_COMMAND else: command = _CLOUD_SDK_POSIX_COMMAND try: output = subprocess.check_output( (command,) + _CLOUD_SDK_CONFIG_COMMAND, stderr=subprocess.STDOUT) except (subprocess.CalledProcessError, OSError, IOError): return None try: configuration = json.loads(output.decode('utf-8')) except ValueError: return None try: return configuration['configuration']['properties']['core']['project'] except KeyError: return None
python
def get_project_id(): if os.name == 'nt': command = _CLOUD_SDK_WINDOWS_COMMAND else: command = _CLOUD_SDK_POSIX_COMMAND try: output = subprocess.check_output( (command,) + _CLOUD_SDK_CONFIG_COMMAND, stderr=subprocess.STDOUT) except (subprocess.CalledProcessError, OSError, IOError): return None try: configuration = json.loads(output.decode('utf-8')) except ValueError: return None try: return configuration['configuration']['properties']['core']['project'] except KeyError: return None
[ "def", "get_project_id", "(", ")", ":", "if", "os", ".", "name", "==", "'nt'", ":", "command", "=", "_CLOUD_SDK_WINDOWS_COMMAND", "else", ":", "command", "=", "_CLOUD_SDK_POSIX_COMMAND", "try", ":", "output", "=", "subprocess", ".", "check_output", "(", "(", ...
Gets the project ID from the Cloud SDK. Returns: Optional[str]: The project ID.
[ "Gets", "the", "project", "ID", "from", "the", "Cloud", "SDK", "." ]
2c6ad78917e936f38f87c946209c8031166dc96e
https://github.com/googleapis/google-auth-library-python/blob/2c6ad78917e936f38f87c946209c8031166dc96e/google/auth/_cloud_sdk.py#L100-L126
228,616
googleapis/google-auth-library-python
google/auth/compute_engine/credentials.py
Credentials._retrieve_info
def _retrieve_info(self, request): """Retrieve information about the service account. Updates the scopes and retrieves the full service account email. Args: request (google.auth.transport.Request): The object used to make HTTP requests. """ info = _metadata.get_service_account_info( request, service_account=self._service_account_email) self._service_account_email = info['email'] self._scopes = info['scopes']
python
def _retrieve_info(self, request): info = _metadata.get_service_account_info( request, service_account=self._service_account_email) self._service_account_email = info['email'] self._scopes = info['scopes']
[ "def", "_retrieve_info", "(", "self", ",", "request", ")", ":", "info", "=", "_metadata", ".", "get_service_account_info", "(", "request", ",", "service_account", "=", "self", ".", "_service_account_email", ")", "self", ".", "_service_account_email", "=", "info", ...
Retrieve information about the service account. Updates the scopes and retrieves the full service account email. Args: request (google.auth.transport.Request): The object used to make HTTP requests.
[ "Retrieve", "information", "about", "the", "service", "account", "." ]
2c6ad78917e936f38f87c946209c8031166dc96e
https://github.com/googleapis/google-auth-library-python/blob/2c6ad78917e936f38f87c946209c8031166dc96e/google/auth/compute_engine/credentials.py#L67-L81
228,617
googleapis/google-auth-library-python
google/auth/compute_engine/credentials.py
Credentials.refresh
def refresh(self, request): """Refresh the access token and scopes. Args: request (google.auth.transport.Request): The object used to make HTTP requests. Raises: google.auth.exceptions.RefreshError: If the Compute Engine metadata service can't be reached if if the instance has not credentials. """ try: self._retrieve_info(request) self.token, self.expiry = _metadata.get_service_account_token( request, service_account=self._service_account_email) except exceptions.TransportError as caught_exc: new_exc = exceptions.RefreshError(caught_exc) six.raise_from(new_exc, caught_exc)
python
def refresh(self, request): try: self._retrieve_info(request) self.token, self.expiry = _metadata.get_service_account_token( request, service_account=self._service_account_email) except exceptions.TransportError as caught_exc: new_exc = exceptions.RefreshError(caught_exc) six.raise_from(new_exc, caught_exc)
[ "def", "refresh", "(", "self", ",", "request", ")", ":", "try", ":", "self", ".", "_retrieve_info", "(", "request", ")", "self", ".", "token", ",", "self", ".", "expiry", "=", "_metadata", ".", "get_service_account_token", "(", "request", ",", "service_acc...
Refresh the access token and scopes. Args: request (google.auth.transport.Request): The object used to make HTTP requests. Raises: google.auth.exceptions.RefreshError: If the Compute Engine metadata service can't be reached if if the instance has not credentials.
[ "Refresh", "the", "access", "token", "and", "scopes", "." ]
2c6ad78917e936f38f87c946209c8031166dc96e
https://github.com/googleapis/google-auth-library-python/blob/2c6ad78917e936f38f87c946209c8031166dc96e/google/auth/compute_engine/credentials.py#L83-L102
228,618
googleapis/google-auth-library-python
google/oauth2/credentials.py
Credentials.from_authorized_user_info
def from_authorized_user_info(cls, info, scopes=None): """Creates a Credentials instance from parsed authorized user info. Args: info (Mapping[str, str]): The authorized user info in Google format. scopes (Sequence[str]): Optional list of scopes to include in the credentials. Returns: google.oauth2.credentials.Credentials: The constructed credentials. Raises: ValueError: If the info is not in the expected format. """ keys_needed = set(('refresh_token', 'client_id', 'client_secret')) missing = keys_needed.difference(six.iterkeys(info)) if missing: raise ValueError( 'Authorized user info was not in the expected format, missing ' 'fields {}.'.format(', '.join(missing))) return Credentials( None, # No access token, must be refreshed. refresh_token=info['refresh_token'], token_uri=_GOOGLE_OAUTH2_TOKEN_ENDPOINT, scopes=scopes, client_id=info['client_id'], client_secret=info['client_secret'])
python
def from_authorized_user_info(cls, info, scopes=None): keys_needed = set(('refresh_token', 'client_id', 'client_secret')) missing = keys_needed.difference(six.iterkeys(info)) if missing: raise ValueError( 'Authorized user info was not in the expected format, missing ' 'fields {}.'.format(', '.join(missing))) return Credentials( None, # No access token, must be refreshed. refresh_token=info['refresh_token'], token_uri=_GOOGLE_OAUTH2_TOKEN_ENDPOINT, scopes=scopes, client_id=info['client_id'], client_secret=info['client_secret'])
[ "def", "from_authorized_user_info", "(", "cls", ",", "info", ",", "scopes", "=", "None", ")", ":", "keys_needed", "=", "set", "(", "(", "'refresh_token'", ",", "'client_id'", ",", "'client_secret'", ")", ")", "missing", "=", "keys_needed", ".", "difference", ...
Creates a Credentials instance from parsed authorized user info. Args: info (Mapping[str, str]): The authorized user info in Google format. scopes (Sequence[str]): Optional list of scopes to include in the credentials. Returns: google.oauth2.credentials.Credentials: The constructed credentials. Raises: ValueError: If the info is not in the expected format.
[ "Creates", "a", "Credentials", "instance", "from", "parsed", "authorized", "user", "info", "." ]
2c6ad78917e936f38f87c946209c8031166dc96e
https://github.com/googleapis/google-auth-library-python/blob/2c6ad78917e936f38f87c946209c8031166dc96e/google/oauth2/credentials.py#L144-L174
228,619
googleapis/google-auth-library-python
google/oauth2/credentials.py
Credentials.from_authorized_user_file
def from_authorized_user_file(cls, filename, scopes=None): """Creates a Credentials instance from an authorized user json file. Args: filename (str): The path to the authorized user json file. scopes (Sequence[str]): Optional list of scopes to include in the credentials. Returns: google.oauth2.credentials.Credentials: The constructed credentials. Raises: ValueError: If the file is not in the expected format. """ with io.open(filename, 'r', encoding='utf-8') as json_file: data = json.load(json_file) return cls.from_authorized_user_info(data, scopes)
python
def from_authorized_user_file(cls, filename, scopes=None): with io.open(filename, 'r', encoding='utf-8') as json_file: data = json.load(json_file) return cls.from_authorized_user_info(data, scopes)
[ "def", "from_authorized_user_file", "(", "cls", ",", "filename", ",", "scopes", "=", "None", ")", ":", "with", "io", ".", "open", "(", "filename", ",", "'r'", ",", "encoding", "=", "'utf-8'", ")", "as", "json_file", ":", "data", "=", "json", ".", "load...
Creates a Credentials instance from an authorized user json file. Args: filename (str): The path to the authorized user json file. scopes (Sequence[str]): Optional list of scopes to include in the credentials. Returns: google.oauth2.credentials.Credentials: The constructed credentials. Raises: ValueError: If the file is not in the expected format.
[ "Creates", "a", "Credentials", "instance", "from", "an", "authorized", "user", "json", "file", "." ]
2c6ad78917e936f38f87c946209c8031166dc96e
https://github.com/googleapis/google-auth-library-python/blob/2c6ad78917e936f38f87c946209c8031166dc96e/google/oauth2/credentials.py#L177-L194
228,620
googleapis/google-auth-library-python
google/auth/compute_engine/_metadata.py
ping
def ping(request, timeout=_METADATA_DEFAULT_TIMEOUT, retry_count=3): """Checks to see if the metadata server is available. Args: request (google.auth.transport.Request): A callable used to make HTTP requests. timeout (int): How long to wait for the metadata server to respond. retry_count (int): How many times to attempt connecting to metadata server using above timeout. Returns: bool: True if the metadata server is reachable, False otherwise. """ # NOTE: The explicit ``timeout`` is a workaround. The underlying # issue is that resolving an unknown host on some networks will take # 20-30 seconds; making this timeout short fixes the issue, but # could lead to false negatives in the event that we are on GCE, but # the metadata resolution was particularly slow. The latter case is # "unlikely". retries = 0 while retries < retry_count: try: response = request( url=_METADATA_IP_ROOT, method='GET', headers=_METADATA_HEADERS, timeout=timeout) metadata_flavor = response.headers.get(_METADATA_FLAVOR_HEADER) return (response.status == http_client.OK and metadata_flavor == _METADATA_FLAVOR_VALUE) except exceptions.TransportError: _LOGGER.info('Compute Engine Metadata server unavailable on' 'attempt %s of %s', retries+1, retry_count) retries += 1 return False
python
def ping(request, timeout=_METADATA_DEFAULT_TIMEOUT, retry_count=3): # NOTE: The explicit ``timeout`` is a workaround. The underlying # issue is that resolving an unknown host on some networks will take # 20-30 seconds; making this timeout short fixes the issue, but # could lead to false negatives in the event that we are on GCE, but # the metadata resolution was particularly slow. The latter case is # "unlikely". retries = 0 while retries < retry_count: try: response = request( url=_METADATA_IP_ROOT, method='GET', headers=_METADATA_HEADERS, timeout=timeout) metadata_flavor = response.headers.get(_METADATA_FLAVOR_HEADER) return (response.status == http_client.OK and metadata_flavor == _METADATA_FLAVOR_VALUE) except exceptions.TransportError: _LOGGER.info('Compute Engine Metadata server unavailable on' 'attempt %s of %s', retries+1, retry_count) retries += 1 return False
[ "def", "ping", "(", "request", ",", "timeout", "=", "_METADATA_DEFAULT_TIMEOUT", ",", "retry_count", "=", "3", ")", ":", "# NOTE: The explicit ``timeout`` is a workaround. The underlying", "# issue is that resolving an unknown host on some networks will take", "# 20-30 se...
Checks to see if the metadata server is available. Args: request (google.auth.transport.Request): A callable used to make HTTP requests. timeout (int): How long to wait for the metadata server to respond. retry_count (int): How many times to attempt connecting to metadata server using above timeout. Returns: bool: True if the metadata server is reachable, False otherwise.
[ "Checks", "to", "see", "if", "the", "metadata", "server", "is", "available", "." ]
2c6ad78917e936f38f87c946209c8031166dc96e
https://github.com/googleapis/google-auth-library-python/blob/2c6ad78917e936f38f87c946209c8031166dc96e/google/auth/compute_engine/_metadata.py#L54-L89
228,621
googleapis/google-auth-library-python
google/auth/compute_engine/_metadata.py
get
def get(request, path, root=_METADATA_ROOT, recursive=False): """Fetch a resource from the metadata server. Args: request (google.auth.transport.Request): A callable used to make HTTP requests. path (str): The resource to retrieve. For example, ``'instance/service-accounts/default'``. root (str): The full path to the metadata server root. recursive (bool): Whether to do a recursive query of metadata. See https://cloud.google.com/compute/docs/metadata#aggcontents for more details. Returns: Union[Mapping, str]: If the metadata server returns JSON, a mapping of the decoded JSON is return. Otherwise, the response content is returned as a string. Raises: google.auth.exceptions.TransportError: if an error occurred while retrieving metadata. """ base_url = urlparse.urljoin(root, path) query_params = {} if recursive: query_params['recursive'] = 'true' url = _helpers.update_query(base_url, query_params) response = request(url=url, method='GET', headers=_METADATA_HEADERS) if response.status == http_client.OK: content = _helpers.from_bytes(response.data) if response.headers['content-type'] == 'application/json': try: return json.loads(content) except ValueError as caught_exc: new_exc = exceptions.TransportError( 'Received invalid JSON from the Google Compute Engine' 'metadata service: {:.20}'.format(content)) six.raise_from(new_exc, caught_exc) else: return content else: raise exceptions.TransportError( 'Failed to retrieve {} from the Google Compute Engine' 'metadata service. Status: {} Response:\n{}'.format( url, response.status, response.data), response)
python
def get(request, path, root=_METADATA_ROOT, recursive=False): base_url = urlparse.urljoin(root, path) query_params = {} if recursive: query_params['recursive'] = 'true' url = _helpers.update_query(base_url, query_params) response = request(url=url, method='GET', headers=_METADATA_HEADERS) if response.status == http_client.OK: content = _helpers.from_bytes(response.data) if response.headers['content-type'] == 'application/json': try: return json.loads(content) except ValueError as caught_exc: new_exc = exceptions.TransportError( 'Received invalid JSON from the Google Compute Engine' 'metadata service: {:.20}'.format(content)) six.raise_from(new_exc, caught_exc) else: return content else: raise exceptions.TransportError( 'Failed to retrieve {} from the Google Compute Engine' 'metadata service. Status: {} Response:\n{}'.format( url, response.status, response.data), response)
[ "def", "get", "(", "request", ",", "path", ",", "root", "=", "_METADATA_ROOT", ",", "recursive", "=", "False", ")", ":", "base_url", "=", "urlparse", ".", "urljoin", "(", "root", ",", "path", ")", "query_params", "=", "{", "}", "if", "recursive", ":", ...
Fetch a resource from the metadata server. Args: request (google.auth.transport.Request): A callable used to make HTTP requests. path (str): The resource to retrieve. For example, ``'instance/service-accounts/default'``. root (str): The full path to the metadata server root. recursive (bool): Whether to do a recursive query of metadata. See https://cloud.google.com/compute/docs/metadata#aggcontents for more details. Returns: Union[Mapping, str]: If the metadata server returns JSON, a mapping of the decoded JSON is return. Otherwise, the response content is returned as a string. Raises: google.auth.exceptions.TransportError: if an error occurred while retrieving metadata.
[ "Fetch", "a", "resource", "from", "the", "metadata", "server", "." ]
2c6ad78917e936f38f87c946209c8031166dc96e
https://github.com/googleapis/google-auth-library-python/blob/2c6ad78917e936f38f87c946209c8031166dc96e/google/auth/compute_engine/_metadata.py#L92-L140
228,622
googleapis/google-auth-library-python
google/auth/compute_engine/_metadata.py
get_service_account_token
def get_service_account_token(request, service_account='default'): """Get the OAuth 2.0 access token for a service account. Args: request (google.auth.transport.Request): A callable used to make HTTP requests. service_account (str): The string 'default' or a service account email address. The determines which service account for which to acquire an access token. Returns: Union[str, datetime]: The access token and its expiration. Raises: google.auth.exceptions.TransportError: if an error occurred while retrieving metadata. """ token_json = get( request, 'instance/service-accounts/{0}/token'.format(service_account)) token_expiry = _helpers.utcnow() + datetime.timedelta( seconds=token_json['expires_in']) return token_json['access_token'], token_expiry
python
def get_service_account_token(request, service_account='default'): token_json = get( request, 'instance/service-accounts/{0}/token'.format(service_account)) token_expiry = _helpers.utcnow() + datetime.timedelta( seconds=token_json['expires_in']) return token_json['access_token'], token_expiry
[ "def", "get_service_account_token", "(", "request", ",", "service_account", "=", "'default'", ")", ":", "token_json", "=", "get", "(", "request", ",", "'instance/service-accounts/{0}/token'", ".", "format", "(", "service_account", ")", ")", "token_expiry", "=", "_he...
Get the OAuth 2.0 access token for a service account. Args: request (google.auth.transport.Request): A callable used to make HTTP requests. service_account (str): The string 'default' or a service account email address. The determines which service account for which to acquire an access token. Returns: Union[str, datetime]: The access token and its expiration. Raises: google.auth.exceptions.TransportError: if an error occurred while retrieving metadata.
[ "Get", "the", "OAuth", "2", ".", "0", "access", "token", "for", "a", "service", "account", "." ]
2c6ad78917e936f38f87c946209c8031166dc96e
https://github.com/googleapis/google-auth-library-python/blob/2c6ad78917e936f38f87c946209c8031166dc96e/google/auth/compute_engine/_metadata.py#L189-L211
228,623
googleapis/google-auth-library-python
google/auth/_service_account_info.py
from_dict
def from_dict(data, require=None): """Validates a dictionary containing Google service account data. Creates and returns a :class:`google.auth.crypt.Signer` instance from the private key specified in the data. Args: data (Mapping[str, str]): The service account data require (Sequence[str]): List of keys required to be present in the info. Returns: google.auth.crypt.Signer: A signer created from the private key in the service account file. Raises: ValueError: if the data was in the wrong format, or if one of the required keys is missing. """ keys_needed = set(require if require is not None else []) missing = keys_needed.difference(six.iterkeys(data)) if missing: raise ValueError( 'Service account info was not in the expected format, missing ' 'fields {}.'.format(', '.join(missing))) # Create a signer. signer = crypt.RSASigner.from_service_account_info(data) return signer
python
def from_dict(data, require=None): keys_needed = set(require if require is not None else []) missing = keys_needed.difference(six.iterkeys(data)) if missing: raise ValueError( 'Service account info was not in the expected format, missing ' 'fields {}.'.format(', '.join(missing))) # Create a signer. signer = crypt.RSASigner.from_service_account_info(data) return signer
[ "def", "from_dict", "(", "data", ",", "require", "=", "None", ")", ":", "keys_needed", "=", "set", "(", "require", "if", "require", "is", "not", "None", "else", "[", "]", ")", "missing", "=", "keys_needed", ".", "difference", "(", "six", ".", "iterkeys...
Validates a dictionary containing Google service account data. Creates and returns a :class:`google.auth.crypt.Signer` instance from the private key specified in the data. Args: data (Mapping[str, str]): The service account data require (Sequence[str]): List of keys required to be present in the info. Returns: google.auth.crypt.Signer: A signer created from the private key in the service account file. Raises: ValueError: if the data was in the wrong format, or if one of the required keys is missing.
[ "Validates", "a", "dictionary", "containing", "Google", "service", "account", "data", "." ]
2c6ad78917e936f38f87c946209c8031166dc96e
https://github.com/googleapis/google-auth-library-python/blob/2c6ad78917e936f38f87c946209c8031166dc96e/google/auth/_service_account_info.py#L25-L56
228,624
googleapis/google-auth-library-python
google/auth/_service_account_info.py
from_filename
def from_filename(filename, require=None): """Reads a Google service account JSON file and returns its parsed info. Args: filename (str): The path to the service account .json file. require (Sequence[str]): List of keys required to be present in the info. Returns: Tuple[ Mapping[str, str], google.auth.crypt.Signer ]: The verified info and a signer instance. """ with io.open(filename, 'r', encoding='utf-8') as json_file: data = json.load(json_file) return data, from_dict(data, require=require)
python
def from_filename(filename, require=None): with io.open(filename, 'r', encoding='utf-8') as json_file: data = json.load(json_file) return data, from_dict(data, require=require)
[ "def", "from_filename", "(", "filename", ",", "require", "=", "None", ")", ":", "with", "io", ".", "open", "(", "filename", ",", "'r'", ",", "encoding", "=", "'utf-8'", ")", "as", "json_file", ":", "data", "=", "json", ".", "load", "(", "json_file", ...
Reads a Google service account JSON file and returns its parsed info. Args: filename (str): The path to the service account .json file. require (Sequence[str]): List of keys required to be present in the info. Returns: Tuple[ Mapping[str, str], google.auth.crypt.Signer ]: The verified info and a signer instance.
[ "Reads", "a", "Google", "service", "account", "JSON", "file", "and", "returns", "its", "parsed", "info", "." ]
2c6ad78917e936f38f87c946209c8031166dc96e
https://github.com/googleapis/google-auth-library-python/blob/2c6ad78917e936f38f87c946209c8031166dc96e/google/auth/_service_account_info.py#L59-L73
228,625
googleapis/google-auth-library-python
google/auth/_helpers.py
copy_docstring
def copy_docstring(source_class): """Decorator that copies a method's docstring from another class. Args: source_class (type): The class that has the documented method. Returns: Callable: A decorator that will copy the docstring of the same named method in the source class to the decorated method. """ def decorator(method): """Decorator implementation. Args: method (Callable): The method to copy the docstring to. Returns: Callable: the same method passed in with an updated docstring. Raises: ValueError: if the method already has a docstring. """ if method.__doc__: raise ValueError('Method already has a docstring.') source_method = getattr(source_class, method.__name__) method.__doc__ = source_method.__doc__ return method return decorator
python
def copy_docstring(source_class): def decorator(method): """Decorator implementation. Args: method (Callable): The method to copy the docstring to. Returns: Callable: the same method passed in with an updated docstring. Raises: ValueError: if the method already has a docstring. """ if method.__doc__: raise ValueError('Method already has a docstring.') source_method = getattr(source_class, method.__name__) method.__doc__ = source_method.__doc__ return method return decorator
[ "def", "copy_docstring", "(", "source_class", ")", ":", "def", "decorator", "(", "method", ")", ":", "\"\"\"Decorator implementation.\n\n Args:\n method (Callable): The method to copy the docstring to.\n\n Returns:\n Callable: the same method passed in wit...
Decorator that copies a method's docstring from another class. Args: source_class (type): The class that has the documented method. Returns: Callable: A decorator that will copy the docstring of the same named method in the source class to the decorated method.
[ "Decorator", "that", "copies", "a", "method", "s", "docstring", "from", "another", "class", "." ]
2c6ad78917e936f38f87c946209c8031166dc96e
https://github.com/googleapis/google-auth-library-python/blob/2c6ad78917e936f38f87c946209c8031166dc96e/google/auth/_helpers.py#L29-L58
228,626
googleapis/google-auth-library-python
google/auth/_helpers.py
from_bytes
def from_bytes(value): """Converts bytes to a string value, if necessary. Args: value (Union[str, bytes]): The value to be converted. Returns: str: The original value converted to unicode (if bytes) or as passed in if it started out as unicode. Raises: ValueError: If the value could not be converted to unicode. """ result = (value.decode('utf-8') if isinstance(value, six.binary_type) else value) if isinstance(result, six.text_type): return result else: raise ValueError( '{0!r} could not be converted to unicode'.format(value))
python
def from_bytes(value): result = (value.decode('utf-8') if isinstance(value, six.binary_type) else value) if isinstance(result, six.text_type): return result else: raise ValueError( '{0!r} could not be converted to unicode'.format(value))
[ "def", "from_bytes", "(", "value", ")", ":", "result", "=", "(", "value", ".", "decode", "(", "'utf-8'", ")", "if", "isinstance", "(", "value", ",", "six", ".", "binary_type", ")", "else", "value", ")", "if", "isinstance", "(", "result", ",", "six", ...
Converts bytes to a string value, if necessary. Args: value (Union[str, bytes]): The value to be converted. Returns: str: The original value converted to unicode (if bytes) or as passed in if it started out as unicode. Raises: ValueError: If the value could not be converted to unicode.
[ "Converts", "bytes", "to", "a", "string", "value", "if", "necessary", "." ]
2c6ad78917e936f38f87c946209c8031166dc96e
https://github.com/googleapis/google-auth-library-python/blob/2c6ad78917e936f38f87c946209c8031166dc96e/google/auth/_helpers.py#L108-L127
228,627
googleapis/google-auth-library-python
google/auth/_helpers.py
update_query
def update_query(url, params, remove=None): """Updates a URL's query parameters. Replaces any current values if they are already present in the URL. Args: url (str): The URL to update. params (Mapping[str, str]): A mapping of query parameter keys to values. remove (Sequence[str]): Parameters to remove from the query string. Returns: str: The URL with updated query parameters. Examples: >>> url = 'http://example.com?a=1' >>> update_query(url, {'a': '2'}) http://example.com?a=2 >>> update_query(url, {'b': '3'}) http://example.com?a=1&b=3 >> update_query(url, {'b': '3'}, remove=['a']) http://example.com?b=3 """ if remove is None: remove = [] # Split the URL into parts. parts = urllib.parse.urlparse(url) # Parse the query string. query_params = urllib.parse.parse_qs(parts.query) # Update the query parameters with the new parameters. query_params.update(params) # Remove any values specified in remove. query_params = { key: value for key, value in six.iteritems(query_params) if key not in remove} # Re-encoded the query string. new_query = urllib.parse.urlencode(query_params, doseq=True) # Unsplit the url. new_parts = parts._replace(query=new_query) return urllib.parse.urlunparse(new_parts)
python
def update_query(url, params, remove=None): if remove is None: remove = [] # Split the URL into parts. parts = urllib.parse.urlparse(url) # Parse the query string. query_params = urllib.parse.parse_qs(parts.query) # Update the query parameters with the new parameters. query_params.update(params) # Remove any values specified in remove. query_params = { key: value for key, value in six.iteritems(query_params) if key not in remove} # Re-encoded the query string. new_query = urllib.parse.urlencode(query_params, doseq=True) # Unsplit the url. new_parts = parts._replace(query=new_query) return urllib.parse.urlunparse(new_parts)
[ "def", "update_query", "(", "url", ",", "params", ",", "remove", "=", "None", ")", ":", "if", "remove", "is", "None", ":", "remove", "=", "[", "]", "# Split the URL into parts.", "parts", "=", "urllib", ".", "parse", ".", "urlparse", "(", "url", ")", "...
Updates a URL's query parameters. Replaces any current values if they are already present in the URL. Args: url (str): The URL to update. params (Mapping[str, str]): A mapping of query parameter keys to values. remove (Sequence[str]): Parameters to remove from the query string. Returns: str: The URL with updated query parameters. Examples: >>> url = 'http://example.com?a=1' >>> update_query(url, {'a': '2'}) http://example.com?a=2 >>> update_query(url, {'b': '3'}) http://example.com?a=1&b=3 >> update_query(url, {'b': '3'}, remove=['a']) http://example.com?b=3
[ "Updates", "a", "URL", "s", "query", "parameters", "." ]
2c6ad78917e936f38f87c946209c8031166dc96e
https://github.com/googleapis/google-auth-library-python/blob/2c6ad78917e936f38f87c946209c8031166dc96e/google/auth/_helpers.py#L130-L173
228,628
googleapis/google-auth-library-python
google/auth/_helpers.py
padded_urlsafe_b64decode
def padded_urlsafe_b64decode(value): """Decodes base64 strings lacking padding characters. Google infrastructure tends to omit the base64 padding characters. Args: value (Union[str, bytes]): The encoded value. Returns: bytes: The decoded value """ b64string = to_bytes(value) padded = b64string + b'=' * (-len(b64string) % 4) return base64.urlsafe_b64decode(padded)
python
def padded_urlsafe_b64decode(value): b64string = to_bytes(value) padded = b64string + b'=' * (-len(b64string) % 4) return base64.urlsafe_b64decode(padded)
[ "def", "padded_urlsafe_b64decode", "(", "value", ")", ":", "b64string", "=", "to_bytes", "(", "value", ")", "padded", "=", "b64string", "+", "b'='", "*", "(", "-", "len", "(", "b64string", ")", "%", "4", ")", "return", "base64", ".", "urlsafe_b64decode", ...
Decodes base64 strings lacking padding characters. Google infrastructure tends to omit the base64 padding characters. Args: value (Union[str, bytes]): The encoded value. Returns: bytes: The decoded value
[ "Decodes", "base64", "strings", "lacking", "padding", "characters", "." ]
2c6ad78917e936f38f87c946209c8031166dc96e
https://github.com/googleapis/google-auth-library-python/blob/2c6ad78917e936f38f87c946209c8031166dc96e/google/auth/_helpers.py#L204-L217
228,629
RLBot/RLBot
src/main/python/rlbot/utils/process_configuration.py
configure_processes
def configure_processes(agent_metadata_map, logger): """ This will update the priority and CPU affinity of the processes owned by bots to try to achieve fairness and good performance. :param agent_metadata_map: A mapping of player index to agent metadata, including a list of owned process ids. """ if not optional_packages_installed: logger.warning("\n#### WARNING ####\n" "You are missing some optional packages which will become mandatory in the future!\n" "Please run `pip install -r requirements.txt` to enjoy optimal functionality " "and future-proof yourself!\n") if not optional_packages_installed: return team_pids_map = {} for player_index, data in agent_metadata_map.items(): team = data.team if team not in team_pids_map: team_pids_map[team] = set() team_pids_map[team].update(data.pids) shared_pids = set() cpu_count = psutil.cpu_count() cpus_per_team = cpu_count // 3 if len(team_pids_map) >= 2 and cpus_per_team > 0: # Sort into three sets of pids: team 0 exclusives, team 1 exclusives, and shared pids # All pids will be assigned high priority # Team exclusive pids will be bound to a subset of cpus so they can't adversely affect the opposite team. for team, team_set in team_pids_map.items(): if not shared_pids: shared_pids.update(team_set) else: shared_pids.intersection_update(team_set) for team, team_set in team_pids_map.items(): team_set -= shared_pids for team, team_pids in team_pids_map.items(): team_cpu_offset = cpus_per_team * team team_cpus = list(range(cpu_count - cpus_per_team - team_cpu_offset, cpu_count - team_cpu_offset)) for pid in team_pids: p = psutil.Process(pid) p.cpu_affinity(team_cpus) # Restrict the process to run on the cpus assigned to the team p.nice(psutil.HIGH_PRIORITY_CLASS) # Allow the process to run at high priority else: # Consider everything a shared pid, because we are not in a position to split up cpus. 
for team, team_set in team_pids_map.items(): shared_pids.update(team_set) for pid in shared_pids: p = psutil.Process(pid) # Allow the process to run at high priority p.nice(psutil.HIGH_PRIORITY_CLASS)
python
def configure_processes(agent_metadata_map, logger): if not optional_packages_installed: logger.warning("\n#### WARNING ####\n" "You are missing some optional packages which will become mandatory in the future!\n" "Please run `pip install -r requirements.txt` to enjoy optimal functionality " "and future-proof yourself!\n") if not optional_packages_installed: return team_pids_map = {} for player_index, data in agent_metadata_map.items(): team = data.team if team not in team_pids_map: team_pids_map[team] = set() team_pids_map[team].update(data.pids) shared_pids = set() cpu_count = psutil.cpu_count() cpus_per_team = cpu_count // 3 if len(team_pids_map) >= 2 and cpus_per_team > 0: # Sort into three sets of pids: team 0 exclusives, team 1 exclusives, and shared pids # All pids will be assigned high priority # Team exclusive pids will be bound to a subset of cpus so they can't adversely affect the opposite team. for team, team_set in team_pids_map.items(): if not shared_pids: shared_pids.update(team_set) else: shared_pids.intersection_update(team_set) for team, team_set in team_pids_map.items(): team_set -= shared_pids for team, team_pids in team_pids_map.items(): team_cpu_offset = cpus_per_team * team team_cpus = list(range(cpu_count - cpus_per_team - team_cpu_offset, cpu_count - team_cpu_offset)) for pid in team_pids: p = psutil.Process(pid) p.cpu_affinity(team_cpus) # Restrict the process to run on the cpus assigned to the team p.nice(psutil.HIGH_PRIORITY_CLASS) # Allow the process to run at high priority else: # Consider everything a shared pid, because we are not in a position to split up cpus. for team, team_set in team_pids_map.items(): shared_pids.update(team_set) for pid in shared_pids: p = psutil.Process(pid) # Allow the process to run at high priority p.nice(psutil.HIGH_PRIORITY_CLASS)
[ "def", "configure_processes", "(", "agent_metadata_map", ",", "logger", ")", ":", "if", "not", "optional_packages_installed", ":", "logger", ".", "warning", "(", "\"\\n#### WARNING ####\\n\"", "\"You are missing some optional packages which will become mandatory in the future!\\n\"...
This will update the priority and CPU affinity of the processes owned by bots to try to achieve fairness and good performance. :param agent_metadata_map: A mapping of player index to agent metadata, including a list of owned process ids.
[ "This", "will", "update", "the", "priority", "and", "CPU", "affinity", "of", "the", "processes", "owned", "by", "bots", "to", "try", "to", "achieve", "fairness", "and", "good", "performance", "." ]
3f9b6bec8b9baf4dcfff0f6cf3103c8744ac6234
https://github.com/RLBot/RLBot/blob/3f9b6bec8b9baf4dcfff0f6cf3103c8744ac6234/src/main/python/rlbot/utils/process_configuration.py#L9-L66
228,630
RLBot/RLBot
src/main/python/rlbot/parsing/custom_config.py
ConfigObject.get_header
def get_header(self, header_name): """ Returns a header with that name, creates it if it does not exist. """ if header_name in self.headers: return self.headers[header_name] return self.add_header_name(header_name)
python
def get_header(self, header_name): if header_name in self.headers: return self.headers[header_name] return self.add_header_name(header_name)
[ "def", "get_header", "(", "self", ",", "header_name", ")", ":", "if", "header_name", "in", "self", ".", "headers", ":", "return", "self", ".", "headers", "[", "header_name", "]", "return", "self", ".", "add_header_name", "(", "header_name", ")" ]
Returns a header with that name, creates it if it does not exist.
[ "Returns", "a", "header", "with", "that", "name", "creates", "it", "if", "it", "does", "not", "exist", "." ]
3f9b6bec8b9baf4dcfff0f6cf3103c8744ac6234
https://github.com/RLBot/RLBot/blob/3f9b6bec8b9baf4dcfff0f6cf3103c8744ac6234/src/main/python/rlbot/parsing/custom_config.py#L47-L53
228,631
RLBot/RLBot
src/main/python/rlbot/utils/logging_utils.py
log_warn
def log_warn(message, args): """Logs a warning message using the default logger.""" get_logger(DEFAULT_LOGGER, log_creation=False).log(logging.WARNING, message, *args)
python
def log_warn(message, args): get_logger(DEFAULT_LOGGER, log_creation=False).log(logging.WARNING, message, *args)
[ "def", "log_warn", "(", "message", ",", "args", ")", ":", "get_logger", "(", "DEFAULT_LOGGER", ",", "log_creation", "=", "False", ")", ".", "log", "(", "logging", ".", "WARNING", ",", "message", ",", "*", "args", ")" ]
Logs a warning message using the default logger.
[ "Logs", "a", "warning", "message", "using", "the", "default", "logger", "." ]
3f9b6bec8b9baf4dcfff0f6cf3103c8744ac6234
https://github.com/RLBot/RLBot/blob/3f9b6bec8b9baf4dcfff0f6cf3103c8744ac6234/src/main/python/rlbot/utils/logging_utils.py#L31-L33
228,632
RLBot/RLBot
src/main/python/rlbot/gui/preset_editors.py
CarCustomisationDialog.create_config_headers_dicts
def create_config_headers_dicts(self): """ Creates the config_headers_to_widgets and config_widgets_to_headers and config_headers_to_categories dicts """ self.config_headers_to_widgets = { # blue stuff 'Bot Loadout': { 'team_color_id': (self.blue_primary_spinbox,), 'custom_color_id': (self.blue_secondary_spinbox,), 'car_id': (self.blue_car_spinbox, self.blue_car_combobox), 'decal_id': (self.blue_decal_spinbox, self.blue_decal_combobox), 'wheels_id': (self.blue_wheels_spinbox, self.blue_wheels_combobox), 'boost_id': (self.blue_boost_spinbox, self.blue_boost_combobox), 'antenna_id': (self.blue_antenna_spinbox, self.blue_antenna_combobox), 'hat_id': (self.blue_hat_spinbox, self.blue_hat_combobox), 'paint_finish_id': (self.blue_paint_finish_spinbox, self.blue_paint_finish_combobox), 'custom_finish_id': (self.blue_custom_finish_spinbox, self.blue_custom_finish_combobox), 'engine_audio_id': (self.blue_engine_spinbox, self.blue_engine_combobox), 'trails_id': (self.blue_trails_spinbox, self.blue_trails_combobox), 'goal_explosion_id': (self.blue_goal_explosion_spinbox, self.blue_goal_explosion_combobox) }, 'Bot Loadout Orange': { 'team_color_id': (self.orange_primary_spinbox,), 'custom_color_id': (self.orange_secondary_spinbox,), 'car_id': (self.orange_car_spinbox, self.orange_car_combobox), 'decal_id': (self.orange_decal_spinbox, self.orange_decal_combobox), 'wheels_id': (self.orange_wheels_spinbox, self.orange_wheels_combobox), 'boost_id': (self.orange_boost_spinbox, self.orange_boost_combobox), 'antenna_id': (self.orange_antenna_spinbox, self.orange_antenna_combobox), 'hat_id': (self.orange_hat_spinbox, self.orange_hat_combobox), 'paint_finish_id': (self.orange_paint_finish_spinbox, self.orange_paint_finish_combobox), 'custom_finish_id': (self.orange_custom_finish_spinbox, self.orange_custom_finish_combobox), 'engine_audio_id': (self.orange_engine_spinbox, self.orange_engine_combobox), 'trails_id': (self.orange_trails_spinbox, self.orange_trails_combobox), 
'goal_explosion_id': (self.orange_goal_explosion_spinbox, self.orange_goal_explosion_combobox) }, } self.config_widgets_to_headers = {} for header_1, _field_dict in self.config_headers_to_widgets.items(): for header_2, _widgets in _field_dict.items(): for _widget in _widgets: self.config_widgets_to_headers[_widget] = (header_1, header_2) self.config_headers_to_categories = { 'car_id': 'Body', 'decal_id': 'Decal', 'wheels_id': 'Wheels', 'boost_id': 'Rocket Boost', 'antenna_id': 'Antenna', 'hat_id': 'Topper', 'paint_finish_id': 'Paint Finish', 'custom_finish_id': 'Paint Finish', 'engine_audio_id': 'Engine Audio', 'trails_id': 'Trail', 'goal_explosion_id': 'Goal Explosion' }
python
def create_config_headers_dicts(self): self.config_headers_to_widgets = { # blue stuff 'Bot Loadout': { 'team_color_id': (self.blue_primary_spinbox,), 'custom_color_id': (self.blue_secondary_spinbox,), 'car_id': (self.blue_car_spinbox, self.blue_car_combobox), 'decal_id': (self.blue_decal_spinbox, self.blue_decal_combobox), 'wheels_id': (self.blue_wheels_spinbox, self.blue_wheels_combobox), 'boost_id': (self.blue_boost_spinbox, self.blue_boost_combobox), 'antenna_id': (self.blue_antenna_spinbox, self.blue_antenna_combobox), 'hat_id': (self.blue_hat_spinbox, self.blue_hat_combobox), 'paint_finish_id': (self.blue_paint_finish_spinbox, self.blue_paint_finish_combobox), 'custom_finish_id': (self.blue_custom_finish_spinbox, self.blue_custom_finish_combobox), 'engine_audio_id': (self.blue_engine_spinbox, self.blue_engine_combobox), 'trails_id': (self.blue_trails_spinbox, self.blue_trails_combobox), 'goal_explosion_id': (self.blue_goal_explosion_spinbox, self.blue_goal_explosion_combobox) }, 'Bot Loadout Orange': { 'team_color_id': (self.orange_primary_spinbox,), 'custom_color_id': (self.orange_secondary_spinbox,), 'car_id': (self.orange_car_spinbox, self.orange_car_combobox), 'decal_id': (self.orange_decal_spinbox, self.orange_decal_combobox), 'wheels_id': (self.orange_wheels_spinbox, self.orange_wheels_combobox), 'boost_id': (self.orange_boost_spinbox, self.orange_boost_combobox), 'antenna_id': (self.orange_antenna_spinbox, self.orange_antenna_combobox), 'hat_id': (self.orange_hat_spinbox, self.orange_hat_combobox), 'paint_finish_id': (self.orange_paint_finish_spinbox, self.orange_paint_finish_combobox), 'custom_finish_id': (self.orange_custom_finish_spinbox, self.orange_custom_finish_combobox), 'engine_audio_id': (self.orange_engine_spinbox, self.orange_engine_combobox), 'trails_id': (self.orange_trails_spinbox, self.orange_trails_combobox), 'goal_explosion_id': (self.orange_goal_explosion_spinbox, self.orange_goal_explosion_combobox) }, } 
self.config_widgets_to_headers = {} for header_1, _field_dict in self.config_headers_to_widgets.items(): for header_2, _widgets in _field_dict.items(): for _widget in _widgets: self.config_widgets_to_headers[_widget] = (header_1, header_2) self.config_headers_to_categories = { 'car_id': 'Body', 'decal_id': 'Decal', 'wheels_id': 'Wheels', 'boost_id': 'Rocket Boost', 'antenna_id': 'Antenna', 'hat_id': 'Topper', 'paint_finish_id': 'Paint Finish', 'custom_finish_id': 'Paint Finish', 'engine_audio_id': 'Engine Audio', 'trails_id': 'Trail', 'goal_explosion_id': 'Goal Explosion' }
[ "def", "create_config_headers_dicts", "(", "self", ")", ":", "self", ".", "config_headers_to_widgets", "=", "{", "# blue stuff", "'Bot Loadout'", ":", "{", "'team_color_id'", ":", "(", "self", ".", "blue_primary_spinbox", ",", ")", ",", "'custom_color_id'", ":", "...
Creates the config_headers_to_widgets and config_widgets_to_headers and config_headers_to_categories dicts
[ "Creates", "the", "config_headers_to_widgets", "and", "config_widgets_to_headers", "and", "config_headers_to_categories", "dicts" ]
3f9b6bec8b9baf4dcfff0f6cf3103c8744ac6234
https://github.com/RLBot/RLBot/blob/3f9b6bec8b9baf4dcfff0f6cf3103c8744ac6234/src/main/python/rlbot/gui/preset_editors.py#L297-L352
228,633
RLBot/RLBot
src/main/python/rlbot/utils/file_util.py
get_rlbot_directory
def get_rlbot_directory() -> str: """Gets the path of the rlbot package directory""" return os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
python
def get_rlbot_directory() -> str: return os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
[ "def", "get_rlbot_directory", "(", ")", "->", "str", ":", "return", "os", ".", "path", ".", "dirname", "(", "os", ".", "path", ".", "dirname", "(", "os", ".", "path", ".", "realpath", "(", "__file__", ")", ")", ")" ]
Gets the path of the rlbot package directory
[ "Gets", "the", "path", "of", "the", "rlbot", "package", "directory" ]
3f9b6bec8b9baf4dcfff0f6cf3103c8744ac6234
https://github.com/RLBot/RLBot/blob/3f9b6bec8b9baf4dcfff0f6cf3103c8744ac6234/src/main/python/rlbot/utils/file_util.py#L13-L15
228,634
RLBot/RLBot
src/main/python/rlbot/matchconfig/conversions.py
read_match_config_from_file
def read_match_config_from_file(match_config_path: Path) -> MatchConfig: """ Parse the rlbot.cfg file on disk into the python datastructure. """ config_obj = create_bot_config_layout() config_obj.parse_file(match_config_path, max_index=MAX_PLAYERS) return parse_match_config(config_obj, match_config_path, {}, {})
python
def read_match_config_from_file(match_config_path: Path) -> MatchConfig: config_obj = create_bot_config_layout() config_obj.parse_file(match_config_path, max_index=MAX_PLAYERS) return parse_match_config(config_obj, match_config_path, {}, {})
[ "def", "read_match_config_from_file", "(", "match_config_path", ":", "Path", ")", "->", "MatchConfig", ":", "config_obj", "=", "create_bot_config_layout", "(", ")", "config_obj", ".", "parse_file", "(", "match_config_path", ",", "max_index", "=", "MAX_PLAYERS", ")", ...
Parse the rlbot.cfg file on disk into the python datastructure.
[ "Parse", "the", "rlbot", ".", "cfg", "file", "on", "disk", "into", "the", "python", "datastructure", "." ]
3f9b6bec8b9baf4dcfff0f6cf3103c8744ac6234
https://github.com/RLBot/RLBot/blob/3f9b6bec8b9baf4dcfff0f6cf3103c8744ac6234/src/main/python/rlbot/matchconfig/conversions.py#L18-L24
228,635
RLBot/RLBot
src/main/python/rlbot/parsing/bot_config_bundle.py
validate_bot_config
def validate_bot_config(config_bundle) -> None: """ Checks the config bundle to see whether it has all required attributes. """ if not config_bundle.name: bot_config = os.path.join(config_bundle.config_directory, config_bundle.config_file_name or '') raise AttributeError(f"Bot config {bot_config} has no name configured!") # This will raise an exception if we can't find the looks config, or if it's malformed config_bundle.get_looks_config()
python
def validate_bot_config(config_bundle) -> None: if not config_bundle.name: bot_config = os.path.join(config_bundle.config_directory, config_bundle.config_file_name or '') raise AttributeError(f"Bot config {bot_config} has no name configured!") # This will raise an exception if we can't find the looks config, or if it's malformed config_bundle.get_looks_config()
[ "def", "validate_bot_config", "(", "config_bundle", ")", "->", "None", ":", "if", "not", "config_bundle", ".", "name", ":", "bot_config", "=", "os", ".", "path", ".", "join", "(", "config_bundle", ".", "config_directory", ",", "config_bundle", ".", "config_fil...
Checks the config bundle to see whether it has all required attributes.
[ "Checks", "the", "config", "bundle", "to", "see", "whether", "it", "has", "all", "required", "attributes", "." ]
3f9b6bec8b9baf4dcfff0f6cf3103c8744ac6234
https://github.com/RLBot/RLBot/blob/3f9b6bec8b9baf4dcfff0f6cf3103c8744ac6234/src/main/python/rlbot/parsing/bot_config_bundle.py#L56-L65
228,636
RLBot/RLBot
src/main/python/rlbot/botmanager/helper_process_manager.py
HelperProcessManager.start_or_update_helper_process
def start_or_update_helper_process(self, agent_metadata: AgentMetadata): """ Examines the agent metadata to see if the agent needs a helper process. If the process is not running yet, create the process. Once the process is running, feed the agent metadata to it. If a process is created here, the pid will be added to the agent metadata. """ helper_req = agent_metadata.helper_process_request if helper_req is not None: if helper_req.key not in self.helper_process_map: metadata_queue = mp.Queue() if helper_req.python_file_path is not None: process = mp.Process(target=run_helper_process, args=(helper_req.python_file_path, metadata_queue, self.quit_event, helper_req.options)) process.daemon = True process.start() agent_metadata.pids.add(process.pid) self.helper_process_map[helper_req.key] = metadata_queue if helper_req.executable is not None: # TODO: find a nice way to pass the options dict as arguments process = subprocess.Popen([helper_req.executable]) agent_metadata.pids.add(process.pid) self.helper_process_map[helper_req.key] = metadata_queue metadata_queue = self.helper_process_map[helper_req.key] metadata_queue.put(agent_metadata)
python
def start_or_update_helper_process(self, agent_metadata: AgentMetadata): helper_req = agent_metadata.helper_process_request if helper_req is not None: if helper_req.key not in self.helper_process_map: metadata_queue = mp.Queue() if helper_req.python_file_path is not None: process = mp.Process(target=run_helper_process, args=(helper_req.python_file_path, metadata_queue, self.quit_event, helper_req.options)) process.daemon = True process.start() agent_metadata.pids.add(process.pid) self.helper_process_map[helper_req.key] = metadata_queue if helper_req.executable is not None: # TODO: find a nice way to pass the options dict as arguments process = subprocess.Popen([helper_req.executable]) agent_metadata.pids.add(process.pid) self.helper_process_map[helper_req.key] = metadata_queue metadata_queue = self.helper_process_map[helper_req.key] metadata_queue.put(agent_metadata)
[ "def", "start_or_update_helper_process", "(", "self", ",", "agent_metadata", ":", "AgentMetadata", ")", ":", "helper_req", "=", "agent_metadata", ".", "helper_process_request", "if", "helper_req", "is", "not", "None", ":", "if", "helper_req", ".", "key", "not", "i...
Examines the agent metadata to see if the agent needs a helper process. If the process is not running yet, create the process. Once the process is running, feed the agent metadata to it. If a process is created here, the pid will be added to the agent metadata.
[ "Examines", "the", "agent", "metadata", "to", "see", "if", "the", "agent", "needs", "a", "helper", "process", ".", "If", "the", "process", "is", "not", "running", "yet", "create", "the", "process", ".", "Once", "the", "process", "is", "running", "feed", ...
3f9b6bec8b9baf4dcfff0f6cf3103c8744ac6234
https://github.com/RLBot/RLBot/blob/3f9b6bec8b9baf4dcfff0f6cf3103c8744ac6234/src/main/python/rlbot/botmanager/helper_process_manager.py#L20-L52
228,637
RLBot/RLBot
src/main/python/rlbot/matchconfig/match_config.py
PlayerConfig.bot_config
def bot_config(player_config_path: Path, team: Team) -> 'PlayerConfig': """ A function to cover the common case of creating a config for a bot. """ bot_config = PlayerConfig() bot_config.bot = True bot_config.rlbot_controlled = True bot_config.team = team.value bot_config.config_path = str(player_config_path.absolute()) # TODO: Refactor to use Path's config_bundle = get_bot_config_bundle(bot_config.config_path) bot_config.name = config_bundle.name bot_config.loadout_config = load_bot_appearance(config_bundle.get_looks_config(), bot_config.team) return bot_config
python
def bot_config(player_config_path: Path, team: Team) -> 'PlayerConfig': bot_config = PlayerConfig() bot_config.bot = True bot_config.rlbot_controlled = True bot_config.team = team.value bot_config.config_path = str(player_config_path.absolute()) # TODO: Refactor to use Path's config_bundle = get_bot_config_bundle(bot_config.config_path) bot_config.name = config_bundle.name bot_config.loadout_config = load_bot_appearance(config_bundle.get_looks_config(), bot_config.team) return bot_config
[ "def", "bot_config", "(", "player_config_path", ":", "Path", ",", "team", ":", "Team", ")", "->", "'PlayerConfig'", ":", "bot_config", "=", "PlayerConfig", "(", ")", "bot_config", ".", "bot", "=", "True", "bot_config", ".", "rlbot_controlled", "=", "True", "...
A function to cover the common case of creating a config for a bot.
[ "A", "function", "to", "cover", "the", "common", "case", "of", "creating", "a", "config", "for", "a", "bot", "." ]
3f9b6bec8b9baf4dcfff0f6cf3103c8744ac6234
https://github.com/RLBot/RLBot/blob/3f9b6bec8b9baf4dcfff0f6cf3103c8744ac6234/src/main/python/rlbot/matchconfig/match_config.py#L38-L50
228,638
RLBot/RLBot
src/main/python/rlbot/setup_manager.py
setup_manager_context
def setup_manager_context(): """ Creates a initialized context manager which shuts down at the end of the `with` block. usage: >>> with setup_manager_context() as setup_manager: ... setup_manager.load_config(...) ... # ... Run match """ setup_manager = SetupManager() setup_manager.connect_to_game() try: yield setup_manager finally: setup_manager.shut_down(kill_all_pids=True)
python
def setup_manager_context(): setup_manager = SetupManager() setup_manager.connect_to_game() try: yield setup_manager finally: setup_manager.shut_down(kill_all_pids=True)
[ "def", "setup_manager_context", "(", ")", ":", "setup_manager", "=", "SetupManager", "(", ")", "setup_manager", ".", "connect_to_game", "(", ")", "try", ":", "yield", "setup_manager", "finally", ":", "setup_manager", ".", "shut_down", "(", "kill_all_pids", "=", ...
Creates a initialized context manager which shuts down at the end of the `with` block. usage: >>> with setup_manager_context() as setup_manager: ... setup_manager.load_config(...) ... # ... Run match
[ "Creates", "a", "initialized", "context", "manager", "which", "shuts", "down", "at", "the", "end", "of", "the", "with", "block", "." ]
3f9b6bec8b9baf4dcfff0f6cf3103c8744ac6234
https://github.com/RLBot/RLBot/blob/3f9b6bec8b9baf4dcfff0f6cf3103c8744ac6234/src/main/python/rlbot/setup_manager.py#L37-L52
228,639
RLBot/RLBot
src/main/python/rlbot/setup_manager.py
SetupManager.load_match_config
def load_match_config(self, match_config: MatchConfig, bot_config_overrides={}): """ Loads the match config into internal data structures, which prepares us to later launch bot processes and start the match. This is an alternative to the load_config method; they accomplish the same thing. """ self.num_participants = match_config.num_players self.names = [bot.name for bot in match_config.player_configs] self.teams = [bot.team for bot in match_config.player_configs] bundles = [bot_config_overrides[index] if index in bot_config_overrides else get_bot_config_bundle(bot.config_path) if bot.config_path else None for index, bot in enumerate(match_config.player_configs)] self.python_files = [bundle.python_file if bundle else None for bundle in bundles] self.parameters = [] for index, bot in enumerate(match_config.player_configs): python_config = None if bot.rlbot_controlled: python_config = load_bot_parameters(bundles[index]) self.parameters.append(python_config) if bot.loadout_config is None and bundles[index]: looks_config = bundles[index].get_looks_config() bot.loadout_config = load_bot_appearance(looks_config, bot.team) if match_config.extension_config is not None and match_config.extension_config.python_file_path is not None: self.load_extension(match_config.extension_config.python_file_path) self.match_config = match_config self.start_match_configuration = match_config.create_match_settings() self.game_interface.start_match_configuration = self.start_match_configuration
python
def load_match_config(self, match_config: MatchConfig, bot_config_overrides={}): self.num_participants = match_config.num_players self.names = [bot.name for bot in match_config.player_configs] self.teams = [bot.team for bot in match_config.player_configs] bundles = [bot_config_overrides[index] if index in bot_config_overrides else get_bot_config_bundle(bot.config_path) if bot.config_path else None for index, bot in enumerate(match_config.player_configs)] self.python_files = [bundle.python_file if bundle else None for bundle in bundles] self.parameters = [] for index, bot in enumerate(match_config.player_configs): python_config = None if bot.rlbot_controlled: python_config = load_bot_parameters(bundles[index]) self.parameters.append(python_config) if bot.loadout_config is None and bundles[index]: looks_config = bundles[index].get_looks_config() bot.loadout_config = load_bot_appearance(looks_config, bot.team) if match_config.extension_config is not None and match_config.extension_config.python_file_path is not None: self.load_extension(match_config.extension_config.python_file_path) self.match_config = match_config self.start_match_configuration = match_config.create_match_settings() self.game_interface.start_match_configuration = self.start_match_configuration
[ "def", "load_match_config", "(", "self", ",", "match_config", ":", "MatchConfig", ",", "bot_config_overrides", "=", "{", "}", ")", ":", "self", ".", "num_participants", "=", "match_config", ".", "num_players", "self", ".", "names", "=", "[", "bot", ".", "nam...
Loads the match config into internal data structures, which prepares us to later launch bot processes and start the match. This is an alternative to the load_config method; they accomplish the same thing.
[ "Loads", "the", "match", "config", "into", "internal", "data", "structures", "which", "prepares", "us", "to", "later", "launch", "bot", "processes", "and", "start", "the", "match", "." ]
3f9b6bec8b9baf4dcfff0f6cf3103c8744ac6234
https://github.com/RLBot/RLBot/blob/3f9b6bec8b9baf4dcfff0f6cf3103c8744ac6234/src/main/python/rlbot/setup_manager.py#L112-L146
228,640
RLBot/RLBot
src/main/python/rlbot/setup_manager.py
SetupManager.load_config
def load_config(self, framework_config: ConfigObject = None, config_location=DEFAULT_RLBOT_CONFIG_LOCATION, bot_configs=None, looks_configs=None): """ Loads the configuration into internal data structures, which prepares us to later launch bot processes and start the match. :param framework_config: A config object that indicates what bots to run. May come from parsing a rlbot.cfg. :param config_location: The location of the rlbot.cfg file, which will be used to resolve relative paths. :param bot_configs: Overrides for bot configurations. :param looks_configs: Overrides for looks configurations. """ self.logger.debug('reading the configs') # Set up RLBot.cfg if framework_config is None: framework_config = create_bot_config_layout() framework_config.parse_file(config_location, max_index=MAX_PLAYERS) if bot_configs is None: bot_configs = {} if looks_configs is None: looks_configs = {} match_config = parse_match_config(framework_config, config_location, bot_configs, looks_configs) self.load_match_config(match_config, bot_configs)
python
def load_config(self, framework_config: ConfigObject = None, config_location=DEFAULT_RLBOT_CONFIG_LOCATION, bot_configs=None, looks_configs=None): self.logger.debug('reading the configs') # Set up RLBot.cfg if framework_config is None: framework_config = create_bot_config_layout() framework_config.parse_file(config_location, max_index=MAX_PLAYERS) if bot_configs is None: bot_configs = {} if looks_configs is None: looks_configs = {} match_config = parse_match_config(framework_config, config_location, bot_configs, looks_configs) self.load_match_config(match_config, bot_configs)
[ "def", "load_config", "(", "self", ",", "framework_config", ":", "ConfigObject", "=", "None", ",", "config_location", "=", "DEFAULT_RLBOT_CONFIG_LOCATION", ",", "bot_configs", "=", "None", ",", "looks_configs", "=", "None", ")", ":", "self", ".", "logger", ".", ...
Loads the configuration into internal data structures, which prepares us to later launch bot processes and start the match. :param framework_config: A config object that indicates what bots to run. May come from parsing a rlbot.cfg. :param config_location: The location of the rlbot.cfg file, which will be used to resolve relative paths. :param bot_configs: Overrides for bot configurations. :param looks_configs: Overrides for looks configurations.
[ "Loads", "the", "configuration", "into", "internal", "data", "structures", "which", "prepares", "us", "to", "later", "launch", "bot", "processes", "and", "start", "the", "match", "." ]
3f9b6bec8b9baf4dcfff0f6cf3103c8744ac6234
https://github.com/RLBot/RLBot/blob/3f9b6bec8b9baf4dcfff0f6cf3103c8744ac6234/src/main/python/rlbot/setup_manager.py#L148-L172
228,641
RLBot/RLBot
src/main/python/rlbot/utils/class_importer.py
load_external_module
def load_external_module(python_file): """ Returns the loaded module. All of its newly added dependencies are removed from sys.path after load. """ # There's a special case where python_file may be pointing at the base agent definition here in the framework. # This is sometimes done as a default and we want to allow it. Short-circuit the logic because # loading it as if it's an external class is a real mess. if os.path.abspath(python_file) == os.path.abspath(inspect.getfile(BaseAgent)): return BaseAgent, BaseAgent.__module__ if not os.path.isfile(python_file): raise FileNotFoundError(f"Could not find file {python_file}!") dir_name = os.path.dirname(python_file) module_name = os.path.splitext(os.path.basename(python_file))[0] keys_before = set(sys.modules.keys()) # Temporarily modify the sys.path while we load the module so that the module can use import statements naturally sys.path.insert(0, dir_name) loaded_module = importlib.import_module(module_name) # Clean up the changes to sys.path and sys.modules to avoid collisions with other external classes and to # prepare for the next reload. added = set(sys.modules.keys()).difference(keys_before) del sys.path[0] for key in added: del sys.modules[key] return loaded_module
python
def load_external_module(python_file): # There's a special case where python_file may be pointing at the base agent definition here in the framework. # This is sometimes done as a default and we want to allow it. Short-circuit the logic because # loading it as if it's an external class is a real mess. if os.path.abspath(python_file) == os.path.abspath(inspect.getfile(BaseAgent)): return BaseAgent, BaseAgent.__module__ if not os.path.isfile(python_file): raise FileNotFoundError(f"Could not find file {python_file}!") dir_name = os.path.dirname(python_file) module_name = os.path.splitext(os.path.basename(python_file))[0] keys_before = set(sys.modules.keys()) # Temporarily modify the sys.path while we load the module so that the module can use import statements naturally sys.path.insert(0, dir_name) loaded_module = importlib.import_module(module_name) # Clean up the changes to sys.path and sys.modules to avoid collisions with other external classes and to # prepare for the next reload. added = set(sys.modules.keys()).difference(keys_before) del sys.path[0] for key in added: del sys.modules[key] return loaded_module
[ "def", "load_external_module", "(", "python_file", ")", ":", "# There's a special case where python_file may be pointing at the base agent definition here in the framework.", "# This is sometimes done as a default and we want to allow it. Short-circuit the logic because", "# loading it as if it's an...
Returns the loaded module. All of its newly added dependencies are removed from sys.path after load.
[ "Returns", "the", "loaded", "module", ".", "All", "of", "its", "newly", "added", "dependencies", "are", "removed", "from", "sys", ".", "path", "after", "load", "." ]
3f9b6bec8b9baf4dcfff0f6cf3103c8744ac6234
https://github.com/RLBot/RLBot/blob/3f9b6bec8b9baf4dcfff0f6cf3103c8744ac6234/src/main/python/rlbot/utils/class_importer.py#L65-L95
228,642
RLBot/RLBot
src/main/python/rlbot/botmanager/bot_manager.py
BotManager.send_quick_chat_from_agent
def send_quick_chat_from_agent(self, team_only, quick_chat): """ Passes the agents quick chats to the game, and also to other python bots. This does perform limiting. You are limited to 5 quick chats in a 2 second period starting from the first chat. This means you can spread your chats out to be even within that 2 second period. You could spam them in the first little bit but then will be throttled. """ # Send the quick chat to the game rlbot_status = send_quick_chat_flat(self.game_interface, self.index, self.team, team_only, quick_chat) if rlbot_status == RLBotCoreStatus.QuickChatRateExceeded: self.logger.debug('quick chat disabled') else: # Make the quick chat visible to other python bots. Unfortunately other languages can't see it. send_quick_chat(self.quick_chat_queue_holder, self.index, self.team, team_only, quick_chat)
python
def send_quick_chat_from_agent(self, team_only, quick_chat): # Send the quick chat to the game rlbot_status = send_quick_chat_flat(self.game_interface, self.index, self.team, team_only, quick_chat) if rlbot_status == RLBotCoreStatus.QuickChatRateExceeded: self.logger.debug('quick chat disabled') else: # Make the quick chat visible to other python bots. Unfortunately other languages can't see it. send_quick_chat(self.quick_chat_queue_holder, self.index, self.team, team_only, quick_chat)
[ "def", "send_quick_chat_from_agent", "(", "self", ",", "team_only", ",", "quick_chat", ")", ":", "# Send the quick chat to the game", "rlbot_status", "=", "send_quick_chat_flat", "(", "self", ".", "game_interface", ",", "self", ".", "index", ",", "self", ".", "team"...
Passes the agents quick chats to the game, and also to other python bots. This does perform limiting. You are limited to 5 quick chats in a 2 second period starting from the first chat. This means you can spread your chats out to be even within that 2 second period. You could spam them in the first little bit but then will be throttled.
[ "Passes", "the", "agents", "quick", "chats", "to", "the", "game", "and", "also", "to", "other", "python", "bots", ".", "This", "does", "perform", "limiting", ".", "You", "are", "limited", "to", "5", "quick", "chats", "in", "a", "2", "second", "period", ...
3f9b6bec8b9baf4dcfff0f6cf3103c8744ac6234
https://github.com/RLBot/RLBot/blob/3f9b6bec8b9baf4dcfff0f6cf3103c8744ac6234/src/main/python/rlbot/botmanager/bot_manager.py#L65-L81
228,643
RLBot/RLBot
src/main/python/rlbot/botmanager/bot_manager.py
BotManager.run
def run(self): """ Loads interface for RLBot, prepares environment and agent, and calls the update for the agent. """ self.logger.debug('initializing agent') self.game_interface.load_interface() self.prepare_for_run() # Create Ratelimiter rate_limit = rate_limiter.RateLimiter(GAME_TICK_PACKET_POLLS_PER_SECOND) last_tick_game_time = None # What the tick time of the last observed tick was last_call_real_time = datetime.now() # When we last called the Agent # Get bot module agent, agent_class_file = self.load_agent() last_module_modification_time = os.stat(agent_class_file).st_mtime # Run until main process tells to stop, or we detect Ctrl+C try: while not self.terminate_request_event.is_set(): self.pull_data_from_game() # game_tick_packet = self.game_interface.get # Read from game data shared memory # Run the Agent only if the game_info has updated. tick_game_time = self.get_game_time() should_call_while_paused = datetime.now() - last_call_real_time >= MAX_AGENT_CALL_PERIOD if tick_game_time != last_tick_game_time or should_call_while_paused: last_tick_game_time = tick_game_time last_call_real_time = datetime.now() # Reload the Agent if it has been modified or if reload is requested from outside. try: new_module_modification_time = os.stat(agent_class_file).st_mtime if new_module_modification_time != last_module_modification_time or self.reload_request_event.is_set(): self.reload_request_event.clear() last_module_modification_time = new_module_modification_time # Clear the render queue on reload. if hasattr(agent, 'renderer') and isinstance(agent.renderer, RenderingManager): agent.renderer.clear_all_touched_render_groups() agent, agent_class_file = self.reload_agent(agent, agent_class_file) except FileNotFoundError: self.logger.error(f"Agent file {agent_class_file} was not found. 
Will try again.") time.sleep(0.5) except Exception: self.logger.error("Reloading the agent failed:\n" + traceback.format_exc()) time.sleep(0.5) # Avoid burning CPU / logs if this starts happening constantly # Call agent try: self.call_agent(agent, self.agent_class_wrapper.get_loaded_class()) except Exception as e: self.logger.error("Call to agent failed:\n" + traceback.format_exc()) # Ratelimit here rate_limit.acquire() except KeyboardInterrupt: self.terminate_request_event.set() # Shut down the bot by calling cleanup functions. if hasattr(agent, 'retire'): try: agent.retire() except Exception as e: self.logger.error("Retiring the agent failed:\n" + traceback.format_exc()) if hasattr(agent, 'renderer') and isinstance(agent.renderer, RenderingManager): agent.renderer.clear_all_touched_render_groups() # Zero out the inputs, so it's more obvious that the bot has stopped. self.game_interface.update_player_input(PlayerInput(), self.index) self.quick_chat_quit_event.set() # Shut down quick chat. # If terminated, send callback self.termination_complete_event.set()
python
def run(self): self.logger.debug('initializing agent') self.game_interface.load_interface() self.prepare_for_run() # Create Ratelimiter rate_limit = rate_limiter.RateLimiter(GAME_TICK_PACKET_POLLS_PER_SECOND) last_tick_game_time = None # What the tick time of the last observed tick was last_call_real_time = datetime.now() # When we last called the Agent # Get bot module agent, agent_class_file = self.load_agent() last_module_modification_time = os.stat(agent_class_file).st_mtime # Run until main process tells to stop, or we detect Ctrl+C try: while not self.terminate_request_event.is_set(): self.pull_data_from_game() # game_tick_packet = self.game_interface.get # Read from game data shared memory # Run the Agent only if the game_info has updated. tick_game_time = self.get_game_time() should_call_while_paused = datetime.now() - last_call_real_time >= MAX_AGENT_CALL_PERIOD if tick_game_time != last_tick_game_time or should_call_while_paused: last_tick_game_time = tick_game_time last_call_real_time = datetime.now() # Reload the Agent if it has been modified or if reload is requested from outside. try: new_module_modification_time = os.stat(agent_class_file).st_mtime if new_module_modification_time != last_module_modification_time or self.reload_request_event.is_set(): self.reload_request_event.clear() last_module_modification_time = new_module_modification_time # Clear the render queue on reload. if hasattr(agent, 'renderer') and isinstance(agent.renderer, RenderingManager): agent.renderer.clear_all_touched_render_groups() agent, agent_class_file = self.reload_agent(agent, agent_class_file) except FileNotFoundError: self.logger.error(f"Agent file {agent_class_file} was not found. 
Will try again.") time.sleep(0.5) except Exception: self.logger.error("Reloading the agent failed:\n" + traceback.format_exc()) time.sleep(0.5) # Avoid burning CPU / logs if this starts happening constantly # Call agent try: self.call_agent(agent, self.agent_class_wrapper.get_loaded_class()) except Exception as e: self.logger.error("Call to agent failed:\n" + traceback.format_exc()) # Ratelimit here rate_limit.acquire() except KeyboardInterrupt: self.terminate_request_event.set() # Shut down the bot by calling cleanup functions. if hasattr(agent, 'retire'): try: agent.retire() except Exception as e: self.logger.error("Retiring the agent failed:\n" + traceback.format_exc()) if hasattr(agent, 'renderer') and isinstance(agent.renderer, RenderingManager): agent.renderer.clear_all_touched_render_groups() # Zero out the inputs, so it's more obvious that the bot has stopped. self.game_interface.update_player_input(PlayerInput(), self.index) self.quick_chat_quit_event.set() # Shut down quick chat. # If terminated, send callback self.termination_complete_event.set()
[ "def", "run", "(", "self", ")", ":", "self", ".", "logger", ".", "debug", "(", "'initializing agent'", ")", "self", ".", "game_interface", ".", "load_interface", "(", ")", "self", ".", "prepare_for_run", "(", ")", "# Create Ratelimiter", "rate_limit", "=", "...
Loads interface for RLBot, prepares environment and agent, and calls the update for the agent.
[ "Loads", "interface", "for", "RLBot", "prepares", "environment", "and", "agent", "and", "calls", "the", "update", "for", "the", "agent", "." ]
3f9b6bec8b9baf4dcfff0f6cf3103c8744ac6234
https://github.com/RLBot/RLBot/blob/3f9b6bec8b9baf4dcfff0f6cf3103c8744ac6234/src/main/python/rlbot/botmanager/bot_manager.py#L152-L226
228,644
RLBot/RLBot
src/main/python/rlbot/gui/qt_root.py
RLBotQTGui.clean_overall_config_loadouts
def clean_overall_config_loadouts(self): """ Set all unusued loadout paths to None. This makes sure agents don't have a custom loadout when new agents are added in the gui. """ for i in range(MAX_PLAYERS): if i not in self.index_manager.numbers: self.overall_config.set_value(PARTICIPANT_CONFIGURATION_HEADER, PARTICIPANT_LOADOUT_CONFIG_KEY, "None", i)
python
def clean_overall_config_loadouts(self): for i in range(MAX_PLAYERS): if i not in self.index_manager.numbers: self.overall_config.set_value(PARTICIPANT_CONFIGURATION_HEADER, PARTICIPANT_LOADOUT_CONFIG_KEY, "None", i)
[ "def", "clean_overall_config_loadouts", "(", "self", ")", ":", "for", "i", "in", "range", "(", "MAX_PLAYERS", ")", ":", "if", "i", "not", "in", "self", ".", "index_manager", ".", "numbers", ":", "self", ".", "overall_config", ".", "set_value", "(", "PARTIC...
Set all unusued loadout paths to None. This makes sure agents don't have a custom loadout when new agents are added in the gui.
[ "Set", "all", "unusued", "loadout", "paths", "to", "None", ".", "This", "makes", "sure", "agents", "don", "t", "have", "a", "custom", "loadout", "when", "new", "agents", "are", "added", "in", "the", "gui", "." ]
3f9b6bec8b9baf4dcfff0f6cf3103c8744ac6234
https://github.com/RLBot/RLBot/blob/3f9b6bec8b9baf4dcfff0f6cf3103c8744ac6234/src/main/python/rlbot/gui/qt_root.py#L448-L455
228,645
RLBot/RLBot
src/main/python/rlbot/agents/base_agent.py
BaseAgent.convert_output_to_v4
def convert_output_to_v4(self, controller_input): """Converts a v3 output to a v4 controller state""" player_input = SimpleControllerState() player_input.throttle = controller_input[0] player_input.steer = controller_input[1] player_input.pitch = controller_input[2] player_input.yaw = controller_input[3] player_input.roll = controller_input[4] player_input.jump = controller_input[5] player_input.boost = controller_input[6] player_input.handbrake = controller_input[7] return player_input
python
def convert_output_to_v4(self, controller_input): player_input = SimpleControllerState() player_input.throttle = controller_input[0] player_input.steer = controller_input[1] player_input.pitch = controller_input[2] player_input.yaw = controller_input[3] player_input.roll = controller_input[4] player_input.jump = controller_input[5] player_input.boost = controller_input[6] player_input.handbrake = controller_input[7] return player_input
[ "def", "convert_output_to_v4", "(", "self", ",", "controller_input", ")", ":", "player_input", "=", "SimpleControllerState", "(", ")", "player_input", ".", "throttle", "=", "controller_input", "[", "0", "]", "player_input", ".", "steer", "=", "controller_input", "...
Converts a v3 output to a v4 controller state
[ "Converts", "a", "v3", "output", "to", "a", "v4", "controller", "state" ]
3f9b6bec8b9baf4dcfff0f6cf3103c8744ac6234
https://github.com/RLBot/RLBot/blob/3f9b6bec8b9baf4dcfff0f6cf3103c8744ac6234/src/main/python/rlbot/agents/base_agent.py#L179-L191
228,646
RLBot/RLBot
src/main/python/rlbot/training/training.py
_wait_until_good_ticks
def _wait_until_good_ticks(game_interface: GameInterface, required_new_ticks: int=3): """Blocks until we're getting new packets, indicating that the match is ready.""" rate_limit = rate_limiter.RateLimiter(120) last_tick_game_time = None # What the tick time of the last observed tick was packet = GameTickPacket() # We want to do a deep copy for game inputs so people don't mess with em seen_times = 0 while seen_times < required_new_ticks: game_interface.update_live_data_packet(packet) def is_good_tick(): if packet.game_info.seconds_elapsed == last_tick_game_time: return False if not packet.game_info.is_round_active: return False if any(car.is_demolished for car in packet.game_cars): return False return True if is_good_tick(): seen_times += 1 last_tick_game_time = packet.game_info.seconds_elapsed rate_limit.acquire()
python
def _wait_until_good_ticks(game_interface: GameInterface, required_new_ticks: int=3): rate_limit = rate_limiter.RateLimiter(120) last_tick_game_time = None # What the tick time of the last observed tick was packet = GameTickPacket() # We want to do a deep copy for game inputs so people don't mess with em seen_times = 0 while seen_times < required_new_ticks: game_interface.update_live_data_packet(packet) def is_good_tick(): if packet.game_info.seconds_elapsed == last_tick_game_time: return False if not packet.game_info.is_round_active: return False if any(car.is_demolished for car in packet.game_cars): return False return True if is_good_tick(): seen_times += 1 last_tick_game_time = packet.game_info.seconds_elapsed rate_limit.acquire()
[ "def", "_wait_until_good_ticks", "(", "game_interface", ":", "GameInterface", ",", "required_new_ticks", ":", "int", "=", "3", ")", ":", "rate_limit", "=", "rate_limiter", ".", "RateLimiter", "(", "120", ")", "last_tick_game_time", "=", "None", "# What the tick time...
Blocks until we're getting new packets, indicating that the match is ready.
[ "Blocks", "until", "we", "re", "getting", "new", "packets", "indicating", "that", "the", "match", "is", "ready", "." ]
3f9b6bec8b9baf4dcfff0f6cf3103c8744ac6234
https://github.com/RLBot/RLBot/blob/3f9b6bec8b9baf4dcfff0f6cf3103c8744ac6234/src/main/python/rlbot/training/training.py#L176-L193
228,647
RLBot/RLBot
src/main/python/rlbot/training/status_rendering.py
training_status_renderer_context
def training_status_renderer_context(exercise_names: List[str], renderman: RenderingManager): """ Ensures that the screen is always cleared, even on fatal errors in code that uses this renderer. """ renderer = TrainingStatusRenderer(exercise_names, renderman) try: yield renderer finally: renderer.clear_screen()
python
def training_status_renderer_context(exercise_names: List[str], renderman: RenderingManager): renderer = TrainingStatusRenderer(exercise_names, renderman) try: yield renderer finally: renderer.clear_screen()
[ "def", "training_status_renderer_context", "(", "exercise_names", ":", "List", "[", "str", "]", ",", "renderman", ":", "RenderingManager", ")", ":", "renderer", "=", "TrainingStatusRenderer", "(", "exercise_names", ",", "renderman", ")", "try", ":", "yield", "rend...
Ensures that the screen is always cleared, even on fatal errors in code that uses this renderer.
[ "Ensures", "that", "the", "screen", "is", "always", "cleared", "even", "on", "fatal", "errors", "in", "code", "that", "uses", "this", "renderer", "." ]
3f9b6bec8b9baf4dcfff0f6cf3103c8744ac6234
https://github.com/RLBot/RLBot/blob/3f9b6bec8b9baf4dcfff0f6cf3103c8744ac6234/src/main/python/rlbot/training/status_rendering.py#L10-L19
228,648
RLBot/RLBot
src/main/python/rlbot/utils/structures/game_interface.py
GameInterface.inject_dll
def inject_dll(self): """ Calling this function will inject the DLL without GUI DLL will return status codes from 0 to 5 which correspond to injector_codes DLL injection is only valid if codes are 0->'INJECTION_SUCCESSFUL' or 3->'RLBOT_DLL_ALREADY_INJECTED' It will print the output code and if it's not valid it will kill runner.py If RL isn't running the Injector will stay hidden waiting for RL to open and inject as soon as it does """ self.logger.info('Injecting DLL') # Inject DLL injector_dir = os.path.join(get_dll_directory(), 'RLBot_Injector.exe') for file in ['RLBot_Injector.exe', 'RLBot_Core.dll', 'RLBot_Core_Interface.dll', 'RLBot_Core_Interface_32.dll']: file_path = os.path.join(get_dll_directory(), file) if not os.path.isfile(file_path): raise FileNotFoundError(f'{file} was not found in {get_dll_directory()}. ' 'Please check that the file exists and your antivirus ' 'is not removing it. See https://github.com/RLBot/RLBot/wiki/Antivirus-Notes') incode = subprocess.call([injector_dir, 'hidden']) injector_codes = ['INJECTION_SUCCESSFUL', 'INJECTION_FAILED', 'MULTIPLE_ROCKET_LEAGUE_PROCESSES_FOUND', 'RLBOT_DLL_ALREADY_INJECTED', 'RLBOT_DLL_NOT_FOUND', 'MULTIPLE_RLBOT_DLL_FILES_FOUND'] injector_valid_codes = ['INJECTION_SUCCESSFUL', 'RLBOT_DLL_ALREADY_INJECTED'] injection_status = injector_codes[incode] if injection_status in injector_valid_codes: self.logger.info('Finished Injecting DLL') return injection_status else: self.logger.error('Failed to inject DLL: ' + injection_status) sys.exit()
python
def inject_dll(self): self.logger.info('Injecting DLL') # Inject DLL injector_dir = os.path.join(get_dll_directory(), 'RLBot_Injector.exe') for file in ['RLBot_Injector.exe', 'RLBot_Core.dll', 'RLBot_Core_Interface.dll', 'RLBot_Core_Interface_32.dll']: file_path = os.path.join(get_dll_directory(), file) if not os.path.isfile(file_path): raise FileNotFoundError(f'{file} was not found in {get_dll_directory()}. ' 'Please check that the file exists and your antivirus ' 'is not removing it. See https://github.com/RLBot/RLBot/wiki/Antivirus-Notes') incode = subprocess.call([injector_dir, 'hidden']) injector_codes = ['INJECTION_SUCCESSFUL', 'INJECTION_FAILED', 'MULTIPLE_ROCKET_LEAGUE_PROCESSES_FOUND', 'RLBOT_DLL_ALREADY_INJECTED', 'RLBOT_DLL_NOT_FOUND', 'MULTIPLE_RLBOT_DLL_FILES_FOUND'] injector_valid_codes = ['INJECTION_SUCCESSFUL', 'RLBOT_DLL_ALREADY_INJECTED'] injection_status = injector_codes[incode] if injection_status in injector_valid_codes: self.logger.info('Finished Injecting DLL') return injection_status else: self.logger.error('Failed to inject DLL: ' + injection_status) sys.exit()
[ "def", "inject_dll", "(", "self", ")", ":", "self", ".", "logger", ".", "info", "(", "'Injecting DLL'", ")", "# Inject DLL", "injector_dir", "=", "os", ".", "path", ".", "join", "(", "get_dll_directory", "(", ")", ",", "'RLBot_Injector.exe'", ")", "for", "...
Calling this function will inject the DLL without GUI DLL will return status codes from 0 to 5 which correspond to injector_codes DLL injection is only valid if codes are 0->'INJECTION_SUCCESSFUL' or 3->'RLBOT_DLL_ALREADY_INJECTED' It will print the output code and if it's not valid it will kill runner.py If RL isn't running the Injector will stay hidden waiting for RL to open and inject as soon as it does
[ "Calling", "this", "function", "will", "inject", "the", "DLL", "without", "GUI", "DLL", "will", "return", "status", "codes", "from", "0", "to", "5", "which", "correspond", "to", "injector_codes", "DLL", "injection", "is", "only", "valid", "if", "codes", "are...
3f9b6bec8b9baf4dcfff0f6cf3103c8744ac6234
https://github.com/RLBot/RLBot/blob/3f9b6bec8b9baf4dcfff0f6cf3103c8744ac6234/src/main/python/rlbot/utils/structures/game_interface.py#L189-L224
228,649
RLBot/RLBot
src/main/python/rlbot/utils/structures/game_interface.py
GameInterface.update_rigid_body_tick
def update_rigid_body_tick(self, rigid_body_tick: RigidBodyTick): """Get the most recent state of the physics engine.""" rlbot_status = self.game.UpdateRigidBodyTick(rigid_body_tick) self.game_status(None, rlbot_status) return rigid_body_tick
python
def update_rigid_body_tick(self, rigid_body_tick: RigidBodyTick): rlbot_status = self.game.UpdateRigidBodyTick(rigid_body_tick) self.game_status(None, rlbot_status) return rigid_body_tick
[ "def", "update_rigid_body_tick", "(", "self", ",", "rigid_body_tick", ":", "RigidBodyTick", ")", ":", "rlbot_status", "=", "self", ".", "game", ".", "UpdateRigidBodyTick", "(", "rigid_body_tick", ")", "self", ".", "game_status", "(", "None", ",", "rlbot_status", ...
Get the most recent state of the physics engine.
[ "Get", "the", "most", "recent", "state", "of", "the", "physics", "engine", "." ]
3f9b6bec8b9baf4dcfff0f6cf3103c8744ac6234
https://github.com/RLBot/RLBot/blob/3f9b6bec8b9baf4dcfff0f6cf3103c8744ac6234/src/main/python/rlbot/utils/structures/game_interface.py#L285-L289
228,650
RLBot/RLBot
src/main/python/rlbot/utils/structures/game_interface.py
GameInterface.get_ball_prediction
def get_ball_prediction(self) -> BallPredictionPacket: """ Gets the latest ball prediction available in shared memory. Only works if BallPrediction.exe is running. """ byte_buffer = self.game.GetBallPrediction() if byte_buffer.size >= 4: # GetRootAsGameTickPacket gets angry if the size is less than 4 # We're counting on this copying the data over to a new memory location so that the original # pointer can be freed safely. proto_string = ctypes.string_at(byte_buffer.ptr, byte_buffer.size) self.game.Free(byte_buffer.ptr) # Avoid a memory leak self.game_status(None, RLBotCoreStatus.Success) return BallPredictionPacket.GetRootAsBallPrediction(proto_string, 0)
python
def get_ball_prediction(self) -> BallPredictionPacket: byte_buffer = self.game.GetBallPrediction() if byte_buffer.size >= 4: # GetRootAsGameTickPacket gets angry if the size is less than 4 # We're counting on this copying the data over to a new memory location so that the original # pointer can be freed safely. proto_string = ctypes.string_at(byte_buffer.ptr, byte_buffer.size) self.game.Free(byte_buffer.ptr) # Avoid a memory leak self.game_status(None, RLBotCoreStatus.Success) return BallPredictionPacket.GetRootAsBallPrediction(proto_string, 0)
[ "def", "get_ball_prediction", "(", "self", ")", "->", "BallPredictionPacket", ":", "byte_buffer", "=", "self", ".", "game", ".", "GetBallPrediction", "(", ")", "if", "byte_buffer", ".", "size", ">=", "4", ":", "# GetRootAsGameTickPacket gets angry if the size is less ...
Gets the latest ball prediction available in shared memory. Only works if BallPrediction.exe is running.
[ "Gets", "the", "latest", "ball", "prediction", "available", "in", "shared", "memory", ".", "Only", "works", "if", "BallPrediction", ".", "exe", "is", "running", "." ]
3f9b6bec8b9baf4dcfff0f6cf3103c8744ac6234
https://github.com/RLBot/RLBot/blob/3f9b6bec8b9baf4dcfff0f6cf3103c8744ac6234/src/main/python/rlbot/utils/structures/game_interface.py#L311-L323
228,651
RLBot/RLBot
src/main/python/rlbot/utils/game_state_util.py
CarState.convert_to_flat
def convert_to_flat(self, builder): """ In this conversion, we always want to return a valid flatbuffer pointer even if all the contents are blank because sometimes we need to put empty car states into the car list to make the indices line up. """ physics_offset = None if self.physics is None else self.physics.convert_to_flat(builder) DesiredCarState.DesiredCarStateStart(builder) if physics_offset is not None: DesiredCarState.DesiredCarStateAddPhysics(builder, physics_offset) if self.boost_amount is not None: DesiredCarState.DesiredCarStateAddBoostAmount(builder, Float.CreateFloat(builder, self.boost_amount)) if self.jumped is not None: DesiredCarState.DesiredCarStateAddJumped(builder, Bool.CreateBool(builder, self.jumped)) if self.double_jumped is not None: DesiredCarState.DesiredCarStateAddDoubleJumped(builder, Bool.CreateBool(builder, self.double_jumped)) return DesiredCarState.DesiredCarStateEnd(builder)
python
def convert_to_flat(self, builder): physics_offset = None if self.physics is None else self.physics.convert_to_flat(builder) DesiredCarState.DesiredCarStateStart(builder) if physics_offset is not None: DesiredCarState.DesiredCarStateAddPhysics(builder, physics_offset) if self.boost_amount is not None: DesiredCarState.DesiredCarStateAddBoostAmount(builder, Float.CreateFloat(builder, self.boost_amount)) if self.jumped is not None: DesiredCarState.DesiredCarStateAddJumped(builder, Bool.CreateBool(builder, self.jumped)) if self.double_jumped is not None: DesiredCarState.DesiredCarStateAddDoubleJumped(builder, Bool.CreateBool(builder, self.double_jumped)) return DesiredCarState.DesiredCarStateEnd(builder)
[ "def", "convert_to_flat", "(", "self", ",", "builder", ")", ":", "physics_offset", "=", "None", "if", "self", ".", "physics", "is", "None", "else", "self", ".", "physics", ".", "convert_to_flat", "(", "builder", ")", "DesiredCarState", ".", "DesiredCarStateSta...
In this conversion, we always want to return a valid flatbuffer pointer even if all the contents are blank because sometimes we need to put empty car states into the car list to make the indices line up.
[ "In", "this", "conversion", "we", "always", "want", "to", "return", "a", "valid", "flatbuffer", "pointer", "even", "if", "all", "the", "contents", "are", "blank", "because", "sometimes", "we", "need", "to", "put", "empty", "car", "states", "into", "the", "...
3f9b6bec8b9baf4dcfff0f6cf3103c8744ac6234
https://github.com/RLBot/RLBot/blob/3f9b6bec8b9baf4dcfff0f6cf3103c8744ac6234/src/main/python/rlbot/utils/game_state_util.py#L109-L126
228,652
RLBot/RLBot
src/main/python/rlbot/utils/game_state_util.py
BoostState.convert_to_flat
def convert_to_flat(self, builder): """ In this conversion, we always want to return a valid flatbuffer pointer even if all the contents are blank because sometimes we need to put empty boost states into the boost list to make the indices line up. """ DesiredBoostState.DesiredBoostStateStart(builder) if self.respawn_time is not None: DesiredBoostState.DesiredBoostStateAddRespawnTime(builder, Float.CreateFloat(builder, self.respawn_time)) return DesiredBoostState.DesiredBoostStateEnd(builder)
python
def convert_to_flat(self, builder): DesiredBoostState.DesiredBoostStateStart(builder) if self.respawn_time is not None: DesiredBoostState.DesiredBoostStateAddRespawnTime(builder, Float.CreateFloat(builder, self.respawn_time)) return DesiredBoostState.DesiredBoostStateEnd(builder)
[ "def", "convert_to_flat", "(", "self", ",", "builder", ")", ":", "DesiredBoostState", ".", "DesiredBoostStateStart", "(", "builder", ")", "if", "self", ".", "respawn_time", "is", "not", "None", ":", "DesiredBoostState", ".", "DesiredBoostStateAddRespawnTime", "(", ...
In this conversion, we always want to return a valid flatbuffer pointer even if all the contents are blank because sometimes we need to put empty boost states into the boost list to make the indices line up.
[ "In", "this", "conversion", "we", "always", "want", "to", "return", "a", "valid", "flatbuffer", "pointer", "even", "if", "all", "the", "contents", "are", "blank", "because", "sometimes", "we", "need", "to", "put", "empty", "boost", "states", "into", "the", ...
3f9b6bec8b9baf4dcfff0f6cf3103c8744ac6234
https://github.com/RLBot/RLBot/blob/3f9b6bec8b9baf4dcfff0f6cf3103c8744ac6234/src/main/python/rlbot/utils/game_state_util.py#L134-L144
228,653
gouthambs/Flask-Blogging
flask_blogging/engine.py
BloggingEngine.init_app
def init_app(self, app, storage=None, cache=None, file_upload=None): """ Initialize the engine. :param app: The app to use :type app: Object :param storage: The blog storage instance that implements the :type storage: Object :param cache: (Optional) A Flask-Cache object to enable caching :type cache: Object ``Storage`` class interface. """ self.app = app self.config = self.app.config self.storage = storage or self.storage self.file_upload = file_upload or self.file_upload self.cache = cache or self.cache self._register_plugins(self.app, self.config) from .views import create_blueprint blog_app = create_blueprint(__name__, self) # external urls blueprint_created.send(self.app, engine=self, blueprint=blog_app) self.app.register_blueprint( blog_app, url_prefix=self.config.get("BLOGGING_URL_PREFIX")) self.app.extensions["FLASK_BLOGGING_ENGINE"] = self # duplicate self.app.extensions["blogging"] = self self.principal = Principal(self.app) engine_initialised.send(self.app, engine=self) if self.config.get("BLOGGING_ALLOW_FILEUPLOAD", True): self.ffu = self.file_upload or FlaskFileUpload(app)
python
def init_app(self, app, storage=None, cache=None, file_upload=None): self.app = app self.config = self.app.config self.storage = storage or self.storage self.file_upload = file_upload or self.file_upload self.cache = cache or self.cache self._register_plugins(self.app, self.config) from .views import create_blueprint blog_app = create_blueprint(__name__, self) # external urls blueprint_created.send(self.app, engine=self, blueprint=blog_app) self.app.register_blueprint( blog_app, url_prefix=self.config.get("BLOGGING_URL_PREFIX")) self.app.extensions["FLASK_BLOGGING_ENGINE"] = self # duplicate self.app.extensions["blogging"] = self self.principal = Principal(self.app) engine_initialised.send(self.app, engine=self) if self.config.get("BLOGGING_ALLOW_FILEUPLOAD", True): self.ffu = self.file_upload or FlaskFileUpload(app)
[ "def", "init_app", "(", "self", ",", "app", ",", "storage", "=", "None", ",", "cache", "=", "None", ",", "file_upload", "=", "None", ")", ":", "self", ".", "app", "=", "app", "self", ".", "config", "=", "self", ".", "app", ".", "config", "self", ...
Initialize the engine. :param app: The app to use :type app: Object :param storage: The blog storage instance that implements the :type storage: Object :param cache: (Optional) A Flask-Cache object to enable caching :type cache: Object ``Storage`` class interface.
[ "Initialize", "the", "engine", "." ]
6636b8941175e9910f116a329521f96b8b05a9ac
https://github.com/gouthambs/Flask-Blogging/blob/6636b8941175e9910f116a329521f96b8b05a9ac/flask_blogging/engine.py#L78-L111
228,654
gouthambs/Flask-Blogging
flask_blogging/sqlastorage.py
SQLAStorage.get_post_by_id
def get_post_by_id(self, post_id): """ Fetch the blog post given by ``post_id`` :param post_id: The post identifier for the blog post :type post_id: str :return: If the ``post_id`` is valid, the post data is retrieved, else returns ``None``. """ r = None post_id = _as_int(post_id) with self._engine.begin() as conn: try: post_statement = sqla.select([self._post_table]) \ .where(self._post_table.c.id == post_id) \ .alias('post') joined_statement = post_statement.join(self._tag_posts_table) \ .join(self._tag_table) \ .join(self._user_posts_table) \ .alias('join') # Note this will retrieve one row per tag all_rows = conn.execute( sqla.select([joined_statement]) ).fetchall() r = self._serialise_posts_and_tags_from_joined_rows( all_rows )[0] except Exception as e: self._logger.exception(str(e)) r = None return r
python
def get_post_by_id(self, post_id): r = None post_id = _as_int(post_id) with self._engine.begin() as conn: try: post_statement = sqla.select([self._post_table]) \ .where(self._post_table.c.id == post_id) \ .alias('post') joined_statement = post_statement.join(self._tag_posts_table) \ .join(self._tag_table) \ .join(self._user_posts_table) \ .alias('join') # Note this will retrieve one row per tag all_rows = conn.execute( sqla.select([joined_statement]) ).fetchall() r = self._serialise_posts_and_tags_from_joined_rows( all_rows )[0] except Exception as e: self._logger.exception(str(e)) r = None return r
[ "def", "get_post_by_id", "(", "self", ",", "post_id", ")", ":", "r", "=", "None", "post_id", "=", "_as_int", "(", "post_id", ")", "with", "self", ".", "_engine", ".", "begin", "(", ")", "as", "conn", ":", "try", ":", "post_statement", "=", "sqla", "....
Fetch the blog post given by ``post_id`` :param post_id: The post identifier for the blog post :type post_id: str :return: If the ``post_id`` is valid, the post data is retrieved, else returns ``None``.
[ "Fetch", "the", "blog", "post", "given", "by", "post_id" ]
6636b8941175e9910f116a329521f96b8b05a9ac
https://github.com/gouthambs/Flask-Blogging/blob/6636b8941175e9910f116a329521f96b8b05a9ac/flask_blogging/sqlastorage.py#L233-L266
228,655
gouthambs/Flask-Blogging
flask_blogging/sqlastorage.py
SQLAStorage.count_posts
def count_posts(self, tag=None, user_id=None, include_draft=False): """ Returns the total number of posts for the give filter :param tag: Filter by a specific tag :type tag: str :param user_id: Filter by a specific user :type user_id: str :param include_draft: Whether to include posts marked as draft or not :type include_draft: bool :return: The number of posts for the given filter. """ result = 0 with self._engine.begin() as conn: try: count_statement = sqla.select([sqla.func.count()]). \ select_from(self._post_table) sql_filter = self._get_filter(tag, user_id, include_draft, conn) count_statement = count_statement.where(sql_filter) result = conn.execute(count_statement).scalar() except Exception as e: self._logger.exception(str(e)) result = 0 return result
python
def count_posts(self, tag=None, user_id=None, include_draft=False): result = 0 with self._engine.begin() as conn: try: count_statement = sqla.select([sqla.func.count()]). \ select_from(self._post_table) sql_filter = self._get_filter(tag, user_id, include_draft, conn) count_statement = count_statement.where(sql_filter) result = conn.execute(count_statement).scalar() except Exception as e: self._logger.exception(str(e)) result = 0 return result
[ "def", "count_posts", "(", "self", ",", "tag", "=", "None", ",", "user_id", "=", "None", ",", "include_draft", "=", "False", ")", ":", "result", "=", "0", "with", "self", ".", "_engine", ".", "begin", "(", ")", "as", "conn", ":", "try", ":", "count...
Returns the total number of posts for the give filter :param tag: Filter by a specific tag :type tag: str :param user_id: Filter by a specific user :type user_id: str :param include_draft: Whether to include posts marked as draft or not :type include_draft: bool :return: The number of posts for the given filter.
[ "Returns", "the", "total", "number", "of", "posts", "for", "the", "give", "filter" ]
6636b8941175e9910f116a329521f96b8b05a9ac
https://github.com/gouthambs/Flask-Blogging/blob/6636b8941175e9910f116a329521f96b8b05a9ac/flask_blogging/sqlastorage.py#L336-L360
228,656
gouthambs/Flask-Blogging
flask_blogging/sqlastorage.py
SQLAStorage.delete_post
def delete_post(self, post_id): """ Delete the post defined by ``post_id`` :param post_id: The identifier corresponding to a post :type post_id: int :return: Returns True if the post was successfully deleted and False otherwise. """ status = False success = 0 post_id = _as_int(post_id) with self._engine.begin() as conn: try: post_del_statement = self._post_table.delete().where( self._post_table.c.id == post_id) conn.execute(post_del_statement) success += 1 except Exception as e: self._logger.exception(str(e)) try: user_posts_del_statement = self._user_posts_table.delete(). \ where(self._user_posts_table.c.post_id == post_id) conn.execute(user_posts_del_statement) success += 1 except Exception as e: self._logger.exception(str(e)) try: tag_posts_del_statement = self._tag_posts_table.delete(). \ where(self._tag_posts_table.c.post_id == post_id) conn.execute(tag_posts_del_statement) success += 1 except Exception as e: self._logger.exception(str(e)) status = success == 3 return status
python
def delete_post(self, post_id): status = False success = 0 post_id = _as_int(post_id) with self._engine.begin() as conn: try: post_del_statement = self._post_table.delete().where( self._post_table.c.id == post_id) conn.execute(post_del_statement) success += 1 except Exception as e: self._logger.exception(str(e)) try: user_posts_del_statement = self._user_posts_table.delete(). \ where(self._user_posts_table.c.post_id == post_id) conn.execute(user_posts_del_statement) success += 1 except Exception as e: self._logger.exception(str(e)) try: tag_posts_del_statement = self._tag_posts_table.delete(). \ where(self._tag_posts_table.c.post_id == post_id) conn.execute(tag_posts_del_statement) success += 1 except Exception as e: self._logger.exception(str(e)) status = success == 3 return status
[ "def", "delete_post", "(", "self", ",", "post_id", ")", ":", "status", "=", "False", "success", "=", "0", "post_id", "=", "_as_int", "(", "post_id", ")", "with", "self", ".", "_engine", ".", "begin", "(", ")", "as", "conn", ":", "try", ":", "post_del...
Delete the post defined by ``post_id`` :param post_id: The identifier corresponding to a post :type post_id: int :return: Returns True if the post was successfully deleted and False otherwise.
[ "Delete", "the", "post", "defined", "by", "post_id" ]
6636b8941175e9910f116a329521f96b8b05a9ac
https://github.com/gouthambs/Flask-Blogging/blob/6636b8941175e9910f116a329521f96b8b05a9ac/flask_blogging/sqlastorage.py#L362-L397
228,657
gouthambs/Flask-Blogging
flask_blogging/views.py
index
def index(count, page): """ Serves the page with a list of blog posts :param count: :param offset: :return: """ blogging_engine = _get_blogging_engine(current_app) storage = blogging_engine.storage config = blogging_engine.config count = count or config.get("BLOGGING_POSTS_PER_PAGE", 10) meta = _get_meta(storage, count, page) offset = meta["offset"] meta["is_user_blogger"] = _is_blogger(blogging_engine.blogger_permission) meta["count"] = count meta["page"] = page render = config.get("BLOGGING_RENDER_TEXT", True) posts = storage.get_posts(count=count, offset=offset, include_draft=False, tag=None, user_id=None, recent=True) index_posts_fetched.send(blogging_engine.app, engine=blogging_engine, posts=posts, meta=meta) for post in posts: blogging_engine.process_post(post, render=render) index_posts_processed.send(blogging_engine.app, engine=blogging_engine, posts=posts, meta=meta) return render_template("blogging/index.html", posts=posts, meta=meta, config=config)
python
def index(count, page): blogging_engine = _get_blogging_engine(current_app) storage = blogging_engine.storage config = blogging_engine.config count = count or config.get("BLOGGING_POSTS_PER_PAGE", 10) meta = _get_meta(storage, count, page) offset = meta["offset"] meta["is_user_blogger"] = _is_blogger(blogging_engine.blogger_permission) meta["count"] = count meta["page"] = page render = config.get("BLOGGING_RENDER_TEXT", True) posts = storage.get_posts(count=count, offset=offset, include_draft=False, tag=None, user_id=None, recent=True) index_posts_fetched.send(blogging_engine.app, engine=blogging_engine, posts=posts, meta=meta) for post in posts: blogging_engine.process_post(post, render=render) index_posts_processed.send(blogging_engine.app, engine=blogging_engine, posts=posts, meta=meta) return render_template("blogging/index.html", posts=posts, meta=meta, config=config)
[ "def", "index", "(", "count", ",", "page", ")", ":", "blogging_engine", "=", "_get_blogging_engine", "(", "current_app", ")", "storage", "=", "blogging_engine", ".", "storage", "config", "=", "blogging_engine", ".", "config", "count", "=", "count", "or", "conf...
Serves the page with a list of blog posts :param count: :param offset: :return:
[ "Serves", "the", "page", "with", "a", "list", "of", "blog", "posts" ]
6636b8941175e9910f116a329521f96b8b05a9ac
https://github.com/gouthambs/Flask-Blogging/blob/6636b8941175e9910f116a329521f96b8b05a9ac/flask_blogging/views.py#L104-L133
228,658
stripe/stripe-python
stripe/util.py
convert_to_dict
def convert_to_dict(obj): """Converts a StripeObject back to a regular dict. Nested StripeObjects are also converted back to regular dicts. :param obj: The StripeObject to convert. :returns: The StripeObject as a dict. """ if isinstance(obj, list): return [convert_to_dict(i) for i in obj] # This works by virtue of the fact that StripeObjects _are_ dicts. The dict # comprehension returns a regular dict and recursively applies the # conversion to each value. elif isinstance(obj, dict): return {k: convert_to_dict(v) for k, v in six.iteritems(obj)} else: return obj
python
def convert_to_dict(obj): if isinstance(obj, list): return [convert_to_dict(i) for i in obj] # This works by virtue of the fact that StripeObjects _are_ dicts. The dict # comprehension returns a regular dict and recursively applies the # conversion to each value. elif isinstance(obj, dict): return {k: convert_to_dict(v) for k, v in six.iteritems(obj)} else: return obj
[ "def", "convert_to_dict", "(", "obj", ")", ":", "if", "isinstance", "(", "obj", ",", "list", ")", ":", "return", "[", "convert_to_dict", "(", "i", ")", "for", "i", "in", "obj", "]", "# This works by virtue of the fact that StripeObjects _are_ dicts. The dict", "# ...
Converts a StripeObject back to a regular dict. Nested StripeObjects are also converted back to regular dicts. :param obj: The StripeObject to convert. :returns: The StripeObject as a dict.
[ "Converts", "a", "StripeObject", "back", "to", "a", "regular", "dict", "." ]
25b21ce6fcb59db22e5698b95a11f254081714b4
https://github.com/stripe/stripe-python/blob/25b21ce6fcb59db22e5698b95a11f254081714b4/stripe/util.py#L268-L285
228,659
cobrateam/splinter
splinter/browser.py
Browser
def Browser(driver_name="firefox", *args, **kwargs): """ Returns a driver instance for the given name. When working with ``firefox``, it's possible to provide a profile name and a list of extensions. If you don't provide any driver_name, then ``firefox`` will be used. If there is no driver registered with the provided ``driver_name``, this function will raise a :class:`splinter.exceptions.DriverNotFoundError` exception. """ try: driver = _DRIVERS[driver_name] except KeyError: raise DriverNotFoundError("No driver for %s" % driver_name) return driver(*args, **kwargs)
python
def Browser(driver_name="firefox", *args, **kwargs): try: driver = _DRIVERS[driver_name] except KeyError: raise DriverNotFoundError("No driver for %s" % driver_name) return driver(*args, **kwargs)
[ "def", "Browser", "(", "driver_name", "=", "\"firefox\"", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "try", ":", "driver", "=", "_DRIVERS", "[", "driver_name", "]", "except", "KeyError", ":", "raise", "DriverNotFoundError", "(", "\"No driver for %s...
Returns a driver instance for the given name. When working with ``firefox``, it's possible to provide a profile name and a list of extensions. If you don't provide any driver_name, then ``firefox`` will be used. If there is no driver registered with the provided ``driver_name``, this function will raise a :class:`splinter.exceptions.DriverNotFoundError` exception.
[ "Returns", "a", "driver", "instance", "for", "the", "given", "name", "." ]
32f11ff7fd7841b123f157720dcc95740f156ca8
https://github.com/cobrateam/splinter/blob/32f11ff7fd7841b123f157720dcc95740f156ca8/splinter/browser.py#L46-L64
228,660
cobrateam/splinter
splinter/driver/webdriver/__init__.py
Window.title
def title(self): """ The title of this window """ with switch_window(self._browser, self.name): return self._browser.title
python
def title(self): with switch_window(self._browser, self.name): return self._browser.title
[ "def", "title", "(", "self", ")", ":", "with", "switch_window", "(", "self", ".", "_browser", ",", "self", ".", "name", ")", ":", "return", "self", ".", "_browser", ".", "title" ]
The title of this window
[ "The", "title", "of", "this", "window" ]
32f11ff7fd7841b123f157720dcc95740f156ca8
https://github.com/cobrateam/splinter/blob/32f11ff7fd7841b123f157720dcc95740f156ca8/splinter/driver/webdriver/__init__.py#L52-L55
228,661
cobrateam/splinter
splinter/driver/webdriver/__init__.py
Window.url
def url(self): """ The url of this window """ with switch_window(self._browser, self.name): return self._browser.url
python
def url(self): with switch_window(self._browser, self.name): return self._browser.url
[ "def", "url", "(", "self", ")", ":", "with", "switch_window", "(", "self", ".", "_browser", ",", "self", ".", "name", ")", ":", "return", "self", ".", "_browser", ".", "url" ]
The url of this window
[ "The", "url", "of", "this", "window" ]
32f11ff7fd7841b123f157720dcc95740f156ca8
https://github.com/cobrateam/splinter/blob/32f11ff7fd7841b123f157720dcc95740f156ca8/splinter/driver/webdriver/__init__.py#L58-L61
228,662
cobrateam/splinter
splinter/driver/webdriver/__init__.py
Window.prev
def prev(self): """ Return the previous window """ prev_index = self.index - 1 prev_handle = self._browser.driver.window_handles[prev_index] return Window(self._browser, prev_handle)
python
def prev(self): prev_index = self.index - 1 prev_handle = self._browser.driver.window_handles[prev_index] return Window(self._browser, prev_handle)
[ "def", "prev", "(", "self", ")", ":", "prev_index", "=", "self", ".", "index", "-", "1", "prev_handle", "=", "self", ".", "_browser", ".", "driver", ".", "window_handles", "[", "prev_index", "]", "return", "Window", "(", "self", ".", "_browser", ",", "...
Return the previous window
[ "Return", "the", "previous", "window" ]
32f11ff7fd7841b123f157720dcc95740f156ca8
https://github.com/cobrateam/splinter/blob/32f11ff7fd7841b123f157720dcc95740f156ca8/splinter/driver/webdriver/__init__.py#L69-L73
228,663
cobrateam/splinter
splinter/driver/webdriver/__init__.py
Window.next
def next(self): """ Return the next window """ next_index = (self.index + 1) % len(self._browser.driver.window_handles) next_handle = self._browser.driver.window_handles[next_index] return Window(self._browser, next_handle)
python
def next(self): next_index = (self.index + 1) % len(self._browser.driver.window_handles) next_handle = self._browser.driver.window_handles[next_index] return Window(self._browser, next_handle)
[ "def", "next", "(", "self", ")", ":", "next_index", "=", "(", "self", ".", "index", "+", "1", ")", "%", "len", "(", "self", ".", "_browser", ".", "driver", ".", "window_handles", ")", "next_handle", "=", "self", ".", "_browser", ".", "driver", ".", ...
Return the next window
[ "Return", "the", "next", "window" ]
32f11ff7fd7841b123f157720dcc95740f156ca8
https://github.com/cobrateam/splinter/blob/32f11ff7fd7841b123f157720dcc95740f156ca8/splinter/driver/webdriver/__init__.py#L76-L80
228,664
cobrateam/splinter
splinter/driver/webdriver/__init__.py
Window.close
def close(self): """ Close this window. If this window is active, switch to previous window """ target = self.prev if (self.is_current and self.prev != self) else None with switch_window(self._browser, self.name): self._browser.driver.close() if target is not None: target.is_current = True
python
def close(self): target = self.prev if (self.is_current and self.prev != self) else None with switch_window(self._browser, self.name): self._browser.driver.close() if target is not None: target.is_current = True
[ "def", "close", "(", "self", ")", ":", "target", "=", "self", ".", "prev", "if", "(", "self", ".", "is_current", "and", "self", ".", "prev", "!=", "self", ")", "else", "None", "with", "switch_window", "(", "self", ".", "_browser", ",", "self", ".", ...
Close this window. If this window is active, switch to previous window
[ "Close", "this", "window", ".", "If", "this", "window", "is", "active", "switch", "to", "previous", "window" ]
32f11ff7fd7841b123f157720dcc95740f156ca8
https://github.com/cobrateam/splinter/blob/32f11ff7fd7841b123f157720dcc95740f156ca8/splinter/driver/webdriver/__init__.py#L98-L106
228,665
cobrateam/splinter
splinter/driver/webdriver/__init__.py
WebDriverElement.mouse_over
def mouse_over(self): """ Performs a mouse over the element. Currently works only on Chrome driver. """ self.scroll_to() ActionChains(self.parent.driver).move_to_element(self._element).perform()
python
def mouse_over(self): self.scroll_to() ActionChains(self.parent.driver).move_to_element(self._element).perform()
[ "def", "mouse_over", "(", "self", ")", ":", "self", ".", "scroll_to", "(", ")", "ActionChains", "(", "self", ".", "parent", ".", "driver", ")", ".", "move_to_element", "(", "self", ".", "_element", ")", ".", "perform", "(", ")" ]
Performs a mouse over the element. Currently works only on Chrome driver.
[ "Performs", "a", "mouse", "over", "the", "element", "." ]
32f11ff7fd7841b123f157720dcc95740f156ca8
https://github.com/cobrateam/splinter/blob/32f11ff7fd7841b123f157720dcc95740f156ca8/splinter/driver/webdriver/__init__.py#L724-L731
228,666
cobrateam/splinter
splinter/driver/webdriver/__init__.py
WebDriverElement.mouse_out
def mouse_out(self): """ Performs a mouse out the element. Currently works only on Chrome driver. """ self.scroll_to() ActionChains(self.parent.driver).move_by_offset(0, 0).click().perform()
python
def mouse_out(self): self.scroll_to() ActionChains(self.parent.driver).move_by_offset(0, 0).click().perform()
[ "def", "mouse_out", "(", "self", ")", ":", "self", ".", "scroll_to", "(", ")", "ActionChains", "(", "self", ".", "parent", ".", "driver", ")", ".", "move_by_offset", "(", "0", ",", "0", ")", ".", "click", "(", ")", ".", "perform", "(", ")" ]
Performs a mouse out the element. Currently works only on Chrome driver.
[ "Performs", "a", "mouse", "out", "the", "element", "." ]
32f11ff7fd7841b123f157720dcc95740f156ca8
https://github.com/cobrateam/splinter/blob/32f11ff7fd7841b123f157720dcc95740f156ca8/splinter/driver/webdriver/__init__.py#L733-L740
228,667
cobrateam/splinter
splinter/driver/webdriver/__init__.py
WebDriverElement.double_click
def double_click(self): """ Performs a double click in the element. Currently works only on Chrome driver. """ self.scroll_to() ActionChains(self.parent.driver).double_click(self._element).perform()
python
def double_click(self): self.scroll_to() ActionChains(self.parent.driver).double_click(self._element).perform()
[ "def", "double_click", "(", "self", ")", ":", "self", ".", "scroll_to", "(", ")", "ActionChains", "(", "self", ".", "parent", ".", "driver", ")", ".", "double_click", "(", "self", ".", "_element", ")", ".", "perform", "(", ")" ]
Performs a double click in the element. Currently works only on Chrome driver.
[ "Performs", "a", "double", "click", "in", "the", "element", "." ]
32f11ff7fd7841b123f157720dcc95740f156ca8
https://github.com/cobrateam/splinter/blob/32f11ff7fd7841b123f157720dcc95740f156ca8/splinter/driver/webdriver/__init__.py#L742-L749
228,668
cobrateam/splinter
splinter/driver/webdriver/__init__.py
WebDriverElement.right_click
def right_click(self): """ Performs a right click in the element. Currently works only on Chrome driver. """ self.scroll_to() ActionChains(self.parent.driver).context_click(self._element).perform()
python
def right_click(self): self.scroll_to() ActionChains(self.parent.driver).context_click(self._element).perform()
[ "def", "right_click", "(", "self", ")", ":", "self", ".", "scroll_to", "(", ")", "ActionChains", "(", "self", ".", "parent", ".", "driver", ")", ".", "context_click", "(", "self", ".", "_element", ")", ".", "perform", "(", ")" ]
Performs a right click in the element. Currently works only on Chrome driver.
[ "Performs", "a", "right", "click", "in", "the", "element", "." ]
32f11ff7fd7841b123f157720dcc95740f156ca8
https://github.com/cobrateam/splinter/blob/32f11ff7fd7841b123f157720dcc95740f156ca8/splinter/driver/webdriver/__init__.py#L751-L758
228,669
cobrateam/splinter
splinter/driver/webdriver/__init__.py
WebDriverElement.drag_and_drop
def drag_and_drop(self, droppable): """ Performs drag a element to another elmenet. Currently works only on Chrome driver. """ self.scroll_to() ActionChains(self.parent.driver).drag_and_drop(self._element, droppable._element).perform()
python
def drag_and_drop(self, droppable): self.scroll_to() ActionChains(self.parent.driver).drag_and_drop(self._element, droppable._element).perform()
[ "def", "drag_and_drop", "(", "self", ",", "droppable", ")", ":", "self", ".", "scroll_to", "(", ")", "ActionChains", "(", "self", ".", "parent", ".", "driver", ")", ".", "drag_and_drop", "(", "self", ".", "_element", ",", "droppable", ".", "_element", ")...
Performs drag a element to another elmenet. Currently works only on Chrome driver.
[ "Performs", "drag", "a", "element", "to", "another", "elmenet", "." ]
32f11ff7fd7841b123f157720dcc95740f156ca8
https://github.com/cobrateam/splinter/blob/32f11ff7fd7841b123f157720dcc95740f156ca8/splinter/driver/webdriver/__init__.py#L760-L767
228,670
django-haystack/pysolr
pysolr.py
force_unicode
def force_unicode(value): """ Forces a bytestring to become a Unicode string. """ if IS_PY3: # Python 3.X if isinstance(value, bytes): value = value.decode('utf-8', errors='replace') elif not isinstance(value, str): value = str(value) else: # Python 2.X if isinstance(value, str): value = value.decode('utf-8', 'replace') elif not isinstance(value, basestring): # NOQA: F821 value = unicode(value) # NOQA: F821 return value
python
def force_unicode(value): if IS_PY3: # Python 3.X if isinstance(value, bytes): value = value.decode('utf-8', errors='replace') elif not isinstance(value, str): value = str(value) else: # Python 2.X if isinstance(value, str): value = value.decode('utf-8', 'replace') elif not isinstance(value, basestring): # NOQA: F821 value = unicode(value) # NOQA: F821 return value
[ "def", "force_unicode", "(", "value", ")", ":", "if", "IS_PY3", ":", "# Python 3.X", "if", "isinstance", "(", "value", ",", "bytes", ")", ":", "value", "=", "value", ".", "decode", "(", "'utf-8'", ",", "errors", "=", "'replace'", ")", "elif", "not", "i...
Forces a bytestring to become a Unicode string.
[ "Forces", "a", "bytestring", "to", "become", "a", "Unicode", "string", "." ]
ee28b39324fa21a99842d297e313c1759d8adbd2
https://github.com/django-haystack/pysolr/blob/ee28b39324fa21a99842d297e313c1759d8adbd2/pysolr.py#L116-L133
228,671
django-haystack/pysolr
pysolr.py
force_bytes
def force_bytes(value): """ Forces a Unicode string to become a bytestring. """ if IS_PY3: if isinstance(value, str): value = value.encode('utf-8', 'backslashreplace') else: if isinstance(value, unicode): # NOQA: F821 value = value.encode('utf-8') return value
python
def force_bytes(value): if IS_PY3: if isinstance(value, str): value = value.encode('utf-8', 'backslashreplace') else: if isinstance(value, unicode): # NOQA: F821 value = value.encode('utf-8') return value
[ "def", "force_bytes", "(", "value", ")", ":", "if", "IS_PY3", ":", "if", "isinstance", "(", "value", ",", "str", ")", ":", "value", "=", "value", ".", "encode", "(", "'utf-8'", ",", "'backslashreplace'", ")", "else", ":", "if", "isinstance", "(", "valu...
Forces a Unicode string to become a bytestring.
[ "Forces", "a", "Unicode", "string", "to", "become", "a", "bytestring", "." ]
ee28b39324fa21a99842d297e313c1759d8adbd2
https://github.com/django-haystack/pysolr/blob/ee28b39324fa21a99842d297e313c1759d8adbd2/pysolr.py#L136-L147
228,672
django-haystack/pysolr
pysolr.py
safe_urlencode
def safe_urlencode(params, doseq=0): """ UTF-8-safe version of safe_urlencode The stdlib safe_urlencode prior to Python 3.x chokes on UTF-8 values which can't fail down to ascii. """ if IS_PY3: return urlencode(params, doseq) if hasattr(params, "items"): params = params.items() new_params = [] for k, v in params: k = k.encode("utf-8") if isinstance(v, (list, tuple)): new_params.append((k, [force_bytes(i) for i in v])) else: new_params.append((k, force_bytes(v))) return urlencode(new_params, doseq)
python
def safe_urlencode(params, doseq=0): if IS_PY3: return urlencode(params, doseq) if hasattr(params, "items"): params = params.items() new_params = [] for k, v in params: k = k.encode("utf-8") if isinstance(v, (list, tuple)): new_params.append((k, [force_bytes(i) for i in v])) else: new_params.append((k, force_bytes(v))) return urlencode(new_params, doseq)
[ "def", "safe_urlencode", "(", "params", ",", "doseq", "=", "0", ")", ":", "if", "IS_PY3", ":", "return", "urlencode", "(", "params", ",", "doseq", ")", "if", "hasattr", "(", "params", ",", "\"items\"", ")", ":", "params", "=", "params", ".", "items", ...
UTF-8-safe version of safe_urlencode The stdlib safe_urlencode prior to Python 3.x chokes on UTF-8 values which can't fail down to ascii.
[ "UTF", "-", "8", "-", "safe", "version", "of", "safe_urlencode" ]
ee28b39324fa21a99842d297e313c1759d8adbd2
https://github.com/django-haystack/pysolr/blob/ee28b39324fa21a99842d297e313c1759d8adbd2/pysolr.py#L180-L203
228,673
django-haystack/pysolr
pysolr.py
Solr._extract_error
def _extract_error(self, resp): """ Extract the actual error message from a solr response. """ reason = resp.headers.get('reason', None) full_response = None if reason is None: try: # if response is in json format reason = resp.json()['error']['msg'] except KeyError: # if json response has unexpected structure full_response = resp.content except ValueError: # otherwise we assume it's html reason, full_html = self._scrape_response(resp.headers, resp.content) full_response = unescape_html(full_html) msg = "[Reason: %s]" % reason if reason is None: msg += "\n%s" % full_response return msg
python
def _extract_error(self, resp): reason = resp.headers.get('reason', None) full_response = None if reason is None: try: # if response is in json format reason = resp.json()['error']['msg'] except KeyError: # if json response has unexpected structure full_response = resp.content except ValueError: # otherwise we assume it's html reason, full_html = self._scrape_response(resp.headers, resp.content) full_response = unescape_html(full_html) msg = "[Reason: %s]" % reason if reason is None: msg += "\n%s" % full_response return msg
[ "def", "_extract_error", "(", "self", ",", "resp", ")", ":", "reason", "=", "resp", ".", "headers", ".", "get", "(", "'reason'", ",", "None", ")", "full_response", "=", "None", "if", "reason", "is", "None", ":", "try", ":", "# if response is in json format...
Extract the actual error message from a solr response.
[ "Extract", "the", "actual", "error", "message", "from", "a", "solr", "response", "." ]
ee28b39324fa21a99842d297e313c1759d8adbd2
https://github.com/django-haystack/pysolr/blob/ee28b39324fa21a99842d297e313c1759d8adbd2/pysolr.py#L505-L529
228,674
django-haystack/pysolr
pysolr.py
Solr._scrape_response
def _scrape_response(self, headers, response): """ Scrape the html response. """ # identify the responding server server_type = None server_string = headers.get('server', '') if server_string and 'jetty' in server_string.lower(): server_type = 'jetty' if server_string and 'coyote' in server_string.lower(): server_type = 'tomcat' reason = None full_html = '' dom_tree = None # In Python3, response can be made of bytes if IS_PY3 and hasattr(response, 'decode'): response = response.decode() if response.startswith('<?xml'): # Try a strict XML parse try: soup = ElementTree.fromstring(response) reason_node = soup.find('lst[@name="error"]/str[@name="msg"]') tb_node = soup.find('lst[@name="error"]/str[@name="trace"]') if reason_node is not None: full_html = reason = reason_node.text.strip() if tb_node is not None: full_html = tb_node.text.strip() if reason is None: reason = full_html # Since we had a precise match, we'll return the results now: if reason and full_html: return reason, full_html except ElementTree.ParseError: # XML parsing error, so we'll let the more liberal code handle it. 
pass if server_type == 'tomcat': # Tomcat doesn't produce a valid XML response or consistent HTML: m = re.search(r'<(h1)[^>]*>\s*(.+?)\s*</\1>', response, re.IGNORECASE) if m: reason = m.group(2) else: full_html = "%s" % response else: # Let's assume others do produce a valid XML response try: dom_tree = ElementTree.fromstring(response) reason_node = None # html page might be different for every server if server_type == 'jetty': reason_node = dom_tree.find('body/pre') else: reason_node = dom_tree.find('head/title') if reason_node is not None: reason = reason_node.text if reason is None: full_html = ElementTree.tostring(dom_tree) except SyntaxError as err: LOG.warning('Unable to extract error message from invalid XML: %s', err, extra={'data': {'response': response}}) full_html = "%s" % response full_html = force_unicode(full_html) full_html = full_html.replace('\n', '') full_html = full_html.replace('\r', '') full_html = full_html.replace('<br/>', '') full_html = full_html.replace('<br />', '') full_html = full_html.strip() return reason, full_html
python
def _scrape_response(self, headers, response): # identify the responding server server_type = None server_string = headers.get('server', '') if server_string and 'jetty' in server_string.lower(): server_type = 'jetty' if server_string and 'coyote' in server_string.lower(): server_type = 'tomcat' reason = None full_html = '' dom_tree = None # In Python3, response can be made of bytes if IS_PY3 and hasattr(response, 'decode'): response = response.decode() if response.startswith('<?xml'): # Try a strict XML parse try: soup = ElementTree.fromstring(response) reason_node = soup.find('lst[@name="error"]/str[@name="msg"]') tb_node = soup.find('lst[@name="error"]/str[@name="trace"]') if reason_node is not None: full_html = reason = reason_node.text.strip() if tb_node is not None: full_html = tb_node.text.strip() if reason is None: reason = full_html # Since we had a precise match, we'll return the results now: if reason and full_html: return reason, full_html except ElementTree.ParseError: # XML parsing error, so we'll let the more liberal code handle it. 
pass if server_type == 'tomcat': # Tomcat doesn't produce a valid XML response or consistent HTML: m = re.search(r'<(h1)[^>]*>\s*(.+?)\s*</\1>', response, re.IGNORECASE) if m: reason = m.group(2) else: full_html = "%s" % response else: # Let's assume others do produce a valid XML response try: dom_tree = ElementTree.fromstring(response) reason_node = None # html page might be different for every server if server_type == 'jetty': reason_node = dom_tree.find('body/pre') else: reason_node = dom_tree.find('head/title') if reason_node is not None: reason = reason_node.text if reason is None: full_html = ElementTree.tostring(dom_tree) except SyntaxError as err: LOG.warning('Unable to extract error message from invalid XML: %s', err, extra={'data': {'response': response}}) full_html = "%s" % response full_html = force_unicode(full_html) full_html = full_html.replace('\n', '') full_html = full_html.replace('\r', '') full_html = full_html.replace('<br/>', '') full_html = full_html.replace('<br />', '') full_html = full_html.strip() return reason, full_html
[ "def", "_scrape_response", "(", "self", ",", "headers", ",", "response", ")", ":", "# identify the responding server", "server_type", "=", "None", "server_string", "=", "headers", ".", "get", "(", "'server'", ",", "''", ")", "if", "server_string", "and", "'jetty...
Scrape the html response.
[ "Scrape", "the", "html", "response", "." ]
ee28b39324fa21a99842d297e313c1759d8adbd2
https://github.com/django-haystack/pysolr/blob/ee28b39324fa21a99842d297e313c1759d8adbd2/pysolr.py#L531-L608
228,675
django-haystack/pysolr
pysolr.py
Solr._from_python
def _from_python(self, value): """ Converts python values to a form suitable for insertion into the xml we send to solr. """ if hasattr(value, 'strftime'): if hasattr(value, 'hour'): offset = value.utcoffset() if offset: value = value - offset value = value.replace(tzinfo=None).isoformat() + 'Z' else: value = "%sT00:00:00Z" % value.isoformat() elif isinstance(value, bool): if value: value = 'true' else: value = 'false' else: if IS_PY3: # Python 3.X if isinstance(value, bytes): value = str(value, errors='replace') # NOQA: F821 else: # Python 2.X if isinstance(value, str): value = unicode(value, errors='replace') # NOQA: F821 value = "{0}".format(value) return clean_xml_string(value)
python
def _from_python(self, value): if hasattr(value, 'strftime'): if hasattr(value, 'hour'): offset = value.utcoffset() if offset: value = value - offset value = value.replace(tzinfo=None).isoformat() + 'Z' else: value = "%sT00:00:00Z" % value.isoformat() elif isinstance(value, bool): if value: value = 'true' else: value = 'false' else: if IS_PY3: # Python 3.X if isinstance(value, bytes): value = str(value, errors='replace') # NOQA: F821 else: # Python 2.X if isinstance(value, str): value = unicode(value, errors='replace') # NOQA: F821 value = "{0}".format(value) return clean_xml_string(value)
[ "def", "_from_python", "(", "self", ",", "value", ")", ":", "if", "hasattr", "(", "value", ",", "'strftime'", ")", ":", "if", "hasattr", "(", "value", ",", "'hour'", ")", ":", "offset", "=", "value", ".", "utcoffset", "(", ")", "if", "offset", ":", ...
Converts python values to a form suitable for insertion into the xml we send to solr.
[ "Converts", "python", "values", "to", "a", "form", "suitable", "for", "insertion", "into", "the", "xml", "we", "send", "to", "solr", "." ]
ee28b39324fa21a99842d297e313c1759d8adbd2
https://github.com/django-haystack/pysolr/blob/ee28b39324fa21a99842d297e313c1759d8adbd2/pysolr.py#L612-L642
228,676
django-haystack/pysolr
pysolr.py
Solr._to_python
def _to_python(self, value): """ Converts values from Solr to native Python values. """ if isinstance(value, (int, float, long, complex)): return value if isinstance(value, (list, tuple)): value = value[0] if value == 'true': return True elif value == 'false': return False is_string = False if IS_PY3: if isinstance(value, bytes): value = force_unicode(value) if isinstance(value, str): is_string = True else: if isinstance(value, str): value = force_unicode(value) if isinstance(value, basestring): # NOQA: F821 is_string = True if is_string: possible_datetime = DATETIME_REGEX.search(value) if possible_datetime: date_values = possible_datetime.groupdict() for dk, dv in date_values.items(): date_values[dk] = int(dv) return datetime.datetime(date_values['year'], date_values['month'], date_values['day'], date_values['hour'], date_values['minute'], date_values['second']) try: # This is slightly gross but it's hard to tell otherwise what the # string's original type might have been. return ast.literal_eval(value) except (ValueError, SyntaxError): # If it fails, continue on. pass return value
python
def _to_python(self, value): if isinstance(value, (int, float, long, complex)): return value if isinstance(value, (list, tuple)): value = value[0] if value == 'true': return True elif value == 'false': return False is_string = False if IS_PY3: if isinstance(value, bytes): value = force_unicode(value) if isinstance(value, str): is_string = True else: if isinstance(value, str): value = force_unicode(value) if isinstance(value, basestring): # NOQA: F821 is_string = True if is_string: possible_datetime = DATETIME_REGEX.search(value) if possible_datetime: date_values = possible_datetime.groupdict() for dk, dv in date_values.items(): date_values[dk] = int(dv) return datetime.datetime(date_values['year'], date_values['month'], date_values['day'], date_values['hour'], date_values['minute'], date_values['second']) try: # This is slightly gross but it's hard to tell otherwise what the # string's original type might have been. return ast.literal_eval(value) except (ValueError, SyntaxError): # If it fails, continue on. pass return value
[ "def", "_to_python", "(", "self", ",", "value", ")", ":", "if", "isinstance", "(", "value", ",", "(", "int", ",", "float", ",", "long", ",", "complex", ")", ")", ":", "return", "value", "if", "isinstance", "(", "value", ",", "(", "list", ",", "tupl...
Converts values from Solr to native Python values.
[ "Converts", "values", "from", "Solr", "to", "native", "Python", "values", "." ]
ee28b39324fa21a99842d297e313c1759d8adbd2
https://github.com/django-haystack/pysolr/blob/ee28b39324fa21a99842d297e313c1759d8adbd2/pysolr.py#L644-L698
228,677
django-haystack/pysolr
pysolr.py
Solr._is_null_value
def _is_null_value(self, value): """ Check if a given value is ``null``. Criteria for this is based on values that shouldn't be included in the Solr ``add`` request at all. """ if value is None: return True if IS_PY3: # Python 3.X if isinstance(value, str) and len(value) == 0: return True else: # Python 2.X if isinstance(value, basestring) and len(value) == 0: # NOQA: F821 return True # TODO: This should probably be removed when solved in core Solr level? return False
python
def _is_null_value(self, value): if value is None: return True if IS_PY3: # Python 3.X if isinstance(value, str) and len(value) == 0: return True else: # Python 2.X if isinstance(value, basestring) and len(value) == 0: # NOQA: F821 return True # TODO: This should probably be removed when solved in core Solr level? return False
[ "def", "_is_null_value", "(", "self", ",", "value", ")", ":", "if", "value", "is", "None", ":", "return", "True", "if", "IS_PY3", ":", "# Python 3.X", "if", "isinstance", "(", "value", ",", "str", ")", "and", "len", "(", "value", ")", "==", "0", ":",...
Check if a given value is ``null``. Criteria for this is based on values that shouldn't be included in the Solr ``add`` request at all.
[ "Check", "if", "a", "given", "value", "is", "null", "." ]
ee28b39324fa21a99842d297e313c1759d8adbd2
https://github.com/django-haystack/pysolr/blob/ee28b39324fa21a99842d297e313c1759d8adbd2/pysolr.py#L700-L720
228,678
django-haystack/pysolr
pysolr.py
Solr.search
def search(self, q, search_handler=None, **kwargs): """ Performs a search and returns the results. Requires a ``q`` for a string version of the query to run. Optionally accepts ``**kwargs`` for additional options to be passed through the Solr URL. Returns ``self.results_cls`` class object (defaults to ``pysolr.Results``) Usage:: # All docs. results = solr.search('*:*') # Search with highlighting. results = solr.search('ponies', **{ 'hl': 'true', 'hl.fragsize': 10, }) """ params = {'q': q} params.update(kwargs) response = self._select(params, handler=search_handler) decoded = self.decoder.decode(response) self.log.debug( "Found '%s' search results.", # cover both cases: there is no response key or value is None (decoded.get('response', {}) or {}).get('numFound', 0) ) return self.results_cls(decoded)
python
def search(self, q, search_handler=None, **kwargs): params = {'q': q} params.update(kwargs) response = self._select(params, handler=search_handler) decoded = self.decoder.decode(response) self.log.debug( "Found '%s' search results.", # cover both cases: there is no response key or value is None (decoded.get('response', {}) or {}).get('numFound', 0) ) return self.results_cls(decoded)
[ "def", "search", "(", "self", ",", "q", ",", "search_handler", "=", "None", ",", "*", "*", "kwargs", ")", ":", "params", "=", "{", "'q'", ":", "q", "}", "params", ".", "update", "(", "kwargs", ")", "response", "=", "self", ".", "_select", "(", "p...
Performs a search and returns the results. Requires a ``q`` for a string version of the query to run. Optionally accepts ``**kwargs`` for additional options to be passed through the Solr URL. Returns ``self.results_cls`` class object (defaults to ``pysolr.Results``) Usage:: # All docs. results = solr.search('*:*') # Search with highlighting. results = solr.search('ponies', **{ 'hl': 'true', 'hl.fragsize': 10, })
[ "Performs", "a", "search", "and", "returns", "the", "results", "." ]
ee28b39324fa21a99842d297e313c1759d8adbd2
https://github.com/django-haystack/pysolr/blob/ee28b39324fa21a99842d297e313c1759d8adbd2/pysolr.py#L724-L758
228,679
django-haystack/pysolr
pysolr.py
Solr.more_like_this
def more_like_this(self, q, mltfl, handler='mlt', **kwargs): """ Finds and returns results similar to the provided query. Returns ``self.results_cls`` class object (defaults to ``pysolr.Results``) Requires Solr 1.3+. Usage:: similar = solr.more_like_this('id:doc_234', 'text') """ params = { 'q': q, 'mlt.fl': mltfl, } params.update(kwargs) response = self._mlt(params, handler=handler) decoded = self.decoder.decode(response) self.log.debug( "Found '%s' MLT results.", # cover both cases: there is no response key or value is None (decoded.get('response', {}) or {}).get('numFound', 0) ) return self.results_cls(decoded)
python
def more_like_this(self, q, mltfl, handler='mlt', **kwargs): params = { 'q': q, 'mlt.fl': mltfl, } params.update(kwargs) response = self._mlt(params, handler=handler) decoded = self.decoder.decode(response) self.log.debug( "Found '%s' MLT results.", # cover both cases: there is no response key or value is None (decoded.get('response', {}) or {}).get('numFound', 0) ) return self.results_cls(decoded)
[ "def", "more_like_this", "(", "self", ",", "q", ",", "mltfl", ",", "handler", "=", "'mlt'", ",", "*", "*", "kwargs", ")", ":", "params", "=", "{", "'q'", ":", "q", ",", "'mlt.fl'", ":", "mltfl", ",", "}", "params", ".", "update", "(", "kwargs", "...
Finds and returns results similar to the provided query. Returns ``self.results_cls`` class object (defaults to ``pysolr.Results``) Requires Solr 1.3+. Usage:: similar = solr.more_like_this('id:doc_234', 'text')
[ "Finds", "and", "returns", "results", "similar", "to", "the", "provided", "query", "." ]
ee28b39324fa21a99842d297e313c1759d8adbd2
https://github.com/django-haystack/pysolr/blob/ee28b39324fa21a99842d297e313c1759d8adbd2/pysolr.py#L760-L787
228,680
django-haystack/pysolr
pysolr.py
Solr.suggest_terms
def suggest_terms(self, fields, prefix, handler='terms', **kwargs): """ Accepts a list of field names and a prefix Returns a dictionary keyed on field name containing a list of ``(term, count)`` pairs Requires Solr 1.4+. """ params = { 'terms.fl': fields, 'terms.prefix': prefix, } params.update(kwargs) response = self._suggest_terms(params, handler=handler) result = self.decoder.decode(response) terms = result.get("terms", {}) res = {} # in Solr 1.x the value of terms is a flat list: # ["field_name", ["dance",23,"dancers",10,"dancing",8,"dancer",6]] # # in Solr 3.x the value of terms is a dict: # {"field_name": ["dance",23,"dancers",10,"dancing",8,"dancer",6]} if isinstance(terms, (list, tuple)): terms = dict(zip(terms[0::2], terms[1::2])) for field, values in terms.items(): tmp = [] while values: tmp.append((values.pop(0), values.pop(0))) res[field] = tmp self.log.debug("Found '%d' Term suggestions results.", sum(len(j) for i, j in res.items())) return res
python
def suggest_terms(self, fields, prefix, handler='terms', **kwargs): params = { 'terms.fl': fields, 'terms.prefix': prefix, } params.update(kwargs) response = self._suggest_terms(params, handler=handler) result = self.decoder.decode(response) terms = result.get("terms", {}) res = {} # in Solr 1.x the value of terms is a flat list: # ["field_name", ["dance",23,"dancers",10,"dancing",8,"dancer",6]] # # in Solr 3.x the value of terms is a dict: # {"field_name": ["dance",23,"dancers",10,"dancing",8,"dancer",6]} if isinstance(terms, (list, tuple)): terms = dict(zip(terms[0::2], terms[1::2])) for field, values in terms.items(): tmp = [] while values: tmp.append((values.pop(0), values.pop(0))) res[field] = tmp self.log.debug("Found '%d' Term suggestions results.", sum(len(j) for i, j in res.items())) return res
[ "def", "suggest_terms", "(", "self", ",", "fields", ",", "prefix", ",", "handler", "=", "'terms'", ",", "*", "*", "kwargs", ")", ":", "params", "=", "{", "'terms.fl'", ":", "fields", ",", "'terms.prefix'", ":", "prefix", ",", "}", "params", ".", "updat...
Accepts a list of field names and a prefix Returns a dictionary keyed on field name containing a list of ``(term, count)`` pairs Requires Solr 1.4+.
[ "Accepts", "a", "list", "of", "field", "names", "and", "a", "prefix" ]
ee28b39324fa21a99842d297e313c1759d8adbd2
https://github.com/django-haystack/pysolr/blob/ee28b39324fa21a99842d297e313c1759d8adbd2/pysolr.py#L789-L825
228,681
django-haystack/pysolr
pysolr.py
Solr.add
def add(self, docs, boost=None, fieldUpdates=None, commit=None, softCommit=False, commitWithin=None, waitFlush=None, waitSearcher=None, overwrite=None, handler='update'): """ Adds or updates documents. Requires ``docs``, which is a list of dictionaries. Each key is the field name and each value is the value to index. Optionally accepts ``commit``. Default is ``None``. None signals to use default Optionally accepts ``softCommit``. Default is ``False``. Optionally accepts ``boost``. Default is ``None``. Optionally accepts ``fieldUpdates``. Default is ``None``. Optionally accepts ``commitWithin``. Default is ``None``. Optionally accepts ``waitFlush``. Default is ``None``. Optionally accepts ``waitSearcher``. Default is ``None``. Optionally accepts ``overwrite``. Default is ``None``. Usage:: solr.add([ { "id": "doc_1", "title": "A test document", }, { "id": "doc_2", "title": "The Banana: Tasty or Dangerous?", }, ]) """ start_time = time.time() self.log.debug("Starting to build add request...") message = ElementTree.Element('add') if commitWithin: message.set('commitWithin', commitWithin) for doc in docs: el = self._build_doc(doc, boost=boost, fieldUpdates=fieldUpdates) message.append(el) # This returns a bytestring. Ugh. m = ElementTree.tostring(message, encoding='utf-8') # Convert back to Unicode please. m = force_unicode(m) end_time = time.time() self.log.debug("Built add request of %s docs in %0.2f seconds.", len(message), end_time - start_time) return self._update(m, commit=commit, softCommit=softCommit, waitFlush=waitFlush, waitSearcher=waitSearcher, overwrite=overwrite, handler=handler)
python
def add(self, docs, boost=None, fieldUpdates=None, commit=None, softCommit=False, commitWithin=None, waitFlush=None, waitSearcher=None, overwrite=None, handler='update'): start_time = time.time() self.log.debug("Starting to build add request...") message = ElementTree.Element('add') if commitWithin: message.set('commitWithin', commitWithin) for doc in docs: el = self._build_doc(doc, boost=boost, fieldUpdates=fieldUpdates) message.append(el) # This returns a bytestring. Ugh. m = ElementTree.tostring(message, encoding='utf-8') # Convert back to Unicode please. m = force_unicode(m) end_time = time.time() self.log.debug("Built add request of %s docs in %0.2f seconds.", len(message), end_time - start_time) return self._update(m, commit=commit, softCommit=softCommit, waitFlush=waitFlush, waitSearcher=waitSearcher, overwrite=overwrite, handler=handler)
[ "def", "add", "(", "self", ",", "docs", ",", "boost", "=", "None", ",", "fieldUpdates", "=", "None", ",", "commit", "=", "None", ",", "softCommit", "=", "False", ",", "commitWithin", "=", "None", ",", "waitFlush", "=", "None", ",", "waitSearcher", "=",...
Adds or updates documents. Requires ``docs``, which is a list of dictionaries. Each key is the field name and each value is the value to index. Optionally accepts ``commit``. Default is ``None``. None signals to use default Optionally accepts ``softCommit``. Default is ``False``. Optionally accepts ``boost``. Default is ``None``. Optionally accepts ``fieldUpdates``. Default is ``None``. Optionally accepts ``commitWithin``. Default is ``None``. Optionally accepts ``waitFlush``. Default is ``None``. Optionally accepts ``waitSearcher``. Default is ``None``. Optionally accepts ``overwrite``. Default is ``None``. Usage:: solr.add([ { "id": "doc_1", "title": "A test document", }, { "id": "doc_2", "title": "The Banana: Tasty or Dangerous?", }, ])
[ "Adds", "or", "updates", "documents", "." ]
ee28b39324fa21a99842d297e313c1759d8adbd2
https://github.com/django-haystack/pysolr/blob/ee28b39324fa21a99842d297e313c1759d8adbd2/pysolr.py#L870-L926
228,682
django-haystack/pysolr
pysolr.py
Solr.delete
def delete(self, id=None, q=None, commit=None, softCommit=False, waitFlush=None, waitSearcher=None, handler='update'): # NOQA: A002 """ Deletes documents. Requires *either* ``id`` or ``query``. ``id`` is if you know the specific document id to remove. Note that ``id`` can also be a list of document ids to be deleted. ``query`` is a Lucene-style query indicating a collection of documents to delete. Optionally accepts ``commit``. Default is ``True``. Optionally accepts ``softCommit``. Default is ``False``. Optionally accepts ``waitFlush``. Default is ``None``. Optionally accepts ``waitSearcher``. Default is ``None``. Usage:: solr.delete(id='doc_12') solr.delete(id=['doc_1', 'doc_3']) solr.delete(q='*:*') """ if id is None and q is None: raise ValueError('You must specify "id" or "q".') elif id is not None and q is not None: raise ValueError('You many only specify "id" OR "q", not both.') elif id is not None: if not isinstance(id, (list, set, tuple)): doc_id = [id] else: doc_id = list(filter(None, id)) if doc_id: m = '<delete>%s</delete>' % ''.join('<id>%s</id>' % i for i in doc_id) else: raise ValueError('The list of documents to delete was empty.') elif q is not None: m = '<delete><query>%s</query></delete>' % q return self._update(m, commit=commit, softCommit=softCommit, waitFlush=waitFlush, waitSearcher=waitSearcher, handler=handler)
python
def delete(self, id=None, q=None, commit=None, softCommit=False, waitFlush=None, waitSearcher=None, handler='update'): # NOQA: A002 if id is None and q is None: raise ValueError('You must specify "id" or "q".') elif id is not None and q is not None: raise ValueError('You many only specify "id" OR "q", not both.') elif id is not None: if not isinstance(id, (list, set, tuple)): doc_id = [id] else: doc_id = list(filter(None, id)) if doc_id: m = '<delete>%s</delete>' % ''.join('<id>%s</id>' % i for i in doc_id) else: raise ValueError('The list of documents to delete was empty.') elif q is not None: m = '<delete><query>%s</query></delete>' % q return self._update(m, commit=commit, softCommit=softCommit, waitFlush=waitFlush, waitSearcher=waitSearcher, handler=handler)
[ "def", "delete", "(", "self", ",", "id", "=", "None", ",", "q", "=", "None", ",", "commit", "=", "None", ",", "softCommit", "=", "False", ",", "waitFlush", "=", "None", ",", "waitSearcher", "=", "None", ",", "handler", "=", "'update'", ")", ":", "#...
Deletes documents. Requires *either* ``id`` or ``query``. ``id`` is if you know the specific document id to remove. Note that ``id`` can also be a list of document ids to be deleted. ``query`` is a Lucene-style query indicating a collection of documents to delete. Optionally accepts ``commit``. Default is ``True``. Optionally accepts ``softCommit``. Default is ``False``. Optionally accepts ``waitFlush``. Default is ``None``. Optionally accepts ``waitSearcher``. Default is ``None``. Usage:: solr.delete(id='doc_12') solr.delete(id=['doc_1', 'doc_3']) solr.delete(q='*:*')
[ "Deletes", "documents", "." ]
ee28b39324fa21a99842d297e313c1759d8adbd2
https://github.com/django-haystack/pysolr/blob/ee28b39324fa21a99842d297e313c1759d8adbd2/pysolr.py#L928-L968
228,683
django-haystack/pysolr
pysolr.py
Solr.commit
def commit(self, softCommit=False, waitFlush=None, waitSearcher=None, expungeDeletes=None, handler='update'): """ Forces Solr to write the index data to disk. Optionally accepts ``expungeDeletes``. Default is ``None``. Optionally accepts ``waitFlush``. Default is ``None``. Optionally accepts ``waitSearcher``. Default is ``None``. Optionally accepts ``softCommit``. Default is ``False``. Usage:: solr.commit() """ if expungeDeletes is not None: msg = '<commit expungeDeletes="%s" />' % str(bool(expungeDeletes)).lower() else: msg = '<commit />' return self._update(msg, commit=not softCommit, softCommit=softCommit, waitFlush=waitFlush, waitSearcher=waitSearcher, handler=handler)
python
def commit(self, softCommit=False, waitFlush=None, waitSearcher=None, expungeDeletes=None, handler='update'): if expungeDeletes is not None: msg = '<commit expungeDeletes="%s" />' % str(bool(expungeDeletes)).lower() else: msg = '<commit />' return self._update(msg, commit=not softCommit, softCommit=softCommit, waitFlush=waitFlush, waitSearcher=waitSearcher, handler=handler)
[ "def", "commit", "(", "self", ",", "softCommit", "=", "False", ",", "waitFlush", "=", "None", ",", "waitSearcher", "=", "None", ",", "expungeDeletes", "=", "None", ",", "handler", "=", "'update'", ")", ":", "if", "expungeDeletes", "is", "not", "None", ":...
Forces Solr to write the index data to disk. Optionally accepts ``expungeDeletes``. Default is ``None``. Optionally accepts ``waitFlush``. Default is ``None``. Optionally accepts ``waitSearcher``. Default is ``None``. Optionally accepts ``softCommit``. Default is ``False``. Usage:: solr.commit()
[ "Forces", "Solr", "to", "write", "the", "index", "data", "to", "disk", "." ]
ee28b39324fa21a99842d297e313c1759d8adbd2
https://github.com/django-haystack/pysolr/blob/ee28b39324fa21a99842d297e313c1759d8adbd2/pysolr.py#L970-L993
228,684
django-haystack/pysolr
pysolr.py
Solr.optimize
def optimize(self, commit=True, waitFlush=None, waitSearcher=None, maxSegments=None, handler='update'): """ Tells Solr to streamline the number of segments used, essentially a defragmentation operation. Optionally accepts ``maxSegments``. Default is ``None``. Optionally accepts ``waitFlush``. Default is ``None``. Optionally accepts ``waitSearcher``. Default is ``None``. Usage:: solr.optimize() """ if maxSegments: msg = '<optimize maxSegments="%d" />' % maxSegments else: msg = '<optimize />' return self._update(msg, commit=commit, waitFlush=waitFlush, waitSearcher=waitSearcher, handler=handler)
python
def optimize(self, commit=True, waitFlush=None, waitSearcher=None, maxSegments=None, handler='update'): if maxSegments: msg = '<optimize maxSegments="%d" />' % maxSegments else: msg = '<optimize />' return self._update(msg, commit=commit, waitFlush=waitFlush, waitSearcher=waitSearcher, handler=handler)
[ "def", "optimize", "(", "self", ",", "commit", "=", "True", ",", "waitFlush", "=", "None", ",", "waitSearcher", "=", "None", ",", "maxSegments", "=", "None", ",", "handler", "=", "'update'", ")", ":", "if", "maxSegments", ":", "msg", "=", "'<optimize max...
Tells Solr to streamline the number of segments used, essentially a defragmentation operation. Optionally accepts ``maxSegments``. Default is ``None``. Optionally accepts ``waitFlush``. Default is ``None``. Optionally accepts ``waitSearcher``. Default is ``None``. Usage:: solr.optimize()
[ "Tells", "Solr", "to", "streamline", "the", "number", "of", "segments", "used", "essentially", "a", "defragmentation", "operation", "." ]
ee28b39324fa21a99842d297e313c1759d8adbd2
https://github.com/django-haystack/pysolr/blob/ee28b39324fa21a99842d297e313c1759d8adbd2/pysolr.py#L995-L1016
228,685
django-haystack/pysolr
pysolr.py
Solr.ping
def ping(self, handler='admin/ping', **kwargs): """ Sends a ping request. Usage:: solr.ping() """ params = kwargs params_encoded = safe_urlencode(params, True) if len(params_encoded) < 1024: # Typical case. path = '%s/?%s' % (handler, params_encoded) return self._send_request('get', path) else: # Handles very long queries by submitting as a POST. path = '%s/' % handler headers = { 'Content-type': 'application/x-www-form-urlencoded; charset=utf-8', } return self._send_request('post', path, body=params_encoded, headers=headers)
python
def ping(self, handler='admin/ping', **kwargs): params = kwargs params_encoded = safe_urlencode(params, True) if len(params_encoded) < 1024: # Typical case. path = '%s/?%s' % (handler, params_encoded) return self._send_request('get', path) else: # Handles very long queries by submitting as a POST. path = '%s/' % handler headers = { 'Content-type': 'application/x-www-form-urlencoded; charset=utf-8', } return self._send_request('post', path, body=params_encoded, headers=headers)
[ "def", "ping", "(", "self", ",", "handler", "=", "'admin/ping'", ",", "*", "*", "kwargs", ")", ":", "params", "=", "kwargs", "params_encoded", "=", "safe_urlencode", "(", "params", ",", "True", ")", "if", "len", "(", "params_encoded", ")", "<", "1024", ...
Sends a ping request. Usage:: solr.ping()
[ "Sends", "a", "ping", "request", "." ]
ee28b39324fa21a99842d297e313c1759d8adbd2
https://github.com/django-haystack/pysolr/blob/ee28b39324fa21a99842d297e313c1759d8adbd2/pysolr.py#L1081-L1103
228,686
Azure/azure-cli-extensions
src/interactive/azext_interactive/azclishell/key_bindings.py
InteractiveKeyBindings.format_response
def format_response(self, response): """ formats a response in a binary """ conversion = self.shell_ctx.config.BOOLEAN_STATES if response in conversion: if conversion[response]: return 'yes' return 'no' raise ValueError('Invalid response: input should equate to true or false')
python
def format_response(self, response): conversion = self.shell_ctx.config.BOOLEAN_STATES if response in conversion: if conversion[response]: return 'yes' return 'no' raise ValueError('Invalid response: input should equate to true or false')
[ "def", "format_response", "(", "self", ",", "response", ")", ":", "conversion", "=", "self", ".", "shell_ctx", ".", "config", ".", "BOOLEAN_STATES", "if", "response", "in", "conversion", ":", "if", "conversion", "[", "response", "]", ":", "return", "'yes'", ...
formats a response in a binary
[ "formats", "a", "response", "in", "a", "binary" ]
3d4854205b0f0d882f688cfa12383d14506c2e35
https://github.com/Azure/azure-cli-extensions/blob/3d4854205b0f0d882f688cfa12383d14506c2e35/src/interactive/azext_interactive/azclishell/key_bindings.py#L101-L108
228,687
Azure/azure-cli-extensions
src/interactive/azext_interactive/azclishell/util.py
get_window_dim
def get_window_dim(): """ gets the dimensions depending on python version and os""" version = sys.version_info if version >= (3, 3): return _size_36() if platform.system() == 'Windows': return _size_windows() return _size_27()
python
def get_window_dim(): version = sys.version_info if version >= (3, 3): return _size_36() if platform.system() == 'Windows': return _size_windows() return _size_27()
[ "def", "get_window_dim", "(", ")", ":", "version", "=", "sys", ".", "version_info", "if", "version", ">=", "(", "3", ",", "3", ")", ":", "return", "_size_36", "(", ")", "if", "platform", ".", "system", "(", ")", "==", "'Windows'", ":", "return", "_si...
gets the dimensions depending on python version and os
[ "gets", "the", "dimensions", "depending", "on", "python", "version", "and", "os" ]
3d4854205b0f0d882f688cfa12383d14506c2e35
https://github.com/Azure/azure-cli-extensions/blob/3d4854205b0f0d882f688cfa12383d14506c2e35/src/interactive/azext_interactive/azclishell/util.py#L17-L25
228,688
Azure/azure-cli-extensions
src/interactive/azext_interactive/azclishell/util.py
_size_36
def _size_36(): """ returns the rows, columns of terminal """ from shutil import get_terminal_size dim = get_terminal_size() if isinstance(dim, list): return dim[0], dim[1] return dim.lines, dim.columns
python
def _size_36(): from shutil import get_terminal_size dim = get_terminal_size() if isinstance(dim, list): return dim[0], dim[1] return dim.lines, dim.columns
[ "def", "_size_36", "(", ")", ":", "from", "shutil", "import", "get_terminal_size", "dim", "=", "get_terminal_size", "(", ")", "if", "isinstance", "(", "dim", ",", "list", ")", ":", "return", "dim", "[", "0", "]", ",", "dim", "[", "1", "]", "return", ...
returns the rows, columns of terminal
[ "returns", "the", "rows", "columns", "of", "terminal" ]
3d4854205b0f0d882f688cfa12383d14506c2e35
https://github.com/Azure/azure-cli-extensions/blob/3d4854205b0f0d882f688cfa12383d14506c2e35/src/interactive/azext_interactive/azclishell/util.py#L36-L42
228,689
Azure/azure-cli-extensions
src/interactive/azext_interactive/azclishell/frequency_heuristic.py
update_frequency
def update_frequency(shell_ctx): """ updates the frequency from files """ frequency_path = os.path.join(shell_ctx.config.get_config_dir(), shell_ctx.config.get_frequency()) if os.path.exists(frequency_path): with open(frequency_path, 'r') as freq: try: frequency = json.load(freq) except ValueError: frequency = {} else: frequency = {} with open(frequency_path, 'w') as freq: now = day_format(datetime.datetime.utcnow()) val = frequency.get(now) frequency[now] = val + 1 if val else 1 json.dump(frequency, freq) return frequency
python
def update_frequency(shell_ctx): frequency_path = os.path.join(shell_ctx.config.get_config_dir(), shell_ctx.config.get_frequency()) if os.path.exists(frequency_path): with open(frequency_path, 'r') as freq: try: frequency = json.load(freq) except ValueError: frequency = {} else: frequency = {} with open(frequency_path, 'w') as freq: now = day_format(datetime.datetime.utcnow()) val = frequency.get(now) frequency[now] = val + 1 if val else 1 json.dump(frequency, freq) return frequency
[ "def", "update_frequency", "(", "shell_ctx", ")", ":", "frequency_path", "=", "os", ".", "path", ".", "join", "(", "shell_ctx", ".", "config", ".", "get_config_dir", "(", ")", ",", "shell_ctx", ".", "config", ".", "get_frequency", "(", ")", ")", "if", "o...
updates the frequency from files
[ "updates", "the", "frequency", "from", "files" ]
3d4854205b0f0d882f688cfa12383d14506c2e35
https://github.com/Azure/azure-cli-extensions/blob/3d4854205b0f0d882f688cfa12383d14506c2e35/src/interactive/azext_interactive/azclishell/frequency_heuristic.py#L20-L38
228,690
Azure/azure-cli-extensions
src/interactive/azext_interactive/azclishell/frequency_heuristic.py
frequency_measurement
def frequency_measurement(shell_ctx): """ measures how many times a user has used this program in the last calendar week """ freq = update_frequency(shell_ctx) count = 0 base = datetime.datetime.utcnow() date_list = [base - datetime.timedelta(days=x) for x in range(0, DAYS_AGO)] for day in date_list: count += 1 if freq.get(day_format(day), 0) > 0 else 0 return count
python
def frequency_measurement(shell_ctx): freq = update_frequency(shell_ctx) count = 0 base = datetime.datetime.utcnow() date_list = [base - datetime.timedelta(days=x) for x in range(0, DAYS_AGO)] for day in date_list: count += 1 if freq.get(day_format(day), 0) > 0 else 0 return count
[ "def", "frequency_measurement", "(", "shell_ctx", ")", ":", "freq", "=", "update_frequency", "(", "shell_ctx", ")", "count", "=", "0", "base", "=", "datetime", ".", "datetime", ".", "utcnow", "(", ")", "date_list", "=", "[", "base", "-", "datetime", ".", ...
measures how many times a user has used this program in the last calendar week
[ "measures", "how", "many", "times", "a", "user", "has", "used", "this", "program", "in", "the", "last", "calendar", "week" ]
3d4854205b0f0d882f688cfa12383d14506c2e35
https://github.com/Azure/azure-cli-extensions/blob/3d4854205b0f0d882f688cfa12383d14506c2e35/src/interactive/azext_interactive/azclishell/frequency_heuristic.py#L41-L50
228,691
Azure/azure-cli-extensions
src/azure-firewall/azext_firewall/_validators.py
get_public_ip_validator
def get_public_ip_validator(): """ Retrieves a validator for public IP address. Accepting all defaults will perform a check for an existing name or ID with no ARM-required -type parameter. """ from msrestazure.tools import is_valid_resource_id, resource_id def simple_validator(cmd, namespace): if namespace.public_ip_address: is_list = isinstance(namespace.public_ip_address, list) def _validate_name_or_id(public_ip): # determine if public_ip_address is name or ID is_id = is_valid_resource_id(public_ip) return public_ip if is_id else resource_id( subscription=get_subscription_id(cmd.cli_ctx), resource_group=namespace.resource_group_name, namespace='Microsoft.Network', type='publicIPAddresses', name=public_ip) if is_list: for i, public_ip in enumerate(namespace.public_ip_address): namespace.public_ip_address[i] = _validate_name_or_id(public_ip) else: namespace.public_ip_address = _validate_name_or_id(namespace.public_ip_address) return simple_validator
python
def get_public_ip_validator(): from msrestazure.tools import is_valid_resource_id, resource_id def simple_validator(cmd, namespace): if namespace.public_ip_address: is_list = isinstance(namespace.public_ip_address, list) def _validate_name_or_id(public_ip): # determine if public_ip_address is name or ID is_id = is_valid_resource_id(public_ip) return public_ip if is_id else resource_id( subscription=get_subscription_id(cmd.cli_ctx), resource_group=namespace.resource_group_name, namespace='Microsoft.Network', type='publicIPAddresses', name=public_ip) if is_list: for i, public_ip in enumerate(namespace.public_ip_address): namespace.public_ip_address[i] = _validate_name_or_id(public_ip) else: namespace.public_ip_address = _validate_name_or_id(namespace.public_ip_address) return simple_validator
[ "def", "get_public_ip_validator", "(", ")", ":", "from", "msrestazure", ".", "tools", "import", "is_valid_resource_id", ",", "resource_id", "def", "simple_validator", "(", "cmd", ",", "namespace", ")", ":", "if", "namespace", ".", "public_ip_address", ":", "is_lis...
Retrieves a validator for public IP address. Accepting all defaults will perform a check for an existing name or ID with no ARM-required -type parameter.
[ "Retrieves", "a", "validator", "for", "public", "IP", "address", ".", "Accepting", "all", "defaults", "will", "perform", "a", "check", "for", "an", "existing", "name", "or", "ID", "with", "no", "ARM", "-", "required", "-", "type", "parameter", "." ]
3d4854205b0f0d882f688cfa12383d14506c2e35
https://github.com/Azure/azure-cli-extensions/blob/3d4854205b0f0d882f688cfa12383d14506c2e35/src/azure-firewall/azext_firewall/_validators.py#L24-L49
228,692
Azure/azure-cli-extensions
src/interactive/azext_interactive/azclishell/_dump_commands.py
load_help_files
def load_help_files(data): """ loads all the extra information from help files """ for command_name, help_yaml in helps.items(): help_entry = yaml.safe_load(help_yaml) try: help_type = help_entry['type'] except KeyError: continue # if there is extra help for this command but it's not reflected in the command table if command_name not in data and help_type == 'command': logger.debug('Command: %s not found in command table', command_name) continue short_summary = help_entry.get('short-summary') if short_summary and help_type == 'command': data[command_name]['help'] = short_summary else: # must be a command group or sub-group data[command_name] = {'help': short_summary} continue if 'parameters' in help_entry: for param in help_entry['parameters']: # this could fail if the help file and options list are not in the same order param_name = param['name'].split()[0] if param_name not in data[command_name]['parameters']: logger.debug('Command %s does not have parameter: %s', command_name, param_name) continue if 'short-summary' in param: data[command_name]['parameters'][param_name]['help'] = param["short-summary"] if 'examples' in help_entry: data[command_name]['examples'] = [[example['name'], example['text']] for example in help_entry['examples']]
python
def load_help_files(data): for command_name, help_yaml in helps.items(): help_entry = yaml.safe_load(help_yaml) try: help_type = help_entry['type'] except KeyError: continue # if there is extra help for this command but it's not reflected in the command table if command_name not in data and help_type == 'command': logger.debug('Command: %s not found in command table', command_name) continue short_summary = help_entry.get('short-summary') if short_summary and help_type == 'command': data[command_name]['help'] = short_summary else: # must be a command group or sub-group data[command_name] = {'help': short_summary} continue if 'parameters' in help_entry: for param in help_entry['parameters']: # this could fail if the help file and options list are not in the same order param_name = param['name'].split()[0] if param_name not in data[command_name]['parameters']: logger.debug('Command %s does not have parameter: %s', command_name, param_name) continue if 'short-summary' in param: data[command_name]['parameters'][param_name]['help'] = param["short-summary"] if 'examples' in help_entry: data[command_name]['examples'] = [[example['name'], example['text']] for example in help_entry['examples']]
[ "def", "load_help_files", "(", "data", ")", ":", "for", "command_name", ",", "help_yaml", "in", "helps", ".", "items", "(", ")", ":", "help_entry", "=", "yaml", ".", "safe_load", "(", "help_yaml", ")", "try", ":", "help_type", "=", "help_entry", "[", "'t...
loads all the extra information from help files
[ "loads", "all", "the", "extra", "information", "from", "help", "files" ]
3d4854205b0f0d882f688cfa12383d14506c2e35
https://github.com/Azure/azure-cli-extensions/blob/3d4854205b0f0d882f688cfa12383d14506c2e35/src/interactive/azext_interactive/azclishell/_dump_commands.py#L133-L170
228,693
Azure/azure-cli-extensions
src/interactive/azext_interactive/azclishell/_dump_commands.py
get_cache_dir
def get_cache_dir(shell_ctx): """ gets the location of the cache """ azure_folder = shell_ctx.config.get_config_dir() cache_path = os.path.join(azure_folder, 'cache') if not os.path.exists(azure_folder): os.makedirs(azure_folder) if not os.path.exists(cache_path): os.makedirs(cache_path) return cache_path
python
def get_cache_dir(shell_ctx): azure_folder = shell_ctx.config.get_config_dir() cache_path = os.path.join(azure_folder, 'cache') if not os.path.exists(azure_folder): os.makedirs(azure_folder) if not os.path.exists(cache_path): os.makedirs(cache_path) return cache_path
[ "def", "get_cache_dir", "(", "shell_ctx", ")", ":", "azure_folder", "=", "shell_ctx", ".", "config", ".", "get_config_dir", "(", ")", "cache_path", "=", "os", ".", "path", ".", "join", "(", "azure_folder", ",", "'cache'", ")", "if", "not", "os", ".", "pa...
gets the location of the cache
[ "gets", "the", "location", "of", "the", "cache" ]
3d4854205b0f0d882f688cfa12383d14506c2e35
https://github.com/Azure/azure-cli-extensions/blob/3d4854205b0f0d882f688cfa12383d14506c2e35/src/interactive/azext_interactive/azclishell/_dump_commands.py#L173-L181
228,694
Azure/azure-cli-extensions
src/interactive/azext_interactive/azclishell/_dump_commands.py
FreshTable.dump_command_table
def dump_command_table(self, shell_ctx=None): """ dumps the command table """ from azure.cli.core.commands.arm import register_global_subscription_argument, register_ids_argument from knack import events import timeit start_time = timeit.default_timer() shell_ctx = shell_ctx or self.shell_ctx main_loader = AzInteractiveCommandsLoader(shell_ctx.cli_ctx) main_loader.load_command_table(None) main_loader.load_arguments(None) register_global_subscription_argument(shell_ctx.cli_ctx) register_ids_argument(shell_ctx.cli_ctx) shell_ctx.cli_ctx.raise_event(events.EVENT_INVOKER_POST_CMD_TBL_CREATE, commands_loader=main_loader) cmd_table = main_loader.command_table cmd_table_data = {} for command_name, cmd in cmd_table.items(): try: command_description = cmd.description if callable(command_description): command_description = command_description() # checking all the parameters for a single command parameter_metadata = {} for arg in cmd.arguments.values(): options = { 'name': [name for name in arg.options_list], 'required': REQUIRED_TAG if arg.type.settings.get('required') else '', 'help': arg.type.settings.get('help') or '' } # the key is the first alias option if arg.options_list: parameter_metadata[arg.options_list[0]] = options cmd_table_data[command_name] = { 'parameters': parameter_metadata, 'help': command_description, 'examples': '' } except (ImportError, ValueError): pass load_help_files(cmd_table_data) elapsed = timeit.default_timer() - start_time logger.debug('Command table dumped: %s sec', elapsed) FreshTable.loader = main_loader # dump into the cache file command_file = shell_ctx.config.get_help_files() with open(os.path.join(get_cache_dir(shell_ctx), command_file), 'w') as help_file: json.dump(cmd_table_data, help_file, default=lambda x: x.target or '', skipkeys=True)
python
def dump_command_table(self, shell_ctx=None): from azure.cli.core.commands.arm import register_global_subscription_argument, register_ids_argument from knack import events import timeit start_time = timeit.default_timer() shell_ctx = shell_ctx or self.shell_ctx main_loader = AzInteractiveCommandsLoader(shell_ctx.cli_ctx) main_loader.load_command_table(None) main_loader.load_arguments(None) register_global_subscription_argument(shell_ctx.cli_ctx) register_ids_argument(shell_ctx.cli_ctx) shell_ctx.cli_ctx.raise_event(events.EVENT_INVOKER_POST_CMD_TBL_CREATE, commands_loader=main_loader) cmd_table = main_loader.command_table cmd_table_data = {} for command_name, cmd in cmd_table.items(): try: command_description = cmd.description if callable(command_description): command_description = command_description() # checking all the parameters for a single command parameter_metadata = {} for arg in cmd.arguments.values(): options = { 'name': [name for name in arg.options_list], 'required': REQUIRED_TAG if arg.type.settings.get('required') else '', 'help': arg.type.settings.get('help') or '' } # the key is the first alias option if arg.options_list: parameter_metadata[arg.options_list[0]] = options cmd_table_data[command_name] = { 'parameters': parameter_metadata, 'help': command_description, 'examples': '' } except (ImportError, ValueError): pass load_help_files(cmd_table_data) elapsed = timeit.default_timer() - start_time logger.debug('Command table dumped: %s sec', elapsed) FreshTable.loader = main_loader # dump into the cache file command_file = shell_ctx.config.get_help_files() with open(os.path.join(get_cache_dir(shell_ctx), command_file), 'w') as help_file: json.dump(cmd_table_data, help_file, default=lambda x: x.target or '', skipkeys=True)
[ "def", "dump_command_table", "(", "self", ",", "shell_ctx", "=", "None", ")", ":", "from", "azure", ".", "cli", ".", "core", ".", "commands", ".", "arm", "import", "register_global_subscription_argument", ",", "register_ids_argument", "from", "knack", "import", ...
dumps the command table
[ "dumps", "the", "command", "table" ]
3d4854205b0f0d882f688cfa12383d14506c2e35
https://github.com/Azure/azure-cli-extensions/blob/3d4854205b0f0d882f688cfa12383d14506c2e35/src/interactive/azext_interactive/azclishell/_dump_commands.py#L77-L130
228,695
Azure/azure-cli-extensions
src/storage-preview/azext_storage_preview/_validators.py
_query_account_key
def _query_account_key(cli_ctx, account_name): """Query the storage account key. This is used when the customer doesn't offer account key but name.""" rg, scf = _query_account_rg(cli_ctx, account_name) t_storage_account_keys = get_sdk( cli_ctx, CUSTOM_MGMT_STORAGE, 'models.storage_account_keys#StorageAccountKeys') if t_storage_account_keys: return scf.storage_accounts.list_keys(rg, account_name).key1 # of type: models.storage_account_list_keys_result#StorageAccountListKeysResult return scf.storage_accounts.list_keys(rg, account_name).keys[0].value
python
def _query_account_key(cli_ctx, account_name): rg, scf = _query_account_rg(cli_ctx, account_name) t_storage_account_keys = get_sdk( cli_ctx, CUSTOM_MGMT_STORAGE, 'models.storage_account_keys#StorageAccountKeys') if t_storage_account_keys: return scf.storage_accounts.list_keys(rg, account_name).key1 # of type: models.storage_account_list_keys_result#StorageAccountListKeysResult return scf.storage_accounts.list_keys(rg, account_name).keys[0].value
[ "def", "_query_account_key", "(", "cli_ctx", ",", "account_name", ")", ":", "rg", ",", "scf", "=", "_query_account_rg", "(", "cli_ctx", ",", "account_name", ")", "t_storage_account_keys", "=", "get_sdk", "(", "cli_ctx", ",", "CUSTOM_MGMT_STORAGE", ",", "'models.st...
Query the storage account key. This is used when the customer doesn't offer account key but name.
[ "Query", "the", "storage", "account", "key", ".", "This", "is", "used", "when", "the", "customer", "doesn", "t", "offer", "account", "key", "but", "name", "." ]
3d4854205b0f0d882f688cfa12383d14506c2e35
https://github.com/Azure/azure-cli-extensions/blob/3d4854205b0f0d882f688cfa12383d14506c2e35/src/storage-preview/azext_storage_preview/_validators.py#L25-L34
228,696
Azure/azure-cli-extensions
src/storage-preview/azext_storage_preview/_validators.py
_query_account_rg
def _query_account_rg(cli_ctx, account_name): """Query the storage account's resource group, which the mgmt sdk requires.""" scf = get_mgmt_service_client(cli_ctx, CUSTOM_MGMT_STORAGE) acc = next((x for x in scf.storage_accounts.list() if x.name == account_name), None) if acc: from msrestazure.tools import parse_resource_id return parse_resource_id(acc.id)['resource_group'], scf raise ValueError("Storage account '{}' not found.".format(account_name))
python
def _query_account_rg(cli_ctx, account_name): scf = get_mgmt_service_client(cli_ctx, CUSTOM_MGMT_STORAGE) acc = next((x for x in scf.storage_accounts.list() if x.name == account_name), None) if acc: from msrestazure.tools import parse_resource_id return parse_resource_id(acc.id)['resource_group'], scf raise ValueError("Storage account '{}' not found.".format(account_name))
[ "def", "_query_account_rg", "(", "cli_ctx", ",", "account_name", ")", ":", "scf", "=", "get_mgmt_service_client", "(", "cli_ctx", ",", "CUSTOM_MGMT_STORAGE", ")", "acc", "=", "next", "(", "(", "x", "for", "x", "in", "scf", ".", "storage_accounts", ".", "list...
Query the storage account's resource group, which the mgmt sdk requires.
[ "Query", "the", "storage", "account", "s", "resource", "group", "which", "the", "mgmt", "sdk", "requires", "." ]
3d4854205b0f0d882f688cfa12383d14506c2e35
https://github.com/Azure/azure-cli-extensions/blob/3d4854205b0f0d882f688cfa12383d14506c2e35/src/storage-preview/azext_storage_preview/_validators.py#L37-L44
228,697
Azure/azure-cli-extensions
src/storage-preview/azext_storage_preview/_validators.py
process_resource_group
def process_resource_group(cmd, namespace): """Processes the resource group parameter from the account name""" if namespace.account_name and not namespace.resource_group_name: namespace.resource_group_name = _query_account_rg(cmd.cli_ctx, namespace.account_name)[0]
python
def process_resource_group(cmd, namespace): if namespace.account_name and not namespace.resource_group_name: namespace.resource_group_name = _query_account_rg(cmd.cli_ctx, namespace.account_name)[0]
[ "def", "process_resource_group", "(", "cmd", ",", "namespace", ")", ":", "if", "namespace", ".", "account_name", "and", "not", "namespace", ".", "resource_group_name", ":", "namespace", ".", "resource_group_name", "=", "_query_account_rg", "(", "cmd", ".", "cli_ct...
Processes the resource group parameter from the account name
[ "Processes", "the", "resource", "group", "parameter", "from", "the", "account", "name" ]
3d4854205b0f0d882f688cfa12383d14506c2e35
https://github.com/Azure/azure-cli-extensions/blob/3d4854205b0f0d882f688cfa12383d14506c2e35/src/storage-preview/azext_storage_preview/_validators.py#L64-L67
228,698
Azure/azure-cli-extensions
src/storage-preview/azext_storage_preview/_validators.py
validate_client_parameters
def validate_client_parameters(cmd, namespace): """ Retrieves storage connection parameters from environment variables and parses out connection string into account name and key """ n = namespace def get_config_value(section, key, default): return cmd.cli_ctx.config.get(section, key, default) if hasattr(n, 'auth_mode'): auth_mode = n.auth_mode or get_config_value('storage', 'auth_mode', None) del n.auth_mode if not n.account_name: n.account_name = get_config_value('storage', 'account', None) if auth_mode == 'login': n.token_credential = _create_token_credential(cmd.cli_ctx) # give warning if there are account key args being ignored account_key_args = [n.account_key and "--account-key", n.sas_token and "--sas-token", n.connection_string and "--connection-string"] account_key_args = [arg for arg in account_key_args if arg] if account_key_args: from knack.log import get_logger logger = get_logger(__name__) logger.warning('In "login" auth mode, the following arguments are ignored: %s', ' ,'.join(account_key_args)) return if not n.connection_string: n.connection_string = get_config_value('storage', 'connection_string', None) # if connection string supplied or in environment variables, extract account key and name if n.connection_string: conn_dict = validate_key_value_pairs(n.connection_string) n.account_name = conn_dict.get('AccountName') n.account_key = conn_dict.get('AccountKey') if not n.account_name or not n.account_key: from knack.util import CLIError raise CLIError('Connection-string: %s, is malformed. Some shell environments require the ' 'connection string to be surrounded by quotes.' % n.connection_string) # otherwise, simply try to retrieve the remaining variables from environment variables if not n.account_name: n.account_name = get_config_value('storage', 'account', None) if not n.account_key: n.account_key = get_config_value('storage', 'key', None) if not n.sas_token: n.sas_token = get_config_value('storage', 'sas_token', None) # strip the '?' 
from sas token. the portal and command line are returns sas token in different # forms if n.sas_token: n.sas_token = n.sas_token.lstrip('?') # if account name is specified but no key, attempt to query if n.account_name and not n.account_key and not n.sas_token: n.account_key = _query_account_key(cmd.cli_ctx, n.account_name)
python
def validate_client_parameters(cmd, namespace): n = namespace def get_config_value(section, key, default): return cmd.cli_ctx.config.get(section, key, default) if hasattr(n, 'auth_mode'): auth_mode = n.auth_mode or get_config_value('storage', 'auth_mode', None) del n.auth_mode if not n.account_name: n.account_name = get_config_value('storage', 'account', None) if auth_mode == 'login': n.token_credential = _create_token_credential(cmd.cli_ctx) # give warning if there are account key args being ignored account_key_args = [n.account_key and "--account-key", n.sas_token and "--sas-token", n.connection_string and "--connection-string"] account_key_args = [arg for arg in account_key_args if arg] if account_key_args: from knack.log import get_logger logger = get_logger(__name__) logger.warning('In "login" auth mode, the following arguments are ignored: %s', ' ,'.join(account_key_args)) return if not n.connection_string: n.connection_string = get_config_value('storage', 'connection_string', None) # if connection string supplied or in environment variables, extract account key and name if n.connection_string: conn_dict = validate_key_value_pairs(n.connection_string) n.account_name = conn_dict.get('AccountName') n.account_key = conn_dict.get('AccountKey') if not n.account_name or not n.account_key: from knack.util import CLIError raise CLIError('Connection-string: %s, is malformed. Some shell environments require the ' 'connection string to be surrounded by quotes.' % n.connection_string) # otherwise, simply try to retrieve the remaining variables from environment variables if not n.account_name: n.account_name = get_config_value('storage', 'account', None) if not n.account_key: n.account_key = get_config_value('storage', 'key', None) if not n.sas_token: n.sas_token = get_config_value('storage', 'sas_token', None) # strip the '?' from sas token. 
the portal and command line are returns sas token in different # forms if n.sas_token: n.sas_token = n.sas_token.lstrip('?') # if account name is specified but no key, attempt to query if n.account_name and not n.account_key and not n.sas_token: n.account_key = _query_account_key(cmd.cli_ctx, n.account_name)
[ "def", "validate_client_parameters", "(", "cmd", ",", "namespace", ")", ":", "n", "=", "namespace", "def", "get_config_value", "(", "section", ",", "key", ",", "default", ")", ":", "return", "cmd", ".", "cli_ctx", ".", "config", ".", "get", "(", "section",...
Retrieves storage connection parameters from environment variables and parses out connection string into account name and key
[ "Retrieves", "storage", "connection", "parameters", "from", "environment", "variables", "and", "parses", "out", "connection", "string", "into", "account", "name", "and", "key" ]
3d4854205b0f0d882f688cfa12383d14506c2e35
https://github.com/Azure/azure-cli-extensions/blob/3d4854205b0f0d882f688cfa12383d14506c2e35/src/storage-preview/azext_storage_preview/_validators.py#L75-L132
228,699
Azure/azure-cli-extensions
src/storage-preview/azext_storage_preview/_validators.py
validate_encryption_services
def validate_encryption_services(cmd, namespace): """ Builds up the encryption services object for storage account operations based on the list of services passed in. """ if namespace.encryption_services: t_encryption_services, t_encryption_service = get_sdk(cmd.cli_ctx, CUSTOM_MGMT_STORAGE, 'EncryptionServices', 'EncryptionService', mod='models') services = {service: t_encryption_service(enabled=True) for service in namespace.encryption_services} namespace.encryption_services = t_encryption_services(**services)
python
def validate_encryption_services(cmd, namespace): if namespace.encryption_services: t_encryption_services, t_encryption_service = get_sdk(cmd.cli_ctx, CUSTOM_MGMT_STORAGE, 'EncryptionServices', 'EncryptionService', mod='models') services = {service: t_encryption_service(enabled=True) for service in namespace.encryption_services} namespace.encryption_services = t_encryption_services(**services)
[ "def", "validate_encryption_services", "(", "cmd", ",", "namespace", ")", ":", "if", "namespace", ".", "encryption_services", ":", "t_encryption_services", ",", "t_encryption_service", "=", "get_sdk", "(", "cmd", ".", "cli_ctx", ",", "CUSTOM_MGMT_STORAGE", ",", "'En...
Builds up the encryption services object for storage account operations based on the list of services passed in.
[ "Builds", "up", "the", "encryption", "services", "object", "for", "storage", "account", "operations", "based", "on", "the", "list", "of", "services", "passed", "in", "." ]
3d4854205b0f0d882f688cfa12383d14506c2e35
https://github.com/Azure/azure-cli-extensions/blob/3d4854205b0f0d882f688cfa12383d14506c2e35/src/storage-preview/azext_storage_preview/_validators.py#L265-L274