idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
40,400
def offers(self, cursor=None, order='asc', limit=10, sse=False):
    """Return this account's offers JSON from the instance's Horizon server."""
    return self.horizon.account_offers(
        self.address, cursor=cursor, order=order, limit=limit, sse=sse)
Retrieve the offers JSON from this instance's Horizon server.
40,401
def transactions(self, cursor=None, order='asc', limit=10, sse=False):
    """Return this account's transactions JSON from the instance's Horizon server."""
    return self.horizon.account_transactions(
        self.address, cursor=cursor, order=order, limit=limit, sse=sse)
Retrieve the transactions JSON from this instance's Horizon server.
40,402
def operations(self, cursor=None, order='asc', limit=10, sse=False):
    """Return this account's operations JSON from the instance's Horizon server."""
    return self.horizon.account_operations(
        self.address, cursor=cursor, order=order, limit=limit, sse=sse)
Retrieve the operations JSON from this instance's Horizon server.
40,403
def trades(self, cursor=None, order='asc', limit=10, sse=False):
    """Return this account's trades JSON from the instance's Horizon server."""
    return self.horizon.account_trades(
        self.address, cursor=cursor, order=order, limit=limit, sse=sse)
Retrieve the trades JSON from this instance's Horizon server.
40,404
def effects(self, cursor=None, order='asc', limit=10, sse=False):
    """Return this account's effects JSON from the instance's Horizon server."""
    return self.horizon.account_effects(
        self.address, cursor=cursor, order=order, limit=limit, sse=sse)
Retrieve the effects JSON from this instance's Horizon server.
40,405
def submit(self, te):
    """Submit a transaction envelope over the pooled session, retrying on failure.

    Raises HorizonRequestError when no reply was ever received, or
    HorizonError when Horizon returned an unusable reply.
    """
    params = {'tx': te}
    url = urljoin(self.horizon_uri, 'transactions/')
    reply = None
    retry_count = self.num_retries
    while True:
        try:
            reply = self._session.post(
                url, data=params, timeout=self.request_timeout)
            return check_horizon_reply(reply.json())
        except (RequestException, NewConnectionError, ValueError) as e:
            if reply is not None:
                msg = 'Horizon submit exception: {}, reply: [{}] {}'.format(
                    str(e), reply.status_code, reply.text)
            else:
                msg = 'Horizon submit exception: {}'.format(str(e))
            logging.warning(msg)
            # Give up when the status is not retryable or retries ran out.
            if (reply is not None
                    and reply.status_code not in self.status_forcelist) \
                    or retry_count <= 0:
                if reply is None:
                    raise HorizonRequestError(e)
                raise HorizonError(
                    'Invalid horizon reply: [{}] {}'.format(
                        reply.status_code, reply.text),
                    reply.status_code)
            retry_count -= 1
            logging.warning('Submit retry attempt {}'.format(retry_count))
            sleep(self.backoff_factor)
Submit the transaction using a pooled connection and retry on failure .
40,406
def account(self, address):
    """Return information and links for a single account."""
    endpoint = '/accounts/{account_id}'.format(account_id=address)
    return self.query(endpoint)
Returns information and links relating to a single account .
40,407
def account_data(self, address, key):
    """Return a single data entry attached to the given account."""
    endpoint = '/accounts/{account_id}/data/{data_key}'.format(
        account_id=address, data_key=key)
    return self.query(endpoint)
This endpoint represents a single data associated with a given account .
40,408
def account_effects(self, address, cursor=None, order='asc', limit=10,
                    sse=False):
    """Return all effects that changed the given account."""
    endpoint = '/accounts/{account_id}/effects'.format(account_id=address)
    params = self.__query_params(cursor=cursor, order=order, limit=limit)
    return self.query(endpoint, params, sse)
This endpoint represents all effects that changed a given account .
40,409
def assets(self, asset_code=None, asset_issuer=None, cursor=None,
           order='asc', limit=10):
    """Return all assets in the system with per-asset statistics."""
    params = self.__query_params(
        asset_code=asset_code, asset_issuer=asset_issuer, cursor=cursor,
        order=order, limit=limit)
    return self.query('/assets', params)
This endpoint represents all assets . It will give you all the assets in the system along with various statistics about each .
40,410
def transaction(self, tx_hash):
    """Return the details of a single transaction."""
    endpoint = '/transactions/{tx_hash}'.format(tx_hash=tx_hash)
    return self.query(endpoint)
The transaction details endpoint provides information on a single transaction .
40,411
def transaction_operations(self, tx_hash, cursor=None, order='asc',
                           include_failed=False, limit=10):
    """Return all operations that are part of the given transaction."""
    endpoint = '/transactions/{tx_hash}/operations'.format(tx_hash=tx_hash)
    params = self.__query_params(
        cursor=cursor, order=order, limit=limit,
        include_failed=include_failed)
    return self.query(endpoint, params)
This endpoint represents all operations that are part of a given transaction .
40,412
def transaction_effects(self, tx_hash, cursor=None, order='asc', limit=10):
    """Return all effects that resulted from the given transaction."""
    endpoint = '/transactions/{tx_hash}/effects'.format(tx_hash=tx_hash)
    params = self.__query_params(cursor=cursor, order=order, limit=limit)
    return self.query(endpoint, params)
This endpoint represents all effects that occurred as a result of a given transaction .
40,413
def order_book(self, selling_asset_code, buying_asset_code,
               selling_asset_issuer=None, buying_asset_issuer=None, limit=10):
    """Return the order book summary with its bids and asks for an asset pair."""
    selling = Asset(selling_asset_code, selling_asset_issuer)
    buying = Asset(buying_asset_code, buying_asset_issuer)
    asset_params = {
        'selling_asset_type': selling.type,
        # Native assets carry no code on the wire.
        'selling_asset_code': None if selling.is_native() else selling.code,
        'selling_asset_issuer': selling.issuer,
        'buying_asset_type': buying.type,
        'buying_asset_code': None if buying.is_native() else buying.code,
        'buying_asset_issuer': buying.issuer,
    }
    params = self.__query_params(limit=limit, **asset_params)
    return self.query('/order_book', params)
Return for each orderbook a summary of the orderbook and the bids and asks associated with that orderbook .
40,414
def ledger(self, ledger_id):
    """Return the details of a single ledger."""
    endpoint = '/ledgers/{ledger_id}'.format(ledger_id=ledger_id)
    return self.query(endpoint)
The ledger details endpoint provides information on a single ledger .
40,415
def ledger_effects(self, ledger_id, cursor=None, order='asc', limit=10):
    """Return all effects that occurred in the given ledger."""
    endpoint = '/ledgers/{ledger_id}/effects'.format(ledger_id=ledger_id)
    params = self.__query_params(cursor=cursor, order=order, limit=limit)
    return self.query(endpoint, params)
This endpoint represents all effects that occurred in the given ledger .
40,416
def ledger_transactions(self, ledger_id, cursor=None, order='asc',
                        include_failed=False, limit=10):
    """Return all transactions in the given ledger."""
    endpoint = '/ledgers/{ledger_id}/transactions'.format(ledger_id=ledger_id)
    params = self.__query_params(
        cursor=cursor, order=order, limit=limit,
        include_failed=include_failed)
    return self.query(endpoint, params)
This endpoint represents all transactions in a given ledger .
40,417
def effects(self, cursor=None, order='asc', limit=10, sse=False):
    """Return all effects."""
    params = self.__query_params(cursor=cursor, order=order, limit=limit)
    return self.query('/effects', params, sse)
This endpoint represents all effects .
40,418
def operations(self, cursor=None, order='asc', limit=10,
               include_failed=False, sse=False):
    """Return all operations that are part of validated transactions."""
    params = self.__query_params(
        cursor=cursor, order=order, limit=limit,
        include_failed=include_failed)
    return self.query('/operations', params, sse)
This endpoint represents all operations that are part of validated transactions .
40,419
def operation(self, op_id):
    """Return the details of a single operation."""
    endpoint = '/operations/{op_id}'.format(op_id=op_id)
    return self.query(endpoint)
The operation details endpoint provides information on a single operation .
40,420
def operation_effects(self, op_id, cursor=None, order='asc', limit=10):
    """Return all effects that resulted from the given operation."""
    endpoint = '/operations/{op_id}/effects'.format(op_id=op_id)
    params = self.__query_params(cursor=cursor, order=order, limit=limit)
    return self.query(endpoint, params)
This endpoint represents all effects that occurred as a result of a given operation .
40,421
def paths(self, destination_account, destination_amount, source_account,
          destination_asset_code, destination_asset_issuer=None):
    """Find payment paths from the source account's assets to a destination asset."""
    dest_asset = Asset(destination_asset_code, destination_asset_issuer)
    dest_params = {
        'destination_asset_type': dest_asset.type,
        'destination_asset_code':
            None if dest_asset.is_native() else dest_asset.code,
        'destination_asset_issuer': dest_asset.issuer,
    }
    params = self.__query_params(
        destination_account=destination_account,
        source_account=source_account,
        destination_amount=destination_amount,
        **dest_params)
    return self.query('/paths', params)
Load a list of assets available to the source account id and find any payment paths from those source assets to the desired destination asset .
40,422
def trades(self, base_asset_code=None, counter_asset_code=None,
           base_asset_issuer=None, counter_asset_issuer=None, offer_id=None,
           cursor=None, order='asc', limit=10):
    """Load a list of trades, optionally filtered by an orderbook."""
    base = Asset(base_asset_code, base_asset_issuer)
    counter = Asset(counter_asset_code, counter_asset_issuer)
    asset_params = {
        'base_asset_type': base.type,
        'base_asset_code': None if base.is_native() else base.code,
        'base_asset_issuer': base.issuer,
        'counter_asset_type': counter.type,
        'counter_asset_code': None if counter.is_native() else counter.code,
        'counter_asset_issuer': counter.issuer,
    }
    params = self.__query_params(
        offer_id=offer_id, cursor=cursor, order=order, limit=limit,
        **asset_params)
    return self.query('/trades', params)
Load a list of trades optionally filtered by an orderbook .
40,423
def trade_aggregations(self, resolution, base_asset_code, counter_asset_code,
                       base_asset_issuer=None, counter_asset_issuer=None,
                       start_time=None, end_time=None, order='asc', limit=10,
                       offset=0):
    """Load aggregated historical trade data, optionally filtered by orderbook.

    Raises NotValidParamError for an unsupported resolution or offset.
    """
    allowed_resolutions = (
        60000, 300000, 900000, 3600000, 86400000, 604800000)
    if resolution not in allowed_resolutions:
        raise NotValidParamError("resolution is invalid")
    # offset must be whole hours, no larger than the resolution, under 24h.
    if offset > resolution or offset >= 24 * 3600000 or offset % 3600000 != 0:
        raise NotValidParamError("offset is invalid")
    base = Asset(base_asset_code, base_asset_issuer)
    counter = Asset(counter_asset_code, counter_asset_issuer)
    asset_params = {
        'base_asset_type': base.type,
        'base_asset_code': None if base.is_native() else base.code,
        'base_asset_issuer': base.issuer,
        'counter_asset_type': counter.type,
        'counter_asset_code': None if counter.is_native() else counter.code,
        'counter_asset_issuer': counter.issuer,
    }
    params = self.__query_params(
        start_time=start_time, end_time=end_time, resolution=resolution,
        order=order, limit=limit, offset=offset, **asset_params)
    return self.query('/trade_aggregations', params)
Load a list of aggregated historical trade data optionally filtered by an orderbook .
40,424
def offer_trades(self, offer_id, cursor=None, order='asc', limit=10):
    """Return all trades for the given offer."""
    endpoint = '/offers/{offer_id}/trades'.format(offer_id=offer_id)
    params = self.__query_params(cursor=cursor, order=order, limit=limit)
    return self.query(endpoint, params)
This endpoint represents all trades for a given offer .
40,425
def sign(self, keypair):
    """Sign this transaction envelope with the given keypair.

    Raises SignatureExistError when the keypair already signed the envelope.
    """
    assert isinstance(keypair, Keypair)
    tx_hash = self.hash_meta()
    sig = keypair.sign_decorated(tx_hash)
    existing = [signature.__dict__ for signature in self.signatures]
    if sig.__dict__ in existing:
        raise SignatureExistError('The keypair has already signed')
    self.signatures.append(sig)
Sign this transaction envelope with a given keypair .
40,426
def signature_base(self):
    """Return network id + packed envelope type + packed transaction bytes."""
    type_packer = Xdr.StellarXDRPacker()
    type_packer.pack_EnvelopeType(Xdr.const.ENVELOPE_TYPE_TX)
    tx_type = type_packer.get_buffer()
    tx_packer = Xdr.StellarXDRPacker()
    tx_packer.pack_Transaction(self.tx.to_xdr_object())
    tx = tx_packer.get_buffer()
    return self.network_id + tx_type + tx
Get the signature base of this transaction envelope .
40,427
def get_federation_service(domain, allow_http=False):
    """Return the FEDERATION_SERVER entry from a domain's stellar.toml, or None."""
    st = get_stellar_toml(domain, allow_http)
    return st.get('FEDERATION_SERVER') if st else None
Retrieve the FEDERATION_SERVER config from a domain's stellar.toml.
40,428
def get_auth_server(domain, allow_http=False):
    """Return the AUTH_SERVER entry from a domain's stellar.toml, or None."""
    st = get_stellar_toml(domain, allow_http)
    return st.get('AUTH_SERVER') if st else None
Retrieve the AUTH_SERVER config from a domain's stellar.toml.
40,429
def get_stellar_toml(domain, allow_http=False):
    """Retrieve and parse the stellar.toml file for a domain.

    Tries the bare domain plus the 'www.' and 'stellar.' subdomains and
    returns the parsed TOML dict from the first candidate answering 200,
    or None when none does.
    """
    toml_link = '/.well-known/stellar.toml'
    protocol = 'http://' if allow_http else 'https://'
    url_list = [protocol + prefix + domain + toml_link
                for prefix in ('', 'www.', 'stellar.')]
    for url in url_list:
        # A candidate subdomain may not resolve/connect at all; skip it
        # instead of letting the connection error abort the whole lookup.
        try:
            r = requests.get(url)
        except requests.RequestException:
            continue
        if r.status_code == 200:
            return toml.loads(r.text)
    return None
Retrieve the stellar.toml file from a given domain.
40,430
def account_xdr_object(self):
    """Build a PublicKey XDR object from this keypair's verifying-key bytes."""
    return Xdr.types.PublicKey(
        Xdr.const.KEY_TYPE_ED25519, self.verifying_key.to_bytes())
Create PublicKey XDR object via public key bytes .
40,431
def xdr(self):
    """Return the base64-encoded XDR form of this public key."""
    packer = Xdr.StellarXDRPacker()
    packer.pack_PublicKey(self.account_xdr_object())
    return base64.b64encode(packer.get_buffer())
Generate base64 encoded XDR PublicKey object .
40,432
def verify(self, data, signature):
    """Verify a signature over data, raising BadSignatureError on mismatch."""
    try:
        return self.verifying_key.verify(signature, data)
    except ed25519.BadSignatureError:
        raise BadSignatureError("Signature verification failed.")
Verify the signature of a sequence of bytes .
40,433
def sign_decorated(self, data):
    """Sign data and wrap the signature plus its hint in a DecoratedSignature."""
    raw_signature = self.sign(data)
    return Xdr.types.DecoratedSignature(self.signature_hint(), raw_signature)
Sign a bytes - like object and return the decorated signature .
40,434
def bytes_from_decode_data(s):
    """Coerce s to bytes for decoding (copy of base64._bytes_from_decode_data).

    Accepts ASCII str/unicode, bytes-like objects, and buffer-protocol
    objects; raises for everything else.
    """
    if isinstance(s, (str, unicode)):
        try:
            return s.encode('ascii')
        except UnicodeEncodeError:
            raise NotValidParamError(
                'String argument should contain only ASCII characters')
    if isinstance(s, bytes_types):
        return s
    try:
        return memoryview(s).tobytes()
    except TypeError:
        raise suppress_context(
            TypeError('Argument should be a bytes-like object or ASCII '
                      'string, not {!r}'.format(s.__class__.__name__)))
copy from base64 . _bytes_from_decode_data
40,435
def to_xdr_amount(value):
    """Convert a decimal string amount into the integer sent over XDR.

    Raises NotValidParamError when value is not a string, has more than 7
    decimal digits, or does not represent a number.
    """
    if not isinstance(value, str):
        raise NotValidParamError(
            "Value of type '{}' must be of type String, but got {}".format(
                value, type(value)))
    try:
        # Scale by ONE and trap any inexact result (too many digits).
        amount = int(
            (Decimal(value) * ONE).to_integral_exact(
                context=Context(traps=[Inexact])))
    except decimal.Inexact:
        raise NotValidParamError(
            "Value of '{}' must have at most 7 digits after the decimal."
            .format(value))
    except decimal.InvalidOperation:
        raise NotValidParamError(
            "Value of '{}' must represent a positive number.".format(value))
    return amount
Converts an amount to the appropriate value to send over the network as a part of an XDR object .
40,436
def to_xdr_object(self):
    """Build the MEMO_TEXT XDR Memo object for a transaction."""
    return Xdr.types.Memo(type=Xdr.const.MEMO_TEXT, text=self.text)
Creates an XDR Memo object for a transaction with MEMO_TEXT .
40,437
def to_xdr_object(self):
    """Build the MEMO_ID XDR Memo object for a transaction."""
    return Xdr.types.Memo(type=Xdr.const.MEMO_ID, id=self.memo_id)
Creates an XDR Memo object for a transaction with MEMO_ID .
40,438
def to_xdr_object(self):
    """Build the MEMO_HASH XDR Memo object for a transaction."""
    return Xdr.types.Memo(type=Xdr.const.MEMO_HASH, hash=self.memo_hash)
Creates an XDR Memo object for a transaction with MEMO_HASH .
40,439
def to_xdr_object(self):
    """Build the MEMO_RETURN XDR Memo object for a transaction."""
    return Xdr.types.Memo(type=Xdr.const.MEMO_RETURN, retHash=self.memo_return)
Creates an XDR Memo object for a transaction with MEMO_RETURN .
40,440
def append_hashx_signer(self, hashx, signer_weight, source=None):
    """Append a SetOptions operation adding a hashX signer to an account."""
    return self.append_set_options_op(
        signer_address=hashx, signer_type='hashX',
        signer_weight=signer_weight, source=source)
Add a HashX signer to an account .
40,441
def append_pre_auth_tx_signer(self, pre_auth_tx, signer_weight, source=None):
    """Append a SetOptions operation adding a preAuthTx signer to an account."""
    return self.append_set_options_op(
        signer_address=pre_auth_tx, signer_type='preAuthTx',
        signer_weight=signer_weight, source=source)
Add a PreAuthTx signer to an account .
40,442
def next_builder(self):
    """Return a new Builder like this one with the sequence number bumped."""
    builder = Builder(
        horizon_uri=self.horizon.horizon_uri, address=self.address,
        network=self.network, sequence=self.sequence + 1, fee=self.fee)
    builder.keypair = self.keypair
    return builder
Create a new builder based off of this one with its sequence number incremented .
40,443
def get_sequence(self):
    """Fetch this builder's account sequence number from Horizon.

    Raises StellarAddressInvalidError when no address is configured.
    """
    if not self.address:
        raise StellarAddressInvalidError('No address provided.')
    account = self.horizon.account(self.address)
    return int(account.get('sequence'))
Get the sequence number for a given account via Horizon .
40,444
def to_dict(self):
    """Return a dict representation of this asset's attributes."""
    rv = {'code': self.code}
    if self.is_native():
        rv['type'] = 'native'
    else:
        rv['issuer'] = self.issuer
        rv['type'] = self.type
    return rv
Generate a dict for this object's attributes.
40,445
def id_unique(dict_id, name, lineno):
    """Return True when dict_id is unused; otherwise report the conflict.

    On conflict, prints an error naming the earlier definition and sets the
    module-level error_occurred flag, then returns False.
    """
    if dict_id not in name_dict:
        return True
    global error_occurred
    error_occurred = True
    print("ERROR - {0:s} definition {1:s} at line {2:d} conflicts with {3:s}"
          .format(name, dict_id, lineno, name_dict[dict_id]))
    return False
Returns True if dict_id is not already used. Otherwise reports an error and returns False.
40,446
def main():
    """Base58 encode or decode FILE or standard input to standard output."""
    import sys
    import argparse
    stdout = buffer(sys.stdout)
    parser = argparse.ArgumentParser(description=main.__doc__)
    parser.add_argument('file', metavar='FILE', nargs='?',
                        type=argparse.FileType('r'), default='-')
    parser.add_argument('-d', '--decode', action='store_true',
                        help='decode data')
    parser.add_argument('-c', '--check', action='store_true',
                        help='append a checksum before encoding')
    args = parser.parse_args()
    # Dispatch on the (decode, check) flag pair.
    fun = {
        (False, False): b58encode,
        (False, True): b58encode_check,
        (True, False): b58decode,
        (True, True): b58decode_check,
    }[(args.decode, args.check)]
    data = buffer(args.file).read().rstrip(b'\n')
    try:
        result = fun(data)
    except Exception as e:
        sys.exit(e)
    if not isinstance(result, bytes):
        result = result.encode('ascii')
    stdout.write(result)
Base58 encode or decode FILE or standard input to standard output .
40,447
def _Dhcpcd(self, interfaces, logger):
    """Use dhcpcd to (re)activate each of the given network interfaces."""
    for interface in interfaces:
        dhcpcd = ['/sbin/dhcpcd']
        try:
            # Release the interface first; it may not be running yet.
            subprocess.check_call(dhcpcd + ['-x', interface])
        except subprocess.CalledProcessError:
            logger.info('Dhcpcd not yet running for interface %s.', interface)
        try:
            subprocess.check_call(dhcpcd + [interface])
        except subprocess.CalledProcessError:
            logger.warning('Could not activate interface %s.', interface)
Use dhcpcd to activate the interfaces .
40,448
def _CreateTempDir ( prefix , run_dir = None ) : temp_dir = tempfile . mkdtemp ( prefix = prefix + '-' , dir = run_dir ) try : yield temp_dir finally : shutil . rmtree ( temp_dir )
Context manager for creating a temporary directory .
40,449
def _RunScripts(self, run_dir=None):
    """Fetch metadata scripts into a temporary directory and execute them."""
    with _CreateTempDir(self.script_type, run_dir=run_dir) as dest_dir:
        try:
            self.logger.info('Starting %s scripts.', self.script_type)
            script_dict = self.retriever.GetScripts(dest_dir)
            self.executor.RunScripts(script_dict)
        finally:
            self.logger.info('Finished running %s scripts.', self.script_type)
Retrieve metadata scripts and execute them .
40,450
def _GetInstanceConfig ( self ) : try : instance_data = self . metadata_dict [ 'instance' ] [ 'attributes' ] except KeyError : instance_data = { } self . logger . warning ( 'Instance attributes were not found.' ) try : project_data = self . metadata_dict [ 'project' ] [ 'attributes' ] except KeyError : project_data = { } self . logger . warning ( 'Project attributes were not found.' ) return ( instance_data . get ( 'google-instance-configs' ) or project_data . get ( 'google-instance-configs' ) )
Get the instance configuration specified in metadata .
40,451
def _GenerateSshKey(self, key_type, key_dest):
    """Generate a new SSH key of key_type and install it at key_dest."""
    # Reserve a unique temp name; the file is deleted on context exit so
    # ssh-keygen can create the key pair fresh at that path.
    with tempfile.NamedTemporaryFile(prefix=key_type, delete=True) as temp:
        temp_key = temp.name
    command = ['ssh-keygen', '-t', key_type, '-f', temp_key, '-N', '', '-q']
    try:
        self.logger.info('Generating SSH key %s.', key_dest)
        subprocess.check_call(command)
    except subprocess.CalledProcessError:
        self.logger.warning('Could not create SSH key %s.', key_dest)
        return
    shutil.move(temp_key, key_dest)
    shutil.move('%s.pub' % temp_key, '%s.pub' % key_dest)
    file_utils.SetPermissions(key_dest, mode=0o600)
    file_utils.SetPermissions('%s.pub' % key_dest, mode=0o644)
Generate a new SSH key .
40,452
def _StartSshd(self):
    """Start (or reload) the SSH daemon when not managed by systemd."""
    # systemd-managed systems take care of sshd themselves.
    if os.path.exists(constants.LOCALBASE + '/bin/systemctl'):
        return
    elif (os.path.exists('/etc/init.d/ssh')
          or os.path.exists('/etc/init/ssh.conf')):
        subprocess.call(['service', 'ssh', 'start'])
        subprocess.call(['service', 'ssh', 'reload'])
    elif (os.path.exists('/etc/init.d/sshd')
          or os.path.exists('/etc/init/sshd.conf')):
        subprocess.call(['service', 'sshd', 'start'])
        subprocess.call(['service', 'sshd', 'reload'])
Initialize the SSH daemon .
40,453
def _SetSshHostKeys(self, host_key_types=None):
    """Regenerate SSH host keys when the VM's instance id has changed."""
    section = 'Instance'
    instance_id = self._GetInstanceId()
    if instance_id != self.instance_config.GetOptionString(
            section, 'instance_id'):
        self.logger.info(
            'Generating SSH host keys for instance %s.', instance_id)
        file_regex = re.compile(r'ssh_host_(?P<type>[a-z0-9]*)_key\Z')
        key_dir = '/etc/ssh'
        key_files = [f for f in os.listdir(key_dir) if file_regex.match(f)]
        key_types = host_key_types.split(',') if host_key_types else []
        key_types_files = ['ssh_host_%s_key' % t for t in key_types]
        # Regenerate every existing key plus any explicitly requested types.
        for key_file in set(key_files) | set(key_types_files):
            key_type = file_regex.match(key_file).group('type')
            self._GenerateSshKey(key_type, os.path.join(key_dir, key_file))
        self._StartSshd()
        self.instance_config.SetOption(
            section, 'instance_id', str(instance_id))
Regenerates SSH host keys when the VM is restarted with a new IP address .
40,454
def _SetupBotoConfig(self):
    """Write the boto config so gsutil works with provisioned service accounts."""
    project_id = self._GetNumericProjectId()
    try:
        boto_config.BotoConfig(project_id, debug=self.debug)
    except (IOError, OSError) as e:
        self.logger.warning(str(e))
Set the boto config so GSUtil works with provisioned service accounts .
40,455
def _DownloadAuthUrl(self, url, dest_dir):
    """Download a Google Storage URL into dest_dir using an auth token.

    Falls back to an unauthenticated download when no token is available.
    Returns the destination file name, or None on failure.
    """
    dest_file = tempfile.NamedTemporaryFile(dir=dest_dir, delete=False)
    dest_file.close()
    dest = dest_file.name
    self.logger.info(
        'Downloading url from %s to %s using authentication token.',
        url, dest)
    if not self.token:
        response = self.watcher.GetMetadata(
            self.token_metadata_key, recursive=False, retry=False)
        if not response:
            self.logger.info(
                'Authentication token not found. Attempting unauthenticated '
                'download.')
            return self._DownloadUrl(url, dest_dir)
        self.token = '%s %s' % (
            response.get('token_type', ''), response.get('access_token', ''))
    try:
        request = urlrequest.Request(url)
        request.add_unredirected_header('Metadata-Flavor', 'Google')
        request.add_unredirected_header('Authorization', self.token)
        # Keep the payload as bytes: the destination is opened in binary
        # mode below, and downloaded scripts need not be valid UTF-8.
        # (Decoding to str here made the 'wb' write raise TypeError.)
        content = urlrequest.urlopen(request).read()
    except (httpclient.HTTPException, socket.error, urlerror.URLError) as e:
        self.logger.warning('Could not download %s. %s.', url, str(e))
        return None
    with open(dest, 'wb') as f:
        f.write(content)
    return dest
Download a Google Storage URL using an authentication token .
40,456
def _DownloadUrl(self, url, dest_dir):
    """Download url into a new file in dest_dir; return its name or None."""
    dest_file = tempfile.NamedTemporaryFile(dir=dest_dir, delete=False)
    dest_file.close()
    dest = dest_file.name
    self.logger.info('Downloading url from %s to %s.', url, dest)
    try:
        urlretrieve.urlretrieve(url, dest)
        return dest
    except (httpclient.HTTPException, socket.error, urlerror.URLError) as e:
        self.logger.warning('Could not download %s. %s.', url, str(e))
    except Exception as e:
        self.logger.warning('Exception downloading %s. %s.', url, str(e))
    return None
Download a script from a given URL .
40,457
def _DownloadScript ( self , url , dest_dir ) : if url . startswith ( r'gs://' ) : url = re . sub ( '^gs://' , 'https://storage.googleapis.com/' , url ) return self . _DownloadAuthUrl ( url , dest_dir ) header = r'http[s]?://' domain = r'storage\.googleapis\.com' bucket = r'(?P<bucket>[a-z0-9][-_.a-z0-9]*[a-z0-9])' obj = r'(?P<obj>[^\*\?]+)' gs_regex = re . compile ( r'\A%s%s\.%s/%s\Z' % ( header , bucket , domain , obj ) ) match = gs_regex . match ( url ) if match : return self . _DownloadAuthUrl ( url , dest_dir ) gs_regex = re . compile ( r'\A%s(commondata)?%s/%s/%s\Z' % ( header , domain , bucket , obj ) ) match = gs_regex . match ( url ) if match : return self . _DownloadAuthUrl ( url , dest_dir ) return self . _DownloadUrl ( url , dest_dir )
Download the contents of the URL to the destination .
40,458
def _GetAttributeScripts ( self , attribute_data , dest_dir ) : script_dict = { } attribute_data = attribute_data or { } metadata_key = '%s-script' % self . script_type metadata_value = attribute_data . get ( metadata_key ) if metadata_value : self . logger . info ( 'Found %s in metadata.' , metadata_key ) with tempfile . NamedTemporaryFile ( mode = 'w' , dir = dest_dir , delete = False ) as dest : dest . write ( metadata_value . lstrip ( ) ) script_dict [ metadata_key ] = dest . name metadata_key = '%s-script-url' % self . script_type metadata_value = attribute_data . get ( metadata_key ) if metadata_value : self . logger . info ( 'Found %s in metadata.' , metadata_key ) script_dict [ metadata_key ] = self . _DownloadScript ( metadata_value , dest_dir ) return script_dict
Retrieve the scripts from attribute metadata .
40,459
def GetScripts(self, dest_dir):
    """Return scripts to execute, preferring instance over project metadata."""
    metadata_dict = self.watcher.GetMetadata() or {}
    try:
        instance_data = metadata_dict['instance']['attributes']
    except KeyError:
        instance_data = None
        self.logger.warning('Instance attributes were not found.')
    try:
        project_data = metadata_dict['project']['attributes']
    except KeyError:
        project_data = None
        self.logger.warning('Project attributes were not found.')
    return (self._GetAttributeScripts(instance_data, dest_dir)
            or self._GetAttributeScripts(project_data, dest_dir))
Retrieve the scripts to execute .
40,460
def _MakeExecutable ( self , metadata_script ) : mode = os . stat ( metadata_script ) . st_mode os . chmod ( metadata_script , mode | stat . S_IEXEC )
Add executable permissions to a file .
40,461
def RunScripts(self, script_dict):
    """Run available metadata scripts, the URL script before the inline one."""
    metadata_types = ['%s-script-url', '%s-script']
    candidate_keys = [pattern % self.script_type for pattern in metadata_types]
    metadata_keys = [key for key in candidate_keys if script_dict.get(key)]
    if not metadata_keys:
        self.logger.info('No %s scripts found in metadata.', self.script_type)
    for metadata_key in metadata_keys:
        metadata_script = script_dict.get(metadata_key)
        self._MakeExecutable(metadata_script)
        self._RunScript(metadata_key, metadata_script)
Run the metadata scripts ; execute a URL script first if one is provided .
40,462
def _AddHeader ( self , fp ) : text = textwrap . wrap ( textwrap . dedent ( self . config_header ) , break_on_hyphens = False ) fp . write ( '\n' . join ( [ '# ' + line for line in text ] ) ) fp . write ( '\n\n' )
Create a file header in the config .
40,463
def SetOption(self, section, option, value, overwrite=True):
    """Set section/option to str(value), creating the section when needed.

    An existing value is preserved unless overwrite is True.
    """
    if not overwrite and self.config.has_option(section, option):
        return
    if not self.config.has_section(section):
        self.config.add_section(section)
    self.config.set(section, option, str(value))
Set the value of an option in the config file .
40,464
def WriteConfig(self, config_file=None):
    """Write the config values to config_file while holding a lock file."""
    config_file = config_file or self.config_file
    config_name = os.path.splitext(os.path.basename(config_file))[0]
    config_lock = '%s/lock/google_%s.lock' % (
        constants.LOCALSTATEDIR, config_name)
    with file_utils.LockFile(config_lock):
        with open(config_file, 'w') as config_fp:
            if self.config_header:
                self._AddHeader(config_fp)
            self.config.write(config_fp)
Write the config values to a given file .
40,465
def Logger(name, debug=False, facility=None):
    """Create a logger with optional console (debug) and syslog handlers.

    The logger never propagates and always carries a NullHandler so that
    logging is a no-op when neither debug nor facility is given.
    """
    logger = logging.getLogger(name)
    logger.handlers = []
    logger.addHandler(logging.NullHandler())
    logger.propagate = False
    logger.setLevel(logging.DEBUG)
    formatter = logging.Formatter(name + ': %(levelname)s %(message)s')
    if debug:
        console_handler = logging.StreamHandler()
        console_handler.setLevel(logging.DEBUG)
        console_handler.setFormatter(formatter)
        logger.addHandler(console_handler)
    if facility:
        syslog_handler = logging.handlers.SysLogHandler(
            address=constants.SYSLOG_SOCKET, facility=facility)
        syslog_handler.setLevel(logging.INFO)
        syslog_handler.setFormatter(formatter)
        logger.addHandler(syslog_handler)
    return logger
Get a logging object with handlers for sending logs to SysLog .
40,466
def _CreateSudoersGroup(self):
    """Ensure the Google sudoers group and its sudoers.d entry exist."""
    if not self._GetGroup(self.google_sudoers_group):
        try:
            command = self.groupadd_cmd.format(
                group=self.google_sudoers_group)
            subprocess.check_call(command.split(' '))
        except subprocess.CalledProcessError as e:
            self.logger.warning(
                'Could not create the sudoers group. %s.', str(e))
    if not os.path.exists(self.google_sudoers_file):
        try:
            with open(self.google_sudoers_file, 'w') as group:
                message = '%{0} ALL=(ALL:ALL) NOPASSWD:ALL'.format(
                    self.google_sudoers_group)
                group.write(message)
        except IOError as e:
            self.logger.error(
                'Could not write sudoers file. %s. %s',
                self.google_sudoers_file, str(e))
            return
    file_utils.SetPermissions(
        self.google_sudoers_file, mode=0o440, uid=0, gid=0)
Create a Linux group for Google added sudo user accounts .
40,467
def _AddUser ( self , user ) : self . logger . info ( 'Creating a new user account for %s.' , user ) command = self . useradd_cmd . format ( user = user ) try : subprocess . check_call ( command . split ( ' ' ) ) except subprocess . CalledProcessError as e : self . logger . warning ( 'Could not create user %s. %s.' , user , str ( e ) ) return False else : self . logger . info ( 'Created user account %s.' , user ) return True
Configure a Linux user account .
40,468
def _UpdateUserGroups ( self , user , groups ) : groups = ',' . join ( groups ) self . logger . debug ( 'Updating user %s with groups %s.' , user , groups ) command = self . usermod_cmd . format ( user = user , groups = groups ) try : subprocess . check_call ( command . split ( ' ' ) ) except subprocess . CalledProcessError as e : self . logger . warning ( 'Could not update user %s. %s.' , user , str ( e ) ) return False else : self . logger . debug ( 'Updated user account %s.' , user ) return True
Update group membership for a Linux user .
40,469
def _UpdateAuthorizedKeys(self, user, ssh_keys):
  """Update the authorized keys file for a Linux user with a list of SSH keys.

  Google-managed key lines (a comment marker followed by the key) are
  rewritten from metadata on every call; user-added lines are preserved.
  The file is rebuilt in a temp file and copied into place so a partial
  write never corrupts the live authorized_keys file.

  Args:
    user: string, the name of the Linux user account.
    ssh_keys: list, the SSH key strings associated with the user.

  Raises:
    IOError, OSError: if the file operations fail; callers handle these.
  """
  pw_entry = self._GetUser(user)
  if not pw_entry:
    return
  uid = pw_entry.pw_uid
  gid = pw_entry.pw_gid
  home_dir = pw_entry.pw_dir
  ssh_dir = os.path.join(home_dir, '.ssh')
  authorized_keys_file = os.path.join(ssh_dir, 'authorized_keys')

  # Never write through a symlink; it could redirect root's write anywhere.
  if os.path.islink(ssh_dir) or os.path.islink(authorized_keys_file):
    self.logger.warning(
        'Not updating authorized keys for user %s. File is a symlink.', user)
    return

  if not os.path.exists(home_dir):
    file_utils.SetPermissions(
        home_dir, mode=0o755, uid=uid, gid=gid, mkdir=True)
  file_utils.SetPermissions(ssh_dir, mode=0o700, uid=uid, gid=gid, mkdir=True)

  prefix = self.logger.name + '-'
  with tempfile.NamedTemporaryFile(
      mode='w', prefix=prefix, delete=True) as updated_keys:
    updated_keys_file = updated_keys.name
    if os.path.exists(authorized_keys_file):
      # Fix: read via a context manager so the handle is closed promptly
      # (the previous open(...).readlines() leaked the file object).
      with open(authorized_keys_file) as keys_file:
        lines = keys_file.readlines()
    else:
      lines = []

    # Google-managed entries are comment/key line pairs; collect both
    # indices of each pair so they can be skipped and rewritten below.
    google_lines = set()
    for i, line in enumerate(lines):
      if line.startswith(self.google_comment):
        google_lines.update([i, i + 1])

    # Preserve any user-provided key lines.
    for i, line in enumerate(lines):
      if i not in google_lines and line:
        line += '\n' if not line.endswith('\n') else ''
        updated_keys.write(line)

    # Append the Google-managed keys from metadata.
    for ssh_key in ssh_keys:
      ssh_key += '\n' if not ssh_key.endswith('\n') else ''
      updated_keys.write('%s\n' % self.google_comment)
      updated_keys.write(ssh_key)

    updated_keys.flush()
    shutil.copy(updated_keys_file, authorized_keys_file)

    file_utils.SetPermissions(
        authorized_keys_file, mode=0o600, uid=uid, gid=gid)
Update the authorized keys file for a Linux user with a list of SSH keys .
40,470
def _UpdateSudoer ( self , user , sudoer = False ) : if sudoer : self . logger . info ( 'Adding user %s to the Google sudoers group.' , user ) command = self . gpasswd_add_cmd . format ( user = user , group = self . google_sudoers_group ) else : self . logger . info ( 'Removing user %s from the Google sudoers group.' , user ) command = self . gpasswd_remove_cmd . format ( user = user , group = self . google_sudoers_group ) try : subprocess . check_call ( command . split ( ' ' ) ) except subprocess . CalledProcessError as e : self . logger . warning ( 'Could not update user %s. %s.' , user , str ( e ) ) return False else : self . logger . debug ( 'Removed user %s from the Google sudoers group.' , user ) return True
Update sudoer group membership for a Linux user account .
40,471
def _RemoveAuthorizedKeys ( self , user ) : pw_entry = self . _GetUser ( user ) if not pw_entry : return home_dir = pw_entry . pw_dir authorized_keys_file = os . path . join ( home_dir , '.ssh' , 'authorized_keys' ) if os . path . exists ( authorized_keys_file ) : try : os . remove ( authorized_keys_file ) except OSError as e : message = 'Could not remove authorized keys for user %s. %s.' self . logger . warning ( message , user , str ( e ) )
Remove a Linux user account's authorized keys file to prevent login.
40,472
def GetConfiguredUsers(self):
  """Retrieve the list of configured Google user accounts.

  Returns:
    list, the username strings previously recorded in the Google users
    file, or an empty list if the file does not exist.
  """
  if os.path.exists(self.google_users_file):
    # Fix: use a context manager so the file handle is closed promptly
    # (the previous open(...).readlines() leaked the file object).
    with open(self.google_users_file) as users_file:
      users = users_file.readlines()
  else:
    users = []
  return [user.strip() for user in users]
Retrieve the list of configured Google user accounts .
40,473
def SetConfiguredUsers(self, users):
  """Set the list of configured Google user accounts.

  The list is staged in a temp file and copied into place so a partial
  write never corrupts the live users file, then restricted to root.

  Args:
    users: list, the username strings of the Linux accounts.
  """
  prefix = self.logger.name + '-'
  with tempfile.NamedTemporaryFile(
      mode='w', prefix=prefix, delete=True) as updated_users:
    for username in users:
      updated_users.write(username + '\n')
    updated_users.flush()
    if not os.path.exists(self.google_users_dir):
      os.makedirs(self.google_users_dir)
    shutil.copy(updated_users.name, self.google_users_file)

    file_utils.SetPermissions(
        self.google_users_file, mode=0o600, uid=0, gid=0)
Set the list of configured Google user accounts .
40,474
def UpdateUser(self, user, ssh_keys):
  """Update a Linux user with authorized SSH keys.

  Creates the account (and its group memberships) if missing, ensures
  sudoers membership, then rewrites the authorized keys file — unless the
  user chose `nologin` as a shell, in which case keys are left alone.

  Args:
    user: string, the name of the Linux user account.
    ssh_keys: list, the SSH key strings associated with the user.

  Returns:
    bool, True if the user account and keys were updated successfully.
  """
  if not bool(USER_REGEX.match(user)):
    self.logger.warning('Invalid user account name %s.', user)
    return False

  if not self._GetUser(user):
    # The account does not yet exist; create it and assign groups.
    created = self._AddUser(user) and self._UpdateUserGroups(user, self.groups)
    if not created:
      return False

  if not self._UpdateSudoer(user, sudoer=True):
    return False

  # Respect a user-selected `nologin` shell: leave their keys untouched.
  pw_entry = self._GetUser(user)
  if pw_entry and os.path.basename(pw_entry.pw_shell) == 'nologin':
    message = 'Not updating user %s. User set `nologin` as login shell.'
    self.logger.debug(message, user)
    return True

  try:
    self._UpdateAuthorizedKeys(user, ssh_keys)
  except (IOError, OSError) as e:
    message = 'Could not update the authorized keys file for user %s. %s.'
    self.logger.warning(message, user, str(e))
    return False
  return True
Update a Linux user with authorized SSH keys .
40,475
def RemoveUser(self, user):
  """Remove a Linux user account.

  Deletes the account itself only when configured to (self.remove);
  always removes the authorized keys file and sudoers membership.

  Args:
    user: string, the name of the Linux user account.
  """
  self.logger.info('Removing user %s.', user)
  if self.remove:
    try:
      subprocess.check_call(self.userdel_cmd.format(user=user).split(' '))
    except subprocess.CalledProcessError as e:
      self.logger.warning('Could not remove user %s. %s.', user, str(e))
    else:
      self.logger.info('Removed user account %s.', user)
  self._RemoveAuthorizedKeys(user)
  self._UpdateSudoer(user, sudoer=False)
Remove a Linux user account .
40,476
def _RunOsLoginControl(self, params):
  """Run the OS Login control script.

  Args:
    params: list, the arguments to pass to the script.

  Returns:
    int, the script's exit status, or None if the script is not installed.
  """
  try:
    return subprocess.call([constants.OSLOGIN_CONTROL_SCRIPT] + params)
  except OSError as e:
    # A missing script means OS Login is not installed; anything else
    # is a genuine failure.
    if e.errno != errno.ENOENT:
      raise
    return None
Run the OS Login control script .
40,477
def _GetStatus(self, two_factor=False):
  """Check whether OS Login is installed and active.

  Args:
    two_factor: bool, True to check the two factor authentication status.

  Returns:
    bool, True if the requested feature is active; False if inactive or
    the NSS cache is missing; None if OS Login is not installed.
  """
  args = ['status']
  if two_factor:
    args += ['--twofactor']
  retcode = self._RunOsLoginControl(args)
  if retcode is None:
    # Only warn on the transition to avoid log spam on every poll.
    if self.oslogin_installed:
      self.logger.warning('OS Login not installed.')
      self.oslogin_installed = False
    return None
  self.oslogin_installed = True
  # Without the NSS cache file, OS Login is not fully configured.
  if not os.path.exists(constants.OSLOGIN_NSS_CACHE):
    return False
  # The control script exits 0 when the feature is active.
  return not retcode
Check whether OS Login is installed .
40,478
def _RunOsLoginNssCache(self):
  """Run the OS Login NSS cache binary.

  Returns:
    int, the binary's exit status, or None if it is not installed.
  """
  try:
    return subprocess.call([constants.OSLOGIN_NSS_CACHE_SCRIPT])
  except OSError as e:
    # A missing binary is expected on systems without OS Login.
    if e.errno != errno.ENOENT:
      raise
    return None
Run the OS Login NSS cache binary .
40,479
def _RemoveOsLoginNssCache(self):
  """Remove the OS Login NSS cache file, ignoring a concurrent removal."""
  if not os.path.exists(constants.OSLOGIN_NSS_CACHE):
    return
  try:
    os.remove(constants.OSLOGIN_NSS_CACHE)
  except OSError as e:
    # ENOENT means another actor removed it between the check and the
    # remove; that is fine. Anything else is a real error.
    if e.errno != errno.ENOENT:
      raise
Remove the OS Login NSS cache file .
40,480
def UpdateOsLogin(self, oslogin_desired, two_factor_desired=False):
  """Update whether OS Login is enabled and update NSS cache if necessary.

  Args:
    oslogin_desired: bool, enable OS Login if True, disable if False.
    two_factor_desired: bool, enable two factor authentication if True.

  Returns:
    int, the return code from the OS Login control script (0 on a no-op),
    or None if OS Login is not installed.
  """
  oslogin_configured = self._GetStatus(two_factor=False)
  if oslogin_configured is None:
    # OS Login is not installed; nothing to do.
    return None
  two_factor_configured = self._GetStatus(two_factor=True)
  # Two factor only makes sense when OS Login itself is enabled.
  two_factor_desired = two_factor_desired and oslogin_desired

  if oslogin_desired:
    params = ['activate']
    if two_factor_desired:
      params += ['--twofactor']
    # OS Login is desired and not enabled.
    if not oslogin_configured:
      self.logger.info('Activating OS Login.')
      return self._RunOsLoginControl(params) or self._RunOsLoginNssCache()
    # Enable two factor authentication.
    if two_factor_desired and not two_factor_configured:
      self.logger.info('Activating OS Login two factor authentication.')
      return self._RunOsLoginControl(params) or self._RunOsLoginNssCache()
    # Deactivate two factor authentication: a full deactivate/activate
    # cycle is required to drop the two factor configuration.
    if two_factor_configured and not two_factor_desired:
      self.logger.info('Reactivating OS Login with two factor disabled.')
      return (self._RunOsLoginControl(['deactivate'])
              or self._RunOsLoginControl(params))
    # OS Login is already in the desired state; periodically refresh the
    # NSS cache so user/group data stays current.
    current_time = time.time()
    if current_time - self.update_time > NSS_CACHE_DURATION_SEC:
      self.update_time = current_time
      return self._RunOsLoginNssCache()
  elif oslogin_configured:
    self.logger.info('Deactivating OS Login.')
    return (self._RunOsLoginControl(['deactivate'])
            or self._RemoveOsLoginNssCache())
  # No action was needed.
  return 0
Update whether OS Login is enabled and update NSS cache if necessary .
40,481
def CallDhclient(interfaces, logger, dhclient_script=None):
  """Configure the network interfaces using dhclient.

  Args:
    interfaces: list, the network interface names to enable.
    logger: logger object, used to write to SysLog and serial port.
    dhclient_script: string, optional path to a dhclient script.
  """
  logger.info('Enabling the Ethernet interfaces %s.', interfaces)
  command = ['dhclient']
  if dhclient_script and os.path.exists(dhclient_script):
    command += ['-sf', dhclient_script]
  try:
    # Release existing leases (-x) before requesting fresh ones.
    subprocess.check_call(command + ['-x'] + interfaces)
    subprocess.check_call(command + interfaces)
  except subprocess.CalledProcessError:
    logger.warning('Could not enable interfaces %s.', interfaces)
Configure the network interfaces using dhclient .
40,482
def CallHwclock(logger):
  """Sync clock using hwclock.

  Args:
    logger: logger object, used to write to SysLog and serial port.
  """
  try:
    # --hctosys sets the system time from the hardware clock.
    subprocess.check_call(['/sbin/hwclock', '--hctosys'])
  except subprocess.CalledProcessError:
    logger.warning('Failed to sync system time with hardware clock.')
  else:
    logger.info('Synced system time with hardware clock.')
Sync clock using hwclock .
40,483
def CallNtpdate(logger):
  """Sync clock using ntpdate.

  Stops ntpd if running (ntpdate cannot run alongside it), syncs against
  the servers listed in /etc/ntp.conf, then restarts ntpd.

  Args:
    logger: logger object, used to write to SysLog and serial port.
  """
  ntpd_was_active = not subprocess.call(['service', 'ntpd', 'status'])
  try:
    if ntpd_was_active:
      subprocess.check_call(['service', 'ntpd', 'stop'])
    subprocess.check_call(
        'ntpdate `awk \'$1=="server" {print $2}\' /etc/ntp.conf`', shell=True)
    if ntpd_was_active:
      subprocess.check_call(['service', 'ntpd', 'start'])
  except subprocess.CalledProcessError:
    logger.warning('Failed to sync system time with ntp server.')
  else:
    logger.info('Synced system time with ntp server.')
Sync clock using ntpdate .
40,484
def _GetNumericProjectId ( self ) : project_id = 'project/numeric-project-id' return self . watcher . GetMetadata ( metadata_key = project_id , recursive = False )
Get the numeric project ID for this VM .
40,485
def _CreateConfig(self, project_id):
  """Create the boto config to support standalone GSUtil.

  Args:
    project_id: string, the project ID to use in the config file;
        falls back to the metadata server's numeric project ID.
  """
  project_id = project_id or self._GetNumericProjectId()
  if not project_id:
    # The project ID is not yet available; skip writing the config.
    return
  self.boto_config_header %= (
      self.boto_config_script, self.boto_config_template)
  config = config_manager.ConfigManager(
      config_file=self.boto_config_template,
      config_header=self.boto_config_header)
  plugin_dir = os.path.dirname(self.boto_config_script)
  config.SetOption('GSUtil', 'default_project_id', project_id)
  config.SetOption('GSUtil', 'default_api_version', '2')
  config.SetOption('GoogleCompute', 'service_account', 'default')
  config.SetOption('Plugin', 'plugin_directory', plugin_dir)
  config.WriteConfig(config_file=self.boto_config)
Create the boto config to support standalone GSUtil .
40,486
def _CreateRouteOptions ( self , ** kwargs ) : options = { 'proto' : self . proto_id , 'scope' : 'host' , } options . update ( kwargs ) return options
Create a dictionary of parameters to append to the ip route command .
40,487
def _RunIpRoute(self, args=None, options=None):
  """Run a command with ip route and return the response.

  Args:
    args: list, the string ip route command args to execute.
    options: dict, the string parameters to append to the ip route command.

  Returns:
    string, the standard output from the ip route command, or '' on error.
  """
  command = ['ip', 'route']
  command.extend(args or [])
  for option_pair in (options or {}).items():
    command.extend(option_pair)
  try:
    process = subprocess.Popen(
        command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout, stderr = process.communicate()
  except OSError as e:
    self.logger.warning('Exception running %s. %s.', command, str(e))
    return ''
  if process.returncode:
    message = 'Non-zero exit status running %s. %s.'
    self.logger.warning(message, command, stderr.strip())
    return ''
  return stdout.decode('utf-8', 'replace')
Run a command with ip route and return the response .
40,488
def RemoveForwardedIp(self, address, interface):
  """Delete an IP address on the network interface.

  Args:
    address: string, the IP address (or CIDR) to remove.
    interface: string, the name of the network interface.
  """
  network = netaddr.IPNetwork(address)
  # Strip any prefix length; ifconfig -alias takes the bare address.
  self._RunIfconfig(args=[interface, '-alias', str(network.ip)])
Delete an IP address on the network interface .
40,489
def _GetGsScopes(self):
  """Return all Google Storage scopes available on this VM.

  Returns:
    list, the Google Storage scopes granted to the service account,
    or None if the account or its scopes are unavailable.
  """
  service_accounts = self.watcher.GetMetadata(metadata_key=self.metadata_key)
  try:
    scopes = service_accounts[self.service_account]['scopes']
  except KeyError:
    return None
  # Only report the scopes relevant to Google Storage.
  return list(GS_SCOPES.intersection(set(scopes))) if scopes else None
Return all Google Storage scopes available on this VM .
40,490
def _GetAccessToken ( self ) : service_accounts = self . watcher . GetMetadata ( metadata_key = self . metadata_key ) try : return service_accounts [ self . service_account ] [ 'token' ] [ 'access_token' ] except KeyError : return None
Return an OAuth 2 . 0 access token for Google Storage .
40,491
def HandleClockSync(self, response):
  """Called when the clock drift token changes; resync the system clock.

  Args:
    response: string, the metadata response with the new drift token value.
  """
  self.logger.info('Clock drift token has changed: %s.', response)
  # Delegate the actual sync to the distro-specific implementation.
  self.distro_utils.HandleClockSync(self.logger)
Called when clock drift token changes .
40,492
def _DisableNetworkManager ( self , interfaces , logger ) : for interface in interfaces : interface_config = os . path . join ( self . network_path , 'ifcfg-%s' % interface ) if os . path . exists ( interface_config ) : self . _ModifyInterface ( interface_config , 'DEVICE' , interface , replace = False ) self . _ModifyInterface ( interface_config , 'NM_CONTROLLED' , 'no' , replace = True ) else : with open ( interface_config , 'w' ) as interface_file : interface_content = [ '# Added by Google.' , 'BOOTPROTO=none' , 'DEFROUTE=no' , 'DEVICE=%s' % interface , 'IPV6INIT=no' , 'NM_CONTROLLED=no' , 'NOZEROCONF=yes' , '' , ] interface_file . write ( '\n' . join ( interface_content ) ) logger . info ( 'Created config file for interface %s.' , interface )
Disable network manager management on a list of network interfaces .
40,493
def _ModifyInterface ( self , interface_config , config_key , config_value , replace = False ) : config_entry = '%s=%s' % ( config_key , config_value ) if not open ( interface_config ) . read ( ) . count ( config_key ) : with open ( interface_config , 'a' ) as config : config . write ( '%s\n' % config_entry ) elif replace : for line in fileinput . input ( interface_config , inplace = True ) : print ( re . sub ( r'%s=.*' % config_key , config_entry , line . rstrip ( ) ) )
Write a value to a config file if not already present .
40,494
def _HasExpired ( self , key ) : self . logger . debug ( 'Processing key: %s.' , key ) try : schema , json_str = key . split ( None , 3 ) [ 2 : ] except ( ValueError , AttributeError ) : self . logger . debug ( 'No schema identifier. Not expiring key.' ) return False if schema != 'google-ssh' : self . logger . debug ( 'Invalid schema %s. Not expiring key.' , schema ) return False try : json_obj = json . loads ( json_str ) except ValueError : self . logger . debug ( 'Invalid JSON %s. Not expiring key.' , json_str ) return False if 'expireOn' not in json_obj : self . logger . debug ( 'No expiration timestamp. Not expiring key.' ) return False expire_str = json_obj [ 'expireOn' ] format_str = '%Y-%m-%dT%H:%M:%S+0000' try : expire_time = datetime . datetime . strptime ( expire_str , format_str ) except ValueError : self . logger . warning ( 'Expiration timestamp "%s" not in format %s. Not expiring key.' , expire_str , format_str ) return False return datetime . datetime . utcnow ( ) > expire_time
Check whether an SSH key has expired .
40,495
def _ParseAccountsData ( self , account_data ) : if not account_data : return { } lines = [ line for line in account_data . splitlines ( ) if line ] user_map = { } for line in lines : if not all ( ord ( c ) < 128 for c in line ) : self . logger . info ( 'SSH key contains non-ascii character: %s.' , line ) continue split_line = line . split ( ':' , 1 ) if len ( split_line ) != 2 : self . logger . info ( 'SSH key is not a complete entry: %s.' , split_line ) continue user , key = split_line if self . _HasExpired ( key ) : self . logger . debug ( 'Expired SSH key for user %s: %s.' , user , key ) continue if user not in user_map : user_map [ user ] = [ ] user_map [ user ] . append ( key ) logging . debug ( 'User accounts: %s.' , user_map ) return user_map
Parse the SSH key data into a user map .
40,496
def _GetInstanceAndProjectAttributes ( self , metadata_dict ) : metadata_dict = metadata_dict or { } try : instance_data = metadata_dict [ 'instance' ] [ 'attributes' ] except KeyError : instance_data = { } self . logger . warning ( 'Instance attributes were not found.' ) try : project_data = metadata_dict [ 'project' ] [ 'attributes' ] except KeyError : project_data = { } self . logger . warning ( 'Project attributes were not found.' ) return instance_data , project_data
Get dictionaries for instance and project attributes .
40,497
def _GetAccountsData ( self , metadata_dict ) : instance_data , project_data = self . _GetInstanceAndProjectAttributes ( metadata_dict ) valid_keys = [ instance_data . get ( 'sshKeys' ) , instance_data . get ( 'ssh-keys' ) ] block_project = instance_data . get ( 'block-project-ssh-keys' , '' ) . lower ( ) if block_project != 'true' and not instance_data . get ( 'sshKeys' ) : valid_keys . append ( project_data . get ( 'ssh-keys' ) ) valid_keys . append ( project_data . get ( 'sshKeys' ) ) accounts_data = '\n' . join ( [ key for key in valid_keys if key ] ) return self . _ParseAccountsData ( accounts_data )
Get the user accounts specified in metadata server contents .
40,498
def _UpdateUsers ( self , update_users ) : for user , ssh_keys in update_users . items ( ) : if not user or user in self . invalid_users : continue configured_keys = self . user_ssh_keys . get ( user , [ ] ) if set ( ssh_keys ) != set ( configured_keys ) : if not self . utils . UpdateUser ( user , ssh_keys ) : self . invalid_users . add ( user ) else : self . user_ssh_keys [ user ] = ssh_keys [ : ]
Provision and update Linux user accounts based on account metadata .
40,499
def _RemoveUsers ( self , remove_users ) : for username in remove_users : self . utils . RemoveUser ( username ) self . user_ssh_keys . pop ( username , None ) self . invalid_users -= set ( remove_users )
Deprovision Linux user accounts that do not appear in account metadata .