idx int64 0 63k | question stringlengths 61 4.03k | target stringlengths 6 1.23k |
|---|---|---|
27,800 | def check_rdn_deposits ( raiden , user_deposit_proxy : UserDeposit ) : while True : rei_balance = user_deposit_proxy . effective_balance ( raiden . address , "latest" ) rdn_balance = to_rdn ( rei_balance ) if rei_balance < MIN_REI_THRESHOLD : click . secho ( ( f'WARNING\n' f'Your account\'s RDN balance of {rdn_balance}... | Check periodically for RDN deposits in the user - deposits contract |
27,801 | def check_network_id ( network_id , web3 : Web3 ) : while True : current_id = int ( web3 . version . network ) if network_id != current_id : raise RuntimeError ( f'Raiden was running on network with id {network_id} and it detected ' f'that the underlying ethereum client network id changed to {current_id}.' f' Changing ... | Check periodically if the underlying ethereum client s network id has changed |
27,802 | def register_callback ( self , callback ) : if not callable ( callback ) : raise ValueError ( 'callback is not a callable' ) self . callbacks . append ( callback ) | Register a new callback . |
27,803 | def remove_callback ( self , callback ) : if callback in self . callbacks : self . callbacks . remove ( callback ) | Remove callback from the list of callbacks if it exists |
27,804 | def first_run ( self , known_block_number ) : assert self . callbacks , 'callbacks not set' latest_block = self . chain . get_block ( block_identifier = 'latest' ) log . debug ( 'Alarm task first run' , known_block_number = known_block_number , latest_block_number = latest_block [ 'number' ] , latest_gas_limit = latest... | Blocking call to update the local state if necessary . |
27,805 | def _maybe_run_callbacks ( self , latest_block ) : assert self . known_block_number is not None , 'known_block_number not set' latest_block_number = latest_block [ 'number' ] missed_blocks = latest_block_number - self . known_block_number if missed_blocks < 0 : log . critical ( 'Block number decreased' , chain_id = sel... | Run the callbacks if there is at least one new block . |
27,806 | def channel_state_until_state_change ( raiden , canonical_identifier : CanonicalIdentifier , state_change_identifier : int , ) -> typing . Optional [ NettingChannelState ] : wal = restore_to_state_change ( transition_function = node . state_transition , storage = raiden . wal . storage , state_change_identifier = state... | Go through WAL state changes until a certain balance hash is found . |
27,807 | def pack_data ( abi_types , values ) -> bytes : if len ( abi_types ) != len ( values ) : raise ValueError ( "Length mismatch between provided abi types and values. Got " "{0} types and {1} values." . format ( len ( abi_types ) , len ( values ) ) , ) normalized_values = map_abi_data ( [ abi_address_to_hex ] , abi_types... | Normalize data and pack them into a byte array |
27,808 | def _expiration_generator ( timeout_generator : Iterable [ float ] , now : Callable [ [ ] , float ] = time . time , ) -> Iterator [ bool ] : for timeout in timeout_generator : _next = now ( ) + timeout yield True while now ( ) < _next : yield False | Stateful generator that yields True if more than timeout has passed since previous True False otherwise . |
27,809 | def enqueue ( self , queue_identifier : QueueIdentifier , message : Message ) : assert queue_identifier . recipient == self . receiver with self . _lock : already_queued = any ( queue_identifier == data . queue_identifier and message == data . message for data in self . _message_queue ) if already_queued : self . log .... | Enqueue a message to be sent and notify main loop |
27,810 | def stop ( self ) : if self . _stop_event . ready ( ) : return self . _stop_event . set ( ) self . _global_send_event . set ( ) for retrier in self . _address_to_retrier . values ( ) : if retrier : retrier . notify ( ) self . _client . set_presence_state ( UserPresence . OFFLINE . value ) self . _client . stop_listener... | Try to gracefully stop the greenlet synchronously |
27,811 | def whitelist ( self , address : Address ) : self . log . debug ( 'Whitelist' , address = to_normalized_address ( address ) ) self . _address_mgr . add_address ( address ) | Whitelist peer address to receive communications from |
27,812 | def send_async ( self , queue_identifier : QueueIdentifier , message : Message , ) : receiver_address = queue_identifier . recipient if not is_binary_address ( receiver_address ) : raise ValueError ( 'Invalid address {}' . format ( pex ( receiver_address ) ) ) if isinstance ( message , ( Delivered , Ping , Pong ) ) : r... | Queue the message for sending to recipient in the queue_identifier |
27,813 | def send_global ( self , room : str , message : Message ) -> None : self . _global_send_queue . put ( ( room , message ) ) self . _global_send_event . set ( ) | Sends a message to one of the global rooms |
27,814 | def _handle_invite ( self , room_id : _RoomID , state : dict ) : if self . _stop_event . ready ( ) : return self . log . debug ( 'Got invite' , room_id = room_id ) invite_events = [ event for event in state [ 'events' ] if event [ 'type' ] == 'm.room.member' and event [ 'content' ] . get ( 'membership' ) == 'invite' an... | Join rooms invited by whitelisted partners |
27,815 | def _get_retrier ( self , receiver : Address ) -> _RetryQueue : if receiver not in self . _address_to_retrier : retrier = _RetryQueue ( transport = self , receiver = receiver ) self . _address_to_retrier [ receiver ] = retrier retrier . start ( ) return self . _address_to_retrier [ receiver ] | Construct and return a _RetryQueue for receiver |
27,816 | def _get_private_room ( self , invitees : List [ User ] ) : return self . _client . create_room ( None , invitees = [ user . user_id for user in invitees ] , is_public = False , ) | Create an anonymous private room and invite peers |
27,817 | def _sign ( self , data : bytes ) -> bytes : assert self . _raiden_service is not None return self . _raiden_service . signer . sign ( data = data ) | Use eth_sign compatible hasher to sign matrix data |
27,818 | def _get_user ( self , user : Union [ User , str ] ) -> User : user_id : str = getattr ( user , 'user_id' , user ) discovery_room = self . _global_rooms . get ( make_room_alias ( self . network_id , DISCOVERY_DEFAULT_ROOM ) , ) if discovery_room and user_id in discovery_room . _members : duser = discovery_room . _membe... | Creates an User from an user_id if none or fetch a cached User |
27,819 | def _set_room_id_for_address ( self , address : Address , room_id : Optional [ _RoomID ] = None ) : assert not room_id or room_id in self . _client . rooms , 'Invalid room_id' address_hex : AddressHex = to_checksum_address ( address ) room_ids = self . _get_room_ids_for_address ( address , filter_private = False ) with... | Uses GMatrixClient . set_account_data to keep updated mapping of addresses - > rooms |
27,820 | def _get_room_ids_for_address ( self , address : Address , filter_private : bool = None , ) -> List [ _RoomID ] : address_hex : AddressHex = to_checksum_address ( address ) with self . _account_data_lock : room_ids = self . _client . account_data . get ( 'network.raiden.rooms' , { } , ) . get ( address_hex ) self . log... | Uses GMatrixClient . get_account_data to get updated mapping of address - > rooms |
27,821 | def _leave_unused_rooms ( self , _address_to_room_ids : Dict [ AddressHex , List [ _RoomID ] ] ) : _msg = '_leave_unused_rooms called without account data lock' assert self . _account_data_lock . locked ( ) , _msg self . _client . set_account_data ( 'network.raiden.rooms' , cast ( Dict [ str , Any ] , _address_to_room_... | Checks for rooms we ve joined and which partner isn t health - checked and leave . |
27,822 | def nodes ( self ) -> List [ str ] : if self . _scenario_version == 1 and 'range' in self . _config : range_config = self . _config [ 'range' ] try : start , stop = range_config [ 'first' ] , range_config [ 'last' ] + 1 except KeyError : raise MissingNodesConfiguration ( 'Setting "range" must be a dict containing keys ... | Return the list of nodes configured in the scenario s yaml . |
27,823 | def version ( self ) -> int : version = self . _config . get ( 'version' , 1 ) if version not in SUPPORTED_SCENARIO_VERSIONS : raise InvalidScenarioVersion ( f'Unexpected scenario version {version}' ) return version | Return the scenario s version . |
27,824 | def protocol ( self ) -> str : if self . nodes . mode is NodeMode . MANAGED : if 'protocol' in self . _config : log . warning ( 'The "protocol" setting is not supported in "managed" node mode.' ) return 'http' return self . _config . get ( 'protocol' , 'http' ) | Return the designated protocol of the scenario . |
27,825 | def task ( self ) -> Tuple [ str , Any ] : try : items , = self . configuration . items ( ) except ValueError : raise MultipleTaskDefinitions ( 'Multiple tasks defined in scenario configuration!' , ) return items | Return the scenario s task configuration as a tuple . |
27,826 | def task_class ( self ) : from scenario_player . tasks . base import get_task_class_for_type root_task_type , _ = self . task task_class = get_task_class_for_type ( root_task_type ) return task_class | Return the Task class type configured for the scenario . |
27,827 | def _check_workflows_align ( config ) : jobs_default = config [ 'workflows' ] [ 'raiden-default' ] [ 'jobs' ] jobs_nightly = config [ 'workflows' ] [ 'nightly' ] [ 'jobs' ] if jobs_default == jobs_nightly [ : len ( jobs_default ) ] : return True , [ ] job_diff = unified_diff ( [ f"{line}\n" for line in jobs_default ] ,... | Ensure that the common shared jobs in the raiden - default and nightly workflows are identical . |
27,828 | def get_transaction_data ( abi : Dict , function_name : str , args : Any = None , kwargs : Any = None , ) : args = args or list ( ) fn_abi = find_matching_fn_abi ( abi , function_name , args = args , kwargs = kwargs , ) return encode_transaction_data ( None , function_name , contract_abi = abi , fn_abi = fn_abi , args ... | Get encoded transaction data |
27,829 | def decode_transaction_input ( self , transaction_hash : bytes ) -> Dict : transaction = self . contract . web3 . eth . getTransaction ( transaction_hash , ) return self . contract . decode_function_input ( transaction [ 'input' ] , ) | Return inputs of a method call |
27,830 | def estimate_gas ( self , block_identifier , function : str , * args , ** kwargs , ) -> typing . Optional [ int ] : fn = getattr ( self . contract . functions , function ) address = to_checksum_address ( self . jsonrpc_client . address ) if self . jsonrpc_client . eth_node is constants . EthClient . GETH : block_identi... | Returns a gas estimate for the function with the given arguments or None if the function call will fail due to Insufficient funds or the logic in the called function . |
27,831 | def sign ( self , signer : Signer ) : message_data = self . _data_to_sign ( ) self . signature = signer . sign ( data = message_data ) | Sign message using signer . |
27,832 | def _sign ( self , signer : Signer ) -> Signature : data = self . _data_to_sign ( ) return signer . sign ( data ) | Internal function for the overall sign function of RequestMonitoring . |
27,833 | def verify_request_monitoring ( self , partner_address : Address , requesting_address : Address , ) -> bool : if not self . non_closing_signature : return False balance_proof_data = pack_balance_proof ( nonce = self . balance_proof . nonce , balance_hash = self . balance_proof . balance_hash , additional_hash = self . ... | One should only use this method to verify integrity and signatures of a RequestMonitoring message . |
27,834 | def get_filter_args_for_specific_event_from_channel ( token_network_address : TokenNetworkAddress , channel_identifier : ChannelID , event_name : str , contract_manager : ContractManager , from_block : BlockSpecification = GENESIS_BLOCK_NUMBER , to_block : BlockSpecification = 'latest' , ) : if not event_name : raise V... | Return the filter params for a specific event of a given channel . |
27,835 | def get_filter_args_for_all_events_from_channel ( token_network_address : TokenNetworkAddress , channel_identifier : ChannelID , contract_manager : ContractManager , from_block : BlockSpecification = GENESIS_BLOCK_NUMBER , to_block : BlockSpecification = 'latest' , ) -> Dict : event_filter_params = get_filter_args_for_... | Return the filter params for all events of a given channel . |
27,836 | def add ( self , predicted , target ) : predicted = predicted . cpu ( ) . numpy ( ) target = target . cpu ( ) . numpy ( ) assert predicted . shape [ 0 ] == target . shape [ 0 ] , 'number of targets and predicted outputs do not match' if np . ndim ( predicted ) != 1 : assert predicted . shape [ 1 ] == self . k , 'number... | Computes the confusion matrix of K x K size where K is no of classes |
27,837 | def resample ( self , seed = None ) : if seed is not None : gen = torch . manual_seed ( seed ) else : gen = torch . default_generator if self . replacement : self . perm = torch . LongTensor ( len ( self ) ) . random_ ( len ( self . dataset ) , generator = gen ) else : self . perm = torch . randperm ( len ( self . data... | Resample the dataset . |
27,838 | def reset ( self ) : self . scores = torch . FloatTensor ( torch . FloatStorage ( ) ) self . targets = torch . LongTensor ( torch . LongStorage ( ) ) self . weights = torch . FloatTensor ( torch . FloatStorage ( ) ) | Resets the meter with empty member variables |
27,839 | def value ( self ) : if self . scores . numel ( ) == 0 : return 0 ap = torch . zeros ( self . scores . size ( 1 ) ) if hasattr ( torch , "arange" ) : rg = torch . arange ( 1 , self . scores . size ( 0 ) + 1 ) . float ( ) else : rg = torch . range ( 1 , self . scores . size ( 0 ) ) . float ( ) if self . weights . numel ... | Returns the model s average precision for each class |
27,840 | def hook ( self , name , state ) : r if name in self . hooks : self . hooks [ name ] ( state ) | r Registers a backward hook . |
27,841 | def _viz_prototype ( self , vis_fn ) : def _viz_logger ( * args , ** kwargs ) : self . win = vis_fn ( * args , win = self . win , env = self . env , opts = self . opts , ** kwargs ) return _viz_logger | Outputs a function which will log the arguments to Visdom in an appropriate way . |
27,842 | def log_state ( self , state ) : results = [ ] for field_idx , field in enumerate ( self . fields ) : parent , stat = None , state for f in field : parent , stat = stat , stat [ f ] results . append ( stat ) self . log ( * results ) | Gathers the stats from self . trainer . stats and passes them into self . log as a list |
27,843 | def peek_meter ( self ) : result = { } for key in self . meter . keys ( ) : val = self . meter [ key ] . value ( ) val = val [ 0 ] if isinstance ( val , ( list , tuple ) ) else val result [ key ] = val return result | Returns a dict of all meters and their values . |
27,844 | def update ( self , task_name , result ) : with open ( self . filepath , 'rb' ) as f : existing_results = pickle . load ( f ) if task_name not in self . tasks : self . _add_task ( task_name ) existing_results [ 'tasks' ] . append ( task_name ) existing_results [ 'results' ] . append ( [ ] ) task_name_idx = existing_res... | Update the results file with new information . |
27,845 | def query ( dom , query , context = None , tagid = None ) : if context is None : source = dom else : source = context if tagid is None : return source . xpath ( query , namespaces = OneLogin_Saml2_Constants . NSMAP ) else : return source . xpath ( query , tagid = tagid , namespaces = OneLogin_Saml2_Constants . NSMAP ) | Extracts nodes that match the query from the Element |
27,846 | def dict_deep_merge ( a , b , path = None ) : if path is None : path = [ ] for key in b : if key in a : if isinstance ( a [ key ] , dict ) and isinstance ( b [ key ] , dict ) : dict_deep_merge ( a [ key ] , b [ key ] , path + [ str ( key ) ] ) elif a [ key ] == b [ key ] : pass else : a [ key ] = b [ key ] else : a [ k... | Deep - merge dictionary b into dictionary a . |
27,847 | def __load_paths ( self , base_path = None ) : if base_path is None : base_path = dirname ( dirname ( dirname ( __file__ ) ) ) if not base_path . endswith ( sep ) : base_path += sep self . __paths = { 'base' : base_path , 'cert' : base_path + 'certs' + sep , 'lib' : base_path + 'lib' + sep , 'extlib' : base_path + 'ext... | Set the paths of the different folders |
27,848 | def __update_paths ( self , settings ) : if not isinstance ( settings , dict ) : return if 'custom_base_path' in settings : base_path = settings [ 'custom_base_path' ] base_path = join ( dirname ( __file__ ) , base_path ) self . __load_paths ( base_path ) | Set custom paths if necessary |
27,849 | def __load_settings_from_dict ( self , settings ) : errors = self . check_settings ( settings ) if len ( errors ) == 0 : self . __errors = [ ] self . __sp = settings [ 'sp' ] self . __idp = settings . get ( 'idp' , { } ) self . __strict = settings . get ( 'strict' , False ) self . __debug = settings . get ( 'debug' , F... | Loads settings info from a settings Dict |
27,850 | def check_settings ( self , settings ) : assert isinstance ( settings , dict ) errors = [ ] if not isinstance ( settings , dict ) or len ( settings ) == 0 : errors . append ( 'invalid_syntax' ) else : if not self . __sp_validation_only : errors += self . check_idp_settings ( settings ) sp_errors = self . check_sp_setti... | Checks the settings info . |
27,851 | def format_idp_cert_multi ( self ) : if 'x509certMulti' in self . __idp : if 'signing' in self . __idp [ 'x509certMulti' ] : for idx in range ( len ( self . __idp [ 'x509certMulti' ] [ 'signing' ] ) ) : self . __idp [ 'x509certMulti' ] [ 'signing' ] [ idx ] = OneLogin_Saml2_Utils . format_cert ( self . __idp [ 'x509cer... | Formats the Multple IdP certs . |
27,852 | def return_false_on_exception ( func ) : @ wraps ( func ) def exceptfalse ( * args , ** kwargs ) : if not kwargs . pop ( 'raise_exceptions' , False ) : try : return func ( * args , ** kwargs ) except Exception : return False else : return func ( * args , ** kwargs ) return exceptfalse | Decorator . When applied to a function it will by default suppress any exceptions raised by that function and return False . It may be overridden by passing a raise_exceptions keyword argument when calling the wrapped function . |
27,853 | def is_https ( request_data ) : is_https = 'https' in request_data and request_data [ 'https' ] != 'off' is_https = is_https or ( 'server_port' in request_data and str ( request_data [ 'server_port' ] ) == '443' ) return is_https | Checks if https or http . |
27,854 | def get_self_url_no_query ( request_data ) : self_url_host = OneLogin_Saml2_Utils . get_self_url_host ( request_data ) script_name = request_data [ 'script_name' ] if script_name : if script_name [ 0 ] != '/' : script_name = '/' + script_name else : script_name = '' self_url_no_query = self_url_host + script_name if 'p... | Returns the URL of the current host + current view . |
27,855 | def get_self_routed_url_no_query ( request_data ) : self_url_host = OneLogin_Saml2_Utils . get_self_url_host ( request_data ) route = '' if 'request_uri' in request_data and request_data [ 'request_uri' ] : route = request_data [ 'request_uri' ] if 'query_string' in request_data and request_data [ 'query_string' ] : ro... | Returns the routed URL of the current host + current view . |
27,856 | def get_self_url ( request_data ) : self_url_host = OneLogin_Saml2_Utils . get_self_url_host ( request_data ) request_uri = '' if 'request_uri' in request_data : request_uri = request_data [ 'request_uri' ] if not request_uri . startswith ( '/' ) : match = re . search ( '^https?://[^/]*(/.*)' , request_uri ) if match i... | Returns the URL of the current host + current view + query . |
27,857 | def get_expire_time ( cache_duration = None , valid_until = None ) : expire_time = None if cache_duration is not None : expire_time = OneLogin_Saml2_Utils . parse_duration ( cache_duration ) if valid_until is not None : if isinstance ( valid_until , int ) : valid_until_time = valid_until else : valid_until_time = OneLo... | Compares 2 dates and returns the earliest . |
27,858 | def calculate_x509_fingerprint ( x509_cert , alg = 'sha1' ) : assert isinstance ( x509_cert , compat . str_type ) lines = x509_cert . split ( '\n' ) data = '' inData = False for line in lines : line = line . rstrip ( ) if not inData : if line == '-----BEGIN CERTIFICATE-----' : inData = True elif line == '-----BEGIN PUB... | Calculates the fingerprint of a formatted x509cert . |
27,859 | def sign_binary ( msg , key , algorithm = xmlsec . Transform . RSA_SHA1 , debug = False ) : if isinstance ( msg , str ) : msg = msg . encode ( 'utf8' ) xmlsec . enable_debug_trace ( debug ) dsig_ctx = xmlsec . SignatureContext ( ) dsig_ctx . key = xmlsec . Key . from_memory ( key , xmlsec . KeyFormat . PEM , None ) ret... | Sign binary message |
27,860 | def process_response ( self , request_id = None ) : self . __errors = [ ] self . __error_reason = None if 'post_data' in self . __request_data and 'SAMLResponse' in self . __request_data [ 'post_data' ] : response = OneLogin_Saml2_Response ( self . __settings , self . __request_data [ 'post_data' ] [ 'SAMLResponse' ] )... | Process the SAML Response sent by the IdP . |
27,861 | def redirect_to ( self , url = None , parameters = { } ) : if url is None and 'RelayState' in self . __request_data [ 'get_data' ] : url = self . __request_data [ 'get_data' ] [ 'RelayState' ] return OneLogin_Saml2_Utils . redirect ( url , parameters , request_data = self . __request_data ) | Redirects the user to the URL passed by parameter or to the URL that we defined in our SSO Request . |
27,862 | def __build_sign_query ( saml_data , relay_state , algorithm , saml_type , lowercase_urlencoding = False ) : sign_data = [ '%s=%s' % ( saml_type , OneLogin_Saml2_Utils . escape_url ( saml_data , lowercase_urlencoding ) ) ] if relay_state is not None : sign_data . append ( 'RelayState=%s' % OneLogin_Saml2_Utils . escape... | Build sign query |
27,863 | def check_status ( self ) : status = OneLogin_Saml2_Utils . get_status ( self . document ) code = status . get ( 'code' , None ) if code and code != OneLogin_Saml2_Constants . STATUS_SUCCESS : splited_code = code . split ( ':' ) printable_code = splited_code . pop ( ) status_exception_msg = 'The status code of the Resp... | Check if the status of the response is success or not |
27,864 | def get_authn_contexts ( self ) : authn_context_nodes = self . __query_assertion ( '/saml:AuthnStatement/saml:AuthnContext/saml:AuthnContextClassRef' ) return [ OneLogin_Saml2_XML . element_text ( node ) for node in authn_context_nodes ] | Gets the authentication contexts |
27,865 | def get_nameid ( self ) : nameid_value = None nameid_data = self . get_nameid_data ( ) if nameid_data and 'Value' in nameid_data . keys ( ) : nameid_value = nameid_data [ 'Value' ] return nameid_value | Gets the NameID provided by the SAML Response from the IdP |
27,866 | def get_nameid_format ( self ) : nameid_format = None nameid_data = self . get_nameid_data ( ) if nameid_data and 'Format' in nameid_data . keys ( ) : nameid_format = nameid_data [ 'Format' ] return nameid_format | Gets the NameID Format provided by the SAML Response from the IdP |
27,867 | def get_session_not_on_or_after ( self ) : not_on_or_after = None authn_statement_nodes = self . __query_assertion ( '/saml:AuthnStatement[@SessionNotOnOrAfter]' ) if authn_statement_nodes : not_on_or_after = OneLogin_Saml2_Utils . parse_SAML_to_time ( authn_statement_nodes [ 0 ] . get ( 'SessionNotOnOrAfter' ) ) retur... | Gets the SessionNotOnOrAfter from the AuthnStatement Could be used to set the local session expiration |
27,868 | def get_session_index ( self ) : session_index = None authn_statement_nodes = self . __query_assertion ( '/saml:AuthnStatement[@SessionIndex]' ) if authn_statement_nodes : session_index = authn_statement_nodes [ 0 ] . get ( 'SessionIndex' ) return session_index | Gets the SessionIndex from the AuthnStatement Could be used to be stored in the local session in order to be used in a future Logout Request that the SP could send to the SP to set what specific session must be deleted |
27,869 | def get_attributes ( self ) : attributes = { } attribute_nodes = self . __query_assertion ( '/saml:AttributeStatement/saml:Attribute' ) for attribute_node in attribute_nodes : attr_name = attribute_node . get ( 'Name' ) if attr_name in attributes . keys ( ) : raise OneLogin_Saml2_ValidationError ( 'Found an Attribute e... | Gets the Attributes from the AttributeStatement element . EncryptedAttributes are not supported |
27,870 | def validate_timestamps ( self ) : conditions_nodes = self . __query_assertion ( '/saml:Conditions' ) for conditions_node in conditions_nodes : nb_attr = conditions_node . get ( 'NotBefore' ) nooa_attr = conditions_node . get ( 'NotOnOrAfter' ) if nb_attr and OneLogin_Saml2_Utils . parse_SAML_to_time ( nb_attr ) > OneL... | Verifies that the document is valid according to Conditions Element |
27,871 | def __query_assertion ( self , xpath_expr ) : assertion_expr = '/saml:Assertion' signature_expr = '/ds:Signature/ds:SignedInfo/ds:Reference' signed_assertion_query = '/samlp:Response' + assertion_expr + signature_expr assertion_reference_nodes = self . __query ( signed_assertion_query ) tagid = None if not assertion_re... | Extracts nodes that match the query from the Assertion |
27,872 | def pdf ( self , x_test ) : N , D = self . data . shape x_test = np . asfortranarray ( x_test ) x_test = x_test . reshape ( [ - 1 , D ] ) pdfs = self . _individual_pdfs ( x_test ) if self . fully_dimensional : pdfs = np . sum ( np . prod ( pdfs , axis = - 1 ) * self . weights [ None , : ] , axis = - 1 ) else : pdfs = n... | Computes the probability density function at all x_test |
27,873 | def realtime_learning_curves ( runs ) : sr = sorted ( runs , key = lambda r : r . budget ) lc = list ( filter ( lambda t : not t [ 1 ] is None , [ ( r . time_stamps [ 'finished' ] , r . info [ 'test accuracy' ] ) for r in sr ] ) ) return ( [ lc , ] ) | example how to extract a different kind of learning curve . The x values are now the time the runs finished not the budget anymore . We no longer plot the validation loss on the y axis but now the test accuracy . This is just to show how to get different information into the interactive plot . |
27,874 | def load_nameserver_credentials ( self , working_directory , num_tries = 60 , interval = 1 ) : fn = os . path . join ( working_directory , 'HPB_run_%s_pyro.pkl' % self . run_id ) for i in range ( num_tries ) : try : with open ( fn , 'rb' ) as fh : self . nameserver , self . nameserver_port = pickle . load ( fh ) return... | loads the nameserver credentials in cases where master and workers share a filesystem |
27,875 | def wait_for_workers ( self , min_n_workers = 1 ) : self . logger . debug ( 'wait_for_workers trying to get the condition' ) with self . thread_cond : while ( self . dispatcher . number_of_workers ( ) < min_n_workers ) : self . logger . debug ( 'HBMASTER: only %i worker(s) available, waiting for at least %i.' % ( self ... | helper function to hold execution until some workers are active |
27,876 | def run ( self , n_iterations = 1 , min_n_workers = 1 , iteration_kwargs = { } , ) : self . wait_for_workers ( min_n_workers ) iteration_kwargs . update ( { 'result_logger' : self . result_logger } ) if self . time_ref is None : self . time_ref = time . time ( ) self . config [ 'time_ref' ] = self . time_ref self . log... | run n_iterations of SuccessiveHalving |
27,877 | def job_callback ( self , job ) : self . logger . debug ( 'job_callback for %s started' % str ( job . id ) ) with self . thread_cond : self . logger . debug ( 'job_callback for %s got condition' % str ( job . id ) ) self . num_running_jobs -= 1 if not self . result_logger is None : self . result_logger ( job ) self . i... | method to be called when a job has finished |
27,878 | def _submit_job ( self , config_id , config , budget ) : self . logger . debug ( 'HBMASTER: trying submitting job %s to dispatcher' % str ( config_id ) ) with self . thread_cond : self . logger . debug ( 'HBMASTER: submitting job %s to dispatcher' % str ( config_id ) ) self . dispatcher . submit_job ( config_id , confi... | hidden function to submit a new job to the dispatcher |
27,879 | def fit ( self , times , losses , configs = None ) : assert np . all ( times > 0 ) and np . all ( times <= self . max_num_epochs ) train = None targets = None for i in range ( len ( configs ) ) : t_idx = times [ i ] / self . max_num_epochs x = np . repeat ( np . array ( configs [ i ] ) [ None , : ] , t_idx . shape [ 0 ... | function to train the model on the observed data |
27,880 | def predict_unseen ( self , times , config ) : assert np . all ( times > 0 ) and np . all ( times <= self . max_num_epochs ) x = np . array ( config ) [ None , : ] idx = times / self . max_num_epochs x = np . repeat ( x , idx . shape [ 0 ] , axis = 0 ) x = np . concatenate ( ( x , idx [ : , None ] ) , axis = 1 ) mean ,... | predict the loss of an unseen configuration |
27,881 | def extend_partial ( self , times , obs_times , obs_losses , config = None ) : return self . predict_unseen ( times , config ) | extends a partially observed curve |
27,882 | def new_result ( self , job , update_model = True ) : if not job . exception is None : self . logger . warning ( "job {} failed with exception\n{}" . format ( job . id , job . exception ) ) | registers finished runs |
27,883 | def invert_differencing ( self , initial_part , differenced_rest , order = None ) : if order is None : order = self . diff_order starting_points = [ self . apply_differencing ( initial_part , order = order ) [ - 1 ] for order in range ( self . diff_order ) ] actual_predictions = differenced_rest import pdb pdb . set_tr... | function to invert the differencing |
27,884 | def nic_name_to_host ( nic_name ) : from netifaces import ifaddresses , AF_INET host = ifaddresses ( nic_name ) . setdefault ( AF_INET , [ { 'addr' : 'No IP addr' } ] ) [ 0 ] [ 'addr' ] return ( host ) | helper function to translate the name of a network card into a valid host name |
27,885 | def register_result ( self , job , skip_sanity_checks = False ) : if self . is_finished : raise RuntimeError ( "This HB iteration is finished, you can't register more results!" ) config_id = job . id config = job . kwargs [ 'config' ] budget = job . kwargs [ 'budget' ] timestamps = job . timestamps result = job . resul... | function to register the result of a job |
27,886 | def get_next_run ( self ) : if self . is_finished : return ( None ) for k , v in self . data . items ( ) : if v . status == 'QUEUED' : assert v . budget == self . budgets [ self . stage ] , 'Configuration budget does not align with current stage!' v . status = 'RUNNING' self . num_running += 1 return ( k , v . config ,... | function to return the next configuration and budget to run . |
27,887 | def process_results ( self ) : self . stage += 1 config_ids = list ( filter ( lambda cid : self . data [ cid ] . status == 'REVIEW' , self . data . keys ( ) ) ) if ( self . stage >= len ( self . num_configs ) ) : self . finish_up ( ) return budgets = [ self . data [ cid ] . budget for cid in config_ids ] if len ( set (... | function that is called when a stage is completed and needs to be analyzed befor further computations . |
27,888 | def fix_timestamps ( self , time_ref ) : for k , v in self . data . items ( ) : for kk , vv in v . time_stamps . items ( ) : for kkk , vvv in vv . items ( ) : self . data [ k ] . time_stamps [ kk ] [ kkk ] += time_ref | manipulates internal time stamps such that the last run ends at time 0 |
27,889 | def get_config ( self , budget ) : self . lock . acquire ( ) if not self . is_trained : c = self . config_space . sample_configuration ( ) . get_array ( ) else : candidates = np . array ( [ self . config_space . sample_configuration ( ) . get_array ( ) for _ in range ( self . n_candidates ) ] ) projected_candidates = n... | function to sample a new configuration |
def extract_HBS_learning_curves(runs):
    """Build the Hyperband learning curve from a list of runs.

    Runs are ordered by ascending budget and entries whose loss is None
    (failed or unfinished runs) are dropped.

    Returns a one-element list containing the (budget, loss) curve.
    """
    ordered = sorted(runs, key=lambda run: run.budget)
    curve = [(run.budget, run.loss) for run in ordered if run.loss is not None]
    return [curve]
27,891 | def logged_results_to_HBS_result ( directory ) : data = { } time_ref = float ( 'inf' ) budget_set = set ( ) with open ( os . path . join ( directory , 'configs.json' ) ) as fh : for line in fh : line = json . loads ( line ) if len ( line ) == 3 : config_id , config , config_info = line if len ( line ) == 2 : config_id ... | function to import logged live - results and return a HB_result object |
27,892 | def get_incumbent_id ( self ) : tmp_list = [ ] for k , v in self . data . items ( ) : try : res = v . results [ self . HB_config [ 'max_budget' ] ] if not res is None : tmp_list . append ( ( res [ 'loss' ] , k ) ) except KeyError as e : pass except : raise if len ( tmp_list ) > 0 : return ( min ( tmp_list ) [ 1 ] ) ret... | Find the config_id of the incumbent . |
27,893 | def get_runs_by_id ( self , config_id ) : d = self . data [ config_id ] runs = [ ] for b in d . results . keys ( ) : try : err_logs = d . exceptions . get ( b , None ) if d . results [ b ] is None : r = Run ( config_id , b , None , None , d . time_stamps [ b ] , err_logs ) else : r = Run ( config_id , b , d . results [... | returns a list of runs for a given config id |
def get_learning_curves(self, lc_extractor=extract_HBS_learning_curves, config_ids=None):
    """Extract learning curves for the given run configurations.

    Parameters
    ----------
    lc_extractor : callable
        Maps a list of runs to a list of learning curves; defaults to the
        Hyperband extractor.
    config_ids : iterable, optional
        Config ids to include; all known configs are used when None.

    Returns
    -------
    dict
        Mapping config_id -> list of learning curves.
    """
    if config_ids is None:
        config_ids = self.data.keys()
    # 'cid' rather than 'id' so the builtin id() is not shadowed.
    return {cid: lc_extractor(self.get_runs_by_id(cid)) for cid in config_ids}
def get_all_runs(self, only_largest_budget=False):
    """Collect every run across all configurations.

    When only_largest_budget is True, only the run at the highest budget
    of each configuration is kept; configurations without runs are skipped.
    """
    collected = []
    for config_id in self.data:
        runs = self.get_runs_by_id(config_id)
        if not runs:
            continue
        collected.extend(runs[-1:] if only_largest_budget else runs)
    return collected
def get_id2config_mapping(self):
    """Return a dict mapping each config_id to its actual configuration.

    Each value is a dict with key 'config' and, when the stored datum
    carries one, 'config_info'. Values are deep copies so callers cannot
    mutate internal state.
    """
    mapping = {}
    for config_id, datum in self.data.items():
        entry = {'config': copy.deepcopy(datum.config)}
        # Narrowed from a bare except: only a missing attribute is expected here.
        try:
            entry['config_info'] = copy.deepcopy(datum.config_info)
        except AttributeError:
            pass
        mapping[config_id] = entry
    return mapping
27,897 | def _merge_results ( self ) : new_dict = { } for it in self . data : new_dict . update ( it ) for k , v in new_dict . items ( ) : for kk , vv in v . time_stamps . items ( ) : for kkk , vvv in vv . items ( ) : new_dict [ k ] . time_stamps [ kk ] [ kkk ] = vvv - self . HB_config [ 'time_ref' ] self . data = new_dict | hidden function to merge the list of results into one dictionary and normalize the time stamps |
27,898 | def unsign ( self , value , max_age = None ) : result = super ( TimestampSigner , self ) . unsign ( value ) value , timestamp = result . rsplit ( self . sep , 1 ) timestamp = baseconv . base62 . decode ( timestamp ) if max_age is not None : if isinstance ( max_age , datetime . timedelta ) : max_age = max_age . total_se... | Retrieve original value and check it wasn t signed more than max_age seconds ago . |
def humanize(self, hexdigest, words=4, separator='-'):
    """Humanize a hexadecimal digest into a word phrase.

    The digest is parsed into byte values (consecutive hex-character
    pairs), compressed down to *words* values via self.compress, and each
    value mapped through self.wordlist, joined with *separator*.
    """
    # Renamed from 'bytes'/'byte' to avoid shadowing the builtins.
    byte_values = [int(hexdigest[i:i + 2], 16)
                   for i in range(0, len(hexdigest) - 1, 2)]
    compressed = self.compress(byte_values, words)
    return separator.join(self.wordlist[value] for value in compressed)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.