idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
41,900
def try_lock ( self , timeout = 0 , lease_time = - 1 ) : return self . _encode_invoke ( lock_try_lock_codec , invocation_timeout = MAX_SIZE , lease = to_millis ( lease_time ) , thread_id = thread_id ( ) , timeout = to_millis ( timeout ) , reference_id = self . reference_id_generator . get_and_increment ( ) )
Tries to acquire the lock . When the lock is not available
41,901
def contains_key ( self , key ) : check_not_none ( key , "key can't be None" ) key_data = self . _to_data ( key ) return self . _encode_invoke_on_key ( multi_map_contains_key_codec , key_data , key = key_data , thread_id = thread_id ( ) )
Determines whether this multimap contains an entry with the key .
41,902
def contains_entry ( self , key , value ) : check_not_none ( key , "key can't be None" ) check_not_none ( value , "value can't be None" ) key_data = self . _to_data ( key ) value_data = self . _to_data ( value ) return self . _encode_invoke_on_key ( multi_map_contains_entry_codec , key_data , key = key_data , value = value_data , thread_id = thread_id ( ) )
Returns whether the multimap contains an entry with the value .
41,903
def get ( self , key ) : check_not_none ( key , "key can't be None" ) key_data = self . _to_data ( key ) return self . _encode_invoke_on_key ( multi_map_get_codec , key_data , key = key_data , thread_id = thread_id ( ) )
Returns the list of values associated with the key . None if this map does not contain this key .
41,904
def is_locked ( self , key ) : check_not_none ( key , "key can't be None" ) key_data = self . _to_data ( key ) return self . _encode_invoke_on_key ( multi_map_is_locked_codec , key_data , key = key_data )
Checks the lock for the specified key . If the lock is acquired returns true . Otherwise returns false .
41,905
def remove ( self , key , value ) : check_not_none ( key , "key can't be None" ) check_not_none ( key , "value can't be None" ) key_data = self . _to_data ( key ) value_data = self . _to_data ( value ) return self . _encode_invoke_on_key ( multi_map_remove_entry_codec , key_data , key = key_data , value = value_data , thread_id = thread_id ( ) )
Removes the given key - value tuple from the multimap .
41,906
def remove_all ( self , key ) : check_not_none ( key , "key can't be None" ) key_data = self . _to_data ( key ) return self . _encode_invoke_on_key ( multi_map_remove_codec , key_data , key = key_data , thread_id = thread_id ( ) )
Removes all the entries with the given key and returns the value list associated with this key .
41,907
def put ( self , key , value ) : check_not_none ( key , "key can't be None" ) check_not_none ( value , "value can't be None" ) key_data = self . _to_data ( key ) value_data = self . _to_data ( value ) return self . _encode_invoke_on_key ( multi_map_put_codec , key_data , key = key_data , value = value_data , thread_id = thread_id ( ) )
Stores a key - value tuple in the multimap .
41,908
def value_count ( self , key ) : check_not_none ( key , "key can't be None" ) key_data = self . _to_data ( key ) return self . _encode_invoke_on_key ( multi_map_value_count_codec , key_data , key = key_data , thread_id = thread_id ( ) )
Returns the number of values that match the given key in the multimap .
41,909
def alter ( self , function ) : check_not_none ( function , "function can't be None" ) return self . _encode_invoke ( atomic_reference_alter_codec , function = self . _to_data ( function ) )
Alters the currently stored reference by applying a function on it .
41,910
def alter_and_get ( self , function ) : check_not_none ( function , "function can't be None" ) return self . _encode_invoke ( atomic_reference_alter_and_get_codec , function = self . _to_data ( function ) )
Alters the currently stored reference by applying a function on it and gets the result .
41,911
def contains ( self , expected ) : return self . _encode_invoke ( atomic_reference_contains_codec , expected = self . _to_data ( expected ) )
Checks if the reference contains the value .
41,912
def get_and_alter ( self , function ) : check_not_none ( function , "function can't be None" ) return self . _encode_invoke ( atomic_reference_get_and_alter_codec , function = self . _to_data ( function ) )
Alters the currently stored reference by applying a function on it on and gets the old value .
41,913
def get_and_set ( self , new_value ) : return self . _encode_invoke ( atomic_reference_get_and_set_codec , new_value = self . _to_data ( new_value ) )
Gets the old value and sets the new value .
41,914
def set ( self , new_value ) : return self . _encode_invoke ( atomic_reference_set_codec , new_value = self . _to_data ( new_value ) )
Atomically sets the given value .
41,915
def set_and_get ( self , new_value ) : return self . _encode_invoke ( atomic_reference_set_and_get_codec , new_value = self . _to_data ( new_value ) )
Sets and gets the value .
41,916
def get_type ( self ) : if self . total_size ( ) == 0 : return CONSTANT_TYPE_NULL return unpack_from ( FMT_BE_INT , self . _buffer , TYPE_OFFSET ) [ 0 ]
Returns serialization type of binary form .
41,917
def has_partition_hash ( self ) : return self . _buffer is not None and len ( self . _buffer ) >= HEAP_DATA_OVERHEAD and unpack_from ( FMT_BE_INT , self . _buffer , PARTITION_HASH_OFFSET ) [ 0 ] != 0
Determines whether this Data has partition hash or not .
41,918
def serializer_for ( self , obj ) : if obj is None : return self . _null_serializer obj_type = type ( obj ) serializer = self . lookup_default_serializer ( obj_type , obj ) if serializer is None : serializer = self . lookup_custom_serializer ( obj_type ) if serializer is None : serializer = self . lookup_global_serializer ( obj_type ) if serializer is None : serializer = self . lookup_python_serializer ( obj_type ) if serializer is None : raise HazelcastSerializationError ( "There is no suitable serializer for:" + str ( obj_type ) ) return serializer
Searches for a serializer for the provided object Serializers will be searched in this order ;
41,919
def is_expired ( self , max_idle_seconds ) : now = current_time ( ) return ( self . expiration_time is not None and self . expiration_time < now ) or ( max_idle_seconds is not None and self . last_access_time + max_idle_seconds < now )
Determines whether this record is expired or not .
41,920
def combine_futures ( * futures ) : expected = len ( futures ) results = [ ] completed = AtomicInteger ( ) combined = Future ( ) def done ( f ) : if not combined . done ( ) : if f . is_success ( ) : results . append ( f . result ( ) ) if completed . get_and_increment ( ) + 1 == expected : combined . set_result ( results ) else : combined . set_exception ( f . exception ( ) , f . traceback ( ) ) for future in futures : future . add_done_callback ( done ) return combined
Combines set of Futures .
41,921
def set_result ( self , result ) : if result is None : self . _result = NONE_RESULT else : self . _result = result self . _event . set ( ) self . _invoke_callbacks ( )
Sets the result of the Future .
41,922
def set_exception ( self , exception , traceback = None ) : if not isinstance ( exception , BaseException ) : raise RuntimeError ( "Exception must be of BaseException type" ) self . _exception = exception self . _traceback = traceback self . _event . set ( ) self . _invoke_callbacks ( )
Sets the exception for this Future in case of errors .
41,923
def result ( self ) : self . _reactor_check ( ) self . _event . wait ( ) if self . _exception : six . reraise ( self . _exception . __class__ , self . _exception , self . _traceback ) if self . _result == NONE_RESULT : return None else : return self . _result
Returns the result of the Future which makes the call synchronous if the result has not been computed yet .
41,924
def continue_with ( self , continuation_func , * args ) : future = Future ( ) def callback ( f ) : try : future . set_result ( continuation_func ( f , * args ) ) except : future . set_exception ( sys . exc_info ( ) [ 1 ] , sys . exc_info ( ) [ 2 ] ) self . add_done_callback ( callback ) return future
Create a continuation that executes when the Future is completed .
41,925
def on_auth ( self , f , connection , address ) : if f . is_success ( ) : self . logger . info ( "Authenticated with %s" , f . result ( ) , extra = self . _logger_extras ) with self . _new_connection_mutex : self . connections [ connection . endpoint ] = f . result ( ) try : self . _pending_connections . pop ( address ) except KeyError : pass for on_connection_opened , _ in self . _connection_listeners : if on_connection_opened : on_connection_opened ( f . result ( ) ) return f . result ( ) else : self . logger . debug ( "Error opening %s" , connection , extra = self . _logger_extras ) with self . _new_connection_mutex : try : self . _pending_connections . pop ( address ) except KeyError : pass six . reraise ( f . exception ( ) . __class__ , f . exception ( ) , f . traceback ( ) )
Checks for authentication of a connection .
41,926
def close_connection ( self , address , cause ) : try : connection = self . connections [ address ] connection . close ( cause ) except KeyError : self . logger . warning ( "No connection with %s was found to close." , address , extra = self . _logger_extras ) return False
Closes the connection with given address .
41,927
def start ( self ) : def _heartbeat ( ) : if not self . _client . lifecycle . is_live : return self . _heartbeat ( ) self . _heartbeat_timer = self . _client . reactor . add_timer ( self . _heartbeat_interval , _heartbeat ) self . _heartbeat_timer = self . _client . reactor . add_timer ( self . _heartbeat_interval , _heartbeat )
Starts sending periodic HeartBeat operations .
41,928
def send_message ( self , message ) : if not self . live ( ) : raise IOError ( "Connection is not live." ) message . add_flag ( BEGIN_END_FLAG ) self . write ( message . buffer )
Sends a message to this connection .
41,929
def receive_message ( self ) : while len ( self . _read_buffer ) >= INT_SIZE_IN_BYTES : frame_length = struct . unpack_from ( FMT_LE_INT , self . _read_buffer , 0 ) [ 0 ] if frame_length > len ( self . _read_buffer ) : return message = ClientMessage ( memoryview ( self . _read_buffer ) [ : frame_length ] ) self . _read_buffer = self . _read_buffer [ frame_length : ] self . _builder . on_message ( message )
Receives a message from this connection .
41,930
def init ( self , permits ) : check_not_negative ( permits , "Permits cannot be negative!" ) return self . _encode_invoke ( semaphore_init_codec , permits = permits )
Try to initialize this Semaphore instance with the given permit count .
41,931
def acquire ( self , permits = 1 ) : check_not_negative ( permits , "Permits cannot be negative!" ) return self . _encode_invoke ( semaphore_acquire_codec , permits = permits )
Acquires one or specified amount of permits if available and returns immediately reducing the number of available permits by one or given amount .
41,932
def reduce_permits ( self , reduction ) : check_not_negative ( reduction , "Reduction cannot be negative!" ) return self . _encode_invoke ( semaphore_reduce_permits_codec , reduction = reduction )
Shrinks the number of available permits by the indicated reduction . This method differs from acquire in that it does not block waiting for permits to become available .
41,933
def release ( self , permits = 1 ) : check_not_negative ( permits , "Permits cannot be negative!" ) return self . _encode_invoke ( semaphore_release_codec , permits = permits )
Releases one or given number of permits increasing the number of available permits by one or that amount .
41,934
def try_acquire ( self , permits = 1 , timeout = 0 ) : check_not_negative ( permits , "Permits cannot be negative!" ) return self . _encode_invoke ( semaphore_try_acquire_codec , permits = permits , timeout = to_millis ( timeout ) )
Tries to acquire one or the given number of permits if they are available and returns immediately with the value true reducing the number of available permits by the given amount .
41,935
def add_membership_listener ( self , member_added = None , member_removed = None , fire_for_existing = False ) : self . membership_listeners . append ( ( member_added , member_removed , fire_for_existing ) ) return self
Helper method for adding membership listeners
41,936
def set_custom_serializer ( self , _type , serializer ) : validate_type ( _type ) validate_serializer ( serializer , StreamSerializer ) self . _custom_serializers [ _type ] = serializer
Assign a serializer for the type .
41,937
def get ( self , property ) : return self . _properties . get ( property . name ) or os . getenv ( property . name ) or property . default_value
Gets the value of the given property . First checks client config properties then environment variables and lastly fall backs to the default value of the property .
41,938
def get_bool ( self , property ) : value = self . get ( property ) if isinstance ( value , bool ) : return value return value . lower ( ) == "true"
Gets the value of the given property as boolean .
41,939
def get_seconds ( self , property ) : return TimeUnit . to_seconds ( self . get ( property ) , property . time_unit )
Gets the value of the given property in seconds . If the value of the given property is not a number throws TypeError .
41,940
def get_seconds_positive_or_default ( self , property ) : seconds = self . get_seconds ( property ) return seconds if seconds > 0 else TimeUnit . to_seconds ( property . default_value , property . time_unit )
Gets the value of the given property in seconds . If the value of the given property is not a number throws TypeError . If the value of the given property in seconds is not positive tries to return the default value in seconds .
41,941
def start ( self ) : self . logger . debug ( "Starting partition service" , extra = self . _logger_extras ) def partition_updater ( ) : self . _do_refresh ( ) self . timer = self . _client . reactor . add_timer ( PARTITION_UPDATE_INTERVAL , partition_updater ) self . timer = self . _client . reactor . add_timer ( PARTITION_UPDATE_INTERVAL , partition_updater )
Starts the partition service .
41,942
def get_partition_owner ( self , partition_id ) : if partition_id not in self . partitions : self . _do_refresh ( ) return self . partitions . get ( partition_id , None )
Gets the owner of the partition if it s set . Otherwise it will trigger partition assignment .
41,943
def get_partition_id ( self , key ) : data = self . _client . serialization_service . to_data ( key ) count = self . get_partition_count ( ) if count <= 0 : return 0 return hash_to_index ( data . get_partition_hash ( ) , count )
Returns the partition id for a Data key .
41,944
def murmur_hash3_x86_32 ( data , offset , size , seed = 0x01000193 ) : key = bytearray ( data [ offset : offset + size ] ) length = len ( key ) nblocks = int ( length / 4 ) h1 = seed c1 = 0xcc9e2d51 c2 = 0x1b873593 for block_start in range ( 0 , nblocks * 4 , 4 ) : k1 = key [ block_start + 3 ] << 24 | key [ block_start + 2 ] << 16 | key [ block_start + 1 ] << 8 | key [ block_start + 0 ] k1 = c1 * k1 & 0xFFFFFFFF k1 = ( k1 << 15 | k1 >> 17 ) & 0xFFFFFFFF k1 = ( c2 * k1 ) & 0xFFFFFFFF h1 ^= k1 h1 = ( h1 << 13 | h1 >> 19 ) & 0xFFFFFFFF h1 = ( h1 * 5 + 0xe6546b64 ) & 0xFFFFFFFF tail_index = nblocks * 4 k1 = 0 tail_size = length & 3 if tail_size >= 3 : k1 ^= key [ tail_index + 2 ] << 16 if tail_size >= 2 : k1 ^= key [ tail_index + 1 ] << 8 if tail_size >= 1 : k1 ^= key [ tail_index + 0 ] if tail_size != 0 : k1 = ( k1 * c1 ) & 0xFFFFFFFF k1 = ( k1 << 15 | k1 >> 17 ) & 0xFFFFFFFF k1 = ( k1 * c2 ) & 0xFFFFFFFF h1 ^= k1 result = _fmix ( h1 ^ length ) return - ( result & 0x80000000 ) | ( result & 0x7FFFFFFF )
murmur3 hash function to determine partition
41,945
def add ( self , item ) : check_not_none ( item , "Value can't be None" ) element_data = self . _to_data ( item ) return self . _encode_invoke ( list_add_codec , value = element_data )
Adds the specified item to the end of this list .
41,946
def add_at ( self , index , item ) : check_not_none ( item , "Value can't be None" ) element_data = self . _to_data ( item ) return self . _encode_invoke ( list_add_with_index_codec , index = index , value = element_data )
Adds the specified item at the specific position in this list . Element in this position and following elements are shifted to the right if any .
41,947
def add_all ( self , items ) : check_not_none ( items , "Value can't be None" ) data_items = [ ] for item in items : check_not_none ( item , "Value can't be None" ) data_items . append ( self . _to_data ( item ) ) return self . _encode_invoke ( list_add_all_codec , value_list = data_items )
Adds all of the items in the specified collection to the end of this list . The order of new elements is determined by the specified collection s iterator .
41,948
def add_all_at ( self , index , items ) : check_not_none ( items , "Value can't be None" ) data_items = [ ] for item in items : check_not_none ( item , "Value can't be None" ) data_items . append ( self . _to_data ( item ) ) return self . _encode_invoke ( list_add_all_with_index_codec , index = index , value_list = data_items )
Adds all of the elements in the specified collection into this list at the specified position . Elements in this positions and following elements are shifted to the right if any . The order of new elements is determined by the specified collection s iterator .
41,949
def contains_all ( self , items ) : check_not_none ( items , "Items can't be None" ) data_items = [ ] for item in items : check_not_none ( item , "item can't be None" ) data_items . append ( self . _to_data ( item ) ) return self . _encode_invoke ( list_contains_all_codec , values = data_items )
Determines whether this list contains all of the items in specified collection or not .
41,950
def index_of ( self , item ) : check_not_none ( item , "Value can't be None" ) item_data = self . _to_data ( item ) return self . _encode_invoke ( list_index_of_codec , value = item_data )
Returns the first index of specified items s occurrences in this list . If specified item is not present in this list returns - 1 .
41,951
def last_index_of ( self , item ) : check_not_none ( item , "Value can't be None" ) item_data = self . _to_data ( item ) return self . _encode_invoke ( list_last_index_of_codec , value = item_data )
Returns the last index of specified items s occurrences in this list . If specified item is not present in this list returns - 1 .
41,952
def remove ( self , item ) : check_not_none ( item , "Value can't be None" ) item_data = self . _to_data ( item ) return self . _encode_invoke ( list_remove_codec , value = item_data )
Removes the specified element s first occurrence from the list if it exists in this list .
41,953
def remove_all ( self , items ) : check_not_none ( items , "Value can't be None" ) data_items = [ ] for item in items : check_not_none ( item , "Value can't be None" ) data_items . append ( self . _to_data ( item ) ) return self . _encode_invoke ( list_compare_and_remove_all_codec , values = data_items )
Removes all of the elements that is present in the specified collection from this list .
41,954
def retain_all ( self , items ) : check_not_none ( items , "Value can't be None" ) data_items = [ ] for item in items : check_not_none ( item , "Value can't be None" ) data_items . append ( self . _to_data ( item ) ) return self . _encode_invoke ( list_compare_and_retain_all_codec , values = data_items )
Retains only the items that are contained in the specified collection . It means items which are not present in the specified collection are removed from this list .
41,955
def set_at ( self , index , item ) : check_not_none ( item , "Value can't be None" ) element_data = self . _to_data ( item ) return self . _encode_invoke ( list_set_codec , index = index , value = element_data )
Replaces the specified element with the element at the specified position in this list .
41,956
def add_index ( self , attribute , ordered = False ) : return self . _encode_invoke ( map_add_index_codec , attribute = attribute , ordered = ordered )
Adds an index to this map for the specified entries so that queries can run faster .
41,957
def add_interceptor ( self , interceptor ) : return self . _encode_invoke ( map_add_interceptor_codec , interceptor = self . _to_data ( interceptor ) )
Adds an interceptor for this map . Added interceptor will intercept operations and execute user defined methods .
41,958
def entry_set ( self , predicate = None ) : if predicate : predicate_data = self . _to_data ( predicate ) return self . _encode_invoke ( map_entries_with_predicate_codec , predicate = predicate_data ) else : return self . _encode_invoke ( map_entry_set_codec )
Returns a list clone of the mappings contained in this map .
41,959
def evict ( self , key ) : check_not_none ( key , "key can't be None" ) key_data = self . _to_data ( key ) return self . _evict_internal ( key_data )
Evicts the specified key from this map .
41,960
def execute_on_entries ( self , entry_processor , predicate = None ) : if predicate : return self . _encode_invoke ( map_execute_with_predicate_codec , entry_processor = self . _to_data ( entry_processor ) , predicate = self . _to_data ( predicate ) ) return self . _encode_invoke ( map_execute_on_all_keys_codec , entry_processor = self . _to_data ( entry_processor ) )
Applies the user defined EntryProcessor to all the entries in the map or entries in the map which satisfies the predicate if provided . Returns the results mapped by each key in the map .
41,961
def execute_on_key ( self , key , entry_processor ) : check_not_none ( key , "key can't be None" ) key_data = self . _to_data ( key ) return self . _execute_on_key_internal ( key_data , entry_processor )
Applies the user defined EntryProcessor to the entry mapped by the key . Returns the object which is the result of EntryProcessor s process method .
41,962
def execute_on_keys ( self , keys , entry_processor ) : key_list = [ ] for key in keys : check_not_none ( key , "key can't be None" ) key_list . append ( self . _to_data ( key ) ) if len ( keys ) == 0 : return ImmediateFuture ( [ ] ) return self . _encode_invoke ( map_execute_on_keys_codec , entry_processor = self . _to_data ( entry_processor ) , keys = key_list )
Applies the user defined EntryProcessor to the entries mapped by the collection of keys . Returns the results mapped by each key in the collection .
41,963
def force_unlock ( self , key ) : check_not_none ( key , "key can't be None" ) key_data = self . _to_data ( key ) return self . _encode_invoke_on_key ( map_force_unlock_codec , key_data , key = key_data , reference_id = self . reference_id_generator . get_and_increment ( ) )
Releases the lock for the specified key regardless of the lock owner . It always successfully unlocks the key never blocks and returns immediately .
41,964
def get_all ( self , keys ) : check_not_none ( keys , "keys can't be None" ) if not keys : return ImmediateFuture ( { } ) partition_service = self . _client . partition_service partition_to_keys = { } for key in keys : check_not_none ( key , "key can't be None" ) key_data = self . _to_data ( key ) partition_id = partition_service . get_partition_id ( key_data ) try : partition_to_keys [ partition_id ] [ key ] = key_data except KeyError : partition_to_keys [ partition_id ] = { key : key_data } return self . _get_all_internal ( partition_to_keys )
Returns the entries for the given keys .
41,965
def get_entry_view ( self , key ) : check_not_none ( key , "key can't be None" ) key_data = self . _to_data ( key ) return self . _encode_invoke_on_key ( map_get_entry_view_codec , key_data , key = key_data , thread_id = thread_id ( ) )
Returns the EntryView for the specified key .
41,966
def is_locked ( self , key ) : check_not_none ( key , "key can't be None" ) key_data = self . _to_data ( key ) return self . _encode_invoke_on_key ( map_is_locked_codec , key_data , key = key_data )
Checks the lock for the specified key . If the lock is acquired it returns true . Otherwise it returns false .
41,967
def key_set ( self , predicate = None ) : if predicate : predicate_data = self . _to_data ( predicate ) return self . _encode_invoke ( map_key_set_with_predicate_codec , predicate = predicate_data ) else : return self . _encode_invoke ( map_key_set_codec )
Returns a List clone of the keys contained in this map or the keys of the entries filtered with the predicate if provided .
41,968
def load_all ( self , keys = None , replace_existing_values = True ) : if keys : key_data_list = list ( map ( self . _to_data , keys ) ) return self . _load_all_internal ( key_data_list , replace_existing_values ) else : return self . _encode_invoke ( map_load_all_codec , replace_existing_values = replace_existing_values )
Loads all keys from the store at server side or loads the given keys if provided .
41,969
def put_if_absent ( self , key , value , ttl = - 1 ) : check_not_none ( key , "key can't be None" ) check_not_none ( value , "value can't be None" ) key_data = self . _to_data ( key ) value_data = self . _to_data ( value ) return self . _put_if_absent_internal ( key_data , value_data , ttl )
Associates the specified key with the given value if it is not already associated . If ttl is provided entry will expire and get evicted after the ttl .
41,970
def remove_if_same ( self , key , value ) : check_not_none ( key , "key can't be None" ) check_not_none ( value , "value can't be None" ) key_data = self . _to_data ( key ) value_data = self . _to_data ( value ) return self . _remove_if_same_internal_ ( key_data , value_data )
Removes the entry for a key only if it is currently mapped to a given value .
41,971
def replace ( self , key , value ) : check_not_none ( key , "key can't be None" ) check_not_none ( value , "value can't be None" ) key_data = self . _to_data ( key ) value_data = self . _to_data ( value ) return self . _replace_internal ( key_data , value_data )
Replaces the entry for a key only if it is currently mapped to some value .
41,972
def replace_if_same ( self , key , old_value , new_value ) : check_not_none ( key , "key can't be None" ) check_not_none ( old_value , "old_value can't be None" ) check_not_none ( new_value , "new_value can't be None" ) key_data = self . _to_data ( key ) old_value_data = self . _to_data ( old_value ) new_value_data = self . _to_data ( new_value ) return self . _replace_if_same_internal ( key_data , old_value_data , new_value_data )
Replaces the entry for a key only if it is currently mapped to a given value .
41,973
def set ( self , key , value , ttl = - 1 ) : check_not_none ( key , "key can't be None" ) check_not_none ( value , "value can't be None" ) key_data = self . _to_data ( key ) value_data = self . _to_data ( value ) return self . _set_internal ( key_data , value_data , ttl )
Puts an entry into this map . Similar to the put operation except that set doesn t return the old value which is more efficient . If ttl is provided entry will expire and get evicted after the ttl .
41,974
def try_put ( self , key , value , timeout = 0 ) : check_not_none ( key , "key can't be None" ) check_not_none ( value , "value can't be None" ) key_data = self . _to_data ( key ) value_data = self . _to_data ( value ) return self . _try_put_internal ( key_data , value_data , timeout )
Tries to put the given key and value into this map and returns immediately if timeout is not provided . If timeout is provided operation waits until it is completed or timeout is reached .
41,975
def try_remove ( self , key , timeout = 0 ) : check_not_none ( key , "key can't be None" ) key_data = self . _to_data ( key ) return self . _try_remove_internal ( key_data , timeout )
Tries to remove the given key from this map and returns immediately if timeout is not provided . If timeout is provided operation waits until it is completed or timeout is reached .
41,976
def unlock ( self , key ) : check_not_none ( key , "key can't be None" ) key_data = self . _to_data ( key ) return self . _encode_invoke_on_key ( map_unlock_codec , key_data , key = key_data , thread_id = thread_id ( ) , reference_id = self . reference_id_generator . get_and_increment ( ) )
Releases the lock for the specified key . It never blocks and returns immediately . If the current thread is the holder of this lock then the hold count is decremented . If the hold count is zero then the lock is released .
41,977
def values ( self , predicate = None ) : if predicate : predicate_data = self . _to_data ( predicate ) return self . _encode_invoke ( map_values_with_predicate_codec , predicate = predicate_data ) else : return self . _encode_invoke ( map_values_codec )
Returns a list clone of the values contained in this map or values of the entries which are filtered with the predicate if provided .
41,978
def new_transaction ( self , timeout , durability , transaction_type ) : connection = self . _connect ( ) return Transaction ( self . _client , connection , timeout , durability , transaction_type )
Creates a Transaction object with given timeout durability and transaction type .
41,979
def begin ( self ) : if hasattr ( self . _locals , 'transaction_exists' ) and self . _locals . transaction_exists : raise TransactionError ( "Nested transactions are not allowed." ) if self . state != _STATE_NOT_STARTED : raise TransactionError ( "Transaction has already been started." ) self . _locals . transaction_exists = True self . start_time = time . time ( ) self . thread_id = thread_id ( ) try : request = transaction_create_codec . encode_request ( timeout = int ( self . timeout * 1000 ) , durability = self . durability , transaction_type = self . transaction_type , thread_id = self . thread_id ) response = self . client . invoker . invoke_on_connection ( request , self . connection ) . result ( ) self . id = transaction_create_codec . decode_response ( response ) [ "response" ] self . state = _STATE_ACTIVE except : self . _locals . transaction_exists = False raise
Begins this transaction .
41,980
def commit ( self ) : self . _check_thread ( ) if self . state != _STATE_ACTIVE : raise TransactionError ( "Transaction is not active." ) try : self . _check_timeout ( ) request = transaction_commit_codec . encode_request ( self . id , self . thread_id ) self . client . invoker . invoke_on_connection ( request , self . connection ) . result ( ) self . state = _STATE_COMMITTED except : self . state = _STATE_PARTIAL_COMMIT raise finally : self . _locals . transaction_exists = False
Commits this transaction .
41,981
def rollback ( self ) : self . _check_thread ( ) if self . state not in ( _STATE_ACTIVE , _STATE_PARTIAL_COMMIT ) : raise TransactionError ( "Transaction is not active." ) try : if self . state != _STATE_PARTIAL_COMMIT : request = transaction_rollback_codec . encode_request ( self . id , self . thread_id ) self . client . invoker . invoke_on_connection ( request , self . connection ) . result ( ) self . state = _STATE_ROLLED_BACK finally : self . _locals . transaction_exists = False
Rollback of this current transaction .
41,982
def load_addresses ( self ) : try : return list ( self . cloud_discovery . discover_nodes ( ) . keys ( ) ) except Exception as ex : self . logger . warning ( "Failed to load addresses from Hazelcast.cloud: {}" . format ( ex . args [ 0 ] ) , extra = self . _logger_extras ) return [ ]
Loads member addresses from Hazelcast . cloud endpoint .
41,983
def translate ( self , address ) : if address is None : return None public_address = self . _private_to_public . get ( address ) if public_address : return public_address self . refresh ( ) return self . _private_to_public . get ( address )
Translates the given address to another address specific to network or service .
41,984
def refresh(self):
    """Refresh the private-to-public address lookup table.

    On success, replaces ``self._private_to_public`` with a freshly
    discovered mapping; on failure, the previous table is kept and a
    warning is logged (never raised).
    """
    try:
        self._private_to_public = self.cloud_discovery.discover_nodes()
    except Exception as ex:
        # Log the exception itself rather than ex.args[0]: ex.args may be
        # empty, and indexing it would raise IndexError in this handler.
        self.logger.warning("Failed to load addresses from Hazelcast.cloud: {}".format(ex),
                            extra=self._logger_extras)
Refreshes the internal lookup table if necessary .
41,985
def get_host_and_url(properties, cloud_token):
    """Split the configured cloud base URL into a scheme-less host plus the
    token-specific discovery path, suitable for an HTTPSConnection.

    Returns:
        tuple: ``(host, url_path)``.
    """
    base = properties.get(HazelcastCloudDiscovery.CLOUD_URL_BASE_PROPERTY.name,
                          HazelcastCloudDiscovery.CLOUD_URL_BASE_PROPERTY.default_value)
    # Strip any URL scheme so only the bare host remains.
    for scheme in ("https://", "http://"):
        base = base.replace(scheme, "")
    return base, HazelcastCloudDiscovery._CLOUD_URL_PATH + cloud_token
Helper method to get host and url that can be used in HTTPSConnection .
41,986
def try_set_count(self, count):
    """Set the count to ``count`` if, and only if, the current count is zero.

    Does nothing (and resolves to false) when the current count is non-zero.

    Args:
        count: the new count; must be non-negative.
    """
    check_not_negative(count, "count can't be negative")
    return self._encode_invoke(count_down_latch_try_set_count_codec, count=count)
Sets the count to the given value if the current count is zero . If count is not zero this method does nothing and returns false .
41,987
def destroy(self):
    """Destroy this proxy.

    Runs the local ``_on_destroy`` hook first, then asks the client's proxy
    manager to destroy the distributed object, returning that call's result.
    """
    self._on_destroy()
    proxy_manager = self._client.proxy
    return proxy_manager.destroy_proxy(self.service_name, self.name)
Destroys this proxy .
41,988
def add_listener(self, member_added=None, member_removed=None, fire_for_existing=False):
    """Register a membership listener.

    The listener is notified when a member joins or leaves the cluster.
    Duplicate registrations are not detected: registering the same listener
    twice means it receives every event twice.

    Args:
        member_added: callback invoked with the joining member.
        member_removed: callback invoked with the leaving member.
        fire_for_existing: when True, ``member_added`` is immediately
            invoked for every member already in the cluster.

    Returns:
        str: a registration id usable with ``remove_listener``.
    """
    registration_id = str(uuid.uuid4())
    self.listeners[registration_id] = (member_added, member_removed)
    if fire_for_existing:
        for existing_member in self.get_member_list():
            member_added(existing_member)
    return registration_id
Adds a membership listener to listen for membership updates . It will be notified when a member is added to the cluster or removed from the cluster . There is no check for duplicate registrations , so if you register the listener twice it will get events twice .
41,989
def remove_listener(self, registration_id):
    """Remove a previously registered membership listener.

    Args:
        registration_id: the id returned by ``add_listener``.

    Returns:
        bool: True when a listener was removed, False when the id is unknown.
    """
    if registration_id in self.listeners:
        del self.listeners[registration_id]
        return True
    return False
Removes the specified membership listener .
41,990
def get_member_by_uuid(self, member_uuid):
    """Return the cluster member whose ``uuid`` equals ``member_uuid``.

    Returns None when no such member exists.
    """
    return next((member for member in self.get_member_list()
                 if member.uuid == member_uuid), None)
Returns the member with specified member uuid .
41,991
def get_members(self, selector):
    """Return the members accepted by ``selector``.

    Args:
        selector: object whose ``select(member)`` decides membership.

    Returns:
        list: members for which ``selector.select`` returned a truthy value.
    """
    return [member for member in self.get_member_list() if selector.select(member)]
Returns the members that satisfy the given selector .
41,992
def is_after(self, other):
    """Return True when this vector clock is causally strictly after ``other``.

    Every replica timestamp in ``other`` must exist locally and be at least
    as large; on top of that, at least one local timestamp must be strictly
    larger, or this clock must know strictly more replicas.
    """
    greater_seen = False
    for replica_id, other_timestamp in other.entry_set():
        local_timestamp = self._replica_timestamps.get(replica_id)
        # A missing or smaller local entry means we are not after `other`.
        if local_timestamp is None or local_timestamp < other_timestamp:
            return False
        greater_seen = greater_seen or local_timestamp > other_timestamp
    return greater_seen or other.size() < self.size()
Returns true if this vector clock is causally strictly after the provided vector clock . This means that the provided clock is neither equal to , greater than , nor concurrent to this vector clock .
41,993
def contains(self, item):
    """Determine whether this set contains the given item.

    Args:
        item: the element to look for; must not be None.
    """
    check_not_none(item, "Value can't be None")
    return self._encode_invoke(set_contains_codec, value=self._to_data(item))
Determines whether this set contains the specified item or not .
41,994
def contains_all(self, items):
    """Determine whether this set contains every item in ``items``.

    Args:
        items: collection of elements; neither the collection nor any
            element may be None.
    """
    check_not_none(items, "Value can't be None")

    def _checked_data(element):
        # Validate and serialize in one pass so `items` is only iterated once.
        check_not_none(element, "Value can't be None")
        return self._to_data(element)

    serialized_items = [_checked_data(element) for element in items]
    return self._encode_invoke(set_contains_all_codec, items=serialized_items)
Determines whether this set contains all of the items in the specified collection or not .
41,995
def alter(self, function):
    """Alter the currently stored value by applying ``function`` to it.

    Args:
        function: the function to apply; must not be None.
    """
    check_not_none(function, "function can't be None")
    function_data = self._to_data(function)
    return self._encode_invoke(atomic_long_alter_codec, function=function_data)
Alters the currently stored value by applying a function on it .
41,996
def alter_and_get(self, function):
    """Alter the stored value by applying ``function`` and get the new value.

    Args:
        function: the function to apply; must not be None.
    """
    check_not_none(function, "function can't be None")
    function_data = self._to_data(function)
    return self._encode_invoke(atomic_long_alter_and_get_codec, function=function_data)
Alters the currently stored value by applying a function on it and gets the result .
41,997
def get_and_alter(self, function):
    """Alter the stored value by applying ``function`` and get the old value.

    Args:
        function: the function to apply; must not be None.
    """
    check_not_none(function, "function can't be None")
    function_data = self._to_data(function)
    return self._encode_invoke(atomic_long_get_and_alter_codec, function=function_data)
Alters the currently stored value by applying a function on it and gets the old value .
41,998
def main(jlink_serial, device):
    """Print identification info for the core behind a J-Link emulator.

    Args:
        jlink_serial: serial number of the J-Link to open.
        device: target device name passed to ``JLink.connect``.

    NOTE(review): uses ``StringIO.StringIO``, which only exists on
    Python 2 (Python 3 would need ``io.StringIO``) — confirm the intended
    interpreter for this example.
    """
    # Capture the J-Link library's log output in an in-memory buffer
    # instead of letting it interleave with the printed results.
    buf = StringIO.StringIO()
    jlink = pylink.JLink(log=buf.write, detailed_log=buf.write)
    jlink.open(serial_no=jlink_serial)
    # Select the SWD interface before connecting to the target.
    jlink.set_tif(pylink.enums.JLinkInterfaces.SWD)
    jlink.connect(device, verbose=True)
    sys.stdout.write('ARM Id: %d\n' % jlink.core_id())
    sys.stdout.write('CPU Id: %d\n' % jlink.core_cpu())
    sys.stdout.write('Core Name: %s\n' % jlink.core_name())
    sys.stdout.write('Device Family: %d\n' % jlink.device_family())
Prints the core s information .
41,999
def acquire(self):
    """Attempt to acquire the J-Link lockfile at ``self.path``.

    If a lockfile already exists, the PID stored in it is read; the file
    is removed when that PID is no longer running (stale lock) or when it
    does not contain a valid integer. The lock is then taken by creating
    the file exclusively and writing this process's PID into it.

    Returns:
        bool: True when the lock was acquired (``self.acquired`` is set),
        False when another live process still holds the lockfile.

    Raises:
        OSError: when the exclusive create fails for a reason other than
        the lockfile existing.
    """
    if os.path.exists(self.path):
        try:
            pid = None
            with open(self.path, 'r') as f:
                line = f.readline().strip()
                pid = int(line)
            # Stale lock: owning process is gone, reclaim the file.
            if not psutil.pid_exists(pid):
                os.remove(self.path)
        except ValueError as e:
            # Lockfile content is not a valid integer PID -- treat as stale.
            os.remove(self.path)
        except IOError as e:
            # Could not read the lockfile; fall through and let the
            # exclusive create below decide who owns the lock.
            pass
    try:
        # O_CREAT | O_EXCL makes creation atomic: it fails if the file
        # (re)appeared between the check above and this call.
        self.fd = os.open(self.path, os.O_CREAT | os.O_EXCL | os.O_RDWR)
        to_write = '%s%s' % (os.getpid(), os.linesep)
        os.write(self.fd, to_write.encode())
    except OSError as e:
        # File now exists -> another process won the race: report no lock.
        # File still absent -> unexpected failure: re-raise.
        if not os.path.exists(self.path):
            raise
        return False
    self.acquired = True
    return True
Attempts to acquire a lock for the J - Link lockfile .