query
stringlengths
5
1.23k
positive
stringlengths
53
15.2k
id_
int64
0
252k
task_name
stringlengths
87
242
negative
listlengths
20
553
Create a Solr search index for Yokozuna.
def create_search_index(self, index, schema=None, n_val=None, timeout=None):
    """Create a Search 2.0 (Yokozuna) index over HTTP.

    :param index: the name of the index to create
    :param schema: the name of the schema to associate (optional)
    :param n_val: the replication factor for the index (optional)
    :param timeout: index-creation timeout (optional)
    :rtype: bool
    """
    if not self.yz_wm_index:
        raise NotImplementedError("Search 2.0 administration is not "
                                  "supported for this version")
    url = self.search_index_path(index)
    body = {}
    if schema:
        body['schema'] = schema
    if n_val:
        body['n_val'] = n_val
    if timeout:
        body['timeout'] = timeout
    # Run the request...
    status, _, _ = self._request('PUT', url,
                                 {'Content-Type': 'application/json'},
                                 json.dumps(body))
    if status != 204:
        raise RiakError('Error setting Search 2.0 index.')
    return True
251,700
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/http/transport.py#L447-L483
[ "def", "_setup_from_data", "(", "self", ",", "data", ")", ":", "fitted_state", "=", "{", "}", "_raise_error_if_not_of_type", "(", "data", ",", "[", "_SFrame", "]", ")", "feature_columns", "=", "_internal_utils", ".", "get_column_names", "(", "data", ",", "self...
Return a list of Solr search indexes from Yokozuna .
def list_search_indexes(self):
    """Return the list of Search 2.0 (Yokozuna) indexes.

    :rtype: list of dicts
    """
    if not self.yz_wm_index:
        raise NotImplementedError("Search 2.0 administration is not "
                                  "supported for this version")
    url = self.search_index_path()
    # Run the request...
    status, _headers, body = self._request('GET', url)
    if status != 200:
        raise RiakError('Error getting Search 2.0 index.')
    # The body is a JSON array of index dicts.
    return json.loads(bytes_to_str(body))
251,701
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/http/transport.py#L508-L528
[ "def", "validateExtractOptions", "(", "options", ")", ":", "if", "not", "options", ".", "pattern", "and", "not", "options", ".", "pattern2", ":", "if", "not", "options", ".", "read2_in", ":", "U", ".", "error", "(", "\"Must supply --bc-pattern for single-end\"",...
Create a new Solr schema for Yokozuna .
def create_search_schema(self, schema, content):
    """Upload a new Solr schema for Search 2.0 (Yokozuna).

    :param schema: the name of the schema
    :param content: the XML content of the schema
    :rtype: bool
    """
    if not self.yz_wm_schema:
        raise NotImplementedError("Search 2.0 administration is not "
                                  "supported for this version")
    url = self.search_schema_path(schema)
    # Run the request...
    status, _header, _body = self._request('PUT', url,
                                           {'Content-Type':
                                            'application/xml'},
                                           content)
    if status != 204:
        raise RiakError('Error creating Search 2.0 schema.')
    return True
251,702
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/http/transport.py#L552-L575
[ "def", "_get_purecn_dx_files", "(", "paired", ",", "out", ")", ":", "out_base", "=", "\"%s-dx\"", "%", "utils", ".", "splitext_plus", "(", "out", "[", "\"rds\"", "]", ")", "[", "0", "]", "all_files", "=", "[", "]", "for", "key", ",", "ext", "in", "["...
Fetch a Solr schema from Yokozuna .
def get_search_schema(self, schema):
    """Fetch a Solr schema from Search 2.0 (Yokozuna).

    :param schema: the name of the schema
    :rtype: dict with 'name' and 'content' keys
    """
    if not self.yz_wm_schema:
        raise NotImplementedError("Search 2.0 administration is not "
                                  "supported for this version")
    url = self.search_schema_path(schema)
    # Run the request...
    status, _, body = self._request('GET', url)
    if status != 200:
        raise RiakError('Error getting Search 2.0 schema.')
    return {'name': schema, 'content': bytes_to_str(body)}
251,703
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/http/transport.py#L577-L600
[ "def", "_get_purecn_dx_files", "(", "paired", ",", "out", ")", ":", "out_base", "=", "\"%s-dx\"", "%", "utils", ".", "splitext_plus", "(", "out", "[", "\"rds\"", "]", ")", "[", "0", "]", "all_files", "=", "[", "]", "for", "key", ",", "ext", "in", "["...
Performs a search query .
def search(self, index, query, **params):
    """Perform a Solr search query and normalize the response.

    :param index: the index to search (defaults to 'search' when None)
    :param query: the Solr query string
    :param params: extra Solr options; 'op' is mapped to 'q.op'
    :raises ValueError: when the response content-type is unsupported
    """
    if index is None:
        index = 'search'
    options = {}
    if 'op' in params:
        # Solr carries the default operator under the 'q.op' key.
        options['q.op'] = params.pop('op')
    options.update(params)
    url = self.solr_select_path(index, query, **options)
    status, headers, data = self._request('GET', url)
    self.check_http_code(status, [200])
    content_type = headers['content-type']
    if 'json' in content_type:
        return self._normalize_json_search_response(
            json.loads(bytes_to_str(data)))
    if 'xml' in content_type:
        return self._normalize_xml_search_response(data)
    raise ValueError("Could not decode search response")
251,704
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/http/transport.py#L602-L624
[ "def", "reassign_comment_to_book", "(", "self", ",", "comment_id", ",", "from_book_id", ",", "to_book_id", ")", ":", "# Implemented from template for", "# osid.resource.ResourceBinAssignmentSession.reassign_resource_to_bin", "self", ".", "assign_comment_to_book", "(", "comment_id...
Adds documents to the search index .
def fulltext_add(self, index, docs):
    """POST a batch of documents to the Solr full-text index.

    :param index: the index to add documents to
    :param docs: iterable of dicts mapping field names to values
    """
    xml = Document()
    add_elem = xml.createElement('add')
    for doc in docs:
        doc_elem = xml.createElement('doc')
        for key in doc:
            field = xml.createElement('field')
            field.setAttribute("name", key)
            field.appendChild(xml.createTextNode(doc[key]))
            doc_elem.appendChild(field)
        add_elem.appendChild(doc_elem)
    xml.appendChild(add_elem)
    self._request('POST', self.solr_update_path(index),
                  {'Content-Type': 'text/xml'},
                  xml.toxml().encode('utf-8'))
251,705
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/http/transport.py#L626-L646
[ "def", "read_pgroups", "(", "in_file", ")", ":", "out", "=", "{", "}", "with", "open", "(", "in_file", ")", "as", "in_handle", ":", "for", "line", "in", "(", "l", "for", "l", "in", "in_handle", "if", "not", "l", ".", "startswith", "(", "\"#\"", ")"...
Removes documents from the full-text index.
def fulltext_delete(self, index, docs=None, queries=None):
    """POST a delete request to the Solr full-text index.

    :param index: the index to delete from
    :param docs: iterable of document ids to delete (optional)
    :param queries: iterable of queries whose matches are deleted (optional)
    """
    xml = Document()
    root = xml.createElement('delete')
    for doc_id in (docs or []):
        id_elem = xml.createElement('id')
        id_elem.appendChild(xml.createTextNode(doc_id))
        root.appendChild(id_elem)
    for query in (queries or []):
        query_elem = xml.createElement('query')
        query_elem.appendChild(xml.createTextNode(query))
        root.appendChild(query_elem)
    xml.appendChild(root)
    self._request('POST', self.solr_update_path(index),
                  {'Content-Type': 'text/xml'},
                  xml.toxml().encode('utf-8'))
251,706
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/http/transport.py#L648-L671
[ "def", "notifications_dismiss", "(", "self", ",", "id", ")", ":", "id", "=", "self", ".", "__unpack_id", "(", "id", ")", "params", "=", "self", ".", "__generate_params", "(", "locals", "(", ")", ")", "self", ".", "__api_request", "(", "'POST'", ",", "'...
Releases this resource back to the pool it came from .
def release(self):
    """Return this resource to its pool.

    Errored resources are deleted from the pool instead of being
    made available for reuse.
    """
    if self.errored:
        self.pool.delete_resource(self)
        return
    self.pool.release(self)
251,707
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/pool.py#L76-L83
[ "def", "_ion_equals", "(", "a", ",", "b", ",", "timestamp_comparison_func", ",", "recursive_comparison_func", ")", ":", "for", "a", ",", "b", "in", "(", "(", "a", ",", "b", ")", ",", "(", "b", ",", "a", ")", ")", ":", "# Ensures that operand order does n...
Deletes the resource from the pool and destroys the associated resource . Not usually needed by users of the pool but called internally when BadResource is raised .
def delete_resource(self, resource):
    """Remove *resource* from the pool and destroy its wrapped object.

    Not usually needed by users of the pool, but called internally
    when BadResource is raised.

    :param resource: the resource wrapper to remove
    """
    # NOTE(review): the flattened source does not show the `with`
    # block's extent; destroying outside the lock matches upstream.
    with self.lock:
        self.resources.remove(resource)
    self.destroy_resource(resource.object)
    del resource
251,708
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/pool.py#L209-L221
[ "def", "benchmark_setup", "(", "self", ")", ":", "def", "f", "(", ")", ":", "self", ".", "_setup", "(", ")", "self", ".", "mod_ext", ".", "synchronize", "(", "*", "*", "self", ".", "ext_kwargs", ")", "f", "(", ")", "# Ignore first", "self", ".", "s...
Returns an Erlang-TTB encoded tuple with the appropriate data and metadata from a TsObject.
def encode_timeseries_put(self, tsobj):
    """Build an Erlang-TTB encoded put Msg from a TsObject.

    :param tsobj: the TsObject to encode; must carry a list of rows
    :raises RiakError: when the object has no list of rows
    :rtype: Msg
    """
    if tsobj.columns:
        raise NotImplementedError('columns are not used')
    if not (tsobj.rows and isinstance(tsobj.rows, list)):
        raise RiakError("TsObject requires a list of rows")
    encoded_rows = [tuple(self.encode_to_ts_cell(cell) for cell in row)
                    for row in tsobj.rows]
    req = (tsputreq_a, tsobj.table.name, [], encoded_rows)
    # Request and response share the single TTB message code.
    mc = MSG_CODE_TS_TTB_MSG
    rc = MSG_CODE_TS_TTB_MSG
    return Msg(mc, encode(req), rc)
251,709
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/codecs/ttb.py#L116-L140
[ "def", "get", "(", "self", ",", "request", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "private_file", "=", "self", ".", "get_private_file", "(", ")", "if", "not", "self", ".", "can_access_file", "(", "private_file", ")", ":", "return", "HttpR...
Decodes a TTB-encoded TsRow into a list.
def decode_timeseries_row(self, tsrow, tsct, convert_timestamp=False):
    """Decode a TTB-encoded TsRow into a plain list of values.

    :param tsrow: the decoded TTB row (iterable of cells)
    :param tsct: the column types for the row
    :param convert_timestamp: when True, timestamp columns are
        converted to datetime objects
    :rtype: list
    """
    decoded = []
    for idx, cell in enumerate(tsrow):
        # TTB encodes NULL either as None or as an empty list.
        if cell is None or (isinstance(cell, list) and not cell):
            decoded.append(None)
        elif convert_timestamp and tsct[idx] == timestamp_a:
            decoded.append(datetime_from_unix_time_millis(cell))
        else:
            decoded.append(cell)
    return decoded
251,710
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/codecs/ttb.py#L205-L228
[ "def", "add_configuration_file", "(", "self", ",", "file_name", ")", ":", "logger", ".", "info", "(", "'adding %s to configuration files'", ",", "file_name", ")", "if", "file_name", "not", "in", "self", ".", "configuration_files", "and", "self", ".", "_inotify", ...
Extracts the modification operation from the set .
def to_op(self):
    """Extract the staged modification operation from the set.

    :rtype: dict with 'adds'/'removes' keys, or None when unmodified
    """
    if not (self._adds or self._removes):
        return None
    op = {}
    if self._adds:
        op['adds'] = list(self._adds)
    if self._removes:
        op['removes'] = list(self._removes)
    return op
251,711
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/datatypes/set.py#L60-L73
[ "def", "density_hub", "(", "self", ",", "weather_df", ")", ":", "if", "self", ".", "density_model", "!=", "'interpolation_extrapolation'", ":", "temperature_hub", "=", "self", ".", "temperature_hub", "(", "weather_df", ")", "# Calculation of density in kg/m³ at hub heig...
Removes an element from the set .
def discard(self, element):
    """Stage the removal of *element* from the set.

    :param element: the element to remove
    """
    # Validate the element and require a causal context before
    # staging a remove.
    _check_element(element)
    self._require_context()
    self._removes.add(element)
251,712
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/datatypes/set.py#L101-L113
[ "def", "regenerate_recovery_code", "(", "self", ",", "user_id", ")", ":", "url", "=", "self", ".", "_url", "(", "'{}/recovery-code-regeneration'", ".", "format", "(", "user_id", ")", ")", "return", "self", ".", "client", ".", "post", "(", "url", ")" ]
Get one value matching the key raising a KeyError if multiple values were found .
def getone(self, key):
    """Return the single value matching *key*.

    :raises KeyError: when the key is absent or matches multiple values
    """
    values = self.getall(key)
    if not values:
        raise KeyError('Key not found: %r' % key)
    if len(values) > 1:
        raise KeyError('Multiple values match %r: %r' % (key, values))
    return values[0]
251,713
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/multidict.py#L73-L83
[ "def", "api_download", "(", "service", ",", "fileId", ",", "authorisation", ")", ":", "data", "=", "tempfile", ".", "SpooledTemporaryFile", "(", "max_size", "=", "SPOOL_SIZE", ",", "mode", "=", "'w+b'", ")", "headers", "=", "{", "'Authorization'", ":", "'sen...
Returns a dictionary where each key is associated with a list of values .
def dict_of_lists(self):
    """Return a dict mapping each key to the list of its values.

    :rtype: dict
    """
    grouped = {}
    for key, value in self._items:
        grouped.setdefault(key, []).append(value)
    return grouped
251,714
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/multidict.py#L108-L119
[ "def", "_at", "(", "self", ",", "t", ")", ":", "rITRF", ",", "vITRF", ",", "error", "=", "self", ".", "ITRF_position_velocity_error", "(", "t", ")", "rGCRS", ",", "vGCRS", "=", "ITRF_to_GCRS2", "(", "t", ",", "rITRF", ",", "vITRF", ")", "return", "rG...
Enqueues a fetch task to the pool of workers . This will raise a RuntimeError if the pool is stopped or in the process of stopping .
def enq(self, task):
    """Enqueue a fetch task onto the worker pool.

    :raises RuntimeError: if the pool is stopped or stopping
    """
    if self._stop.is_set():
        raise RuntimeError("Attempted to enqueue an operation while "
                           "multi pool was shutdown!")
    self._inq.put(task)
251,715
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/client/multi.py#L73-L86
[ "def", "parse_files", "(", "self", ")", ":", "log_re", "=", "self", ".", "log_format_regex", "log_lines", "=", "[", "]", "for", "log_file", "in", "self", ".", "matching_files", "(", ")", ":", "with", "open", "(", "log_file", ")", "as", "f", ":", "match...
Starts the worker threads if they are not already started . This method is thread - safe and will be called automatically when executing an operation .
def start(self):
    """Start the worker threads if they are not already started.

    Thread-safe; called automatically when executing an operation.
    """
    # Already started: nothing to do.
    if self._started.is_set():
        return
    # Race to become the thread that performs startup.
    if self._lock.acquire(False):
        # We hold the lock: spawn the workers, flag started, release.
        for idx in range(self._size):
            name = "riak.client.multi-worker-{0}-{1}".format(self._name,
                                                             idx)
            worker = Thread(target=self._worker_method, name=name)
            worker.daemon = False
            worker.start()
            self._workers.append(worker)
        self._started.set()
        self._lock.release()
    else:
        # Another thread is starting the workers; wait for its signal.
        self._started.wait()
251,716
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/client/multi.py#L88-L113
[ "def", "update_repodata", "(", "self", ",", "channels", "=", "None", ")", ":", "norm_channels", "=", "self", ".", "conda_get_condarc_channels", "(", "channels", "=", "channels", ",", "normalize", "=", "True", ")", "repodata_urls", "=", "self", ".", "_set_repo_...
Signals the worker threads to exit and waits on them .
def stop(self):
    """Signal the worker threads to exit and wait on them."""
    if self.stopped():
        return
    self._stop.set()
    for worker in self._workers:
        worker.join()
251,717
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/client/multi.py#L115-L122
[ "def", "characterize_local_files", "(", "filedir", ",", "max_bytes", "=", "MAX_FILE_DEFAULT", ")", ":", "file_data", "=", "{", "}", "logging", ".", "info", "(", "'Characterizing files in {}'", ".", "format", "(", "filedir", ")", ")", "for", "filename", "in", "...
Ensures well - formedness of a key .
def _check_key(self, key):
    """Validate that *key* is a well-formed (name, datatype) pair.

    :raises TypeError: when the key is malformed or the datatype unknown
    """
    if len(key) != 2:
        raise TypeError('invalid key: %r' % key)
    if key[1] not in TYPES:
        raise TypeError('invalid datatype: %s' % key[1])
251,718
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/datatypes/map.py#L227-L234
[ "def", "getSrcBlocks", "(", "self", ",", "url", ",", "dataset", "=", "\"\"", ",", "block", "=", "\"\"", ")", ":", "if", "block", ":", "params", "=", "{", "'block_name'", ":", "block", ",", "'open_for_writing'", ":", "0", "}", "elif", "dataset", ":", ...
Whether the map has staged local modifications .
def modified(self):
    """Whether the map has staged local modifications.

    True when there are staged removes, or when any current value or
    staged update reports itself modified.
    """
    if self._removes:
        return True
    if any(self._value[k].modified for k in self._value):
        return True
    if any(self._updates[k].modified for k in self._updates):
        return True
    return False
251,719
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/datatypes/map.py#L252-L264
[ "def", "get_keyvault", "(", "access_token", ",", "subscription_id", ",", "rgname", ",", "vault_name", ")", ":", "endpoint", "=", "''", ".", "join", "(", "[", "get_rm_endpoint", "(", ")", ",", "'/subscriptions/'", ",", "subscription_id", ",", "'/resourcegroups/'"...
Change the PB files to use full pathnames for Python 3.x and modify the metaclasses to be version-agnostic.
def _format_python2_or_3(self):
    """Rewrite generated protobuf modules for Python 2/3 compatibility.

    Changes the PB files to use full package paths for Python 3.x and
    rewrites the protoc-generated ``__metaclass__`` assignments into
    version-agnostic ``@add_metaclass`` decorators.
    """
    pb_files = set()
    with open(self.source, 'r', buffering=1) as csvfile:
        reader = csv.reader(csvfile)
        for row in reader:
            _, _, proto = row
            pb_files.add('riak/pb/{0}_pb2.py'.format(proto))
    for im in sorted(pb_files):
        with open(im, 'r', buffering=1) as pbfile:
            contents = 'from six import *\n' + pbfile.read()
            contents = re.sub(r'riak_pb2', r'riak.pb.riak_pb2', contents)
            # Look for this pattern in the protoc-generated file:
            #
            # class RpbCounterGetResp(_message.Message):
            #     __metaclass__ = _reflection.GeneratedProtocolMessageType
            #
            # and convert it to:
            #
            # @add_metaclass(_reflection.GeneratedProtocolMessageType)
            # class RpbCounterGetResp(_message.Message):
            #
            # FIX: the second pattern fragment was a plain (non-raw)
            # string, so '\s' was an invalid escape sequence — a
            # SyntaxWarning on modern CPython and slated to become an
            # error. Both fragments are now raw strings; the compiled
            # pattern is unchanged.
            contents = re.sub(r'class\s+(\S+)\((\S+)\):\s*\n'
                              r'\s+__metaclass__\s+=\s+(\S+)\s*\n',
                              r'@add_metaclass(\3)\nclass \1(\2):\n',
                              contents)
        with open(im, 'w', buffering=1) as pbfile:
            pbfile.write(contents)
251,720
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/commands.py#L372-L405
[ "def", "setThrowException", "(", "self", ",", "setting", ")", ":", "if", "setting", ":", "self", ".", "_throwException", "=", "True", "self", ".", "_findFailedResponse", "=", "\"ABORT\"", "else", ":", "self", ".", "_throwException", "=", "False", "self", "."...
Reloads the datatype from Riak .
def reload(self, **params):
    """Reload the datatype's value and context from Riak.

    :raises ValueError: when the bucket or key property is unset
    :raises TypeError: when the fetched datatype does not match
    :rtype: this datatype instance
    """
    if not self.bucket:
        raise ValueError('bucket property not assigned')
    if not self.key:
        raise ValueError('key property not assigned')
    dtype, value, context = self.bucket._client._fetch_datatype(
        self.bucket, self.key, **params)
    if dtype != self.type_name:
        raise TypeError("Expected datatype {} but "
                        "got datatype {}".format(self.__class__,
                                                 TYPES[dtype]))
    self.clear()
    self._context = context
    self._set_value(value)
    return self
251,721
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/datatypes/datatype.py#L79-L120
[ "def", "to_fp", "(", "self", ",", "file_pointer", ",", "comments", "=", "None", ")", ":", "# saving formula's internal comments", "for", "c", "in", "self", ".", "comments", ":", "print", "(", "c", ",", "file", "=", "file_pointer", ")", "# saving externally spe...
Sends locally staged mutations to Riak .
def update(self, **params):
    """Send locally staged mutations to Riak and clear them.

    :raises ValueError: when there is nothing staged to send
    :rtype: this datatype instance
    """
    if not self.modified:
        raise ValueError("No operation to perform")
    # Request the updated value back unless the caller opted out.
    params.setdefault('return_body', True)
    self.bucket._client.update_datatype(self, **params)
    self.clear()
    return self
251,722
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/datatypes/datatype.py#L133-L163
[ "def", "main", "(", ")", ":", "try", ":", "# Retrieve an AD2 device that has been exposed with ser2sock on localhost:10000.", "device", "=", "AlarmDecoder", "(", "SocketDevice", "(", "interface", "=", "(", "HOSTNAME", ",", "PORT", ")", ")", ")", "# Set up an event handl...
Converts a symbolic quorum value into its on - the - wire equivalent .
def encode_quorum(self, rw):
    """Convert a symbolic quorum value to its on-the-wire equivalent.

    :param rw: the quorum (symbolic string or non-negative integer)
    :rtype: int or None
    """
    if rw in QUORUM_TO_PB:
        return QUORUM_TO_PB[rw]
    # NB: deliberately `type(...) is int`, matching the original —
    # this excludes bool and int subclasses.
    if type(rw) is int and rw >= 0:
        return rw
    return None
251,723
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/codecs/pbuf.py#L124-L138
[ "def", "extract_string_pairs_in_directory", "(", "directory_path", ",", "extract_func", ",", "filter_func", ")", ":", "result", "=", "{", "}", "for", "root", ",", "dirnames", ",", "filenames", "in", "os", ".", "walk", "(", "directory_path", ")", ":", "for", ...
Decodes the list of siblings from the protobuf representation into the object .
def decode_contents(self, contents, obj):
    """Decode the protobuf sibling list into *obj*.

    :param contents: iterable of protobuf content messages
    :param obj: the RiakObject receiving the siblings
    :rtype: the same RiakObject
    """
    obj.siblings = [self.decode_content(c, RiakContent(obj))
                    for c in contents]
    # Invoke sibling-resolution logic when there is a conflict.
    if len(obj.siblings) > 1 and obj.resolver is not None:
        obj.resolver(obj)
    return obj
251,724
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/codecs/pbuf.py#L154-L170
[ "def", "on_key_pressed", "(", "self", ",", "event", ")", ":", "return", "# TODO", "if", "event", ".", "keysym", "==", "\"Up\"", ":", "self", ".", "manager", ".", "set_joystick", "(", "0.0", ",", "-", "1.0", ",", "0", ")", "elif", "event", ".", "keysy...
Decodes a single sibling from the protobuf representation into a RiakObject .
def decode_content(self, rpb_content, sibling):
    """Decode a single protobuf sibling into *sibling* and return it.

    :param rpb_content: an RpbContent message
    :param sibling: the RiakContent instance to fill
    :rtype: RiakContent
    """
    sibling.exists = not (rpb_content.HasField("deleted") and
                          rpb_content.deleted)
    if rpb_content.HasField("content_type"):
        sibling.content_type = bytes_to_str(rpb_content.content_type)
    if rpb_content.HasField("charset"):
        sibling.charset = bytes_to_str(rpb_content.charset)
    if rpb_content.HasField("content_encoding"):
        sibling.content_encoding = bytes_to_str(
            rpb_content.content_encoding)
    if rpb_content.HasField("vtag"):
        sibling.etag = bytes_to_str(rpb_content.vtag)
    sibling.links = [self.decode_link(link)
                     for link in rpb_content.links]
    if rpb_content.HasField("last_mod"):
        sibling.last_modified = float(rpb_content.last_mod)
        if rpb_content.HasField("last_mod_usecs"):
            # Fold microseconds into the fractional part.
            sibling.last_modified += \
                rpb_content.last_mod_usecs / 1000000.0
    sibling.usermeta = {bytes_to_str(usermd.key):
                        bytes_to_str(usermd.value)
                        for usermd in rpb_content.usermeta}
    sibling.indexes = {(bytes_to_str(index.key),
                        decode_index_value(index.key, index.value))
                       for index in rpb_content.indexes}
    sibling.encoded_data = rpb_content.value
    return sibling
251,725
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/codecs/pbuf.py#L172-L213
[ "def", "clean_expired_user_attempts", "(", "attempt_time", ":", "datetime", "=", "None", ")", "->", "int", ":", "if", "settings", ".", "AXES_COOLOFF_TIME", "is", "None", ":", "log", ".", "debug", "(", "'AXES: Skipping clean for expired access attempts because no AXES_CO...
Fills an RpbContent message with the appropriate data and metadata from a RiakObject .
def encode_content(self, robj, rpb_content):
    """Fill an RpbContent message from a RiakObject's data and metadata.

    :param robj: the RiakObject to encode
    :param rpb_content: the RpbContent message to fill
    :raises RiakError: when a link is not a 3-tuple
    """
    if robj.content_type:
        rpb_content.content_type = str_to_bytes(robj.content_type)
    if robj.charset:
        rpb_content.charset = str_to_bytes(robj.charset)
    if robj.content_encoding:
        rpb_content.content_encoding = \
            str_to_bytes(robj.content_encoding)
    for meta_key in robj.usermeta:
        pair = rpb_content.usermeta.add()
        pair.key = str_to_bytes(meta_key)
        pair.value = str_to_bytes(robj.usermeta[meta_key])
    for link in robj.links:
        pb_link = rpb_content.links.add()
        try:
            bucket, key, tag = link
        except ValueError:
            raise RiakError("Invalid link tuple %s" % link)
        pb_link.bucket = str_to_bytes(bucket)
        pb_link.key = str_to_bytes(key)
        # An absent tag is encoded as the empty string.
        pb_link.tag = str_to_bytes(tag if tag else '')
    for field, value in robj.indexes:
        pair = rpb_content.indexes.add()
        pair.key = str_to_bytes(field)
        pair.value = str_to_bytes(str(value))
    # Python 2.x data is stored in a string
    if six.PY2:
        rpb_content.value = str(robj.encoded_data)
    else:
        rpb_content.value = robj.encoded_data
251,726
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/codecs/pbuf.py#L215-L258
[ "def", "event_tracker", "(", "func", ")", ":", "@", "wraps", "(", "func", ")", "async", "def", "wrapper", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "\"\"\"\n Wraps function to provide redis\n tracking\n \"\"\"", "event", "=", "Event...
Decodes an RpbLink message into a tuple
def decode_link(self, link):
    """Decode an RpbLink message into a (bucket, key, tag) tuple.

    Missing fields decode to None.

    :rtype: tuple
    """
    bucket = bytes_to_str(link.bucket) if link.HasField("bucket") else None
    key = bytes_to_str(link.key) if link.HasField("key") else None
    tag = bytes_to_str(link.tag) if link.HasField("tag") else None
    return (bucket, key, tag)
251,727
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/codecs/pbuf.py#L260-L282
[ "def", "main", "(", "config_file", ")", ":", "if", "not", "(", "os", ".", "path", ".", "exists", "(", "config_file", ")", "and", "os", ".", "path", ".", "isfile", "(", "config_file", ")", ")", ":", "msg", "=", "\"Missing or invalid config file {0}\"", "....
Encodes a dict of bucket properties into the protobuf message .
def encode_bucket_props(self, props, msg):
    """Encode a dict of bucket properties into the protobuf message.

    :param props: the bucket-properties dict
    :param msg: the protobuf message whose ``props`` field is filled
    :rtype: the filled message
    """
    for prop in NORMAL_PROPS:
        if prop in props and props[prop] is not None:
            value = props[prop]
            if isinstance(value, six.string_types):
                setattr(msg.props, prop, str_to_bytes(value))
            else:
                setattr(msg.props, prop, value)
    for prop in COMMIT_HOOK_PROPS:
        if prop in props:
            setattr(msg.props, 'has_' + prop, True)
            self.encode_hooklist(props[prop], getattr(msg.props, prop))
    for prop in MODFUN_PROPS:
        if prop in props and props[prop] is not None:
            self.encode_modfun(props[prop], getattr(msg.props, prop))
    for prop in QUORUM_PROPS:
        # 'default' quorums are simply omitted from the message.
        if prop in props and props[prop] not in (None, 'default'):
            quorum = self.encode_quorum(props[prop])
            if quorum is not None:
                if isinstance(quorum, six.string_types):
                    setattr(msg.props, prop, str_to_bytes(quorum))
                else:
                    setattr(msg.props, prop, quorum)
    if 'repl' in props:
        msg.props.repl = REPL_TO_PB[props['repl']]
    return msg
251,728
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/codecs/pbuf.py#L298-L331
[ "def", "on_recv", "(", "self", ",", "cf", ")", ":", "data", "=", "bytes", "(", "cf", ".", "data", ")", "if", "len", "(", "data", ")", "<", "2", ":", "return", "ae", "=", "0", "if", "self", ".", "extended_rx_addr", "is", "not", "None", ":", "ae"...
Decodes the protobuf bucket properties message into a dict .
def decode_bucket_props(self, msg):
    """Decode the protobuf bucket-properties message into a dict.

    :param msg: the protobuf properties message
    :rtype: dict
    """
    props = {}
    for prop in NORMAL_PROPS:
        if msg.HasField(prop):
            value = getattr(msg, prop)
            # Binary values come back as bytes; expose them as str.
            if isinstance(value, bytes):
                value = bytes_to_str(value)
            props[prop] = value
    for prop in COMMIT_HOOK_PROPS:
        if getattr(msg, 'has_' + prop):
            props[prop] = self.decode_hooklist(getattr(msg, prop))
    for prop in MODFUN_PROPS:
        if msg.HasField(prop):
            props[prop] = self.decode_modfun(getattr(msg, prop))
    for prop in QUORUM_PROPS:
        if msg.HasField(prop):
            props[prop] = self.decode_quorum(getattr(msg, prop))
    if msg.HasField('repl'):
        props['repl'] = REPL_TO_PY[msg.repl]
    return props
251,729
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/codecs/pbuf.py#L333-L358
[ "def", "on_recv", "(", "self", ",", "cf", ")", ":", "data", "=", "bytes", "(", "cf", ".", "data", ")", "if", "len", "(", "data", ")", "<", "2", ":", "return", "ae", "=", "0", "if", "self", ".", "extended_rx_addr", "is", "not", "None", ":", "ae"...
Encodes a dict with mod and fun keys into a protobuf modfun pair . Used in bucket properties .
def encode_modfun(self, props, msg=None):
    """Encode a dict with 'mod' and 'fun' keys into an RpbModFun pair.

    Used in bucket properties.

    :param props: dict carrying 'mod' and 'fun' keys
    :param msg: an existing RpbModFun to fill (created when None)
    :rtype: the filled message
    """
    if msg is None:
        msg = riak.pb.riak_pb2.RpbModFun()
    msg.module = str_to_bytes(props['mod'])
    msg.function = str_to_bytes(props['fun'])
    return msg
251,730
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/codecs/pbuf.py#L372-L387
[ "def", "a_capture_show_configuration_failed", "(", "ctx", ")", ":", "result", "=", "ctx", ".", "device", ".", "send", "(", "\"show configuration failed\"", ")", "ctx", ".", "device", ".", "last_command_result", "=", "result", "index", "=", "result", ".", "find",...
Encodes a list of commit hooks into their protobuf equivalent . Used in bucket properties .
def encode_hooklist(self, hooklist, msg):
    """Encode a list of commit hooks into their protobuf equivalents.

    Used in bucket properties.

    :param hooklist: list of hook dicts
    :param msg: a repeated protobuf field supporting ``add()``
    """
    for hook in hooklist:
        self.encode_hook(hook, msg.add())
251,731
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/codecs/pbuf.py#L400-L411
[ "def", "is_expired", "(", "self", ",", "max_idle_seconds", ")", ":", "now", "=", "current_time", "(", ")", "return", "(", "self", ".", "expiration_time", "is", "not", "None", "and", "self", ".", "expiration_time", "<", "now", ")", "or", "(", "max_idle_seco...
Decodes a protobuf commit hook message into a dict . Used in bucket properties .
def decode_hook(self, hook):
    """Decode a protobuf commit-hook message into a dict.

    Used in bucket properties.

    :rtype: dict (modfun keys, or a 'name' key for named hooks)
    """
    if hook.HasField('modfun'):
        return self.decode_modfun(hook.modfun)
    return {'name': bytes_to_str(hook.name)}
251,732
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/codecs/pbuf.py#L413-L425
[ "def", "is_expired", "(", "self", ",", "max_idle_seconds", ")", ":", "now", "=", "current_time", "(", ")", "return", "(", "self", ".", "expiration_time", "is", "not", "None", "and", "self", ".", "expiration_time", "<", "now", ")", "or", "(", "max_idle_seco...
Encodes a commit hook dict into the protobuf message . Used in bucket properties .
def encode_hook(self, hook, msg):
    """Encode a commit-hook dict into the protobuf message.

    Used in bucket properties.

    :param hook: dict with either a 'name' key or modfun keys
    :param msg: the protobuf hook message to fill
    :rtype: the filled message
    """
    if 'name' in hook:
        msg.name = str_to_bytes(hook['name'])
    else:
        self.encode_modfun(hook, msg.modfun)
    return msg
251,733
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/codecs/pbuf.py#L427-L442
[ "def", "_ensure_connection", "(", "self", ")", ":", "conn", "=", "self", ".", "connect", "(", ")", "if", "conn", ".", "recycle", "and", "conn", ".", "recycle", "<", "time", ".", "time", "(", ")", ":", "logger", ".", "debug", "(", "'Client session expir...
Encodes a secondary index request into the protobuf message .
def encode_index_req(self, bucket, index, startkey, endkey=None,
                     return_terms=None, max_results=None,
                     continuation=None, timeout=None, term_regex=None,
                     streaming=False):
    """Encode a secondary-index request into a protobuf Msg.

    :param bucket: the bucket to query
    :param index: the index name
    :param startkey: the only key (equality) or first key (range)
    :param endkey: the last key of a range query (optional)
    :param return_terms: whether to return the matched terms (optional)
    :param max_results: page size for paginated results (optional)
    :param continuation: opaque pagination marker (optional)
    :param timeout: per-request timeout in ms, or 'infinity' (optional)
    :param term_regex: regex filter applied to terms (optional)
    :param streaming: whether to stream the results
    :rtype: Msg
    """
    req = riak.pb.riak_kv_pb2.RpbIndexReq(
        bucket=str_to_bytes(bucket.name),
        index=str_to_bytes(index))
    self._add_bucket_type(req, bucket.bucket_type)
    if endkey is not None:
        # Range query: min/max keys.
        req.qtype = riak.pb.riak_kv_pb2.RpbIndexReq.range
        req.range_min = str_to_bytes(str(startkey))
        req.range_max = str_to_bytes(str(endkey))
    else:
        # Equality query on a single key.
        req.qtype = riak.pb.riak_kv_pb2.RpbIndexReq.eq
        req.key = str_to_bytes(str(startkey))
    if return_terms is not None:
        req.return_terms = return_terms
    if max_results:
        req.max_results = max_results
    if continuation:
        req.continuation = str_to_bytes(continuation)
    if timeout:
        # 'infinity' is encoded as a zero timeout on the wire.
        req.timeout = 0 if timeout == 'infinity' else timeout
    if term_regex:
        req.term_regex = str_to_bytes(term_regex)
    req.stream = streaming
    mc = riak.pb.messages.MSG_CODE_INDEX_REQ
    rc = riak.pb.messages.MSG_CODE_INDEX_RESP
    return Msg(mc, req.SerializeToString(), rc)
251,734
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/codecs/pbuf.py#L444-L501
[ "def", "__isOpenThreadWpanRunning", "(", "self", ")", ":", "print", "'call __isOpenThreadWpanRunning'", "if", "self", ".", "__stripValue", "(", "self", ".", "__sendCommand", "(", "WPANCTL_CMD", "+", "'getprop -v NCP:State'", ")", "[", "0", "]", ")", "==", "'associ...
Fills an RpbYokozunaIndex message with the appropriate data .
def decode_search_index ( self , index ) : result = { } result [ 'name' ] = bytes_to_str ( index . name ) if index . HasField ( 'schema' ) : result [ 'schema' ] = bytes_to_str ( index . schema ) if index . HasField ( 'n_val' ) : result [ 'n_val' ] = index . n_val return result
251,735
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/codecs/pbuf.py#L519-L533
[ "def", "close", "(", "self", ")", ":", "if", "self", ".", "_access", "is", "not", "None", ":", "_logger", ".", "debug", "(", "\"Cleaning up\"", ")", "pci_cleanup", "(", "self", ".", "_access", ")", "self", ".", "_access", "=", "None" ]
Fills an TsPutReq message with the appropriate data and metadata from a TsObject .
def encode_timeseries_put ( self , tsobj ) : req = riak . pb . riak_ts_pb2 . TsPutReq ( ) req . table = str_to_bytes ( tsobj . table . name ) if tsobj . columns : raise NotImplementedError ( "columns are not implemented yet" ) if tsobj . rows and isinstance ( tsobj . rows , list ) : for row in tsobj . rows : tsr = req . rows . add ( ) # NB: type TsRow if not isinstance ( row , list ) : raise ValueError ( "TsObject row must be a list of values" ) for cell in row : tsc = tsr . cells . add ( ) # NB: type TsCell self . encode_to_ts_cell ( cell , tsc ) else : raise RiakError ( "TsObject requires a list of rows" ) mc = riak . pb . messages . MSG_CODE_TS_PUT_REQ rc = riak . pb . messages . MSG_CODE_TS_PUT_RESP return Msg ( mc , req . SerializeToString ( ) , rc )
251,736
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/codecs/pbuf.py#L758-L787
[ "def", "setup_recovery", "(", "working_dir", ")", ":", "db", "=", "get_db_state", "(", "working_dir", ")", "bitcoind_session", "=", "get_bitcoind", "(", "new", "=", "True", ")", "assert", "bitcoind_session", "is", "not", "None", "_", ",", "current_block", "=",...
Decodes a TsRow into a list
def decode_timeseries_row ( self , tsrow , tscols = None , convert_timestamp = False ) : row = [ ] for i , cell in enumerate ( tsrow . cells ) : col = None if tscols is not None : col = tscols [ i ] if cell . HasField ( 'varchar_value' ) : if col and not ( col . type == TsColumnType . Value ( 'VARCHAR' ) or col . type == TsColumnType . Value ( 'BLOB' ) ) : raise TypeError ( 'expected VARCHAR or BLOB column' ) else : row . append ( cell . varchar_value ) elif cell . HasField ( 'sint64_value' ) : if col and col . type != TsColumnType . Value ( 'SINT64' ) : raise TypeError ( 'expected SINT64 column' ) else : row . append ( cell . sint64_value ) elif cell . HasField ( 'double_value' ) : if col and col . type != TsColumnType . Value ( 'DOUBLE' ) : raise TypeError ( 'expected DOUBLE column' ) else : row . append ( cell . double_value ) elif cell . HasField ( 'timestamp_value' ) : if col and col . type != TsColumnType . Value ( 'TIMESTAMP' ) : raise TypeError ( 'expected TIMESTAMP column' ) else : dt = cell . timestamp_value if convert_timestamp : dt = datetime_from_unix_time_millis ( cell . timestamp_value ) row . append ( dt ) elif cell . HasField ( 'boolean_value' ) : if col and col . type != TsColumnType . Value ( 'BOOLEAN' ) : raise TypeError ( 'expected BOOLEAN column' ) else : row . append ( cell . boolean_value ) else : row . append ( None ) return row
251,737
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/codecs/pbuf.py#L847-L895
[ "def", "_validate_checksum", "(", "self", ",", "buffer", ")", ":", "self", ".", "_log", ".", "debug", "(", "\"Validating the buffer\"", ")", "if", "len", "(", "buffer", ")", "==", "0", ":", "self", ".", "_log", ".", "debug", "(", "\"Buffer was empty\"", ...
Decodes a preflist response
def decode_preflist ( self , item ) : result = { 'partition' : item . partition , 'node' : bytes_to_str ( item . node ) , 'primary' : item . primary } return result
251,738
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/codecs/pbuf.py#L897-L909
[ "def", "init_drivers", "(", "enable_debug_driver", "=", "False", ")", ":", "for", "driver", "in", "DRIVERS", ":", "try", ":", "if", "driver", "!=", "DebugDriver", "or", "enable_debug_driver", ":", "CLASSES", ".", "append", "(", "driver", ")", "except", "Exce...
Ping the remote server
def ping ( self ) : msg_code = riak . pb . messages . MSG_CODE_PING_REQ codec = self . _get_codec ( msg_code ) msg = codec . encode_ping ( ) resp_code , _ = self . _request ( msg , codec ) if resp_code == riak . pb . messages . MSG_CODE_PING_RESP : return True else : return False
251,739
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/tcp/transport.py#L107-L118
[ "def", "find_rust_extensions", "(", "*", "directories", ",", "*", "*", "kwargs", ")", ":", "# Get the file used to mark a Rust extension", "libfile", "=", "kwargs", ".", "get", "(", "\"libfile\"", ",", "\"lib.rs\"", ")", "# Get the directories to explore", "directories"...
Get information about the server
def get_server_info ( self ) : # NB: can't do it this way due to recursion # codec = self._get_codec(ttb_supported=False) codec = PbufCodec ( ) msg = Msg ( riak . pb . messages . MSG_CODE_GET_SERVER_INFO_REQ , None , riak . pb . messages . MSG_CODE_GET_SERVER_INFO_RESP ) resp_code , resp = self . _request ( msg , codec ) return codec . decode_get_server_info ( resp )
251,740
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/tcp/transport.py#L120-L130
[ "def", "parse", "(", "self", ",", "msg", ")", ":", "#init", "dictValues", "=", "HL7Dict", "(", "self", ".", "tersersep", ")", "msg_", "=", "msg", ".", "strip", "(", "'\\r\\n '", ")", "# extracts separator defined in the message itself", "self", ".", "extractSe...
Serialize get request and deserialize response
def get ( self , robj , r = None , pr = None , timeout = None , basic_quorum = None , notfound_ok = None , head_only = False ) : msg_code = riak . pb . messages . MSG_CODE_GET_REQ codec = self . _get_codec ( msg_code ) msg = codec . encode_get ( robj , r , pr , timeout , basic_quorum , notfound_ok , head_only ) resp_code , resp = self . _request ( msg , codec ) return codec . decode_get ( robj , resp )
251,741
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/tcp/transport.py#L149-L160
[ "def", "TrimBeginningAndEndingSlashes", "(", "path", ")", ":", "if", "path", ".", "startswith", "(", "'/'", ")", ":", "# Returns substring starting from index 1 to end of the string", "path", "=", "path", "[", "1", ":", "]", "if", "path", ".", "endswith", "(", "...
Streams keys from a timeseries table returning an iterator that yields lists of keys .
def ts_stream_keys ( self , table , timeout = None ) : msg_code = riak . pb . messages . MSG_CODE_TS_LIST_KEYS_REQ codec = self . _get_codec ( msg_code ) msg = codec . encode_timeseries_listkeysreq ( table , timeout ) self . _send_msg ( msg . msg_code , msg . data ) return PbufTsKeyStream ( self , codec , self . _ts_convert_timestamp )
251,742
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/tcp/transport.py#L212-L221
[ "def", "cublasSdgmm", "(", "handle", ",", "mode", ",", "m", ",", "n", ",", "A", ",", "lda", ",", "x", ",", "incx", ",", "C", ",", "ldc", ")", ":", "status", "=", "_libcublas", ".", "cublasSdgmm", "(", "handle", ",", "_CUBLAS_SIDE", "[", "mode", "...
Lists all keys within a bucket .
def get_keys ( self , bucket , timeout = None ) : msg_code = riak . pb . messages . MSG_CODE_LIST_KEYS_REQ codec = self . _get_codec ( msg_code ) stream = self . stream_keys ( bucket , timeout = timeout ) return codec . decode_get_keys ( stream )
251,743
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/tcp/transport.py#L231-L238
[ "def", "simple_parse_file", "(", "filename", ":", "str", ")", "->", "Feed", ":", "pairs", "=", "(", "(", "rss", ".", "parse_rss_file", ",", "_adapt_rss_channel", ")", ",", "(", "atom", ".", "parse_atom_file", ",", "_adapt_atom_feed", ")", ",", "(", "json_f...
Streams keys from a bucket returning an iterator that yields lists of keys .
def stream_keys ( self , bucket , timeout = None ) : msg_code = riak . pb . messages . MSG_CODE_LIST_KEYS_REQ codec = self . _get_codec ( msg_code ) msg = codec . encode_stream_keys ( bucket , timeout ) self . _send_msg ( msg . msg_code , msg . data ) return PbufKeyStream ( self , codec )
251,744
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/tcp/transport.py#L240-L249
[ "def", "compute_K_analytical", "(", "self", ",", "spacing", ")", ":", "K", "=", "redaK", ".", "compute_K_analytical", "(", "self", ".", "data", ",", "spacing", "=", "spacing", ")", "self", ".", "data", "=", "redaK", ".", "apply_K", "(", "self", ".", "d...
Serialize bucket listing request and deserialize response
def get_buckets ( self , bucket_type = None , timeout = None ) : msg_code = riak . pb . messages . MSG_CODE_LIST_BUCKETS_REQ codec = self . _get_codec ( msg_code ) msg = codec . encode_get_buckets ( bucket_type , timeout , streaming = False ) resp_code , resp = self . _request ( msg , codec ) return resp . buckets
251,745
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/tcp/transport.py#L251-L260
[ "def", "diff_commonSuffix", "(", "self", ",", "text1", ",", "text2", ")", ":", "# Quick check for common null cases.", "if", "not", "text1", "or", "not", "text2", "or", "text1", "[", "-", "1", "]", "!=", "text2", "[", "-", "1", "]", ":", "return", "0", ...
Serialize bucket property request and deserialize response
def get_bucket_props ( self , bucket ) : msg_code = riak . pb . messages . MSG_CODE_GET_BUCKET_REQ codec = self . _get_codec ( msg_code ) msg = codec . encode_get_bucket_props ( bucket ) resp_code , resp = self . _request ( msg , codec ) return codec . decode_bucket_props ( resp . props )
251,746
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/tcp/transport.py#L276-L284
[ "def", "get_listing", "(", "self", ")", ":", "if", "not", "hasattr", "(", "self", ",", "'listing'", ")", ":", "allEvents", "=", "self", ".", "get_allEvents", "(", ")", "openEvents", "=", "allEvents", ".", "filter", "(", "registrationOpen", "=", "True", "...
Serialize set bucket property request and deserialize response
def set_bucket_props ( self , bucket , props ) : if not self . pb_all_bucket_props ( ) : for key in props : if key not in ( 'n_val' , 'allow_mult' ) : raise NotImplementedError ( 'Server only supports n_val and ' 'allow_mult properties over PBC' ) msg_code = riak . pb . messages . MSG_CODE_SET_BUCKET_REQ codec = self . _get_codec ( msg_code ) msg = codec . encode_set_bucket_props ( bucket , props ) resp_code , resp = self . _request ( msg , codec ) return True
251,747
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/tcp/transport.py#L286-L299
[ "def", "fold_path", "(", "path", ",", "width", "=", "30", ")", ":", "assert", "isinstance", "(", "path", ",", "six", ".", "string_types", ")", "if", "len", "(", "path", ")", ">", "width", ":", "path", ".", "replace", "(", "\".\"", ",", "\".\\n \""...
Clear bucket properties resetting them to their defaults
def clear_bucket_props ( self , bucket ) : if not self . pb_clear_bucket_props ( ) : return False msg_code = riak . pb . messages . MSG_CODE_RESET_BUCKET_REQ codec = self . _get_codec ( msg_code ) msg = codec . encode_clear_bucket_props ( bucket ) self . _request ( msg , codec ) return True
251,748
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/tcp/transport.py#L301-L311
[ "def", "__get_vibration_code", "(", "self", ",", "left_motor", ",", "right_motor", ",", "duration", ")", ":", "inner_event", "=", "struct", ".", "pack", "(", "'2h6x2h2x2H28x'", ",", "0x50", ",", "-", "1", ",", "duration", ",", "0", ",", "int", "(", "left...
Fetch bucket - type properties
def get_bucket_type_props ( self , bucket_type ) : self . _check_bucket_types ( bucket_type ) msg_code = riak . pb . messages . MSG_CODE_GET_BUCKET_TYPE_REQ codec = self . _get_codec ( msg_code ) msg = codec . encode_get_bucket_type_props ( bucket_type ) resp_code , resp = self . _request ( msg , codec ) return codec . decode_bucket_props ( resp . props )
251,749
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/tcp/transport.py#L313-L322
[ "def", "_mod_repo_in_file", "(", "repo", ",", "repostr", ",", "filepath", ")", ":", "with", "salt", ".", "utils", ".", "files", ".", "fopen", "(", "filepath", ")", "as", "fhandle", ":", "output", "=", "[", "]", "for", "line", "in", "fhandle", ":", "c...
Set bucket - type properties
def set_bucket_type_props ( self , bucket_type , props ) : self . _check_bucket_types ( bucket_type ) msg_code = riak . pb . messages . MSG_CODE_SET_BUCKET_TYPE_REQ codec = self . _get_codec ( msg_code ) msg = codec . encode_set_bucket_type_props ( bucket_type , props ) resp_code , resp = self . _request ( msg , codec ) return True
251,750
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/tcp/transport.py#L324-L333
[ "def", "_mod_repo_in_file", "(", "repo", ",", "repostr", ",", "filepath", ")", ":", "with", "salt", ".", "utils", ".", "files", ".", "fopen", "(", "filepath", ")", "as", "fhandle", ":", "output", "=", "[", "]", "for", "line", "in", "fhandle", ":", "c...
Prints the report of one step of a benchmark .
def print_report ( label , user , system , real ) : print ( "{:<12s} {:12f} {:12f} ( {:12f} )" . format ( label , user , system , real ) )
251,751
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/benchmark.py#L134-L141
[ "def", "unbind", "(", "self", ",", "devices_to_unbind", ")", ":", "if", "self", ".", "entity_api_key", "==", "\"\"", ":", "return", "{", "'status'", ":", "'failure'", ",", "'response'", ":", "'No API key found in request'", "}", "url", "=", "self", ".", "bas...
Runs the next iteration of the benchmark .
def next ( self ) : if self . count == 0 : raise StopIteration elif self . count > 1 : print_rehearsal_header ( ) else : if self . rehearse : gc . collect ( ) print ( "-" * 59 ) print ( ) print_header ( ) self . count -= 1 return self
251,752
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/benchmark.py#L96-L112
[ "def", "SetConsoleTextAttribute", "(", "stream_id", ",", "attrs", ")", ":", "handle", "=", "handles", "[", "stream_id", "]", "return", "windll", ".", "kernel32", ".", "SetConsoleTextAttribute", "(", "handle", ",", "attrs", ")" ]
Adds a RiakObject to the inputs .
def add_object ( self , obj ) : return self . add_bucket_key_data ( obj . _bucket . _name , obj . _key , None )
251,753
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/mapreduce.py#L77-L85
[ "def", "stop", "(", "self", ")", ":", "self", ".", "_hw_virtualization", "=", "False", "yield", "from", "self", ".", "_stop_ubridge", "(", ")", "yield", "from", "self", ".", "_stop_remote_console", "(", ")", "vm_state", "=", "yield", "from", "self", ".", ...
Adds all keys in a bucket to the inputs .
def add_bucket ( self , bucket , bucket_type = None ) : if not riak . disable_list_exceptions : raise riak . ListError ( ) self . _input_mode = 'bucket' if isinstance ( bucket , riak . RiakBucket ) : if bucket . bucket_type . is_default ( ) : self . _inputs = { 'bucket' : bucket . name } else : self . _inputs = { 'bucket' : [ bucket . bucket_type . name , bucket . name ] } elif bucket_type is not None and bucket_type != "default" : self . _inputs = { 'bucket' : [ bucket_type , bucket ] } else : self . _inputs = { 'bucket' : bucket } return self
251,754
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/mapreduce.py#L121-L144
[ "def", "softDeactivate", "(", "rh", ")", ":", "rh", ".", "printSysLog", "(", "\"Enter powerVM.softDeactivate, userid: \"", "+", "rh", ".", "userid", ")", "strCmd", "=", "\"echo 'ping'\"", "iucvResults", "=", "execCmdThruIUCV", "(", "rh", ",", "rh", ".", "userid"...
Adds key filters to the inputs .
def add_key_filters ( self , key_filters ) : if self . _input_mode == 'query' : raise ValueError ( 'Key filters are not supported in a query.' ) self . _key_filters . extend ( key_filters ) return self
251,755
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/mapreduce.py#L146-L158
[ "def", "guess_peb_size", "(", "path", ")", ":", "file_offset", "=", "0", "offsets", "=", "[", "]", "f", "=", "open", "(", "path", ",", "'rb'", ")", "f", ".", "seek", "(", "0", ",", "2", ")", "file_size", "=", "f", ".", "tell", "(", ")", "+", ...
Add a single key filter to the inputs .
def add_key_filter ( self , * args ) : if self . _input_mode == 'query' : raise ValueError ( 'Key filters are not supported in a query.' ) self . _key_filters . append ( args ) return self
251,756
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/mapreduce.py#L160-L172
[ "async", "def", "_wait_exponentially", "(", "self", ",", "exception", ",", "max_wait_time", "=", "300", ")", ":", "wait_time", "=", "min", "(", "(", "2", "**", "self", ".", "_connection_attempts", ")", "+", "random", ".", "random", "(", ")", ",", "max_wa...
Adds the Javascript built - in Riak . reduceSort to the query as a reduce phase .
def reduce_sort ( self , js_cmp = None , options = None ) : if options is None : options = dict ( ) if js_cmp : options [ 'arg' ] = js_cmp return self . reduce ( "Riak.reduceSort" , options = options )
251,757
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/mapreduce.py#L448-L466
[ "def", "_hex_ids", "(", "self", ",", "dev_list", ")", ":", "for", "dl", "in", "dev_list", ":", "match", "=", "self", ".", "nlp", ".", "search", "(", "dl", ")", "if", "match", ":", "yield", "match", ".", "group", "(", "\"usbid\"", ")", ",", "_readli...
Adds the Javascript built - in Riak . reduceSlice to the query as a reduce phase .
def reduce_slice ( self , start , end , options = None ) : if options is None : options = dict ( ) options [ 'arg' ] = [ start , end ] return self . reduce ( "Riak.reduceSlice" , options = options )
251,758
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/mapreduce.py#L500-L517
[ "def", "_hex_ids", "(", "self", ",", "dev_list", ")", ":", "for", "dl", "in", "dev_list", ":", "match", "=", "self", ".", "nlp", ".", "search", "(", "dl", ")", "if", "match", ":", "yield", "match", ".", "group", "(", "\"usbid\"", ")", ",", "_readli...
Convert the RiakMapReducePhase to a format that can be output into JSON . Used internally .
def to_array ( self ) : stepdef = { 'keep' : self . _keep , 'language' : self . _language , 'arg' : self . _arg } if self . _language == 'javascript' : if isinstance ( self . _function , list ) : stepdef [ 'bucket' ] = self . _function [ 0 ] stepdef [ 'key' ] = self . _function [ 1 ] elif isinstance ( self . _function , string_types ) : if ( "{" in self . _function ) : stepdef [ 'source' ] = self . _function else : stepdef [ 'name' ] = self . _function elif ( self . _language == 'erlang' and isinstance ( self . _function , list ) ) : stepdef [ 'module' ] = self . _function [ 0 ] stepdef [ 'function' ] = self . _function [ 1 ] elif ( self . _language == 'erlang' and isinstance ( self . _function , string_types ) ) : stepdef [ 'source' ] = self . _function return { self . _type : stepdef }
251,759
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/mapreduce.py#L569-L598
[ "async", "def", "set_kernel_options", "(", "cls", ",", "options", ":", "typing", ".", "Optional", "[", "str", "]", ")", ":", "await", "cls", ".", "set_config", "(", "\"kernel_opts\"", ",", "\"\"", "if", "options", "is", "None", "else", "options", ")" ]
Convert the RiakLinkPhase to a format that can be output into JSON . Used internally .
def to_array ( self ) : stepdef = { 'bucket' : self . _bucket , 'tag' : self . _tag , 'keep' : self . _keep } return { 'link' : stepdef }
251,760
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/mapreduce.py#L626-L634
[ "async", "def", "set_kernel_options", "(", "cls", ",", "options", ":", "typing", ".", "Optional", "[", "str", "]", ")", ":", "await", "cls", ".", "set_config", "(", "\"kernel_opts\"", ",", "\"\"", "if", "options", "is", "None", "else", "options", ")" ]
A conflict - resolution function that resolves by selecting the most recently - modified sibling by timestamp .
def last_written_resolver ( riak_object ) : riak_object . siblings = [ max ( riak_object . siblings , key = lambda x : x . last_modified ) , ]
251,761
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/resolver.py#L31-L40
[ "def", "split_query", "(", "query", ":", "str", ")", "->", "List", "[", "str", "]", ":", "try", ":", "_query", "=", "query", ".", "strip", "(", ")", "except", "(", "ValueError", ",", "AttributeError", ")", ":", "raise", "QueryParserException", "(", "'q...
The default OpenSSL certificate verification callback .
def verify_cb ( conn , cert , errnum , depth , ok ) : if not ok : raise SecurityError ( "Could not verify CA certificate {0}" . format ( cert . get_subject ( ) ) ) return ok
251,762
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/security.py#L27-L34
[ "def", "clear_stalled_files", "(", "self", ")", ":", "# FIXME: put lock in directory?", "CLEAR_AFTER", "=", "self", ".", "config", "[", "\"DELETE_STALLED_AFTER\"", "]", "minimum_age", "=", "time", ".", "time", "(", ")", "-", "CLEAR_AFTER", "for", "user_dir", "in",...
Fetches the next page using the same parameters as the original query .
def next_page ( self , timeout = None , stream = None ) : if not self . continuation : raise ValueError ( "Cannot get next index page, no continuation" ) if stream is not None : self . stream = stream args = { 'bucket' : self . bucket , 'index' : self . index , 'startkey' : self . startkey , 'endkey' : self . endkey , 'return_terms' : self . return_terms , 'max_results' : self . max_results , 'continuation' : self . continuation , 'timeout' : timeout , 'term_regex' : self . term_regex } if self . stream : return self . client . stream_index ( * * args ) else : return self . client . get_index ( * * args )
251,763
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/client/index_page.py#L117-L150
[ "async", "def", "unexpose", "(", "self", ")", ":", "app_facade", "=", "client", ".", "ApplicationFacade", ".", "from_connection", "(", "self", ".", "connection", ")", "log", ".", "debug", "(", "'Unexposing %s'", ",", "self", ".", "name", ")", "return", "aw...
Raises an exception if the given timeout is an invalid value .
def _validate_timeout ( timeout , infinity_ok = False ) : if timeout is None : return if timeout == 'infinity' : if infinity_ok : return else : raise ValueError ( 'timeout must be a positive integer ' '("infinity" is not valid)' ) if isinstance ( timeout , six . integer_types ) and timeout > 0 : return raise ValueError ( 'timeout must be a positive integer' )
251,764
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/client/operations.py#L1270-L1288
[ "def", "indexed_file", "(", "self", ",", "f", ")", ":", "filename", ",", "handle", "=", "f", "if", "handle", "is", "None", "and", "filename", "is", "not", "None", ":", "handle", "=", "open", "(", "filename", ")", "if", "(", "handle", "is", "None", ...
Streams the list of buckets . This is a generator method that should be iterated over .
def stream_buckets ( self , bucket_type = None , timeout = None ) : if not riak . disable_list_exceptions : raise ListError ( ) _validate_timeout ( timeout ) if bucket_type : bucketfn = self . _bucket_type_bucket_builder else : bucketfn = self . _default_type_bucket_builder def make_op ( transport ) : return transport . stream_buckets ( bucket_type = bucket_type , timeout = timeout ) for bucket_list in self . _stream_with_retry ( make_op ) : bucket_list = [ bucketfn ( bytes_to_str ( name ) , bucket_type ) for name in bucket_list ] if len ( bucket_list ) > 0 : yield bucket_list
251,765
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/client/operations.py#L72-L125
[ "def", "ActivateCard", "(", "self", ",", "card", ")", ":", "if", "not", "hasattr", "(", "card", ",", "'connection'", ")", ":", "card", ".", "connection", "=", "card", ".", "createConnection", "(", ")", "if", "None", "!=", "self", ".", "parent", ".", ...
Queries a secondary index streaming matching keys through an iterator .
def stream_index ( self , bucket , index , startkey , endkey = None , return_terms = None , max_results = None , continuation = None , timeout = None , term_regex = None ) : # TODO FUTURE: implement "retry on connection closed" # as in stream_mapred _validate_timeout ( timeout , infinity_ok = True ) page = IndexPage ( self , bucket , index , startkey , endkey , return_terms , max_results , term_regex ) page . stream = True resource = self . _acquire ( ) transport = resource . object page . results = transport . stream_index ( bucket , index , startkey , endkey , return_terms = return_terms , max_results = max_results , continuation = continuation , timeout = timeout , term_regex = term_regex ) page . results . attach ( resource ) return page
251,766
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/client/operations.py#L238-L301
[ "def", "location", "(", "args", ")", ":", "fastafile", "=", "args", ".", "fastafile", "pwmfile", "=", "args", ".", "pwmfile", "lwidth", "=", "args", ".", "width", "if", "not", "lwidth", ":", "f", "=", "Fasta", "(", "fastafile", ")", "lwidth", "=", "l...
Lists all keys in a bucket via a stream . This is a generator method which should be iterated over .
def stream_keys ( self , bucket , timeout = None ) : if not riak . disable_list_exceptions : raise ListError ( ) _validate_timeout ( timeout ) def make_op ( transport ) : return transport . stream_keys ( bucket , timeout = timeout ) for keylist in self . _stream_with_retry ( make_op ) : if len ( keylist ) > 0 : if six . PY2 : yield keylist else : yield [ bytes_to_str ( item ) for item in keylist ]
251,767
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/client/operations.py#L484-L530
[ "def", "log_likelihood", "(", "C", ",", "T", ")", ":", "C", "=", "C", ".", "tocsr", "(", ")", "T", "=", "T", ".", "tocsr", "(", ")", "ind", "=", "scipy", ".", "nonzero", "(", "C", ")", "relT", "=", "np", ".", "array", "(", "T", "[", "ind", ...
Lists all keys in a time series table via a stream . This is a generator method which should be iterated over .
def ts_stream_keys ( self , table , timeout = None ) : if not riak . disable_list_exceptions : raise ListError ( ) t = table if isinstance ( t , six . string_types ) : t = Table ( self , table ) _validate_timeout ( timeout ) resource = self . _acquire ( ) transport = resource . object stream = transport . ts_stream_keys ( t , timeout ) stream . attach ( resource ) try : for keylist in stream : if len ( keylist ) > 0 : yield keylist finally : stream . close ( )
251,768
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/client/operations.py#L665-L713
[ "def", "rank_loss", "(", "sentence_emb", ",", "image_emb", ",", "margin", "=", "0.2", ")", ":", "with", "tf", ".", "name_scope", "(", "\"rank_loss\"", ")", ":", "# Normalize first as this is assumed in cosine similarity later.", "sentence_emb", "=", "tf", ".", "nn",...
Fetches many keys in parallel via threads .
def multiget ( self , pairs , * * params ) : if self . _multiget_pool : params [ 'pool' ] = self . _multiget_pool return riak . client . multi . multiget ( self , pairs , * * params )
251,769
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/client/operations.py#L1003-L1016
[ "def", "update", "(", "self", ")", ":", "functions", ".", "check_valid_bs_range", "(", "self", ")", "# Here's a trick to find the gregorian date:", "# We find the number of days from earliest nepali date to the current", "# day. We then add the number of days to the earliest english date...
Stores objects in parallel via threads .
def multiput ( self , objs , * * params ) : if self . _multiput_pool : params [ 'pool' ] = self . _multiput_pool return riak . client . multi . multiput ( self , objs , * * params )
251,770
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/client/operations.py#L1018-L1031
[ "def", "upload_cbn_dir", "(", "dir_path", ",", "manager", ")", ":", "t", "=", "time", ".", "time", "(", ")", "for", "jfg_path", "in", "os", ".", "listdir", "(", "dir_path", ")", ":", "if", "not", "jfg_path", ".", "endswith", "(", "'.jgf'", ")", ":", ...
Fetches the value of a Riak Datatype .
def fetch_datatype ( self , bucket , key , r = None , pr = None , basic_quorum = None , notfound_ok = None , timeout = None , include_context = None ) : dtype , value , context = self . _fetch_datatype ( bucket , key , r = r , pr = pr , basic_quorum = basic_quorum , notfound_ok = notfound_ok , timeout = timeout , include_context = include_context ) return TYPES [ dtype ] ( bucket = bucket , key = key , value = value , context = context )
251,771
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/client/operations.py#L1106-L1143
[ "def", "_remove_persistent_module", "(", "mod", ",", "comment", ")", ":", "if", "not", "mod", "or", "mod", "not", "in", "mod_list", "(", "True", ")", ":", "return", "set", "(", ")", "if", "comment", ":", "__salt__", "[", "'file.comment'", "]", "(", "_L...
Sends an update to a Riak Datatype to the server . This operation is not idempotent and so will not be retried automatically .
def update_datatype ( self , datatype , w = None , dw = None , pw = None , return_body = None , timeout = None , include_context = None ) : _validate_timeout ( timeout ) with self . _transport ( ) as transport : return transport . update_datatype ( datatype , w = w , dw = dw , pw = pw , return_body = return_body , timeout = timeout , include_context = include_context )
251,772
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/client/operations.py#L1145-L1175
[ "def", "xstrip", "(", "filename", ")", ":", "while", "xisabs", "(", "filename", ")", ":", "# strip windows drive with all slashes", "if", "re", ".", "match", "(", "b'\\\\w:[\\\\\\\\/]'", ",", "filename", ")", ":", "filename", "=", "re", ".", "sub", "(", "b'^...
Similar to self . _send_recv but doesn t try to initiate a connection thus preventing an infinite loop .
def _non_connect_send_recv ( self , msg_code , data = None ) : self . _non_connect_send_msg ( msg_code , data ) return self . _recv_msg ( )
251,773
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/tcp/connection.py#L53-L59
[ "def", "from_pint", "(", "cls", ",", "arr", ",", "unit_registry", "=", "None", ")", ":", "p_units", "=", "[", "]", "for", "base", ",", "exponent", "in", "arr", ".", "_units", ".", "items", "(", ")", ":", "bs", "=", "convert_pint_units", "(", "base", ...
Similar to self . _send but doesn t try to initiate a connection thus preventing an infinite loop .
def _non_connect_send_msg ( self , msg_code , data ) : try : self . _socket . sendall ( self . _encode_msg ( msg_code , data ) ) except ( IOError , socket . error ) as e : if e . errno == errno . EPIPE : raise ConnectionClosed ( e ) else : raise
251,774
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/tcp/connection.py#L65-L76
[ "def", "from_pint", "(", "cls", ",", "arr", ",", "unit_registry", "=", "None", ")", ":", "p_units", "=", "[", "]", "for", "base", ",", "exponent", "in", "arr", ".", "_units", ".", "items", "(", ")", ":", "bs", "=", "convert_pint_units", "(", "base", ...
Initialize a secure connection to the server .
def _init_security ( self ) : if not self . _starttls ( ) : raise SecurityError ( "Could not start TLS connection" ) # _ssh_handshake() will throw an exception upon failure self . _ssl_handshake ( ) if not self . _auth ( ) : raise SecurityError ( "Could not authorize connection" )
251,775
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/tcp/connection.py#L82-L91
[ "def", "setOverlayTexelAspect", "(", "self", ",", "ulOverlayHandle", ",", "fTexelAspect", ")", ":", "fn", "=", "self", ".", "function_table", ".", "setOverlayTexelAspect", "result", "=", "fn", "(", "ulOverlayHandle", ",", "fTexelAspect", ")", "return", "result" ]
Exchange a STARTTLS message with Riak to initiate secure communications return True is Riak responds with a STARTTLS response False otherwise
def _starttls ( self ) : resp_code , _ = self . _non_connect_send_recv ( riak . pb . messages . MSG_CODE_START_TLS ) if resp_code == riak . pb . messages . MSG_CODE_START_TLS : return True else : return False
251,776
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/tcp/connection.py#L93-L103
[ "def", "max_texture_limit", "(", "self", ")", ":", "max_unit_array", "=", "(", "gl", ".", "GLint", "*", "1", ")", "(", ")", "gl", ".", "glGetIntegerv", "(", "gl", ".", "GL_MAX_TEXTURE_IMAGE_UNITS", ",", "max_unit_array", ")", "return", "max_unit_array", "[",...
Closes the underlying socket of the PB connection .
def close ( self ) : if self . _socket : if USE_STDLIB_SSL : # NB: Python 2.7.8 and earlier does not have a compatible # shutdown() method due to the SSL lib try : self . _socket . shutdown ( socket . SHUT_RDWR ) except EnvironmentError : # NB: sometimes these exceptions are raised if the initial # connection didn't succeed correctly, or if shutdown() is # called after the connection dies logging . debug ( 'Exception occurred while shutting ' 'down socket.' , exc_info = True ) self . _socket . close ( ) del self . _socket
251,777
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/tcp/connection.py#L266-L283
[ "def", "record", "(", "self", ",", "tags", ",", "measurement_map", ",", "timestamp", ",", "attachments", "=", "None", ")", ":", "assert", "all", "(", "vv", ">=", "0", "for", "vv", "in", "measurement_map", ".", "values", "(", ")", ")", "for", "measure",...
Delegates a property to the first sibling in a RiakObject raising an error when the object is in conflict .
def content_property ( name , doc = None ) : def _setter ( self , value ) : if len ( self . siblings ) == 0 : # In this case, assume that what the user wants is to # create a new sibling inside an empty object. self . siblings = [ RiakContent ( self ) ] if len ( self . siblings ) != 1 : raise ConflictError ( ) setattr ( self . siblings [ 0 ] , name , value ) def _getter ( self ) : if len ( self . siblings ) == 0 : return if len ( self . siblings ) != 1 : raise ConflictError ( ) return getattr ( self . siblings [ 0 ] , name ) return property ( _getter , _setter , doc = doc )
251,778
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/riak_object.py#L22-L43
[ "def", "stop", "(", "self", ")", ":", "log", ".", "info", "(", "'Stopping te kafka listener class'", ")", "self", ".", "consumer", ".", "unsubscribe", "(", ")", "self", ".", "consumer", ".", "close", "(", ")" ]
Delegates a method to the first sibling in a RiakObject raising an error when the object is in conflict .
def content_method ( name ) : def _delegate ( self , * args , * * kwargs ) : if len ( self . siblings ) != 1 : raise ConflictError ( ) return getattr ( self . siblings [ 0 ] , name ) . __call__ ( * args , * * kwargs ) _delegate . __doc__ = getattr ( RiakContent , name ) . __doc__ return _delegate
251,779
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/riak_object.py#L46-L58
[ "def", "remove_namespace", "(", "self", ",", "namespace", ")", ":", "params", "=", "(", "namespace", ",", ")", "execute", "=", "self", ".", "cursor", ".", "execute", "execute", "(", "'DELETE FROM gauged_data WHERE namespace = %s'", ",", "params", ")", "execute",...
Store the object in Riak . When this operation completes the object could contain new metadata and possibly new data if Riak contains a newer version of the object according to the object s vector clock .
def store ( self , w = None , dw = None , pw = None , return_body = True , if_none_match = False , timeout = None ) : if len ( self . siblings ) != 1 : raise ConflictError ( "Attempting to store an invalid object, " "resolve the siblings first" ) self . client . put ( self , w = w , dw = dw , pw = pw , return_body = return_body , if_none_match = if_none_match , timeout = timeout ) return self
251,780
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/riak_object.py#L247-L283
[ "def", "configure_logging", "(", ")", ":", "if", "not", "parse_boolean", "(", "os", ".", "environ", ".", "get", "(", "'DISABLE_TRUSTAR_LOGGING'", ")", ")", ":", "# configure", "dictConfig", "(", "DEFAULT_LOGGING_CONFIG", ")", "# construct error logger", "error_logge...
Reload the object from Riak . When this operation completes the object could contain new metadata and a new value if the object was updated in Riak since it was last retrieved .
def reload ( self , r = None , pr = None , timeout = None , basic_quorum = None , notfound_ok = None , head_only = False ) : self . client . get ( self , r = r , pr = pr , timeout = timeout , head_only = head_only ) return self
251,781
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/riak_object.py#L285-L317
[ "def", "sanitize_type", "(", "raw_type", ")", ":", "cleaned", "=", "get_printable", "(", "raw_type", ")", ".", "strip", "(", ")", "for", "bad", "in", "[", "r'__drv_aliasesMem'", ",", "r'__drv_freesMem'", ",", "r'__drv_strictTypeMatch\\(\\w+\\)'", ",", "r'__out_dat...
Delete this object from Riak .
def delete ( self , r = None , w = None , dw = None , pr = None , pw = None , timeout = None ) : self . client . delete ( self , r = r , w = w , dw = dw , pr = pr , pw = pw , timeout = timeout ) self . clear ( ) return self
251,782
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/riak_object.py#L319-L348
[ "def", "configure_app", "(", "*", "*", "kwargs", ")", ":", "sys_args", "=", "sys", ".", "argv", "args", ",", "command", ",", "command_args", "=", "parse_args", "(", "sys_args", "[", "1", ":", "]", ")", "parser", "=", "OptionParser", "(", ")", "parser",...
Get the encoding function for the provided content type for this bucket .
def get_encoder ( self , content_type ) : if content_type in self . _encoders : return self . _encoders [ content_type ] else : return self . _client . get_encoder ( content_type )
251,783
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/bucket.py#L88-L100
[ "def", "SetConsoleTextAttribute", "(", "stream_id", ",", "attrs", ")", ":", "handle", "=", "handles", "[", "stream_id", "]", "return", "windll", ".", "kernel32", ".", "SetConsoleTextAttribute", "(", "handle", ",", "attrs", ")" ]
Get the decoding function for the provided content type for this bucket .
def get_decoder ( self , content_type ) : if content_type in self . _decoders : return self . _decoders [ content_type ] else : return self . _client . get_decoder ( content_type )
251,784
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/bucket.py#L116-L128
[ "def", "_simulate_unitary", "(", "self", ",", "op", ":", "ops", ".", "Operation", ",", "data", ":", "_StateAndBuffer", ",", "indices", ":", "List", "[", "int", "]", ")", "->", "None", ":", "result", "=", "protocols", ".", "apply_unitary", "(", "op", ",...
Retrieves a list of keys belonging to this bucket in parallel .
def multiget ( self , keys , r = None , pr = None , timeout = None , basic_quorum = None , notfound_ok = None , head_only = False ) : bkeys = [ ( self . bucket_type . name , self . name , key ) for key in keys ] return self . _client . multiget ( bkeys , r = r , pr = pr , timeout = timeout , basic_quorum = basic_quorum , notfound_ok = notfound_ok , head_only = head_only )
251,785
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/bucket.py#L238-L268
[ "def", "realtime_observations", "(", "cls", ",", "buoy", ",", "data_type", "=", "'txt'", ")", ":", "endpoint", "=", "cls", "(", ")", "parsers", "=", "{", "'txt'", ":", "endpoint", ".", "_parse_met", ",", "'drift'", ":", "endpoint", ".", "_parse_drift", "...
Streams the list of buckets under this bucket - type . This is a generator method that should be iterated over .
def stream_buckets ( self , timeout = None ) : return self . _client . stream_buckets ( bucket_type = self , timeout = timeout )
251,786
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/bucket.py#L712-L729
[ "def", "ActivateCard", "(", "self", ",", "card", ")", ":", "if", "not", "hasattr", "(", "card", ",", "'connection'", ")", ":", "card", ".", "connection", "=", "card", ".", "createConnection", "(", ")", "if", "None", "!=", "self", ".", "parent", ".", ...
Increases the value by the argument .
def incr ( self , d ) : with self . lock : self . p = self . value ( ) + d
251,787
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/node.py#L46-L54
[ "def", "load_market_data", "(", "trading_day", "=", "None", ",", "trading_days", "=", "None", ",", "bm_symbol", "=", "'SPY'", ",", "environ", "=", "None", ")", ":", "if", "trading_day", "is", "None", ":", "trading_day", "=", "get_calendar", "(", "'XNYS'", ...
Returns a random client identifier
def make_random_client_id ( self ) : if PY2 : return ( 'py_%s' % base64 . b64encode ( str ( random . randint ( 1 , 0x40000000 ) ) ) ) else : return ( 'py_%s' % base64 . b64encode ( bytes ( str ( random . randint ( 1 , 0x40000000 ) ) , 'ascii' ) ) )
251,788
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/transport.py#L42-L52
[ "def", "get_max_devices_per_port_for_storage_bus", "(", "self", ",", "bus", ")", ":", "if", "not", "isinstance", "(", "bus", ",", "StorageBus", ")", ":", "raise", "TypeError", "(", "\"bus can only be an instance of type StorageBus\"", ")", "max_devices_per_port", "=", ...
Fetches an object .
def get ( self , robj , r = None , pr = None , timeout = None , basic_quorum = None , notfound_ok = None , head_only = False ) : raise NotImplementedError
251,789
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/transport.py#L70-L75
[ "def", "delete_classifier", "(", "self", ",", "classifier_id", ",", "*", "*", "kwargs", ")", ":", "if", "classifier_id", "is", "None", ":", "raise", "ValueError", "(", "'classifier_id must be provided'", ")", "headers", "=", "{", "}", "if", "'headers'", "in", ...
Stores an object .
def put ( self , robj , w = None , dw = None , pw = None , return_body = None , if_none_match = None , timeout = None ) : raise NotImplementedError
251,790
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/transport.py#L77-L82
[ "def", "pvd_factory", "(", "sys_ident", ",", "vol_ident", ",", "set_size", ",", "seqnum", ",", "log_block_size", ",", "vol_set_ident", ",", "pub_ident_str", ",", "preparer_ident_str", ",", "app_ident_str", ",", "copyright_file", ",", "abstract_file", ",", "bibli_fil...
Deletes an object .
def delete ( self , robj , rw = None , r = None , w = None , dw = None , pr = None , pw = None , timeout = None ) : raise NotImplementedError
251,791
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/transport.py#L84-L89
[ "def", "pvd_factory", "(", "sys_ident", ",", "vol_ident", ",", "set_size", ",", "seqnum", ",", "log_block_size", ",", "vol_set_ident", ",", "pub_ident_str", ",", "preparer_ident_str", ",", "app_ident_str", ",", "copyright_file", ",", "abstract_file", ",", "bibli_fil...
Updates a counter by the given value .
def update_counter ( self , bucket , key , value , w = None , dw = None , pw = None , returnvalue = False ) : raise NotImplementedError
251,792
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/transport.py#L285-L290
[ "def", "Run", "(", "self", ")", ":", "self", ".", "_GetArgs", "(", ")", "goodlogging", ".", "Log", ".", "Info", "(", "\"CLEAR\"", ",", "\"Using database: {0}\"", ".", "format", "(", "self", ".", "_databasePath", ")", ")", "self", ".", "_db", "=", "data...
Fetches a Riak Datatype .
def fetch_datatype ( self , bucket , key , r = None , pr = None , basic_quorum = None , notfound_ok = None , timeout = None , include_context = None ) : raise NotImplementedError
251,793
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/transport.py#L292-L297
[ "def", "_load_state", "(", "self", ",", "context", ")", ":", "try", ":", "state", "=", "cookie_to_state", "(", "context", ".", "cookie", ",", "self", ".", "config", "[", "\"COOKIE_STATE_NAME\"", "]", ",", "self", ".", "config", "[", "\"STATE_ENCRYPTION_KEY\"...
Updates a Riak Datatype by sending local operations to the server .
def update_datatype ( self , datatype , w = None , dw = None , pw = None , return_body = None , timeout = None , include_context = None ) : raise NotImplementedError
251,794
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/transport.py#L299-L304
[ "def", "set_azure_secret_access_key", "(", "config_fpath", ",", "container", ",", "az_secret_access_key", ")", ":", "key", "=", "AZURE_KEY_PREFIX", "+", "container", "return", "write_config_value_to_file", "(", "key", ",", "az_secret_access_key", ",", "config_fpath", ")...
Emulates a search request via MapReduce . Used in the case where the transport supports MapReduce but has no native search capability .
def _search_mapred_emu ( self , index , query ) : phases = [ ] if not self . phaseless_mapred ( ) : phases . append ( { 'language' : 'erlang' , 'module' : 'riak_kv_mapreduce' , 'function' : 'reduce_identity' , 'keep' : True } ) mr_result = self . mapred ( { 'module' : 'riak_search' , 'function' : 'mapred_search' , 'arg' : [ index , query ] } , phases ) result = { 'num_found' : len ( mr_result ) , 'max_score' : 0.0 , 'docs' : [ ] } for bucket , key , data in mr_result : if u'score' in data and data [ u'score' ] [ 0 ] > result [ 'max_score' ] : result [ 'max_score' ] = data [ u'score' ] [ 0 ] result [ 'docs' ] . append ( { u'id' : key } ) return result
251,795
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/transport.py#L313-L336
[ "def", "hash_blockquotes", "(", "text", ",", "hashes", ",", "markdown_obj", ")", ":", "def", "sub", "(", "match", ")", ":", "block", "=", "match", ".", "group", "(", "1", ")", ".", "strip", "(", ")", "block", "=", "re", ".", "sub", "(", "r'(?:(?<=\...
Emulates a secondary index request via MapReduce . Used in the case where the transport supports MapReduce but has no native secondary index query capability .
def _get_index_mapred_emu ( self , bucket , index , startkey , endkey = None ) : phases = [ ] if not self . phaseless_mapred ( ) : phases . append ( { 'language' : 'erlang' , 'module' : 'riak_kv_mapreduce' , 'function' : 'reduce_identity' , 'keep' : True } ) if endkey : result = self . mapred ( { 'bucket' : bucket , 'index' : index , 'start' : startkey , 'end' : endkey } , phases ) else : result = self . mapred ( { 'bucket' : bucket , 'index' : index , 'key' : startkey } , phases ) return [ key for resultbucket , key in result ]
251,796
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/transport.py#L339-L362
[ "def", "weld_variance", "(", "array", ",", "weld_type", ")", ":", "weld_obj_mean", "=", "weld_mean", "(", "array", ",", "weld_type", ")", "obj_id", ",", "weld_obj", "=", "create_weld_object", "(", "array", ")", "weld_obj_mean_id", "=", "get_weld_obj_id", "(", ...
Parse the body of an object response and populate the object .
def _parse_body ( self , robj , response , expected_statuses ) : # If no response given, then return. if response is None : return None status , headers , data = response # Check if the server is down(status==0) if not status : m = 'Could not contact Riak Server: http://{0}:{1}!' . format ( self . _node . host , self . _node . http_port ) raise RiakError ( m ) # Make sure expected code came back self . check_http_code ( status , expected_statuses ) if 'x-riak-vclock' in headers : robj . vclock = VClock ( headers [ 'x-riak-vclock' ] , 'base64' ) # If 404(Not Found), then clear the object. if status == 404 : robj . siblings = [ ] return None # If 201 Created, we need to extract the location and set the # key on the object. elif status == 201 : robj . key = headers [ 'location' ] . strip ( ) . split ( '/' ) [ - 1 ] # If 300(Siblings), apply the siblings to the object elif status == 300 : ctype , params = parse_header ( headers [ 'content-type' ] ) if ctype == 'multipart/mixed' : if six . PY3 : data = bytes_to_str ( data ) boundary = re . compile ( '\r?\n--%s(?:--)?\r?\n' % re . escape ( params [ 'boundary' ] ) ) parts = [ message_from_string ( p ) for p in re . split ( boundary , data ) [ 1 : - 1 ] ] robj . siblings = [ self . _parse_sibling ( RiakContent ( robj ) , part . items ( ) , part . get_payload ( ) ) for part in parts ] # Invoke sibling-resolution logic if robj . resolver is not None : robj . resolver ( robj ) return robj else : raise Exception ( 'unexpected sibling response format: {0}' . format ( ctype ) ) robj . siblings = [ self . _parse_sibling ( RiakContent ( robj ) , headers . items ( ) , data ) ] return robj
251,797
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/codecs/http.py#L46-L104
[ "def", "point_in_triangle", "(", "p", ",", "v1", ",", "v2", ",", "v3", ")", ":", "def", "_test", "(", "p1", ",", "p2", ",", "p3", ")", ":", "return", "(", "p1", "[", "0", "]", "-", "p3", "[", "0", "]", ")", "*", "(", "p2", "[", "1", "]", ...
Parses a single sibling out of a response .
def _parse_sibling ( self , sibling , headers , data ) : sibling . exists = True # Parse the headers... for header , value in headers : header = header . lower ( ) if header == 'content-type' : sibling . content_type , sibling . charset = self . _parse_content_type ( value ) elif header == 'etag' : sibling . etag = value elif header == 'link' : sibling . links = self . _parse_links ( value ) elif header == 'last-modified' : sibling . last_modified = mktime_tz ( parsedate_tz ( value ) ) elif header . startswith ( 'x-riak-meta-' ) : metakey = header . replace ( 'x-riak-meta-' , '' ) sibling . usermeta [ metakey ] = value elif header . startswith ( 'x-riak-index-' ) : field = header . replace ( 'x-riak-index-' , '' ) reader = csv . reader ( [ value ] , skipinitialspace = True ) for line in reader : for token in line : token = decode_index_value ( field , token ) sibling . add_index ( field , token ) elif header == 'x-riak-deleted' : sibling . exists = False sibling . encoded_data = data return sibling
251,798
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/codecs/http.py#L106-L140
[ "def", "_OpenPathSpec", "(", "self", ",", "path_specification", ",", "ascii_codepage", "=", "'cp1252'", ")", ":", "if", "not", "path_specification", ":", "return", "None", "file_entry", "=", "self", ".", "_file_system", ".", "GetFileEntryByPathSpec", "(", "path_sp...
Convert the link tuple to a link header string . Used internally .
def _to_link_header ( self , link ) : try : bucket , key , tag = link except ValueError : raise RiakError ( "Invalid link tuple %s" % link ) tag = tag if tag is not None else bucket url = self . object_path ( bucket , key ) header = '<%s>; riaktag="%s"' % ( url , tag ) return header
251,799
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/codecs/http.py#L142-L153
[ "def", "write_config_file", "(", "self", ",", "params", ",", "path", ")", ":", "cfgp", "=", "ConfigParser", "(", ")", "cfgp", ".", "add_section", "(", "params", "[", "'name'", "]", ")", "for", "p", "in", "params", ":", "if", "p", "==", "'name'", ":",...