idx
int64
0
251k
question
stringlengths
53
3.53k
target
stringlengths
5
1.23k
len_question
int64
20
893
len_target
int64
3
238
248,500
def redirectURL ( self , realm , return_to = None , immediate = False ) : message = self . getMessage ( realm , return_to , immediate ) return message . toURL ( self . endpoint . server_url )
Returns a URL with an encoded OpenID request .
48
10
248,501
def formMarkup ( self , realm , return_to = None , immediate = False , form_tag_attrs = None ) : message = self . getMessage ( realm , return_to , immediate ) return message . toFormMarkup ( self . endpoint . server_url , form_tag_attrs )
Get html for a form to submit this request to the IDP .
67
14
248,502
def htmlMarkup ( self , realm , return_to = None , immediate = False , form_tag_attrs = None ) : return oidutil . autoSubmitHTML ( self . formMarkup ( realm , return_to , immediate , form_tag_attrs ) )
Get an autosubmitting HTML page that submits this request to the IDP . This is just a wrapper for formMarkup .
60
27
248,503
def isSigned ( self , ns_uri , ns_key ) : return self . message . getKey ( ns_uri , ns_key ) in self . signed_fields
Return whether a particular key is signed regardless of its namespace alias
38
12
248,504
def getSigned ( self , ns_uri , ns_key , default = None ) : if self . isSigned ( ns_uri , ns_key ) : return self . message . getArg ( ns_uri , ns_key , default ) else : return default
Return the specified signed field if available otherwise return default
58
10
248,505
def getSignedNS ( self , ns_uri ) : msg_args = self . message . getArgs ( ns_uri ) for key in msg_args . iterkeys ( ) : if not self . isSigned ( ns_uri , key ) : logging . info ( "SuccessResponse.getSignedNS: (%s, %s) not signed." % ( ns_uri , key ) ) return None return msg_args
Get signed arguments from the response message . Return a dict of all arguments in the specified namespace . If any of the arguments are not signed return None .
92
30
248,506
def extensionResponse ( self , namespace_uri , require_signed ) : if require_signed : return self . getSignedNS ( namespace_uri ) else : return self . message . getArgs ( namespace_uri )
Return response arguments in the specified namespace .
46
8
248,507
def mkFilter ( parts ) : # Convert the parts into a list, and pass to mkCompoundFilter if parts is None : parts = [ BasicServiceEndpoint ] try : parts = list ( parts ) except TypeError : return mkCompoundFilter ( [ parts ] ) else : return mkCompoundFilter ( parts )
Convert a filter - convertable thing into a filter
67
11
248,508
def getServiceEndpoints ( self , yadis_url , service_element ) : endpoints = [ ] # Do an expansion of the service element by xrd:Type and xrd:URI for type_uris , uri , _ in expandService ( service_element ) : # Create a basic endpoint object to represent this # yadis_url, Service, Type, URI combination endpoint = BasicServiceEndpoint ( yadis_url , type_uris , uri , service_element ) e = self . applyFilters ( endpoint ) if e is not None : endpoints . append ( e ) return endpoints
Returns an iterator of endpoint objects produced by the filter functions .
134
12
248,509
def applyFilters ( self , endpoint ) : for filter_function in self . filter_functions : e = filter_function ( endpoint ) if e is not None : # Once one of the filters has returned an # endpoint, do not apply any more. return e return None
Apply filter functions to an endpoint until one of them returns non - None .
58
15
248,510
def getServiceEndpoints ( self , yadis_url , service_element ) : endpoints = [ ] for subfilter in self . subfilters : endpoints . extend ( subfilter . getServiceEndpoints ( yadis_url , service_element ) ) return endpoints
Generate all endpoint objects for all of the subfilters of this filter and return their concatenation .
61
22
248,511
def randomString ( length , chrs = None ) : if chrs is None : return getBytes ( length ) else : n = len ( chrs ) return '' . join ( [ chrs [ randrange ( n ) ] for _ in xrange ( length ) ] )
Produce a string of length random bytes chosen from chrs .
58
13
248,512
def _hasher_first_run ( self , preimage ) : new_hasher = self . _backend . keccak256 assert new_hasher ( b'' ) == b"\xc5\xd2F\x01\x86\xf7#<\x92~}\xb2\xdc\xc7\x03\xc0\xe5\x00\xb6S\xca\x82';\x7b\xfa\xd8\x04]\x85\xa4p" # noqa: E501 self . hasher = new_hasher return new_hasher ( preimage )
Invoke the backend on - demand and check an expected hash result then replace this first run with the new hasher method . This is a bit of a hacky way to minimize overhead on hash calls after this first one .
142
45
248,513
def dirname ( path : Optional [ str ] ) -> Optional [ str ] : if path is not None : return os . path . dirname ( path )
Returns the directory component of a pathname and None if the argument is None
33
15
248,514
def basename ( path : Optional [ str ] ) -> Optional [ str ] : if path is not None : return os . path . basename ( path )
Returns the final component of a pathname and None if the argument is None
33
15
248,515
def normpath ( path : Optional [ str ] ) -> Optional [ str ] : if path is not None : return os . path . normpath ( path )
Normalizes the path returns None if the argument is None
33
11
248,516
def join_paths ( path1 : Optional [ str ] , path2 : Optional [ str ] ) -> Optional [ str ] : if path1 is not None and path2 is not None : return os . path . join ( path1 , path2 )
Joins two paths if neither of them is None
54
10
248,517
def stasher ( self ) : # nonlocal for python2 stashed = [ False ] clean = [ False ] def stash ( ) : if clean [ 0 ] or not self . repo . is_dirty ( submodules = False ) : clean [ 0 ] = True return if stashed [ 0 ] : return if self . change_count > 1 : message = 'stashing {0} changes' else : message = 'stashing {0} change' print ( colored ( message . format ( self . change_count ) , 'magenta' ) ) try : self . _run ( 'stash' ) except GitError as e : raise StashError ( stderr = e . stderr , stdout = e . stdout ) stashed [ 0 ] = True yield stash if stashed [ 0 ] : print ( colored ( 'unstashing' , 'magenta' ) ) try : self . _run ( 'stash' , 'pop' ) except GitError as e : raise UnstashError ( stderr = e . stderr , stdout = e . stdout )
A stashing contextmanager .
236
6
248,518
def checkout ( self , branch_name ) : try : find ( self . repo . branches , lambda b : b . name == branch_name ) . checkout ( ) except OrigCheckoutError as e : raise CheckoutError ( branch_name , details = e )
Checkout a branch by name .
56
7
248,519
def rebase ( self , target_branch ) : current_branch = self . repo . active_branch arguments = ( ( [ self . config ( 'git-up.rebase.arguments' ) ] or [ ] ) + [ target_branch . name ] ) try : self . _run ( 'rebase' , * arguments ) except GitError as e : raise RebaseError ( current_branch . name , target_branch . name , * * e . __dict__ )
Rebase to target branch .
109
6
248,520
def push ( self , * args , * * kwargs ) : stdout = six . b ( '' ) # Execute command cmd = self . git . push ( as_process = True , * args , * * kwargs ) # Capture output while True : output = cmd . stdout . read ( 1 ) sys . stdout . write ( output . decode ( 'utf-8' ) ) sys . stdout . flush ( ) stdout += output # Check for EOF if output == six . b ( "" ) : break # Wait for the process to quit try : cmd . wait ( ) except GitCommandError as error : # Add more meta-information to errors message = "'{0}' returned exit status {1}" . format ( ' ' . join ( str ( c ) for c in error . command ) , error . status ) raise GitError ( message , stderr = error . stderr , stdout = stdout ) return stdout . strip ( )
Push commits to remote
209
4
248,521
def change_count ( self ) : status = self . git . status ( porcelain = True , untracked_files = 'no' ) . strip ( ) if not status : return 0 else : return len ( status . split ( '\n' ) )
The number of changes in the working directory .
57
9
248,522
def uniq ( seq ) : seen = set ( ) return [ x for x in seq if str ( x ) not in seen and not seen . add ( str ( x ) ) ]
Return a copy of seq without duplicates .
39
9
248,523
def current_version ( ) : # Monkeypatch setuptools.setup so we get the verison number import setuptools version = [ None ] def monkey_setup ( * * settings ) : version [ 0 ] = settings [ 'version' ] old_setup = setuptools . setup setuptools . setup = monkey_setup import setup # setup.py reload ( setup ) setuptools . setup = old_setup return version [ 0 ]
Get the current version number from setup . py
94
9
248,524
def run ( version , quiet , no_fetch , push , * * kwargs ) : # pragma: no cover if version : if NO_DISTRIBUTE : print ( colored ( 'Please install \'git-up\' via pip in order to ' 'get version information.' , 'yellow' ) ) else : GitUp ( sparse = True ) . version_info ( ) return if quiet : sys . stdout = StringIO ( ) try : gitup = GitUp ( ) if push is not None : gitup . settings [ 'push.auto' ] = push # if arguments['--no-fetch'] or arguments['--no-f']: if no_fetch : gitup . should_fetch = False except GitError : sys . exit ( 1 ) # Error in constructor else : gitup . run ( )
A nicer git pull .
179
5
248,525
def run ( self ) : try : if self . should_fetch : self . fetch ( ) self . rebase_all_branches ( ) if self . with_bundler ( ) : self . check_bundler ( ) if self . settings [ 'push.auto' ] : self . push ( ) except GitError as error : self . print_error ( error ) # Used for test cases if self . testing : raise else : # pragma: no cover sys . exit ( 1 )
Run all the git - up stuff .
108
8
248,526
def fetch ( self ) : fetch_kwargs = { 'multiple' : True } fetch_args = [ ] if self . is_prune ( ) : fetch_kwargs [ 'prune' ] = True if self . settings [ 'fetch.all' ] : fetch_kwargs [ 'all' ] = True else : if '.' in self . remotes : self . remotes . remove ( '.' ) if not self . remotes : # Only local target branches, # `git fetch --multiple` will fail return fetch_args . append ( self . remotes ) try : self . git . fetch ( * fetch_args , * * fetch_kwargs ) except GitError as error : error . message = "`git fetch` failed" raise error
Fetch the recent refs from the remotes . Unless git - up . fetch . all is set to true all remotes with locally existent branches will be fetched .
164
36
248,527
def log ( self , branch , remote ) : log_hook = self . settings [ 'rebase.log-hook' ] if log_hook : if ON_WINDOWS : # pragma: no cover # Running a string in CMD from Python is not that easy on # Windows. Running 'cmd /C log_hook' produces problems when # using multiple statements or things like 'echo'. Therefore, # we write the string to a bat file and execute it. # In addition, we replace occurences of $1 with %1 and so forth # in case the user is used to Bash or sh. # If there are occurences of %something, we'll replace it with # %%something. This is the case when running something like # 'git log --pretty=format:"%Cred%h..."'. # Also, we replace a semicolon with a newline, because if you # start with 'echo' on Windows, it will simply echo the # semicolon and the commands behind instead of echoing and then # running other commands # Prepare log_hook log_hook = re . sub ( r'\$(\d+)' , r'%\1' , log_hook ) log_hook = re . sub ( r'%(?!\d)' , '%%' , log_hook ) log_hook = re . sub ( r'; ?' , r'\n' , log_hook ) # Write log_hook to an temporary file and get it's path with NamedTemporaryFile ( prefix = 'PyGitUp.' , suffix = '.bat' , delete = False ) as bat_file : # Don't echo all commands bat_file . file . write ( b'@echo off\n' ) # Run log_hook bat_file . file . write ( log_hook . encode ( 'utf-8' ) ) # Run bat_file state = subprocess . call ( [ bat_file . name , branch . name , remote . name ] ) # Clean up file os . remove ( bat_file . name ) else : # pragma: no cover # Run log_hook via 'shell -c' state = subprocess . call ( [ log_hook , 'git-up' , branch . name , remote . name ] , shell = True ) if self . testing : assert state == 0 , 'log_hook returned != 0'
Call a log - command if set by git - up . fetch . all .
503
16
248,528
def version_info ( self ) : # Retrive and show local version info package = pkg . get_distribution ( 'git-up' ) local_version_str = package . version local_version = package . parsed_version print ( 'GitUp version is: ' + colored ( 'v' + local_version_str , 'green' ) ) if not self . settings [ 'updates.check' ] : return # Check for updates print ( 'Checking for updates...' , end = '' ) try : # Get version information from the PyPI JSON API reader = codecs . getreader ( 'utf-8' ) details = json . load ( reader ( urlopen ( PYPI_URL ) ) ) online_version = details [ 'info' ] [ 'version' ] except ( HTTPError , URLError , ValueError ) : recent = True # To not disturb the user with HTTP/parsing errors else : recent = local_version >= pkg . parse_version ( online_version ) if not recent : # noinspection PyUnboundLocalVariable print ( '\rRecent version is: ' + colored ( 'v' + online_version , color = 'yellow' , attrs = [ 'bold' ] ) ) print ( 'Run \'pip install -U git-up\' to get the update.' ) else : # Clear the update line sys . stdout . write ( '\r' + ' ' * 80 + '\n' )
Tell what version we re running at and if it s up to date .
319
15
248,529
def load_config ( self ) : for key in self . settings : value = self . config ( key ) # Parse true/false if value == '' or value is None : continue # Not set by user, go on if value . lower ( ) == 'true' : value = True elif value . lower ( ) == 'false' : value = False elif value : pass # A user-defined string, store the value later self . settings [ key ] = value
Load the configuration from git config .
100
7
248,530
def check_bundler ( self ) : def get_config ( name ) : return name if self . config ( 'bundler.' + name ) else '' from pkg_resources import Requirement , resource_filename relative_path = os . path . join ( 'PyGitUp' , 'check-bundler.rb' ) bundler_script = resource_filename ( Requirement . parse ( 'git-up' ) , relative_path ) assert os . path . exists ( bundler_script ) , 'check-bundler.rb doesn\'t ' 'exist!' return_value = subprocess . call ( [ 'ruby' , bundler_script , get_config ( 'autoinstall' ) , get_config ( 'local' ) , get_config ( 'rbenv' ) ] ) if self . testing : assert return_value == 0 , 'Errors while executing check-bundler.rb'
Run the bundler check .
202
6
248,531
def opendocx ( file ) : mydoc = zipfile . ZipFile ( file ) xmlcontent = mydoc . read ( 'word/document.xml' ) document = etree . fromstring ( xmlcontent ) return document
Open a docx file return a document XML tree
49
10
248,532
def makeelement ( tagname , tagtext = None , nsprefix = 'w' , attributes = None , attrnsprefix = None ) : # Deal with list of nsprefix by making namespacemap namespacemap = None if isinstance ( nsprefix , list ) : namespacemap = { } for prefix in nsprefix : namespacemap [ prefix ] = nsprefixes [ prefix ] # FIXME: rest of code below expects a single prefix nsprefix = nsprefix [ 0 ] if nsprefix : namespace = '{%s}' % nsprefixes [ nsprefix ] else : # For when namespace = None namespace = '' newelement = etree . Element ( namespace + tagname , nsmap = namespacemap ) # Add attributes with namespaces if attributes : # If they haven't bothered setting attribute namespace, use an empty # string (equivalent of no namespace) if not attrnsprefix : # Quick hack: it seems every element that has a 'w' nsprefix for # its tag uses the same prefix for it's attributes if nsprefix == 'w' : attributenamespace = namespace else : attributenamespace = '' else : attributenamespace = '{' + nsprefixes [ attrnsprefix ] + '}' for tagattribute in attributes : newelement . set ( attributenamespace + tagattribute , attributes [ tagattribute ] ) if tagtext : newelement . text = tagtext return newelement
Create an element & return it
340
6
248,533
def heading ( headingtext , headinglevel , lang = 'en' ) : lmap = { 'en' : 'Heading' , 'it' : 'Titolo' } # Make our elements paragraph = makeelement ( 'p' ) pr = makeelement ( 'pPr' ) pStyle = makeelement ( 'pStyle' , attributes = { 'val' : lmap [ lang ] + str ( headinglevel ) } ) run = makeelement ( 'r' ) text = makeelement ( 't' , tagtext = headingtext ) # Add the text the run, and the run to the paragraph pr . append ( pStyle ) run . append ( text ) paragraph . append ( pr ) paragraph . append ( run ) # Return the combined paragraph return paragraph
Make a new heading return the heading element
162
8
248,534
def clean ( document ) : newdocument = document # Clean empty text and r tags for t in ( 't' , 'r' ) : rmlist = [ ] for element in newdocument . iter ( ) : if element . tag == '{%s}%s' % ( nsprefixes [ 'w' ] , t ) : if not element . text and not len ( element ) : rmlist . append ( element ) for element in rmlist : element . getparent ( ) . remove ( element ) return newdocument
Perform misc cleaning operations on documents . Returns cleaned document .
116
12
248,535
def findTypeParent ( element , tag ) : p = element while True : p = p . getparent ( ) if p . tag == tag : return p # Not found return None
Finds fist parent of element of the given type
38
10
248,536
def AdvSearch ( document , search , bs = 3 ) : # Compile the search regexp searchre = re . compile ( search ) matches = [ ] # Will match against searchels. Searchels is a list that contains last # n text elements found in the document. 1 < n < bs searchels = [ ] for element in document . iter ( ) : if element . tag == '{%s}t' % nsprefixes [ 'w' ] : # t (text) elements if element . text : # Add this element to searchels searchels . append ( element ) if len ( searchels ) > bs : # Is searchels is too long, remove first elements searchels . pop ( 0 ) # Search all combinations, of searchels, starting from # smaller up to bigger ones # l = search lenght # s = search start # e = element IDs to merge found = False for l in range ( 1 , len ( searchels ) + 1 ) : if found : break for s in range ( len ( searchels ) ) : if found : break if s + l <= len ( searchels ) : e = range ( s , s + l ) txtsearch = '' for k in e : txtsearch += searchels [ k ] . text # Searcs for the text in the whole txtsearch match = searchre . search ( txtsearch ) if match : matches . append ( match . group ( ) ) found = True return set ( matches )
Return set of all regex matches
329
6
248,537
def getdocumenttext ( document ) : paratextlist = [ ] # Compile a list of all paragraph (p) elements paralist = [ ] for element in document . iter ( ) : # Find p (paragraph) elements if element . tag == '{' + nsprefixes [ 'w' ] + '}p' : paralist . append ( element ) # Since a single sentence might be spread over multiple text elements, # iterate through each paragraph, appending all text (t) children to that # paragraphs text. for para in paralist : paratext = u'' # Loop through each paragraph for element in para . iter ( ) : # Find t (text) elements if element . tag == '{' + nsprefixes [ 'w' ] + '}t' : if element . text : paratext = paratext + element . text elif element . tag == '{' + nsprefixes [ 'w' ] + '}tab' : paratext = paratext + '\t' # Add our completed paragraph text to the list of paragraph text if not len ( paratext ) == 0 : paratextlist . append ( paratext ) return paratextlist
Return the raw text of a document as a list of paragraphs .
265
13
248,538
def wordrelationships ( relationshiplist ) : # Default list of relationships # FIXME: using string hack instead of making element #relationships = makeelement('Relationships', nsprefix='pr') relationships = etree . fromstring ( '<Relationships xmlns="http://schemas.openxmlformats.org/package/2006' '/relationships"></Relationships>' ) count = 0 for relationship in relationshiplist : # Relationship IDs (rId) start at 1. rel_elm = makeelement ( 'Relationship' , nsprefix = None , attributes = { 'Id' : 'rId' + str ( count + 1 ) , 'Type' : relationship [ 0 ] , 'Target' : relationship [ 1 ] } ) relationships . append ( rel_elm ) count += 1 return relationships
Generate a Word relationships file
179
6
248,539
def savedocx ( document , coreprops , appprops , contenttypes , websettings , wordrelationships , output , imagefiledict = None ) : if imagefiledict is None : warn ( 'Using savedocx() without imagefiledict parameter will be deprec' 'ated in the future.' , PendingDeprecationWarning ) assert os . path . isdir ( template_dir ) docxfile = zipfile . ZipFile ( output , mode = 'w' , compression = zipfile . ZIP_DEFLATED ) # Move to the template data path prev_dir = os . path . abspath ( '.' ) # save previous working dir os . chdir ( template_dir ) # Serialize our trees into out zip file treesandfiles = { document : 'word/document.xml' , coreprops : 'docProps/core.xml' , appprops : 'docProps/app.xml' , contenttypes : '[Content_Types].xml' , websettings : 'word/webSettings.xml' , wordrelationships : 'word/_rels/document.xml.rels' } for tree in treesandfiles : log . info ( 'Saving: %s' % treesandfiles [ tree ] ) treestring = etree . tostring ( tree , pretty_print = True ) docxfile . writestr ( treesandfiles [ tree ] , treestring ) # Add & compress images, if applicable if imagefiledict is not None : for imagepath , picrelid in imagefiledict . items ( ) : archivename = 'word/media/%s_%s' % ( picrelid , basename ( imagepath ) ) log . info ( 'Saving: %s' , archivename ) docxfile . write ( imagepath , archivename ) # Add & compress support files files_to_ignore = [ '.DS_Store' ] # nuisance from some os's for dirpath , dirnames , filenames in os . walk ( '.' ) : for filename in filenames : if filename in files_to_ignore : continue templatefile = join ( dirpath , filename ) archivename = templatefile [ 2 : ] log . info ( 'Saving: %s' , archivename ) docxfile . write ( templatefile , archivename ) log . info ( 'Saved new file to: %r' , output ) docxfile . close ( ) os . chdir ( prev_dir ) # restore previous working dir return
Save a modified document
548
4
248,540
def _depr ( fn , usage , stacklevel = 3 ) : warn ( '{0} is deprecated. Use {1} instead' . format ( fn , usage ) , stacklevel = stacklevel , category = DeprecationWarning )
Internal convenience function for deprecation warnings
51
8
248,541
def upsert ( self , key , value , cas = 0 , ttl = 0 , format = None , persist_to = 0 , replicate_to = 0 ) : return _Base . upsert ( self , key , value , cas = cas , ttl = ttl , format = format , persist_to = persist_to , replicate_to = replicate_to )
Unconditionally store the object in Couchbase .
79
10
248,542
def insert ( self , key , value , ttl = 0 , format = None , persist_to = 0 , replicate_to = 0 ) : return _Base . insert ( self , key , value , ttl = ttl , format = format , persist_to = persist_to , replicate_to = replicate_to )
Store an object in Couchbase unless it already exists .
69
11
248,543
def prepend ( self , key , value , cas = 0 , format = None , persist_to = 0 , replicate_to = 0 ) : return _Base . prepend ( self , key , value , cas = cas , format = format , persist_to = persist_to , replicate_to = replicate_to )
Prepend a string to an existing value in Couchbase .
68
12
248,544
def get ( self , key , ttl = 0 , quiet = None , replica = False , no_format = False ) : return _Base . get ( self , key , ttl = ttl , quiet = quiet , replica = replica , no_format = no_format )
Obtain an object stored in Couchbase by given key .
59
12
248,545
def touch ( self , key , ttl = 0 ) : return _Base . touch ( self , key , ttl = ttl )
Update a key s expiration time
29
6
248,546
def lock ( self , key , ttl = 0 ) : return _Base . lock ( self , key , ttl = ttl )
Lock and retrieve a key - value entry in Couchbase .
29
12
248,547
def unlock ( self , key , cas ) : return _Base . unlock ( self , key , cas = cas )
Unlock a Locked Key in Couchbase .
24
9
248,548
def remove ( self , key , cas = 0 , quiet = None , persist_to = 0 , replicate_to = 0 ) : return _Base . remove ( self , key , cas = cas , quiet = quiet , persist_to = persist_to , replicate_to = replicate_to )
Remove the key - value entry for a given key in Couchbase .
62
14
248,549
def counter ( self , key , delta = 1 , initial = None , ttl = 0 ) : return _Base . counter ( self , key , delta = delta , initial = initial , ttl = ttl )
Increment or decrement the numeric value of an item .
45
12
248,550
def mutate_in ( self , key , * specs , * * kwargs ) : # Note we don't verify the validity of the options. lcb does that for # us. sdflags = kwargs . pop ( '_sd_doc_flags' , 0 ) if kwargs . pop ( 'insert_doc' , False ) : sdflags |= _P . CMDSUBDOC_F_INSERT_DOC if kwargs . pop ( 'upsert_doc' , False ) : sdflags |= _P . CMDSUBDOC_F_UPSERT_DOC kwargs [ '_sd_doc_flags' ] = sdflags return super ( Bucket , self ) . mutate_in ( key , specs , * * kwargs )
Perform multiple atomic modifications within a document .
169
9
248,551
def lookup_in ( self , key , * specs , * * kwargs ) : return super ( Bucket , self ) . lookup_in ( { key : specs } , * * kwargs )
Atomically retrieve one or more paths from a document .
43
12
248,552
def retrieve_in ( self , key , * paths , * * kwargs ) : import couchbase . subdocument as SD return self . lookup_in ( key , * tuple ( SD . get ( x ) for x in paths ) , * * kwargs )
Atomically fetch one or more paths from a document .
57
12
248,553
def stats ( self , keys = None , keystats = False ) : if keys and not isinstance ( keys , ( tuple , list ) ) : keys = ( keys , ) return self . _stats ( keys , keystats = keystats )
Request server statistics .
51
4
248,554
def observe ( self , key , master_only = False ) : return _Base . observe ( self , key , master_only = master_only )
Return storage information for a key .
32
7
248,555
def endure ( self , key , persist_to = - 1 , replicate_to = - 1 , cas = 0 , check_removed = False , timeout = 5.0 , interval = 0.010 ) : # We really just wrap 'endure_multi' kv = { key : cas } rvs = self . endure_multi ( keys = kv , persist_to = persist_to , replicate_to = replicate_to , check_removed = check_removed , timeout = timeout , interval = interval ) return rvs [ key ]
Wait until a key has been distributed to one or more nodes
118
12
248,556
def endure_multi ( self , keys , persist_to = - 1 , replicate_to = - 1 , timeout = 5.0 , interval = 0.010 , check_removed = False ) : return _Base . endure_multi ( self , keys , persist_to = persist_to , replicate_to = replicate_to , timeout = timeout , interval = interval , check_removed = check_removed )
Check durability requirements for multiple keys
89
6
248,557
def remove_multi ( self , kvs , quiet = None ) : return _Base . remove_multi ( self , kvs , quiet = quiet )
Remove multiple items from the cluster
32
6
248,558
def counter_multi ( self , kvs , initial = None , delta = 1 , ttl = 0 ) : return _Base . counter_multi ( self , kvs , initial = initial , delta = delta , ttl = ttl )
Perform counter operations on multiple items
51
7
248,559
def rget ( self , key , replica_index = None , quiet = None ) : if replica_index is not None : return _Base . _rgetix ( self , key , replica = replica_index , quiet = quiet ) else : return _Base . _rget ( self , key , quiet = quiet )
Get an item from a replica node
68
7
248,560
def query ( self , design , view , use_devmode = False , * * kwargs ) : design = self . _mk_devmode ( design , use_devmode ) itercls = kwargs . pop ( 'itercls' , View ) return itercls ( self , design , view , * * kwargs )
Query a pre - defined MapReduce view passing parameters .
75
12
248,561
def n1ql_query ( self , query , * args , * * kwargs ) : if not isinstance ( query , N1QLQuery ) : query = N1QLQuery ( query ) itercls = kwargs . pop ( 'itercls' , N1QLRequest ) return itercls ( query , self , * args , * * kwargs )
Execute a N1QL query .
82
8
248,562
def analytics_query ( self , query , host , * args , * * kwargs ) : if not isinstance ( query , AnalyticsQuery ) : query = AnalyticsQuery ( query , * args , * * kwargs ) else : query . update ( * args , * * kwargs ) return couchbase . analytics . gen_request ( query , host , self )
Execute an Analytics query .
79
6
248,563
def search ( self , index , query , * * kwargs ) : itercls = kwargs . pop ( 'itercls' , _FTS . SearchRequest ) iterargs = itercls . mk_kwargs ( kwargs ) params = kwargs . pop ( 'params' , _FTS . Params ( * * kwargs ) ) body = _FTS . make_search_body ( index , query , params ) return itercls ( body , self , * * iterargs )
Perform full - text searches
113
6
248,564
def is_ssl ( self ) : mode = self . _cntl ( op = _LCB . LCB_CNTL_SSL_MODE , value_type = 'int' ) return mode & _LCB . LCB_SSL_ENABLED != 0
Read - only boolean property indicating whether SSL is used for this connection .
59
14
248,565
def flush ( self ) : path = '/pools/default/buckets/{0}/controller/doFlush' path = path . format ( self . bucket ) return self . _http_request ( type = _LCB . LCB_HTTP_TYPE_MANAGEMENT , path = path , method = _LCB . LCB_HTTP_METHOD_POST )
Clears the bucket s contents .
81
7
248,566
def map_add ( self , key , mapkey , value , create = False , * * kwargs ) : op = SD . upsert ( mapkey , value ) sdres = self . mutate_in ( key , op , * * kwargs ) return self . _wrap_dsop ( sdres )
Set a value for a key in a map .
69
10
248,567
def map_get ( self , key , mapkey ) : op = SD . get ( mapkey ) sdres = self . lookup_in ( key , op ) return self . _wrap_dsop ( sdres , True )
Retrieve a value from a map .
49
8
248,568
def map_remove ( self , key , mapkey , * * kwargs ) : op = SD . remove ( mapkey ) sdres = self . mutate_in ( key , op , * * kwargs ) return self . _wrap_dsop ( sdres )
Remove an item from a map .
60
7
248,569
def map_size ( self , key ) : # TODO: This should use get_count, but we need to check for compat # with server version (i.e. >= 4.6) first; otherwise it just # disconnects. rv = self . get ( key ) return len ( rv . value )
Get the number of items in the map .
68
9
248,570
def list_append ( self , key , value , create = False , * * kwargs ) : op = SD . array_append ( '' , value ) sdres = self . mutate_in ( key , op , * * kwargs ) return self . _wrap_dsop ( sdres )
Add an item to the end of a list .
66
10
248,571
def list_prepend ( self , key , value , create = False , * * kwargs ) : op = SD . array_prepend ( '' , value ) sdres = self . mutate_in ( key , op , * * kwargs ) return self . _wrap_dsop ( sdres )
Add an item to the beginning of a list .
68
10
248,572
def list_set ( self , key , index , value , * * kwargs ) : op = SD . replace ( '[{0}]' . format ( index ) , value ) sdres = self . mutate_in ( key , op , * * kwargs ) return self . _wrap_dsop ( sdres )
Sets an item within a list at a given position .
71
12
248,573
def set_add ( self , key , value , create = False , * * kwargs ) : op = SD . array_addunique ( '' , value ) try : sdres = self . mutate_in ( key , op , * * kwargs ) return self . _wrap_dsop ( sdres ) except E . SubdocPathExistsError : pass
Add an item to a set if the item does not yet exist .
80
14
248,574
def set_remove ( self , key , value , * * kwargs ) : while True : rv = self . get ( key ) try : ix = rv . value . index ( value ) kwargs [ 'cas' ] = rv . cas return self . list_remove ( key , ix , * * kwargs ) except E . KeyExistsError : pass except ValueError : return
Remove an item from a set .
89
7
248,575
def list_remove ( self , key , index , * * kwargs ) : return self . map_remove ( key , '[{0}]' . format ( index ) , * * kwargs )
Remove the element at a specific index from a list .
44
11
248,576
def queue_push ( self , key , value , create = False , * * kwargs ) : return self . list_prepend ( key , value , * * kwargs )
Add an item to the end of a queue .
40
10
248,577
def queue_pop ( self , key , * * kwargs ) : while True : try : itm = self . list_get ( key , - 1 ) except IndexError : raise E . QueueEmpty kwargs [ 'cas' ] = itm . cas try : self . list_remove ( key , - 1 , * * kwargs ) return itm except E . KeyExistsError : pass except IndexError : raise E . QueueEmpty
Remove and return the first item queue .
99
8
248,578
def _callback ( self , mres ) : try : rows = self . _process_payload ( self . raw . rows ) if rows : self . on_rows ( rows ) if self . raw . done : self . on_done ( ) finally : if self . raw . done : self . _clear ( )
This is invoked as the row callback . If rows is true then we are a row callback otherwise the request has ended and it s time to collect the other data
68
32
248,579
def create(cls, name, email, cb):
    """Build and persist the skeleton document for a new player."""
    player = cls(name, create_structure=True)
    player.value['email'] = email
    # 'add' would be the natural choice here, but this demo may be run
    # more than once and 'add' would then raise KeyExistsError, so
    # upsert is used instead.
    cb.upsert_multi(ItemSequence([player]))
    return player
Create the basic structure of a player
93
7
248,580
def _doc_rev(self, res):
    """Extract the revision id from the response's Couchbase meta header."""
    meta = json.loads(res.headers['X-Couchbase-Meta'])
    return meta['rev']
Returns the rev id from the header
46
7
248,581
def design_create(self, name, ddoc, use_devmode=True, syncwait=0):
    """Store a design document, optionally waiting for it to be indexed.

    ``ddoc`` may be a dict or a JSON string; it is normalized, given its
    ``_id``, and PUT to the view endpoint.
    """
    name = self._cb._mk_devmode(name, use_devmode)
    fqname = "_design/{0}".format(name)
    body = ddoc if isinstance(ddoc, dict) else json.loads(ddoc)
    # Copy so the caller's dict is not mutated when we inject '_id'.
    body = dict(body, _id=fqname)
    payload = json.dumps(body)
    existing = None
    if syncwait:
        # Snapshot any current revision so the poller can tell when the
        # new one has replaced it.
        try:
            existing = self.design_get(name, use_devmode=False)
        except CouchbaseError:
            pass
    ret = self._cb._http_request(type=_LCB.LCB_HTTP_TYPE_VIEW,
                                 path=fqname,
                                 method=_LCB.LCB_HTTP_METHOD_PUT,
                                 post_data=payload,
                                 content_type="application/json")
    self._design_poll(name, 'add', existing, syncwait,
                      use_devmode=use_devmode)
    return ret
Store a design document
239
4
248,582
def design_get(self, name, use_devmode=True):
    """Fetch a design document by name via the view HTTP endpoint."""
    name = self._mk_devmode(name, use_devmode)
    return self._http_request(type=_LCB.LCB_HTTP_TYPE_VIEW,
                              path="_design/" + name,
                              method=_LCB.LCB_HTTP_METHOD_GET,
                              content_type="application/json")
Retrieve a design document
93
5
248,583
def design_delete(self, name, use_devmode=True, syncwait=0):
    """Remove a design document, optionally waiting for the change to settle."""
    name = self._mk_devmode(name, use_devmode)
    existing = None
    if syncwait:
        # Remember the current document so the poller can detect removal.
        try:
            existing = self.design_get(name, use_devmode=False)
        except CouchbaseError:
            pass
    ret = self._http_request(type=_LCB.LCB_HTTP_TYPE_VIEW,
                             path="_design/" + name,
                             method=_LCB.LCB_HTTP_METHOD_DELETE)
    self._design_poll(name, 'del', existing, syncwait)
    return ret
Delete a design document
140
4
248,584
def design_list(self):
    """List every design document in the current bucket.

    The raw management payload is reshaped into ``{ddoc_id: ddoc_json}``.
    """
    ret = self._http_request(
        type=_LCB.LCB_HTTP_TYPE_MANAGEMENT,
        path="/pools/default/buckets/{0}/ddocs".format(self._cb.bucket),
        method=_LCB.LCB_HTTP_METHOD_GET)
    by_id = {row['doc']['meta']['id']: row['doc']['json']
             for row in ret.value['rows']}
    # 'value' is a read-only attribute, so mutate the dict in place
    # rather than assigning a new one.
    ret.value.clear()
    ret.value.update(by_id)
    return ret
List all design documents for the current bucket .
163
9
248,585
# Create a N1QL index. Recognized kwargs: 'defer' (build later),
# 'ignore_exists', 'primary', 'fields' (required unless primary), and
# 'condition' (partial-index predicate, invalid for primary indexes).
# Any other kwarg raises TypeError. NOTE(review): the nesting of the
# trailing 'if cond' / 'info.condition = cond' statements was flattened
# onto one line; presumably the assignment sits inside 'if cond' -- confirm
# against the original multi-line source.
def n1ql_index_create ( self , ix , * * kwargs ) : defer = kwargs . pop ( 'defer' , False ) ignore_exists = kwargs . pop ( 'ignore_exists' , False ) primary = kwargs . pop ( 'primary' , False ) fields = kwargs . pop ( 'fields' , [ ] ) cond = kwargs . pop ( 'condition' , None ) if kwargs : raise TypeError ( 'Unknown keyword arguments' , kwargs ) info = self . _mk_index_def ( ix , primary ) if primary and fields : raise TypeError ( 'Cannot create primary index with explicit fields' ) elif not primary and not fields : raise ValueError ( 'Fields required for non-primary index' ) if fields : info . fields = fields if primary and info . name is N1QL_PRIMARY_INDEX : del info . name if cond : if primary : raise ValueError ( 'cannot specify condition for primary index' ) info . condition = cond options = { 'ignore_exists' : ignore_exists , 'defer' : defer } # Now actually create the indexes return IxmgmtRequest ( self . _cb , 'create' , info , * * options ) . execute ( )
Create an index for use with N1QL .
286
10
248,586
def n1ql_index_create_primary(self, defer=False, ignore_exists=False):
    """Create the bucket's primary N1QL index."""
    return self.n1ql_index_create(
        '', primary=True, defer=defer, ignore_exists=ignore_exists)
Create the primary index on the bucket .
56
8
248,587
def n1ql_index_drop(self, ix, primary=False, **kwargs):
    """Drop a (possibly primary) N1QL index from the cluster."""
    defn = self._mk_index_def(ix, primary)
    return IxmgmtRequest(self._cb, 'drop', defn, **kwargs).execute()
Delete an index from the cluster .
69
7
248,588
def n1ql_index_build_deferred(self, other_buckets=False):
    """Kick off building of indexes previously created with defer=True.

    By default the build is limited to this bucket's keyspace.
    """
    template = N1qlIndex()
    if not other_buckets:
        template.keyspace = self._cb.bucket
    return IxmgmtRequest(self._cb, 'build', template).execute()
Instruct the server to begin building any previously deferred index definitions .
70
12
248,589
def n1ql_index_watch(self, indexes, timeout=30, interval=0.2,
                     watch_primary=False):
    """Block until the given indexes have finished building.

    ``timeout`` and ``interval`` are in seconds and converted to the
    microseconds the underlying request expects.
    """
    to_watch = [N1qlIndex.from_any(entry, self._cb.bucket)
                for entry in indexes]
    if watch_primary:
        to_watch.append(
            N1qlIndex.from_any(N1QL_PRIMARY_INDEX, self._cb.bucket))
    poll_opts = {'timeout_us': int(timeout * 1000000),
                 'interval_us': int(interval * 1000000)}
    return IxmgmtRequest(self._cb, 'watch', to_watch, **poll_opts).execute()
Await completion of index building
161
6
248,590
def _set_range_common(self, k_sugar, k_start, k_end, value):
    """Translate a client-side range shorthand into its server-side
    start/end keys.

    Rejects mixing the sugar key with the explicit start/end keys, and
    clears both endpoints when the value is empty/UNSPEC.
    """
    if not isinstance(value, (list, tuple, _Unspec)):
        raise ArgumentError.pyexc(
            "Range specification for {0} must be a list, tuple or UNSPEC"
            .format(k_sugar))
    start_given = self._user_options.get(k_start, UNSPEC) is not UNSPEC
    end_given = self._user_options.get(k_end, UNSPEC) is not UNSPEC
    if start_given or end_given:
        raise ArgumentError.pyexc(
            "Cannot specify {0} with either {1} or {2}"
            .format(k_sugar, k_start, k_end))
    if not value:
        # Empty value clears both endpoints.
        self._set_common(k_start, UNSPEC, set_user=False)
        self._set_common(k_end, UNSPEC, set_user=False)
        self._user_options[k_sugar] = UNSPEC
        return
    if len(value) not in (1, 2):
        raise ArgumentError.pyexc(
            "Range specification must have one or two elements", value)
    value = value[::]
    if len(value) == 1:
        # A lone element means "from here, open-ended".
        value.append(UNSPEC)
    for param, position in ((k_start, 0), (k_end, 1)):
        self._set_common(param, value[position], set_user=False)
    self._user_options[k_sugar] = value
Checks to see if the client-side convenience key is present and, if so, converts the sugar convenience key into its real server-side equivalents.
338
29
248,591
def update(self, copy=False, **params):
    """Assign several options at once; returns the object for chaining.

    With ``copy=True`` the assignments happen on (and are returned as) a
    deep copy, leaving this instance untouched.
    """
    target = deepcopy(self) if copy else self
    for opt, val in params.items():
        if hasattr(target, opt):
            setattr(target, opt, val)
        else:
            if not target.unrecognized_ok:
                raise ArgumentError.pyexc("Unknown option", opt)
            target._set_common(opt, val)
    return target
Chained assignment operator .
92
5
248,592
# Coerce *params* into a Query: an existing Query is deep-copied, a dict
# becomes constructor options (for the QueryBase class itself, the presence
# of spatial keys selects SpatialQuery vs ViewQuery), and a string is kept
# verbatim as the pre-encoded base query. Anything else raises
# ArgumentError. NOTE(review): the branch nesting inside the dict case was
# flattened onto one line; the attachment of the final 'return ViewQuery'
# else-clause should be confirmed against the original multi-line source.
def from_any ( cls , params , * * ctor_opts ) : if isinstance ( params , cls ) : return deepcopy ( params ) elif isinstance ( params , dict ) : ctor_opts . update ( * * params ) if cls is QueryBase : if ( 'bbox' in params or 'start_range' in params or 'end_range' in params ) : return SpatialQuery ( * * ctor_opts ) else : return ViewQuery ( * * ctor_opts ) elif isinstance ( params , basestring ) : ret = cls ( ) ret . _base_str = params return ret else : raise ArgumentError . pyexc ( "Params must be Query, dict, or string" )
Creates a new Query object from input .
167
9
248,593
def encoded(self):
    """Return the query in encoded form, computing and caching it lazily.

    Any pre-set base string is prepended with ``&``.
    """
    if not self._encoded:
        self._encoded = self._encode()
    if self._base_str:
        return '&'.join((self._base_str, self._encoded))
    return self._encoded
Returns an encoded form of the query
65
7
248,594
def registerDeferred(self, event, d):
    """Schedule deferred *d* to fire when *event* occurs.

    Raises ``ValueError`` for an unknown event type.
    """
    try:
        self._evq[event].schedule(d)
    except KeyError:
        raise ValueError("No such event type", event)
Register a defer to be fired at the firing of a specific event .
44
14
248,595
def queryEx(self, viewcls, *args, **kwargs):
    """Issue a view query, streaming its events into a *viewcls* instance.

    Starting is delayed until the connection is established if needed.
    """
    kwargs['itercls'] = viewcls
    request = super(AsyncBucket, self).query(*args, **kwargs)
    if self.connected:
        request.start()
    else:
        self.connect().addCallback(lambda _: request.start())
    return request
Query a view with the viewcls instance receiving events of the query as they arrive .
90
18
248,596
def n1qlQueryEx(self, cls, *args, **kwargs):
    """Execute a N1QL statement, handing rows to a custom *cls* handler.

    Starting is delayed until the connection is established if needed.
    """
    kwargs['itercls'] = cls
    request = super(AsyncBucket, self).n1ql_query(*args, **kwargs)
    if self.connected:
        request.start()
    else:
        self.connect().addCallback(lambda _: request.start())
    return request
Execute a N1QL statement providing a custom handler for rows .
95
14
248,597
def n1qlQueryAll(self, *args, **kwargs):
    """Run a N1QL query, firing a Deferred with all rows once complete.

    If not yet connected, the whole call is retried after connecting.
    """
    if not self.connected:
        retry = lambda _: self.n1qlQueryAll(*args, **kwargs)
        return self.connect().addCallback(retry)
    kwargs['itercls'] = BatchedN1QLRequest
    request = super(RawBucket, self).n1ql_query(*args, **kwargs)
    request.start()
    return request._getDeferred()
Execute a N1QL query retrieving all rows .
118
11
248,598
def _wrap(self, meth, *args, **kwargs):
    """Invoke *meth* now if connected; otherwise reschedule this call
    for after the connection is established. Returns a Deferred either way."""
    if not self.connected:
        return self._connectSchedule(self._wrap, meth, *args, **kwargs)
    return self.defer(meth(self, *args, **kwargs))
Calls a given method with the appropriate arguments or defers such a call until the instance has been connected
72
21
248,599
def get_decode_format(flags):
    """Map stored item *flags* to a ``(format, recognized)`` pair.

    Common-format bits take precedence over legacy bits.  A flag value
    that matches neither table falls back to ``FMT_BYTES`` with
    ``recognized=False``.
    """
    c_flags = flags & FMT_COMMON_MASK
    l_flags = flags & FMT_LEGACY_MASK
    if c_flags:
        if c_flags in COMMON_FORMATS:
            return COMMON2UNIFIED[c_flags], True
        return FMT_BYTES, False
    # No common bits set: fall back to the legacy flag table.
    if l_flags in LEGACY_FORMATS:
        return LEGACY2UNIFIED[l_flags], True
    return FMT_BYTES, False
Returns a tuple of (format, recognized).
119
6