idx int64 0 252k | question stringlengths 48 5.28k | target stringlengths 5 1.23k |
|---|---|---|
3,500 | def term ( self , term , ** kwargs ) : if isinstance ( term , ( list , tuple ) ) : for t in term : self . term ( t , ** kwargs ) else : self . clause ( str ( term ) , ** kwargs ) return self | Adds a term to the current query creating a Clause and adds it to the list of clauses making up this Query . |
3,501 | def is_negated ( self ) : return all ( clause . presence == QueryPresence . PROHIBITED for clause in self . clauses ) | A negated query is one in which every clause has a presence of prohibited . These queries require some special processing to return the expected results . |
3,502 | def send_templated_email ( recipients , template_path , context = None , from_email = settings . DEFAULT_FROM_EMAIL , fail_silently = False , extra_headers = None ) : recipient_pks = [ r . pk for r in recipients if isinstance ( r , get_user_model ( ) ) ] recipient_emails = [ e for e in recipients if not isinstance ( e ... | recipients can be either a list of emails or a list of users if it is users the system will change to the language that the user has set as their mother tongue |
3,503 | def remove_duplicates ( seq ) : last_boundary = False for char in seq : if char == '\x00' : if not last_boundary : last_boundary = True yield char else : last_boundary = False yield char | Removes duplicate boundary token characters from the given character iterable . |
3,504 | def pretty_print_str ( self ) : retval = '' todo = [ self . root ] while todo : current = todo . pop ( ) for char in reversed ( sorted ( current . keys ( ) ) ) : todo . append ( current [ char ] ) indent = ' ' * ( current . depth * 2 ) retval += indent + current . __unicode__ ( ) + '\n' return retval . rstrip ( '\n' ) | Create a string to pretty - print this trie to standard output . |
3,505 | def _reset_suffix_links ( self ) : self . _suffix_links_set = False for current , _parent in self . dfs ( ) : current . suffix = None current . dict_suffix = None current . longest_prefix = None | Reset all suffix links in all nodes in this trie . |
3,506 | def _set_suffix_links ( self ) : self . _suffix_links_set = True for current , parent in self . bfs ( ) : if parent is None : continue current . longest_prefix = parent . longest_prefix if parent . has_value : current . longest_prefix = parent if current . has_suffix : continue suffix = parent while True : if not suffi... | Sets all suffix links in all nodes in this trie . |
3,507 | def greedy_replace ( self , seq ) : if not self . _suffix_links_set : self . _set_suffix_links ( ) current = self . root buffered = '' outstr = '' for char in seq : while char not in current : if current . has_dict_suffix : current = current . dict_suffix outstr += buffered [ : - current . depth ] outstr += current . v... | Greedily matches strings in seq and replaces them with their node values . |
3,508 | def _write_mo ( mo ) : classNotFound = False if ( UcsUtils . FindClassIdInMoMetaIgnoreCase ( mo . classId ) == None ) : classNotFound = True tabsize = 8 outstr = "\n" if classNotFound : outstr += "Managed Object\t\t\t:\t" + str ( UcsUtils . WordU ( mo . classId ) ) + "\n" else : outstr += "Managed Object\t\t\t:\t" + st... | Method to return string representation of a managed object . |
3,509 | def WriteObject ( moList ) : from Ucs import Dn from UcsHandle import UcsMoDiff tabsize = 8 if ( isinstance ( moList , _GenericMO ) == True ) : print str ( moList ) elif ( isinstance ( moList , ExternalMethod ) == True ) : if ( hasattr ( moList , "OutConfigs" ) == True ) : for child in moList . OutConfigs . GetChild ( ... | Writes the managed object on the terminal in form of key value pairs . |
3,510 | def childWriteXml ( self , w , option ) : ch = [ ] for c in self . child : ch . append ( c . WriteXml ( w , option ) ) return ch | Method writes the xml representation for the object . |
3,511 | def setattr ( self , key , value ) : if ( UcsUtils . FindClassIdInMoMetaIgnoreCase ( self . classId ) != None ) : if ( key in _ManagedObjectMeta [ self . classId ] ) : propMeta = UcsUtils . GetUcsPropertyMeta ( self . classId , key ) if ( propMeta . ValidatePropertyValue ( value ) == False ) : return False if ( propMet... | This method sets attribute of a Managed Object . |
3,512 | def getattr ( self , key ) : if ( ( key == "classId" ) and ( self . __dict__ . has_key ( key ) ) ) : return self . __dict__ [ key ] if UcsUtils . FindClassIdInMoMetaIgnoreCase ( self . classId ) : if self . __dict__ . has_key ( key ) : if key in _ManagedObjectMeta [ self . classId ] : return self . __dict__ [ key ] els... | This method gets attribute value of a Managed Object . |
3,513 | def MarkDirty ( self ) : if ( ( UcsUtils . FindClassIdInMoMetaIgnoreCase ( self . classId ) == None ) and ( not self . IsDirty ( ) ) ) : self . dirtyMask = ManagedObject . DUMMYDIRTY else : self . dirtyMask = self . propMoMeta . mask | This method marks the managed object dirty . |
3,514 | def MakeRn ( self ) : rnPattern = self . propMoMeta . rn for prop in re . findall ( "\[([^\]]*)\]" , rnPattern ) : if prop in UcsUtils . GetUcsPropertyMetaAttributeList ( self . classId ) : if ( self . getattr ( prop ) != None ) : rnPattern = re . sub ( '\[%s\]' % prop , '%s' % self . getattr ( prop ) , rnPattern ) els... | This method returns the Rn for a managed object . |
3,515 | def LoadFromXml ( self , node , handle ) : self . SetHandle ( handle ) if node . hasAttributes ( ) : attributes = node . attributes attCount = len ( attributes ) for i in range ( attCount ) : attNode = attributes . item ( i ) attr = UcsUtils . WordU ( attNode . localName ) if ( UcsUtils . FindClassIdInMoMetaIgnoreCase ... | Method updates the object from the xml representation of the managed object . |
3,516 | def setattr ( self , key , value ) : if key in _MethodFactoryMeta [ self . classId ] : self . __dict__ [ key ] = value elif key == 'errorCode' : self . errorCode = value elif key == 'errorDescr' : self . errorDescr = value elif key == 'invocationResult' : self . invocationResult = value elif key == 'response' : self . ... | This method sets the attribute of external method object . |
3,517 | def getattr ( self , key ) : if key in _MethodFactoryMeta [ self . classId ] : return self . __dict__ [ key ] else : return None | This method gets the attribute value of external method object . |
3,518 | def getErrorResponse ( self , errorCode , errorDescr ) : self . errorCode = errorCode self . errorDescr = errorDescr self . response = "yes" return self | This method sets error attributes of an external method object . |
3,519 | def GetUcsPropertyMeta ( classId , key ) : if classId in _ManagedObjectMeta : if key in _ManagedObjectMeta [ classId ] : return _ManagedObjectMeta [ classId ] [ key ] return None | Method returns the property meta of the provided key for the given classId . |
3,520 | def GetUcsMethodMeta ( classId , key ) : if classId in _MethodFactoryMeta : if key in _MethodFactoryMeta [ classId ] : return _MethodFactoryMeta [ classId ] [ key ] return None | Method returns the method meta of the ExternalMethod . |
3,521 | def GetUcsPropertyMetaAttributeList ( classId ) : if classId in _ManagedObjectMeta : attrList = _ManagedObjectMeta [ classId ] . keys ( ) attrList . remove ( "Meta" ) return attrList if classId in _MethodFactoryMeta : attrList = _MethodFactoryMeta [ classId ] . keys ( ) attrList . remove ( "Meta" ) return attrList nci ... | Method returns the class meta . |
3,522 | def IsPropertyInMetaIgnoreCase ( classId , key ) : if classId in _ManagedObjectMeta : for prop in _ManagedObjectMeta [ classId ] : if ( prop . lower ( ) == key . lower ( ) ) : return _ManagedObjectMeta [ classId ] [ prop ] if classId in _MethodFactoryMeta : for prop in _MethodFactoryMeta [ classId ] : if ( prop . lower... | Method returns the property meta of the provided key for the given classId . Given key is case insensitive . |
3,523 | def CheckRegistryKey ( javaKey ) : from _winreg import ConnectRegistry , HKEY_LOCAL_MACHINE , OpenKey , QueryValueEx path = None try : aReg = ConnectRegistry ( None , HKEY_LOCAL_MACHINE ) rk = OpenKey ( aReg , javaKey ) for i in range ( 1024 ) : currentVersion = QueryValueEx ( rk , "CurrentVersion" ) if currentVersion ... | Method checks for the java in the registry entries . |
3,524 | def GetJavaInstallationPath ( ) : import os , platform if platform . system ( ) == "Linux" : path = os . environ . get ( 'JAVA_HOME' ) if not path : raise UcsValidationException ( "Please make sure JAVA is installed and variable JAVA_HOME is set properly." ) else : path = os . path . join ( path , 'bin' ) path = os . p... | Method returns the java installation path in the windows or Linux environment . |
3,525 | def DownloadFile ( hUcs , source , destination ) : import urllib2 from sys import stdout from time import sleep httpAddress = "%s/%s" % ( hUcs . Uri ( ) , source ) file_name = httpAddress . split ( '/' ) [ - 1 ] req = urllib2 . Request ( httpAddress ) req . add_header ( 'Cookie' , 'ucsm-cookie=%s' % ( hUcs . _cookie ) ... | Method provides the functionality to download file from the UCS . This method is used in BackupUcs and GetTechSupport to download the files from the Ucs . |
3,526 | def GetSyncMoConfigFilePath ( ) : return os . path . join ( os . path . join ( os . path . dirname ( __file__ ) , "resources" ) , "SyncMoConfig.xml" ) | Method returns the path of SyncMoConfig . xml file . |
3,527 | def GetSyncMoConfig ( ConfigDoc ) : moConfigMap = { } configList = ConfigDoc . getElementsByTagName ( "mo" ) for moConfigNode in configList : classId = None noun = None version = None actionVersion = None action = None ignoreReason = None status = None excludeList = None if moConfigNode . hasAttribute ( "classid" ) : c... | Internal support method for SyncManagedObject . |
3,528 | def Expandkey ( key , clen ) : import sha from string import join from array import array blocks = ( clen + 19 ) / 20 xkey = [ ] seed = key for i in xrange ( blocks ) : seed = sha . new ( key + seed ) . digest ( ) xkey . append ( seed ) j = join ( xkey , '' ) return array ( 'L' , j ) | Internal method supporting encryption and decryption functionality . |
3,529 | def EncryptPassword ( password , key ) : from time import time from array import array import hmac import sha import os import base64 H = UcsUtils . GetShaHash uhash = H ( ',' . join ( str ( x ) for x in [ `time()` , `os.getpid()` , `len(password)` , password , key ] ) ) [ : 16 ] k_enc , k_auth = H ( 'enc' + key + uhas... | Encrypts the password using the given key . |
3,530 | def DecryptPassword ( cipher , key ) : import base64 import hmac import sha from array import array H = UcsUtils . GetShaHash cipher = cipher + "\n" cipher = base64 . decodestring ( cipher ) n = len ( cipher ) - 16 - 8 uhash = cipher [ : 16 ] passwordStream = cipher [ 16 : - 8 ] + "0000" [ n & 3 : ] auth = cipher [ - 8... | Decrypts the password using the given key with which the password was encrypted first . |
3,531 | def LoadFromXml ( self , node ) : import os self . classId = node . localName metaClassId = UcsUtils . FindClassIdInMoMetaIgnoreCase ( self . classId ) if metaClassId : self . classId = metaClassId if node . hasAttribute ( NamingPropertyId . DN ) : self . dn = node . getAttribute ( NamingPropertyId . DN ) if self . dn ... | Method updates the object from the xml . |
3,532 | def WriteXml ( self , w , option , elementName = None ) : if elementName == None : x = w . createElement ( self . classId ) else : x = w . createElement ( elementName ) for prop in self . __dict__ [ 'properties' ] : x . setAttribute ( UcsUtils . WordL ( prop ) , self . __dict__ [ 'properties' ] [ prop ] ) x_child = sel... | Method writes the xml representation of the generic managed object . |
3,533 | def ToManagedObject ( self ) : from Ucs import ClassFactory cln = UcsUtils . WordU ( self . classId ) mo = ClassFactory ( cln ) if mo and ( isinstance ( mo , ManagedObject ) == True ) : metaClassId = UcsUtils . FindClassIdInMoMetaIgnoreCase ( self . classId ) for property in self . properties : if UcsUtils . WordU ( pr... | Method creates and returns an object of ManagedObject class using the classId and information from the Generic managed object . |
3,534 | def FromManagedObject ( self ) : import os if ( isinstance ( self . mo , ManagedObject ) == True ) : self . classId = self . mo . classId if self . mo . getattr ( 'Dn' ) : self . dn = self . mo . getattr ( 'Dn' ) if self . mo . getattr ( 'Rn' ) : self . rn = self . mo . getattr ( 'Rn' ) elif self . dn : self . rn = os ... | Method creates and returns an object of _GenericMO class using the classId and other information from the managed object . |
3,535 | def GetChildClassId ( self , classId ) : childList = [ ] for ch in self . child : if ch . classId . lower ( ) == classId . lower ( ) : childList . append ( ch ) return childList | Method extracts and returns the child object list same as the given classId |
3,536 | def _total_seconds ( t ) : return sum ( [ int ( t . days * 86400 + t . seconds ) , int ( round ( t . microseconds / 1000000.0 ) ) ] ) | Takes a datetime . timedelta object and returns the delta in seconds . |
3,537 | def day ( t , now = None , format = '%B %d' ) : t1 = _to_date ( t ) t2 = _to_date ( now or datetime . datetime . now ( ) ) diff = t1 - t2 secs = _total_seconds ( diff ) days = abs ( diff . days ) if days == 0 : return _ ( 'today' ) elif days == 1 : if secs < 0 : return _ ( 'yesterday' ) else : return _ ( 'tomorrow' ) e... | Date delta compared to t . You can override now to specify what date to compare to . |
3,538 | def duration ( t , now = None , precision = 1 , pad = ', ' , words = None , justnow = datetime . timedelta ( seconds = 10 ) ) : if words is None : words = precision == 1 t1 = _to_datetime ( t ) t2 = _to_datetime ( now or datetime . datetime . now ( ) ) if t1 < t2 : format = _ ( '%s ago' ) else : format = _ ( '%s from n... | Time delta compared to t . You can override now to specify what time to compare to . |
3,539 | def search ( self , query_string ) : query = self . create_query ( ) parser = QueryParser ( query_string , query ) parser . parse ( ) return self . query ( query ) | Performs a search against the index using lunr query syntax . |
3,540 | def create_query ( self , fields = None ) : if fields is None : return Query ( self . fields ) non_contained_fields = set ( fields ) - set ( self . fields ) if non_contained_fields : raise BaseLunrException ( "Fields {} are not part of the index" , non_contained_fields ) return Query ( fields ) | Convenience method to create a Query with the Index s fields . |
3,541 | def load ( cls , serialized_index ) : from lunr import __TARGET_JS_VERSION__ if isinstance ( serialized_index , basestring ) : serialized_index = json . loads ( serialized_index ) if serialized_index [ "version" ] != __TARGET_JS_VERSION__ : logger . warning ( "Version mismatch when loading serialized index. " "Current ... | Load a serialized index |
3,542 | def configure ( logstash_host = None , logstash_port = None , logdir = None ) : if not ( logstash_host or logstash_port or logdir ) : raise ValueError ( 'you must specify at least one parameter' ) config . logstash . host = logstash_host or config . logstash . host config . logstash . port = logstash_port or config . l... | Configuration settings . |
3,543 | def new_logger ( name ) : log = get_task_logger ( name ) handler = logstash . LogstashHandler ( config . logstash . host , config . logstash . port ) log . addHandler ( handler ) create_logdir ( config . logdir ) handler = TimedRotatingFileHandler ( '%s.json' % join ( config . logdir , name ) , when = 'midnight' , utc ... | Return new logger which will log both to logstash and to file in JSON format . |
3,544 | def _zmq_socket_context ( context , socket_type , bind_endpoints ) : socket = context . socket ( socket_type ) try : for endpoint in bind_endpoints : try : socket . bind ( endpoint ) except Exception : _logger . fatal ( "Could not bind to '%s'." , endpoint ) raise yield socket finally : socket . close ( ) | A ZeroMQ socket context that both constructs a socket and closes it . |
3,545 | def _get_with_fallback ( config , section , option , fallback ) : exists = ( config . has_section ( section ) and config . has_option ( section , option ) ) if not exists : return fallback else : return config . get ( section , option ) | Get a configuration value using fallback for missing values . |
3,546 | def run ( options , exit_codeword = None ) : QUERY_ENDP_OPT = 'query-bind-endpoint' STREAM_ENDP_OPT = 'streaming-bind-endpoint' ZMQ_NTHREADS = "zmq-nthreads" if not options . has_section ( config . DEFAULT_SECTION ) : msg = "Missing default section, `{0}`." fmsg = msg . format ( config . DEFAULT_SECTION ) raise config ... | Actually execute the program . |
3,547 | def main ( argv = None ) : parser = argparse . ArgumentParser ( description = 'Event storage and event proxy.' , usage = '%(prog)s <configfile>' ) parser . add_argument ( '--exit-codeword' , metavar = "MSG" , dest = "exit_message" , default = None , help = "An incoming message that makes" " Rewind quit. Used for testin... | Entry point for Rewind . |
3,548 | def generate ( self ) : key = self . _propose_new_key ( ) while self . key_exists ( key ) : _logger . warning ( 'Previous candidate was used.' ' Regenerating another...' ) key = self . _propose_new_key ( ) return key | Generate a new string and return it . |
3,549 | def _handle_one_message ( self ) : result = True requesttype = self . query_socket . recv ( ) if requesttype == b"PUBLISH" : self . _handle_incoming_event ( ) elif requesttype == b"QUERY" : self . _handle_event_query ( ) elif ( self . exit_message is not None and requesttype == self . exit_message ) : _logger . warn ( ... | Handle one single incoming message on any socket . |
3,550 | def _handle_unknown_command ( self ) : while self . query_socket . getsockopt ( zmq . RCVMORE ) : self . query_socket . recv ( ) self . query_socket . send ( b"ERROR Unknown request type" ) | Handle an unknown RES command . |
3,551 | def _handle_event_query ( self ) : assert self . query_socket . getsockopt ( zmq . RCVMORE ) fro = self . query_socket . recv ( ) . decode ( ) assert self . query_socket . getsockopt ( zmq . RCVMORE ) to = self . query_socket . recv ( ) . decode ( ) assert not self . query_socket . getsockopt ( zmq . RCVMORE ) _logger ... | Handle an incoming event query . |
3,552 | def _handle_incoming_event ( self ) : eventstr = self . query_socket . recv ( ) newid = self . id_generator . generate ( ) assert newid not in ( b"QUERY" , b"PUBLISH" ) , "Generated ID must not be part of req/rep vocabulary." assert not newid . startswith ( "ERROR" ) , "Generated ID must not be part of req/rep vocabula... | Handle an incoming event . |
3,553 | def idf ( posting , document_count ) : documents_with_term = 0 for field_name in posting : if field_name == "_index" : continue documents_with_term += len ( posting [ field_name ] . keys ( ) ) x = ( document_count - documents_with_term + 0.5 ) / ( documents_with_term + 0.5 ) return math . log ( 1 + abs ( x ) ) | A function to calculate the inverse document frequency for a posting . This is shared between the builder and the index . |
3,554 | def check_config_options ( _class , required_options , optional_options , options ) : for opt in required_options : if opt not in options : msg = "Required option missing: {0}" raise ConfigurationError ( msg . format ( opt ) ) for opt in options : if opt not in ( required_options + optional_options ) : msg = "Unknown c... | Helper method to check options . |
3,555 | def accessed ( filename ) : if isinstance ( filename , file ) : filename = filename . name return duration ( os . stat ( filename ) [ stat . ST_ATIME ] ) | Retrieve how long ago a file has been accessed . |
3,556 | def created ( filename ) : if isinstance ( filename , file ) : filename = filename . name return duration ( os . stat ( filename ) [ stat . ST_CTIME ] ) | Retrieve how long ago a file has been created . |
3,557 | def modified ( filename ) : if isinstance ( filename , file ) : filename = filename . name return duration ( os . stat ( filename ) [ stat . ST_MTIME ] ) | Retrieve how long ago a file has been modified . |
3,558 | def size ( filename , format = 'decimal' ) : if isinstance ( filename , file ) : filename = filename . name return filesize ( os . stat ( filename ) [ stat . ST_SIZE ] , format ) | Retrieve the size of a file . |
3,559 | def show_more ( context , label = None , loading = settings . LOADING ) : data = utils . get_data_from_context ( context ) page = data [ 'page' ] if page . has_next ( ) : request = context [ 'request' ] page_number = page . next_page_number ( ) querystring_key = data [ 'querystring_key' ] querystring = utils . get_quer... | Show the link to get the next page in a Twitter - like pagination . |
3,560 | def show_more_table ( context , label = None , loading = settings . LOADING ) : return show_more ( context , label , loading ) | Show the link to get the next page in a Twitter - like pagination in a template for table . |
3,561 | def generate_trimmer ( word_characters ) : start_re = r"^[^{}]+" . format ( word_characters ) end_re = r"[^{}]+$" . format ( word_characters ) def trimmer ( token , i = None , tokens = None ) : def trim ( s , metadata = None ) : s = re . sub ( start_re , "" , s ) s = re . sub ( end_re , "" , s ) return s return token .... | Returns a trimmer function from a string of word characters . |
3,562 | def camelcase ( string ) : out = slug ( string ) . replace ( '-' , ' ' ) . title ( ) . replace ( ' ' , '' ) return out [ 0 ] . lower ( ) + out [ 1 : ] | Return a string in lowerCamelCase |
3,563 | def position_for_index ( self , index ) : if not self . elements : return 0 start = 0 end = int ( len ( self . elements ) / 2 ) slice_length = end - start pivot_point = int ( slice_length / 2 ) pivot_index = self . elements [ pivot_point * 2 ] while slice_length > 1 : if pivot_index < index : start = pivot_point elif p... | Calculates the position within the vector to insert a given index . |
3,564 | def insert ( self , insert_index , val ) : def prevent_duplicates ( index , val ) : raise BaseLunrException ( "Duplicate index" ) self . upsert ( insert_index , val , prevent_duplicates ) | Inserts an element at an index within the vector . |
3,565 | def upsert ( self , insert_index , val , fn = None ) : fn = fn or ( lambda current , passed : passed ) self . _magnitude = 0 position = self . position_for_index ( insert_index ) if position < len ( self . elements ) and self . elements [ position ] == insert_index : self . elements [ position + 1 ] = fn ( self . eleme... | Inserts or updates an existing index within the vector . |
3,566 | def to_list ( self ) : output = [ ] for i in range ( 1 , len ( self . elements ) , 2 ) : output . append ( self . elements [ i ] ) return output | Converts the vector to an array of the elements within the vector |
3,567 | def dot ( self , other ) : dot_product = 0 a = self . elements b = other . elements a_len = len ( a ) b_len = len ( b ) i = j = 0 while i < a_len and j < b_len : a_val = a [ i ] b_val = b [ j ] if a_val < b_val : i += 2 elif a_val > b_val : j += 2 else : dot_product += a [ i + 1 ] * b [ j + 1 ] i += 2 j += 2 return dot... | Calculates the dot product of this vector and another vector . |
3,568 | def similarity ( self , other ) : if self . magnitude == 0 or other . magnitude == 0 : return 0 return self . dot ( other ) / self . magnitude | Calculates the cosine similarity between this vector and another vector . |
3,569 | def bban_base10 ( number ) : number = bban_compact ( number ) number = number [ 4 : ] + number [ : 4 ] return '' . join ( [ str ( IBAN_ALPHABET . index ( char ) ) for char in number ] ) | Printable Basic Bank Account Number in base - 10 . |
3,570 | def _add_scheme ( ) : lists = [ urllib . parse . uses_relative , urllib . parse . uses_netloc , urllib . parse . uses_query , ] for l in lists : l . append ( 'mongodb' ) | urllib . parse doesn t support the mongodb scheme but it s easy to make it so . |
3,571 | def field ( self , field_name , boost = 1 , extractor = None ) : if "/" in field_name : raise ValueError ( "Field {} contains illegal character `/`" ) self . _fields [ field_name ] = Field ( field_name , boost , extractor ) | Adds a field to the list of document fields that will be indexed . |
3,572 | def b ( self , number ) : if number < 0 : self . _b = 0 elif number > 1 : self . _b = 1 else : self . _b = number | A parameter to tune the amount of field length normalisation that is applied when calculating relevance scores . |
3,573 | def add ( self , doc , attributes = None ) : doc_ref = str ( doc [ self . _ref ] ) self . _documents [ doc_ref ] = attributes or { } self . document_count += 1 for field_name , field in self . _fields . items ( ) : extractor = field . extractor field_value = doc [ field_name ] if extractor is None else extractor ( doc ... | Adds a document to the index . |
3,574 | def build ( self ) : self . _calculate_average_field_lengths ( ) self . _create_field_vectors ( ) self . _create_token_set ( ) return Index ( inverted_index = self . inverted_index , field_vectors = self . field_vectors , token_set = self . token_set , fields = list ( self . _fields . keys ( ) ) , pipeline = self . sea... | Builds the index creating an instance of lunr . Index . |
3,575 | def _create_token_set ( self ) : self . token_set = TokenSet . from_list ( sorted ( list ( self . inverted_index . keys ( ) ) ) ) | Creates a token set of all tokens in the index using lunr . TokenSet |
3,576 | def _calculate_average_field_lengths ( self ) : accumulator = defaultdict ( int ) documents_with_field = defaultdict ( int ) for field_ref , length in self . field_lengths . items ( ) : _field_ref = FieldRef . from_string ( field_ref ) field = _field_ref . field_name documents_with_field [ field ] += 1 accumulator [ fi... | Calculates the average document length for this index |
3,577 | def _create_field_vectors ( self ) : field_vectors = { } term_idf_cache = { } for field_ref , term_frequencies in self . field_term_frequencies . items ( ) : _field_ref = FieldRef . from_string ( field_ref ) field_name = _field_ref . field_name field_length = self . field_lengths [ field_ref ] field_vector = Vector ( )... | Builds a vector space model of every document using lunr . Vector . |
3,578 | def estimate ( coll , filter = { } , sample = 1 ) : total = coll . estimated_document_count ( ) if not filter and sample == 1 : return total if sample <= 1 : sample *= total pipeline = list ( builtins . filter ( None , [ { '$sample' : { 'size' : sample } } if sample < total else { } , { '$match' : filter } , { '$count'... | Estimate the number of documents in the collection matching the filter . |
3,579 | def render ( self , data , accepted_media_type = None , renderer_context = None ) : wrapper = None success = False for wrapper_name in self . wrappers : wrapper_method = getattr ( self , wrapper_name ) try : wrapper = wrapper_method ( data , renderer_context ) except WrapperNotApplicable : pass else : success = True br... | Convert native data to JSON API |
3,580 | def wrap_parser_error ( self , data , renderer_context ) : response = renderer_context . get ( "response" , None ) status_code = response and response . status_code if status_code != 400 : raise WrapperNotApplicable ( 'Status code must be 400.' ) if list ( data . keys ( ) ) != [ 'detail' ] : raise WrapperNotApplicable ... | Convert parser errors to the JSON API Error format |
3,581 | def wrap_field_error ( self , data , renderer_context ) : response = renderer_context . get ( "response" , None ) status_code = response and response . status_code if status_code != 400 : raise WrapperNotApplicable ( 'Status code must be 400.' ) return self . wrap_error ( data , renderer_context , keys_are_fields = Tru... | Convert field error native data to the JSON API Error format |
3,582 | def wrap_generic_error ( self , data , renderer_context ) : response = renderer_context . get ( "response" , None ) status_code = response and response . status_code is_error = ( status . is_client_error ( status_code ) or status . is_server_error ( status_code ) ) if not is_error : raise WrapperNotApplicable ( "Status... | Convert generic error native data using the JSON API Error format |
3,583 | def wrap_error ( self , data , renderer_context , keys_are_fields , issue_is_title ) : response = renderer_context . get ( "response" , None ) status_code = str ( response and response . status_code ) errors = [ ] for field , issues in data . items ( ) : if isinstance ( issues , six . string_types ) : issues = [ issues... | Convert error native data to the JSON API Error format |
3,584 | def wrap_options ( self , data , renderer_context ) : request = renderer_context . get ( "request" , None ) method = request and getattr ( request , 'method' ) if method != 'OPTIONS' : raise WrapperNotApplicable ( "Request method must be OPTIONS" ) wrapper = self . dict_class ( ) wrapper [ "meta" ] = data return wrappe... | Wrap OPTIONS data as JSON API meta value |
3,585 | def wrap_paginated ( self , data , renderer_context ) : pagination_keys = [ 'count' , 'next' , 'previous' , 'results' ] for key in pagination_keys : if not ( data and key in data ) : raise WrapperNotApplicable ( 'Not paginated results' ) view = renderer_context . get ( "view" , None ) model = self . model_from_obj ( vi... | Convert paginated data to JSON API with meta |
3,586 | def wrap_default ( self , data , renderer_context ) : wrapper = self . dict_class ( ) view = renderer_context . get ( "view" , None ) request = renderer_context . get ( "request" , None ) model = self . model_from_obj ( view ) resource_type = self . model_to_resource_type ( model ) if isinstance ( data , list ) : many ... | Convert native data to a JSON API resource collection |
3,587 | def acquire_lock ( self ) : try : self . collection . insert_one ( dict ( _id = self . id ) ) except pymongo . errors . DuplicateKeyError : pass unlocked_spec = dict ( _id = self . id , locked = None ) lock_timer = ( timers . Timer . after ( self . lock_timeout ) if self . lock_timeout else timers . NeverExpires ( ) ) ... | Acquire the lock . Blocks indefinitely until lock is available unless lock_timeout was supplied . If the lock_timeout elapses raises LockTimeout . |
3,588 | def set_boot_device ( self , device , persistent = False ) : operation = "set_boot_device" try : self . sp_manager . create_boot_policy ( ) self . sp_manager . set_boot_device ( device ) except UcsException as ex : raise exception . UcsOperationError ( operation = operation , error = ex ) | Set the boot device for the node . |
3,589 | def get_boot_device ( self ) : operation = 'get_boot_device' try : boot_device = self . sp_manager . get_boot_device ( ) return boot_device except UcsException as ex : print ( _ ( "Cisco client exception: %(msg)s." ) , { 'msg' : ex } ) raise exception . UcsOperationError ( operation = operation , error = ex ) | Get the current boot device for the node . |
3,590 | def lunr ( ref , fields , documents , languages = None ) : if languages is not None and lang . LANGUAGE_SUPPORT : if isinstance ( languages , basestring ) : languages = [ languages ] unsupported_languages = set ( languages ) - set ( lang . SUPPORTED_LANGUAGES ) if unsupported_languages : raise RuntimeError ( "The speci... | A convenience function to configure and construct a lunr . Index . |
3,591 | def from_config ( _config , ** options ) : expected_args = ( 'path' , ) rconfig . check_config_options ( "SQLiteEventStore" , expected_args , tuple ( ) , options ) return SQLiteEventStore ( options [ 'path' ] ) | Instantiate an SQLite event store from config . |
3,592 | def key_exists ( self , key ) : assert isinstance ( key , str ) cursor = self . conn . cursor ( ) with contextlib . closing ( cursor ) : cursor . execute ( 'SELECT COUNT(*) FROM events WHERE uuid=?' , ( key , ) ) res = cursor . fetchone ( ) count = res [ 0 ] if count == 0 : return False else : assert count in ( 0 , 1 )... | Check whether a key exists in the event store . |
3,593 | def count ( self ) : cursor = self . conn . cursor ( ) with contextlib . closing ( cursor ) : cursor . execute ( 'SELECT COUNT(*) FROM events' ) res = cursor . fetchone ( ) return res [ 0 ] | Return the number of events in the db . |
3,594 | def close ( self ) : if self . conn : self . conn . close ( ) self . conn = None fname = os . path . basename ( self . _path ) checksum_persister = _get_checksum_persister ( self . _path ) hasher = _initialize_hasher ( self . _path ) with contextlib . closing ( checksum_persister ) : checksum_persister [ fname ] = hash... | Close the event store . |
3,595 | def from_config ( config , ** options ) : expected_args = ( 'path' , ) rconfig . check_config_options ( "LogEventStore" , expected_args , tuple ( ) , options ) return LogEventStore ( options [ 'path' ] ) | Instantiate an LogEventStore from config . |
3,596 | def key_exists ( self , key ) : assert isinstance ( key , str ) self . _close ( ) try : return self . _unsafe_key_exists ( key ) finally : self . _open ( ) | Check if key has previously been added to this store . |
3,597 | def close ( self ) : fname = os . path . basename ( self . _path ) checksum_persister = _get_checksum_persister ( self . _path ) with contextlib . closing ( checksum_persister ) : checksum_persister [ fname ] = self . _hasher . hexdigest ( ) self . _close ( ) | Persist a checksum and close the file . |
3,598 | def from_config ( config , ** options ) : expected_args = ( 'prefix' , 'realclass' ) for arg in expected_args : if arg not in options : msg = "Required option missing: {0}" raise rconfig . ConfigurationError ( msg . format ( arg ) ) classpath = options [ 'realclass' ] classpath_pieces = classpath . split ( '.' ) classn... | Instantiate an RotatedEventStore from config . |
3,599 | def _construct_filename ( self , batchno ) : return os . path . join ( self . dirpath , "{0}.{1}" . format ( self . prefix , batchno ) ) | Construct a filename for a database . |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.