idx int64 0 251k | question stringlengths 53 3.53k | target stringlengths 5 1.23k | len_question int64 20 893 | len_target int64 3 238 |
|---|---|---|---|---|
3,500 | def getattr ( self , key ) : if ( ( key == "classId" ) and ( self . __dict__ . has_key ( key ) ) ) : return self . __dict__ [ key ] if UcsUtils . FindClassIdInMoMetaIgnoreCase ( self . classId ) : if self . __dict__ . has_key ( key ) : if key in _ManagedObjectMeta [ self . classId ] : """ property exists """ return self . __dict__ [ key ] else : if self . __dict__ . has_key ( 'XtraProperty' ) : if self . __dict__ [ 'XtraProperty' ] . has_key ( key ) : return self . __dict__ [ 'XtraProperty' ] [ UcsUtils . WordU ( key ) ] else : raise AttributeError ( key ) else : # TODO: Add Warning/Error messages in Logger. print "No XtraProperty in mo:" , self . classId , " key:" , key else : """ property does not exist """ if self . __dict__ [ 'XtraProperty' ] . has_key ( key ) : return self . __dict__ [ 'XtraProperty' ] [ UcsUtils . WordU ( key ) ] elif key == "Dn" or key == "Rn" : return None else : raise AttributeError ( key ) | This method gets attribute value of a Managed Object . | 300 | 11 |
3,501 | def MarkDirty ( self ) : if ( ( UcsUtils . FindClassIdInMoMetaIgnoreCase ( self . classId ) == None ) and ( not self . IsDirty ( ) ) ) : self . dirtyMask = ManagedObject . DUMMYDIRTY else : self . dirtyMask = self . propMoMeta . mask | This method marks the managed object dirty . | 75 | 8 |
3,502 | def MakeRn ( self ) : rnPattern = self . propMoMeta . rn for prop in re . findall ( "\[([^\]]*)\]" , rnPattern ) : if prop in UcsUtils . GetUcsPropertyMetaAttributeList ( self . classId ) : if ( self . getattr ( prop ) != None ) : rnPattern = re . sub ( '\[%s\]' % prop , '%s' % self . getattr ( prop ) , rnPattern ) else : raise UcsValidationException ( 'Property "%s" was None in MakeRn' % prop ) # raise Exception('Property "%s" was None in MakeRn' %prop) else : raise UcsValidationException ( 'Property "%s" was not found in MakeRn arguments' % prop ) # raise Exception('Property "%s" was not found in MakeRn arguments' %prop) return rnPattern | This method returns the Rn for a managed object . | 205 | 11 |
3,503 | def LoadFromXml ( self , node , handle ) : self . SetHandle ( handle ) if node . hasAttributes ( ) : # attributes = node._get_attributes() # attCount = attributes._get_length() attributes = node . attributes attCount = len ( attributes ) for i in range ( attCount ) : attNode = attributes . item ( i ) # attr = UcsUtils.WordU(attNode._get_name()) attr = UcsUtils . WordU ( attNode . localName ) if ( UcsUtils . FindClassIdInMoMetaIgnoreCase ( self . classId ) != None ) : if ( attr in UcsUtils . GetUcsPropertyMetaAttributeList ( self . classId ) ) : # self.setattr(attr, str(attNode.nodeValue)) self . setattr ( attr , str ( attNode . value ) ) else : # self.setattr(UcsUtils.WordU(attr), str(attNode.nodeValue)) self . setattr ( UcsUtils . WordU ( attr ) , str ( attNode . value ) ) else : # self.setattr(UcsUtils.WordU(attr), str(attNode.nodeValue)) self . setattr ( UcsUtils . WordU ( attr ) , str ( attNode . value ) ) if self . getattr ( "Rn" ) == None and self . getattr ( "Dn" ) != None : self . setattr ( "Rn" , str ( re . sub ( r'^.*/' , '' , self . getattr ( "Dn" ) ) ) ) if ( node . hasChildNodes ( ) ) : # childList = node._get_childNodes() # childCount = childList._get_length() childList = node . childNodes childCount = len ( childList ) for i in range ( childCount ) : childNode = childList . item ( i ) if ( childNode . nodeType != Node . ELEMENT_NODE ) : continue if childNode . localName in self . propMoMeta . fieldNames : # .LoadFromXml(childNode, handle) pass # TODO: Need code analysis. # if childNode.localName in self.propMoMeta.childFieldNames: c = ManagedObject ( UcsUtils . WordU ( childNode . localName ) ) self . child . append ( c ) c . LoadFromXml ( childNode , handle ) | Method updates the object from the xml representation of the managed object . | 548 | 13 |
3,504 | def setattr ( self , key , value ) : if key in _MethodFactoryMeta [ self . classId ] : self . __dict__ [ key ] = value elif key == 'errorCode' : self . errorCode = value elif key == 'errorDescr' : self . errorDescr = value elif key == 'invocationResult' : self . invocationResult = value elif key == 'response' : self . response = value else : """ no such property """ # print "No such property ClassId: %s Property:%s" %(self.classId, key) return None | This method sets the attribute of external method object . | 130 | 10 |
3,505 | def getattr ( self , key ) : if key in _MethodFactoryMeta [ self . classId ] : """ property exists """ return self . __dict__ [ key ] else : """ property does not exist """ return None | This method gets the attribute value of external method object . | 46 | 11 |
3,506 | def getErrorResponse ( self , errorCode , errorDescr ) : self . errorCode = errorCode self . errorDescr = errorDescr self . response = "yes" return self | This methods sets error attributes of an external method object . | 40 | 11 |
3,507 | def GetUcsPropertyMeta ( classId , key ) : if classId in _ManagedObjectMeta : if key in _ManagedObjectMeta [ classId ] : return _ManagedObjectMeta [ classId ] [ key ] return None | Methods returns the property meta of the provided key for the given classId . | 51 | 15 |
3,508 | def GetUcsMethodMeta ( classId , key ) : if classId in _MethodFactoryMeta : if key in _MethodFactoryMeta [ classId ] : return _MethodFactoryMeta [ classId ] [ key ] return None | Methods returns the method meta of the ExternalMethod . | 48 | 10 |
3,509 | def GetUcsPropertyMetaAttributeList ( classId ) : if classId in _ManagedObjectMeta : attrList = _ManagedObjectMeta [ classId ] . keys ( ) attrList . remove ( "Meta" ) return attrList if classId in _MethodFactoryMeta : attrList = _MethodFactoryMeta [ classId ] . keys ( ) attrList . remove ( "Meta" ) return attrList # If the case of classId is not as in Meta nci = UcsUtils . FindClassIdInMoMetaIgnoreCase ( classId ) if ( nci != None ) : attrList = _ManagedObjectMeta [ nci ] . keys ( ) attrList . remove ( "Meta" ) return attrList nci = UcsUtils . FindClassIdInMethodMetaIgnoreCase ( classId ) if ( nci != None ) : attrList = _MethodFactoryMeta [ nci ] . keys ( ) attrList . remove ( "Meta" ) return attrList return None | Methods returns the class meta . | 226 | 6 |
3,510 | def IsPropertyInMetaIgnoreCase ( classId , key ) : if classId in _ManagedObjectMeta : for prop in _ManagedObjectMeta [ classId ] : if ( prop . lower ( ) == key . lower ( ) ) : return _ManagedObjectMeta [ classId ] [ prop ] if classId in _MethodFactoryMeta : for prop in _MethodFactoryMeta [ classId ] : if ( prop . lower ( ) == key . lower ( ) ) : return _MethodFactoryMeta [ classId ] [ prop ] return None | Methods returns the property meta of the provided key for the given classId . Given key is case insensitive . | 116 | 21 |
3,511 | def CheckRegistryKey ( javaKey ) : from _winreg import ConnectRegistry , HKEY_LOCAL_MACHINE , OpenKey , QueryValueEx path = None try : aReg = ConnectRegistry ( None , HKEY_LOCAL_MACHINE ) rk = OpenKey ( aReg , javaKey ) for i in range ( 1024 ) : currentVersion = QueryValueEx ( rk , "CurrentVersion" ) if currentVersion != None : key = OpenKey ( rk , currentVersion [ 0 ] ) if key != None : path = QueryValueEx ( key , "JavaHome" ) return path [ 0 ] except Exception , err : # TODO: Add Warning/Error messages in Logger. WriteUcsWarning ( "Not able to access registry." ) return None | Method checks for the java in the registry entries . | 170 | 10 |
3,512 | def GetJavaInstallationPath ( ) : import os , platform # Get JavaPath for Ubuntu # if os.name == "posix": if platform . system ( ) == "Linux" : path = os . environ . get ( 'JAVA_HOME' ) if not path : raise UcsValidationException ( "Please make sure JAVA is installed and variable JAVA_HOME is set properly." ) # raise Exception("Please make sure JAVA is installed and variable JAVA_HOME is set properly.") else : path = os . path . join ( path , 'bin' ) path = os . path . join ( path , 'javaws' ) if not os . path . exists ( path ) : raise UcsValidationException ( "javaws is not installed on System." ) # raise Exception("javaws is not installed on System.") else : return path # Get JavaPath for Windows # elif os.name == "nt": elif platform . system ( ) == "Windows" or platform . system ( ) == "Microsoft" : path = os . environ . get ( 'JAVA_HOME' ) if path == None : path = UcsUtils . CheckRegistryKey ( r"SOFTWARE\\JavaSoft\\Java Runtime Environment\\" ) if path == None : # Check for 32 bit Java on 64 bit machine. path = UcsUtils . CheckRegistryKey ( r"SOFTWARE\\Wow6432Node\\JavaSoft\\Java Runtime Environment" ) if not path : raise UcsValidationException ( "Please make sure JAVA is installed." ) # raise Exception("Please make sure JAVA is installed.") else : path = os . path . join ( path , 'bin' ) path = os . path . join ( path , 'javaws.exe' ) if not os . path . exists ( path ) : raise UcsValidationException ( "javaws.exe is not installed on System." ) # raise Exception("javaws.exe is not installed on System.") else : return path | Method returns the java installation path in the windows or Linux environment . | 440 | 13 |
3,513 | def DownloadFile ( hUcs , source , destination ) : import urllib2 from sys import stdout from time import sleep httpAddress = "%s/%s" % ( hUcs . Uri ( ) , source ) file_name = httpAddress . split ( '/' ) [ - 1 ] req = urllib2 . Request ( httpAddress ) # send the new url with the cookie. req . add_header ( 'Cookie' , 'ucsm-cookie=%s' % ( hUcs . _cookie ) ) res = urllib2 . urlopen ( req ) meta = res . info ( ) file_size = int ( meta . getheaders ( "Content-Length" ) [ 0 ] ) print "Downloading: %s Bytes: %s" % ( file_name , file_size ) f = open ( destination , 'wb' ) file_size_dl = 0 block_sz = 8192 while True : rBuffer = res . read ( block_sz ) if not rBuffer : break file_size_dl += len ( rBuffer ) f . write ( rBuffer ) status = r"%10d [%3.2f%%]" % ( file_size_dl , file_size_dl * 100. / file_size ) status = status + chr ( 8 ) * ( len ( status ) + 1 ) stdout . write ( "\r%s" % status ) stdout . flush ( ) # print status f . close ( ) | Method provides the functionality to download file from the UCS . This method is used in BackupUcs and GetTechSupport to download the files from the Ucs . | 322 | 32 |
3,514 | def GetSyncMoConfigFilePath ( ) : return os . path . join ( os . path . join ( os . path . dirname ( __file__ ) , "resources" ) , "SyncMoConfig.xml" ) | Method returs the path of SyncMoConfig . xml file . | 48 | 13 |
3,515 | def GetSyncMoConfig ( ConfigDoc ) : moConfigMap = { } configList = ConfigDoc . getElementsByTagName ( "mo" ) for moConfigNode in configList : classId = None noun = None version = None actionVersion = None action = None ignoreReason = None status = None excludeList = None if moConfigNode . hasAttribute ( "classid" ) : classId = moConfigNode . getAttribute ( "classid" ) if moConfigNode . hasAttribute ( "noun" ) : noun = moConfigNode . getAttribute ( "noun" ) if moConfigNode . hasAttribute ( "version" ) : version = moConfigNode . getAttribute ( "version" ) if moConfigNode . hasAttribute ( "actionVersion" ) : actionVersion = moConfigNode . getAttribute ( "actionVersion" ) if moConfigNode . hasAttribute ( "action" ) : action = moConfigNode . getAttribute ( "action" ) if moConfigNode . hasAttribute ( "ignoreReason" ) : ignoreReason = moConfigNode . getAttribute ( "ignoreReason" ) if moConfigNode . hasAttribute ( "status" ) : status = moConfigNode . getAttribute ( "status" ) if moConfigNode . hasAttribute ( "excludeList" ) : excludeList = moConfigNode . getAttribute ( "excludeList" ) # SyncMoConfig Object moConfig = None if classId : moConfig = SyncMoConfig ( classId , noun , version , actionVersion , action , ignoreReason , status , excludeList ) if moConfig : if classId in moConfigMap : moConfigMap [ classId ] = moConfig else : moConfigList = [ ] moConfigList . append ( moConfig ) moConfigMap [ classId ] = moConfigList return moConfigMap | Internal support method for SyncManagedObject . | 387 | 9 |
3,516 | def Expandkey ( key , clen ) : import sha from string import join from array import array blocks = ( clen + 19 ) / 20 xkey = [ ] seed = key for i in xrange ( blocks ) : seed = sha . new ( key + seed ) . digest ( ) xkey . append ( seed ) j = join ( xkey , '' ) return array ( 'L' , j ) | Internal method supporting encryption and decryption functionality . | 86 | 9 |
3,517 | def EncryptPassword ( password , key ) : from time import time from array import array import hmac import sha import os import base64 H = UcsUtils . GetShaHash uhash = H ( ',' . join ( str ( x ) for x in [ `time()` , `os.getpid()` , `len(password)` , password , key ] ) ) [ : 16 ] k_enc , k_auth = H ( 'enc' + key + uhash ) , H ( 'auth' + key + uhash ) n = len ( password ) passwordStream = array ( 'L' , password + '0000' [ n & 3 : ] ) xkey = UcsUtils . Expandkey ( k_enc , n + 4 ) for i in xrange ( len ( passwordStream ) ) : passwordStream [ i ] = passwordStream [ i ] ^ xkey [ i ] ct = uhash + passwordStream . tostring ( ) [ : n ] auth = hmac . new ( ct , k_auth , sha ) . digest ( ) encryptStr = ct + auth [ : 8 ] encodedStr = base64 . encodestring ( encryptStr ) encryptedPassword = encodedStr . rstrip ( '\n' ) return encryptedPassword | Encrypts the password using the given key . | 275 | 10 |
3,518 | def DecryptPassword ( cipher , key ) : import base64 import hmac import sha from array import array H = UcsUtils . GetShaHash cipher = cipher + "\n" cipher = base64 . decodestring ( cipher ) n = len ( cipher ) - 16 - 8 uhash = cipher [ : 16 ] passwordStream = cipher [ 16 : - 8 ] + "0000" [ n & 3 : ] auth = cipher [ - 8 : ] k_enc , k_auth = H ( 'enc' + key + uhash ) , H ( 'auth' + key + uhash ) vauth = hmac . new ( cipher [ - 8 : ] , k_auth , sha ) . digest ( ) [ : 8 ] passwordStream = array ( 'L' , passwordStream ) xkey = UcsUtils . Expandkey ( k_enc , n + 4 ) for i in xrange ( len ( passwordStream ) ) : passwordStream [ i ] = passwordStream [ i ] ^ xkey [ i ] decryptedPassword = passwordStream . tostring ( ) [ : n ] return decryptedPassword | Decrypts the password using the given key with which the password was encrypted first . | 240 | 17 |
3,519 | def LoadFromXml ( self , node ) : import os self . classId = node . localName metaClassId = UcsUtils . FindClassIdInMoMetaIgnoreCase ( self . classId ) if metaClassId : self . classId = metaClassId if node . hasAttribute ( NamingPropertyId . DN ) : self . dn = node . getAttribute ( NamingPropertyId . DN ) if self . dn : self . rn = os . path . basename ( self . dn ) # Write the attribute and value to dictionary properties, as it is . self . WriteToAttributes ( node ) # Run the LoadFromXml for each childNode recursively and populate child list too. if ( node . hasChildNodes ( ) ) : # childList = node._get_childNodes() # childCount = childList._get_length() childList = node . childNodes childCount = len ( childList ) for i in range ( childCount ) : childNode = childList . item ( i ) if ( childNode . nodeType != Node . ELEMENT_NODE ) : continue c = _GenericMO ( ) self . child . append ( c ) c . LoadFromXml ( childNode ) | Method updates the object from the xml . | 268 | 8 |
3,520 | def WriteXml ( self , w , option , elementName = None ) : if elementName == None : x = w . createElement ( self . classId ) else : x = w . createElement ( elementName ) for prop in self . __dict__ [ 'properties' ] : x . setAttribute ( UcsUtils . WordL ( prop ) , self . __dict__ [ 'properties' ] [ prop ] ) x_child = self . childWriteXml ( w , option ) for xc in x_child : if ( xc != None ) : x . appendChild ( xc ) return x | Method writes the xml representation of the generic managed object . | 132 | 11 |
3,521 | def ToManagedObject ( self ) : from Ucs import ClassFactory cln = UcsUtils . WordU ( self . classId ) mo = ClassFactory ( cln ) if mo and ( isinstance ( mo , ManagedObject ) == True ) : metaClassId = UcsUtils . FindClassIdInMoMetaIgnoreCase ( self . classId ) for property in self . properties : if UcsUtils . WordU ( property ) in UcsUtils . GetUcsPropertyMetaAttributeList ( metaClassId ) : mo . setattr ( UcsUtils . WordU ( property ) , self . properties [ property ] ) else : # TODO: Add Warning/Error messages in Logger. WriteUcsWarning ( "Property %s Not Exist in MO %s" % ( UcsUtils . WordU ( property ) , metaClassId ) ) if len ( self . child ) : for ch in self . child : moch = ch . ToManagedObject ( ) mo . child . append ( moch ) return mo else : return None | Method creates and returns an object of ManagedObject class using the classId and information from the Generic managed object . | 231 | 23 |
3,522 | def FromManagedObject ( self ) : import os if ( isinstance ( self . mo , ManagedObject ) == True ) : self . classId = self . mo . classId if self . mo . getattr ( 'Dn' ) : self . dn = self . mo . getattr ( 'Dn' ) if self . mo . getattr ( 'Rn' ) : self . rn = self . mo . getattr ( 'Rn' ) elif self . dn : self . rn = os . path . basename ( self . dn ) for property in UcsUtils . GetUcsPropertyMetaAttributeList ( self . mo . classId ) : self . properties [ property ] = self . mo . getattr ( property ) if len ( self . mo . child ) : for ch in self . mo . child : if not ch . getattr ( 'Dn' ) : _Dn = self . mo . getattr ( 'Dn' ) + "/" + ch . getattr ( 'Rn' ) ch . setattr ( 'Dn' , _Dn ) gmo = _GenericMO ( mo = ch ) self . child . append ( gmo ) | Method creates and returns an object of _GenericMO class using the classId and other information from the managed object . | 261 | 23 |
3,523 | def GetChildClassId ( self , classId ) : childList = [ ] for ch in self . child : if ch . classId . lower ( ) == classId . lower ( ) : childList . append ( ch ) return childList | Method extracts and returns the child object list same as the given classId | 51 | 14 |
3,524 | def _total_seconds ( t ) : return sum ( [ int ( t . days * 86400 + t . seconds ) , int ( round ( t . microseconds / 1000000.0 ) ) ] ) | Takes a datetime . timedelta object and returns the delta in seconds . | 45 | 16 |
3,525 | def day ( t , now = None , format = '%B %d' ) : t1 = _to_date ( t ) t2 = _to_date ( now or datetime . datetime . now ( ) ) diff = t1 - t2 secs = _total_seconds ( diff ) days = abs ( diff . days ) if days == 0 : return _ ( 'today' ) elif days == 1 : if secs < 0 : return _ ( 'yesterday' ) else : return _ ( 'tomorrow' ) elif days == 7 : if secs < 0 : return _ ( 'last week' ) else : return _ ( 'next week' ) else : return t1 . strftime ( format ) | Date delta compared to t . You can override now to specify what date to compare to . | 157 | 18 |
3,526 | def duration ( t , now = None , precision = 1 , pad = ', ' , words = None , justnow = datetime . timedelta ( seconds = 10 ) ) : if words is None : words = precision == 1 t1 = _to_datetime ( t ) t2 = _to_datetime ( now or datetime . datetime . now ( ) ) if t1 < t2 : format = _ ( '%s ago' ) else : format = _ ( '%s from now' ) result , remains = delta ( t1 , t2 , words = words , justnow = justnow ) if result in ( _ ( 'just now' ) , _ ( 'yesterday' ) , _ ( 'tomorrow' ) , _ ( 'last week' ) , _ ( 'next week' ) , ) : return result elif precision > 1 and remains : t3 = t2 - datetime . timedelta ( seconds = remains ) return pad . join ( [ result , duration ( t2 , t3 , precision - 1 , pad , words = False ) , ] ) else : return format % ( result , ) | Time delta compared to t . You can override now to specify what time to compare to . | 241 | 18 |
3,527 | def search ( self , query_string ) : query = self . create_query ( ) # TODO: should QueryParser be a method of query? should it return one? parser = QueryParser ( query_string , query ) parser . parse ( ) return self . query ( query ) | Performs a search against the index using lunr query syntax . | 60 | 13 |
3,528 | def create_query ( self , fields = None ) : if fields is None : return Query ( self . fields ) non_contained_fields = set ( fields ) - set ( self . fields ) if non_contained_fields : raise BaseLunrException ( "Fields {} are not part of the index" , non_contained_fields ) return Query ( fields ) | Convenience method to create a Query with the Index s fields . | 78 | 14 |
3,529 | def load ( cls , serialized_index ) : from lunr import __TARGET_JS_VERSION__ if isinstance ( serialized_index , basestring ) : serialized_index = json . loads ( serialized_index ) if serialized_index [ "version" ] != __TARGET_JS_VERSION__ : logger . warning ( "Version mismatch when loading serialized index. " "Current version of lunr {} does not match that of serialized " "index {}" . format ( __TARGET_JS_VERSION__ , serialized_index [ "version" ] ) ) field_vectors = { ref : Vector ( elements ) for ref , elements in serialized_index [ "fieldVectors" ] } tokenset_builder = TokenSetBuilder ( ) inverted_index = { } for term , posting in serialized_index [ "invertedIndex" ] : tokenset_builder . insert ( term ) inverted_index [ term ] = posting tokenset_builder . finish ( ) return Index ( fields = serialized_index [ "fields" ] , field_vectors = field_vectors , inverted_index = inverted_index , token_set = tokenset_builder . root , pipeline = Pipeline . load ( serialized_index [ "pipeline" ] ) , ) | Load a serialized index | 280 | 5 |
3,530 | def configure ( logstash_host = None , logstash_port = None , logdir = None ) : if not ( logstash_host or logstash_port or logdir ) : raise ValueError ( 'you must specify at least one parameter' ) config . logstash . host = logstash_host or config . logstash . host config . logstash . port = logstash_port or config . logstash . port config . logdir = logdir or config . logdir create_logdir ( config . logdir ) | Configuration settings . | 122 | 3 |
3,531 | def new_logger ( name ) : log = get_task_logger ( name ) handler = logstash . LogstashHandler ( config . logstash . host , config . logstash . port ) log . addHandler ( handler ) create_logdir ( config . logdir ) handler = TimedRotatingFileHandler ( '%s.json' % join ( config . logdir , name ) , when = 'midnight' , utc = True , ) handler . setFormatter ( JSONFormatter ( ) ) log . addHandler ( handler ) return TaskCtxAdapter ( log , { } ) | Return new logger which will log both to logstash and to file in JSON format . | 133 | 18 |
3,532 | def _zmq_socket_context ( context , socket_type , bind_endpoints ) : socket = context . socket ( socket_type ) try : for endpoint in bind_endpoints : try : socket . bind ( endpoint ) except Exception : _logger . fatal ( "Could not bind to '%s'." , endpoint ) raise yield socket finally : socket . close ( ) | A ZeroMQ socket context that both constructs a socket and closes it . | 81 | 14 |
3,533 | def _get_with_fallback ( config , section , option , fallback ) : exists = ( config . has_section ( section ) and config . has_option ( section , option ) ) if not exists : return fallback else : return config . get ( section , option ) | Get a configuration value using fallback for missing values . | 60 | 11 |
3,534 | def run ( options , exit_codeword = None ) : QUERY_ENDP_OPT = 'query-bind-endpoint' STREAM_ENDP_OPT = 'streaming-bind-endpoint' ZMQ_NTHREADS = "zmq-nthreads" if not options . has_section ( config . DEFAULT_SECTION ) : msg = "Missing default section, `{0}`." fmsg = msg . format ( config . DEFAULT_SECTION ) raise config . ConfigurationError ( fmsg ) if not options . has_option ( config . DEFAULT_SECTION , QUERY_ENDP_OPT ) : msg = "Missing (query) bind endpoint in option file: {0}:{1}" fmsg = msg . format ( config . DEFAULT_SECTION , QUERY_ENDP_OPT ) raise config . ConfigurationError ( fmsg ) queryendp = options . get ( config . DEFAULT_SECTION , QUERY_ENDP_OPT ) . split ( "," ) streamendp = _get_with_fallback ( options , config . DEFAULT_SECTION , STREAM_ENDP_OPT , '' ) . split ( "," ) queryendp = filter ( lambda x : x . strip ( ) , queryendp ) streamendp = filter ( lambda x : x . strip ( ) , streamendp ) try : eventstore = config . construct_eventstore ( options ) except config . ConfigurationError as e : _logger . exception ( "Could instantiate event store from config file." ) raise zmq_nthreads = _get_with_fallback ( options , config . DEFAULT_SECTION , ZMQ_NTHREADS , '3' ) try : zmq_nthreads = int ( zmq_nthreads ) except ValueError : msg = "{0}:{1} must be an integer" . format ( config . DEFAULT_SECTION , ZMQ_NTHREADS ) _logger . fatal ( msg ) return 1 with _zmq_context_context ( zmq_nthreads ) as context , _zmq_socket_context ( context , zmq . REP , queryendp ) as querysock , _zmq_socket_context ( context , zmq . PUB , streamendp ) as streamsock : # Executing the program in the context of ZeroMQ context as well as # ZeroMQ sockets. Using with here to make sure are correctly closing # things in the correct order, particularly also if we have an # exception or similar. runner = _RewindRunner ( eventstore , querysock , streamsock , ( exit_codeword . encode ( ) if exit_codeword else None ) ) runner . run ( ) return 0 | Actually execute the program . 
| 615 | 5 |
3,535 | def main ( argv = None ) : parser = argparse . ArgumentParser ( description = 'Event storage and event proxy.' , usage = '%(prog)s <configfile>' ) parser . add_argument ( '--exit-codeword' , metavar = "MSG" , dest = "exit_message" , default = None , help = "An incoming message that makes" " Rewind quit. Used for testing." ) parser . add_argument ( 'configfile' ) args = argv if argv is not None else sys . argv [ 1 : ] args = parser . parse_args ( args ) config = configparser . SafeConfigParser ( ) with open ( args . configfile ) as f : config . readfp ( f ) exitcode = run ( config , args . exit_message ) return exitcode | Entry point for Rewind . | 181 | 6 |
3,536 | def generate ( self ) : key = self . _propose_new_key ( ) while self . key_exists ( key ) : _logger . warning ( 'Previous candidate was used.' ' Regenerating another...' ) key = self . _propose_new_key ( ) return key | Generate a new string and return it . | 64 | 9 |
3,537 | def _handle_one_message ( self ) : result = True requesttype = self . query_socket . recv ( ) if requesttype == b"PUBLISH" : self . _handle_incoming_event ( ) elif requesttype == b"QUERY" : self . _handle_event_query ( ) elif ( self . exit_message is not None and requesttype == self . exit_message ) : _logger . warn ( "Asked to quit through an exit message." "I'm quitting..." ) self . query_socket . send ( b'QUIT' ) result = False else : _logger . warn ( "Could not identify request type: %s" , requesttype ) self . _handle_unknown_command ( ) return result | Handle one single incoming message on any socket . | 164 | 9 |
3,538 | def _handle_unknown_command ( self ) : while self . query_socket . getsockopt ( zmq . RCVMORE ) : # Making sure we 'empty' enveloped message. Otherwise, we can't # respond. self . query_socket . recv ( ) self . query_socket . send ( b"ERROR Unknown request type" ) | Handle an unknown RES command . | 76 | 6 |
3,539 | def _handle_event_query ( self ) : assert self . query_socket . getsockopt ( zmq . RCVMORE ) fro = self . query_socket . recv ( ) . decode ( ) assert self . query_socket . getsockopt ( zmq . RCVMORE ) to = self . query_socket . recv ( ) . decode ( ) assert not self . query_socket . getsockopt ( zmq . RCVMORE ) _logger . debug ( "Incoming query: (from, to)=(%s, %s)" , fro , to ) try : events = self . eventstore . get_events ( fro if fro else None , to if to else None ) except eventstores . EventStore . EventKeyDoesNotExistError : _logger . exception ( "A client requested a key that does not" " exist:" ) self . query_socket . send ( b"ERROR Key did not exist" ) return # Since we are using ZeroMQ enveloping we want to cap the # maximum number of messages that are send for each request. # Otherwise we might run out of memory for a lot of events. MAX_ELMNTS_PER_REQ = 100 events = itertools . islice ( events , 0 , MAX_ELMNTS_PER_REQ ) events = list ( events ) if len ( events ) == MAX_ELMNTS_PER_REQ : # There are more elements, but we are capping the result for eventid , eventdata in events [ : - 1 ] : self . query_socket . send ( eventid . encode ( ) , zmq . SNDMORE ) self . query_socket . send ( eventdata , zmq . SNDMORE ) lasteventid , lasteventdata = events [ - 1 ] self . query_socket . send ( lasteventid . encode ( ) , zmq . SNDMORE ) self . query_socket . send ( lasteventdata ) else : # Sending all events. Ie., we are not capping for eventid , eventdata in events : self . query_socket . send ( eventid . encode ( ) , zmq . SNDMORE ) self . query_socket . send ( eventdata , zmq . SNDMORE ) self . query_socket . send ( b"END" ) | Handle an incoming event query . | 503 | 6 |
3,540 | def _handle_incoming_event ( self ) : eventstr = self . query_socket . recv ( ) newid = self . id_generator . generate ( ) # Make sure newid is not part of our request vocabulary assert newid not in ( b"QUERY" , b"PUBLISH" ) , "Generated ID must not be part of req/rep vocabulary." assert not newid . startswith ( "ERROR" ) , "Generated ID must not be part of req/rep vocabulary." # Important this is done before forwarding to the streaming socket self . eventstore . add_event ( newid , eventstr ) self . streaming_socket . send ( newid . encode ( ) , zmq . SNDMORE ) self . streaming_socket . send ( self . oldid . encode ( ) , zmq . SNDMORE ) self . streaming_socket . send ( eventstr ) self . oldid = newid assert not self . query_socket . getsockopt ( zmq . RCVMORE ) self . query_socket . send ( b"PUBLISHED" ) | Handle an incoming event . | 240 | 5 |
3,541 | def idf ( posting , document_count ) : documents_with_term = 0 for field_name in posting : if field_name == "_index" : continue documents_with_term += len ( posting [ field_name ] . keys ( ) ) x = ( document_count - documents_with_term + 0.5 ) / ( documents_with_term + 0.5 ) return math . log ( 1 + abs ( x ) ) | A function to calculate the inverse document frequency for a posting . This is shared between the builder and the index . | 95 | 22 |
3,542 | def check_config_options ( _class , required_options , optional_options , options ) : for opt in required_options : if opt not in options : msg = "Required option missing: {0}" raise ConfigurationError ( msg . format ( opt ) ) for opt in options : if opt not in ( required_options + optional_options ) : msg = "Unknown config option to `{0}`: {1}" _logger . warn ( msg . format ( _class , opt ) ) | Helper method to check options . | 106 | 6 |
3,543 | def accessed ( filename ) : if isinstance ( filename , file ) : filename = filename . name return duration ( os . stat ( filename ) [ stat . ST_ATIME ] ) | Retrieve how long ago a file has been accessed . | 38 | 11 |
3,544 | def created ( filename ) : if isinstance ( filename , file ) : filename = filename . name return duration ( os . stat ( filename ) [ stat . ST_CTIME ] ) | Retrieve how long ago a file has been created . | 38 | 11 |
3,545 | def modified ( filename ) : if isinstance ( filename , file ) : filename = filename . name return duration ( os . stat ( filename ) [ stat . ST_MTIME ] ) | Retrieve how long ago a file has been modified . | 38 | 11 |
3,546 | def size ( filename , format = 'decimal' ) : if isinstance ( filename , file ) : filename = filename . name return filesize ( os . stat ( filename ) [ stat . ST_SIZE ] , format ) | Retrieve the size of a file . | 47 | 8 |
3,547 | def show_more ( context , label = None , loading = settings . LOADING ) : # This template tag could raise a PaginationError: you have to call # *paginate* or *lazy_paginate* before including the showmore template. data = utils . get_data_from_context ( context ) page = data [ 'page' ] # show the template only if there is a next page if page . has_next ( ) : request = context [ 'request' ] page_number = page . next_page_number ( ) # Generate the querystring. querystring_key = data [ 'querystring_key' ] querystring = utils . get_querystring_for_page ( request , page_number , querystring_key , default_number = data [ 'default_number' ] ) return { 'label' : label , 'loading' : loading , 'path' : iri_to_uri ( data [ 'override_path' ] or request . path ) , 'querystring' : querystring , 'querystring_key' : querystring_key , 'request' : request , } # No next page, nothing to see. return { } | Show the link to get the next page in a Twitter - like pagination . | 260 | 16 |
3,548 | def show_more_table ( context , label = None , loading = settings . LOADING ) : # This template tag could raise a PaginationError: you have to call # *paginate* or *lazy_paginate* before including the showmore template. return show_more ( context , label , loading ) | Show the link to get the next page in a Twitter - like pagination in a template for table . | 70 | 21 |
3,549 | def generate_trimmer ( word_characters ) : start_re = r"^[^{}]+" . format ( word_characters ) end_re = r"[^{}]+$" . format ( word_characters ) def trimmer ( token , i = None , tokens = None ) : def trim ( s , metadata = None ) : s = re . sub ( start_re , "" , s ) s = re . sub ( end_re , "" , s ) return s return token . update ( trim ) return trimmer | Returns a trimmer function from a string of word characters . | 116 | 12 |
3,550 | def camelcase ( string ) : out = slug ( string ) . replace ( '-' , ' ' ) . title ( ) . replace ( ' ' , '' ) return out [ 0 ] . lower ( ) + out [ 1 : ] | Return a string in lowerCamelCase | 49 | 8 |
3,551 | def position_for_index ( self , index ) : if not self . elements : return 0 start = 0 end = int ( len ( self . elements ) / 2 ) slice_length = end - start pivot_point = int ( slice_length / 2 ) pivot_index = self . elements [ pivot_point * 2 ] while slice_length > 1 : if pivot_index < index : start = pivot_point elif pivot_index > index : end = pivot_point else : break slice_length = end - start pivot_point = start + int ( slice_length / 2 ) pivot_index = self . elements [ pivot_point * 2 ] if pivot_index == index : return pivot_point * 2 elif pivot_index > index : return pivot_point * 2 else : return ( pivot_point + 1 ) * 2 | Calculates the position within the vector to insert a given index . | 178 | 14 |
3,552 | def insert ( self , insert_index , val ) : def prevent_duplicates ( index , val ) : raise BaseLunrException ( "Duplicate index" ) self . upsert ( insert_index , val , prevent_duplicates ) | Inserts an element at an index within the vector . | 55 | 11 |
3,553 | def upsert ( self , insert_index , val , fn = None ) : fn = fn or ( lambda current , passed : passed ) self . _magnitude = 0 position = self . position_for_index ( insert_index ) if position < len ( self . elements ) and self . elements [ position ] == insert_index : self . elements [ position + 1 ] = fn ( self . elements [ position + 1 ] , val ) else : self . elements . insert ( position , val ) self . elements . insert ( position , insert_index ) | Inserts or updates an existing index within the vector . | 118 | 11 |
3,554 | def to_list ( self ) : output = [ ] for i in range ( 1 , len ( self . elements ) , 2 ) : output . append ( self . elements [ i ] ) return output | Converts the vector to an array of the elements within the vector | 42 | 13 |
3,555 | def dot ( self , other ) : dot_product = 0 a = self . elements b = other . elements a_len = len ( a ) b_len = len ( b ) i = j = 0 while i < a_len and j < b_len : a_val = a [ i ] b_val = b [ j ] if a_val < b_val : i += 2 elif a_val > b_val : j += 2 else : dot_product += a [ i + 1 ] * b [ j + 1 ] i += 2 j += 2 return dot_product | Calculates the dot product of this vector and another vector . | 127 | 13 |
3,556 | def similarity ( self , other ) : if self . magnitude == 0 or other . magnitude == 0 : return 0 return self . dot ( other ) / self . magnitude | Calculates the cosine similarity between this vector and another vector . | 34 | 14 |
3,557 | def bban_base10 ( number ) : number = bban_compact ( number ) number = number [ 4 : ] + number [ : 4 ] return '' . join ( [ str ( IBAN_ALPHABET . index ( char ) ) for char in number ] ) | Printable Basic Bank Account Number in base - 10 . | 60 | 11 |
3,558 | def _add_scheme ( ) : lists = [ urllib . parse . uses_relative , urllib . parse . uses_netloc , urllib . parse . uses_query , ] for l in lists : l . append ( 'mongodb' ) | urllib . parse doesn't support the mongodb scheme but it's easy to make it so . | 59 | 23 |
3,559 | def field ( self , field_name , boost = 1 , extractor = None ) : if "/" in field_name : raise ValueError ( "Field {} contains illegal character `/`" ) self . _fields [ field_name ] = Field ( field_name , boost , extractor ) | Adds a field to the list of document fields that will be indexed . | 63 | 14 |
3,560 | def b ( self , number ) : if number < 0 : self . _b = 0 elif number > 1 : self . _b = 1 else : self . _b = number | A parameter to tune the amount of field length normalisation that is applied when calculating relevance scores . | 39 | 19 |
3,561 | def add ( self , doc , attributes = None ) : doc_ref = str ( doc [ self . _ref ] ) self . _documents [ doc_ref ] = attributes or { } self . document_count += 1 for field_name , field in self . _fields . items ( ) : extractor = field . extractor field_value = doc [ field_name ] if extractor is None else extractor ( doc ) tokens = Tokenizer ( field_value ) terms = self . pipeline . run ( tokens ) field_ref = FieldRef ( doc_ref , field_name ) field_terms = defaultdict ( int ) # TODO: field_refs are casted to strings in JS, should we allow # FieldRef as keys? self . field_term_frequencies [ str ( field_ref ) ] = field_terms self . field_lengths [ str ( field_ref ) ] = len ( terms ) for term in terms : # TODO: term is a Token, should we allow Tokens as keys? term_key = str ( term ) field_terms [ term_key ] += 1 if term_key not in self . inverted_index : posting = { _field_name : { } for _field_name in self . _fields } posting [ "_index" ] = self . term_index self . term_index += 1 self . inverted_index [ term_key ] = posting if doc_ref not in self . inverted_index [ term_key ] [ field_name ] : self . inverted_index [ term_key ] [ field_name ] [ doc_ref ] = defaultdict ( list ) for metadata_key in self . metadata_whitelist : metadata = term . metadata [ metadata_key ] self . inverted_index [ term_key ] [ field_name ] [ doc_ref ] [ metadata_key ] . append ( metadata ) | Adds a document to the index . | 404 | 7 |
3,562 | def build ( self ) : self . _calculate_average_field_lengths ( ) self . _create_field_vectors ( ) self . _create_token_set ( ) return Index ( inverted_index = self . inverted_index , field_vectors = self . field_vectors , token_set = self . token_set , fields = list ( self . _fields . keys ( ) ) , pipeline = self . search_pipeline , ) | Builds the index creating an instance of lunr . Index . | 102 | 13 |
3,563 | def _create_token_set ( self ) : self . token_set = TokenSet . from_list ( sorted ( list ( self . inverted_index . keys ( ) ) ) ) | Creates a token set of all tokens in the index using lunr . TokenSet | 40 | 17 |
3,564 | def _calculate_average_field_lengths ( self ) : accumulator = defaultdict ( int ) documents_with_field = defaultdict ( int ) for field_ref , length in self . field_lengths . items ( ) : _field_ref = FieldRef . from_string ( field_ref ) field = _field_ref . field_name documents_with_field [ field ] += 1 accumulator [ field ] += length for field_name in self . _fields : accumulator [ field_name ] /= documents_with_field [ field_name ] self . average_field_length = accumulator | Calculates the average document length for this index | 135 | 10 |
3,565 | def _create_field_vectors ( self ) : field_vectors = { } term_idf_cache = { } for field_ref , term_frequencies in self . field_term_frequencies . items ( ) : _field_ref = FieldRef . from_string ( field_ref ) field_name = _field_ref . field_name field_length = self . field_lengths [ field_ref ] field_vector = Vector ( ) field_boost = self . _fields [ field_name ] . boost doc_boost = self . _documents [ _field_ref . doc_ref ] . get ( "boost" , 1 ) for term , tf in term_frequencies . items ( ) : term_index = self . inverted_index [ term ] [ "_index" ] if term not in term_idf_cache : idf = Idf ( self . inverted_index [ term ] , self . document_count ) term_idf_cache [ term ] = idf else : idf = term_idf_cache [ term ] score = ( idf * ( ( self . _k1 + 1 ) * tf ) / ( self . _k1 * ( 1 - self . _b + self . _b * ( field_length / self . average_field_length [ field_name ] ) ) + tf ) ) score *= field_boost score *= doc_boost score_with_precision = round ( score , 3 ) field_vector . insert ( term_index , score_with_precision ) field_vectors [ field_ref ] = field_vector self . field_vectors = field_vectors | Builds a vector space model of every document using lunr . Vector . | 365 | 15 |
3,566 | def estimate ( coll , filter = { } , sample = 1 ) : total = coll . estimated_document_count ( ) if not filter and sample == 1 : return total if sample <= 1 : sample *= total pipeline = list ( builtins . filter ( None , [ { '$sample' : { 'size' : sample } } if sample < total else { } , { '$match' : filter } , { '$count' : 'matched' } , ] ) ) docs = next ( coll . aggregate ( pipeline ) ) ratio = docs [ 'matched' ] / sample return int ( total * ratio ) | Estimate the number of documents in the collection matching the filter . | 130 | 13 |
3,567 | def render ( self , data , accepted_media_type = None , renderer_context = None ) : wrapper = None success = False for wrapper_name in self . wrappers : wrapper_method = getattr ( self , wrapper_name ) try : wrapper = wrapper_method ( data , renderer_context ) except WrapperNotApplicable : pass else : success = True break if not success : raise WrapperNotApplicable ( 'No acceptable wrappers found for response.' , data = data , renderer_context = renderer_context ) renderer_context [ "indent" ] = 4 return super ( JsonApiMixin , self ) . render ( data = wrapper , accepted_media_type = accepted_media_type , renderer_context = renderer_context ) | Convert native data to JSON API | 169 | 7 |
3,568 | def wrap_parser_error ( self , data , renderer_context ) : response = renderer_context . get ( "response" , None ) status_code = response and response . status_code if status_code != 400 : raise WrapperNotApplicable ( 'Status code must be 400.' ) if list ( data . keys ( ) ) != [ 'detail' ] : raise WrapperNotApplicable ( 'Data must only have "detail" key.' ) # Probably a parser error, unless `detail` is a valid field view = renderer_context . get ( "view" , None ) model = self . model_from_obj ( view ) if 'detail' in model . _meta . get_all_field_names ( ) : raise WrapperNotApplicable ( ) return self . wrap_error ( data , renderer_context , keys_are_fields = False , issue_is_title = False ) | Convert parser errors to the JSON API Error format | 198 | 10 |
3,569 | def wrap_field_error ( self , data , renderer_context ) : response = renderer_context . get ( "response" , None ) status_code = response and response . status_code if status_code != 400 : raise WrapperNotApplicable ( 'Status code must be 400.' ) return self . wrap_error ( data , renderer_context , keys_are_fields = True , issue_is_title = False ) | Convert field error native data to the JSON API Error format | 95 | 12 |
3,570 | def wrap_generic_error ( self , data , renderer_context ) : response = renderer_context . get ( "response" , None ) status_code = response and response . status_code is_error = ( status . is_client_error ( status_code ) or status . is_server_error ( status_code ) ) if not is_error : raise WrapperNotApplicable ( "Status code must be 4xx or 5xx." ) return self . wrap_error ( data , renderer_context , keys_are_fields = False , issue_is_title = True ) | Convert generic error native data using the JSON API Error format | 129 | 12 |
3,571 | def wrap_error ( self , data , renderer_context , keys_are_fields , issue_is_title ) : response = renderer_context . get ( "response" , None ) status_code = str ( response and response . status_code ) errors = [ ] for field , issues in data . items ( ) : if isinstance ( issues , six . string_types ) : issues = [ issues ] for issue in issues : error = self . dict_class ( ) error [ "status" ] = status_code if issue_is_title : error [ "title" ] = issue else : error [ "detail" ] = issue if keys_are_fields : if field in ( 'non_field_errors' , NON_FIELD_ERRORS ) : error [ "path" ] = '/-' else : error [ "path" ] = '/' + field errors . append ( error ) wrapper = self . dict_class ( ) wrapper [ "errors" ] = errors return wrapper | Convert error native data to the JSON API Error format | 213 | 11 |
3,572 | def wrap_options ( self , data , renderer_context ) : request = renderer_context . get ( "request" , None ) method = request and getattr ( request , 'method' ) if method != 'OPTIONS' : raise WrapperNotApplicable ( "Request method must be OPTIONS" ) wrapper = self . dict_class ( ) wrapper [ "meta" ] = data return wrapper | Wrap OPTIONS data as JSON API meta value | 87 | 10 |
3,573 | def wrap_paginated ( self , data , renderer_context ) : pagination_keys = [ 'count' , 'next' , 'previous' , 'results' ] for key in pagination_keys : if not ( data and key in data ) : raise WrapperNotApplicable ( 'Not paginated results' ) view = renderer_context . get ( "view" , None ) model = self . model_from_obj ( view ) resource_type = self . model_to_resource_type ( model ) try : from rest_framework . utils . serializer_helpers import ReturnList results = ReturnList ( data [ "results" ] , serializer = data . serializer . fields [ "results" ] , ) except ImportError : results = data [ "results" ] # Use default wrapper for results wrapper = self . wrap_default ( results , renderer_context ) # Add pagination metadata pagination = self . dict_class ( ) pagination [ 'previous' ] = data [ 'previous' ] pagination [ 'next' ] = data [ 'next' ] pagination [ 'count' ] = data [ 'count' ] wrapper . setdefault ( 'meta' , self . dict_class ( ) ) wrapper [ 'meta' ] . setdefault ( 'pagination' , self . dict_class ( ) ) wrapper [ 'meta' ] [ 'pagination' ] . setdefault ( resource_type , self . dict_class ( ) ) . update ( pagination ) return wrapper | Convert paginated data to JSON API with meta | 331 | 10 |
3,574 | def wrap_default ( self , data , renderer_context ) : wrapper = self . dict_class ( ) view = renderer_context . get ( "view" , None ) request = renderer_context . get ( "request" , None ) model = self . model_from_obj ( view ) resource_type = self . model_to_resource_type ( model ) if isinstance ( data , list ) : many = True resources = data else : many = False resources = [ data ] items = [ ] links = self . dict_class ( ) linked = self . dict_class ( ) meta = self . dict_class ( ) for resource in resources : converted = self . convert_resource ( resource , data , request ) item = converted . get ( 'data' , { } ) linked_ids = converted . get ( 'linked_ids' , { } ) if linked_ids : item [ "links" ] = linked_ids items . append ( item ) links . update ( converted . get ( 'links' , { } ) ) linked = self . update_nested ( linked , converted . get ( 'linked' , { } ) ) meta . update ( converted . get ( 'meta' , { } ) ) if many : wrapper [ resource_type ] = items else : wrapper [ resource_type ] = items [ 0 ] if links : links = self . prepend_links_with_name ( links , resource_type ) wrapper [ "links" ] = links if linked : wrapper [ "linked" ] = linked if meta : wrapper [ "meta" ] = meta return wrapper | Convert native data to a JSON API resource collection | 341 | 10 |
3,575 | def acquire_lock ( self ) : # first ensure that a record exists for this session id try : self . collection . insert_one ( dict ( _id = self . id ) ) except pymongo . errors . DuplicateKeyError : pass unlocked_spec = dict ( _id = self . id , locked = None ) lock_timer = ( timers . Timer . after ( self . lock_timeout ) if self . lock_timeout else timers . NeverExpires ( ) ) while not lock_timer . expired ( ) : locked_spec = { '$set' : dict ( locked = datetime . datetime . utcnow ( ) ) } res = self . collection . update_one ( unlocked_spec , locked_spec ) if res . raw_result [ 'updatedExisting' ] : # we have the lock break time . sleep ( 0.1 ) else : raise LockTimeout ( f"Timeout acquiring lock for {self.id}" ) self . locked = True | Acquire the lock . Blocks indefinitely until lock is available unless lock_timeout was supplied . If the lock_timeout elapses raises LockTimeout . | 208 | 29 |
3,576 | def set_boot_device ( self , device , persistent = False ) : operation = "set_boot_device" try : self . sp_manager . create_boot_policy ( ) self . sp_manager . set_boot_device ( device ) except UcsException as ex : raise exception . UcsOperationError ( operation = operation , error = ex ) | Set the boot device for the node . | 77 | 8 |
3,577 | def get_boot_device ( self ) : operation = 'get_boot_device' try : boot_device = self . sp_manager . get_boot_device ( ) return boot_device except UcsException as ex : print ( _ ( "Cisco client exception: %(msg)s." ) , { 'msg' : ex } ) raise exception . UcsOperationError ( operation = operation , error = ex ) | Get the current boot device for the node . | 91 | 9 |
3,578 | def lunr ( ref , fields , documents , languages = None ) : if languages is not None and lang . LANGUAGE_SUPPORT : if isinstance ( languages , basestring ) : languages = [ languages ] unsupported_languages = set ( languages ) - set ( lang . SUPPORTED_LANGUAGES ) if unsupported_languages : raise RuntimeError ( "The specified languages {} are not supported, " "please choose one of {}" . format ( ", " . join ( unsupported_languages ) , ", " . join ( lang . SUPPORTED_LANGUAGES . keys ( ) ) , ) ) builder = lang . get_nltk_builder ( languages ) else : builder = Builder ( ) builder . pipeline . add ( trimmer , stop_word_filter , stemmer ) builder . search_pipeline . add ( stemmer ) builder . ref ( ref ) for field in fields : if isinstance ( field , dict ) : builder . field ( * * field ) else : builder . field ( field ) for document in documents : if isinstance ( document , ( tuple , list ) ) : builder . add ( document [ 0 ] , attributes = document [ 1 ] ) else : builder . add ( document ) return builder . build ( ) | A convenience function to configure and construct a lunr . Index . | 268 | 13 |
3,579 | def from_config ( _config , * * options ) : expected_args = ( 'path' , ) rconfig . check_config_options ( "SQLiteEventStore" , expected_args , tuple ( ) , options ) return SQLiteEventStore ( options [ 'path' ] ) | Instantiate an SQLite event store from config . | 62 | 10 |
3,580 | def key_exists ( self , key ) : assert isinstance ( key , str ) cursor = self . conn . cursor ( ) with contextlib . closing ( cursor ) : cursor . execute ( 'SELECT COUNT(*) FROM events WHERE uuid=?' , ( key , ) ) res = cursor . fetchone ( ) count = res [ 0 ] if count == 0 : return False else : assert count in ( 0 , 1 ) , "Duplicate event ids detected: {0}" . format ( count ) return True | Check whether a key exists in the event store . | 111 | 10 |
3,581 | def count ( self ) : cursor = self . conn . cursor ( ) with contextlib . closing ( cursor ) : cursor . execute ( 'SELECT COUNT(*) FROM events' ) res = cursor . fetchone ( ) return res [ 0 ] | Return the number of events in the db . | 51 | 9 |
3,582 | def close ( self ) : if self . conn : self . conn . close ( ) self . conn = None fname = os . path . basename ( self . _path ) checksum_persister = _get_checksum_persister ( self . _path ) hasher = _initialize_hasher ( self . _path ) with contextlib . closing ( checksum_persister ) : checksum_persister [ fname ] = hasher . hexdigest ( ) | Close the event store . | 104 | 5 |
3,583 | def from_config ( config , * * options ) : expected_args = ( 'path' , ) rconfig . check_config_options ( "LogEventStore" , expected_args , tuple ( ) , options ) return LogEventStore ( options [ 'path' ] ) | Instantiate a LogEventStore from config . | 59 | 9 |
3,584 | def key_exists ( self , key ) : assert isinstance ( key , str ) self . _close ( ) try : return self . _unsafe_key_exists ( key ) finally : self . _open ( ) | Check if key has previously been added to this store . | 49 | 11 |
3,585 | def close ( self ) : fname = os . path . basename ( self . _path ) checksum_persister = _get_checksum_persister ( self . _path ) with contextlib . closing ( checksum_persister ) : checksum_persister [ fname ] = self . _hasher . hexdigest ( ) self . _close ( ) | Persist a checksum and close the file . | 81 | 10 |
3,586 | def from_config ( config , * * options ) : expected_args = ( 'prefix' , 'realclass' ) for arg in expected_args : if arg not in options : msg = "Required option missing: {0}" raise rconfig . ConfigurationError ( msg . format ( arg ) ) # Not logging unrecognized options here, because they might be used # by the real event store instantiated below. classpath = options [ 'realclass' ] classpath_pieces = classpath . split ( '.' ) classname = classpath_pieces [ - 1 ] modulepath = '.' . join ( classpath_pieces [ 0 : - 1 ] ) module = importlib . import_module ( modulepath ) estore_class = getattr ( module , classname ) return RotatedEventStore ( lambda fname : estore_class ( fname ) , options [ 'path' ] , options [ 'prefix' ] ) | Instantiate a RotatedEventStore from config . | 197 | 10 |
3,587 | def _construct_filename ( self , batchno ) : return os . path . join ( self . dirpath , "{0}.{1}" . format ( self . prefix , batchno ) ) | Construct a filename for a database . | 41 | 7 |
3,588 | def rotate ( self ) : self . _logger . info ( 'Rotating data files. New batch number will be: %s' , self . batchno + 1 ) self . estore . close ( ) self . estore = None self . batchno += 1 self . estore = self . _open_event_store ( ) | Rotate the files to disk . | 72 | 7 |
3,589 | def _find_batch_containing_event ( self , uuid ) : if self . estore . key_exists ( uuid ) : # Reusing already opened DB if possible return self . batchno else : for batchno in range ( self . batchno - 1 , - 1 , - 1 ) : # Iterating backwards here because we are more likely to find # the event in an later archive, than earlier. db = self . _open_event_store ( batchno ) with contextlib . closing ( db ) : if db . key_exists ( uuid ) : return batchno return None | Find the batch number that contains a certain event . | 129 | 10 |
3,590 | def from_config ( config , * * options ) : required_args = ( 'storage-backends' , ) optional_args = { 'events_per_batch' : 25000 } rconfig . check_config_options ( "SyncedRotationEventStores" , required_args , tuple ( optional_args . keys ( ) ) , options ) if "events_per_batch" in options : events_per_batch = int ( options [ "events_per_batch" ] ) else : events_per_batch = optional_args [ "events_per_batch" ] estore = SyncedRotationEventStores ( events_per_batch ) for section in options [ 'storage-backends' ] . split ( ' ' ) : try : substore = rconfig . construct_eventstore ( config , section ) estore . add_rotated_store ( substore ) except Exception as e : _logger . exception ( 'Could not instantiate substore from' ' section %s' , section ) estore . close ( ) raise return estore | Instantiate a SyncedRotationEventStores from config . | 231 | 13 |
3,591 | def hexdump ( stream ) : if isinstance ( stream , six . string_types ) : stream = BytesIO ( stream ) row = 0 while True : data = stream . read ( 16 ) if not data : break hextets = data . encode ( 'hex' ) . ljust ( 32 ) canonical = printable ( data ) print ( '%08x %s %s |%s|' % ( row * 16 , ' ' . join ( hextets [ x : x + 2 ] for x in range ( 0x00 , 0x10 , 2 ) ) , ' ' . join ( hextets [ x : x + 2 ] for x in range ( 0x10 , 0x20 , 2 ) ) , canonical , ) ) row += 1 | Display stream contents in hexadecimal and ASCII format . The stream specified must either be a file - like object that supports the read method to receive bytes or it can be a string . | 164 | 38 |
3,592 | def printable ( sequence ) : return '' . join ( list ( map ( lambda c : c if c in PRINTABLE else '.' , sequence ) ) ) | Return a printable string from the input sequence | 34 | 9 |
3,593 | def sparkline ( data ) : min_value = float ( min ( data ) ) max_value = float ( max ( data ) ) steps = ( max_value - min_value ) / float ( len ( SPARKCHAR ) - 1 ) return '' . join ( [ SPARKCHAR [ int ( ( float ( value ) - min_value ) / steps ) ] for value in data ] ) | Return a spark line for the given data set . | 84 | 10 |
3,594 | def get_language_stemmer ( language ) : from lunr . languages import SUPPORTED_LANGUAGES from nltk . stem . snowball import SnowballStemmer return SnowballStemmer ( SUPPORTED_LANGUAGES [ language ] ) | Retrieves the SnowballStemmer for a particular language . | 56 | 14 |
3,595 | def nltk_stemmer ( stemmer , token , i = None , tokens = None ) : def wrapped_stem ( token , metadata = None ) : return stemmer . stem ( token ) return token . update ( wrapped_stem ) | Wrapper around an NLTK SnowballStemmer which includes stop words for each language . | 51 | 20 |
3,596 | def is_seq ( obj ) : if not hasattr ( obj , '__iter__' ) : return False if isinstance ( obj , basestring ) : return False return True | Returns True if object is not a string but is iterable | 39 | 12 |
3,597 | def register ( cls , func ) : cls . _add_version_info ( func ) cls . _upgrade_funcs . add ( func ) return func | Decorate a migration function with this method to make it available for migrating cases . | 37 | 16 |
3,598 | def _add_version_info ( func ) : pattern = r'v(?P<source>\d+)_to_(?P<target>\d+)$' match = re . match ( pattern , func . __name__ ) if not match : raise ValueError ( "migration function name must match " + pattern ) func . source , func . target = map ( int , match . groups ( ) ) | Add . source and . target attributes to the registered function . | 89 | 12 |
3,599 | def migrate_doc ( self , doc ) : orig_ver = doc . get ( self . version_attribute_name , 0 ) funcs = self . _get_migrate_funcs ( orig_ver , self . target_version ) for func in funcs : func ( self , doc ) doc [ self . version_attribute_name ] = func . target return doc | Migrate the doc from its current version to the target version and return it . | 80 | 16 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.