idx int64 0 63k | question stringlengths 53 5.28k | target stringlengths 5 805 |
|---|---|---|
42,900 | def ackSoon ( self ) : if self . _ackTimer is None : def originateAck ( ) : self . _ackTimer = None self . originate ( ack = True ) self . _ackTimer = reactor . callLater ( 0.1 , originateAck ) else : self . _ackTimer . reset ( ACK_DELAY ) | Emit an acknowledgement packet soon . |
42,901 | def originate ( self , data = '' , syn = False , ack = False , fin = False , rst = False ) : if self . _ackTimer is not None : self . _ackTimer . cancel ( ) self . _ackTimer = None if syn : assert self . nextSendSeqNum == 0 , ( "NSSN = " + repr ( self . nextSendSeqNum ) ) assert self . hostSendISN == 0 p = PTCPPacket . create ( self . hostPseudoPort , self . peerPseudoPort , seqNum = ( self . nextSendSeqNum + self . hostSendISN ) % ( 2 ** 32 ) , ackNum = self . currentAckNum ( ) , data = data , window = self . recvWindow , syn = syn , ack = ack , fin = fin , rst = rst , destination = self . peerAddressTuple ) sl = p . segmentLength ( ) self . nextSendSeqNum += sl if p . mustRetransmit ( ) : if self . retransmissionQueue : if self . retransmissionQueue [ - 1 ] . fin : raise AssertionError ( "Sending %r after FIN??!" % ( p , ) ) self . retransmissionQueue . append ( p ) self . _retransmitLater ( ) if not self . sendWindowRemaining : self . _writeBufferFull ( ) else : pass self . ptcp . sendPacket ( p ) return p | Create a packet enqueue it to be sent and return it . |
42,902 | def connectionJustEstablished ( self ) : assert not self . disconnecting assert not self . disconnected try : p = self . factory . buildProtocol ( PTCPAddress ( self . peerAddressTuple , self . pseudoPortPair ) ) p . makeConnection ( self ) except : log . msg ( "Exception during PTCP connection setup." ) log . err ( ) self . loseConnection ( ) else : self . protocol = p | We sent out SYN they acknowledged it . Congratulations you have a new baby connection . |
42,903 | def connect ( self , factory , host , port , pseudoPort = 1 ) : sourcePseudoPort = genConnID ( ) % MAX_PSEUDO_PORT conn = self . _connections [ ( pseudoPort , sourcePseudoPort , ( host , port ) ) ] = PTCPConnection ( sourcePseudoPort , pseudoPort , self , factory , ( host , port ) ) conn . machine . appActiveOpen ( ) return conn | Attempt to establish a new connection via PTCP to the given remote address . |
42,904 | def waitForAllConnectionsToClose ( self ) : if not self . _connections : return self . _stop ( ) return self . _allConnectionsClosed . deferred ( ) . addBoth ( self . _stop ) | Wait for all currently - open connections to enter the CLOSED state . Currently this is only usable from test fixtures . |
42,905 | def setup_Q2Q ( self , path , q2qPortnum = q2q . port , inboundTCPPortnum = q2q . port + 1 , publicIP = None ) : store = DirectoryCertificateAndUserStore ( path ) self . attach ( q2q . Q2QService ( protocolFactoryFactory = IdentityAdminFactory ( store ) . examineRequest , certificateStorage = store , portal = Portal ( store , checkers = [ store ] ) , q2qPortnum = q2qPortnum , inboundTCPPortnum = inboundTCPPortnum , publicIP = publicIP , ) ) | Set up a Q2Q service . |
42,906 | def replace_macros ( string , spec = None ) : if spec : assert isinstance ( spec , Spec ) def _is_conditional ( macro : str ) -> bool : return macro . startswith ( "?" ) or macro . startswith ( "!" ) def _test_conditional ( macro : str ) -> bool : if macro [ 0 ] == "?" : return True if macro [ 0 ] == "!" : return False raise Exception ( "Given string is not a conditional macro" ) def _macro_repl ( match ) : macro_name = match . group ( 1 ) if _is_conditional ( macro_name ) and spec : parts = macro_name [ 1 : ] . split ( sep = ":" , maxsplit = 1 ) assert parts if _test_conditional ( macro_name ) : if hasattr ( spec , parts [ 0 ] ) : if len ( parts ) == 2 : return parts [ 1 ] return getattr ( spec , parts [ 0 ] , None ) return "" else : if not hasattr ( spec , parts [ 0 ] ) : if len ( parts ) == 2 : return parts [ 1 ] return getattr ( spec , parts [ 0 ] , None ) return "" if spec : value = getattr ( spec , macro_name , None ) if value : return str ( value ) return match . string [ match . start ( ) : match . end ( ) ] while True : ret = re . sub ( _macro_pattern , _macro_repl , string ) if ret != string : string = ret continue return ret | Replace all macros in given string with corresponding values . |
42,907 | def update ( self , spec_obj , context , match_obj , line ) : assert spec_obj assert context assert match_obj assert line return self . update_impl ( spec_obj , context , match_obj , line ) | Update given spec object and parse context and return them again . |
42,908 | def packages_dict ( self ) : assert self . packages return dict ( zip ( [ package . name for package in self . packages ] , self . packages ) ) | All packages in this RPM spec as a dictionary . |
42,909 | def from_file ( filename ) : spec = Spec ( ) with open ( filename , "r" , encoding = "utf-8" ) as f : parse_context = { "current_subpackage" : None } for line in f : spec , parse_context = _parse ( spec , parse_context , line ) return spec | Creates a new Spec object from a given file . |
42,910 | def from_string ( string : str ) : spec = Spec ( ) parse_context = { "current_subpackage" : None } for line in string . splitlines ( ) : spec , parse_context = _parse ( spec , parse_context , line ) return spec | Creates a new Spec object from a given string . |
42,911 | def parse_acl ( acl_iter ) : if isinstance ( acl_iter , basestring ) : acl_iter = [ acl_iter ] for chunk in acl_iter : if isinstance ( chunk , basestring ) : chunk = chunk . splitlines ( ) chunk = [ re . sub ( r'#.+' , '' , line ) . strip ( ) for line in chunk ] chunk = filter ( None , chunk ) else : chunk = [ chunk ] for ace in chunk : if isinstance ( ace , basestring ) : ace = ace . split ( None , 2 ) state , predicate , permission_set = ace yield parse_state ( state ) , parse_predicate ( predicate ) , parse_permission_set ( permission_set ) else : state , predicate , permission_set = ace yield parse_state ( state ) , parse_predicate ( predicate ) , permission_set | Parse a string or list of ACE definitions into usable ACEs . |
42,912 | def iter_object_acl ( root ) : for obj in iter_object_graph ( root ) : for ace in parse_acl ( getattr ( obj , '__acl__' , ( ) ) ) : yield ace | Child - first discovery of ACEs for an object . |
42,913 | def get_object_context ( root ) : context = { } for obj in iter_object_graph ( root , parents_first = True ) : context . update ( getattr ( obj , '__acl_context__' , { } ) ) return context | Depth - first discovery of authentication context for an object . |
42,914 | def load_skills_data ( ) -> dict : skills_data_file = expanduser ( '~/.mycroft/skills.json' ) if isfile ( skills_data_file ) : try : with open ( skills_data_file ) as f : return json . load ( f ) except json . JSONDecodeError : return { } else : return { } | Contains info on how skills should be updated |
42,915 | def get_skill_entry ( name , skills_data ) -> dict : for e in skills_data . get ( 'skills' , [ ] ) : if e . get ( 'name' ) == name : return e return { } | Find a skill entry in the skills_data and returns it . |
42,916 | def deploy ( Class , name = None , uid = None , gid = None , ** kw ) : svc = Class ( ** kw ) if name is None : name = Class . __name__ svc . setName ( name ) app = service . Application ( name , uid = uid , gid = gid ) app . addComponent ( NotPersistable ( app ) , ignoreClass = True ) svc . setServiceParent ( app ) return app | Create an application with the give name uid and gid . |
42,917 | def addServer ( self , normalPort , sslPort , f , name ) : tcp = internet . TCPServer ( normalPort , f ) tcp . setName ( name ) self . servers . append ( tcp ) if sslPort is not None : ssl = internet . SSLServer ( sslPort , f , contextFactory = self . sslfac ) ssl . setName ( name + 's' ) self . servers . append ( ssl ) | Add a TCP and an SSL server . Name them name and name + s . |
42,918 | def _write ( self , body , id ) : if id not in self . connections : raise error . ConnectionDone ( ) connection = self . connections [ id ] connection . dataReceived ( body ) return { } | Respond to a WRITE command sending some data over a virtual channel created by VIRTUAL . The answer is simply an acknowledgement as it is simply meant to note that the write went through without errors . |
42,919 | def _close ( self , id ) : connection = self . connections [ id ] connection . connectionLost ( Failure ( CONNECTION_DONE ) ) return { } | Respond to a CLOSE command dumping some data onto the stream . As with WRITE this returns an empty acknowledgement . |
42,920 | def _sign ( self , certificate_request , password ) : if self . service . portal is None : raise BadCertificateRequest ( "This agent cannot sign certificates." ) subj = certificate_request . getSubject ( ) sk = subj . keys ( ) if 'commonName' not in sk : raise BadCertificateRequest ( "Certificate requested with bad subject: %s" % ( sk , ) ) uandd = subj . commonName . split ( "@" ) if len ( uandd ) != 2 : raise BadCertificateRequest ( "Won't sign certificates for other domains" ) domain = uandd [ 1 ] CS = self . service . certificateStorage ourCert = CS . getPrivateCertificate ( domain ) D = self . service . portal . login ( UsernameShadowPassword ( subj . commonName , password ) , self , ivertex . IQ2QUser ) def _ ( ial ) : ( iface , aspect , logout ) = ial ser = CS . genSerial ( domain ) return dict ( certificate = aspect . signCertificateRequest ( certificate_request , ourCert , ser ) ) return D . addCallback ( _ ) | Respond to a request to sign a CSR for a user or agent located within our domain . |
42,921 | def _retrieveRemoteCertificate ( self , From , port = port ) : CS = self . service . certificateStorage host = str ( From . domainAddress ( ) ) p = AMP ( ) p . wrapper = self . wrapper f = protocol . ClientCreator ( reactor , lambda : p ) connD = f . connectTCP ( host , port ) def connected ( proto ) : dhost = From . domainAddress ( ) iddom = proto . callRemote ( Identify , subject = dhost ) def gotCert ( identifyBox ) : theirCert = identifyBox [ 'certificate' ] theirIssuer = theirCert . getIssuer ( ) . commonName theirName = theirCert . getSubject ( ) . commonName if ( theirName != str ( dhost ) ) : raise VerifyError ( "%r claimed it was %r in IDENTIFY response" % ( theirName , dhost ) ) if ( theirIssuer != str ( dhost ) ) : raise VerifyError ( "self-signed %r claimed it was issued by " "%r in IDENTIFY response" % ( dhost , theirIssuer ) ) def storedCert ( ignored ) : return theirCert return CS . storeSelfSignedCertificate ( str ( dhost ) , theirCert ) . addCallback ( storedCert ) def nothingify ( x ) : proto . transport . loseConnection ( ) return x return iddom . addCallback ( gotCert ) . addBoth ( nothingify ) connD . addCallback ( connected ) return connD | The entire conversation starting with TCP handshake and ending at disconnect to retrieve a foreign domain s certificate for the first time . |
42,922 | def connect ( self , From , to , protocolName , clientFactory , chooser ) : publicIP = self . _determinePublicIP ( ) A = dict ( From = From , to = to , protocol = protocolName ) if self . service . dispatcher is not None : A [ 'udp_source' ] = ( publicIP , self . service . sharedUDPPortnum ) else : log . msg ( "dispatcher unavailable when connecting" ) D = self . callRemote ( Inbound , ** A ) def _connected ( answer ) : listenersD = defer . maybeDeferred ( chooser , answer [ 'listeners' ] ) def gotListeners ( listeners ) : allConnectionAttempts = [ ] for listener in listeners : d = self . attemptConnectionMethods ( listener [ 'methods' ] , listener [ 'id' ] , From , to , protocolName , clientFactory , ) allConnectionAttempts . append ( d ) return defer . DeferredList ( allConnectionAttempts ) listenersD . addCallback ( gotListeners ) def finishedAllAttempts ( results ) : succeededAny = False failures = [ ] if not results : return Failure ( NoAttemptsMade ( "there was no available path for connections " "(%r->%r/%s)" % ( From , to , protocolName ) ) ) for succeeded , result in results : if succeeded : succeededAny = True randomConnection = result break else : failures . append ( result ) if not succeededAny : return Failure ( AttemptsFailed ( [ failure . getBriefTraceback ( ) for failure in failures ] ) ) return randomConnection return listenersD . addCallback ( finishedAllAttempts ) return D . addCallback ( _connected ) | Issue an INBOUND command creating a virtual connection to the peer given identifying information about the endpoint to connect to and a protocol factory . |
42,923 | def requestAvatarId ( self , credentials ) : username , domain = credentials . username . split ( "@" ) key = self . users . key ( domain , username ) if key is None : return defer . fail ( UnauthorizedLogin ( ) ) def _cbPasswordChecked ( passwordIsCorrect ) : if passwordIsCorrect : return username + '@' + domain else : raise UnauthorizedLogin ( ) return defer . maybeDeferred ( credentials . checkPassword , key ) . addCallback ( _cbPasswordChecked ) | Return the ID associated with these credentials . |
42,924 | def addPrivateCertificate ( self , subjectName , existingCertificate = None ) : if existingCertificate is None : assert '@' not in subjectName , "Don't self-sign user certs!" mainDN = DistinguishedName ( commonName = subjectName ) mainKey = KeyPair . generate ( ) mainCertReq = mainKey . certificateRequest ( mainDN ) mainCertData = mainKey . signCertificateRequest ( mainDN , mainCertReq , lambda dn : True , self . genSerial ( subjectName ) ) mainCert = mainKey . newCertificate ( mainCertData ) else : mainCert = existingCertificate self . localStore [ subjectName ] = mainCert | Add a PrivateCertificate object to this store for this subjectName . |
42,925 | def iterconnections ( self ) : return itertools . chain ( self . secureConnectionCache . cachedConnections . itervalues ( ) , iter ( self . subConnections ) , ( self . dispatcher or ( ) ) and self . dispatcher . iterconnections ( ) ) | Iterator of all connections associated with this service whether cached or not . For testing purposes only . |
42,926 | def listenQ2Q ( self , fromAddress , protocolsToFactories , serverDescription ) : myDomain = fromAddress . domainAddress ( ) D = self . getSecureConnection ( fromAddress , myDomain ) def _secured ( proto ) : lfm = self . localFactoriesMapping def startup ( listenResult ) : for protocol , factory in protocolsToFactories . iteritems ( ) : key = ( fromAddress , protocol ) if key not in lfm : lfm [ key ] = [ ] lfm [ key ] . append ( ( factory , serverDescription ) ) factory . doStart ( ) def shutdown ( ) : for protocol , factory in protocolsToFactories . iteritems ( ) : lfm [ fromAddress , protocol ] . remove ( ( factory , serverDescription ) ) factory . doStop ( ) proto . notifyOnConnectionLost ( shutdown ) return listenResult if self . dispatcher is not None : gp = proto . transport . getPeer ( ) udpAddress = ( gp . host , gp . port ) pubUDPDeferred = self . _retrievePublicUDPPortNumber ( udpAddress ) else : pubUDPDeferred = defer . succeed ( None ) def _gotPubUDPPort ( publicAddress ) : self . _publicUDPAddress = publicAddress return proto . listen ( fromAddress , protocolsToFactories . keys ( ) , serverDescription ) . addCallback ( startup ) pubUDPDeferred . addCallback ( _gotPubUDPPort ) return pubUDPDeferred D . addCallback ( _secured ) return D | Right now this is really only useful in the client implementation since it is transient . protocolFactoryFactory is used for persistent listeners . |
42,927 | def requestCertificateForAddress ( self , fromAddress , sharedSecret ) : kp = KeyPair . generate ( ) subject = DistinguishedName ( commonName = str ( fromAddress ) ) reqobj = kp . requestObject ( subject ) fakereq = kp . requestObject ( subject ) ssigned = kp . signRequestObject ( subject , fakereq , 1 ) certpair = PrivateCertificate . fromCertificateAndKeyPair fakecert = certpair ( ssigned , kp ) apc = self . certificateStorage . addPrivateCertificate gettingSecureConnection = self . getSecureConnection ( fromAddress , fromAddress . domainAddress ( ) , authorize = False , usePrivateCertificate = fakecert , ) def gotSecureConnection ( secured ) : return secured . callRemote ( Sign , certificate_request = reqobj , password = sharedSecret ) gettingSecureConnection . addCallback ( gotSecureConnection ) def gotSignResponse ( signResponse ) : cert = signResponse [ 'certificate' ] privcert = certpair ( cert , kp ) apc ( str ( fromAddress ) , privcert ) return signResponse return gettingSecureConnection . addCallback ( gotSignResponse ) | Connect to the authoritative server for the domain part of the given address and obtain a certificate signed by the root certificate for that domain then store that certificate in my local certificate storage . |
42,928 | def _backup_previous_version ( func : Callable = None ) : @ wraps ( func ) def wrapper ( self , * args , ** kwargs ) : self . old_path = None if self . is_local : self . old_path = join ( gettempdir ( ) , self . name ) if exists ( self . old_path ) : rmtree ( self . old_path ) shutil . copytree ( self . path , self . old_path ) try : func ( self , * args , ** kwargs ) except ( SkillModified , GitError , GitException ) : raise except Exception : LOG . info ( 'Problem performing action. Restoring skill to ' 'previous state...' ) if exists ( self . path ) : rmtree ( self . path ) if self . old_path and exists ( self . old_path ) : shutil . copytree ( self . old_path , self . path ) self . is_local = exists ( self . path ) raise return wrapper | Private decorator to back up previous skill folder |
42,929 | def attach ( self , remote_entry ) : self . name = remote_entry . name self . sha = remote_entry . sha self . url = remote_entry . url self . author = remote_entry . author return self | Attach a remote entry to a local entry |
42,930 | def parse_permission_set ( input ) : if isinstance ( input , basestring ) : for func in current_acl_manager . permission_set_parsers : res = func ( input ) if res is not None : input = res break if isinstance ( input , basestring ) : try : return current_acl_manager . permission_sets [ input ] except KeyError : raise ValueError ( 'unknown permission set %r' % input ) return input | Lookup a permission set name in the defined permissions . |
42,931 | def is_permission_in_set ( perm , perm_set ) : if isinstance ( perm_set , basestring ) : return perm == perm_set elif isinstance ( perm_set , Container ) : return perm in perm_set elif isinstance ( perm_set , Callable ) : return perm_set ( perm ) else : raise TypeError ( 'permission set must be a string, container, or callable' ) | Test if a permission is in the given set . |
42,932 | def check_query_data ( self , query_data ) : for key in self . required_keys : if key not in query_data : raise ValidationError ( 'All items in the {} querylist attribute should contain a ' '`{}` key' . format ( self . __class__ . __name__ , key ) ) | All items in a querylist must at least have queryset key and a serializer_class key . Any querylist item lacking both those keys will raise a ValidationError |
42,933 | def load_queryset ( self , query_data , request , * args , ** kwargs ) : queryset = query_data . get ( 'queryset' , [ ] ) if isinstance ( queryset , QuerySet ) : queryset = queryset . all ( ) queryset = self . filter_queryset ( queryset ) filter_fn = query_data . get ( 'filter_fn' , None ) if filter_fn is not None : queryset = filter_fn ( queryset , request , * args , ** kwargs ) page = self . paginate_queryset ( queryset ) self . is_paginated = page is not None return page if page is not None else queryset | Fetches the queryset and runs any necessary filtering both built - in rest_framework filters and custom filters passed into the querylist |
42,934 | def initial ( self , request , * args , ** kwargs ) : super ( FlatMultipleModelMixin , self ) . initial ( request , * args , ** kwargs ) assert not ( self . sorting_field and self . sorting_fields ) , '{} should either define ``sorting_field`` or ``sorting_fields`` property, not both.' . format ( self . __class__ . __name__ ) if self . sorting_field : warnings . warn ( '``sorting_field`` property is pending its deprecation. Use ``sorting_fields`` instead.' , DeprecationWarning ) self . sorting_fields = [ self . sorting_field ] self . _sorting_fields = self . sorting_fields | Overrides DRF s initial in order to set the _sorting_field from corresponding property in view . Protected property is required in order to support overriding of sorting_field via |
42,935 | def add_to_results ( self , data , label , results ) : for datum in data : if label is not None : datum . update ( { 'type' : label } ) results . append ( datum ) return results | Adds the label to the results as needed then appends the data to the running results tab |
42,936 | def prepare_sorting_fields ( self ) : if self . sorting_parameter_name in self . request . query_params : self . _sorting_fields = [ _ . strip ( ) for _ in self . request . query_params . get ( self . sorting_parameter_name ) . split ( ',' ) ] if self . _sorting_fields : self . _sorting_fields = [ ( self . sorting_fields_map . get ( field . lstrip ( '-' ) , field . lstrip ( '-' ) ) , field [ 0 ] == '-' ) for field in self . _sorting_fields ] | Determine sorting direction and sorting field based on request query parameters and sorting options of self |
42,937 | def get_label ( self , queryset , query_data ) : if query_data . get ( 'label' , False ) : return query_data [ 'label' ] try : return queryset . model . __name__ except AttributeError : return query_data [ 'queryset' ] . model . __name__ | Gets option label for each datum . Can be used for type identification of individual serialized objects |
42,938 | def _extension ( modpath : str ) -> setuptools . Extension : return setuptools . Extension ( modpath , [ modpath . replace ( "." , "/" ) + ".py" ] ) | Make setuptools . Extension . |
42,939 | def extend_env ( conn , arguments ) : try : result = conn . gateway . remote_exec ( "import os; channel.send(os.environ.copy())" ) env = result . receive ( ) except Exception : conn . logger . exception ( 'failed to retrieve the remote environment variables' ) env = { } path = env . get ( 'PATH' , '' ) env [ 'PATH' ] = path + '/usr/local/bin:/bin:/usr/bin:/usr/local/sbin:/usr/sbin:/sbin' arguments [ 'env' ] = env if arguments . get ( 'extend_env' ) : for key , value in arguments [ 'extend_env' ] . items ( ) : arguments [ 'env' ] [ key ] = value arguments . pop ( 'extend_env' ) return arguments | get the remote environment s env so we can explicitly add the path without wiping out everything |
42,940 | def run ( conn , command , exit = False , timeout = None , ** kw ) : stop_on_error = kw . pop ( 'stop_on_error' , True ) if not kw . get ( 'env' ) : kw = extend_env ( conn , kw ) command = conn . cmd ( command ) timeout = timeout or conn . global_timeout conn . logger . info ( 'Running command: %s' % ' ' . join ( admin_command ( conn . sudo , command ) ) ) result = conn . execute ( _remote_run , cmd = command , ** kw ) try : reporting ( conn , result , timeout ) except Exception : remote_trace = traceback . format_exc ( ) remote_error = RemoteError ( remote_trace ) if remote_error . exception_name == 'RuntimeError' : conn . logger . error ( remote_error . exception_line ) else : for tb_line in remote_trace . split ( '\n' ) : conn . logger . error ( tb_line ) if stop_on_error : raise RuntimeError ( 'Failed to execute command: %s' % ' ' . join ( command ) ) if exit : conn . exit ( ) | A real - time - logging implementation of a remote subprocess . Popen call where a command is just executed on the remote end and no other handling is done . |
42,941 | def _write_file_safely ( local_path , fileinfo , response ) : try : _write_file ( local_path , fileinfo , response ) except BaseException as e : logger . warning ( "{} interrupted writing {} -- " "cleaning up partial file" . format ( e . __class__ . __name__ , local_path ) ) os . remove ( local_path ) raise e | attempts to stream a remote file into a local file object removes the local file if it s interrupted by any error |
42,942 | def up_by_files ( to_sync , remote_dir = DEFAULT_REMOTE_DIR , remote_files = None ) : if remote_files is None : remote_files = command . map_files_raw ( remote_dir = remote_dir ) for local_file in to_sync : _sync_local_file ( local_file , remote_dir , remote_files ) | Sync a given list of local files to remote_dir dir |
42,943 | def _upload_file_safely ( fileinfo , remote_dir ) : try : upload . upload_file ( fileinfo . path , remote_dir = remote_dir ) except BaseException as e : logger . warning ( "{} interrupted writing {} -- " "cleaning up partial remote file" . format ( e . __class__ . __name__ , fileinfo . path ) ) upload . delete_file ( fileinfo . path ) raise e | attempts to upload a local file to FlashAir tries to remove the remote file if interrupted by any error |
42,944 | def map_into_range ( low , high , raw_value ) : value = float ( raw_value ) if low < high : if value < low : return 0 elif value > high : return 1.0 elif low > high : if value > low : return 0 elif value < high : return - 1.0 return ( value - low ) / abs ( high - low ) | Map an input function into an output value clamping such that the magnitude of the output is at most 1 . 0 |
42,945 | def map_dual_axis ( low , high , centre , dead_zone , hot_zone , value ) : if value <= centre : return map_single_axis ( centre , low , dead_zone , hot_zone , value ) else : return map_single_axis ( centre , high , dead_zone , hot_zone , value ) | Map an axis with a central dead zone and hot zones at each end to a range from - 1 . 0 to 1 . 0 . This in effect uses two calls to map_single_axis choosing whether to use centre and low or centre and high as the low and high values in that call based on which side of the centre value the input value falls . This is the call that handles mapping of values on regular joysticks where there s a centre point to which the physical control returns when no input is being made . |
42,946 | def axis_updated ( self , event : InputEvent , prefix = None ) : if prefix is not None : axis = self . axes_by_code . get ( prefix + str ( event . code ) ) else : axis = self . axes_by_code . get ( event . code ) if axis is not None : axis . receive_device_value ( event . value ) else : logger . debug ( 'Unknown axis code {} ({}), value {}' . format ( event . code , prefix , event . value ) ) | Called to process an absolute axis event from evdev this is called internally by the controller implementations |
42,947 | def set_axis_centres ( self , * args ) : for axis in self . axes_by_code . values ( ) : if isinstance ( axis , CentredAxis ) : axis . centre = axis . value | Sets the centre points for each axis to the current value for that axis . This centre value is used when computing the value for the axis and is subtracted before applying any scaling . This will only be applied to CentredAxis instances |
42,948 | def names ( self ) -> [ str ] : return sorted ( [ name for name in self . axes_by_sname . keys ( ) if name is not '' ] ) | The snames of all axis objects |
42,949 | def _input_to_raw_value ( self , value : int ) -> float : return ( float ( value ) - self . min_raw_value ) / self . max_raw_value | Convert the value read from evdev to a 0 . 0 to 1 . 0 range . |
42,950 | def button_pressed ( self , key_code , prefix = None ) : if prefix is not None : state = self . buttons_by_code . get ( prefix + str ( key_code ) ) else : state = self . buttons_by_code . get ( key_code ) if state is not None : for handler in state . button_handlers : handler ( state . button ) state . is_pressed = True state . last_pressed = time ( ) state . was_pressed_since_last_check = True else : logger . debug ( 'Unknown button code {} ({})' . format ( key_code , prefix ) ) | Called from the controller classes to update the state of this button manager when a button is pressed . |
42,951 | def button_released ( self , key_code , prefix = None ) : if prefix is not None : state = self . buttons_by_code . get ( prefix + str ( key_code ) ) else : state = self . buttons_by_code . get ( key_code ) if state is not None : state . is_pressed = False state . last_pressed = None | Called from the controller classes to update the state of this button manager when a button is released . |
42,952 | def check_presses ( self ) : pressed = [ ] for button , state in self . buttons . items ( ) : if state . was_pressed_since_last_check : pressed . append ( button ) state . was_pressed_since_last_check = False self . __presses = ButtonPresses ( pressed ) return self . __presses | Return the set of Buttons which have been pressed since this call was last made clearing it as we do . |
42,953 | def held ( self , sname ) : state = self . buttons_by_sname . get ( sname ) if state is not None : if state . is_pressed and state . last_pressed is not None : return time ( ) - state . last_pressed return None | Determines whether a button is currently held identifying it by standard name |
42,954 | def _func ( self ) -> typing . Optional [ typing . Callable [ ... , typing . Union [ "typing.Awaitable[typing.Any]" , typing . Any ] ] ] : return self . __func | Get wrapped function . |
42,955 | def _await_if_required ( target : typing . Callable [ ... , typing . Union [ "typing.Awaitable[typing.Any]" , typing . Any ] ] ) -> typing . Callable [ ... , typing . Any ] : @ functools . wraps ( target ) def wrapper ( * args , ** kwargs ) : result = target ( * args , ** kwargs ) if asyncio . iscoroutine ( result ) : loop = asyncio . new_event_loop ( ) result = loop . run_until_complete ( result ) loop . close ( ) return result return wrapper | Await result if coroutine was returned . |
42,956 | def unique_name ( device : InputDevice ) -> str : if device . uniq : return device . uniq elif device . phys : return device . phys . split ( '/' ) [ 0 ] return '{}-{}-{}-{}' . format ( device . info . vendor , device . info . product , device . info . version , device . path ) | Construct a unique name for the device based on in order if available the uniq ID the phys ID and finally a concatenation of vendor product version and filename . |
42,957 | def find_matching_controllers ( * requirements , ** kwargs ) -> [ ControllerDiscovery ] : requirements = list ( requirements ) if requirements is None or len ( requirements ) == 0 : requirements = [ ControllerRequirement ( ) ] def pop_controller ( r : ControllerRequirement , discoveries : [ ControllerDiscovery ] ) -> ControllerDiscovery : for index , d in enumerate ( discoveries ) : if r . accept ( d ) : return discoveries . pop ( index ) raise ControllerNotFoundError ( ) all_controllers = find_all_controllers ( ** kwargs ) try : return list ( pop_controller ( r , all_controllers ) for r in requirements ) except ControllerNotFoundError as exception : logger . info ( 'Unable to satisfy controller requirements' + ', required {}, found {}' . format ( requirements , find_all_controllers ( ** kwargs ) ) ) raise exception | Find a sequence of controllers which match the supplied requirements or raise an error if no such controllers exist . |
42,958 | def print_devices ( ) : def device_verbose_info ( device : InputDevice ) -> { } : def axis_name ( axis_code ) : try : return ecodes . ABS [ axis_code ] except KeyError : return 'EXTENDED_CODE_{}' . format ( axis_code ) def rel_axis_name ( axis_code ) : try : return ecodes . REL [ axis_code ] except KeyError : return 'EXTENDED_CODE_{}' . format ( axis_code ) axes = None if has_abs_axes ( device ) : axes = { axis_name ( axis_code ) : { 'code' : axis_code , 'min' : axis_info . min , 'max' : axis_info . max , 'fuzz' : axis_info . fuzz , 'flat' : axis_info . flat , 'res' : axis_info . resolution } for axis_code , axis_info in device . capabilities ( ) . get ( 3 ) } rel_axes = None if has_rel_axes ( device ) : print ( device . capabilities ( ) . get ( 2 ) ) rel_axes = { rel_axis_name ( axis_code ) : { 'code' : axis_code } for axis_code in device . capabilities ( ) . get ( 2 ) } buttons = None if has_buttons ( device ) : buttons = { code : names for ( names , code ) in dict ( util . resolve_ecodes_dict ( { 1 : device . capabilities ( ) . get ( 1 ) } ) ) . get ( ( 'EV_KEY' , 1 ) ) } return { 'fn' : device . fn , 'path' : device . path , 'name' : device . name , 'phys' : device . phys , 'uniq' : device . uniq , 'vendor' : device . info . vendor , 'product' : device . info . product , 'version' : device . info . version , 'bus' : device . info . bustype , 'axes' : axes , 'rel_axes' : rel_axes , 'buttons' : buttons , 'unique_name' : unique_name ( device ) } def has_abs_axes ( device ) : return device . capabilities ( ) . get ( 3 ) is not None def has_rel_axes ( device ) : return device . capabilities ( ) . get ( 2 ) is not None def has_buttons ( device ) : return device . capabilities ( ) . get ( 1 ) is not None _check_import ( ) for d in [ InputDevice ( fn ) for fn in list_devices ( ) ] : if has_abs_axes ( d ) or has_rel_axes ( d ) : pp = pprint . PrettyPrinter ( indent = 2 , width = 100 ) pp . pprint ( device_verbose_info ( d ) ) | Simple test function which prints out all devices found by evdev |
42,959 | def print_controllers ( ) : _check_import ( ) pp = pprint . PrettyPrinter ( indent = 2 ) for discovery in find_all_controllers ( ) : pp . pprint ( discovery . controller ) | Pretty - print all controllers found |
42,960 | def accept ( self , discovery : ControllerDiscovery ) : if self . require_class is not None and not isinstance ( discovery . controller , self . require_class ) : return False if self . snames is not None : all_controls = discovery . controller . buttons . names + discovery . controller . axes . names for sname in self . snames : if sname not in all_controls : return False return True | Returns True if the supplied ControllerDiscovery matches this requirement False otherwise |
42,961 | def post ( param_map , url = URL ) : prepped_request = _prep_post ( url = url , ** param_map ) return cgi . send ( prepped_request ) | Posts a param_map created with config to the FlashAir config . cgi entrypoint |
42,962 | def _validate_timeout ( seconds : float ) : val = int ( seconds * 1000 ) assert 60000 <= val <= 4294967294 , "Bad value: {}" . format ( val ) return val | Creates an int from 60000 to 4294967294 that represents a valid millisecond wireless LAN timeout |
42,963 | def parse_datetime ( datetime_input ) : date_els , time_els = _split_datetime ( datetime_input ) date_vals = _parse_date ( date_els ) time_vals = _parse_time ( time_els ) vals = tuple ( date_vals ) + tuple ( time_vals ) return arrow . get ( * vals ) | The arrow library is sadly not good enough to parse certain date strings . It even gives unexpected results for partial date strings such as 2015 - 01 or just 2015 which I think should be seen as the first moment of 2014 . This function should overcome those limitations . |
42,964 | def set_led ( self , led_number , led_value ) : if 1 > led_number > 4 : return write_led_value ( hw_id = self . device_unique_name , led_name = 'sony{}' . format ( led_number ) , value = led_value ) | Set front - panel controller LEDs . The DS3 controller has four labelled LEDs on the front panel that can be either on or off . |
42,965 | def admin_command ( sudo , command ) : if sudo : if not isinstance ( command , list ) : command = [ command ] return [ 'sudo' ] + [ cmd for cmd in command ] return command | If sudo is needed make sure the command is prepended correctly otherwise return the command as it came . |
42,966 | def _encode_time ( mtime : float ) : dt = arrow . get ( mtime ) dt = dt . to ( "local" ) date_val = ( ( dt . year - 1980 ) << 9 ) | ( dt . month << 5 ) | dt . day secs = dt . second + dt . microsecond / 10 ** 6 time_val = ( dt . hour << 11 ) | ( dt . minute << 5 ) | math . floor ( secs / 2 ) return ( date_val << 16 ) | time_val | Encode a mtime float as a 32 - bit FAT time |
42,967 | def threadpooled ( func : typing . Optional [ typing . Callable [ ... , typing . Union [ "typing.Awaitable[typing.Any]" , typing . Any ] ] ] = None , * , loop_getter : typing . Union [ None , typing . Callable [ ... , asyncio . AbstractEventLoop ] , asyncio . AbstractEventLoop ] = None , loop_getter_need_context : bool = False , ) -> typing . Union [ ThreadPooled , typing . Callable [ ... , "typing.Union[concurrent.futures.Future[typing.Any], typing.Awaitable[typing.Any]]" ] , ] : if func is None : return ThreadPooled ( func = func , loop_getter = loop_getter , loop_getter_need_context = loop_getter_need_context ) return ThreadPooled ( func = None , loop_getter = loop_getter , loop_getter_need_context = loop_getter_need_context ) ( func ) | Post function to ThreadPoolExecutor . |
42,968 | def configure ( cls : typing . Type [ "ThreadPooled" ] , max_workers : typing . Optional [ int ] = None ) -> None : if isinstance ( cls . __executor , ThreadPoolExecutor ) : if cls . __executor . max_workers == max_workers : return cls . __executor . shutdown ( ) cls . __executor = ThreadPoolExecutor ( max_workers = max_workers ) | Pool executor create and configure . |
42,969 | def executor ( self ) -> "ThreadPoolExecutor" : if not isinstance ( self . __executor , ThreadPoolExecutor ) or self . __executor . is_shutdown : self . configure ( ) return self . __executor | Executor instance . |
42,970 | def loop_getter ( self ) -> typing . Optional [ typing . Union [ typing . Callable [ ... , asyncio . AbstractEventLoop ] , asyncio . AbstractEventLoop ] ] : return self . __loop_getter | Loop getter . |
42,971 | def needs_ssh ( hostname , _socket = None ) : if hostname . lower ( ) in [ 'localhost' , '127.0.0.1' , '127.0.1.1' ] : return False _socket = _socket or socket fqdn = _socket . getfqdn ( ) if hostname == fqdn : return False local_hostname = _socket . gethostname ( ) local_short_hostname = local_hostname . split ( '.' ) [ 0 ] if local_hostname == hostname or local_short_hostname == hostname : return False return True | Obtains remote hostname of the socket and cuts off the domain part of its FQDN . |
42,972 | def get_python_executable ( conn ) : executables = [ 'python3' , 'python' , 'python2.7' ] for executable in executables : conn . logger . debug ( 'trying to determine remote python executable with %s' % executable ) out , err , code = check ( conn , [ 'which' , executable ] ) if code : conn . logger . warning ( 'skipping %s, was not found in path' % executable ) else : try : return out [ 0 ] . strip ( ) except IndexError : conn . logger . warning ( 'could not parse stdout: %s' % out ) conn . logger . info ( 'Falling back to using interpreter: %s' % conn . interpreter ) return conn . interpreter | Try to determine the remote Python version so that it can be used when executing . Avoids the problem of different Python versions or distros that do not use python but do python3 |
42,973 | def import_module ( self , module ) : if self . remote_import_system is not None : if self . remote_import_system == 'json' : self . remote_module = JsonModuleExecute ( self , module , self . logger ) else : self . remote_module = LegacyModuleExecute ( self . gateway , module , self . logger ) else : self . remote_module = LegacyModuleExecute ( self . gateway , module , self . logger ) return self . remote_module | Allows remote execution of a local module . Depending on the remote_import_system attribute it may use execnet s implementation or remoto s own based on JSON . |
42,974 | def get ( name , fallback = 'ssh' ) : mapping = { 'ssh' : ssh . SshConnection , 'oc' : openshift . OpenshiftConnection , 'openshift' : openshift . OpenshiftConnection , 'kubernetes' : kubernetes . KubernetesConnection , 'k8s' : kubernetes . KubernetesConnection , 'local' : local . LocalConnection , 'popen' : local . LocalConnection , 'localhost' : local . LocalConnection , 'docker' : docker . DockerConnection , 'podman' : podman . PodmanConnection , } if not name : name = 'ssh' name = name . strip ( ) . lower ( ) connection_class = mapping . get ( name ) if not connection_class : logger . warning ( 'no connection backend found for: "%s"' % name ) if fallback : logger . info ( 'falling back to "%s"' % fallback ) return mapping . get ( fallback ) return connection_class | Retrieve the matching backend class from a string . If no backend can be matched it raises an error . |
42,975 | def set_leds ( self , hue : float = 0.0 , saturation : float = 1.0 , value : float = 1.0 ) : r , g , b = hsv_to_rgb ( hue , saturation , value ) write_led_value ( self . device_unique_name , 'red' , r * 255.0 ) write_led_value ( self . device_unique_name , 'green' , g * 255.0 ) write_led_value ( self . device_unique_name , 'blue' , b * 255.0 ) | The DualShock4 has an LED bar on the front of the controller . This function allows you to set the value of this bar . Note that the controller must be connected for this to work if it s not the call will just be ignored . |
42,976 | def memory_changed ( url = URL ) : response = _get ( Operation . memory_changed , url ) try : return int ( response . text ) == 1 except ValueError : raise IOError ( "Likely no FlashAir connection, " "memory changed CGI command failed" ) | Returns True if memory has been written to False otherwise |
42,977 | def _get ( operation : Operation , url = URL , ** params ) : prepped_request = _prep_get ( operation , url = url , ** params ) return cgi . send ( prepped_request ) | HTTP GET of the FlashAir command . cgi entrypoint |
42,978 | def asynciotask ( func : typing . Optional [ typing . Callable [ ... , "typing.Awaitable[typing.Any]" ] ] = None , * , loop_getter : typing . Union [ typing . Callable [ ... , asyncio . AbstractEventLoop ] , asyncio . AbstractEventLoop ] = asyncio . get_event_loop , loop_getter_need_context : bool = False , ) -> typing . Union [ AsyncIOTask , typing . Callable [ ... , "asyncio.Task[typing.Any]" ] ] : if func is None : return AsyncIOTask ( func = func , loop_getter = loop_getter , loop_getter_need_context = loop_getter_need_context ) return AsyncIOTask ( func = None , loop_getter = loop_getter , loop_getter_need_context = loop_getter_need_context ) ( func ) | Wrap function in future and return . |
42,979 | def threaded ( name : typing . Optional [ typing . Union [ str , typing . Callable [ ... , typing . Any ] ] ] = None , daemon : bool = False , started : bool = False , ) -> typing . Union [ Threaded , typing . Callable [ ... , threading . Thread ] ] : if callable ( name ) : func , name = ( name , "Threaded: " + getattr ( name , "__name__" , str ( hash ( name ) ) ) ) return Threaded ( name = name , daemon = daemon , started = started ) ( func ) return Threaded ( name = name , daemon = daemon , started = started ) | Run function in separate thread . |
42,980 | def before ( point ) : if not point : return True if isinstance ( point , str ) : point = str_to_time ( point ) elif isinstance ( point , int ) : point = time . gmtime ( point ) return time . gmtime ( ) < point | True if point datetime specification is before now |
42,981 | def epoch_in_a_while ( days = 0 , seconds = 0 , microseconds = 0 , milliseconds = 0 , minutes = 0 , hours = 0 , weeks = 0 ) : dt = time_in_a_while ( days , seconds , microseconds , milliseconds , minutes , hours , weeks ) return int ( ( dt - datetime ( 1970 , 1 , 1 ) ) . total_seconds ( ) ) | Return the number of seconds since epoch a while from now . |
42,982 | def set_defaults ( self ) : for key , val in self . c_default . items ( ) : self . _dict [ key ] = val | Based on specification set a parameters value to the default value . |
42,983 | def to_dict ( self , lev = 0 ) : _spec = self . c_param _res = { } lev += 1 for key , val in self . _dict . items ( ) : try : ( _ , req , _ser , _ , null_allowed ) = _spec [ str ( key ) ] except KeyError : try : _key , lang = key . split ( "#" ) ( _ , req , _ser , _ , null_allowed ) = _spec [ _key ] except ( ValueError , KeyError ) : try : ( _ , req , _ser , _ , null_allowed ) = _spec [ '*' ] except KeyError : _ser = None if _ser : val = _ser ( val , "dict" , lev ) if isinstance ( val , Message ) : _res [ key ] = val . to_dict ( lev + 1 ) elif isinstance ( val , list ) and isinstance ( next ( iter ( val or [ ] ) , None ) , Message ) : _res [ key ] = [ v . to_dict ( lev ) for v in val ] else : _res [ key ] = val return _res | Return a dictionary representation of the class |
42,984 | def from_dict ( self , dictionary , ** kwargs ) : _spec = self . c_param for key , val in dictionary . items ( ) : if val == "" or val == [ "" ] : continue skey = str ( key ) try : ( vtyp , req , _ , _deser , null_allowed ) = _spec [ key ] except KeyError : try : _key , lang = skey . split ( "#" ) except ValueError : try : ( vtyp , _ , _ , _deser , null_allowed ) = _spec [ '*' ] if val is None : self . _dict [ key ] = val continue except KeyError : self . _dict [ key ] = val continue else : try : ( vtyp , req , _ , _deser , null_allowed ) = _spec [ _key ] except KeyError : try : ( vtyp , _ , _ , _deser , null_allowed ) = _spec [ '*' ] if val is None : self . _dict [ key ] = val continue except KeyError : self . _dict [ key ] = val continue self . _add_value ( skey , vtyp , key , val , _deser , null_allowed ) return self | Direct translation so the value for one key might be a list or a single value . |
42,985 | def to_jwt ( self , key = None , algorithm = "" , lev = 0 , lifetime = 0 ) : _jws = JWS ( self . to_json ( lev ) , alg = algorithm ) return _jws . sign_compact ( key ) | Create a signed JWT representation of the class instance |
42,986 | def verify ( self , ** kwargs ) : _spec = self . c_param try : _allowed = self . c_allowed_values except KeyError : _allowed = { } for ( attribute , ( typ , required , _ , _ , na ) ) in _spec . items ( ) : if attribute == "*" : continue try : val = self . _dict [ attribute ] except KeyError : if required : raise MissingRequiredAttribute ( "%s" % attribute ) continue else : if typ == bool : pass elif not val : if required : raise MissingRequiredAttribute ( "%s" % attribute ) continue try : _allowed_val = _allowed [ attribute ] except KeyError : pass else : if not self . _type_check ( typ , _allowed_val , val , na ) : raise NotAllowedValue ( val ) return True | Make sure all the required values are there and that the values are of the correct type |
42,987 | def to_jwe ( self , keys , enc , alg , lev = 0 ) : _jwe = JWE ( self . to_json ( lev ) , alg = alg , enc = enc ) return _jwe . encrypt ( keys ) | Place the information in this instance in a JSON object . Make that JSON object the body of a JWT . Then encrypt that JWT using the specified algorithms and the given keys . Return the encrypted JWT . |
42,988 | def from_jwe ( self , msg , keys ) : jwe = JWE ( ) _res = jwe . decrypt ( msg , keys ) return self . from_json ( _res . decode ( ) ) | Decrypt an encrypted JWT and load the JSON object that was the body of the JWT into this object . |
42,989 | def weed ( self ) : _ext = [ k for k in self . _dict . keys ( ) if k not in self . c_param ] for k in _ext : del self . _dict [ k ] | Get rid of key value pairs that are not standard |
42,990 | def rm_blanks ( self ) : _blanks = [ k for k in self . _dict . keys ( ) if not self . _dict [ k ] ] for key in _blanks : del self . _dict [ key ] | Get rid of parameters that has no value . |
42,991 | def ansi ( color , text ) : code = COLOR_CODES [ color ] return '\033[1;{0}m{1}{2}' . format ( code , text , RESET_TERM ) | Wrap text in an ansi escape sequence |
42,992 | def require_flush ( fun ) : @ wraps ( fun ) def ensure_flushed ( service , * args , ** kwargs ) : if service . app_state . needs_db_flush : session = db . session ( ) if not session . _flushing and any ( isinstance ( m , ( RoleAssignment , SecurityAudit ) ) for models in ( session . new , session . dirty , session . deleted ) for m in models ) : session . flush ( ) service . app_state . needs_db_flush = False return fun ( service , * args , ** kwargs ) return ensure_flushed | Decorator for methods that need to query security . |
42,993 | def _current_user_manager ( self , session = None ) : if session is None : session = db . session ( ) try : user = g . user except Exception : return session . query ( User ) . get ( 0 ) if sa . orm . object_session ( user ) is not session : return session . query ( User ) . get ( user . id ) else : return user | Return the current user or SYSTEM user . |
42,994 | def get_roles ( self , principal , object = None , no_group_roles = False ) : assert principal if hasattr ( principal , "is_anonymous" ) and principal . is_anonymous : return [ AnonymousRole ] query = db . session . query ( RoleAssignment . role ) if isinstance ( principal , Group ) : filter_principal = RoleAssignment . group == principal else : filter_principal = RoleAssignment . user == principal if not no_group_roles : groups = [ g . id for g in principal . groups ] if groups : filter_principal |= RoleAssignment . group_id . in_ ( groups ) query = query . filter ( filter_principal ) if object is not None : assert isinstance ( object , Entity ) query = query . filter ( RoleAssignment . object == object ) roles = { i [ 0 ] for i in query . all ( ) } if object is not None : for attr , role in ( ( "creator" , Creator ) , ( "owner" , Owner ) ) : if getattr ( object , attr ) == principal : roles . add ( role ) return list ( roles ) | Get all the roles attached to given principal on a given object . |
42,995 | def get_principals ( self , role , anonymous = True , users = True , groups = True , object = None , as_list = True ) : if not isinstance ( role , Role ) : role = Role ( role ) assert role assert users or groups query = RoleAssignment . query . filter_by ( role = role ) if not anonymous : query = query . filter ( RoleAssignment . anonymous == False ) if not users : query = query . filter ( RoleAssignment . user == None ) elif not groups : query = query . filter ( RoleAssignment . group == None ) query = query . filter ( RoleAssignment . object == object ) principals = { ( ra . user or ra . group ) for ra in query . all ( ) } if object is not None and role in ( Creator , Owner ) : p = object . creator if role == Creator else object . owner if p : principals . add ( p ) if not as_list : return principals return list ( principals ) | Return all users which are assigned given role . |
42,996 | def register_asset ( self , type_ , * assets ) : supported = list ( self . _assets_bundles . keys ( ) ) if type_ not in supported : msg = "Invalid type: {}. Valid types: {}" . format ( repr ( type_ ) , ", " . join ( sorted ( supported ) ) ) raise KeyError ( msg ) for asset in assets : if not isinstance ( asset , Bundle ) and callable ( asset ) : asset = asset ( ) self . _assets_bundles [ type_ ] . setdefault ( "bundles" , [ ] ) . append ( asset ) | Register webassets bundle to be served on all pages . |
42,997 | def register_base_assets ( self ) : from abilian . web import assets as bundles self . register_asset ( "css" , bundles . LESS ) self . register_asset ( "js-top" , bundles . TOP_JS ) self . register_asset ( "js" , bundles . JS ) self . register_i18n_js ( * bundles . JS_I18N ) | Register assets needed by Abilian . |
42,998 | def user_photo_url ( user , size ) : endpoint , kwargs = user_url_args ( user , size ) return url_for ( endpoint , ** kwargs ) | Return url to use for this user . |
42,999 | def newlogins ( sessions ) : if not sessions : return [ ] , [ ] users = { } dates = { } for session in sessions : user = session . user date = session . started_at . strftime ( "%Y/%m/%d" ) if user not in users : users [ user ] = date if date not in dates : dates [ date ] = [ user ] else : dates [ date ] . append ( user ) data = [ ] total = [ ] previous = 0 for date in sorted ( dates . keys ( ) ) : date_epoch = unix_time_millis ( datetime . strptime ( date , "%Y/%m/%d" ) ) data . append ( { "x" : date_epoch , "y" : len ( dates [ date ] ) } ) previous += len ( dates [ date ] ) total . append ( { "x" : date_epoch , "y" : previous } ) return data , total | Brand new logins each day and total of users each day . |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.