idx
int64
0
252k
question
stringlengths
48
5.28k
target
stringlengths
5
1.23k
10,800
def run_server ( chatrooms , use_default_logging = True ) : if use_default_logging : configure_logging ( ) logger . info ( 'Starting Hermes chatroom server...' ) bots = [ ] for name , params in chatrooms . items ( ) : bot_class = params . get ( 'CLASS' , 'hermes.Chatroom' ) if type ( bot_class ) == type : pass else : b...
Sets up and serves specified chatrooms . Main entrypoint to Hermes .
10,801
def _get_sockets ( bots ) : sockets = { } for bot in bots : bot . connect ( ) sockets [ bot . client . Connection . _sock ] = bot return sockets
Connects and gathers sockets for all chatrooms
10,802
def _listen ( sockets ) : while True : ( i , o , e ) = select . select ( sockets . keys ( ) , [ ] , [ ] , 1 ) for socket in i : if isinstance ( sockets [ socket ] , Chatroom ) : data_len = sockets [ socket ] . client . Process ( 1 ) if data_len is None or data_len == 0 : raise Exception ( 'Disconnected from server' ) e...
Main server loop . Listens for incoming events and dispatches them to appropriate chatroom
10,803
def _send ( self , method , path , data , filename ) : if filename is None : return self . _send_json ( method , path , data ) else : return self . _send_file ( method , path , data , filename )
Send data to a remote server either with a POST or a PUT request .
10,804
def get_platform_settings():
    """Return the content of ``settings.PLATFORMS`` with a twist.

    If a ``FACEBOOK`` setting is present and truthy, a Facebook platform
    entry is appended to the list before returning it.
    """
    platforms = settings.PLATFORMS
    fb_conf = getattr(settings, 'FACEBOOK', None)
    if fb_conf:
        # NOTE(review): this appends to settings.PLATFORMS itself (no copy),
        # so repeated calls accumulate entries — confirm this is intended.
        platforms.append({
            'class': 'bernard.platforms.facebook.platform.Facebook',
            'settings': fb_conf,
        })
    return platforms
Returns the content of settings . PLATFORMS with a twist .
10,805
async def run_checks(self):
    """Run health checks on the FSM, on itself and on middlewares.

    Yields each check result in that order.
    """
    async for item in self.fsm.health_check():
        yield item
    async for item in self.self_check():
        yield item
    # MiddlewareManager.health_check() is a regular (sync) generator.
    for item in MiddlewareManager.health_check():
        yield item
Run checks on itself and on the FSM
10,806
async def self_check ( self ) : platforms = set ( ) for platform in get_platform_settings ( ) : try : name = platform [ 'class' ] cls : Type [ Platform ] = import_class ( name ) except KeyError : yield HealthCheckFail ( '00004' , 'Missing platform `class` name in configuration.' ) except ( AttributeError , ImportError ...
Checks that the platforms configuration is all right .
10,807
def _index_classes(self) -> Dict[Text, Type[Platform]]:
    """Build a name -> class index for all configured platform classes.

    An explicit 'name' key in a settings entry wins over the class's own
    NAME attribute.
    """
    index = {}
    for entry in get_platform_settings():
        platform_cls: Type[Platform] = import_class(entry['class'])
        if 'name' in entry:
            index[entry['name']] = platform_cls
        else:
            index[platform_cls.NAME] = platform_cls
    return index
Build a name index for all platform classes
10,808
async def build_platform(self, cls: Type[Platform], custom_id):
    """Instantiate, initialize and wire up a platform of the given class.

    The platform is hooked to the HTTP router and its messages are routed
    to the FSM.
    """
    from bernard.server.http import router

    platform = cls()
    if custom_id:
        platform._id = custom_id
    await platform.async_init()
    platform.on_message(self.fsm.handle_message)
    platform.hook_up(router)
    return platform
Build the Facebook platform . Nothing fancy .
10,809
def get_class ( self , platform ) -> Type [ Platform ] : if platform in self . _classes : return self . _classes [ platform ] raise PlatformDoesNotExist ( 'Platform "{}" is not in configuration' . format ( platform ) )
For a given platform name gets the matching class
10,810
async def get_platform(self, name: Text):
    """Get a valid instance of the specified platform.

    Do not cache the returned object: it might change with configuration
    changes.  Missing platforms are built lazily on first access.
    """
    if not self._is_init:
        await self.init()
    try:
        return self.platforms[name]
    except KeyError:
        instance = await self.build_platform(self.get_class(name), name)
        self.platforms[name] = instance
        return instance
Get a valid instance of the specified platform . Do not cache this object it might change with configuration changes .
10,811
async def get_all_platforms(self) -> AsyncIterator[Platform]:
    """Yield an instance of every configured platform."""
    for platform_name in self._classes:
        yield await self.get_platform(platform_name)
Returns all platform instances
10,812
async def message_from_token(
        self, token: Text, payload: Any
) -> Tuple[Optional[BaseMessage], Optional[Platform]]:
    """Find the platform that recognizes the token and build its message.

    Returns (message, platform), or (None, None) when no platform
    recognizes the token.
    """
    async for candidate in self.get_all_platforms():
        message = await candidate.message_from_token(token, payload)
        if message:
            return message, candidate
    return None, None
Given an authentication token find the right platform that can recognize this token and create a message for this platform .
10,813
def add_args ( parser , positional = False ) : group = parser . add_argument_group ( "read loading" ) group . add_argument ( "reads" if positional else "--reads" , nargs = "+" , default = [ ] , help = "Paths to bam files. Any number of paths may be specified." ) group . add_argument ( "--read-source-name" , nargs = "+"...
Extends a commandline argument parser with arguments for specifying read sources .
10,814
def load_from_args ( args ) : if not args . reads : return None if args . read_source_name : read_source_names = util . expand ( args . read_source_name , 'read_source_name' , 'read source' , len ( args . reads ) ) else : read_source_names = util . drop_prefix ( args . reads ) filters = [ ] for ( name , info ) in READ_...
Given parsed commandline arguments returns a list of ReadSource objects
10,815
def get_int(config, key, default):
    """Retrieve an integer from a dictionary of string values.

    Returns ``default`` when the key is missing or the value cannot be
    converted to an integer.
    """
    try:
        raw = config[key]
    except KeyError:
        return default
    try:
        return int(raw)
    except ValueError:
        return default
A helper to retrieve an integer value from a given dictionary containing string values . If the requested value is not present in the dictionary or if it cannot be converted to an integer a default value will be returned instead .
10,816
def compact_bucket ( db , buck_key , limit ) : records = db . lrange ( str ( buck_key ) , 0 , - 1 ) loader = limits . BucketLoader ( limit . bucket_class , db , limit , str ( buck_key ) , records , stop_summarize = True ) buck_record = msgpack . dumps ( dict ( bucket = loader . bucket . dehydrate ( ) , uuid = str ( uui...
Perform the compaction operation . This reads in the bucket information from the database builds a compacted bucket record inserts that record in the appropriate place in the database then removes outdated updates .
10,817
def compactor ( conf ) : db = conf . get_database ( 'compactor' ) limit_map = LimitContainer ( conf , db ) config = conf [ 'compactor' ] if get_int ( config , 'max_updates' , 0 ) <= 0 : LOG . warning ( "Compaction is not enabled. Enable it by " "setting a positive integer value for " "'compactor.max_updates' in the co...
The compactor daemon . This function watches the sorted set containing bucket keys that need to be compacted performing the necessary compaction .
10,818
def factory ( cls , config , db ) : if not hasattr ( db , 'register_script' ) : LOG . debug ( "Redis client does not support register_script()" ) return GetBucketKeyByLock ( config , db ) info = db . info ( ) if version_greater ( '2.6' , info [ 'redis_version' ] ) : LOG . debug ( "Redis server supports register_script(...
Given a configuration and database select and return an appropriate instance of a subclass of GetBucketKey . This will ensure that both client and server support are available for the Lua script feature of Redis and if not a lock will be used .
10,819
def get(self, now):
    """Get a bucket key to compact; returns None if none are available.

    Uses the configured lock so that the oldest eligible key is popped
    off the sorted set in an atomic fashion.

    :param now: current timestamp; only entries older than
                ``self.min_age`` seconds are eligible.
    """
    with self.lock:
        items = self.db.zrangebyscore(
            self.key, 0, now - self.min_age, start=0, num=1)
        if not items:
            return None
        item = items[0]
        # BUG FIX: redis ZREM requires the sorted-set key as its first
        # argument; the original called self.db.zrem(item), which never
        # removed the entry, so the same key would be picked repeatedly.
        self.db.zrem(self.key, item)
        return item
Get a bucket key to compact . If none are available returns None . This uses a configured lock to ensure that the bucket key is popped off the sorted set in an atomic fashion .
10,820
def get(self, now):
    """Get a bucket key to compact; returns None if none are available.

    Delegates to a Lua script so the pop off the sorted set happens
    atomically on the Redis server.
    """
    result = self.script(keys=[self.key], args=[now - self.min_age])
    if not result:
        return None
    return result[0]
Get a bucket key to compact . If none are available returns None . This uses a Lua script to ensure that the bucket key is popped off the sorted set in an atomic fashion .
10,821
def parse(text, elements, fallback):
    """Parse the given text and produce a list of inline elements.

    Tokens for every element type are collected, sorted, de-overlapped,
    then converted into elements; unmatched gaps become fallback
    elements.
    """
    tokens = [
        Token(etype, match, text, fallback)
        for etype in elements
        for match in etype.find(text)
    ]
    tokens.sort()
    tokens = _resolve_overlap(tokens)
    return make_elements(tokens, text, fallback=fallback)
Parse given text and produce a list of inline elements .
10,822
def make_elements ( tokens , text , start = 0 , end = None , fallback = None ) : result = [ ] end = end or len ( text ) prev_end = start for token in tokens : if prev_end < token . start : result . append ( fallback ( text [ prev_end : token . start ] ) ) result . append ( token . as_element ( ) ) prev_end = token . en...
Make elements from a list of parsed tokens . It will turn all unmatched holes into fallback elements .
10,823
def main_cli ( ) : args = _cli_argument_parser ( ) delta_secs = args . delay i2cbus = args . bus i2c_address = args . address sensor_key = args . sensor sensor_params = args . params params = { } if sensor_params : def _parse_param ( str_param ) : key , value = str_param . split ( '=' ) try : value = int ( value ) exce...
CLI minimal interface .
10,824
def extract_domain(var_name, output):
    """Extract the domain name from the URL held in an environment
    variable and append it to ``output``.

    Does nothing when the variable is unset or empty.
    """
    url = getenv(var_name)
    if not url:
        return
    output.append(urlparse(url).hostname)
Extracts just the domain name from an URL and adds it to a list
10,825
def numchannels(samples: np.ndarray) -> int:
    """Return the number of channels present in samples.

    A 1-D array is mono (one channel); otherwise the second axis is
    taken to be the channel axis.
    """
    if samples.ndim == 1:
        return 1
    return samples.shape[1]
return the number of channels present in samples
10,826
def turnstile_filter(global_conf, **local_conf):
    """Factory function for the turnstile middleware.

    An alternate middleware class may be selected through the
    'turnstile' key of the local configuration.
    """
    middleware_cls = TurnstileMiddleware
    if 'turnstile' in local_conf:
        middleware_cls = utils.find_entrypoint(
            'turnstile.middleware', local_conf['turnstile'], required=True)

    def make_middleware(app):
        return middleware_cls(app, local_conf)

    return make_middleware
Factory function for turnstile .
10,827
def format_delay ( self , delay , limit , bucket , environ , start_response ) : status = self . conf . status headers = HeadersDict ( [ ( 'Retry-After' , "%d" % math . ceil ( delay ) ) ] ) status , entity = limit . format ( status , headers , environ , bucket , delay ) start_response ( status , headers . items ( ) ) re...
Formats the over - limit response for the request . May be overridden in subclasses to allow alternate responses .
10,828
def find_entrypoint ( group , name , compat = True , required = False ) : if group is None or ( compat and ':' in name ) : try : return pkg_resources . EntryPoint . parse ( "x=" + name ) . load ( False ) except ( ImportError , pkg_resources . UnknownExtra ) as exc : pass else : for ep in pkg_resources . iter_entry_poin...
Finds the first available entrypoint with the given name in the given group .
10,829
def transfer ( self , transfer_payload = None , * , from_user , to_user ) : if self . persist_id is None : raise EntityNotYetPersistedError ( ( 'Entities cannot be transferred ' 'until they have been ' 'persisted' ) ) return self . plugin . transfer ( self . persist_id , transfer_payload , from_user = from_user , to_us...
Transfer this entity to another owner on the backing persistence layer
10,830
def transfer ( self , rights_assignment_data = None , * , from_user , to_user , rights_assignment_format = 'jsonld' ) : rights_assignment = RightsAssignment . from_data ( rights_assignment_data or { } , plugin = self . plugin ) transfer_payload = rights_assignment . _to_format ( data_format = rights_assignment_format )...
Transfer this Right to another owner on the backing persistence layer .
10,831
def _set_repo ( self , url ) : if url . startswith ( 'http' ) : try : self . repo = Proxy ( url ) except ProxyError , e : log . exception ( 'Error setting repo: %s' % url ) raise GritError ( e ) else : try : self . repo = Local ( url ) except NotGitRepository : raise GritError ( 'Invalid url: %s' % url ) except Excepti...
sets the underlying repo object
10,832
def new(self, url, clone_from=None, bare=True):
    """Creates a new Repo instance at ``url``.

    NOTE(review): when clone_from is given, self.clone() is called with
    path=url but clone_from itself is never forwarded — confirm whether
    the source URL should be passed on.
    NOTE(review): in the non-clone branches the Proxy/Local objects are
    created for their side effects only; their results are discarded and
    a fresh Repo(url) is returned in every case.
    """
    if clone_from:
        self.clone(path=url, bare=bare)
    else:
        # HTTP urls go through the remote proxy, anything else is local.
        if url.startswith('http'):
            proxy = Proxy(url)
            proxy.new(path=url, bare=bare)
        else:
            local = Local.new(path=url, bare=bare)
    return Repo(url)
Creates a new Repo instance .
10,833
def CheckEmails(self, checkTypo=False, fillWrong=True):
    """Check every address in self.emails and collect the invalid ones.

    Resets self.wrong_emails and refills it with each address for which
    CheckEmail() returns False.  fillWrong is accepted for interface
    compatibility but is not consulted here.
    """
    self.wrong_emails = [
        address for address in self.emails
        if self.CheckEmail(address, checkTypo) is False
    ]
Checks emails in the list, whether they are correct or not
10,834
def CheckEmail(self, email, checkTypo=False):
    """Check a single email address for validity.

    Valid means exactly one '@' with a domain listed in self.valid.
    checkTypo is accepted for interface compatibility but unused here.
    """
    parts = email.split('@')
    return len(parts) == 2 and parts[1] in self.valid
Checks a Single email if it is correct
10,835
def CorrectWrongEmails(self, askInput=True):
    """Correct every address collected in self.wrong_emails, in place.

    Each wrong address is replaced inside self.emails by the result of
    CorrectEmail(); wrong_emails is emptied afterwards.  askInput is
    accepted for interface compatibility but not consulted here.
    """
    for bad_address in self.wrong_emails:
        fixed = self.CorrectEmail(bad_address)
        position = self.emails.index(bad_address)
        self.emails[position] = fixed
    self.wrong_emails = []
Corrects Emails in wrong_emails
10,836
def CorrectEmail ( self , email ) : print ( "Wrong Email : " + email ) contents = email . split ( '@' ) if len ( contents ) == 2 : domain_data = contents [ 1 ] . split ( '.' ) for vemail in self . valid : alters = perms ( vemail . split ( '.' , 1 ) [ 0 ] ) if domain_data [ 0 ] in alters and qyn . query_yes_no ( "Did yo...
Returns a Corrected email USER INPUT REQUIRED
10,837
def add_element ( self , element , override = False ) : if issubclass ( element , inline . InlineElement ) : dest = self . inline_elements elif issubclass ( element , block . BlockElement ) : dest = self . block_elements else : raise TypeError ( 'The element should be a subclass of either `BlockElement` or ' '`InlineEl...
Add an element to the parser .
10,838
def parse ( self , source_or_text ) : if isinstance ( source_or_text , string_types ) : block . parser = self inline . parser = self return self . block_elements [ 'Document' ] ( source_or_text ) element_list = self . _build_block_element_list ( ) ast = [ ] while not source_or_text . exhausted : for ele_type in element...
Do the actual parsing and returns an AST or parsed element .
10,839
def parse_inline(self, text):
    """Parse text into inline elements.

    RawText is not matched during parsing; it is created as a wrapper
    for holes that no other element matches.
    """
    candidates = self._build_inline_element_list()
    raw_text = self.inline_elements['RawText']
    return inline_parser.parse(text, candidates, fallback=raw_text)
Parses text into inline elements . RawText is not considered in parsing but created as a wrapper of holes that don't match any other elements .
10,840
def _build_block_element_list ( self ) : return sorted ( [ e for e in self . block_elements . values ( ) if not e . virtual ] , key = lambda e : e . priority , reverse = True )
Return a list of block elements ordered from highest priority to lowest .
10,841
def make_app ( * args , ** kw ) : default_options = [ [ 'content_path' , '.' ] , [ 'uri_marker' , '' ] ] args = list ( args ) options = dict ( default_options ) options . update ( kw ) while default_options and args : _d = default_options . pop ( 0 ) _a = args . pop ( 0 ) options [ _d [ 0 ] ] = _a options [ 'content_pa...
Assembles basic WSGI - compatible application providing functionality of git - http - backend .
10,842
def now(tzinfo=True):
    """Return an aware or naive datetime depending on settings.USE_TZ.

    Delegates to Django's now() when it is importable; otherwise builds
    a UTC-aware datetime (tzinfo=True) or a naive local one.
    """
    if dj_now:
        return dj_now()
    if not tzinfo:
        return datetime.now()
    return datetime.utcnow().replace(tzinfo=utc)
Return an aware or naive datetime . datetime depending on settings . USE_TZ .
10,843
def match_unit ( data , p , m = 'a' ) : if data is None : return p is None if m != 'e' and isinstance ( p , six . string_types ) : p = re . compile ( p ) if isinstance ( data , Sequence ) and not isinstance ( data , six . string_types ) : return any ( [ match_unit ( field , p , m = m ) for field in data ] ) elif isinst...
Match data to basic match unit .
10,844
def generate_ppi_network ( ppi_graph_path : str , dge_list : List [ Gene ] , max_adj_p : float , max_log2_fold_change : float , min_log2_fold_change : float , ppi_edge_min_confidence : Optional [ float ] = None , current_disease_ids_path : Optional [ str ] = None , disease_associations_path : Optional [ str ] = None , ...
Generate the protein - protein interaction network .
10,845
def parse_dge ( dge_path : str , entrez_id_header : str , log2_fold_change_header : str , adj_p_header : str , entrez_delimiter : str , base_mean_header : Optional [ str ] = None ) -> List [ Gene ] : if dge_path . endswith ( '.xlsx' ) : return parsers . parse_excel ( dge_path , entrez_id_header = entrez_id_header , log...
Parse a differential expression file .
10,846
def _load ( self , file_path : Text ) -> None : module_ = types . ModuleType ( 'settings' ) module_ . __file__ = file_path try : with open ( file_path , encoding = 'utf-8' ) as f : exec ( compile ( f . read ( ) , file_path , 'exec' ) , module_ . __dict__ ) except IOError as e : e . strerror = 'Unable to load configurat...
Load the configuration from a plain Python file . This file is executed on its own .
10,847
def _settings ( self ) -> Settings : if self . __dict__ [ '__settings' ] is None : self . __dict__ [ '__settings' ] = Settings ( ) for file_path in self . _get_files ( ) : if file_path : self . __dict__ [ '__settings' ] . _load ( file_path ) return self . __dict__ [ '__settings' ]
Return the actual settings object or create it if missing .
10,848
async def _start(self, key: Text) -> None:
    """Start the lock.

    Polls Redis up to 1000 times, trying to SET the lock key with NX
    semantics and an expiry; sleeps REDIS_POLL_INTERVAL between
    attempts.  Gives up silently after the last attempt.

    NOTE(review): the original was flattened to one line; the sleep is
    assumed to sit outside the connection context — confirm against the
    project history.
    """
    for _ in range(0, 1000):
        with await self.pool as r:
            just_set = await r.set(
                self.lock_key(key),
                '',
                expire=settings.REGISTER_LOCK_TIME,
                exist=r.SET_IF_NOT_EXIST,
            )
        if just_set:
            break
        await asyncio.sleep(settings.REDIS_POLL_INTERVAL)
Start the lock .
10,849
async def _get(self, key: Text) -> Dict[Text, Any]:
    """Get the value for the key.

    The value is automatically deserialized from JSON; an empty dict is
    returned when the key is missing or holds malformed JSON.
    """
    try:
        with await self.pool as conn:
            payload = await conn.get(self.register_key(key))
        return ujson.loads(payload)
    except (ValueError, TypeError):
        # TypeError covers a None payload (missing key).
        return {}
Get the value for the key . It is automatically deserialized from JSON and returns an empty dictionary by default .
10,850
async def _replace(self, key: Text, data: Dict[Text, Any]) -> None:
    """Replace the register entry for key with the JSON-serialized
    data."""
    with await self.pool as conn:
        await conn.set(self.register_key(key), ujson.dumps(data))
Replace the register with a new value .
10,851
def getFileName ( self , suffix = None , extension = "jar" ) : assert ( self . _artifactId is not None ) assert ( self . _version is not None ) return "{0}-{1}{2}{3}" . format ( self . _artifactId , self . _version . getRawString ( ) , "-" + suffix . lstrip ( "-" ) if suffix is not None else "" , "." + extension . lstr...
Returns the basename of the artifact's file using Maven's conventions .
10,852
def getPath ( self , suffix = None , extension = "jar" , separator = os . sep ) : assert ( self . _groupId is not None ) resultComponents = [ self . _groupId . replace ( "." , separator ) ] if self . _artifactId is not None : resultComponents . append ( self . _artifactId ) version = self . _version if version is not N...
Returns the full path relative to the root of a Maven repository of the current artifact using Maven's conventions .
10,853
def allele_support_df(loci, sources):
    """Return a DataFrame of allele counts for all given loci in the
    read sources."""
    rows = allele_support_rows(loci, sources)
    return pandas.DataFrame(rows, columns=EXPECTED_COLUMNS)
Returns a DataFrame of allele counts for all given loci in the read sources
10,854
def variant_support ( variants , allele_support_df , ignore_missing = False ) : missing = [ c for c in EXPECTED_COLUMNS if c not in allele_support_df . columns ] if missing : raise ValueError ( "Missing columns: %s" % " " . join ( missing ) ) allele_support_df [ [ "interbase_start" , "interbase_end" ] ] = ( allele_supp...
Collect the read evidence support for the given variants .
10,855
def sndinfo(path: str) -> SndInfo:
    """Get info about a soundfile, dispatching to the backend that
    handles its format."""
    backend = _getBackend(path)
    logger.debug(f"sndinfo: using backend {backend.name}")
    return backend.getinfo(path)
Get info about a soundfile
10,856
def asmono ( samples : np . ndarray , channel : Union [ int , str ] = 0 ) -> np . ndarray : if numchannels ( samples ) == 1 : if isinstance ( samples [ 0 ] , float ) : return samples elif isinstance ( samples [ 0 ] , np . dnarray ) : return np . reshape ( samples , ( len ( samples ) , ) ) else : raise TypeError ( "Samp...
convert samples to mono if they are not mono already .
10,857
def getchannel(samples: np.ndarray, ch: int) -> np.ndarray:
    """Return a view into one channel of samples.

    Mono input is returned unchanged; otherwise column ``ch`` is
    selected.  Raises ValueError when ch is out of range.
    """
    total = numchannels(samples)
    if ch > total - 1:
        raise ValueError("channel %d out of range" % ch)
    return samples if total == 1 else samples[:, ch]
Returns a view into a channel of samples .
10,858
def bitdepth ( data : np . ndarray , snap : bool = True ) -> int : data = asmono ( data ) maxitems = min ( 4096 , data . shape [ 0 ] ) maxbits = max ( x . as_integer_ratio ( ) [ 1 ] for x in data [ : maxitems ] ) . bit_length ( ) if snap : if maxbits <= 8 : maxbits = 8 elif maxbits <= 16 : maxbits = 16 elif maxbits <= ...
returns the number of bits actually used to represent the data .
10,859
def sndwrite_like(samples: np.ndarray, likefile: str, outfile: str) -> None:
    """Write samples to outfile, taking the samplerate and encoding from
    likefile."""
    reference = sndinfo(likefile)
    sndwrite(samples, reference.samplerate, outfile,
             encoding=reference.encoding)
Write samples to outfile with samplerate and encoding taken from likefile
10,860
def _wavReadData ( fid , size : int , channels : int , encoding : str , bigendian : bool ) -> np . ndarray : bits = int ( encoding [ 3 : ] ) if bits == 8 : data = np . fromfile ( fid , dtype = np . ubyte , count = size ) if channels > 1 : data = data . reshape ( - 1 , channels ) else : bytes = bits // 8 if encoding in ...
adapted from scipy . io . wavfile . _read_data_chunk
10,861
def _wavGetInfo ( f : Union [ IO , str ] ) -> Tuple [ SndInfo , Dict [ str , Any ] ] : if isinstance ( f , ( str , bytes ) ) : f = open ( f , 'rb' ) needsclosing = True else : needsclosing = False fsize , bigendian = _wavReadRiff ( f ) fmt = ">i" if bigendian else "<i" while ( f . tell ( ) < fsize ) : chunk_id = f . re...
Read the info of a wav file . taken mostly from scipy . io . wavfile
10,862
def connect(self):
    """Create the connection to the server.

    Resolves the best connection parameters for host/port, then opens a
    socket with the configured timeout and connects it.
    """
    family, stype, proto, cname, sockaddr = self.best_connection_params(
        self.host, self.port)
    self.sock = socket.socket(family, stype)
    self.sock.settimeout(self.timeout)
    self.sock.connect(sockaddr)
Create connection to server
10,863
def getchallenge(self):
    """Request and return the server challenge.

    Returns None implicitly when no challenge response arrives before
    the read iterator is exhausted.
    """
    self.sock.send(CHALLENGE_PACKET)
    for packet in self.read_iterator(self.CHALLENGE_TIMEOUT):
        if packet.startswith(CHALLENGE_RESPONSE_HEADER):
            return parse_challenge_response(packet)
Return server challenge
10,864
def send ( self , command ) : "Send rcon command to server" if self . secure_rcon == self . RCON_NOSECURE : self . sock . send ( rcon_nosecure_packet ( self . password , command ) ) elif self . secure_rcon == self . RCON_SECURE_TIME : self . sock . send ( rcon_secure_time_packet ( self . password , command ) ) elif sel...
Send rcon command to server
10,865
def parse(html_string, wrapper=Parser, *args, **kwargs):
    """Parse HTML with the given wrapper class.

    :param html_string: raw HTML to parse with lxml.
    :param wrapper: class used to wrap the parsed root element
                    (defaults to Parser).
    """
    # BUG FIX: the original ignored the ``wrapper`` argument and always
    # instantiated Parser, making the parameter dead.
    return wrapper(lxml.html.fromstring(html_string), *args, **kwargs)
Parse html with wrapper
10,866
def str2int(string_with_int):
    """Collect the digits from a string and return them as an int.

    A string containing no digits yields 0.
    """
    digits = [ch for ch in string_with_int if ch in string.digits]
    return int("".join(digits) or 0)
Collect digits from a string
10,867
def to_unicode(obj, encoding='utf-8'):
    """Convert a (byte) string to a unicode string.

    Non-string objects and strings that are already text pass through
    unchanged.
    """
    is_stringish = isinstance(obj, (string_types, binary_type))
    if is_stringish and not isinstance(obj, text_type):
        obj = text_type(obj, encoding)
    return obj
Convert string to unicode string
10,868
def strip_spaces(s):
    """Collapse runs of spaces into single spaces and trim the ends."""
    return u" ".join(chunk for chunk in s.split(u' ') if chunk)
Strip excess spaces from a string
10,869
def strip_linebreaks(s):
    """Collapse runs of line breaks into single newlines and trim the
    ends."""
    return u"\n".join(chunk for chunk in s.split(u'\n') if chunk)
Strip excess line breaks from a string
10,870
def get(self, selector, index=0, default=None):
    """Get one element matched by the CSS selector.

    Returns the element at ``index`` (first by default), or ``default``
    when nothing matches or the index is out of range.
    """
    matches = self(selector)
    if not matches:
        return default
    try:
        return matches[index]
    except IndexError:
        return default
Get first element from CSSSelector
10,871
def html(self, unicode=False):
    """Return the HTML of this element, optionally decoded to a text
    string.

    The ``unicode`` parameter name shadows the builtin but is kept for
    interface compatibility.
    """
    markup = lxml.html.tostring(self.element, encoding=self.encoding)
    if unicode:
        return markup.decode(self.encoding)
    return markup
Return HTML of element
10,872
def parse ( self , func , * args , ** kwargs ) : result = [ ] for element in self . xpath ( 'child::node()' ) : if isinstance ( element , Parser ) : children = element . parse ( func , * args , ** kwargs ) element_result = func ( element , children , * args , ** kwargs ) if element_result : result . append ( element_re...
Parse element with given function
10,873
def _wrap_result ( self , func ) : def wrapper ( * args ) : result = func ( * args ) if hasattr ( result , '__iter__' ) and not isinstance ( result , etree . _Element ) : return [ self . _wrap_element ( element ) for element in result ] else : return self . _wrap_element ( result ) return wrapper
Wrap result in Parser instance
10,874
def _wrap_element(self, result):
    """Wrap a single lxml HTML element in a Parser; anything else passes
    through unchanged."""
    if not isinstance(result, lxml.html.HtmlElement):
        return result
    return Parser(result)
Wrap single element in Parser instance
10,875
def parse_inline(self):
    """Run the postponed inline parsing pass.

    Inline parsing is postponed so that all link references are seen
    first; container elements recurse into their block children instead
    of parsing themselves.
    """
    if self.inline_children:
        self.children = parser.parse_inline(self.children)
        return
    children = getattr(self, 'children', None)
    if isinstance(children, list):
        for child in children:
            if isinstance(child, BlockElement):
                child.parse_inline()
Inline parsing is postponed so that all link references are seen before that .
10,876
def work_model_factory(*, validator=validators.is_work_model, **kwargs):
    """Generate a Work model.

    The ld_type is always forced to 'AbstractWork', overriding any value
    supplied by the caller.
    """
    kwargs['ld_type'] = 'AbstractWork'
    return _model_factory(validator=validator, **kwargs)
Generate a Work model .
10,877
def manifestation_model_factory(*,
                                validator=validators.is_manifestation_model,
                                ld_type='CreativeWork', **kwargs):
    """Generate a Manifestation model (default ld_type 'CreativeWork')."""
    return _model_factory(validator=validator, ld_type=ld_type, **kwargs)
Generate a Manifestation model .
10,878
def right_model_factory(*, validator=validators.is_right_model,
                        ld_type='Right', **kwargs):
    """Generate a Right model (default ld_type 'Right')."""
    return _model_factory(validator=validator, ld_type=ld_type, **kwargs)
Generate a Right model .
10,879
def copyright_model_factory(*, validator=validators.is_copyright_model,
                            **kwargs):
    """Generate a Copyright model.

    The ld_type is always forced to 'Copyright', overriding any value
    supplied by the caller.
    """
    kwargs['ld_type'] = 'Copyright'
    return _model_factory(validator=validator, **kwargs)
Generate a Copyright model .
10,880
def mark_error_retryable(error):
    """Mark an exception instance or type as retryable.

    If a marked exception is caught by pyramid_retry then the request
    may be retried.  Raises ValueError for non-exception arguments.
    """
    if isinstance(error, Exception):
        alsoProvides(error, IRetryableError)
        return
    if inspect.isclass(error) and issubclass(error, Exception):
        classImplements(error, IRetryableError)
        return
    raise ValueError('only exception objects or types may be marked retryable')
Mark an exception instance or type as retryable . If this exception is caught by pyramid_retry then it may retry the request .
10,881
def is_last_attempt(request):
    """Return True if the request is on its last attempt.

    On the last attempt pyramid_retry will not issue any new attempts,
    regardless of what happens while executing this request.  Missing
    retry metadata in the environ counts as the last attempt.
    """
    env = request.environ
    attempt = env.get('retry.attempt')
    total = env.get('retry.attempts')
    if attempt is None or total is None:
        return True
    return attempt + 1 == total
Return True if the request is on its last attempt meaning that pyramid_retry will not be issuing any new attempts regardless of what happens when executing this request .
10,882
def includeme ( config ) : settings = config . get_settings ( ) config . add_view_predicate ( 'last_retry_attempt' , LastAttemptPredicate ) config . add_view_predicate ( 'retryable_error' , RetryableErrorPredicate ) def register ( ) : attempts = int ( settings . get ( 'retry.attempts' ) or 3 ) settings [ 'retry.attempt...
Activate the pyramid_retry execution policy in your application .
10,883
def filter_butter_coeffs ( filtertype , freq , samplerate , order = 5 ) : assert filtertype in ( 'low' , 'high' , 'band' ) nyq = 0.5 * samplerate if isinstance ( freq , tuple ) : assert filtertype == 'band' low , high = freq low /= nyq high /= nyq b , a = signal . butter ( order , [ low , high ] , btype = 'band' ) else...
calculates the coefficients for a digital butterworth filter
10,884
def filter_butter(samples, samplerate, filtertype, freq, order=5):
    """Filter samples with a digital Butterworth filter.

    ``filtertype`` must be one of 'low', 'high' or 'band'; the filter is
    applied independently to every channel.
    """
    assert filtertype in ('low', 'high', 'band')
    b, a = filter_butter_coeffs(filtertype, freq, samplerate, order=order)

    def _run(data):
        return signal.lfilter(b, a, data)

    return apply_multichannel(samples, _run)
Filters the samples with a digital butterworth filter
10,885
def token_middleware(ctx, get_response):
    """Reinject the context token into requests that lack one.

    Returns an async middleware that fills request['params']['token']
    from ``ctx.token`` when it is missing or None.
    """
    async def middleware(request):
        params = request.setdefault('params', {})
        if params.get('token') is None:
            params['token'] = ctx.token
        return await get_response(request)

    return middleware
Reinject token and consistency into requests .
10,886
def rebuild ( self ) : scene = self . scene ( ) if ( not scene ) : return sourcePos = self . sourceItem ( ) . viewItem ( ) . pos ( ) sourceRect = self . sourceItem ( ) . viewItem ( ) . rect ( ) targetPos = self . targetItem ( ) . viewItem ( ) . pos ( ) targetRect = self . targetItem ( ) . viewItem ( ) . rect ( ) cellWi...
Rebuilds the dependency path for this item .
10,887
def _writeBlock ( block , blockID ) : with open ( "blockIDs.txt" , "a" ) as fp : fp . write ( "blockID: " + str ( blockID ) + "\n" ) sentences = "" for sentence in block : sentences += sentence + "," fp . write ( "block sentences: " + sentences [ : - 1 ] + "\n" ) fp . write ( "\n" )
writes the block to a file with the id
10,888
def _writeSentenceInBlock ( sentence , blockID , sentenceID ) : with open ( "sentenceIDs.txt" , "a" ) as fp : fp . write ( "sentenceID: " + str ( blockID ) + "_" + str ( sentenceID ) + "\n" ) fp . write ( "sentence string: " + sentence + "\n" ) fp . write ( "\n" )
writes the sentence in a block to a file with the id
10,889
def _writeWordFromSentenceInBlock ( word , blockID , sentenceID , wordID ) : with open ( "wordIDs.txt" , "a" ) as fp : fp . write ( "wordID: " + str ( blockID ) + "_" + str ( sentenceID ) + "_" + str ( wordID ) + "\n" ) fp . write ( "wordString: " + word + "\n" ) fp . write ( "\n" )
writes the word from a sentence in a block to a file with the id
10,890
def _writeBk ( target = "sentenceContainsTarget(+SID,+WID)." , treeDepth = "3" , nodeSize = "3" , numOfClauses = "8" ) : with open ( 'bk.txt' , 'w' ) as bk : bk . write ( "useStdLogicVariables: true\n" ) bk . write ( "setParam: treeDepth=" + str ( treeDepth ) + '.\n' ) bk . write ( "setParam: nodeSize=" + str ( nodeSiz...
Writes a background file to disk .
10,891
def traverse_depth_first_pre_order ( self , callback ) : n = len ( self . suftab ) root = [ 0 , 0 , n - 1 , "" ] def _traverse_top_down ( interval ) : callback ( interval ) i , j = interval [ 1 ] , interval [ 2 ] if i != j : children = self . _get_child_intervals ( i , j ) children . sort ( key = lambda child : child [...
Visits the internal nodes of the enhanced suffix array in depth - first pre - order .
10,892
def traverse_depth_first_post_order ( self , callback ) : last_interval = None n = len ( self . suftab ) stack = [ [ 0 , 0 , None , [ ] ] ] for i in xrange ( 1 , n ) : lb = i - 1 while self . lcptab [ i ] < stack [ - 1 ] [ 0 ] : stack [ - 1 ] [ 2 ] = i - 1 last_interval = stack . pop ( ) callback ( last_interval ) lb =...
Visits the internal nodes of the enhanced suffix array in depth - first post - order .
10,893
def _DecodeKey ( self , key ) : if self . dict . attrindex . HasBackward ( key ) : return self . dict . attrindex . GetBackward ( key ) return key
Turn a key into a string if possible
10,894
def AddAttribute(self, key, value):
    """Add an attribute to the packet.

    A non-list value is treated as a single-element list; encoded values
    are appended to any existing ones stored under the encoded key.
    """
    values = value if isinstance(value, list) else [value]
    key, values = self._EncodeKeyValues(key, values)
    self.setdefault(key, []).extend(values)
Add an attribute to the packet .
10,895
def CreateAuthenticator():
    """Create a sixteen-byte RADIUS packet authenticator.

    Every RADIUS packet carries a sixteen byte authenticator used to
    authenticate replies from the server and in the password hiding
    algorithm; this returns a suitable random string.
    """
    data = [random_generator.randrange(0, 256) for _ in range(16)]
    if six.PY3:
        return bytes(data)
    return ''.join(chr(b) for b in data)
Create a packet authenticator . All RADIUS packets contain a sixteen byte authenticator which is used to authenticate replies from the RADIUS server and in the password hiding algorithm . This function returns a suitable random string that can be used as an authenticator .
10,896
def DecodePacket ( self , packet ) : try : ( self . code , self . id , length , self . authenticator ) = struct . unpack ( '!BBH16s' , packet [ 0 : 20 ] ) except struct . error : raise PacketError ( 'Packet header is corrupt' ) if len ( packet ) != length : raise PacketError ( 'Packet has invalid length' ) if length > ...
Initialize the object from raw packet data . Decode a packet as received from the network and decode it .
10,897
def PwDecrypt ( self , password ) : buf = password pw = six . b ( '' ) last = self . authenticator while buf : hash = md5_constructor ( self . secret + last ) . digest ( ) if six . PY3 : for i in range ( 16 ) : pw += bytes ( ( hash [ i ] ^ buf [ i ] , ) ) else : for i in range ( 16 ) : pw += chr ( ord ( hash [ i ] ) ^ ...
Unobfuscate a RADIUS password . RADIUS hides passwords in packets by using an algorithm based on the MD5 hash of the packet authenticator and RADIUS secret . This function reverses the obfuscation process .
10,898
def PwCrypt ( self , password ) : if self . authenticator is None : self . authenticator = self . CreateAuthenticator ( ) if isinstance ( password , six . text_type ) : password = password . encode ( 'utf-8' ) buf = password if len ( password ) % 16 != 0 : buf += six . b ( '\x00' ) * ( 16 - ( len ( password ) % 16 ) ) ...
Obfuscate password . RADIUS hides passwords in packets by using an algorithm based on the MD5 hash of the packet authenticator and RADIUS secret . If no authenticator has been set before calling PwCrypt one is created automatically . Changing the authenticator after setting a password that has been encrypted using this...
10,899
def clear ( self ) : super ( XToolBar , self ) . clear ( ) if self . isCollapsable ( ) : self . _collapseButton = QToolButton ( self ) self . _collapseButton . setAutoRaise ( True ) self . _collapseButton . setSizePolicy ( QSizePolicy . Expanding , QSizePolicy . Expanding ) self . addWidget ( self . _collapseButton ) s...
Clears out this toolbar from the system .