idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
48,900
def ks_unif_durbin_matrix(samples, statistic):
    """Calculate P(D_n < statistic) for the uniform KS statistic using
    Durbin's matrix formula.

    NOTE(review): relies on module-level names `factor`, `factorr` and
    `shift` (overflow/underflow rescaling constants) and on numpy/scipy
    names (modf, tri, arange, fromfunction, gamma, fmax, identity, dot)
    being imported at module scope -- confirm.
    """
    h, k = modf(samples * statistic)
    k = int(k)
    h = 1 - h
    m = 2 * k + 1
    # Build the (m x m) Durbin transition matrix A.
    A = tri(m, k=1)
    hs = h ** arange(1, m + 1)
    A[:, 0] -= hs
    A[-1] -= hs[::-1]
    if h > .5:
        A[-1, 0] += (2 * h - 1) ** m
    A /= fromfunction(lambda i, j: gamma(fmax(1, i - j + 2)), (m, m))
    # Raise A to the power `samples` by square-and-multiply, rescaling by
    # `factor` whenever the pivot entry grows too large; eA/eP track the
    # accumulated binary exponent of the rescalings.
    P = identity(m)
    s = samples
    eA, eP = 0, 0
    while s != 1:
        s, b = divmod(s, 2)
        if b == 1:
            P = dot(P, A)
            eP += eA
            if P[k, k] > factor:
                P /= factor
                eP += shift
        A = dot(A, A)
        eA *= 2
        if A[k, k] > factor:
            A /= factor
            eA += shift
    P = dot(P, A)
    eP += eA
    # Multiply by n!/n**n incrementally, rescaling upward when the running
    # product drops below `factorr` to avoid underflow.
    x = P[k, k]
    for i in arange(1, samples + 1):
        x *= i / samples
        if x < factorr:
            x *= factor
            eP -= shift
    return x * 2 ** eP
Calculates the probability that the statistic is less than the given value using a fairly accurate implementation of the Durbin s matrix formula .
48,901
def ks_unif_durbin_recurrence_rational(samples, statistic):
    """Calculate P(D_n < statistic) via Durbin's recurrence using the
    standard `fractions` module for exact rational arithmetic.

    NOTE(review): the float `t = statistic * samples` is mixed into
    Fraction arithmetic, so exactness holds only while t is exactly
    representable -- TODO confirm callers pass such values.
    """
    t = statistic * samples
    ft1 = int(floor(t)) + 1
    fmt1 = int(floor(-t)) + 1
    fdt1 = int(floor(2 * t)) + 1
    # Seed terms q_0 .. q_{floor(t)}.
    qs = [Fraction(i ** i, factorial(i)) for i in range(ft1)]
    # Middle terms up to floor(2t) use the explicit summation form.
    qs.extend(Fraction(i ** i, factorial(i)) - 2 * t * sum((t + j) ** (j - 1) / factorial(j) * (i - t - j) ** (i - j) / factorial(i - j) for j in range(i + fmt1)) for i in range(ft1, fdt1))
    # Remaining terms follow Durbin's linear recurrence on earlier qs.
    qs.extend(-sum((-1) ** j * (2 * t - j) ** j / factorial(j) * qs[i - j] for j in range(1, fdt1)) for i in range(fdt1, samples + 1))
    return qs[samples] * factorial(samples) / samples ** samples
Calculates the probability that the statistic is less than the given value using Durbin s recurrence and employing the standard fractions module .
48,902
def ks_unif_pelz_good(samples, statistic):
    """Approximate the KS statistic distribution by the Pelz-Good
    transformed Li-Chien formula.

    NOTE(review): depends on module-level constants pi2, pi4, pi6,
    hpi1d2, and on arrays hs2, is2 (presumably squared half-integer and
    integer series terms) -- confirm they are defined at module scope.
    """
    x = 1 / statistic
    r2 = 1 / samples
    # Precompute the powers r^a * x^b that appear in the series terms.
    rx = sqrt(r2) * x
    r2x = r2 * x
    r2x2 = r2x * x
    r4x = r2x * r2
    r4x2 = r2x2 * r2
    r4x3 = r2x2 * r2x
    r5x3 = r4x2 * rx
    r5x4 = r4x3 * rx
    r6x3 = r4x2 * r2x
    r7x5 = r5x4 * r2x
    r9x6 = r7x5 * r2x
    r11x8 = r9x6 * r2x2
    # Polynomial coefficients of the asymptotic expansion.
    a1 = rx * (-r6x3 / 108 + r4x2 / 18 - r4x / 36 - r2x / 3 + r2 / 6 + 2)
    a2 = pi2 / 3 * r5x3 * (r4x3 / 8 - r2x2 * 5 / 12 - r2x * 4 / 45 + x + 1 / 6)
    a3 = pi4 / 9 * r7x5 * (-r4x3 / 6 + r2x2 / 4 + r2x * 53 / 90 - 1 / 2)
    a4 = pi6 / 108 * r11x8 * (r2x2 / 6 - 1)
    a5 = pi2 / 18 * r5x3 * (r2x / 2 - 1)
    a6 = -pi4 * r9x6 / 108
    w = -pi2 / 2 * r2x2
    return hpi1d2 * ((a1 + (a2 + (a3 + a4 * hs2) * hs2) * hs2) * exp(w * hs2) + (a5 + a6 * is2) * is2 * exp(w * is2)).sum()
Approximates the statistic distribution by a transformed Li - Chien formula .
48,903
def activate(self, *, filter_func=None):
    """Activate the type safety checker.

    After this call, every function selected by the (optional)
    *filter_func* will be checked when its module is imported.

    Raises RuntimeError when the checker is already active.
    """
    if self.active:
        raise RuntimeError("Type safety check already active")
    finder = ModuleFinder(Validator.decorate)
    self.__module_finder = finder
    if filter_func is not None:
        finder.set_filter(filter_func)
    finder.install()
Activate the type safety checker . After the call all functions that need to be checked will be .
48,904
def audit(**kwargs):
    """Decorator factory: audit an operation on a Pyramid-style view.

    The decorated function must take a parent object exposing a
    ``request`` attribute as its first argument. A revision record
    ("wijziging") is created before the call and saved after it.

    NOTE(review): relies on module-level names `renderers`, `json`,
    `log`, `_get_id_from_result`, `_get_versie_hash` and
    `_action_from_request` -- confirm they exist at module scope.
    """
    def wrap(fn):
        @functools.wraps(fn)
        def advice(parent_object, *args, **kw):
            request = parent_object.request
            wijziging = request.audit_manager.create_revision()
            # Run the audited operation first; its result is recorded below.
            result = fn(parent_object, *args, **kw)
            if hasattr(request, 'user') and request.user is not None and 'actor' in request.user:
                actor = request.user['actor']
                attributes = request.user['attributes']
                wijziging.updated_by = actor.get('uri', None)
                # When the actor acts on its own behalf, prefer a display
                # name / mail from the attributes over the description.
                if actor.get('uri') == actor.get('instantie_actor_uri'):
                    wijziging.updated_by_omschrijving = (attributes.get('displayname') or attributes.get('mail') or actor.get('omschrijving'))
                else:
                    wijziging.updated_by_omschrijving = actor.get('omschrijving')
            else:
                # No authenticated user: record the change as public.
                wijziging.updated_by = 'publiek'
                wijziging.updated_by_omschrijving = 'publiek'
            r_id = request.matchdict.get('id')
            wijziging.resource_object_id = r_id
            if result is not None:
                # Best-effort: serialize the result and extract its id;
                # failures are logged but do not abort the audit.
                try:
                    renderer_name = request.registry.settings.get('audit.pyramid.json.renderer', 'jsonrenderer')
                    json_string = renderers.render(renderer_name, result, request=request)
                    result_object_json = json.loads(json_string)
                    wijziging.resource_object_json = result_object_json
                    wijziging.resource_object_id = _get_id_from_result(r_id, result_object_json, kwargs)
                except Exception as e:
                    log.error(e)
            wijziging.versie = _get_versie_hash(wijziging)
            wijziging.actie = kwargs.get('actie') if kwargs.get('actie') else _action_from_request(request)
            request.audit_manager.save(wijziging)
            return result
        return advice
    return wrap
use this decorator to audit an operation
48,905
def audit_with_request(**kwargs):
    """Audit an operation that takes the request itself as first argument.

    Adapts *fn* to the shape expected by the ``audit`` decorator, which
    wants a parent object exposing a ``request`` attribute.
    """
    def wrap(fn):
        @audit(**kwargs)
        def operation(holder, *args, **kw):
            return fn(holder.request, *args, **kw)

        @functools.wraps(fn)
        def advice_with_request(the_request, *args, **kw):
            # Fake the parent object that `audit` expects.
            class _Holder:
                request = the_request
            return operation(_Holder(), *args, **kw)

        return advice_with_request
    return wrap
use this decorator to audit an operation with a request as input variable
48,906
def cross_validation_lock(obj):
    """Context-manager generator that holds *obj*'s cross-validation lock.

    Sets ``obj._cross_validation_lock`` to True for the duration of the
    ``with`` block and restores the previous value (defaulting to False)
    afterwards, even on error.
    """
    previous = getattr(obj, '_cross_validation_lock', False)
    try:
        obj._cross_validation_lock = True
        yield
    finally:
        obj._cross_validation_lock = previous
A context manager for holding a Traited object's cross-validators.
48,907
def copy_and_replace(file_in, file_out, mapping, **kwargs):
    """Copy *file_in* to *file_out*, replacing placeholders with values.

    Each (find, replace) pair in *mapping* is looked up wrapped in the
    separator (default ``@@``), e.g. ``@@name@@``, and substituted.

    Keyword args:
        separator: placeholder delimiter (default '@@').

    Fix: the input and output file handles were previously leaked; they
    are now closed deterministically via context managers.
    """
    separator = kwargs.get('separator', '@@')
    with open(file_in, 'r') as fin:
        content = fin.read()
    for find, replace in mapping:
        token = separator + find + separator
        print(u'Replacing {0} with {1}'.format(token, replace))
        content = content.replace(token, replace)
    with open(file_out, 'w') as fout:
        fout.write(content)
Copy a file and replace some placeholders with new values .
48,908
def coerce(cls, key, value):
    """Convert plain ``list`` values to ``MutableList``.

    Values that are already MutableList pass through unchanged; anything
    that is neither list nor MutableList is delegated to Mutable.coerce.
    """
    if isinstance(value, MutableList):
        return value
    if isinstance(value, list):
        return MutableList(value)
    return Mutable.coerce(key, value)
Convert plain list to MutableList .
48,909
def registerWorker(self, name, worker):
    """Register *worker* under the descriptive *name* and return it.

    Raises Exception when *worker* is not a multiprocessing.Process or
    when *name* is already registered.
    """
    if not isinstance(worker, multiprocessing.Process):
        message = "Process {0} is not actually a Process!".format(name)
        self.logger.error(message)
        raise Exception(message)
    if name in self.worker_list:
        message = "Process {0} already registered!".format(name)
        self.logger.error(message)
        raise Exception(message)
    self.worker_list[name] = worker
    self.logger.debug("Registered worker {0}".format(name))
    return worker
Register a new Worker under the given descriptive name .
48,910
def getWorker(self, name):
    """Return the worker registered under *name*.

    Raises Exception when no worker with that name is registered.
    """
    if name not in self.worker_list:
        message = "Worker {0} is not registered!".format(name)
        self.logger.error(message)
        raise Exception(message)
    return self.worker_list[name]
Retrieve the Worker registered under the given name .
48,911
def unregisterWorker(self, name):
    """Remove the worker registered under *name*.

    Raises Exception when no worker with that name is registered.
    """
    if name not in self.worker_list:
        message = "Worker {0} is not registered!".format(name)
        self.logger.error(message)
        raise Exception(message)
    self.worker_list.pop(name)
    self.logger.debug("Unregistered worker {0}".format(name))
Unregister the Worker registered under the given name .
48,912
def startAll(self):
    """Start every registered worker process."""
    self.logger.info("Starting all workers...")
    for worker_name in self.getWorkers():
        proc = self.getWorker(worker_name)
        self.logger.debug("Starting {0}".format(proc.name))
        proc.start()
    self.logger.info("Started all workers")
Start all registered Workers .
48,913
def cast_datetime_filter(value):
    """Normalize a datetime filter value to an ISO-8601 string.

    Accepts a string (parsed via parse_datetime) or a datetime instance;
    raises ValueError for anything else.
    """
    if isinstance(value, str):
        return parse_datetime(value).isoformat()
    if isinstance(value, datetime):
        return value.isoformat()
    raise ValueError('Received value of type {0}'.format(type(value)))
Cast a datetime filter value .
48,914
def filter_args_to_dict(filter_dict, accepted_filter_keys=None):
    """Cast and validate filter arguments.

    Keeps only keys present in *accepted_filter_keys* with non-None
    values, casting each according to the module-level filter_type_map.

    Fixes: mutable default argument ([]) replaced with a None sentinel;
    the cast-function map is built once instead of per iteration; log
    message typo ("foud") corrected.
    """
    if accepted_filter_keys is None:
        accepted_filter_keys = []
    # int/datetime values get dedicated cast helpers; others pass through.
    filter_cast_map = {
        'int': cast_integer_filter,
        'datetime': cast_datetime_filter,
    }
    out_dict = {}
    for key, value in filter_dict.items():
        if key not in accepted_filter_keys or value is None:
            logger.debug('Filter was not in accepted_filter_keys or value is None.')
            continue
        filter_type = filter_type_map.get(key, None)
        if filter_type is None:
            logger.debug('Filter key not found in map.')
            continue
        cast_function = filter_cast_map.get(filter_type, None)
        out_dict[key] = cast_function(value) if cast_function else value
    return out_dict
Cast and validate filter args .
48,915
def bencode(obj):
    """Bencode *obj* and return the encoding as a string.

    Supports int, str, list and dict (keys sorted). Raises Exception for
    unsupported types.

    Fix: an empty string is validly bencoded as "0:"; the previous code
    returned None for it, which also silently dropped empty strings from
    lists and dict values.
    """
    if isinstance(obj, int):
        return "i" + str(obj) + "e"
    if isinstance(obj, str):
        return str(len(obj)) + ":" + obj
    if isinstance(obj, list):
        res = "l"
        for elem in obj:
            elem = bencode(elem)
            if elem:
                res += elem
        return res + "e"
    if isinstance(obj, dict):
        res = "d"
        for key in sorted(obj.keys()):
            if key in obj:
                value = bencode(obj[key])
                key = bencode(key)
                if key and value:
                    res += key + value
        return res + "e"
    # NOTE: Python 2 only -- `unicode` does not exist on Python 3.
    if isinstance(obj, unicode):
        return bencode(obj.encode('utf-8'))
    # NOTE(review): unreachable -- OrderedDict subclasses dict and is
    # already handled by the dict branch above.
    if isinstance(obj, collections.OrderedDict):
        return bencode(dict(obj))
    raise Exception("Unknown object: %s (%s)" % (repr(obj), repr(type(obj))))
Bencodes obj and returns it as a string
48,916
def bdecode(text):
    """Decode a bencoded byte string and return the Python object.

    Handles ints ("i...e"), lists ("l...e"), dicts ("d...e") and
    length-prefixed strings ("4:spam").
    """
    text = text.decode('utf-8')

    def decode_from(pos):
        marker = text[pos]
        if marker == 'i':
            close = text.find('e', pos)
            return int(text[pos + 1:close], 10), close + 1
        if marker == 'l':
            items = []
            pos += 1
            while text[pos] != 'e':
                item, pos = decode_from(pos)
                items.append(item)
            return items, pos + 1
        if marker == 'd':
            mapping = {}
            pos += 1
            while text[pos] != 'e':
                key, pos = decode_from(pos)
                val, pos = decode_from(pos)
                mapping[key] = val
            return mapping, pos + 1
        # Anything else is a length-prefixed string: "<len>:<bytes>".
        colon = text.find(':', pos)
        size = int(text[pos:colon], 10)
        stop = colon + size + 1
        return text[colon + 1:stop], stop

    return decode_from(0)[0]
Decodes a bencoded bytearray and returns it as a python object
48,917
def change_password():
    """Change the standard neo4j password to 'testing' so the test suite
    can authenticate against a local server on port 7474.

    Polls http://localhost:7474/user/neo4j (up to ~10 retries) until a
    JSON response arrives, then POSTs the new password if the server
    reports password_change_required.

    NOTE(review): base64.encodestring was removed in Python 3.9
    (base64.encodebytes is the replacement); `http` here is presumably
    an alias for http.client -- confirm the module imports.
    """
    basic_auth = '%s:%s' % (DEFAULT_USERNAME, DEFAULT_PASSWORD)
    # Python 2 accepts str directly; Python 3 needs bytes + decode.
    try:
        auth = base64.encodestring(basic_auth)
    except TypeError:
        auth = base64.encodestring(bytes(basic_auth, 'utf-8')).decode()
    headers = {"Content-Type": "application/json", "Accept": "application/json", "Authorization": "Basic %s" % auth.strip()}
    response = None
    retry = 0
    # Poll until the server responds with parseable JSON or we give up.
    while not response:
        sleep(1)
        con = http.HTTPConnection('localhost:7474', timeout=10)
        try:
            con.request('GET', 'http://localhost:7474/user/neo4j', headers=headers)
            response = json.loads(con.getresponse().read().decode('utf-8'))
        except ValueError:
            con.close()
            retry += 1
            if retry > 10:
                print("Could not change password for user neo4j")
                break
    if response and response.get('password_change_required', None):
        payload = json.dumps({'password': 'testing'})
        con.request('POST', 'http://localhost:7474/user/neo4j/password', payload, headers)
        print("Password changed for user neo4j")
    con.close()
Changes the standard password from neo4j to testing to be able to run the test suite .
48,918
def get_default_config_file(argparser, suppress=None, default_override=None):
    """Render an ArgumentParser's options as a ConfigObj-style config file.

    Each option becomes a "# help\nname=default\n" entry; dests listed in
    *suppress* are skipped and *default_override* values win over the
    parser defaults. Duplicate dests are emitted only once.
    """
    if not suppress:
        suppress = []
    if not default_override:
        default_override = {}
    lines = []
    seen = []
    for action in argparser._actions:
        if action.dest in suppress or action.dest in seen:
            continue
        default = default_override.get(action.dest, action.default)
        lines.append("# {0}\n{1}={2}\n".format(action.help, action.dest, default))
        seen.append(action.dest)
    return "".join(lines)
Turn an ArgumentParser into a ConfigObj compatible configuration file .
48,919
def _perform_binds ( self , binds ) : for bind in binds : self . logger . debug ( "Binding queue {0} to exchange {1} with key {2}" . format ( bind [ 'queue' ] , bind [ 'exchange' ] , bind [ 'routing_key' ] ) ) self . channel . queue_bind ( ** bind )
Binds queues to exchanges .
48,920
def _perform_unbinds ( self , binds ) : for bind in binds : self . logger . debug ( "Unbinding queue {0} from exchange {1} with key {2}" . format ( bind [ 'queue' ] , bind [ 'exchange' ] , bind [ 'routing_key' ] ) ) self . channel . queue_unbind ( ** bind )
Unbinds queues from exchanges .
48,921
def close(self):
    """Cancel any active consumer and close the AMQP connection.

    Errors raised while closing are logged as warnings rather than
    propagated, since close() is typically called during teardown.

    Fix: `except Exception, eee` is Python-2-only syntax (a SyntaxError
    on Python 3); replaced with the `as` form, valid on Python 2.6+.
    """
    self.cancel()
    self.logger.debug("Closing AMQP connection")
    try:
        self.connection.close()
    except Exception as eee:
        self.logger.warning("Received an error while trying to close AMQP connection: " + str(eee))
Closes the internal connection .
48,922
def cancel(self, consumer_tag=None):
    """Cancel the current consuming action.

    Uses the stored self.consumer_tag when no explicit *consumer_tag* is
    given; silently does nothing when neither is available.
    """
    tag = consumer_tag
    if not tag:
        if not hasattr(self, "consumer_tag"):
            return
        tag = self.consumer_tag
    self.channel.basic_cancel(tag)
Cancels the current consuming action by using the stored consumer_tag . If a consumer_tag is given that one is used instead .
48,923
def json_get(parsed_json, key):
    """Return parsed_json[key], raising ValueError when *key* is absent."""
    if key in parsed_json:
        return parsed_json[key]
    raise ValueError("JSON does not contain a {} field".format(key))
Retrieves the key from a parsed_json dictionary or raises an exception if the key is not present
48,924
def readme(fname):
    """Read markdown file *fname* (relative to this module's directory)
    and return its contents, converted to reST when pypandoc is
    available; falls back to the raw markdown otherwise.

    Fix: the file handle was previously leaked; it is now closed
    deterministically via a context manager.
    """
    path = os.path.join(os.path.dirname(__file__), fname)
    with open(path) as handle:
        md = handle.read()
    try:
        import pypandoc
        return pypandoc.convert(md, 'rst', format='md')
    except ImportError:
        return md
Reads a markdown file and returns the contents formatted as rst
48,925
def get_config(config_base, custom_file=None, configspec=None):
    """Load "<config_base>.config" from several locations and merge them.

    Later sources override earlier ones, in this order: this module's
    directory, /etc, the user's home directory (as a dotfile), and
    finally *custom_file* when given. Each file is validated against
    *configspec* when one is supplied.

    NOTE(review): depends on ConfigObj and Validator being imported at
    module scope; `home` is read but `loc` (not the process working
    directory) is what the "workingdir" branch actually uses.
    """
    logger = logging.getLogger(__name__)
    logger.debug("Expanding variables")
    home = os.path.expanduser("~")
    # Directory containing this source file (used as the first location).
    loc = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
    logger.debug("Create empty config")
    config = ConfigObj()
    if os.path.isfile(os.path.join(loc, "%s.config" % config_base)):
        logger.debug("Loading config from workingdir")
        cfg = ConfigObj(os.path.join(loc, "%s.config" % config_base), configspec=configspec)
        if configspec:
            cfg.validate(Validator())
        config.merge(cfg)
    if os.path.isfile("/etc/%s.config" % config_base):
        logger.debug("Loading config from /etc")
        cfg = ConfigObj("/etc/%s.config" % config_base, configspec=configspec)
        if configspec:
            cfg.validate(Validator())
        config.merge(cfg)
    if os.path.isfile(os.path.join(home, ".%s.config" % config_base)):
        logger.debug("Loading config from homedir")
        cfg = ConfigObj(os.path.join(home, ".%s.config" % config_base), configspec=configspec)
        if configspec:
            cfg.validate(Validator())
        config.merge(cfg)
    if custom_file:
        logger.debug("Loading custom config file")
        cfg = ConfigObj(custom_file, configspec=configspec)
        if configspec:
            cfg.validate(Validator())
        config.merge(cfg)
    return config
Loads a configuration file from multiple locations and merge the results into one .
48,926
def get_config_dir(path, pattern="*.config", configspec=None, allow_errors=False):
    """Load every config file in *path* matching *pattern* (sorted by
    name) and merge them into a single ConfigObj.

    Files that fail to parse are logged and skipped. Each parsed file is
    validated against *configspec* when one is supplied.

    NOTE(review): the `allow_errors` parameter is accepted but never
    used -- kept for interface compatibility.

    Fix: `except ConfigObjError, coe` is Python-2-only syntax (a
    SyntaxError on Python 3); replaced with the `as` form.
    """
    logger = logging.getLogger(__name__)
    logger.debug("Loading all files matching {0} in {1}".format(pattern, path))
    files = Globber(path, include=[pattern], recursive=False).glob()
    files = sorted(files)
    config = ConfigObj()
    for filename in files:
        logger.debug("- Loading config for {0}".format(filename))
        try:
            conf = ConfigObj(filename, configspec=configspec)
        except ConfigObjError as coe:
            logger.error("An error occurred while parsing {0}: {1}".format(filename, str(coe)))
            continue
        if configspec:
            conf.validate(Validator())
        config.merge(conf)
    return config
Load an entire directory of configuration files merging them into one .
48,927
def resolve(self, *pargs, **kwargs):
    """Resolve the promise with the given arguments.

    Caches the positional/keyword arguments and immediately attempts to
    fire the `then` callback if one has been attached.
    """
    self._cached = pargs, kwargs
    self._try_then()
Resolve the promise .
48,928
def _try_then ( self ) : if self . _cached is not None and self . _callback is not None : self . _callback ( * self . _cached [ 0 ] , ** self . _cached [ 1 ] )
Check to see if self has been resolved yet if so invoke then .
48,929
def wait_for(self, timeout=3000):
    """Block until this promise resolves, returning its value.

    *timeout* is in milliseconds; an Exception is raised when it is
    exceeded. Busy-waits by pumping the kernel event loop.

    NOTE(review): relies on a module-level `ip` (presumably the IPython
    shell) exposing kernel.do_one_iteration -- confirm.
    """
    # One-element lists act as mutable cells writable from the closure.
    results = [None]
    results_called = [False]
    def results_callback(val):
        results[0] = val
        results_called[0] = True
    self.then(results_callback)
    start = time.time()
    while not results_called[0]:
        if time.time() - start > timeout / 1000.:
            raise Exception('Timeout of %d ms reached' % timeout)
        # Let the kernel process incoming comm messages while we wait.
        ip.kernel.do_one_iteration()
    return results[0]
Halt execution until self resolves.
48,930
def _is_primitive ( thing ) : primitive = ( int , str , bool , float ) return isinstance ( thing , primitive )
Determine if the value is a primitive
48,931
def _guess_type ( val ) : if isinstance ( val , bool ) : return "choice" elif isinstance ( val , int ) : return "number" elif isinstance ( val , float ) : return "number" elif isinstance ( val , str ) : return "text" elif hasattr ( val , 'read' ) : return "file" else : return "text"
Guess the input type of the parameter based off the default value if unknown use text
48,932
def _params(sig):
    """Build a list of UI parameter descriptors from an inspect.Signature.

    Each descriptor records the parameter's name, whether it has a
    default (optional), its (sanitized) default, its annotation as a
    description, and a guessed widget type.
    """
    params = []
    for p in sig.parameters:
        param = sig.parameters[p]
        optional = param.default != inspect.Signature.empty
        default = UIBuilder._safe_default(param.default) if param.default != inspect.Signature.empty else ''
        annotation = param.annotation if param.annotation != inspect.Signature.empty else ''
        # NOTE: shadows the `type` builtin within this loop body.
        type = UIBuilder._guess_type(default)
        p_attr = {
            "name": param.name,
            "label": param.name,
            "optional": optional,
            "default": default,
            "description": annotation,
            "hide": False,
            "type": type,
            "kinds": None,
            "choices": [],
            "id": None,
            "events": None
        }
        # Boolean defaults become a True/False choice widget.
        if isinstance(default, bool):
            p_attr['choices'] = {'True': 'true', 'False': 'false'}
        params.append(p_attr)
    return params
Read params values and annotations from the signature
48,933
def _import(func):
    """Return the namespace path under which *func* is reachable from
    this module's globals (e.g. "pkg.mod.func" or just "func").

    Tries, in order: the bare function name, the full module path, a
    suffix of the module path whose head is in globals, and finally any
    global name bound to the function's module object.
    """
    func_name = func.__name__
    if func_name in globals():
        return func_name
    module_name = func.__module__
    submodules = module_name.split('.')
    if submodules[0] in globals():
        return module_name + '.' + func_name
    # Try progressively shorter suffixes of the dotted module path.
    for i in range(len(submodules)):
        m = submodules[i]
        if m in globals():
            return '.'.join(submodules[i:]) + '.' + func_name
    # Last resort: find a global alias bound to the module object itself.
    module_ref = sys.modules[func.__module__]
    all_globals = globals()
    for n in all_globals:
        if all_globals[n] == module_ref:
            return n + '.' + func_name
    return func_name
Return the namespace path to the function
48,934
def get_logger(name=None, level=logging.NOTSET, handlers=None):
    """Create a logging.Logger for *name* (None means the root logger)
    with optional console/file/syslog handlers.

    *handlers* is expected to be a dict keyed by "console", "file"
    and/or "syslog", each mapping to a per-handler config dict (note the
    default is a list -- an empty one -- which simply skips all handler
    setup; assumes callers pass a dict when configuring handlers --
    TODO confirm).

    NOTE(review): collections.Iterable moved to collections.abc and was
    removed from collections in Python 3.10 -- this code targets
    Python 2.
    """
    logger = logging.getLogger(name)
    if name is None:
        name = "root"
    if handlers is None:
        handlers = []
    logger.setLevel(level)
    # Replace any existing handlers when new ones are being configured.
    if len(handlers) != 0:
        logger.handlers = []
    if "console" in handlers:
        if not isinstance(handlers['console'], collections.Iterable):
            handlers['console'] = {}
        if "handler" in handlers['console']:
            strm = handlers['console']['handler']
        else:
            strm = logging.StreamHandler()
        if "format" in handlers['console']:
            fmt = logging.Formatter(handlers['console']['format'])
        else:
            fmt = logging.Formatter('%(message)s')
        strm.setLevel(level)
        strm.setFormatter(fmt)
        logger.addHandler(strm)
    if "file" in handlers:
        if not isinstance(handlers['file'], collections.Iterable):
            raise TypeError("file handler config must be a dict")
        if "logfile" not in handlers['file']:
            raise ValueError("file handler config must contain logfile path name")
        # WatchedFileHandler reopens the file if it is rotated externally.
        fil = logging.handlers.WatchedFileHandler(handlers['file']['logfile'])
        if "format" in handlers['file']:
            fmt = logging.Formatter(handlers['file']['format'])
        else:
            fmt = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
        fil.setLevel(level)
        fil.setFormatter(fmt)
        logger.addHandler(fil)
    if "syslog" in handlers:
        if not isinstance(handlers['syslog'], collections.Iterable):
            handlers['syslog'] = {}
        sysl = logging.handlers.SysLogHandler(address='/dev/log', facility=logging.handlers.SysLogHandler.LOG_SYSLOG)
        if "format" in handlers['syslog']:
            fmt = logging.Formatter(handlers['syslog']['format'])
        else:
            fmt = logging.Formatter('%(name)s[%(process)s] %(levelname)-8s: %(message)s')
        sysl.setLevel(level)
        sysl.setFormatter(fmt)
        logger.addHandler(sysl)
    return logger
Create a Python logging Logger for the given name . A special case is when the name is None as this will represent the root Logger object .
48,935
def _attach_endpoints(self):
    """Dynamically attach endpoint callables to this client.

    Scans this instance for nested _Endpoint subclasses, instantiates
    each one with the shared requester, and exposes the instance under
    its endpoint_base attribute. For every declared GET/POST endpoint a
    request function is generated and bound on the endpoint instance
    (slashes in the endpoint path become underscores in the name).
    """
    for name, value in inspect.getmembers(self):
        if inspect.isclass(value) and issubclass(value, self._Endpoint) and (value is not self._Endpoint):
            endpoint_instance = value(self.requester)
            setattr(self, endpoint_instance.endpoint_base, endpoint_instance)
            # Default the optional declarations so the loop below is safe.
            if not hasattr(endpoint_instance, 'get_endpoints'):
                endpoint_instance.get_endpoints = ()
            if not hasattr(endpoint_instance, 'post_endpoints'):
                endpoint_instance.post_endpoints = ()
            if not hasattr(endpoint_instance, 'is_callable'):
                endpoint_instance.is_callable = False
            for endpoint in (endpoint_instance.get_endpoints + endpoint_instance.post_endpoints):
                function = endpoint_instance.create_endpoint_function(endpoint)
                function_name = endpoint.replace('/', '_')
                setattr(endpoint_instance, function_name, function)
                function.__name__ = str(function_name)
                function.__doc__ = 'Tells the object to make a request to the {0} endpoint'.format(endpoint)
Dynamically attaches endpoint callables to this client
48,936
def registerThread(self, name, thread):
    """Register *thread* under the descriptive *name* and return it.

    Raises Exception when *thread* is not a threading.Thread or when
    *name* is already registered.
    """
    if not isinstance(thread, threading.Thread):
        message = "Thread {0} is not actually a Thread!".format(name)
        self.logger.error(message)
        raise Exception(message)
    if name in self.thread_list:
        message = "Thread {0} already registered!".format(name)
        self.logger.error(message)
        raise Exception(message)
    self.thread_list[name] = thread
    self.logger.debug("Registered thread {0}".format(name))
    return thread
Register a new Thread under the given descriptive name .
48,937
def getThread(self, name):
    """Return the thread registered under *name*.

    Raises Exception when no thread with that name is registered.
    """
    if name not in self.thread_list:
        message = "Thread {0} is not registered!".format(name)
        self.logger.error(message)
        raise Exception(message)
    return self.thread_list[name]
Retrieve the Thread registered under the given name .
48,938
def unregisterThread(self, name):
    """Remove the thread registered under *name*.

    Raises Exception when no thread with that name is registered.
    """
    if name not in self.thread_list:
        message = "Thread {0} is not registered!".format(name)
        self.logger.error(message)
        raise Exception(message)
    self.thread_list.pop(name)
    self.logger.debug("Unregistered thread {0}".format(name))
Unregister the Thread registered under the given name .
48,939
def startAll(self):
    """Start every registered thread."""
    self.logger.info("Starting all threads...")
    for thread_name in self.getThreads():
        thr = self.getThread(thread_name)
        self.logger.debug("Starting {0}".format(thr.name))
        thr.start()
    self.logger.info("Started all threads")
Start all registered Threads .
48,940
def usersettings(request):
    """Context processor exposing the current UserSettings object under
    the 'usersettings' key.

    Prefers a usersettings attribute already attached to the request,
    falling back to the site-wide lookup helper.
    """
    if hasattr(request, 'usersettings'):
        current = request.usersettings
    else:
        from .shortcuts import get_current_usersettings
        current = get_current_usersettings()
    return {'usersettings': current}
Returns the current UserSettings based on the SITE_ID in the project s settings as context variables
48,941
def check_ensembl_api_version(self):
    """Fetch the Ensembl REST API version and record it in the cache.

    Exists so that an API version bump is noticed and the responses for
    the new version can be checked manually.
    """
    self.attempt = 0
    headers = {"content-type": "application/json"}
    ext = "/info/rest"
    raw = self.ensembl_request(ext, headers)
    payload = json.loads(raw)
    self.cache.set_ensembl_api_version(payload["release"])
check the ensembl api version matches a currently working version This function is included so when the api version changes we notice the change and we can manually check the responses for the new version .
48,942
def open_url(self, url, headers):
    """Open *url* with urllib, returning (body, status_code, headers).

    Served from the local cache when possible (status forced to 200);
    otherwise rate-limited before hitting the network. Connection-level
    failures are reported as an empty body with status 500.

    NOTE(review): relies on module-level `request`, `HTTPError`,
    `URLError` and `IS_PYTHON3` -- confirm the module imports.
    """
    data = self.cache.get_cached_data(url)
    if data is not None:
        return data, 200, headers
    self.rate_limit_ensembl_requests()
    req = request.Request(url, headers=headers)
    try:
        handler = request.urlopen(req)
    except HTTPError as error:
        # HTTPError objects are file-like; read the error response below.
        handler = error
    except (URLError, ConnectionResetError, TimeoutError):
        return '', 500, headers
    status_code = handler.getcode()
    response = handler.read()
    if IS_PYTHON3:
        response = response.decode("utf-8")
    # Normalize response header names to lowercase for later lookups.
    headers = dict(zip(map(str.lower, handler.headers.keys()), handler.headers.values()))
    now = time.strftime("%Y-%m-%d %H:%M:%S", time.gmtime())
    logging.warning("{}\t{}\t{}".format(now, status_code, url))
    return response, status_code, headers
open url with python libraries
48,943
def ensembl_request(self, ext, headers):
    """Request *ext* from the Ensembl REST API, retrying on rate limits
    (429) and transient server errors (500/503/504), and caching the
    successful response. Raises ValueError after five attempts or on any
    other non-200 status.

    Fix: the "cannot obtain json output" warning used a format string
    with five placeholders but only four arguments, so hitting that
    branch raised IndexError instead of logging.
    """
    self.attempt += 1
    if self.attempt > 5:
        raise ValueError("too many attempts, figure out why its failing")
    response, status, requested_headers = self.open_url(self.server + ext, headers=headers)
    if status == 429:
        # Honor whichever back-off hint the server provides.
        if "retry-after" in requested_headers:
            time.sleep(float(requested_headers["retry-after"]))
        elif "x-ratelimit-reset" in requested_headers:
            time.sleep(int(requested_headers["x-ratelimit-reset"]))
        return self.ensembl_request(ext, headers)
    elif status in [500, 503, 504]:
        time.sleep(30)
        return self.ensembl_request(ext, headers)
    elif status != 200:
        raise ValueError("Invalid Ensembl response for {}\nheaders: {}\nresponse: {}".format(self.server + ext, requested_headers, response))
    if requested_headers["content-type"] == "application/json":
        try:
            json.loads(response)
        except ValueError:
            now = time.strftime("%Y-%m-%d %H:%M:%S", time.gmtime())
            logging.warning("{}\t{}\t{}\t{}".format(now, status, self.server + ext, "cannot obtain json output"))
            # NOTE(review): the retry passes the *response* headers, not
            # the original request headers -- preserved as-is; confirm
            # intent.
            return self.ensembl_request(ext, requested_headers)
    self.cache.cache_url_data(self.server + ext, response)
    return response
obtain sequence via the ensembl REST API
48,944
def get_genes_for_hgnc_id(self, hgnc_symbol):
    """Return the Ensembl gene IDs cross-referenced to *hgnc_symbol*."""
    headers = {"content-type": "application/json"}
    self.attempt = 0
    ext = "/xrefs/symbol/homo_sapiens/{}".format(hgnc_symbol)
    raw = self.ensembl_request(ext, headers)
    return [entry["id"] for entry in json.loads(raw) if entry["type"] == "gene"]
obtain the ensembl gene IDs that correspond to a HGNC symbol
48,945
def get_genomic_seq_for_region(self, chrom, start_pos, end_pos):
    """Fetch the plus-strand reference sequence for chrom:start..end."""
    headers = {"content-type": "text/plain"}
    self.attempt = 0
    region = "/sequence/region/human/{}:{}..{}:1".format(chrom, start_pos, end_pos)
    return self.ensembl_request(region, headers)
obtain the sequence for a genomic region
48,946
def rate_limit_ensembl_requests(self):
    """Sleep so consecutive requests are at least self.rate_limit apart,
    then record the current time as the last-request time."""
    elapsed = time.time() - self.prior_time
    if elapsed < self.rate_limit:
        time.sleep(self.rate_limit - elapsed)
    self.prior_time = time.time()
limit ensembl requests to one per 0 . 067 s
48,947
def call_simple_cli(command, cwd=None, universal_newlines=False, redirect_stderr=False):
    """Module-level convenience wrapper: run *command* through a
    throwaway SimpleCliTool instance and return its result."""
    return SimpleCliTool()._call_cli(command, cwd, universal_newlines, redirect_stderr)
Simple wrapper around SimpleCliTool . Simple .
48,948
def _on_msg(self, msg):
    """Handle a comm message from the front-end.

    Two message shapes are supported: a callback invocation (the
    front-end calls a registered Python callable) and a value reply
    (resolving a pending promise created by _send).

    NOTE(review): relies on the module-level `callback_registry` dict --
    confirm it exists at module scope.
    """
    data = msg['content']['data']
    if 'callback' in data:
        # Front-end invokes a previously registered Python callable.
        guid = data['callback']
        callback = callback_registry[guid]
        args = data['arguments']
        args = [self.deserialize(a) for a in args]
        index = data['index']
        results = callback(*args)
        # Ship the result back, tagged with the caller's index.
        return self.serialize(self._send('return', index=index, results=results))
    else:
        # Front-end delivers a value: resolve the matching promise.
        index = data['index']
        immutable = data['immutable']
        value = data['value']
        if index in self._callbacks:
            self._callbacks[index].resolve({'immutable': immutable, 'value': value})
            del self._callbacks[index]
Handle messages from the front - end
48,949
def serialize(self, obj):
    """Serialize *obj* into the JSON envelope sent to the front-end.

    Front-end proxies (objects carrying _jsid) are passed by reference;
    JSON-serializable values are embedded; callables are registered in
    the module-level callback_registry and referenced by GUID.

    Fix: the bare `except:` around json.dumps also swallowed
    KeyboardInterrupt/SystemExit; json.dumps signals non-serializable
    values with TypeError (and ValueError for e.g. circular refs), so
    only those are caught now.
    """
    if hasattr(obj, '_jsid'):
        # Proxy for an object living on the front-end: pass by reference.
        return {'immutable': False, 'value': obj._jsid}
    obj_json = {'immutable': True}
    try:
        json.dumps(obj)
        obj_json['value'] = obj
    except (TypeError, ValueError):
        pass
    if callable(obj):
        guid = str(uuid.uuid4())
        callback_registry[guid] = obj
        obj_json['callback'] = guid
    return obj_json
Serialize an object for sending to the front - end .
48,950
def deserialize(self, obj):
    """Deserialize a front-end envelope into a Python value or JS proxy.

    Immutable envelopes yield their value directly; mutable ones yield
    a (cached) JSObject proxy keyed by GUID.
    """
    if obj['immutable']:
        return obj['value']
    guid = obj['value']
    if guid not in object_registry:
        object_registry[guid] = JSObject(self, guid)
    return object_registry[guid]
Deserialize an object from the front - end .
48,951
def _send(self, method, **parameters):
    """Send a message to the front-end and return a promise for the reply.

    The message carries a monotonically increasing index used to match
    the eventual response; extra *parameters* may override index/method.
    """
    index = self._calls
    msg = {'index': index, 'method': method}
    msg.update(parameters)
    promise = SimplePromise()
    self._callbacks[index] = promise
    self._calls += 1
    self._comm.send(msg)
    return promise
Sends a message to the front - end and returns a promise .
48,952
def register_response(self, correlation_id=None):
    """Reserve a slot for an incoming RPC response and return its id.

    Generates a correlation id when none is supplied; raises KeyError
    when the given id is already registered.
    """
    cid = correlation_id or str(uuid.uuid1())
    if cid in self.responses:
        raise KeyError("Correlation_id {0} was already registered, and therefor not unique.".format(cid))
    self.responses[cid] = None
    return cid
Register the receiving of an RPC response. Will return the given correlation_id after registering, or, if correlation_id is None, will generate a correlation_id and return it after registering. If the given correlation_id has already been used a KeyError will be raised.
48,953
def retrieve_response(self, correlation_id):
    """Return and clear a received RPC response.

    Raises KeyError for unregistered ids; returns None (leaving the
    registration in place) while no value has arrived yet.
    """
    if correlation_id not in self.responses:
        raise KeyError("Given RPC response correlation_id was not registered.")
    value = self.responses[correlation_id]
    if not value:
        return None
    del self.responses[correlation_id]
    return value
Retrieve a registered RPC response . If the correlation_id was not registered an KeyError will the raised . If not value has been received yet None will be returned . After retrieving the response the value will be unset internally .
48,954
def request_response(self, exchange, routing_key, message, properties=None,
                     correlation_id=None, timeout=6):
    """Publish *message* and block until the matching RPC response arrives.

    Wraps publish(), setting correlation_id/reply_to so the server can
    answer on our RPC queue.  Raises MessageNotDelivered when the broker
    cannot route the message, MessageDeliveryTimeout after *timeout*
    seconds (timeout=0/None disables the deadline).
    """
    if not properties:
        properties = {}
    # Reserve a response slot before publishing so the reply cannot race us.
    properties['correlation_id'] = self.register_response(correlation_id)
    properties['reply_to'] = self.rpc_queue_name
    if not self.publish(exchange, routing_key, message, properties, mandatory=True):
        # Undo the registration so the slot does not leak.
        self.retrieve_response(properties['correlation_id'])
        raise MessageNotDelivered("Message was not delivered to a queue")
    start = int(time.time())
    # Older pika channels need this to deliver events while we poll.
    if hasattr(self.channel, "force_data_events"):
        self.channel.force_data_events(True)
    while properties['correlation_id'] not in self.retrieve_available_responses():
        self.connection.process_data_events()
        if timeout and (int(time.time()) - start) > timeout:
            # Clean up the registration before signalling the timeout.
            self.retrieve_response(properties['correlation_id'])
            raise MessageDeliveryTimeout("No response received from RPC server within specified period")
    return self.retrieve_response(properties['correlation_id'])
This function wraps publish and sets the properties necessary to allow end - to - end communication using the Rpc paradigm .
48,955
def get_current(self):
    """Return the current UserSettings for settings.SITE_ID.

    The object is fetched from the database once and then served from
    the module-level USERSETTINGS_CACHE.
    """
    from django.conf import settings
    try:
        site_id = settings.SITE_ID
    except AttributeError:
        raise ImproperlyConfigured(
            'You\'re using the Django "sites framework" without having '
            'set the SITE_ID setting. Create a site in your database and '
            'set the SITE_ID setting to fix this error.')
    if site_id not in USERSETTINGS_CACHE:
        # Cache miss: hit the database once, then reuse the result.
        USERSETTINGS_CACHE[site_id] = self.get(site_id=site_id)
    return USERSETTINGS_CACHE[site_id]
Returns the current UserSettings based on the SITE_ID in the project s settings . The UserSettings object is cached the first time it s retrieved from the database .
48,956
def append_url(base_url, path):
    """Append *path* to *base_url*, normalising the joining slash.

    Ensures exactly one slash between the two parts before delegating
    to urljoin.  Fix: the original indexed base_url[-1] and path[0]
    directly, raising IndexError for empty strings.
    """
    if base_url and not base_url.endswith("/"):
        base_url += "/"
    if path.startswith("/"):
        path = path[1:]
    return urljoin(base_url, path)
Append path to base_url in a sensible way .
48,957
def _randomString():
    """Return a 10-character random string (A-Z, 0-9) for message signing."""
    alphabet = string.ascii_uppercase + string.digits
    return ''.join(random.choice(alphabet) for _ in range(10))
Random string for message signing
48,958
def _callFunc(session, funcName, password, args):
    """Call a cjdns admin function over the session's UDP socket.

    Implements the cjdns auth handshake: request a cookie, hash the
    password with it, then replace the hash with a digest of the whole
    bencoded request, and send the (possibly auth-wrapped) query.
    Returns the decoded response message.
    """
    # First exchange: fetch a cookie from the server.
    txid = _randomString()
    sock = session.socket
    sock.send(bytearray('d1:q6:cookie4:txid10:%se' % txid, 'utf-8'))
    msg = _getMessage(session, txid)
    cookie = msg['cookie']
    # Fresh txid for the actual function call.
    txid = _randomString()
    tohash = (password + cookie).encode('utf-8')
    req = {
        'q': funcName,
        'hash': hashlib.sha256(tohash).hexdigest(),
        'cookie': cookie,
        'args': args,
        'txid': txid
    }
    if password:
        # Authenticated call: the real query name moves under 'aq'.
        req['aq'] = req['q']
        req['q'] = 'auth'
    # First serialization exists only to compute the final request hash.
    reqBenc = bencode(req).encode('utf-8')
    req['hash'] = hashlib.sha256(reqBenc).hexdigest()
    reqBenc = bencode(req)
    sock.send(bytearray(reqBenc, 'utf-8'))
    return _getMessage(session, txid)
Call custom cjdns admin function
48,959
def _receiverThread(session):
    """Background loop receiving messages from the cjdns admin server.

    Sends a keepalive every KEEPALIVE_INTERVAL_SECONDS and raises
    PingTimeout when nothing has been received for 10 seconds past that.
    Keepalive replies are consumed here; all other messages are pushed
    onto session.queue for _getMessage.

    Fix: the KeyboardInterrupt handler imported the Python 2 ``thread``
    module; this file is Python 3 (bytes literals, ``queue.Empty``), so
    use ``_thread`` instead.
    """
    timeOfLastSend = time.time()
    timeOfLastRecv = time.time()
    try:
        while True:
            if timeOfLastSend + KEEPALIVE_INTERVAL_SECONDS < time.time():
                if timeOfLastRecv + 10 < time.time():
                    raise exceptions.PingTimeout()
                # Bencoded dict whose txid field is the 8 chars 'keepaliv'
                # (the trailing 'e' closes the dict, not the txid).
                session.socket.send(
                    b'd1:q18:Admin_asyncEnabled4:txid8:keepalive')
                timeOfLastSend = time.time()
            try:
                data = session.socket.recv(BUFFER_SIZE)
            except socket.timeout:
                continue
            try:
                benc = bdecode(data)
            except (KeyError, ValueError):
                logger.error("error decoding [%s]", data)
                continue
            if benc['txid'] == 'keepaliv':
                if benc['asyncEnabled'] == 0:
                    raise exceptions.SessionLost()
                timeOfLastRecv = time.time()
            else:
                session.queue.put(benc)
    except KeyboardInterrupt:
        logger.exception("interrupted")
        # Propagate the interrupt to the main thread (Python 3 module name).
        import _thread
        _thread.interrupt_main()
Receiving messages from cjdns admin server
48,960
def _getMessage(session, txid):
    """Block until the message carrying *txid* is available, then return it.

    Messages for other txids pulled off the queue in the meantime are
    parked in session.messages for their own waiters.
    """
    while True:
        if txid in session.messages:
            # Our message has already been parked: hand it over once.
            return session.messages.pop(txid)
        try:
            incoming = session.queue.get(timeout=100)
        except queue.Empty:
            continue
        if 'txid' in incoming:
            session.messages[incoming['txid']] = incoming
        else:
            logger.info("message with no txid: %s" % incoming)
Getting message associated with txid
48,961
def _functionFabric(func_name, argList, oargList, password):
    """Build a Session method that forwards its arguments to _callFunc.

    Optional-argument defaults (oargList) are applied first, then
    positional arguments by position, then keyword arguments.
    """
    def handler(self, *args, **kwargs):
        call_args = dict(oargList)
        # Positional args map onto argList in order; extras are ignored.
        for name, value in zip(argList, args):
            call_args[name] = value
        call_args.update(kwargs)
        return _callFunc(self, func_name, password, call_args)

    handler.__name__ = str(func_name)
    return handler
Function fabric for Session class
48,962
def connect(ipAddr, port, password):
    """Connect to the cjdns admin interface at ipAddr:port.

    Probes the socket with a ping, pages through
    Admin_availableFunctions, attaches a generated method per admin
    function to the Session class, starts the receiver thread, and
    verifies the password with an authenticated ping.  Returns the
    ready Session.
    """
    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    sock.connect((ipAddr, port))
    sock.settimeout(2)
    # Sanity check: is this actually a cjdns admin socket?
    sock.send(b'd1:q4:pinge')
    data = sock.recv(BUFFER_SIZE)
    if not data.endswith(b'1:q4:ponge'):
        raise exceptions.NotACjdnsAdminSocket(
            "Looks like %s:%d is to a non-cjdns socket.", (ipAddr, port))
    page = 0
    availableFunctions = {}
    # The function listing is paginated; loop until 'more' disappears.
    while True:
        sock.send(bytearray(
            'd1:q24:Admin_availableFunctions4:argsd4:pagei%seee' % page,
            'utf-8'))
        data = sock.recv(BUFFER_SIZE)
        benc = bdecode(data)
        for func in benc['availableFunctions']:
            availableFunctions[func] = benc['availableFunctions'][func]
        if 'more' not in benc:
            break
        page = page + 1
    funcArgs = {}
    funcOargs = {}
    for (i, func) in availableFunctions.items():
        items = func.items()
        # Required arguments first, then the optional ones.
        rargList = [arg for arg, atts in items if atts['required']]
        argList = rargList + [arg for arg, atts in items if not atts['required']]
        oargList = {}
        for (arg, atts) in items:
            if not atts['required']:
                # NOTE(review): "''" for Int and "" otherwise looks
                # inverted — confirm against the cjdns admin protocol.
                oargList[arg] = ("''" if func[arg]['type'] == 'Int' else "")
        # Attach a generated method for this admin function to Session.
        setattr(Session, i, _functionFabric(i, argList, oargList, password))
        funcArgs[i] = rargList
        funcOargs[i] = oargList
    session = Session(sock)
    kat = threading.Thread(target=_receiverThread, args=[session])
    kat.setDaemon(True)
    kat.start()
    # Authenticated ping validates the supplied password.
    ret = _callFunc(session, "ping", password, {})
    if 'error' in ret:
        raise exceptions.InvalidAdminPassword(ret.get("error"))
    # Build a human-readable listing of all signatures for introspection.
    session._functions = ""
    funcOargs_c = {}
    for func in funcOargs:
        funcOargs_c[func] = list(
            [key + "=" + str(value) for (key, value) in funcOargs[func].items()])
    for func in availableFunctions:
        session._functions += (
            func + "(" + ', '.join(funcArgs[func] + funcOargs_c[func]) + ")\n")
    return session
Connect to cjdns admin with this attributes
48,963
def connectWithAdminInfo(path=None):
    """Connect to the cjdns admin using credentials from ~/.cjdnsadmin.

    Falls back to the default local credentials when the file is
    missing.  Fix: the original called logger.info(..., file=sys.stderr);
    logging methods do not accept a ``file`` keyword and that call
    raised TypeError in the fallback path.
    """
    if path is None:
        path = os.path.expanduser('~/.cjdnsadmin')
    try:
        with open(path, 'r') as adminInfo:
            data = json.load(adminInfo)
    except IOError:
        logger.info('~/.cjdnsadmin not found; using default credentials')
        data = {
            'password': 'NONE',
            'addr': '127.0.0.1',
            'port': 11234,
        }
    return connect(data['addr'], data['port'], data['password'])
Connect to cjdns admin with data from user file
48,964
def get_options():
    """Parse command-line options for the denovonear CLI.

    Defines three subcommands (cluster, transcripts, rates) sharing a
    common parent parser; exits with usage help when no subcommand is
    chosen.  Returns the parsed argparse.Namespace.
    """
    parser = argparse.ArgumentParser(description='denovonear cli interface')
    # Options shared by every subcommand via parents=[parent].
    parent = argparse.ArgumentParser(add_help=False)
    parent.add_argument("--out", help="output filename")
    parent.add_argument("--rates",
        help="Path to file containing sequence context-based mutation rates.")
    # NOTE(review): the help text below is missing a closing parenthesis.
    parent.add_argument("--genome-build", choices=["grch37",
        "GRCh37", "grch38", "GRCh38"], default="grch37", help="Genome build "
        "that the de novo coordinates are based on (GRCh37 or GRCh38")
    parent.add_argument("--cache-folder",
        default=os.path.join(os.path.expanduser('~'), ".cache", 'denovonear'),
        help="where to cache Ensembl data (default is ~/.cache/denovonear)")
    subparsers = parser.add_subparsers()
    # --- cluster subcommand ---
    cluster = subparsers.add_parser('cluster', parents=[parent],
        description="Tests the proximity of de novo mutations in genes.")
    cluster.add_argument("--in", dest="input", required=True, help="Path to "
        "file listing known mutations in genes. See example file in data folder "
        "for format.")
    cluster.set_defaults(func=clustering)
    # --- transcripts subcommand ---
    transcripts = subparsers.add_parser('transcripts', parents=[parent],
        description="Identify transcripts for a gene containing de novo events.")
    transcripts.add_argument("--de-novos", required=True, help="Path to "
        "file listing de novo variants in genes.")
    transcripts.set_defaults(func=find_transcripts)
    group = transcripts.add_mutually_exclusive_group(required=True)
    group.add_argument("--all-transcripts", action="store_true", default=False,
        help="Flag if you want to identify all transcripts with more than "
        "one de novo on it.")
    group.add_argument("--minimise-transcripts", action="store_true",
        default=False, help="Flag if you want to identify the minimal set of "
        "transcripts to contain all de novos.")
    # --- rates subcommand ---
    rater = subparsers.add_parser("rates", parents=[parent],
        description="determine mutation rates for genes given transcript IDs.")
    rater.add_argument("--genes", help="Path to file "
        "listing HGNC symbols, with one or more transcript IDs per gene. "
        "The tab-separated input format is gene symbol followed by transcript "
        "ID. Alternative transcripts are listed on separate lines.")
    rater.set_defaults(func=gene_rates)
    args = parser.parse_args()
    # No subcommand given: argparse leaves 'func' unset on the namespace.
    if 'func' not in args:
        print('Use one of the subcommands: cluster, rates, or transcripts\n')
        parser.print_help()
        sys.exit()
    return args
get the command line switches
48,965
def to_ipv6(key):
    """Derive the cjdns IPv6 address from a public key ending in '.k'.

    The address is the first 32 hex chars of a double SHA-512 of the
    base32-decoded key, grouped into colon-separated quads.
    """
    if not key.endswith('.k'):
        raise ValueError('Key does not end with .k')
    raw = base32.decode(key[:-2])
    digest = sha512(sha512(raw).digest()).hexdigest()
    return ':'.join(digest[i:i + 4] for i in range(0, 32, 4))
Get IPv6 address from a public key .
48,966
def glob(self):
    """Walk self.path and return paths of files matching self.include.

    When self.recursive is false, subdirectories are pruned so only the
    top level is scanned.
    """
    matches = []
    for root, dirnames, filenames in os.walk(self.path):
        if not self.recursive:
            # Emptying dirnames in place stops os.walk from descending.
            del dirnames[:]
        for pattern in self.include:
            for name in fnmatch.filter(filenames, pattern):
                matches.append(os.path.join(root, name))
    return matches
Traverse directory and return all absolute filenames of files that match the globbing patterns .
48,967
def select_site_view(self, request, form_url=''):
    """Display a choice form to select which site to add settings for.

    With exactly one site available the form is skipped and the user is
    redirected straight to the add form.  Any incoming query string is
    preserved on the redirect.
    """
    if not self.has_add_permission(request):
        raise PermissionDenied
    extra_qs = ''
    if request.META['QUERY_STRING']:
        extra_qs = '&' + request.META['QUERY_STRING']
    site_choices = self.get_site_choices()
    if len(site_choices) == 1:
        # Only one possible site: no need to ask.
        return HttpResponseRedirect(
            '?site_id={0}{1}'.format(site_choices[0][0], extra_qs))
    form = self.select_site_form(
        data=request.POST if request.method == 'POST' else None,
        initial={'site': site_choices[0][0]})
    form.fields['site'].choices = site_choices
    if form.is_valid():
        return HttpResponseRedirect(
            '?site_id={0}{1}'.format(form.cleaned_data['site'], extra_qs))
    # Invalid/initial GET: render the selection form via the admin machinery.
    fieldsets = ((None, {'fields': ('site',)}),)
    adminForm = AdminForm(form, fieldsets, {}, model_admin=self)
    media = self.media + adminForm.media
    context = {
        'title': _('Add %s') % force_text(self.opts.verbose_name),
        'adminform': adminForm,
        'is_popup': '_popup' in request.GET,
        'media': mark_safe(media),
        'errors': AdminErrorList(form, ()),
        'app_label': self.opts.app_label,
    }
    return self.render_select_site_form(request, context, form_url)
Display a choice form to select which site to add settings .
48,968
def render_select_site_form(self, request, context, form_url=''):
    """Render the site-choice form, preferring the most specific template."""
    app_label = self.opts.app_label
    object_name = self.opts.object_name.lower()
    context.update({
        'has_change_permission': self.has_change_permission(request),
        'form_url': mark_safe(form_url),
        'opts': self.opts,
        'add': True,
        'save_on_top': self.save_on_top,
    })
    # Fall back from model-specific to app-level to generic templates.
    templates = self.select_site_form_template or [
        'admin/%s/%s/select_site_form.html' % (app_label, object_name),
        'admin/%s/select_site_form.html' % app_label,
        'admin/usersettings/select_site_form.html',
        'admin/select_site_form.html',
    ]
    return render_to_response(templates, context)
Render the site choice form .
48,969
def xauth(base_url_template=DEFAULT_READER_URL_TEMPLATE, **xargs):
    """Return an OAuth (token, secret) tuple for clients.ReaderClient.

    Credentials are taken from keyword arguments, falling back to the
    READABILITY_* environment variables.  Raises ValueError when the
    server response carries no oauth token (invalid credentials).
    """
    consumer_key = xargs.get('consumer_key') or required_from_env('READABILITY_CONSUMER_KEY')
    consumer_secret = xargs.get('consumer_secret') or required_from_env('READABILITY_CONSUMER_SECRET')
    username = xargs.get('username') or required_from_env('READABILITY_USERNAME')
    password = xargs.get('password') or required_from_env('READABILITY_PASSWORD')
    # xAuth signs the credentials into the POST body itself.
    client = Client(consumer_key, client_secret=consumer_secret, signature_type='BODY')
    url = base_url_template.format(ACCESS_TOKEN_URL)
    headers = {'Content-Type': 'application/x-www-form-urlencoded'}
    params = {
        'x_auth_username': username,
        'x_auth_password': password,
        'x_auth_mode': 'client_auth'
    }
    uri, headers, body = client.sign(url, http_method='POST', body=urlencode(params), headers=headers)
    response = requests.post(uri, data=body)
    logger.debug('POST to %s.', uri)
    # The token endpoint answers with form-encoded bytes.
    token = parse_qs(response.content)
    try:
        token = (token[b'oauth_token'][0].decode(), token[b'oauth_token_secret'][0].decode())
    except KeyError:
        raise ValueError('Invalid Credentials.')
    return token
Returns an OAuth token tuple that can be used with clients . ReaderClient .
48,970
def log_transform(rates):
    """Return tab-joined log10 values for the known rate categories.

    Categories absent from *rates* are skipped entirely; zero or
    negative rates (which cannot be log-scaled) become "NA".
    """
    categories = ('missense', 'nonsense', 'splice_lof', 'splice_region',
                  'synonymous')
    parts = []
    for category in categories:
        if category not in rates:
            continue
        try:
            parts.append(math.log10(rates[category]))
        except ValueError:
            # log10 of zero/negative is undefined.
            parts.append("NA")
    return '\t'.join(map(str, parts))
log10-transform the known rate categories in a rates dict, skipping absent categories and emitting "NA" for zero or negative values; returns the tab-joined result.
48,971
def get_usersettings_model():
    """Return the UserSettings model named by settings.USERSETTINGS_MODEL.

    Raises ImproperlyConfigured when the setting is malformed or the
    referenced model is not installed.
    """
    try:
        from django.apps import apps
        get_model = apps.get_model
    except ImportError:
        # Older Django versions keep get_model in db.models.loading.
        from django.db.models.loading import get_model
    try:
        app_label, model_name = settings.USERSETTINGS_MODEL.split('.')
    except ValueError:
        raise ImproperlyConfigured('USERSETTINGS_MODEL must be of the '
                                   'form "app_label.model_name"')
    model = get_model(app_label, model_name)
    if model is None:
        raise ImproperlyConfigured(
            'USERSETTINGS_MODEL refers to model "%s" that has '
            'not been installed' % settings.USERSETTINGS_MODEL)
    return model
Returns the UserSettings model that is active in this project .
48,972
def get_current_usersettings():
    """Return the current UserSettings for SITE_ID, or the model default."""
    model = get_usersettings_model()
    try:
        return model.objects.get_current()
    except model.DoesNotExist:
        # No settings stored for this site yet: use the model's defaults.
        return model.get_default()
Returns the current UserSettings based on the SITE_ID in the project s settings
48,973
def get_options():
    """Parse command-line options for batch de novo clustering.

    Fix: adjacent string literals were concatenated without separating
    spaces, producing help text like "process denovo" and "Path tofile".
    """
    parser = argparse.ArgumentParser(description="Script to batch process "
        "de novo clustering.")
    parser.add_argument("--in", dest="input", required=True, help="Path to "
        "file listing known mutations in genes. See example file in data "
        "folder for format.")
    parser.add_argument("--temp-dir", required=True,
        help="path to hold intermediate files")
    parser.add_argument("--out", required=True, help="Path to output file.")
    return parser.parse_args()
get the command line options
48,974
def count_missense_per_gene(lines):
    """Count missense variants per gene from tab-separated variant lines.

    Column 0 is the gene symbol, column 3 the consequence.  Every gene
    seen gets an entry (possibly 0); only "missense_variant" rows are
    counted.
    """
    counts = {}
    for line in lines:
        fields = line.split("\t")
        gene, consequence = fields[0], fields[3]
        counts.setdefault(gene, 0)
        if consequence == "missense_variant":
            counts[gene] += 1
    return counts
count the number of missense variants in each gene .
48,975
def split_denovos(denovo_path, temp_dir):
    """Split de novos into one file per gene with >1 missense variant.

    Writes tmp.<N>.txt files (header + that gene's lines) into
    *temp_dir* and returns the number of genes written.  Fix: the
    original opened a new output file per gene inside the loop and
    never closed any handle (resource leak); it also kept an unused
    ``basename`` local.
    """
    with open(denovo_path, "r") as handle:
        lines = handle.readlines()
    header = lines.pop(0)
    counts = count_missense_per_gene(lines)
    # Only genes with more than one missense de novo are clustered.
    counts = {k: v for k, v in counts.items() if v > 1}
    genes = set()
    output = None
    try:
        # Sorting groups each gene's lines contiguously.
        for line in sorted(lines):
            gene = line.split("\t")[0]
            if gene not in counts:
                continue
            if gene not in genes:
                # First line for this gene: start its own output file.
                genes.add(gene)
                if output is not None:
                    output.close()
                path = os.path.join(temp_dir, "tmp.{}.txt".format(len(genes)))
                output = open(path, "w")
                output.write(header)
            output.write(line)
    finally:
        if output is not None:
            output.close()
    return len(genes)
split de novos from an input file into files one for each gene
48,976
def get_random_string():
    """Return a random 32-bit hex string for unique bsub job IDs.

    Retries until the string is not interpretable as a plain number, so
    job IDs cannot collide with numeric LSF identifiers.
    """
    while True:
        candidate = ("%8x" % random.getrandbits(32)).strip()
        if not is_number(candidate):
            return candidate
make a random string which we can use for bsub job IDs so that different jobs do not have the same job IDs .
48,977
def batch_process(de_novo_path, temp_dir, output_path):
    """Set up an LSF job array: cluster per-gene de novos, merge, clean up."""
    temp_dir = tempfile.mkdtemp(dir=temp_dir)
    count = split_denovos(de_novo_path, temp_dir)
    job_name = "denovonear"
    # Job array of `count` tasks, at most 20 running concurrently (%20).
    job_id = "{0}[1-{1}]%20".format(job_name, count)
    basename = os.path.basename(de_novo_path)
    # \$LSB_JOBINDEX is expanded by LSF at run time, one index per task.
    infile = os.path.join(temp_dir, "tmp.\$LSB_JOBINDEX\.txt")
    outfile = os.path.join(temp_dir, "tmp.\$LSB_JOBINDEX\.output")
    command = ["denovonear", "cluster", "--in", infile, "--out", outfile]
    submit_bsub_job(command, job_id, memory=3500, requeue_code=134,
        logfile="clustering.bjob")
    time.sleep(2)
    # Merge job: keep one header, concatenate and sort the remaining rows.
    merge_id = "{}_merge".format(job_name)
    command = ["head", "-n", "1", os.path.join(temp_dir, "tmp.1.output"),
        ">", output_path, "; tail", "-q", "-n", "+2",
        os.path.join(temp_dir, "tmp.*.output"), "|", "sort", ">>",
        output_path]
    submit_bsub_job(command, merge_id, memory=100, dependent_id=job_id,
        logfile="clustering.bjob")
    time.sleep(2)
    # Cleanup job: remove the temp dir once merging has finished.
    submit_bsub_job(["rm", "-r", temp_dir],
        job_id="{}_cleanup".format(job_name), memory=100,
        dependent_id=merge_id, logfile="clustering.bjob")
sets up a lsf job array
48,978
def authenticated_user(self, auth):
    """Return the user that ``auth`` authenticates as."""
    user_json = self.get("/user", auth=auth).json()
    return GogsUser.from_json(user_json)
Returns the user authenticated by auth
48,979
def get_tokens(self, auth, username=None):
    """Return the tokens owned by *username*.

    Defaults to the user authenticated by ``auth``.
    """
    if username is None:
        username = self.authenticated_user(auth).username
    token_list = self.get("/users/{u}/tokens".format(u=username),
                          auth=auth).json()
    return [Token.from_json(entry) for entry in token_list]
Returns the tokens owned by the specified user . If no user is specified uses the user authenticated by auth .
48,980
def create_token(self, auth, name, username=None):
    """Create and return a new token named *name* for *username*.

    Defaults to the user authenticated by ``auth``.
    """
    if username is None:
        username = self.authenticated_user(auth).username
    response = self.post("/users/{u}/tokens".format(u=username),
                         auth=auth, data={"name": name})
    return Token.from_json(response.json())
Creates a new token with the specified name for the specified user . If no user is specified uses user authenticated by auth .
48,981
def ensure_token(self, auth, name, username=None):
    """Return an existing token named *name*, creating one if necessary.

    Defaults to the user authenticated by ``auth``.
    """
    if username is None:
        username = self.authenticated_user(auth).username
    # Reuse the first token with a matching name, if any.
    for token in self.get_tokens(auth, username):
        if token.name == name:
            return token
    return self.create_token(auth, name, username)
Ensures the existence of a token with the specified name for the specified user . Creates a new token if none exists . If no user is specified uses user authenticated by auth .
48,982
def create_repo(self, auth, name, description=None, private=False,
                auto_init=False, gitignore_templates=None,
                license_template=None, readme_template=None,
                organization=None):
    """Create a new repository and return it.

    When *organization* is given the repo is created under that org,
    otherwise under the authenticated user.
    """
    payload = {
        "name": name,
        "description": description,
        "private": private,
        "auto_init": auto_init,
        "gitignores": (",".join(gitignore_templates)
                       if gitignore_templates is not None else None),
        "license": license_template,
        "readme": readme_template,
    }
    # Drop unset fields so the API applies its own defaults.
    payload = {key: value for (key, value) in payload.items()
               if value is not None}
    if organization:
        url = "/org/{0}/repos".format(organization)
    else:
        url = "/user/repos"
    response = self.post(url, auth=auth, data=payload)
    return GogsRepo.from_json(response.json())
Creates a new repository and returns the created repository .
48,983
def repo_exists(self, auth, username, repo_name):
    """Return True if *username* owns a repository named *repo_name*."""
    # NOTE(review): uses self._get while most methods use self.get — confirm.
    repo_path = "/repos/{u}/{r}".format(u=username, r=repo_name)
    return self._get(repo_path, auth=auth).ok
Returns whether a repository with name repo_name owned by the user with username username exists .
48,984
def get_repo(self, auth, username, repo_name):
    """Return the repository named *repo_name* owned by *username*."""
    repo_path = "/repos/{u}/{r}".format(u=username, r=repo_name)
    return GogsRepo.from_json(self.get(repo_path, auth=auth).json())
Returns the repository with name repo_name owned by the user with username username.
48,985
def get_user_repos(self, auth, username):
    """Return the repositories owned by *username*."""
    repos_json = self.get("/users/{u}/repos".format(u=username),
                          auth=auth).json()
    return [GogsRepo.from_json(entry) for entry in repos_json]
Returns the repositories owned by the user with username username .
48,986
def get_branch(self, auth, username, repo_name, branch_name):
    """Return branch *branch_name* of *username*'s *repo_name* repository."""
    branch_path = "/repos/{u}/{r}/branches/{b}".format(
        u=username, r=repo_name, b=branch_name)
    return GogsBranch.from_json(self.get(branch_path, auth=auth).json())
Returns the branch with name branch_name in the repository with name repo_name owned by the user with username username .
48,987
def get_branches(self, auth, username, repo_name):
    """Return all branches of *username*'s *repo_name* repository."""
    branches_json = self.get(
        "/repos/{u}/{r}/branches".format(u=username, r=repo_name),
        auth=auth).json()
    return [GogsBranch.from_json(entry) for entry in branches_json]
Returns the branches in the repository with name repo_name owned by the user with username username .
48,988
def delete_repo(self, auth, username, repo_name):
    """Delete the repository named *repo_name* owned by *username*."""
    self.delete("/repos/{u}/{r}".format(u=username, r=repo_name), auth=auth)
Deletes the repository with name repo_name owned by the user with username username .
48,989
def migrate_repo(self, auth, clone_addr, uid, repo_name, auth_username=None,
                 auth_password=None, mirror=False, private=False,
                 description=None):
    """Migrate a repository from another Git host for the authenticated user.

    NOTE(review): auth_username/auth_password are accepted but never put
    into the payload — confirm whether they should be sent to the API.
    """
    payload = {
        "clone_addr": clone_addr,
        "uid": uid,
        "repo_name": repo_name,
        "mirror": mirror,
        "private": private,
        "description": description,
    }
    # Drop unset optional fields before posting.
    payload = {key: value for (key, value) in payload.items()
               if value is not None}
    response = self.post("/repos/migrate", auth=auth, data=payload)
    return GogsRepo.from_json(response.json())
Migrate a repository from another Git hosting source for the authenticated user .
48,990
def create_user(self, auth, login_name, username, email, password,
                send_notify=False):
    """Create and return a new user (admin-only endpoint)."""
    payload = {
        "login_name": login_name,
        "username": username,
        "email": email,
        "password": password,
        "send_notify": send_notify,
    }
    response = self.post("/admin/users", auth=auth, data=payload)
    return GogsUser.from_json(response.json())
Creates a new user and returns the created user .
48,991
def user_exists(self, username):
    """Return True if a user named *username* exists."""
    # NOTE(review): uses self._get while most methods use self.get — confirm.
    return self._get("/users/{}".format(username)).ok
Returns whether a user with username username exists .
48,992
def search_users(self, username_keyword, limit=10):
    """Return up to *limit* users whose username matches *username_keyword*."""
    query = {"q": username_keyword, "limit": limit}
    matches = self.get("/users/search", params=query).json()["data"]
    return [GogsUser.from_json(entry) for entry in matches]
Searches for users whose username matches username_keyword and returns a list of matched users .
48,993
def get_user(self, auth, username):
    """Return the user with the given *username*."""
    user_json = self.get("/users/{}".format(username), auth=auth).json()
    return GogsUser.from_json(user_json)
Returns a representing the user with username username .
48,994
def update_user(self, auth, username, update):
    """Apply *update* to user *username* and return the updated user."""
    response = self.patch("/admin/users/{}".format(username),
                          auth=auth, data=update.as_dict())
    return GogsUser.from_json(response.json())
Updates the user with username username according to update .
48,995
def delete_user(self, auth, username):
    """Delete user *username*; the user must own no repositories."""
    self.delete("/admin/users/{}".format(username), auth=auth)
Deletes the user with username username . Should only be called if the to - be - deleted user has no repositories .
48,996
def get_repo_hooks(self, auth, username, repo_name):
    """Return all hooks of *username*'s *repo_name* repository."""
    hooks_json = self.get(
        "/repos/{u}/{r}/hooks".format(u=username, r=repo_name),
        auth=auth).json()
    return [GogsRepo.Hook.from_json(entry) for entry in hooks_json]
Returns all hooks of repository with name repo_name owned by the user with username username .
48,997
def create_hook(self, auth, repo_name, hook_type, config, events=None,
                organization=None, active=False):
    """Create and return a new hook on *repo_name*.

    *events* defaults to ["push"].
    """
    payload = {
        "type": hook_type,
        "config": config,
        "events": events if events is not None else ["push"],
        "active": active,
    }
    if organization is not None:
        url = "/repos/{o}/{r}/hooks".format(o=organization, r=repo_name)
    else:
        # NOTE(review): this URL lacks an owner segment — confirm the
        # API accepts it without one.
        url = "/repos/{r}/hooks".format(r=repo_name)
    response = self.post(url, auth=auth, data=payload)
    return GogsRepo.Hook.from_json(response.json())
Creates a new hook and returns the created hook .
48,998
def update_hook(self, auth, repo_name, hook_id, update, organization=None):
    """Apply *update* to hook *hook_id* and return the updated hook."""
    if organization is not None:
        hook_path = "/repos/{o}/{r}/hooks/{i}".format(
            o=organization, r=repo_name, i=hook_id)
    else:
        hook_path = "/repos/{r}/hooks/{i}".format(r=repo_name, i=hook_id)
    # NOTE(review): uses self._patch while update_user uses self.patch — confirm.
    response = self._patch(hook_path, auth=auth, data=update.as_dict())
    return GogsRepo.Hook.from_json(response.json())
Updates hook with id hook_id according to update .
48,999
def delete_hook(self, auth, username, repo_name, hook_id):
    """Delete hook *hook_id* from *username*'s *repo_name* repository."""
    hook_path = "/repos/{u}/{r}/hooks/{i}".format(
        u=username, r=repo_name, i=hook_id)
    self.delete(hook_path, auth=auth)
Deletes the hook with id hook_id for repo with name repo_name owned by the user with username username .