signature
stringlengths
29
44.1k
implementation
stringlengths
0
85.2k
def libvlc_media_set_user_data(p_md, p_new_user_data):
    '''Sets the media descriptor's user_data.

    user_data is specialized data accessed by the host application; VLC.framework
    uses it as a pointer to a native object that references a L{Media} pointer.
    @param p_md: media descriptor object.
    @param p_new_user_data: pointer to user data.
    '''
    # Reuse the cached binding when present, otherwise create it lazily.
    func = _Cfunctions.get('libvlc_media_set_user_data', None)
    if func is None:
        func = _Cfunction('libvlc_media_set_user_data', ((1,), (1,),), None,
                          None, Media, ctypes.c_void_p)
    return func(p_md, p_new_user_data)
def _init_from_csc(self, csc):
    """Initialize data from a CSC matrix.

    :param csc: a scipy.sparse CSC matrix whose ``indptr``/``indices``/``data``
        arrays are handed to the native library to build the matrix handle.
    :raises ValueError: if the indices and data arrays differ in length.
    """
    if len(csc.indices) != len(csc.data):
        raise ValueError('length mismatch: {} vs {}'.format(
            len(csc.indices), len(csc.data)))
    # Opaque handle that the native call below fills in.
    self.handle = ctypes.c_void_p()
    _check_call(_LIB.XGDMatrixCreateFromCSC(
        c_array(ctypes.c_ulong, csc.indptr),
        c_array(ctypes.c_uint, csc.indices),
        c_array(ctypes.c_float, csc.data),
        len(csc.indptr),
        len(csc.data),
        ctypes.byref(self.handle)))
def remove_regions_with_no_gates(regions):
    """Removes all Jove regions from a list of regions.

    :param regions: A list of tuples (regionID, regionName)
    :type regions: list
    :return: A list of regions minus those in jove space (mutated in place)
    :rtype: list
    """
    # Known gateless (Jove) regions.
    jove_regions = [
        (10000004, 'UUA-F4'),
        (10000017, 'J7HZ-F'),
        (10000019, 'A821-A'),
    ]
    for jove_region in jove_regions:
        try:
            regions.remove(jove_region)
        except ValueError:
            # Region not present in the input; nothing to remove.
            pass
    return regions
def add_loghandler(handler):
    """Add a log handler to the LOG_ROOT logger and the root logger.

    The handler is given a standard formatter before being attached.

    :param handler: a ``logging.Handler`` instance to register.
    """
    # 'fmt' rather than 'format' to avoid shadowing the builtin.
    fmt = "%(levelname)s %(name)s %(asctime)s %(threadName)s %(message)s"
    handler.setFormatter(logging.Formatter(fmt))
    logging.getLogger(LOG_ROOT).addHandler(handler)
    logging.getLogger().addHandler(handler)
def eth_getBalance(self, address=None, block=BLOCK_TAG_LATEST):
    """Return the balance of the account at ``address``.

    JSON-RPC ``eth_getBalance``:
    https://github.com/ethereum/wiki/wiki/JSON-RPC#eth_getbalance
    TESTED

    :param address: account address; defaults to this node's coinbase.
    :param block: block number or tag (defaults to "latest").
    :return: the hex response decoded to an int.
    """
    # Fall back to the node's coinbase account when no address is given.
    address = address or self.eth_coinbase()
    block = validate_block(block)
    return hex_to_dec(self._call("eth_getBalance", [address, block]))
def firmware_download_input_protocol_type_scp_protocol_scp_user(self, **kwargs):
    """Auto Generated Code.

    Builds the XML payload for the firmware-download RPC's
    scp-protocol ``user`` leaf and hands it to the callback.

    :param kwargs: must contain ``user`` (SCP user name); may contain
        ``callback`` to override the default request callback.
    :return: the callback's result for the built config element.
    """
    config = ET.Element("config")
    firmware_download = ET.Element("firmware_download")
    # NOTE(review): 'config' is immediately rebound to the firmware_download
    # element, discarding the "config" element created above — this mirrors
    # the auto-generated pattern; confirm before changing.
    config = firmware_download
    input = ET.SubElement(firmware_download, "input")
    protocol_type = ET.SubElement(input, "protocol-type")
    scp_protocol = ET.SubElement(protocol_type, "scp-protocol")
    scp = ET.SubElement(scp_protocol, "scp")
    user = ET.SubElement(scp, "user")
    user.text = kwargs.pop('user')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def to_gds(self, multiplier):
    """Convert this object to a series of GDSII elements.

    Parameters
    ----------
    multiplier : number
        A number that multiplies all dimensions written in the GDSII
        elements.

    Returns
    -------
    out : string
        The GDSII binary string that represents this object.
    """
    data = []
    for ii in range(len(self.polygons)):
        # GDSII caps polygon size; reject anything above 4094 vertices.
        # BUGFIX: the original message dropped the word "vertices".
        if len(self.polygons[ii]) > 4094:
            raise ValueError("[GDSPY] Polygons with more than 4094 vertices "
                             "are not supported by the GDSII format.")
        # BOUNDARY, LAYER and DATATYPE records, then the XY record header.
        data.append(struct.pack(
            '>10h', 4, 0x0800, 6, 0x0D02, self.layers[ii], 6, 0x0E02,
            self.datatypes[ii], 12 + 8 * len(self.polygons[ii]), 0x1003))
        # Each vertex as a pair of big-endian 4-byte signed integers.
        data.extend(
            struct.pack('>2l', int(round(point[0] * multiplier)),
                        int(round(point[1] * multiplier)))
            for point in self.polygons[ii])
        # Close the polygon by repeating the first vertex, then ENDEL.
        data.append(struct.pack(
            '>2l2h', int(round(self.polygons[ii][0][0] * multiplier)),
            int(round(self.polygons[ii][0][1] * multiplier)), 4, 0x1100))
    return b''.join(data)
async def handle_post_request(self, environ):
    """Handle a long-polling POST request from the client.

    Reads the request body (bounded by the server's max_http_buffer_size),
    decodes it as a payload and dispatches each packet to ``receive``.

    :param environ: the WSGI environ dict for the request.
    :raises exceptions.ContentTooLongError: if the declared Content-Length
        exceeds the configured maximum.
    """
    length = int(environ.get('CONTENT_LENGTH', '0'))
    if length > self.server.max_http_buffer_size:
        raise exceptions.ContentTooLongError()
    else:
        body = await environ['wsgi.input'].read(length)
        p = payload.Payload(encoded_payload=body)
        # Hand every decoded packet to the receive handler in order.
        for pkt in p.packets:
            await self.receive(pkt)
def get_sign_command(self, filename, signer, sign_password, keystore=None):
    """Return a suitable command for signing a file.

    :param filename: The pathname of the file to be signed.
    :param signer: The identifier of the signer of the file.
    :param sign_password: The passphrase for the signer's private key used
        for signing.
    :param keystore: The path to a directory which contains the keys used in
        verification. If not specified, the instance's ``gpg_home`` attribute
        is used instead.
    :return: A 2-tuple of the signing command (a list suitable to be passed
        to :class:`subprocess.Popen`) and the signature output path.
    """
    cmd = [self.gpg, '--status-fd', '2', '--no-tty']
    homedir = keystore if keystore is not None else self.gpg_home
    if homedir:
        cmd.extend(['--homedir', homedir])
    if sign_password is not None:
        # The passphrase will be fed to gpg on stdin.
        cmd.extend(['--batch', '--passphrase-fd', '0'])
    workdir = tempfile.mkdtemp()
    sig_file = os.path.join(workdir, os.path.basename(filename) + '.asc')
    cmd.extend(['--detach-sign', '--armor', '--local-user', signer,
                '--output', sig_file, filename])
    logger.debug('invoking: %s', ' '.join(cmd))
    return cmd, sig_file
def compute_angle_weights_1d(angles):
    """Compute a weight for each angle from the spacing of its neighbors.

    Parameters
    ----------
    angles: 1d ndarray of length A
        Angles in radians

    Returns
    -------
    weights: 1d ndarray of length A
        The weights for each angle

    Notes
    -----
    The angles are taken modulo PI, not modulo 2*PI. This reduces artifacts
    when the angular coverage is between PI and 2*PI but does not affect the
    result when the angles cover the full 2*PI interval.
    """
    # Shift so the smallest angle is zero, then wrap into [0, pi).
    wrapped = (angles.flatten() - angles.min()) % np.pi
    order = np.argsort(wrapped)
    ordered = wrapped[order]
    # Distance between each angle's two neighbors (cyclic, modulo pi).
    gaps = (np.roll(ordered, -1) - np.roll(ordered, 1)) % np.pi
    # Normalize so the weights average to one.
    sorted_weights = gaps / gaps.sum() * gaps.shape[0]
    # Scatter the weights back to the original ordering.
    weights = np.zeros_like(sorted_weights)
    weights[order] = sorted_weights
    return weights
def ren(i):
    """Rename an entry (data) inside a module/repo.

    Input:  {
              (repo_uoa)         - repo UOA
              module_uoa         - module UOA
              data_uoa           - old data UOA
              new_data_uoa       - new data alias
                 or
              new_data_uid       - new data UID (leave empty to keep old one)
                 or
              xcids[0]           - {'data_uoa'} - new data UOA
              (new_uid)          - generate new UID
              (remove_alias)     - if 'yes', remove alias
              (add_uid_to_alias) - if 'yes', add UID to alias
              (share)            - if 'yes', try to remove old entry via GIT
                                   and add new one
            }

    Output: {
              return  - return code = 0, if successful
                                    > 0, if error
              (error) - error text if return > 0
            }
    """
    # Check if global writing is allowed
    r = check_writing({'delete': 'yes'})
    if r['return'] > 0: return r
    o = i.get('out', '')
    ruoa = i.get('repo_uoa', '')
    muoa = i.get('module_uoa', '')
    duoa = i.get('data_uoa', '')
    if muoa == '':
        return {'return': 1, 'error': 'module UOA is not defined'}
    if duoa == '':
        return {'return': 1, 'error': 'data UOA is not defined'}
    # Attempt to load
    ii = {'module_uoa': muoa, 'data_uoa': duoa}
    if ruoa != '': ii['repo_uoa'] = ruoa
    r = load(ii)
    if r['return'] > 0: return r
    rdd = r                 # full load result, re-indexed at the end
    muid = r['module_uid']
    pr = r['path_repo']
    ddi = r['info']
    duoa = r['data_uoa']
    duid = r['data_uid']
    dalias = r['data_alias']
    # If the entry's display name equals its alias, rename it too.
    change_data_name = (ddi.get('data_name', '') == dalias)
    p = r['path']
    pm = r['path_module']
    p1 = os.path.join(pm, cfg['subdir_ck_ext'])
    pn = p
    # Check if writing is allowed
    ii = {'module_uoa': muoa, 'module_uid': muid,
          'repo_uoa': ruoa, 'repo_uid': r['repo_uid']}
    r = check_writing(ii)
    if r['return'] > 0: return r
    rd = r.get('repo_dict', {})
    rshared = rd.get('shared', '')
    rsync = rd.get('sync', '')
    shr = i.get('share', '')
    if shr == 'yes':
        rshared = 'git'
        rsync = 'yes'
    # Check if index -> delete old index
    if cfg.get('use_indexing', '') == 'yes':
        path = '/' + muid + '/' + duid + '/1'
        ri = access_index_server({'request': 'DELETE', 'path': path})
        if ri['return'] > 0: return ri
    # Check new data UOA
    nduoa = i.get('new_data_uoa', '')
    nduid = i.get('new_data_uid', '')
    if nduid == '' and i.get('new_uid', '') == 'yes':
        rx = gen_uid({})
        if rx['return'] > 0: return rx
        nduid = rx['data_uid']
    xcids = i.get('xcids', [])
    if len(xcids) > 0:
        xcid = xcids[0]
        nduoa = xcid.get('data_uoa', '')
    if i.get('remove_alias', '') == 'yes':
        nduoa = duid
    if nduoa == '':
        nduoa = duoa
    if nduid != duid:
        # Check that new UID doesn't exist
        p2 = os.path.join(p1, cfg['file_alias_u'] + nduid)
        if os.path.isfile(p2):
            return {'return': 1, 'error': 'new UID already exists'}
    # Check if adding UID to alias
    if i.get('add_uid_to_alias', '') == 'yes':
        x = nduid
        if x == '': x = duid
        nduoa += '-' + x
    if nduoa != duoa:
        if not is_uoa(nduoa):
            return {'return': 1, 'error': 'alias has disallowed characters'}
        # Need to rename directory
        if os.path.isdir(nduoa):
            return {'return': 1, 'error': 'new alias already exists'}
        pn = os.path.join(pm, nduoa)
        if rshared != '' and rsync == 'yes':
            # Shared repo: copy to the new path, register the add and the
            # removal with the VCS, then remove the old directory.
            import shutil
            shutil.copytree(p, pn)
            ppp = os.getcwd()
            pp = os.path.split(pn)
            pp0 = pp[0]
            pp1 = pp[1]
            os.chdir(pp0)
            ss = cfg['repo_types'][rshared]['add'].replace('$#files#$', pp1)
            rx = os.system(ss)
            pp = os.path.split(p)
            pp0 = pp[0]
            pp1 = pp[1]
            ss = cfg['repo_types'][rshared]['rm'].replace('$#files#$', pp1)
            rx = os.system(ss)
            os.chdir(ppp)
            if os.path.isdir(p):
                shutil.rmtree(p, onerror=rm_read_only)
        else:
            os.rename(p, pn)
    if nduid != '' or change_data_name:
        # Change backup_data_uid in info file
        ppi = os.path.join(pn, cfg['subdir_ck_ext'], cfg['file_info'])
        if nduid != '': ddi['backup_data_uid'] = nduid
        if change_data_name: ddi['data_name'] = nduoa
        rx = save_json_to_file({'json_file': ppi, 'dict': ddi,
                                'sort_keys': 'yes'})
        if rx['return'] > 0: return rx
    if nduid == '': nduid = duid
    # Remove old alias disambiguator
    if not is_uid(duoa):
        r = delete_alias({'path': pm, 'data_uid': duid,
                          'data_alias': duoa, 'share': shr})
        if r['return'] > 0: return r
    # Add new disambiguator, if needed
    if not is_uid(nduoa):
        if not os.path.isdir(p1):
            # Create .cm directory
            try:
                os.mkdir(p1)
            except Exception as e:
                return {'return': 1, 'error': format(e)}
        # Write UOA disambiguator
        p3 = os.path.join(p1, cfg['file_alias_a'] + nduoa)
        ru = save_text_file({'text_file': p3, 'string': nduid + '\n'})
        if ru['return'] > 0: return ru
        # Write UID disambiguator
        p2 = os.path.join(p1, cfg['file_alias_u'] + nduid)
        ru = save_text_file({'text_file': p2, 'string': nduoa + '\n'})
        if ru['return'] > 0: return ru
        if rshared != '' and rsync == 'yes':
            # Register the new alias files with the VCS.
            ppp = os.getcwd()
            pp = os.path.split(p1)
            pp0 = pp[0]
            pp1 = pp[1]
            os.chdir(pp0)
            ss = cfg['repo_types'][rshared]['add'].replace('$#files#$', pp1)
            rx = os.system(ss)
            os.chdir(ppp)
    # Check if index and add new
    if cfg.get('use_indexing', '') == 'yes':
        if is_uid(nduoa): nduid = nduoa
        path = '/' + muid + '/' + nduid + '/1'
        ri = access_index_server({'request': 'DELETE', 'path': path})
        if ri['return'] > 0: return ri
        ri = access_index_server({'request': 'PUT', 'path': path,
                                  'dict': rdd})
        if ri['return'] > 0: return ri
    if o == 'con':
        out('Entry was successfully renamed!')
    return {'return': 0}
def add_node(self, node, adapter_number, port_number, label=None, dump=True):
    """Add a node to the link

    Validates that the target port exists, is free, and is compatible with
    the node already attached (no self-connection, no cloud/NAT-to-cloud/NAT,
    matching link types). When the second node is added the link is actually
    created and both nodes are notified.

    :param node: node to attach.
    :param adapter_number: adapter index on the node.
    :param port_number: port index on the adapter.
    :param label: optional label dict; a default one is built when None.
    :param dump: Dump project on disk
    """
    port = node.get_port(adapter_number, port_number)
    if port is None:
        raise aiohttp.web.HTTPNotFound(
            text="Port {}/{} for {} not found".format(
                adapter_number, port_number, node.name))
    if port.link is not None:
        raise aiohttp.web.HTTPConflict(text="Port is already used")
    self._link_type = port.link_type
    for other_node in self._nodes:
        if other_node["node"] == node:
            raise aiohttp.web.HTTPConflict(text="Cannot connect to itself")
        if node.node_type in ["nat", "cloud"]:
            if other_node["node"].node_type in ["nat", "cloud"]:
                raise aiohttp.web.HTTPConflict(
                    text="It's not allowed to connect a {} to a {}".format(
                        other_node["node"].node_type, node.node_type))
        # Check if user is not connecting serial => ethernet
        other_port = other_node["node"].get_port(
            other_node["adapter_number"], other_node["port_number"])
        if other_port is None:
            raise aiohttp.web.HTTPNotFound(
                text="Port {}/{} for {} not found".format(
                    other_node["adapter_number"], other_node["port_number"],
                    other_node["node"].name))
        if port.link_type != other_port.link_type:
            raise aiohttp.web.HTTPConflict(
                text="It's not allowed to connect a {} to a {}".format(
                    other_port.link_type, port.link_type))
    if label is None:
        # Default label: the "adapter/port" text near the node.
        label = {
            "x": -10,
            "y": -10,
            "rotation": 0,
            "text": html.escape("{}/{}".format(adapter_number, port_number)),
            "style": "font-size: 10; font-style: Verdana"
        }
    self._nodes.append({
        "node": node,
        "adapter_number": adapter_number,
        "port_number": port_number,
        "port": port,
        "label": label
    })
    # A link is materialized once both endpoints are present.
    if len(self._nodes) == 2:
        yield from self.create()
        for n in self._nodes:
            n["node"].add_link(self)
            n["port"].link = self
        self._created = True
        self._project.controller.notification.emit(
            "link.created", self.__json__())
    if dump:
        self._project.dump()
def on_bar_data(self, bars):
    """Process the incoming tick data array."""
    for tick in XmlHelper.node_iter(bars):
        # Collect the element names present on this tick, then build a
        # name -> value mapping for it.
        element_names = [str(tick.getElement(idx).name())
                         for idx in range(tick.numElements())]
        self.response.bars.append(
            {name: XmlHelper.get_child_value(tick, name)
             for name in element_names})
def find_element(self, value, by=By.ID, update=False) -> Elements:
    '''Find an element (the first match) in the cached UI dump.'''
    # Refresh the node cache when requested or when it is empty.
    if update or not self._nodes:
        self.uidump()
    for node in self._nodes:
        if node.attrib[by] != value:
            continue
        bounds = node.attrib['bounds']
        coord = [int(num) for num in re.findall(r'\d+', bounds)]
        # Center of the bounding box is the click target.
        click_point = ((coord[0] + coord[2]) / 2, (coord[1] + coord[3]) / 2)
        return self._element_cls(self, node.attrib, by, value, coord, click_point)
    raise NoSuchElementException(f'No such element: {by}={value!r}.')
def ip_addresses(self, value):
    """Set the ``ip_addresses`` field from a list of addresses.

    :param value: a list of ip addresses
    :raises ValueError: if ``value`` is not a list.
    """
    if not isinstance(value, list):
        raise ValueError('ip_addresses value must be a list')
    # In some cases self.data might be None, so start from an empty dict.
    if self.data is None:
        self.data = {}
    # Store the addresses as a single comma-separated string.
    self.data['ip_addresses'] = ', '.join(value)
def Set(self, interface_name, property_name, value, *args, **kwargs):
    '''Standard D-Bus API for setting a property value.

    :param interface_name: D-Bus interface owning the property.
    :param property_name: name of the property to set.
    :param value: new property value.
    :raises dbus.exceptions.DBusException: UnknownInterface/UnknownProperty
        when the interface or property is not known.
    '''
    self.log('Set %s.%s%s' % (interface_name, property_name,
                              self.format_args((value,))))
    try:
        iface_props = self.props[interface_name]
    except KeyError:
        raise dbus.exceptions.DBusException(
            'no such interface ' + interface_name,
            name=self.interface + '.UnknownInterface')
    if property_name not in iface_props:
        raise dbus.exceptions.DBusException(
            'no such property ' + property_name,
            name=self.interface + '.UnknownProperty')
    iface_props[property_name] = value
    # Notify listeners per the org.freedesktop.DBus.Properties spec.
    self.EmitSignal('org.freedesktop.DBus.Properties', 'PropertiesChanged',
                    'sa{sv}as',
                    [interface_name,
                     dbus.Dictionary({property_name: value}, signature='sv'),
                     dbus.Array([], signature='s')])
def find_contig_distribution(contig_lengths_dict):
    """Determine the frequency of different contig size ranges for each strain.

    :param contig_lengths_dict: dictionary of strain name: iterable of contig lengths
    :return: contig_len_dist_dict: dictionary of strain name: tuple of contig
        size range frequencies (>1000000, >500000, >100000, >50000, >10000,
        >5000, other)
    """
    # Bin lower bounds, largest first; a length lands in the first bin it exceeds.
    thresholds = (1000000, 500000, 100000, 50000, 10000, 5000)
    contig_len_dist_dict = {}
    for file_name, contig_lengths in contig_lengths_dict.items():
        # One counter per threshold plus a final "everything else" bucket.
        counts = [0] * (len(thresholds) + 1)
        for contig_length in contig_lengths:
            for index, threshold in enumerate(thresholds):
                if contig_length > threshold:
                    counts[index] += 1
                    break
            else:
                counts[-1] += 1
        contig_len_dist_dict[file_name] = tuple(counts)
    return contig_len_dist_dict
def _badpath(path, base):
    """Return True if ``path`` escapes ``base``.

    joinpath will ignore base if path is absolute, so the joined path must
    be resolved and re-checked against the base prefix.
    """
    resolved = _resolved(os.path.join(base, path))
    return not resolved.startswith(base)
def contains_only(self, elements):
    """Ensures :attr:`subject` contains all of *elements*, which must be an
    iterable, and no other items."""
    # First reject any subject item outside the allowed elements ...
    for item in self._subject:
        if item in elements:
            continue
        raise self._error_factory(_format(
            "Expected {} to have only {}, but it contains {}",
            self._subject, elements, item))
    # ... then require that every element is actually present.
    self.contains_all_of(elements)
    return ChainInspector(self._subject)
def update(self, other, copy=True, *args, **kwargs):
    """Update this element from another element of the same type.

    :param other: same type as this.
    :param bool copy: copy other before updating attributes.
    :param tuple args: copy args.
    :param dict kwargs: copy kwargs.
    :return: this
    """
    if not other:  # dirty hack for python2.6
        return self
    if not isinstance(other, self.__class__):
        raise TypeError(
            'Wrong element to update with {0}: {1}'.format(self, other))
    source = other.copy(*args, **kwargs) if copy else other
    # Copy over every non-None slot value.
    for slot in source.__slots__:
        value = getattr(source, slot)
        if value is not None:
            setattr(self, slot, value)
    return self
def _output_terms(self):
    """Return the terms that are outputs of this pipeline.

    Includes all terms registered as data outputs of the pipeline, plus the
    screen, if present.
    """
    terms = list(self._columns.values())
    if self.screen is not None:
        terms.append(self.screen)
    return terms
def _char_density(self, c, font=ImageFont.load_default()):
    """Count the number of black pixels in a rendered character.

    :param c: the single character to render.
    :param font: PIL font used for rendering. NOTE: the default argument is
        evaluated once at definition time, so the default font is shared
        across calls.
    """
    # 1-bit image sized to the glyph, background color 255 (white).
    image = Image.new('1', font.getsize(c), color=255)
    draw = ImageDraw.Draw(image)
    # NOTE(review): the glyph is drawn with fill="white" onto a white
    # background while the Counter below counts 0 (black) pixels — verify
    # this renders as intended for mode '1' images before relying on it.
    draw.text((0, 0), c, fill="white", font=font)
    # Count pixels whose value is 0 (black).
    return collections.Counter(image.getdata())[0]
def add_mpl_dendrogram ( dfr , fig , heatmap_gs , orientation = "col" ) : """Return a dendrogram and corresponding gridspec , attached to the fig Modifies the fig in - place . Orientation is either ' row ' or ' col ' and determines location and orientation of the rendered dendrogram ."""
# Row or column axes ? if orientation == "row" : dists = distance . squareform ( distance . pdist ( dfr ) ) spec = heatmap_gs [ 1 , 0 ] orient = "left" nrows , ncols = 1 , 2 height_ratios = [ 1 ] else : # Column dendrogram dists = distance . squareform ( distance . pdist ( dfr . T ) ) spec = heatmap_gs [ 0 , 1 ] orient = "top" nrows , ncols = 2 , 1 height_ratios = [ 1 , 0.15 ] # Create row dendrogram axis gspec = gridspec . GridSpecFromSubplotSpec ( nrows , ncols , subplot_spec = spec , wspace = 0.0 , hspace = 0.1 , height_ratios = height_ratios , ) dend_axes = fig . add_subplot ( gspec [ 0 , 0 ] ) dend = sch . dendrogram ( sch . linkage ( distance . squareform ( dists ) , method = "complete" ) , color_threshold = np . inf , orientation = orient , ) clean_axis ( dend_axes ) return { "dendrogram" : dend , "gridspec" : gspec }
def disconnect(self):
    """Close the TCP connection with the graphite server.

    Any error raised during shutdown (including ``self.socket`` being None
    or not a socket) is ignored; ``self.socket`` is always reset to None.
    """
    # The original had three separate except arms that all performed the
    # same reset the finally clause already guarantees; one broad handler
    # preserves the behavior (AttributeError is an Exception subclass).
    try:
        # 1 == SHUT_WR: stop sending on the socket.
        self.socket.shutdown(1)
    except Exception:
        pass
    finally:
        self.socket = None
def sender(self, jid: str):
    """Set the jid of the sender.

    Args:
        jid (str): jid of the sender, or None to clear it.

    Raises:
        TypeError: if jid is neither None nor a string.
    """
    if jid is not None and not isinstance(jid, str):
        raise TypeError("'sender' MUST be a string")
    if jid is None:
        self._sender = None
    else:
        self._sender = aioxmpp.JID.fromstr(jid)
def request_middleware(api=None):
    """Registers a middleware function that will be called on every request.

    :param api: optional API (or module) to attach the middleware to; when
        omitted the API is derived from the decorated function's module.
    :return: a decorator that registers its target as request middleware and
        returns it unchanged.
    """
    def decorator(middleware_method):
        # Resolve the target API: an explicit argument wins, otherwise infer
        # it from the object that defines the middleware function.
        apply_to_api = hug.API(api) if api else hug.api.from_object(middleware_method)

        class MiddlewareRouter(object):
            __slots__ = ()

            def process_request(self, request, response):
                return middleware_method(request, response)
        apply_to_api.http.add_middleware(MiddlewareRouter())
        # Return the original function so it stays directly callable.
        return middleware_method
    return decorator
def base_ws_uri():
    """Base websocket URL that is advertised to external clients.

    Useful when the websocket URL advertised to the clients needs to be
    customized (typically when running behind NAT, firewall, etc.)
    """
    ws_config = config['wsserver']
    return '{}://{}:{}'.format(ws_config['advertised_scheme'],
                               ws_config['advertised_host'],
                               ws_config['advertised_port'])
def decorate(text, style):
    """Console decoration style definitions.

    :param text: the text string to decorate
    :type text: str
    :param style: the style used to decorate the string
    :type style: str
    :return: a decorated string ('' for an unknown style)
    :rtype: str
    """
    # Lazy dispatch: the original dict-literal form eagerly evaluated every
    # style (including three regex substitutions) on each call, only to pick
    # one entry out of the dict.
    def quote_head(color):
        return click.style("\n" + chr(9485) + (chr(9480) * 2) + ' ' + text,
                           fg=color)

    def quote_body(color):
        return re.sub('^', click.style(chr(9482) + ' ', fg=color), text,
                      flags=re.M)

    styles = {
        'step-maj': lambda: click.style("\n" + '> ' + text, fg='yellow',
                                        bold=True),
        'step-min': lambda: click.style(' - ' + text + ' ', bold=True),
        'item-maj': lambda: click.style(' - ' + text + ' '),
        'item-min': lambda: click.style(' - ' + text + ' '),
        'quote-head-fail': lambda: quote_head('red'),
        'quote-head-pass': lambda: quote_head('green'),
        'quote-head-skip': lambda: quote_head('yellow'),
        'quote-fail': lambda: quote_body('red'),
        'quote-pass': lambda: quote_body('green'),
        'quote-skip': lambda: quote_body('yellow'),
        'fail': lambda: click.style(text + ' ', fg='red'),
        'pass': lambda: click.style(text + ' ', fg='green'),
        'skip': lambda: click.style(text + ' ', fg='yellow'),
    }
    maker = styles.get(style)
    return maker() if maker else ''
def validate_json(self, json_value, validator):
    """Validates and returns the parsed JSON string.

    If the value is not valid JSON, ParseError will be raised. If it is
    valid JSON, but does not validate against the schema,
    SchemaValidationError will be raised.

    :param str json_value: JSON value.
    :param validator: An instance of a jsonschema validator class, as
        created by Schema.get_validator().
    :returns: the parsed JSON value.
    """
    parsed = parse_json(json_value)
    return self.validate(parsed, validator)
def add_cookie_header(self, request, referrer_host=None):
    '''Wrapped ``add_cookie_header``.

    Args:
        request: An instance of :class:`.http.request.Request`.
        referrer_host (str): An hostname or IP address of the referrer URL.
    '''
    # Let the cookie jar operate on a urllib-compatible shim request.
    shim_request = convert_http_request(request, referrer_host)
    self._cookie_jar.add_cookie_header(shim_request)
    # Replace the original headers with the jar-augmented ones.
    request.fields.clear()
    for header_name, header_value in shim_request.header_items():
        request.fields.add(header_name, header_value)
def body_echo(cls, request,
              foo: (Ptypes.body, String('A body parameter'))) -> [(200, 'Ok', String)]:
    '''Echo the body parameter.

    Generator-style handler: yields for a random number of iterations
    (cooperative scheduling) before producing the 200 response via Respond.
    '''
    log.info('Echoing body param, value is: {}'.format(foo))
    # Yield control a random number of times before answering.
    for i in range(randint(0, MAX_LOOP_DURATION)):
        yield
    msg = 'The value sent was: {}'.format(foo)
    Respond(200, msg)
def array_repeat(col, count):
    """Collection function: creates an array containing a column repeated
    count times.

    >>> df = spark.createDataFrame([('ab',)], ['data'])
    >>> df.select(array_repeat(df.data, 3).alias('r')).collect()
    [Row(r=[u'ab', u'ab', u'ab'])]
    """
    sc = SparkContext._active_spark_context
    # Delegate to the JVM-side Spark SQL function of the same name.
    return Column(sc._jvm.functions.array_repeat(_to_java_column(col), count))
def run(args):
    """Start an oct project.

    :param Namespace args: the command-line arguments
    """
    kwargs = vars(args)
    # 'func' is the argparse sub-command dispatcher; drop it before
    # forwarding the remaining options.
    if 'func' in kwargs:
        del kwargs['func']
    project_path = kwargs.pop('project_path')
    config = configure(project_path, kwargs.get('config_file'))
    output_dir = kwargs.pop('output_dir', None) or generate_output_path(args, project_path)
    stats_handler.init_stats(output_dir, config)
    # Use a random topic when no explicit publisher channel was requested.
    topic = args.publisher_channel or uuid.uuid4().hex
    print("External publishing topic is %s" % topic)
    start_hq(output_dir, config, topic, **kwargs)
    if not args.no_results:
        process_results(output_dir, config)
    copy_config(project_path, output_dir)
    print('done.\n')
def to_graphviz(self) -> str:
    """Converts the FSM behaviour structure to Graphviz syntax.

    Returns:
        str: the graph in Graphviz syntax
    """
    # Spaces are not valid in Graphviz identifiers; replace with underscores.
    edges = []
    for source, targets in self._transitions.items():
        source_id = source.replace(" ", "_")
        edges.extend("{0} -> {1};".format(source_id, target.replace(" ", "_"))
                     for target in targets)
    return ("digraph finite_state_machine { rankdir=LR; node [fixedsize=true];"
            + "".join(edges) + "}")
def predict(df, filters, model_fit, ytransform=None):
    """Apply model to new data to predict new dependent values.

    Parameters
    ----------
    df : pandas.DataFrame
    filters : list of str
        Any filters to apply before doing prediction.
    model_fit : statsmodels.regression.linear_model.OLSResults
        Result of model estimation.
    ytransform : callable, optional
        A function to call on the array of predicted output. For example,
        if the model relation is predicting the log of price, you might
        pass ``ytransform=np.exp`` so that the results reflect actual
        price. By default no transformation is applied.

    Returns
    -------
    result : pandas.Series
        Predicted values as a pandas Series. Will have the index of `df`
        after applying filters.
    """
    df = util.apply_filter_query(df, filters)
    with log_start_finish('statsmodels predict', logger):
        sim_data = model_fit.predict(df)
    # A length mismatch means statsmodels silently dropped rows (NaNs).
    if len(sim_data) != len(df):
        raise ModelEvaluationError(
            'Predicted data does not have the same length as input. '
            'This suggests there are null values in one or more of '
            'the input columns.')
    if ytransform:
        sim_data = ytransform(sim_data)
    return pd.Series(sim_data, index=df.index)
def get_listeners(self, event_type: str) -> List[Callable]:
    """Get all listeners of a particular type of event.

    Raises:
        ValueError: if the event type is not registered.
    """
    if event_type in self.events:
        return self.events.get_listeners(event_type)
    raise ValueError(f'No event {event_type} in system.')
def __Script_Editor_Output_plainTextEdit_contextMenuEvent(self, event):
    """Reimplements the :meth:`QPlainTextEdit.contextMenuEvent` method.

    :param event: QEvent.
    :type event: QEvent
    """
    # Start from the widget's standard context menu and extend it with the
    # component's "Edit Selected Path" action.
    menu = self.Script_Editor_Output_plainTextEdit.createStandardContextMenu()
    menu.addSeparator()
    menu.addAction(self.__engine.actions_manager.register_action(
        "Actions|Umbra|Components|factory.script_editor|Edit Selected Path",
        slot=self.__edit_selected_path_action__triggered))
    # Show the menu modally at the global cursor position.
    menu.exec_(event.globalPos())
def generate_transaction_id(stmt_line):
    """Generate a pseudo-unique id for the given statement line.

    This function can be used in statement parsers when a real transaction
    id is not available in the source statement.
    """
    # Hash the (date, memo, amount) triple and present it as a digit string.
    fingerprint = (stmt_line.date, stmt_line.memo, stmt_line.amount)
    return str(abs(hash(fingerprint)))
def set_config_from_commit(self, commit):
    """Given a git commit, applies config specified in the commit message.

    Supported:
     - gitlint-ignore: all

    :param commit: commit object whose ``message.body`` lines are scanned
        for gitlint directives.
    """
    # Compile once instead of per body line (hoisted loop invariant); the
    # always-true len(groups()) == 1 guard is dropped — the pattern has
    # exactly one group by construction.
    pattern = re.compile(r"^gitlint-ignore:\s*(.*)")
    for line in commit.message.body:
        matches = pattern.match(line)
        if matches:
            self.set_option('general', 'ignore', matches.group(1))
def write_object_to_file(self, obj, path='.', filename=None):
    """Convert obj (dict) to json string and write to file.

    :param obj: dict expected to carry '_type' and '_id' keys (used to
        derive a default filename when none is given).
    :param path: directory to write into.
    :param filename: target filename; derived from obj['_type']/obj['_id']
        when None.
    :return: the full path of the written file.
    """
    output = self.json_dumps(obj) + '\n'
    if filename is None:
        filename = self.safe_filename(obj['_type'], obj['_id'])
    filename = os.path.join(path, filename)
    self.pr_inf("Writing to file: " + filename)
    with open(filename, 'w') as f:
        f.write(output)
    # self.pr_dbg("Contents: " + output)
    return filename
def get_build_path(self):
    """Used to determine where to build the page.

    Override this if you would like your page at a different location. By
    default it will be built at self.get_url() + "/index.html"
    """
    # NOTE(review): mixes bare `path.join` and `os.path.join` — presumably
    # `from os import path` is in scope at module level; confirm.
    target_path = path.join(settings.BUILD_DIR, self.get_url().lstrip('/'))
    # Create the target directory on the build filesystem if missing.
    if not self.fs.exists(target_path):
        logger.debug("Creating {}".format(target_path))
        self.fs.makedirs(target_path)
    return os.path.join(target_path, 'index.html')
def decode ( response ) : """Decodes and returns the response as JSON ( dict ) or raise BackendException : param response : requests . response object : return : dict"""
# Second stage . Errors are backend errors ( bad login , bad url , . . . ) try : response . raise_for_status ( ) except requests . HTTPError as exp : response = { "_status" : "ERR" , "_error" : { "message" : exp , "code" : response . status_code } , "_issues" : { "message" : exp , "code" : response . status_code } } return response else : return response . json ( )
def info ( self , correlation_id , message , * args , ** kwargs ) : """Logs an important information message : param correlation _ id : ( optional ) transaction id to trace execution through call chain . : param message : a human - readable message to log . : param args : arguments to parameterize the message . : param kwargs : arguments to parameterize the message ."""
self . _format_and_write ( LogLevel . Info , correlation_id , None , message , args , kwargs )
def addFilter(self, field, value):
    """Add a filter to the search.

    :param field: what field filter (see GitHub search).
    :type field: str.
    :param value: value of the filter (see GitHub search).
    :type value: str.
    """
    # NOTE(review): with 'or', the ':' prefix is skipped only when the value
    # contains ALL of '<', '>' and '..' — i.e. effectively never skipped. If
    # the intent was to skip for range qualifiers, 'and' would be needed, but
    # GitHub's syntax (e.g. "stars:>100") wants the colon for ranges anyway;
    # confirm the intended behavior before changing.
    if "<" not in value or ">" not in value or ".." not in value:
        value = ":" + value
    # URL-encode the value and append; '+' separates multiple qualifiers.
    if self.__urlFilters:
        self.__urlFilters += "+" + field + str(quote(value))
    else:
        self.__urlFilters += field + str(quote(value))
def service(action, service_name, **kwargs):
    """Control a system service.

    :param action: the action to take on the service
    :param service_name: the name of the service to perform the action on
    :param **kwargs: additional params to be passed to the service
        command in the form of key=value.
    :return: True if the command exited with status 0.
    """
    # Prefer systemctl on systemd hosts, fall back to SysV `service`.
    if init_is_systemd():
        cmd = ['systemctl', action, service_name]
    else:
        cmd = ['service', service_name, action]
    # Extra key=value parameters are appended verbatim.
    cmd.extend('%s=%s' % (key, value)
               for key, value in six.iteritems(kwargs))
    return subprocess.call(cmd) == 0
def watched(cls, *args, **kwargs):
    """Create and return a :class:`Watchable` with its :class:`Spectator`.

    See :func:`watch` for more info on :class:`Spectator` registration.

    Parameters
    ----------
    cls : type
        A subclass of :class:`Watchable`
    *args :
        Positional arguments used to create the instance
    **kwargs :
        Keyword arguments used to create the instance.
    """
    instance = cls(*args, **kwargs)
    spectator = watch(instance)
    return instance, spectator
def find_first_object(self, ObjectClass, **kwargs):
    """Retrieve the first object of type ``ObjectClass`` matching the
    specified filters in ``**kwargs`` -- case sensitive.

    :return: the first matching object, or ``None`` when nothing matches.
    """
    # AND together one equality condition per keyword filter.
    condition = None
    for attr_name, attr_value in kwargs.items():
        clause = getattr(ObjectClass, attr_name) == attr_value
        condition = clause if condition is None else condition & clause
    # The original did `list(...)[0]`, raising IndexError when nothing
    # matched; a "find first" helper should return None in that case.
    for obj in ObjectClass.scan(condition, limit=1):
        return obj
    return None
def gen_TKIP_RC4_key(TSC, TA, TK):
    """Implement TKIP WEPSeed generation.

    TSC: packet IV (sequence of 6 ints)
    TA: target addr bytes (sequence of 6 ints)
    TK: temporal key (sequence of 16 ints)

    Returns the 16-byte per-packet RC4 seed (WEPSeed) as bytes.
    """
    assert len(TSC) == 6
    assert len(TA) == 6
    assert len(TK) == 16
    assert all(isinstance(x, six.integer_types) for x in TSC + TA + TK)

    # Phase 1
    # 802.11i p. 54

    # Phase 1 - Step 1: seed TTAK from the IV and transmitter address,
    # combining byte pairs into 16-bit words (little-endian via _MK16).
    TTAK = []
    TTAK.append(_MK16(TSC[3], TSC[2]))
    TTAK.append(_MK16(TSC[5], TSC[4]))
    TTAK.append(_MK16(TA[1], TA[0]))
    TTAK.append(_MK16(TA[3], TA[2]))
    TTAK.append(_MK16(TA[5], TA[4]))

    # Phase 1 - Step 2: iterated S-box mixing of TTAK with the temporal
    # key; j alternates 0/2 to walk different TK byte pairs.
    for i in range(PHASE1_LOOP_CNT):
        j = 2 * (i & 1)
        TTAK[0] = _CAST16(TTAK[0] + _SBOX16(TTAK[4] ^ _MK16(TK[1 + j], TK[0 + j])))  # noqa: E501
        TTAK[1] = _CAST16(TTAK[1] + _SBOX16(TTAK[0] ^ _MK16(TK[5 + j], TK[4 + j])))  # noqa: E501
        TTAK[2] = _CAST16(TTAK[2] + _SBOX16(TTAK[1] ^ _MK16(TK[9 + j], TK[8 + j])))  # noqa: E501
        TTAK[3] = _CAST16(TTAK[3] + _SBOX16(TTAK[2] ^ _MK16(TK[13 + j], TK[12 + j])))  # noqa: E501
        TTAK[4] = _CAST16(TTAK[4] + _SBOX16(TTAK[3] ^ _MK16(TK[1 + j], TK[0 + j])) + i)  # noqa: E501

    # Phase 2
    # 802.11i p. 56

    # Phase 2 - Step 1: extend TTAK with the low IV bytes.
    PPK = list(TTAK)
    PPK.append(_CAST16(TTAK[4] + _MK16(TSC[1], TSC[0])))

    # Phase 2 - Step 2: one round of S-box mixing followed by rotate-right
    # mixing, all against the temporal key.
    PPK[0] = _CAST16(PPK[0] + _SBOX16(PPK[5] ^ _MK16(TK[1], TK[0])))
    PPK[1] = _CAST16(PPK[1] + _SBOX16(PPK[0] ^ _MK16(TK[3], TK[2])))
    PPK[2] = _CAST16(PPK[2] + _SBOX16(PPK[1] ^ _MK16(TK[5], TK[4])))
    PPK[3] = _CAST16(PPK[3] + _SBOX16(PPK[2] ^ _MK16(TK[7], TK[6])))
    PPK[4] = _CAST16(PPK[4] + _SBOX16(PPK[3] ^ _MK16(TK[9], TK[8])))
    PPK[5] = _CAST16(PPK[5] + _SBOX16(PPK[4] ^ _MK16(TK[11], TK[10])))
    PPK[0] = _CAST16(PPK[0] + _RotR1(PPK[5] ^ _MK16(TK[13], TK[12])))
    PPK[1] = _CAST16(PPK[1] + _RotR1(PPK[0] ^ _MK16(TK[15], TK[14])))
    PPK[2] = _CAST16(PPK[2] + _RotR1(PPK[1]))
    PPK[3] = _CAST16(PPK[3] + _RotR1(PPK[2]))
    PPK[4] = _CAST16(PPK[4] + _RotR1(PPK[3]))
    PPK[5] = _CAST16(PPK[5] + _RotR1(PPK[4]))

    # Phase 2 - Step 3: assemble the 16-byte RC4 seed. The first three
    # bytes encode the IV (byte 1 masked per the 802.11i construction),
    # the remainder serializes PPK low/high bytes.
    WEPSeed = []
    WEPSeed.append(TSC[1])
    WEPSeed.append((TSC[1] | 0x20) & 0x7f)
    WEPSeed.append(TSC[0])
    WEPSeed.append(((PPK[5] ^ _MK16(TK[1], TK[0])) >> 1) & 0xFF)
    for i in range(6):
        WEPSeed.append(PPK[i] & 0xFF)
        WEPSeed.append(PPK[i] >> 8)
    assert len(WEPSeed) == 16
    return b"".join(chb(x) for x in WEPSeed)
def set_value(self, value, layer=None, source=None):
    """Set a value for a particular layer with optional source metadata.

    Parameters
    ----------
    value : str
        Data to store in the node.
    layer : str
        Name of the layer to use. If None, the outermost (last) layer
        is used.
    source : str
        Metadata indicating the source of this value (e.g. a file path)

    Raises
    ------
    TypeError
        If the node is frozen
    """
    if self._frozen:
        raise TypeError('Frozen ConfigNode does not support assignment')
    # Default to the outermost layer when none was named.
    # NOTE(review): an unknown layer name is stored as-is; no KeyError
    # is raised for missing layers here -- confirm intended contract.
    target_layer = layer if layer else self._layers[-1]
    self._values[target_layer] = (source, value)
def declareAsOntology(self, graph):
    """Declare the output file as an owl:Ontology with version info.

    e.g. <http://data.monarchinitiative.org/ttl/biogrid.ttl> a owl:Ontology ;
         owl:versionIRI
         <https://archive.monarchinitiative.org/YYYYMM/ttl/biogrid.ttl>

    TEC: note that dipper reformats external data as RDF triples with a
    minimal set of terms from proper ontologies; including more than that
    minimal set would let dipper artifacts diverge from those ontologies.
    Further information will be augmented in the dataset object.

    :param graph: the graph to declare the ontology in
    :return:
    """
    model = Model(graph)
    # is self.outfile suffix set yet???
    ontology_file_id = 'MonarchData:' + self.name + ".ttl"
    model.addOntologyDeclaration(ontology_file_id)
    # Use today's date as the version info string.
    ontology_version = datetime.now().strftime("%Y-%m-%d")
    # TEC this means the MonarchArchive IRI needs the release updated;
    # maybe extract the version info from there.
    # Should not hardcode the suffix as it may change.
    archive_url = 'MonarchArchive:' + 'ttl/' + self.name + '.ttl'
    model.addOWLVersionIRI(ontology_file_id, archive_url)
    model.addOWLVersionInfo(ontology_file_id, ontology_version)
def record_coverage_zero(self, rule, offset):
    """Add an LCOV "DA" entry recording that this selector was parsed
    (with zero hits)."""
    line_number = rule.source_line + offset
    self.coverage_lines.append('DA:{},0'.format(line_number))
def _process_response(response):
    """Process the raw AWS response, returning either the mapped
    exception or the deserialized response.

    :param tornado.concurrent.Future response: The request future
    :rtype: dict or list
    :raises: sprockets_dynamodb.exceptions.DynamoDBException
    """
    error = response.exception()
    if error:
        # Translate known AWS error types into library exceptions.
        if isinstance(error, aws_exceptions.AWSError):
            error_type = error.args[1]['type']
            if error_type in exceptions.MAP:
                raise exceptions.MAP[error_type](error.args[1]['message'])
        raise error
    http_response = response.result()
    if not http_response or not http_response.body:
        raise exceptions.DynamoDBException('empty response')
    return json.loads(http_response.body.decode('utf-8'))
def process_custom(custom):
    """Validate a mapping of custom pseudo-class selectors.

    Returns a dict keyed by the unescaped, lower-cased selector name.
    Raises SelectorSyntaxError for invalid names and KeyError for
    duplicate registrations.
    """
    custom_selectors = {}
    if custom is None:
        return custom_selectors
    for key, value in custom.items():
        name = util.lower(key)
        if RE_CUSTOM.match(name) is None:
            raise SelectorSyntaxError(
                "The name '{}' is not a valid custom pseudo-class name".format(name))
        if name in custom_selectors:
            raise KeyError(
                "The custom selector '{}' has already been registered".format(name))
        custom_selectors[css_unescape(name)] = value
    return custom_selectors
def replace(self, infile):
    '''Replace: swap a byte sequence at an arbitrary position with an
    arbitrarily chosen byte sequence of the same size.'''
    # Skip the 31-byte header; only the payload chunks are mutated.
    gf = infile[31:]
    same_size_index = []
    # Keep drawing a random chunk until at least one *other* chunk of
    # the same length exists (the drawn index always matches itself).
    # NOTE(review): if no two chunks ever share a length this loop never
    # terminates -- confirm inputs guarantee duplicate sizes.
    while len(same_size_index) <= 1:
        index = random.randint(0, len(gf) - 1)
        index_len = len(gf[index])
        same_size_index = [i for (i, g) in enumerate(gf)
                           if len(g) == index_len]
    else:
        # while/else: runs once the loop condition fails. The candidate
        # list still contains `index` itself, so the swap may be a no-op.
        same_size_index = random.choice(same_size_index[:])
    gf[index], gf[same_size_index] = gf[same_size_index], gf[index]
    return infile[:31] + gf
def fillna ( self , value = None , method = None , axis = None , inplace = False , limit = None , downcast = None , ** kwargs ) : """Fill NA / NaN values using the specified method . Args : value : Value to use to fill holes . This value cannot be a list . method : Method to use for filling holes in reindexed Series pad . ffill : propagate last valid observation forward to next valid backfill . bfill : use NEXT valid observation to fill gap . axis : 0 or ' index ' , 1 or ' columns ' . inplace : If True , fill in place . Note : this will modify any other views on this object . limit : If method is specified , this is the maximum number of consecutive NaN values to forward / backward fill . In other words , if there is a gap with more than this number of consecutive NaNs , it will only be partially filled . If method is not specified , this is the maximum number of entries along the entire axis where NaNs will be filled . Must be greater than 0 if not None . downcast : A dict of item - > dtype of what to downcast if possible , or the string ' infer ' which will try to downcast to an appropriate equal type . Returns : filled : DataFrame"""
# TODO implement value passed as DataFrame / Series if isinstance ( value , BasePandasDataset ) : new_query_compiler = self . _default_to_pandas ( "fillna" , value = value . _to_pandas ( ) , method = method , axis = axis , inplace = False , limit = limit , downcast = downcast , ** kwargs ) . _query_compiler return self . _create_or_update_from_compiler ( new_query_compiler , inplace ) inplace = validate_bool_kwarg ( inplace , "inplace" ) axis = self . _get_axis_number ( axis ) if axis is not None else 0 if isinstance ( value , ( list , tuple ) ) : raise TypeError ( '"value" parameter must be a scalar or dict, but ' 'you passed a "{0}"' . format ( type ( value ) . __name__ ) ) if value is None and method is None : raise ValueError ( "must specify a fill method or value" ) if value is not None and method is not None : raise ValueError ( "cannot specify both a fill method and value" ) if method is not None and method not in [ "backfill" , "bfill" , "pad" , "ffill" ] : expecting = "pad (ffill) or backfill (bfill)" msg = "Invalid fill method. Expecting {expecting}. Got {method}" . format ( expecting = expecting , method = method ) raise ValueError ( msg ) new_query_compiler = self . _query_compiler . fillna ( value = value , method = method , axis = axis , inplace = False , limit = limit , downcast = downcast , ** kwargs ) return self . _create_or_update_from_compiler ( new_query_compiler , inplace )
def get_hdrs_len(self):
    # type: () -> int
    """Compute the length of the hdrs field.

    The length of the padlen field, the priority information fields and
    the actual padding are subtracted from the length of the string that
    was provided to the pre_dissect fun of the pkt parameter.

    @return int: the length of the hdrs field
    @raise AssertionError
    """
    padding_len = self.getfieldval('padlen')
    fld, fval = self.getfield_and_val('padlen')
    padding_len_len = fld.i2len(self, fval)
    # exclusive flag + stream dependency are bit fields; count bits.
    bit_cnt = self.get_field('exclusive').size
    bit_cnt += self.get_field('stream_dependency').size
    fld, fval = self.getfield_and_val('weight')
    weight_len = fld.i2len(self, fval)
    hdrs_len = int(
        self.s_len
        - padding_len_len
        - padding_len
        - (bit_cnt / 8)
        - weight_len
    )
    assert hdrs_len >= 0
    return hdrs_len
def get_file_to_path(self, share_name, directory_name, file_name, file_path,
                     open_mode='wb', start_range=None, end_range=None,
                     validate_content=False, progress_callback=None,
                     max_connections=2, timeout=None):
    '''Downloads a file to a file path, with automatic chunking and
    progress notifications. Returns an instance of File with properties
    and metadata.

    :param str share_name: Name of existing share.
    :param str directory_name: The path to the directory.
    :param str file_name: Name of existing file.
    :param str file_path: Path of file to write to.
    :param str open_mode:
        Mode to use when opening the file. Append-only open modes prevent
        parallel download, so max_connections must be 1 with them.
    :param int start_range:
        Start of the inclusive byte range to download. If no end_range is
        given, all bytes after start_range are downloaded.
    :param int end_range:
        End of the inclusive byte range to download; requires start_range.
    :param bool validate_content:
        If True, validate an MD5 hash for each retrieved portion of the
        file (the service only returns transactional MD5s for chunks of
        4MB or less).
    :param progress_callback:
        Callback for progress with signature func(current, total).
    :type progress_callback: callback function in format of func(current, total)
    :param int max_connections:
        If 2 or greater, the first self.MAX_SINGLE_GET_SIZE bytes are
        fetched, then the remainder is downloaded in parallel chunks of
        self.MAX_CHUNK_GET_SIZE using this many threads. If 1, a single
        large get request is done.
    :param int timeout:
        Timeout in seconds, applied to each service call individually.
    :return: A File with properties and metadata.
    :rtype: :class:`~azure.storage.file.models.File`
    '''
    _validate_not_none('share_name', share_name)
    _validate_not_none('file_name', file_name)
    _validate_not_none('file_path', file_path)
    _validate_not_none('open_mode', open_mode)
    # Append modes are not seekable, so parallel download is impossible.
    if 'a' in open_mode and max_connections > 1:
        raise ValueError(_ERROR_PARALLEL_NOT_SEEKABLE)
    with open(file_path, open_mode) as stream:
        return self.get_file_to_stream(
            share_name, directory_name, file_name, stream,
            start_range, end_range, validate_content,
            progress_callback, max_connections, timeout)
def mixedToUnder(s):  # pragma: no cover
    """Convert mixed-case to underscore-separated lower case.

    Sample:
        >>> mixedToUnder("FooBarBaz")
        'foo_bar_baz'

    Special case for ID:
        >>> mixedToUnder("FooBarID")
        'foo_bar_id'
    """
    # An 'ID' suffix would otherwise be split into '_i_d'.
    if s.endswith('ID'):
        return mixedToUnder(s[:-2] + "_id")
    translated = _mixedToUnderRE.sub(mixedToUnderSub, s)
    return translated[1:] if translated.startswith('_') else translated
def find(*_, **kwargs):
    """Find user by id/email"""
    click.echo(green('\nFind user:'))
    click.echo(green('-' * 40))
    # App context is required for the user lookup.
    with get_app().app_context():
        user = find_user(kwargs)
        if not user:
            click.echo(red('Not found\n'))
        else:
            click.echo(str(user) + '\n')
def reset_state(self):
    """Reset connection-related attributes to their default values.

    Especially useful when initializing a newly created :class:`SMTP`
    instance and when closing an existing SMTP session: it allows the
    same SMTP instance to connect several times.
    """
    # Responses to the last HELO/EHLO commands, as (code, message).
    self.last_helo_response = (None, None)
    self.last_ehlo_response = (None, None)
    # Server capabilities discovered via EHLO.
    self.supports_esmtp = False
    self.esmtp_extensions = {}
    self.auth_mechanisms = []
    self.ssl_context = False
    # Transport-level objects for the (now absent) connection.
    self.reader = None
    self.writer = None
    self.transport = None
def trip(self, origin_id, dest_id, date=None):
    """Request a trip between two stops; defaults to the current time."""
    when = date if date else datetime.now()
    response = self._request(
        'trip',
        originId=origin_id,
        destId=dest_id,
        date=when.strftime(DATE_FORMAT),
        time=when.strftime(TIME_FORMAT))
    return _get_node(response, 'TripList', 'Trip')
def install_tab_event_filter(self, value):
    """Install an event filter to capture mouse events in the tabs of a
    QTabBar holding tabified dockwidgets."""
    dock_tabbar = None
    # Scan every QTabBar; a later bar with a matching tab title wins,
    # mirroring the original inner-break-only behaviour.
    for tabbar in self.main.findChildren(QTabBar):
        for tab_index in range(tabbar.count()):
            if tabbar.tabText(tab_index) == self.title:
                dock_tabbar = tabbar
                break
    if dock_tabbar is not None:
        self.dock_tabbar = dock_tabbar
        # Install filter only once per QTabBar
        if getattr(self.dock_tabbar, 'filter', None) is None:
            self.dock_tabbar.filter = TabFilter(self.dock_tabbar, self.main)
            self.dock_tabbar.installEventFilter(self.dock_tabbar.filter)
def set_selections(path=None, selection=None, clear=False, saltenv='base'):
    '''
    Change package state in the dpkg database.

    The state can be any one of, documented in ``dpkg(1)``:

    - install
    - hold
    - deinstall
    - purge

    This command is commonly used to mark specific packages to be held from
    being upgraded, that is, to be kept at a certain version. When a state is
    changed to anything but being held, then it is typically followed by
    ``apt-get -u dselect-upgrade``.

    Note: Be careful with the ``clear`` argument, since it will start with
    setting all packages to deinstall state.

    Returns a dict of dicts containing the package names, and the new and old
    states:

    .. code-block:: python

        {'<host>':
            {'<package>': {'new': '<new-state>',
                           'old': '<old-state>'}
            },
        }

    CLI Example:

    .. code-block:: bash

        salt '*' pkg.set_selections selection='{"install": ["netcat"]}'
        salt '*' pkg.set_selections selection='{"hold": ["openssh-server", "openssh-client"]}'
        salt '*' pkg.set_selections salt://path/to/file
        salt '*' pkg.set_selections salt://path/to/file clear=True
    '''
    ret = {}
    # Nothing to do without a source of selections.
    if not path and not selection:
        return ret
    if path and selection:
        err = ('The \'selection\' and \'path\' arguments to '
               'pkg.set_selections are mutually exclusive, and cannot be '
               'specified together')
        raise SaltInvocationError(err)
    # A string selection is expected to be a YAML mapping of state -> pkgs.
    if isinstance(selection, six.string_types):
        try:
            selection = salt.utils.yaml.safe_load(selection)
        except (salt.utils.yaml.parser.ParserError,
                salt.utils.yaml.scanner.ScannerError) as exc:
            raise SaltInvocationError(
                'Improperly-formatted selection: {0}'.format(exc))
    if path:
        # Pull the selections file from the fileserver and parse it.
        path = __salt__['cp.cache_file'](path, saltenv)
        with salt.utils.files.fopen(path, 'r') as ifile:
            content = [salt.utils.stringutils.to_unicode(x)
                       for x in ifile.readlines()]
        selection = _parse_selections(content)
    if selection:
        valid_states = ('install', 'hold', 'deinstall', 'purge')
        bad_states = [x for x in selection if x not in valid_states]
        if bad_states:
            raise SaltInvocationError(
                'Invalid state(s): {0}'.format(', '.join(bad_states)))
        if clear:
            # Reset every package to deinstall before applying selections.
            cmd = ['dpkg', '--clear-selections']
            if not __opts__['test']:
                result = _call_apt(cmd, scope=False)
                if result['retcode'] != 0:
                    err = ('Running dpkg --clear-selections failed: '
                           '{0}'.format(result['stderr']))
                    log.error(err)
                    raise CommandExecutionError(err)
        # Reverse map of package -> current state, used to skip packages
        # that are already in the requested state.
        sel_revmap = {}
        for _state, _pkgs in six.iteritems(get_selections()):
            sel_revmap.update(dict((_pkg, _state) for _pkg in _pkgs))
        for _state, _pkgs in six.iteritems(selection):
            for _pkg in _pkgs:
                if _state == sel_revmap.get(_pkg):
                    continue
                cmd = ['dpkg', '--set-selections']
                cmd_in = '{0} {1}'.format(_pkg, _state)
                if not __opts__['test']:
                    result = _call_apt(cmd, scope=False, stdin=cmd_in)
                    if result['retcode'] != 0:
                        log.error('failed to set state %s for package %s',
                                  _state, _pkg)
                    else:
                        ret[_pkg] = {'old': sel_revmap.get(_pkg),
                                     'new': _state}
    return ret
def mod_categorical_expval(p):
    """Expected value of categorical distribution with parent p of
    length k-1. An implicit k'th category is assumed to exist with
    associated probability 1-sum(p)."""
    # Extend p with the implicit final category, then E[X] = sum_i i*p_i.
    full_p = extend_dirichlet(p)
    return np.sum([index * prob for index, prob in enumerate(full_p)])
def load_key(self, path):
    """Load key and secret from file.

    :param path: path to a file whose first two lines are the key and
        the secret, respectively
    """
    with open(path, 'r') as key_file:
        self.key = key_file.readline().strip()
        self.secret = key_file.readline().strip()
def _extractErrorString ( request ) : """Extract error string from a failed UPnP call . : param request : the failed request result : type request : requests . Response : return : an extracted error text or empty str : rtype : str"""
errorStr = "" tag = None # noinspection PyBroadException try : # parse XML return root = ET . fromstring ( request . text . encode ( 'utf-8' ) ) tag = root [ 0 ] [ 0 ] except : # return an empty string as we can not parse the structure return errorStr for element in tag . getiterator ( ) : tagName = element . tag . lower ( ) if tagName . endswith ( "string" ) : errorStr += element . text + " " elif tagName . endswith ( "description" ) : errorStr += element . text + " " return errorStr
def load_json(filename, gzip_mode=False):
    '''Return the json-file data, with all strings utf-8 encoded.'''
    opener = gzip.open if gzip_mode else open
    try:
        with opener(filename, 'rt') as fh:
            data = json.load(fh)
        return convert_unicode_2_utf8(data)
    except AttributeError:
        # Python-2.6: file objects returned here are not context managers.
        fh = opener(filename, 'rt')
        data = json.load(fh)
        fh.close()
        return convert_unicode_2_utf8(data)
def update_config(self):
    """Create or update the db config row for this term.

    Requires being bound to a db tree.
    """
    dataset = self._top._config.dataset
    session = object_session(self._top._config)
    # logger.debug('Updating term config. dataset: {}, type: {}, key: {}, value: {}'.format(
    #     dataset, self._top._type, self._key, self.get()))
    # Ensure the parent term has a config row first, so ours can link to it.
    if not self._parent._config:
        self._parent.update_config()
    self._config, created = _get_config_instance(
        self, session,
        parent=self._parent._config,
        d_vid=dataset.vid,
        type=self._top._type,
        key=self._key,
        dataset=dataset)
    if created:
        self._top._cached_configs[self._get_path()] = self._config
    # We update ScalarTerm and ListTerm values only. Composite terms
    # (DictTerm for example) should not contain value.
    if isinstance(self, (ScalarTerm, ListTerm)):
        if self._config.value != self.get():
            self._config.value = self.get()
            session.merge(self._config)
            session.commit()
    self._top._add_valid(self._config)
def uniquify(seq):
    """Return unique values in a list in the original order.

    See: http://www.peterbe.com/plog/uniqifiers-benchmark

    Args:
        seq (list): original list.

    Returns:
        list: list without duplicates preserving original order.
    """
    seen = set()
    unique = []
    for item in seq:
        if item not in seen:
            seen.add(item)
            unique.append(item)
    return unique
def unpickle_docs(self):
    """Set the group pointers for the docstrings that have groups."""
    for doc in self.docstring:
        parent = doc.parent_name
        if parent is not None and parent in self.groups:
            doc.group = self.groups[parent]
def nn(self, x, k=1, radius=np.inf, eps=0.0, p=2):
    """Find the k nearest neighbors of x in the observed input data.

    :arg x: center
    :arg k: the number of nearest neighbors to return (default: 1)
    :arg eps: approximate nearest neighbors. The k-th returned value is
        guaranteed to be no further than (1 + eps) times the distance to
        the real k-th nearest neighbor.
    :arg p: which Minkowski p-norm to use (default: 2, euclidean)
    :arg radius: the maximum radius (default: +inf)
    :return: distance and indexes of found nearest neighbors.
    """
    assert len(x) == self.dim, 'dimension of input {} does not match expected dimension {}.'.format(len(x), self.dim)
    # Never request more neighbors than stored points.
    k_x = min(k, self.size)
    # Because linear models require the x vector to be extended to
    # [1.0] + x to accommodate a constant, points are stored that way.
    return self._nn(np.array(x), k_x, radius=radius, eps=eps, p=p)
def get_column_metadata(conn, table: str, schema='public'):
    """Yield column metadata records following the db.Column parameter
    specification (name, data_type, nullable)."""
    query = """\
SELECT attname as name, format_type(atttypid, atttypmod) AS data_type, NOT attnotnull AS nullable FROM pg_catalog.pg_attribute WHERE attrelid=%s::regclass AND attnum > 0 AND NOT attisdropped ORDER BY attnum;"""
    qualified_name = compile_qualified_name(table, schema=schema)
    # Re-yield the streamed rows one at a time.
    for record in select_dict(conn, query, params=(qualified_name,)):
        yield record
def cygpath(path):
    """Use :meth:`git.cmd.Git.polish_url()` instead, that works on any
    environment."""
    # Already a cygwin or UNC path: leave untouched.
    if path.startswith(('/cygdrive', '//')):
        return path
    for regex, parser, recurse in _cygpath_parsers:
        match = regex.match(path)
        if match is None:
            continue
        path = parser(*match.groups())
        if recurse:
            path = cygpath(path)
        return path
    # No parser matched: fall back to the generic expansion.
    return _cygexpath(None, path)
def pull_folder(self, path, decode=False):
    """Retrieve a folder at `path`. Returns the folder's contents zipped.

    Android only.

    - _path_ - the path to the folder on the device
    - _decode_ - True/False decode the data (base64) before returning it
      (default=False)
    """
    driver = self._current_application()
    contents = driver.pull_folder(path)
    return base64.b64decode(contents) if decode else contents
def setdefault(self, key, default=None):
    """Set `default` if the key is not in the cache, otherwise leave it
    unchanged. Return the value of this key."""
    # The lock's context manager is equivalent to acquire/try/finally/release.
    with self._wlock:
        try:
            return self[key]
        except KeyError:
            self[key] = default
            return default
def AND(*args, **kwargs):
    """ALL args must not raise an exception when called incrementally.

    If an exception is specified, raise it, otherwise raise the
    callable's exception.

    :params iterable[Certifier] args: The certifiers to call
    :param callable kwargs['exc']: Callable that accepts the unexpectedly
        raised exception as argument and returns an exception to raise.
    :raises CertifierError: The first certifier error if at least one
        raises a certifier error.
    """
    for certify in args:
        try:
            certify()
        except CertifierError as error:
            # Wrap the failure when the caller supplied an `exc` factory,
            # otherwise re-raise the original certifier error.
            wrapper = kwargs.get('exc', None)
            if wrapper is not None:
                raise wrapper(error)
            raise
def p_foreach_variable(p):
    # The docstring below is the ply/yacc grammar rule -- do not edit
    # without updating the parser grammar.
    '''foreach_variable : VARIABLE
                        | AND VARIABLE'''
    if len(p) == 2:
        # Plain $var binding.
        p[0] = ast.ForeachVariable(p[1], False, lineno=p.lineno(1))
    else:
        # &$var -- by-reference binding (second flag is True).
        p[0] = ast.ForeachVariable(p[2], True, lineno=p.lineno(1))
def from_array(array):
    """Deserialize a new InvoiceMessage from a given dictionary.

    :return: new InvoiceMessage instance.
    :rtype: InvoiceMessage
    """
    if array is None or not array:
        return None
    # end if
    assert_type_or_raise(array, dict, parameter_name="array")
    from pytgbot.api_types.sendable.payments import LabeledPrice
    from pytgbot.api_types.sendable.reply_markup import InlineKeyboardMarkup

    data = {}
    data['title'] = u(array.get('title'))
    data['description'] = u(array.get('description'))
    data['payload'] = u(array.get('payload'))
    data['provider_token'] = u(array.get('provider_token'))
    data['start_parameter'] = u(array.get('start_parameter'))
    data['currency'] = u(array.get('currency'))
    data['prices'] = LabeledPrice.from_array_list(array.get('prices'), list_level=1)
    # BUG FIX: removed the unreachable, broken branch
    # `elif isinstance(array.get('chat_id'), None): data['receiver'] = None(...)`
    # -- isinstance() with None raises TypeError and None is not callable;
    # the None case is already handled by the first branch.
    if array.get('chat_id') is None:
        data['receiver'] = None
    elif isinstance(array.get('chat_id'), str):
        data['receiver'] = u(array.get('chat_id'))
    elif isinstance(array.get('chat_id'), int):
        data['receiver'] = int(array.get('chat_id'))
    else:
        raise TypeError('Unknown type, must be one of str, int or None.')
    # end if
    if array.get('reply_to_message_id') is None:
        data['reply_id'] = None
    elif isinstance(array.get('reply_to_message_id'), DEFAULT_MESSAGE_ID):
        data['reply_id'] = DEFAULT_MESSAGE_ID(array.get('reply_to_message_id'))
    elif isinstance(array.get('reply_to_message_id'), int):
        data['reply_id'] = int(array.get('reply_to_message_id'))
    else:
        raise TypeError('Unknown type, must be one of DEFAULT_MESSAGE_ID, int or None.')
    # end if
    # Optional fields: only convert when present.
    data['provider_data'] = u(array.get('provider_data')) if array.get('provider_data') is not None else None
    data['photo_url'] = u(array.get('photo_url')) if array.get('photo_url') is not None else None
    data['photo_size'] = int(array.get('photo_size')) if array.get('photo_size') is not None else None
    data['photo_width'] = int(array.get('photo_width')) if array.get('photo_width') is not None else None
    data['photo_height'] = int(array.get('photo_height')) if array.get('photo_height') is not None else None
    data['need_name'] = bool(array.get('need_name')) if array.get('need_name') is not None else None
    data['need_phone_number'] = bool(array.get('need_phone_number')) if array.get('need_phone_number') is not None else None
    data['need_email'] = bool(array.get('need_email')) if array.get('need_email') is not None else None
    data['need_shipping_address'] = bool(array.get('need_shipping_address')) if array.get('need_shipping_address') is not None else None
    data['send_phone_number_to_provider'] = bool(array.get('send_phone_number_to_provider')) if array.get('send_phone_number_to_provider') is not None else None
    data['send_email_to_provider'] = bool(array.get('send_email_to_provider')) if array.get('send_email_to_provider') is not None else None
    data['is_flexible'] = bool(array.get('is_flexible')) if array.get('is_flexible') is not None else None
    data['disable_notification'] = bool(array.get('disable_notification')) if array.get('disable_notification') is not None else None
    data['reply_markup'] = InlineKeyboardMarkup.from_array(array.get('reply_markup')) if array.get('reply_markup') is not None else None
    return InvoiceMessage(**data)
def cosi_pdf(z, k=1):
    """Probability density from Equation (11) of Morton & Winn (2014).

    Numerically integrates ``cosi_integrand`` from ``z`` to 1 and applies
    the Fisher-distribution normalization constant for concentration ``k``.
    """
    normalization = 2 * k / (np.pi * np.sinh(k))
    integral, _abserr = quad(cosi_integrand, z, 1, args=(k, z))
    return normalization * integral
def ddspmt(t, peak_delay=6, under_delay=16, peak_disp=1, under_disp=1, p_u_ratio=6):
    """SPM canonical HRF dispersion derivative, values for time values `t`

    Parameters
    ----------
    t : array-like
        vector of times at which to sample HRF
    peak_delay : float, optional
        delay of response peak, passed through to the underlying HRF
    under_delay : float, optional
        delay of undershoot
    peak_disp : float, optional
        width (dispersion) of peak; the derivative is taken around this value
    under_disp : float, optional
        width (dispersion) of undershoot
    p_u_ratio : float, optional
        peak to undershoot ratio

    Returns
    -------
    hrf : array
        vector length ``len(t)`` of samples from HRF dispersion derivative at
        times `t`

    Notes
    -----
    [1] This is the canonical HRF dispersion derivative function as used in SPM.
    [2] It is the numerical difference between the HRF sampled at time `t`,
        and values at `t` for another HRF shape with a small change (0.01) in
        the peak dispersion parameter (``peak_disp`` in :func:`spm_hrf_compat`).

    The previous implementation sampled the baseline term as ``spmt(t)`` with
    all-default parameters, so non-default ``peak_delay`` / ``under_delay`` /
    ``peak_disp`` / ``under_disp`` / ``p_u_ratio`` arguments were silently
    ignored in the baseline.  Both samples now share the caller's parameters;
    results are unchanged for the default arguments.

    References: [1] http://nipy.org/ [2] https://github.com/fabianp/hrf_estimation
    """
    # All parameters except peak_disp are common to the two samples.
    _base = partial(spmt, peak_delay=peak_delay, under_delay=under_delay,
                    under_disp=under_disp, p_u_ratio=p_u_ratio)
    # Forward finite difference with respect to peak dispersion, step 0.01.
    return (_base(t, peak_disp=peak_disp) - _base(t, peak_disp=peak_disp + 0.01)) / 0.01
def get_devices_by_parent(self, hid_filter=None):
    """Group the devices returned by a filter query by device parent id.

    Returns a dict mapping each parent instance id to the list of HID
    devices sharing that parent, in discovery order.
    """
    grouped = dict()
    for device in self.get_devices(hid_filter):
        # Devices with the same parent instance id share one bucket.
        parent_id = device.get_parent_instance_id()
        grouped.setdefault(parent_id, []).append(device)
    return grouped
def FromBinary(cls, record_data, record_count=1):
    """Create an UpdateRecord subclass from binary record data.

    This should be called with a binary record blob (NOT including the
    record type header) and it will decode it into a ReflashTileRecord.

    Args:
        record_data (bytearray): The raw record data that we wish to parse
            into an UpdateRecord subclass NOT including its 8 byte record
            header.
        record_count (int): The number of records included in record_data.

    Raises:
        ArgumentError: If the record_data is malformed and cannot be parsed.

    Returns:
        ReflashTileRecord: The decoded reflash tile record.
    """
    header_length = ReflashTileRecord.RecordHeaderLength
    if len(record_data) < header_length:
        raise ArgumentError("Record was too short to contain a full reflash record header",
                            length=len(record_data), header_length=ReflashTileRecord.RecordHeaderLength)

    # Fixed-size header: offset, payload length, 8-byte target, hw type, 3 pad bytes.
    offset, data_length, raw_target, hardware_type = struct.unpack_from("<LL8sB3x", record_data)

    firmware = record_data[header_length:]
    if len(firmware) != data_length:
        raise ArgumentError("Embedded firmware length did not agree with actual length of embeded data",
                            length=len(firmware), embedded_length=data_length)

    target = _parse_target(raw_target)
    if target['controller']:
        # Controllers must use a different record type.
        raise ArgumentError("Invalid targetting information, you cannot reflash a controller with a ReflashTileRecord",
                            target=target)

    return ReflashTileRecord(target['slot'], firmware, offset, hardware_type)
def format_datetime(cls, timestamp):
    """Creates a string representing the date and time information provided
    by the given `timestamp` object.

    :param timestamp: a datetime-like object exposing ``strftime``; must be
        truthy (``None`` is rejected).
    :returns: ``timestamp`` rendered with ``cls.DATETIME_FORMAT``.
    :raises DateTimeFormatterException: if ``timestamp`` is falsy.
    """
    if not timestamp:
        # Fixed message: the old text ("timestamp must a valid string") was
        # ungrammatical and wrongly described the expected type -- the value
        # is a datetime-like object, not a string.
        raise DateTimeFormatterException(
            'timestamp must be a valid datetime object, got {}'.format(timestamp))
    return timestamp.strftime(cls.DATETIME_FORMAT)
def to_input_req(self):
    """Converts the ``self`` instance to the desired input request format.

    Returns:
        dict: Containing the "WarmStartType" and
        "ParentHyperParameterTuningJobs" as the first class fields.

    Examples:
        >>> warm_start_config = WarmStartConfig(warm_start_type=WarmStartTypes.TransferLearning, parents=["p1,p2"])
        >>> warm_start_config.to_input_req()
        "WarmStartType":"TransferLearning",
        "ParentHyperParameterTuningJobs": [
            {'HyperParameterTuningJobName': "p1"},
            {'HyperParameterTuningJobName': "p2"},
        ]
    """
    # One entry per parent tuning job, each keyed by its job name field.
    parent_jobs = [{HYPERPARAMETER_TUNING_JOB_NAME: parent_name}
                   for parent_name in self.parents]
    return {
        WARM_START_TYPE: self.type.value,
        PARENT_HYPERPARAMETER_TUNING_JOBS: parent_jobs,
    }
def add(self, origin, rel, target, attrs=None, rid=None):
    '''Add one relationship to the extent

    origin - origin of the relationship (similar to an RDF subject)
    rel - type IRI of the relationship (similar to an RDF predicate)
    target - target of the relationship (similar to an RDF object), a boolean,
             floating point or unicode object
    attrs - optional attribute mapping of relationship metadata, i.e.
            {attrname1: attrval1, attrname2: attrval2}.  May be None.
    rid - optional ID for the relationship in IRI form.  If not specified one
          will be generated.

    Returns the database rawid of the inserted relationship (note: this is
    the raw table key, not an IRI).
    '''
    cur = self._conn.cursor()
    # Insert the relationship row; let the DB generate the id when no rid given.
    if rid:
        querystr = "INSERT INTO relationship (origin, rel, target, rid) VALUES (%s, %s, %s, %s) RETURNING rawid;"
        cur.execute(querystr, (origin, rel, target, rid))
    else:
        querystr = "INSERT INTO relationship (origin, rel, target) VALUES (%s, %s, %s) RETURNING rawid;"
        cur.execute(querystr, (origin, rel, target))
    rawid = cur.fetchone()[0]
    # attrs is optional -- the previous code called attrs.items() even when
    # attrs was None, raising AttributeError.
    for a_name, a_val in (attrs or {}).items():
        querystr = "INSERT INTO attribute (rawid, name, value) VALUES (%s, %s, %s);"
        cur.execute(querystr, (rawid, a_name, a_val))
    self._conn.commit()
    cur.close()
    # Previously returned None despite the docstring's promise of an ID.
    return rawid
def _adjust_legend(self, overlay, axis):
    """Accumulate the legend handles and labels for all subplots and set up the legend"""
    legend_data = []
    dimensions = overlay.kdims
    title = ', '.join([d.name for d in dimensions])
    for key, subplot in self.subplots.items():
        # Missing elements default to False so the guard below skips them.
        element = overlay.data.get(key, False)
        if not subplot.show_legend or not element: continue
        # NOTE(review): title is recomputed identically on every iteration;
        # looks redundant with the assignment above -- confirm before removing.
        title = ', '.join([d.name for d in dimensions])
        # Collect every artist handle in the subplot's handle tree.
        handle = subplot.traverse(lambda p: p.handles['artist'],
                                  [lambda p: 'artist' in p.handles])
        if isinstance(overlay, NdOverlay):
            # Rebind key to the pretty-printed dimension values; this is a
            # generator and is consumed exactly once by the zip below.
            key = (dim.pprint_value(k) for k, dim in zip(key, dimensions))
            label = ','.join([str(k) + dim.unit if dim.unit else str(k)
                              for dim, k in zip(dimensions, key)])
            if handle:
                legend_data.append((handle, label))
        else:
            if isinstance(subplot, OverlayPlot):
                # Nested overlays carry their own accumulated legend data.
                legend_data += subplot.handles.get('legend_data', {}).items()
            elif element.label and handle:
                legend_data.append((handle, element.label))
    all_handles, all_labels = list(zip(*legend_data)) if legend_data else ([], [])
    data = OrderedDict()
    used_labels = []
    for handle, label in zip(all_handles, all_labels):
        # Ensure that artists with multiple handles are supported
        if isinstance(handle, list): handle = tuple(handle)
        # Deduplicate by both handle and label so each entry appears once.
        if handle and (handle not in data) and label and label not in used_labels:
            data[handle] = label
            used_labels.append(label)
    if (not len(set(data.values())) > 0) or not self.show_legend:
        # Nothing to show: hide any legend a previous draw created.
        legend = axis.get_legend()
        if legend: legend.set_visible(False)
    else:
        leg_spec = self.legend_specs[self.legend_position]
        if self.legend_cols: leg_spec['ncol'] = self.legend_cols
        leg = axis.legend(list(data.keys()), list(data.values()), title=title,
                          scatterpoints=1,
                          **dict(leg_spec, **self._fontsize('legend')))
        title_fontsize = self._fontsize('legend_title')
        if title_fontsize:
            leg.get_title().set_fontsize(title_fontsize['fontsize'])
        frame = leg.get_frame()
        frame.set_facecolor('1.0')
        frame.set_edgecolor('0.0')
        # NOTE(review): set_linewidth is given a string; recent matplotlib
        # expects a float here -- confirm this still works on the pinned version.
        frame.set_linewidth('1.0')
        # Very large z-order keeps the legend above all plot artists.
        leg.set_zorder(10e6)
        self.handles['legend'] = leg
        self.handles['bbox_extra_artists'].append(leg)
        self.handles['legend_data'] = data
def _ParseFilterOptions(self, options):
    """Parses the filter options.

    Args:
        options (argparse.Namespace): command line arguments.

    Raises:
        BadConfigOption: if the options are invalid.
    """
    helper_names = ['artifact_filters', 'date_filters', 'filter_file']
    helpers_manager.ArgumentHelperManager.ParseOptions(
        options, self, names=helper_names)

    self._ParseExtensionsString(
        self.ParseStringOption(options, 'extensions_string'))
    self._ParseNamesString(getattr(options, 'names_string', None))

    signature_identifiers = getattr(options, 'signature_identifiers', None)
    try:
        self._ParseSignatureIdentifiers(
            self._data_location, signature_identifiers)
    except (IOError, ValueError) as exception:
        raise errors.BadConfigOption(exception)

    # Artifact or file filters imply filtering; otherwise defer to the
    # filter collection built from the other options.
    if self._artifact_filters or self._filter_file:
        self.has_filters = True
    else:
        self.has_filters = self._filter_collection.HasFilters()
def init_argparser_loaderplugin_registry(
        self, argparser, default=None,
        help=('the name of the registry to use for the handling of loader '
              'plugins that may be loaded from the given Python packages')):
    """Default helper for setting up the loaderplugin registries flags.

    Kept separate from init_argparser due to implementation specific
    requirements; subclasses should consider overriding the default help
    message to match the toolchain they encapsulate.
    """
    flag_kwargs = dict(
        default=default,
        dest=CALMJS_LOADERPLUGIN_REGISTRY_NAME,
        action='store',
        metavar=metavar('registry'),
        help=help,
    )
    argparser.add_argument('--loaderplugin-registry', **flag_kwargs)
def resume_training(self, sgd=None, **cfg):
    """Continue training a pre-trained model.

    Creates and returns an optimizer, and initializes "rehearsal" for any
    pipeline component that has a ``.rehearse()`` method.  Rehearsal is used
    to prevent models from "forgetting" their initialised "knowledge": each
    eligible component gets a deep copy of its current model to rehearse
    against via ``nlp.rehearse()``.
    """
    if cfg.get("device", -1) >= 0:
        util.use_gpu(cfg["device"])
    vectors = self.vocab.vectors
    if vectors.data.shape[1] >= 1:
        # Move the vector table onto the active ops (e.g. GPU) backend.
        vectors.data = Model.ops.asarray(vectors.data)
    link_vectors_to_models(self.vocab)
    if vectors.data.shape[1]:
        cfg["pretrained_vectors"] = vectors.name
    self._optimizer = sgd if sgd is not None else create_default_optimizer(Model.ops)
    for _name, component in self.pipeline:
        # Snapshot the current weights for components supporting rehearsal.
        if hasattr(component, "_rehearsal_model"):
            component._rehearsal_model = deepcopy(component.model)
    return self._optimizer
def _check_special_kwargs ( self , name ) : '''check special functions for kwargs Checks the content of the special functions ( % methodname ) for any keyword arguments referenced within Parameters : name ( str ) : A path key name Returns : A list of keyword arguments found in any special functions'''
keys = [ ] # find any % method names in the template string functions = re . findall ( r"\%\w+" , self . templates [ name ] ) if not functions : return keys # loop over special method names and extract keywords for function in functions : method = getattr ( self , function [ 1 : ] ) # get source code of special method source = self . _find_source ( method ) fkeys = re . findall ( r'kwargs\[(.*?)\]' , source ) if fkeys : # evaluate to proper string fkeys = [ ast . literal_eval ( k ) for k in fkeys ] keys . extend ( fkeys ) return keys
def display(self):
    "Renders the scene once every refresh"
    # Block until the compositor provides fresh tracked-device poses.
    self.compositor.waitGetPoses(self.poses, openvr.k_unMaxTrackedDeviceCount, None, 0)
    hmd_pose0 = self.poses[openvr.k_unTrackedDeviceIndex_Hmd]
    if not hmd_pose0.bPoseIsValid:
        # No valid headset pose this frame; skip rendering entirely.
        return
    # hmd_pose = hmd_pose0.mDeviceToAbsoluteTracking
    # 1) On-screen render:
    if True:
        glClearColor(0.8, 0.4, 0.4, 0)  # Pink background
        glClear(GL_COLOR_BUFFER_BIT)
        # glutSwapBuffers()
        glFlush()  # Single buffer
    # 2) VR render
    # TODO: render different things to each eye
    # Clear the offscreen framebuffer used as the VR eye texture source.
    glBindFramebuffer(GL_FRAMEBUFFER, self.fb)
    glClearColor(0.8, 0.4, 0.4, 0)  # Pink background
    glClear(GL_COLOR_BUFFER_BIT)
    glBindFramebuffer(GL_FRAMEBUFFER, 0)
    # TODO: use different textures for each eye
    # Both eyes currently receive the same texture.
    self.compositor.submit(openvr.Eye_Left, self.texture)
    self.compositor.submit(openvr.Eye_Right, self.texture)
    glBindFramebuffer(GL_FRAMEBUFFER, 0)
def context(self):
    """Create an execution context.

    :rtype: execution.Context
    :return: The created execution context.
    """
    # Build a fresh context from this instance's configured directories.
    created = execution.Context(self.__base_dir, self.__prof_dir, self.__prof_name)
    return created
def render(self, at):
    "Renders the scene once every refresh"
    # draw bg
    surf = self.surf
    surf.fill(BASE3)
    bg = pygame.Surface((self.size[0], self.bar_height))
    bg.fill(BASE2)
    surf.blit(bg, (0, 0))
    # draw bar
    # Fill ratio of the gauge; colors step down as the gauge drains.
    ratio = self.gauge.get(at) / float(self.gauge.max(at))
    if ratio > 1:
        bar_color = BLUE
        ratio = 1  # clamp overfilled gauges to a full bar
    elif ratio == 1:
        bar_color = CYAN
    elif ratio > 0.3:
        bar_color = GREEN
    elif ratio > 0.1:
        bar_color = YELLOW
    elif ratio > 0:
        bar_color = ORANGE
    if ratio > 0:
        # bar_color is always bound here: every ratio > 0 case above sets it.
        bar = pygame.Surface((int(self.size[0] * ratio), self.bar_height))
        bar.fill(bar_color)
        surf.blit(bar, (0, 0))
    # write current state
    text = font.render('{0}/{1}'.format(int(self.gauge.get(at)), self.gauge.max(at)), True, BASE1)
    surf.blit(text, (10, font.get_height() / 2))
    # write time recover in
    # Positive velocity renders green, negative red; zero shows nothing.
    speed = self.gauge.velocity(at)
    if speed != 0:
        text = font.render('{0:+.2f}/s'.format(speed), True, GREEN if speed > 0 else RED)
        surf.blit(text, (surf.get_width() - text.get_width() - 10, font.get_height() / 2))
    '''try:
        move_in = self.gauge.momenta[0].move_in(self.gauge, at)
    except (AttributeError, IndexError):
        pass
    else:
        if move_in:
            move_in = math.ceil(move_in)
            text = font.render('{0:02.0f}:{1:02.0f}'.format(move_in / 60, move_in % 60),
                               True, text_colors[1])
            surf.blit(text, (surf.get_width() - text.get_width() - 10,
                             font.get_height() / 2))'''
    return surf
def installation_refused(self, requirement, missing_dependencies, reason):
    """Raise :exc:`.DependencyInstallationRefused` with a user friendly message.

    :param requirement: A :class:`.Requirement` object.
    :param missing_dependencies: A list of strings with missing dependencies.
    :param reason: The reason why installation was refused (a string).
    """
    template = "Missing %s (%s) required by Python package %s (%s) but %s!"
    details = (
        pluralize(len(missing_dependencies), "system package", "system packages"),
        concatenate(missing_dependencies),
        requirement.name,
        requirement.version,
        reason,
    )
    raise DependencyInstallationRefused(template % details)
def _proxy(self):
    """Generate an instance context for the instance, the context is capable of
    performing various actions.  All instance actions are proxied to the context.

    :returns: UserChannelContext for this UserChannelInstance
    :rtype: twilio.rest.chat.v2.service.user.user_channel.UserChannelContext
    """
    # Lazily construct the context on first access and cache it.
    if self._context is None:
        solution = self._solution
        self._context = UserChannelContext(
            self._version,
            service_sid=solution['service_sid'],
            user_sid=solution['user_sid'],
            channel_sid=solution['channel_sid'],
        )
    return self._context
def _run_ensemble(batch_id, vrn_files, config_file, base_dir, ref_file, data):
    """Run an ensemble call using merging and SVM-based approach in bcbio.variation"""
    ensemble_vcf = os.path.join(base_dir, "{0}-ensemble.vcf".format(batch_id))
    regions_bed = os.path.join(base_dir, "{0}-callregions.bed".format(batch_id))
    work_dir = "%s-work" % os.path.splitext(ensemble_vcf)[0]
    if not utils.file_exists(ensemble_vcf):
        _bcbio_variation_ensemble(vrn_files, ensemble_vcf, ref_file,
                                  config_file, base_dir, data)
    if not utils.file_exists(ensemble_vcf):
        # The ensemble run produced no output; fall back to the filtered
        # preparation VCF from the work directory.
        base_vcf = glob.glob(os.path.join(work_dir, "prep", "*-cfilter.vcf"))[0]
        utils.symlink_plus(base_vcf, ensemble_vcf)
    if not utils.file_exists(regions_bed):
        multi_beds = glob.glob(os.path.join(work_dir, "prep", "*-multicombine.bed"))
        if len(multi_beds) > 0:
            utils.symlink_plus(multi_beds[0], regions_bed)
    return {"variantcaller": "ensemble",
            "vrn_file": ensemble_vcf,
            "bed_file": regions_bed if os.path.exists(regions_bed) else None}
def v_depth(d, depth):
    """Iterate values on specific depth.  depth has to be greater equal than 0.

    Usage reference see :meth:`DictTree.kv_depth() <DictTree.kv_depth>`
    """
    # Depth zero means the current node itself is the value.
    if depth == 0:
        yield d
        return
    # Otherwise descend one level and recurse with a reduced depth.
    for child in DictTree.v(d):
        for descendant in DictTree.v_depth(child, depth - 1):
            yield descendant
def _append_slash_if_dir_path ( self , relpath ) : """For a dir path return a path that has a trailing slash ."""
if self . _isdir_raw ( relpath ) : return self . _append_trailing_slash ( relpath ) return relpath
def daemonize(**params):
    """Simple daemonization via double fork().

    The parent exits after the first fork; the child creates a new session,
    forks again, chdirs to '/', optionally redirects the std file
    descriptors to /dev/null, starts a new process group, and finally
    closes all remaining descriptors via closeall().  Application logging
    setup needs to be delayed until after daemonize() is called.  If you
    need fancier, see http://pypi.python.org/pypi/python-daemon/

    Supported params:
        redir - Redirect stdin, stdout, and stderr to /dev/null.  Default
                is True, use "redir=False" to leave std files unchanged.
        log   - logging function, default is no logging.  All fds end up
                closed or pointed at /dev/null, so this is really only
                useful for debugging this function itself; callers needing
                logging should probably use syslog.
        plus params appropriate for taskforce.utils.closeall().

    Returns False if the first fork fails; otherwise the surviving daemon
    process continues past this call.
    """
    log = params.get('log')
    redir = params.get('redir', True)

    # First fork -- the original parent exits immediately.
    try:
        if os.fork() != 0:
            os._exit(0)
    except Exception as e:
        if log:
            log("First fork failed -- %s", e)
        return False

    # Detach from the controlling terminal by becoming a session leader.
    try:
        os.setsid()
    except Exception as e:
        if log:
            log("Setsid() failed -- %s", e)

    # Second fork -- guarantees we can never reacquire a controlling tty.
    try:
        if os.fork() != 0:
            os._exit(0)
    except Exception as e:
        if log:
            log("Second fork failed, pressing on -- %s", e)

    # Don't pin the starting directory's filesystem.
    try:
        os.chdir('/')
    except Exception as e:
        if log:
            log("Chdir('/') failed -- %s", e)

    if redir:
        # Reopen stdin on /dev/null.  The fd sanity checks now live in
        # 'else' clauses: previously 'fd' was referenced even when
        # os.open()/os.dup() raised, causing an UnboundLocalError.
        try:
            os.close(0)
        except Exception as e:
            if log:
                log("Stdin close failed -- %s", e)
        try:
            fd = os.open('/dev/null', os.O_RDONLY)
        except Exception as e:
            if log:
                log("Stdin open failed -- %s", e)
        else:
            if fd != 0:
                if log:
                    log("Stdin open returned %d, should be 0", fd)

        # Reopen stdout on /dev/null.
        try:
            os.close(1)
        except Exception as e:
            if log:
                log("Stdout close failed -- %s", e)
        try:
            fd = os.open('/dev/null', os.O_WRONLY)
        except Exception as e:
            if log:
                log("Stdout open failed -- %s", e)
        else:
            if fd != 1:
                if log:
                    log("Stdout open returned %d, should be 1", fd)

    # Start a fresh process group.
    try:
        os.setpgrp()
    except Exception as e:
        if log:
            log("Setpgrp failed -- %s", e)

    if redir:
        # Stderr becomes a dup of stdout (already pointed at /dev/null).
        try:
            os.close(2)
        except Exception as e:
            if log:
                log("Stderr close failed -- %s", e)
        try:
            fd = os.dup(1)
        except Exception as e:
            if log:
                log("Stderr dup failed -- %s", e)
        else:
            if fd != 2:
                if log:
                    log("Stderr dup returned %d, should be 2", fd)

    # Keep the std descriptors open when closing everything else.
    if 'exclude' not in params:
        params['exclude'] = [0, 1, 2]
    closeall(**params)
def fisher_angular_deviation(dec=None, inc=None, di_block=None, confidence=95):
    '''The angle from the true mean within which a chosen percentage of
    directions lie can be calculated from the Fisher distribution.  This
    function uses the calculated Fisher concentration parameter to estimate
    this angle from directional data.  The 63 percent confidence interval is
    often called the angular standard deviation.

    Parameters
    ----------
    dec : list of declinations or longitudes
    inc : list of inclinations or latitudes
    di_block : a nested list of [dec, inc, 1.0]
        A di_block can be provided instead of dec, inc lists in which case it
        will be used.  Either dec, inc lists or a di_block need to be provided.
    confidence : 50 percent, 63 percent or 95 percent

    Returns
    -------
    theta : critical angle of interest from the mean which contains the
        percentage of directions specified by the confidence parameter

    Raises
    ------
    ValueError : if confidence is not one of 50, 63 or 95 (previously this
        fell through and raised an opaque NameError on 'theta')
    '''
    if di_block is None:
        di_block = make_di_block(dec, inc)
    # Both branches previously computed the same mean; consolidated here.
    mean = pmag.fisher_mean(di_block)
    # Standard Fisher-statistics numerators for the supported confidence levels.
    if confidence == 50:
        theta = old_div(67.5, np.sqrt(mean['k']))
    elif confidence == 63:
        theta = old_div(81, np.sqrt(mean['k']))
    elif confidence == 95:
        theta = old_div(140, np.sqrt(mean['k']))
    else:
        raise ValueError(
            'confidence must be one of 50, 63 or 95 (got {0})'.format(confidence))
    return theta