signature
stringlengths
29
44.1k
implementation
stringlengths
0
85.2k
def rolling_fltr(dem, f=np.nanmedian, size=3, circular=True, origmask=False):
    """General rolling filter (default operator is median filter).

    Can input any reduction function ``f`` that accepts an ``axis`` keyword
    (e.g. ``np.nanmedian``). Correctly handles NaN and fills gaps.

    :param dem: input array; converted to a masked array via malib.checkma
    :param f: reduction applied over each window, default ``np.nanmedian``
    :param size: window edge length in pixels (``size x size`` neighborhood)
    :param circular: if True (and size > 3), NaN-out window corners to
        approximate a circular footprint
    :param origmask: if True, re-apply the original input mask to the output
    :return: filtered ``np.ma.MaskedArray`` preserving the input fill value
    """
    print("Applying rolling filter: %s with size %s" % (f.__name__, size))
    dem = malib.checkma(dem)
    # Convert to float32 so we can fill with nan
    dem = dem.astype(np.float32)
    # One row per pixel, one column per window cell
    newshp = (dem.size, size * size)
    # Force a step size of 1
    t = malib.sliding_window_padded(dem.filled(np.nan), (size, size), (1, 1))
    if circular:
        if size > 3:
            # NOTE(review): circular_mask presumably flags the corner cells of
            # the size x size window -- confirm against its definition
            mask = circular_mask(size)
            t[:, mask] = np.nan
    t = t.reshape(newshp)
    # Reduce each window to a single value, then restore the original shape
    out = f(t, axis=1).reshape(dem.shape)
    out = np.ma.fix_invalid(out).astype(dem.dtype)
    out.set_fill_value(dem.fill_value)
    if origmask:
        out = np.ma.array(out, mask=np.ma.getmaskarray(dem))
    return out
def termpt(method, ilusrc, target, et, fixref, abcorr, corloc, obsrvr,
           refvec, rolstp, ncuts, schstp, soltol, maxn):
    """
    Find terminator points on a target body. The caller specifies
    half-planes, bounded by the illumination source center-target center
    vector, in which to search for terminator points. The terminator can
    be either umbral or penumbral.

    https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/termpt_c.html

    :param method: Computation method. :type method: str
    :param ilusrc: Illumination source. :type ilusrc: str
    :param target: Name of target body. :type target: str
    :param et: Epoch in ephemeris seconds past J2000 TDB. :type et: float
    :param fixref: Body-fixed, body-centered target body frame. :type fixref: str
    :param abcorr: Aberration correction. :type abcorr: str
    :param corloc: Aberration correction locus. :type corloc: str
    :param obsrvr: Name of observing body. :type obsrvr: str
    :param refvec: Reference vector for cutting half-planes. :type refvec: 3-Element Array of floats
    :param rolstp: Roll angular step for cutting half-planes. :type rolstp: float
    :param ncuts: Number of cutting half-planes. :type ncuts: int
    :param schstp: Angular step size for searching. :type schstp: float
    :param soltol: Solution convergence tolerance. :type soltol: float
    :param maxn: Maximum number of entries in output arrays. :type maxn: int
    :return: Counts of terminator points corresponding to cuts,
             Terminator points, Times associated with terminator points,
             Terminator vectors emanating from the observer
    :rtype: tuple
    """
    method = stypes.stringToCharP(method)
    ilusrc = stypes.stringToCharP(ilusrc)
    target = stypes.stringToCharP(target)
    et = ctypes.c_double(et)
    fixref = stypes.stringToCharP(fixref)
    abcorr = stypes.stringToCharP(abcorr)
    corloc = stypes.stringToCharP(corloc)
    obsrvr = stypes.stringToCharP(obsrvr)
    refvec = stypes.toDoubleVector(refvec)
    rolstp = ctypes.c_double(rolstp)
    ncuts = ctypes.c_int(ncuts)
    schstp = ctypes.c_double(schstp)
    soltol = ctypes.c_double(soltol)
    maxn = ctypes.c_int(maxn)
    npts = stypes.emptyIntVector(maxn.value)
    points = stypes.emptyDoubleMatrix(3, maxn.value)
    # FIX: pass the plain int (maxn.value) like every other allocation here,
    # not the ctypes.c_int wrapper itself.
    epochs = stypes.emptyDoubleVector(maxn.value)
    trmvcs = stypes.emptyDoubleMatrix(3, maxn.value)
    libspice.termpt_c(method, ilusrc, target, et, fixref, abcorr, corloc,
                      obsrvr, refvec, rolstp, ncuts, schstp, soltol, maxn,
                      npts, points, epochs, trmvcs)
    # Clip the empty elements out of returned results
    npts = stypes.cVectorToPython(npts)
    valid_points = numpy.where(npts >= 1)
    return (npts[valid_points],
            stypes.cMatrixToNumpy(points)[valid_points],
            stypes.cVectorToPython(epochs)[valid_points],
            stypes.cMatrixToNumpy(trmvcs)[valid_points])
def create(cls, tx_signers, recipients, metadata=None, asset=None):
    """A simple way to generate a `CREATE` transaction.

    Supports Ed25519 and ThresholdSha256 Cryptoconditions, as well as
    multiple inputs and outputs.

    Args:
        tx_signers (:obj:`list` of :obj:`str`): Keys that represent the
            signers of the CREATE Transaction.
        recipients (:obj:`list` of :obj:`tuple`): ([keys], amount) pairs
            that represent the recipients of this Transaction.
        metadata (dict): Metadata to be stored along with the Transaction.
        asset (dict): Metadata associated with the asset that will be
            created in this Transaction.

    Returns:
        :class:`~bigchaindb.common.transaction.Transaction`
    """
    inputs, outputs = cls.validate_create(tx_signers, recipients, asset, metadata)
    return cls(cls.CREATE, {'data': asset}, inputs, outputs, metadata)
def get_default_config(self):
    """Return the default config for the handler."""
    base_config = super(NullHandler, self).get_default_config()
    # No handler-specific defaults; kept for the common handler template.
    base_config.update({})
    return base_config
def _CamelCaseToSnakeCase ( path_name ) : """Converts a field name from camelCase to snake _ case ."""
result = [ ] for c in path_name : if c == '_' : raise ParseError ( 'Fail to parse FieldMask: Path name ' '{0} must not contain "_"s.' . format ( path_name ) ) if c . isupper ( ) : result += '_' result += c . lower ( ) else : result += c return '' . join ( result )
def data_labels(self):
    """|DataLabels| instance providing properties and methods on the
    collection of data labels associated with this plot.

    Raises ValueError if the plot has no ``dLbls`` element.
    """
    dLbls_elm = self._element.dLbls
    if dLbls_elm is None:
        raise ValueError(
            'plot has no data labels, set has_data_labels = True first'
        )
    return DataLabels(dLbls_elm)
def image_recognition(image, cloud=None, batch=False, api_key=None, version=None, **kwargs):
    """Given an input image, returns a dictionary of image classifications
    with associated scores.

    Input can be grayscale or RGB, as a numpy array or nested list, with
    uint8 0-255 values or floats in [0, 1]. Large images are resized
    internally (min axis 144). Square aspect ratios are ideal but not
    required.

    :param image: The image to be analyzed.
    :type image: str
    :rtype: dict containing classifications
    """
    preprocessed = data_preprocess(image, batch=batch, size=144, min_axis=True)
    params = {"batch": batch, "api_key": api_key, "version": version}
    return api_handler(preprocessed, cloud=cloud, api="imagerecognition",
                       url_params=params, **kwargs)
def register_layout(self, name, layout):
    """Registers given layout.

    :param name: Layout name.
    :type name: unicode
    :param layout: Layout object.
    :type layout: Layout
    :return: Method success.
    :rtype: bool
    :raises LayoutRegistrationError: if ``name`` is already registered.
    """
    if name not in self:
        self.__layouts[name] = layout
        return True
    raise umbra.exceptions.LayoutRegistrationError(
        "{0} | '{1}' layout is already registered!".format(
            self.__class__.__name__, name))
def _get_xy_tree ( xy , degree ) : """Evaluates the entire tree of 2d mononomials . The return value is a list of arrays , where ` out [ k ] ` hosts the ` 2 * k + 1 ` values of the ` k ` th level of the tree (0 , 0) (1 , 0 ) ( 0 , 1) (2 , 0 ) ( 1 , 1 ) ( 0 , 2)"""
x , y = xy tree = [ numpy . array ( [ numpy . ones ( x . shape , dtype = int ) ] ) ] for d in range ( degree ) : tree . append ( numpy . concatenate ( [ tree [ - 1 ] * x , [ tree [ - 1 ] [ - 1 ] * y ] ] ) ) return tree
def purge(self, queue, virtual_host='/'):
    """Purge a Queue.

    :param str queue: Queue name
    :param str virtual_host: Virtual host name
    :raises ApiError: Raises if the remote server encountered an error.
    :raises ApiConnectionError: Raises if there was a connectivity issue.
    :rtype: None
    """
    # vhost names (e.g. '/') must be fully percent-encoded in the URL
    encoded_vhost = quote(virtual_host, '')
    return self.http_client.delete(API_QUEUE_PURGE % (encoded_vhost, queue))
def payment_end(self, account, wallet):
    """End a payment session, marking the account as available for use in
    a payment session.

    :param account: Account to mark available
    :type account: str
    :param wallet: Wallet to end payment session for
    :type wallet: str
    :raises: :py:exc:`nano.rpc.RPCException`
    :return: True when the server replies with an empty object
    """
    payload = {
        "account": self._process_value(account, 'account'),
        "wallet": self._process_value(wallet, 'wallet'),
    }
    return self.call('payment_end', payload) == {}
def leading_particle(df):
    """Grab leading particle (neutrino, most energetic bundle muon).

    aanet convention: ``mc_tracks[0]`` is the neutrino, so take the first
    row per event. If the first rows are not unique neutrinos (all type 0),
    it's a muon bundle -- fall back to the most energetic particle.
    Note: selecting the most energetic mc particle does not always select
    the neutrino; in sub-percent cases secondaries can carry more energy.
    """
    leading = df.groupby('event_id', as_index=False).first()
    leading_types = leading.type.unique()
    if len(leading_types) == 1 and leading_types[0] == 0:
        leading = most_energetic(df)
    return leading
def save_group(store, *args, **kwargs):
    """Convenience function to save several NumPy arrays to the local file
    system, following a similar API to NumPy ``savez()``/``savez_compressed()``.

    Parameters
    ----------
    store : MutableMapping or string
        Store or path to directory in file system or name of zip file.
    args : ndarray
        NumPy arrays with data to save (stored as ``arr_0``, ``arr_1``, ...).
    kwargs
        NumPy arrays with data to save under the given names.

    Notes
    -----
    Default compression options will be used.
    """
    if not args and not kwargs:
        raise ValueError('at least one array must be provided')
    # handle polymorphic store arg: a string becomes a store we own and
    # therefore may need to close (e.g. a ZipStore)
    may_need_closing = isinstance(store, str)
    store = normalize_store_arg(store, clobber=True)
    try:
        grp = _create_group(store, overwrite=True)
        for idx, arr in enumerate(args):
            grp.create_dataset('arr_{}'.format(idx), data=arr, overwrite=True)
        for key, arr in kwargs.items():
            grp.create_dataset(key, data=arr, overwrite=True)
    finally:
        if may_need_closing and hasattr(store, 'close'):
            # needed to ensure zip file records are written
            store.close()
def MarkdownColumn(name, deferred=False, group=None, **kwargs):
    """Create a composite column that autogenerates HTML from Markdown text,
    storing data in db columns named with ``_html`` and ``_text`` suffixes.

    :param name: base name for the pair of columns
    :param deferred: passed through to SQLAlchemy ``composite``
    :param group: deferred-load group name; defaults to ``name``
    :param kwargs: forwarded to both underlying ``Column`` constructors
    :return: SQLAlchemy composite wrapping the two ``UnicodeText`` columns
    """
    return composite(MarkdownComposite,
                     Column(name + '_text', UnicodeText, **kwargs),
                     Column(name + '_html', UnicodeText, **kwargs),
                     deferred=deferred, group=group or name)
def _valid_lsid ( self ) : """Performs some basic ( non - comprehensive ) LSID validation : return :"""
if not isinstance ( self . lsid , str ) : raise TypeError ( "lsid is not a string, string expected: " + str ( self . lsid ) ) if self . lsid . count ( ':' ) != 4 : raise ValueError ( "lsid contains incorrect number of colons, 4 expected: " + str ( self . lsid ) ) if self . lsid . split ( ':' ) [ 0 ] . lower ( ) != 'urn' : raise ValueError ( "lsid does not begin with urn: " + str ( self . lsid ) ) # If an LSID authority is specified , check with the authority if self . lsid_authority : if not self . lsid_authority . validate ( self . lsid , check_existing = False ) : raise ValueError ( "lsid does not the authority: " + str ( self . lsid ) )
def p_field_optional2_2 ( self , p ) : """field : name arguments selection _ set"""
p [ 0 ] = Field ( name = p [ 1 ] , arguments = p [ 2 ] , selections = p [ 3 ] )
def check_base_suggested_attributes(self, dataset):
    '''Check the global suggested attributes for 2.0 templates.

    These go an extra step besides just checking that they exist: platform,
    cdm_data_type, the ISO-8601 date attributes (date_modified, date_issued,
    date_metadata_modified), geospatial lat/lon units, and the
    contributor_name/contributor_role pair (per ACDD).

    :param netCDF4.Dataset dataset: An open netCDF dataset
    :return: compliance result for the 'Suggested global attributes' section
    '''
    suggested_ctx = TestCtx(BaseCheck.LOW, 'Suggested global attributes')
    # Do any of the variables define platform ?
    platform_name = getattr(dataset, 'platform', '')
    suggested_ctx.assert_true(platform_name != '', 'platform should exist and point to a term in :platform_vocabulary.')
    cdm_data_type = getattr(dataset, 'cdm_data_type', '')
    suggested_ctx.assert_true(cdm_data_type.lower() in ['grid', 'image', 'point', 'radial', 'station', 'swath', 'trajectory'],
                              'cdm_data_type must be one of Grid, Image, Point, Radial, Station, Swath, Trajectory: {}'.format(cdm_data_type))
    # Parse dates, check for ISO 8601
    for attr in ['date_modified', 'date_issued', 'date_metadata_modified']:
        attr_value = getattr(dataset, attr, '')
        try:
            parse_datetime(attr_value)
            suggested_ctx.assert_true(True, '')  # Score it True!
        except ISO8601Error:
            suggested_ctx.assert_true(False, '{} should exist and be ISO-8601 format (example: PT1M30S), currently: {}'.format(attr, attr_value))
    units = getattr(dataset, 'geospatial_lat_units', '').lower()
    suggested_ctx.assert_true(units == 'degrees_north', 'geospatial_lat_units attribute should be degrees_north: {}'.format(units))
    units = getattr(dataset, 'geospatial_lon_units', '').lower()
    suggested_ctx.assert_true(units == 'degrees_east', 'geospatial_lon_units attribute should be degrees_east: {}'.format(units))
    contributor_name = getattr(dataset, 'contributor_name', '')
    contributor_role = getattr(dataset, 'contributor_role', '')
    # BUG FIX: names was previously split from contributor_role, which made
    # the name/role length comparison always (vacuously) pass.
    names = contributor_name.split(',')
    roles = contributor_role.split(',')
    suggested_ctx.assert_true(contributor_name != '', 'contributor_name should exist and not be empty.')
    suggested_ctx.assert_true(len(names) == len(roles), 'length of contributor names matches length of roles')
    suggested_ctx.assert_true(contributor_role != '', 'contributor_role should exist and not be empty.')
    # NOTE(review): this length check is scored twice, matching the original
    # code; kept to preserve the section's score total.
    suggested_ctx.assert_true(len(names) == len(roles), 'length of contributor names matches length of roles')
    return suggested_ctx.to_result()
def saveVirtualOutputs(self, outdict):
    """Assign in-memory versions of generated products for this
    ``imageObject`` based on dictionary ``outdict``.

    No-op unless the object is operating in in-memory mode.
    """
    if not self.inmemory:
        return
    self.virtualOutputs.update(outdict)
def has_event(self, event, cameo_code):
    """Test whether there is an "event2" or "event3" entry for the given
    cameo code.

    Args:
        event: event key, resolved through ``self.event_name``
        cameo_code: CAMEO code to look up in ``self.mapping``

    Returns:
        the entry's value for the event, or False when the code is unknown
        or has no mapping entry.
    """
    if not self.has_cameo_code(cameo_code):
        return False
    entry = self.mapping.get(cameo_code)
    if not entry:
        return False
    return entry[self.event_name[event]]
def _get_deleted_fs(name, blade):
    '''Private function to check if a file system has already been deleted.

    Returns the filesystem object when it exists and is flagged destroyed;
    returns None when the API call fails or the filesystem is not destroyed.
    '''
    try:
        fs_info = _get_fs(name, blade)
        if fs_info and fs_info.destroyed:
            return fs_info
    except rest.ApiException:
        return None
async def run_with_interrupt(task, *events, loop=None):
    """Awaits a task while allowing it to be interrupted by one or more
    `asyncio.Event`s.

    If the task finishes without the events becoming set, the results of
    the task will be returned. If an event becomes set, the task will be
    cancelled and ``None`` will be returned.

    :param task: Task to run
    :param events: One or more `asyncio.Event`s which, if set, will
        interrupt `task` and cause it to be cancelled.
    :param loop: Optional event loop to use other than the default.
    """
    loop = loop or asyncio.get_event_loop()
    task = asyncio.ensure_future(task, loop=loop)
    event_tasks = [loop.create_task(event.wait()) for event in events]
    # FIX: the explicit ``loop=`` argument to asyncio.wait() was removed in
    # Python 3.10; the tasks are already bound to ``loop``.
    done, pending = await asyncio.wait([task] + event_tasks,
                                       return_when=asyncio.FIRST_COMPLETED)
    for f in pending:
        f.cancel()  # cancel unfinished tasks
    for f in done:
        f.exception()  # prevent "exception was not retrieved" errors
    if task in done:
        return task.result()  # may raise exception
    return None
def sayHelloAsync(self, name="Not given", message="nothing"):
    """Implementation of IHello.sayHelloAsync.

    Executed via some thread, so the remote caller does not block. Since
    the return type of IHello.sayHelloAsync is CompletableFuture<String>,
    this may return either a String (as done here) or a Future yielding a
    Python string.
    """
    notice = ("Python.sayHelloAsync called by: {0} "
              "with message: '{1}'".format(name, message))
    print(notice)
    reply = ("PythonAsync says: Howdy {0} "
             "that's a nice runtime you got there".format(name))
    return reply
def edit_labels_on_gce_role(self, name, add=None, remove=None, mount_point=DEFAULT_MOUNT_POINT):
    """Edit labels for an existing GCE role in the backend.

    Allows adding or removing labels (keys, values, or both) from the list
    of keys on the role.

    Supported methods:
        POST: /auth/{mount_point}/role/{name}/labels. Produces: 204 (empty body)

    :param name: The name of an existing gce role. Errors if the role is
        not a gce type role.
    :type name: str | unicode
    :param add: key:value labels to add to the GCE role's bound labels.
    :type add: list
    :param remove: label keys to remove from the role's bound labels.
        Missing keys are ignored (idempotent).
    :type remove: list
    :param mount_point: The "path" the method/backend was mounted on.
    :type mount_point: str | unicode
    :return: The response of the edit_labels_on_gce_role request.
    :rtype: requests.Response
    """
    api_path = '/v1/auth/{mount_point}/role/{name}/labels'.format(
        mount_point=mount_point,
        name=name,
    )
    return self._adapter.post(
        url=api_path,
        json={'add': add, 'remove': remove},
    )
def undefine(self):
    """Undefine the Generic.

    Python equivalent of the CLIPS undefgeneric command. The object
    becomes unusable after this method has been called.

    :raises CLIPSError: if the environment refuses to undefine the generic.
    """
    result = lib.EnvUndefgeneric(self._env, self._gnc)
    if result != 1:
        raise CLIPSError(self._env)
    # Drop the environment handle so further use fails fast.
    self._env = None
def blockAllSignals(self, state):
    """Fully blocks all signals - tree and header signals.

    :param state | <bool>
    """
    for widget in (self, self.header()):
        widget.blockSignals(state)
def set_sound_mode(self, sound_mode):
    """Set sound_mode of device.

    Valid values depend on the device and should be taken from
    "sound_mode_list". Returns True on success and False on fail.

    ALL_ZONE_STEREO is handled out-of-band via _set_all_zone_stereo; any
    other mode first leaves all-zone-stereo if it is currently active,
    then issues the selection over HTTP GET.
    """
    if sound_mode == ALL_ZONE_STEREO:
        # All-zone stereo has its own enable call instead of a command URL.
        if self._set_all_zone_stereo(True):
            self._sound_mode_raw = ALL_ZONE_STEREO
            return True
        else:
            return False
    if self._sound_mode_raw == ALL_ZONE_STEREO:
        # Leave all-zone stereo before selecting a regular sound mode.
        if not self._set_all_zone_stereo(False):
            return False
    # For selection of sound mode other names then at receiving sound modes
    # have to be used
    # Therefore source mapping is needed to get sound_mode
    # Create command URL and send command via HTTP GET
    command_url = self._urls.command_sel_sound_mode + sound_mode
    # sent command
    try:
        if self.send_get_command(command_url):
            # Cache the raw mode name the receiver reports for this mode.
            self._sound_mode_raw = self._sound_mode_dict[sound_mode][0]
            return True
        else:
            return False
    except requests.exceptions.RequestException:
        _LOGGER.error(
            "Connection error: sound mode function %s not set.", sound_mode)
        return False
def retry(*dargs, **dkw):
    """Decorator function that instantiates the Retrying object.

    @param *dargs: positional arguments passed to Retrying object
    @param **dkw: keyword arguments passed to the Retrying object
    """
    # support both @retry and @retry() as valid syntax
    if len(dargs) == 1 and callable(dargs[0]):
        func = dargs[0]

        @six.wraps(func)
        def simple_wrapper(*args, **kw):
            return Retrying().call(func, *args, **kw)

        return simple_wrapper

    def decorator(f):
        @six.wraps(f)
        def configured_wrapper(*args, **kw):
            return Retrying(*dargs, **dkw).call(f, *args, **kw)

        return configured_wrapper

    return decorator
def _submit_task_with_template(self, task_ids):
    '''Submit tasks by interpolating a shell script defined in job_template.

    :param task_ids: task IDs to submit together in a single job file
    :return: True on successful submission
    :raises ValueError: if the job template cannot be interpolated
    :raises RuntimeError: if the submission command fails
    '''
    runtime = self.config
    runtime.update({
        'workdir': os.getcwd(),
        'cur_dir': os.getcwd(),  # for backward compatibility
        'verbosity': env.verbosity,
        'sig_mode': env.config.get('sig_mode', 'default'),
        'run_mode': env.config.get('run_mode', 'run'),
        'home_dir': os.path.expanduser('~')
    })
    if '_runtime' in env.sos_dict:
        runtime.update({
            x: env.sos_dict['_runtime'][x]
            for x in ('nodes', 'cores', 'workdir', 'mem', 'walltime')
            if x in env.sos_dict['_runtime']
        })
    if 'nodes' not in runtime:
        runtime['nodes'] = 1
    if 'cores' not in runtime:
        runtime['cores'] = 1
    # let us first prepare a task file
    job_text = ''
    for task_id in task_ids:
        runtime['task'] = task_id
        try:
            job_text += cfg_interpolate(self.job_template, runtime)
            job_text += '\n'
        except Exception as e:
            raise ValueError(
                f'Failed to generate job file for task {task_id}: {e}')
    filename = task_ids[0] + ('.sh' if len(task_ids) == 1
                              else f'-{task_ids[-1]}.sh')
    # now we need to write a job file
    job_file = os.path.join(
        os.path.expanduser('~'), '.sos', 'tasks', filename)
    # do not translate newline under windows because the script will be
    # executed under linux/mac
    with open(job_file, 'w', newline='') as job:
        job.write(job_text)
    # then copy the job file to remote host if necessary
    self.agent.send_task_file(job_file)
    try:
        # FIX: execute the job file written above; the command previously
        # contained a corrupted literal instead of the filename.
        cmd = f'bash ~/.sos/tasks/{filename}'
        self.agent.run_command(cmd, wait_for_task=self.wait_for_task)
    except Exception as e:
        raise RuntimeError(f'Failed to submit task {task_ids}: {e}')
    return True
def remove_entry(self, jid, *, timeout=None):
    """Request removal of the roster entry identified by the given bare
    `jid`.

    If the entry currently has any subscription state, the server will
    send the corresponding unsubscribing presence stanzas. `timeout` is
    the maximum time in seconds to wait for a reply from the server.

    This may raise arbitrary :class:`.errors.XMPPError` exceptions if the
    server replies with an error and also any kind of connection error if
    the connection gets fatally terminated while waiting for a response.
    """
    # Roster removal is a roster-set IQ carrying a single item with
    # subscription="remove" (RFC 6121).
    yield from self.client.send(
        stanza.IQ(
            structs.IQType.SET,
            payload=roster_xso.Query(items=[
                roster_xso.Item(
                    jid=jid,
                    subscription="remove")
            ])
        ),
        timeout=timeout
    )
def compile(self, script, bare=False):
    '''Compile CoffeeScript code to JavaScript code.

    If ``bare`` is True, compile the JavaScript without the top-level
    function safety wrapper (like the coffee command).
    '''
    # Lazily build and cache the compiler context on first use.
    try:
        context = self._context
    except AttributeError:
        context = self._context = self._runtime.compile(self._compiler_script)
    return context.call("CoffeeScript.compile", script, {'bare': bare})
def reorient_image(image, axis1, axis2=None, doreflection=False, doscale=0, txfn=None):
    """Align image along a specified axis.

    ANTsR function: `reorientImage`

    Arguments
    ---------
    image : ANTsImage
        image to reorient
    axis1 : list/tuple of integers
        vector of size dim, might need to play w/ axis sign
    axis2 : list/tuple of integers
        vector of size dim for 3D
    doreflection : boolean
        whether to reflect
    doscale : scalar value
        1 allows automated estimate of scaling
    txfn : string
        file name for transformation; a temp .mat file if omitted

    Returns
    -------
    dict with 'reoimage' (the reoriented ANTsImage) and 'txfn'

    Example
    -------
    >>> import ants
    >>> image = ants.image_read(ants.get_ants_data('r16'))
    >>> ants.reorient_image(image, (1, 0))
    """
    # The native routine works on float pixels; remember the original type
    # so the result can be converted back.
    inpixeltype = image.pixeltype
    if image.pixeltype != 'float':
        image = image.clone('float')
    axis_was_none = False
    if axis2 is None:
        axis_was_none = True
        axis2 = [0] * image.dimension
    axis1 = np.array(axis1)
    axis2 = np.array(axis2)
    # Normalize and negate, then truncate to int.
    # NOTE(review): astype('int') truncates toward zero, so any normalized
    # component strictly between -1 and 1 becomes 0 -- presumably intended
    # to keep only cardinal directions; confirm against ANTsR reorientImage.
    axis1 = axis1 / np.sqrt(np.sum(axis1 * axis1)) * (-1)
    axis1 = axis1.astype('int')
    if not axis_was_none:
        axis2 = axis2 / np.sqrt(np.sum(axis2 * axis2)) * (-1)
        axis2 = axis2.astype('int')
    else:
        axis2 = np.array([0] * image.dimension).astype('int')
    if txfn is None:
        txfn = mktemp(suffix='.mat')
    # Coerce doreflection/doscale to per-dimension lists.
    if isinstance(doreflection, tuple):
        doreflection = list(doreflection)
    if not isinstance(doreflection, list):
        doreflection = [doreflection]
    if isinstance(doscale, tuple):
        doscale = list(doscale)
    if not isinstance(doscale, list):
        doscale = [doscale]
    if len(doreflection) == 1:
        doreflection = [doreflection[0]] * image.dimension
    if len(doscale) == 1:
        doscale = [doscale[0]] * image.dimension
    # Native call writes the computed transform to txfn.
    libfn = utils.get_lib_fn('reorientImage%s' % image._libsuffix)
    libfn(image.pointer, txfn, axis1.tolist(), axis2.tolist(), doreflection, doscale)
    image2 = apply_transforms(image, image, transformlist=[txfn])
    if image.pixeltype != inpixeltype:
        image2 = image2.clone(inpixeltype)
    return {'reoimage': image2, 'txfn': txfn}
def stop_Note(self, note, channel=1):
    """Stop a note on a channel.

    If Note.channel is set, it will take precedence over the channel
    argument given here.
    """
    if hasattr(note, 'channel'):
        channel = note.channel
    midi_note = int(note) + 12
    midi_channel = int(channel)
    self.stop_event(midi_note, midi_channel)
    # Integer-keyed notification carries the shifted note number; the
    # Note-keyed one carries the original note object.
    self.notify_listeners(self.MSG_STOP_INT,
                          {'channel': midi_channel, 'note': midi_note})
    self.notify_listeners(self.MSG_STOP_NOTE,
                          {'channel': midi_channel, 'note': note})
    return True
def _select_options(pat):
    """Return a list of registered option keys matching `pat`.

    An exact key match returns just that key; the reserved pattern "all"
    returns every registered key; otherwise `pat` is treated as a
    case-insensitive regular expression.
    """
    # short-circuit for exact key
    if pat in _registered_options:
        return [pat]

    keys = sorted(_registered_options.keys())
    if pat == 'all':  # reserved key
        return keys
    return [key for key in keys if re.search(pat, key, re.I)]
def interpret(self, infile):
    """Process a file of rest and return a list of record dicts."""
    # Drain the record generator into a concrete list.
    return list(self.generate_records(infile))
def compare(s1, s2, **kwargs):
    """Compares two strings and returns their similarity.

    :param s1: first string
    :param s2: second string
    :param kwargs: additional keyword arguments passed to __init__.
    :return: similarity between 0.0 and 1.0.

    >>> from ngram import NGram
    >>> NGram.compare('spa', 'spam')
    0.375
    >>> NGram.compare('ham', 'bam')
    0.25
    >>> NGram.compare('spam', 'pam') #N=2
    0.375
    >>> NGram.compare('ham', 'ams', N=1)
    0.5
    """
    if s1 is None or s2 is None:
        # Two missing strings count as identical; one missing is
        # maximally dissimilar.
        return 1.0 if s1 == s2 else 0.0
    matches = NGram([s1], **kwargs).search(s2)
    if not matches:
        # No shared n-grams at all.
        return 0.0
    return matches[0][1]
def walk_regularity_symmetry(self, data_frame):
    """Extract the step and stride regularity and also walk symmetry.

    :param data_frame: The data frame. It should have x, y, and z columns.
    :type data_frame: pandas.DataFrame
    :return step_regularity: Regularity of steps on [x, y, z] coordinates,
        defined as the consistency of the step-to-step pattern.
    :rtype step_regularity: numpy.ndarray
    :return stride_regularity: Regularity of stride on [x, y, z] coordinates,
        defined as the consistency of the stride-to-stride pattern.
    :rtype stride_regularity: numpy.ndarray
    :return walk_symmetry: Symmetry of walk on [x, y, z] coordinates,
        defined as the difference between step and stride regularity.
    :rtype walk_symmetry: numpy.ndarray
    """
    def _symmetry(v):
        # Peak-pick the autocorrelation; the second and third maxima are
        # used as the step and stride regularity values respectively.
        # NOTE(review): assumes peakdet finds at least 3 maxima --
        # an IndexError is raised otherwise; confirm upstream guarantees.
        maxtab, _ = peakdet(v, self.delta)
        return maxtab[1][1], maxtab[2][1]

    step_regularity_x, stride_regularity_x = _symmetry(autocorrelation(data_frame.x))
    step_regularity_y, stride_regularity_y = _symmetry(autocorrelation(data_frame.y))
    step_regularity_z, stride_regularity_z = _symmetry(autocorrelation(data_frame.z))

    # Symmetry is the per-axis step/stride regularity difference.
    symmetry_x = step_regularity_x - stride_regularity_x
    symmetry_y = step_regularity_y - stride_regularity_y
    symmetry_z = step_regularity_z - stride_regularity_z

    step_regularity = np.array([step_regularity_x, step_regularity_y, step_regularity_z])
    stride_regularity = np.array([stride_regularity_x, stride_regularity_y, stride_regularity_z])
    walk_symmetry = np.array([symmetry_x, symmetry_y, symmetry_z])
    return step_regularity, stride_regularity, walk_symmetry
def is_address_executable_and_writeable(self, address):
    """
    Determines if an address belongs to a commited, writeable and
    executable page. The page may or may not have additional permissions.

    Looking for writeable and executable pages is important when
    exploiting a software vulnerability.

    @note: Returns always C{False} for kernel mode addresses.

    @type  address: int
    @param address: Memory address to query.

    @rtype:  bool
    @return: C{True} if the address belongs to a commited, writeable and
        executable page.

    @raise WindowsError: An exception is raised on error.
    """
    try:
        mbi = self.mquery(address)
    except WindowsError:
        # Py2/Py3-compatible way of grabbing the active exception.
        e = sys.exc_info()[1]
        if e.winerror == win32.ERROR_INVALID_PARAMETER:
            # VirtualQuery rejects kernel-mode addresses with this code.
            return False
        raise
    return mbi.is_executable_and_writeable()
def get_asset_tarball(asset_name, src_dir, dest_project, dest_folder, json_out):
    """
    If the src_dir contains a "resources" directory its contents are
    archived and the archived file is uploaded to the platform.

    Returns the platform file id of the uploaded archive (as returned by
    dx_upload), or None when src_dir has no "resources" directory.
    """
    if os.path.isdir(os.path.join(src_dir, "resources")):
        # Build the archive in a private temp dir so concurrent builds
        # cannot collide.
        temp_dir = tempfile.mkdtemp()
        try:
            resource_file = os.path.join(temp_dir, asset_name + "_resources.tar.gz")
            # -C changes into resources/ so the archive holds its *contents*,
            # not the resources/ directory itself.
            cmd = ["tar", "-czf", resource_file, "-C", os.path.join(src_dir, "resources"), "."]
            subprocess.check_call(cmd)
            file_id = dx_upload(resource_file, dest_project, dest_folder, json_out)
            return file_id
        finally:
            # Always remove the temp archive, even if tar or upload failed.
            shutil.rmtree(temp_dir)
def _button_delete_clicked(self):
    """Delete the currently selected colormap and refresh the list."""
    selected = str(self._combobox_cmaps.currentText())
    self.delete_colormap(selected)
    # Clear the editable text and rebuild the combobox entries.
    self._combobox_cmaps.setEditText("")
    self._load_cmap_list()
def read_secret(path, key=None):
    '''
    Return the value of key at path in vault, or entire secret

    Jinja Example:

    .. code-block:: jinja

        my-secret: {{ salt['vault'].read_secret('secret/my/secret', 'some-key') }}

    .. code-block:: jinja

        {% set supersecret = salt['vault'].read_secret('secret/my/secret') %}
        secrets:
            first: {{ supersecret.first }}
            second: {{ supersecret.second }}
    '''
    log.debug('Reading Vault secret for %s at %s', __grains__['id'], path)
    try:
        response = __utils__['vault.make_request']('GET', 'v1/{0}'.format(path))
        if response.status_code != 200:
            response.raise_for_status()
        data = response.json()['data']
        # Whole secret unless a single key was requested.
        return data if key is None else data[key]
    except Exception as err:
        # Best-effort: never let a Vault failure break template rendering.
        log.error('Failed to read secret! %s: %s', type(err).__name__, err)
        return None
def binary_operation_math(self, rule, left, right, **kwargs):
    """
    Implementation of :py:func:`pynspect.traversers.RuleTreeTraverser.binary_operation_math` interface.
    """
    # Delegate straight to the evaluator with the rule's operation token.
    return self.evaluate_binop_math(rule.operation, left, right, **kwargs)
def clean(self, config=True, catalog=False, check=False):
    """Remove the generated SExtractor files (if any).

    If config is True, remove generated configuration files.
    If catalog is True, remove the output catalog.
    If check is True, remove output check image.

    Missing files are ignored; removal is best-effort.
    """
    def _remove(path):
        # BUG FIX: the original wrapped *all* unlinks in a single
        # try/except OSError, so the first missing file silently aborted
        # removal of every remaining file. Suppress errors per-file
        # instead so each removal is attempted independently.
        try:
            os.unlink(path)
        except OSError:
            pass

    if config:
        for key in ('FILTER_NAME', 'PARAMETERS_NAME', 'STARNNW_NAME', 'CONFIG_FILE'):
            _remove(self.config[key])
    if catalog:
        _remove(self.config['CATALOG_NAME'])
    if check:
        _remove(self.config['CHECKIMAGE_NAME'])
def _prepare_init_params_from_job_description(cls, job_details):
    """Convert a transform job description into constructor kwargs.

    Args:
        job_details (dict): the returned job details from a
            describe_transform_job API call.

    Returns:
        dict: The transformed init_params
    """
    resources = job_details['TransformResources']
    output = job_details['TransformOutput']
    # Required fields are indexed directly; optional ones use .get() so a
    # missing key becomes None.
    return {
        'model_name': job_details['ModelName'],
        'instance_count': resources['InstanceCount'],
        'instance_type': resources['InstanceType'],
        'volume_kms_key': resources.get('VolumeKmsKeyId'),
        'strategy': job_details.get('BatchStrategy'),
        'assemble_with': output.get('AssembleWith'),
        'output_path': output['S3OutputPath'],
        'output_kms_key': output.get('KmsKeyId'),
        'accept': output.get('Accept'),
        'max_concurrent_transforms': job_details.get('MaxConcurrentTransforms'),
        'max_payload': job_details.get('MaxPayloadInMB'),
        'base_transform_job_name': job_details['TransformJobName'],
    }
def is_progressive(image):
    """Check to see if an image is progressive."""
    if not isinstance(image, Image.Image):
        # Can only check PIL images for progressive encoding.
        return False
    info = image.info
    # Some encoders report 'progression' instead of 'progressive'.
    return 'progressive' in info or 'progression' in info
def reject(self):
    """Handle ESC key by asking the running controller to stop."""
    if not self.controller.is_running:
        return
    self.info(self.tr("Stopping.."))
    self.controller.is_running = False
def evaluation_termination(population, num_generations, num_evaluations, args):
    """Return True if the number of function evaluations meets or exceeds a maximum.

    Compares the number of completed candidate evaluations against
    ``args['max_evaluations']``; when that key is absent it is set (and
    defaults) to ``len(population)``.

    .. Arguments:
       population -- the population of Individuals
       num_generations -- the number of elapsed generations
       num_evaluations -- the number of candidate solution evaluations
       args -- a dictionary of keyword arguments

    Optional keyword arguments in args:

    - *max_evaluations* -- the maximum candidate solution evaluations
      (default len(population))
    """
    # setdefault intentionally writes the default back into args so later
    # calls observe the same limit.
    cap = args.setdefault('max_evaluations', len(population))
    return num_evaluations >= cap
def to_json(self):
    """Serialize to a dictionary of the location and its design days."""
    serialized_days = [day.to_json() for day in self.design_days]
    return {'location': self.location.to_json(),
            'design_days': serialized_days}
def map_texture_to_surface(texture, surface):
    """Returns values on a surface for points on a texture.

    Args:
        texture (texture): the texture to trace over the surface
        surface (surface): the surface to trace along

    Returns:
        an array of surface heights for each point in the texture. Line
        separators (i.e. values that are ``nan`` in the texture) will be
        ``nan`` in the output, so the output will have the same dimensions
        as the x/y axes in the input texture.
    """
    xs, ys = texture
    height, width = surface.shape
    # Scale unit-square coordinates into pixel indices; the 1e-9 nudge keeps
    # coordinates at exactly 1.0 from landing one cell past the edge, and
    # clip guards against out-of-range input.
    cols = np.clip((width * xs - 1e-9).astype(np.int32), 0, width - 1)
    rows = np.clip((height * ys - 1e-9).astype(np.int32), 0, height - 1)
    return surface[rows, cols]
def get_arg_names(target) -> typing.List[str]:
    """Gets the list of named arguments for the target function.

    :param target: Function for which the argument names will be retrieved
    :return: positional argument names, then the ``*args`` name (if any),
        then keyword-only names, then the ``**kwargs`` name (if any).
        A leading ``self``/``cls`` is stripped. Objects without a
        ``__code__`` attribute yield an empty list.
    """
    # BUG FIX: getattr() without a default raises AttributeError for
    # objects that have no __code__, which made the `is None` guard below
    # unreachable. Supply None so such objects return [] as intended.
    code = getattr(target, '__code__', None)
    if code is None:
        return []

    arg_count = code.co_argcount
    kwarg_count = code.co_kwonlyargcount
    args_index = get_args_index(target)
    kwargs_index = get_kwargs_index(target)

    arg_names = list(code.co_varnames[:arg_count])
    if args_index != -1:
        # Variadic *args slot.
        arg_names.append(code.co_varnames[args_index])
    # Keyword-only names follow the positional slots in co_varnames.
    arg_names += list(code.co_varnames[arg_count:(arg_count + kwarg_count)])
    if kwargs_index != -1:
        # Variadic **kwargs slot.
        arg_names.append(code.co_varnames[kwargs_index])

    if arg_names and arg_names[0] in ('self', 'cls'):
        # Drop the implicit receiver for methods. (The original also
        # decremented arg_count here, but that value was never read again.)
        arg_names.pop(0)
    return arg_names
def _validate ( self , writing = False ) : """Verify that the box obeys the specifications ."""
# channel type and association must be specified . if not ( ( len ( self . index ) == len ( self . channel_type ) ) and ( len ( self . channel_type ) == len ( self . association ) ) ) : msg = ( "The length of the index ({index}), channel_type " "({channel_type}), and association ({association}) inputs " "must be the same." ) msg = msg . format ( index = len ( self . index ) , channel_type = len ( self . channel_type ) , association = len ( self . association ) ) self . _dispatch_validation_error ( msg , writing = writing ) # channel types must be one of 0 , 1 , 2 , 65535 if any ( x not in [ 0 , 1 , 2 , 65535 ] for x in self . channel_type ) : msg = ( "channel_type specified as {channel_type}, but all values " "must be in the set of\n\n" " 0 - colour image data for associated color\n" " 1 - opacity\n" " 2 - premultiplied opacity\n" " 65535 - unspecified\n" ) msg = msg . format ( channel_type = self . channel_type ) self . _dispatch_validation_error ( msg , writing = writing )
def go_to_marker(self, row, col, table_type):
    """Move to point in time marked by the marker.

    Parameters
    ----------
    row : QtCore.int
    col : QtCore.int
    table_type : str
        'dataset' table or 'annot' table, it works on either
    """
    # Pick start/end times from whichever table was clicked.
    if table_type == 'dataset':
        marker_time = self.idx_marker.property('start')[row]
        marker_end_time = self.idx_marker.property('end')[row]
    else:
        marker_time = self.idx_annot_list.property('start')[row]
        marker_end_time = self.idx_annot_list.property('end')[row]

    window_length = self.parent.value('window_length')
    if self.parent.traces.action['centre_event'].isChecked():
        # Centre the event within the display window.
        window_start = (marker_time + marker_end_time - window_length) / 2
    else:
        # Snap to the window boundary that contains the event start.
        window_start = floor(marker_time / window_length) * window_length
    self.parent.overview.update_position(window_start)

    if table_type == 'annot':
        # Highlight the matching annotation in the traces view.
        for annot in self.parent.traces.idx_annot:
            if annot.marker.x() == marker_time:
                self.parent.traces.highlight_event(annot)
                break
def all_subnets_shorter_prefix(ip_net, cidr, include_default=False):
    """Function to return every subnet a ip can belong to with a shorter prefix

    Args:
        ip_net: Unicast or Multicast IP address or subnet in the following
            format 192.168.1.1, 239.1.1.1
        cidr: CIDR value of 0 to 32
        include_default: If you want the list to include the default route
            set to True

    Returns:
        returns a list of subnets

    """
    subnets_list = list()
    # CONSISTENCY FIX: the original duplicated the whole loop body in two
    # branches that differed only in the loop bound. Unify them: including
    # the default route simply lets the walk reach prefix length 0.
    lowest_cidr = 0 if include_default else 1
    while int(cidr) >= lowest_cidr:
        try:
            subnets_list.append('%s/%s' % (whole_subnet_maker(ip_net, cidr), cidr))
        except Exception as e:
            # Best-effort: log and keep walking up the prefix lengths.
            LOGGER.critical('Function all_subnets_shorter_prefix {item}'.format(item=e))
        cidr = str(int(cidr) - 1)
    return subnets_list
def close(self, reason=None):
    """Stop consuming messages and perform an orderly shutdown.

    If ``reason`` is None, then this is considered a regular close.
    """
    with self._closing:
        if self._closed:
            # Idempotent: a second close is a no-op.
            return
        # Closing the socket unblocks the consumer thread...
        self._websocket.close()
        # ...then wait for it to exit before dropping the references.
        self._consumer.join()
        self._consumer = None
        self._websocket = None
        self._closed = True
        # Notify interested parties that shutdown has completed.
        for cb in self._close_callbacks:
            cb(self, reason)
def get_gemeente_by_id(self, id):
    '''
    Retrieve a `gemeente` by the crab id.

    :param integer id: The CRAB id of the gemeente.
    :rtype: :class:`Gemeente`
    :raises GatewayResourceNotFoundException: when no gemeente exists
        for the given id.
    '''
    def creator():
        # Fetch the raw record from the CRAB webservice.
        res = crab_gateway_request(self.client, 'GetGemeenteByGemeenteId', id)
        # IDIOM FIX: compare to None with `is`, not `==`.
        if res is None:
            raise GatewayResourceNotFoundException()
        return Gemeente(
            res.GemeenteId,
            res.GemeenteNaam,
            res.NisGemeenteCode,
            Gewest(res.GewestId),
            res.TaalCode,
            (res.CenterX, res.CenterY),
            (res.MinimumX, res.MinimumY, res.MaximumX, res.MaximumY),
            Metadata(
                res.BeginDatum,
                res.BeginTijd,
                self.get_bewerking(res.BeginBewerking),
                self.get_organisatie(res.BeginOrganisatie)
            )
        )

    if self.caches['long'].is_configured:
        # Gemeente data changes rarely, so it lives in the long-lived cache.
        key = 'GetGemeenteByGemeenteId#%s' % id
        gemeente = self.caches['long'].get_or_create(key, creator)
    else:
        gemeente = creator()
    gemeente.set_gateway(self)
    return gemeente
def _daemon(self, please_stop, only_coverage_revisions=False):
    '''
    Runs continuously to prefill the temporal and annotations table
    with the coverage revisions*.

    * A coverage revision is a revision which has had code coverage run on it.

    :param please_stop: Used to stop the daemon
    :param only_coverage_revisions: when True, only advance frontiers to
        revisions that had ccov/jsdcov coverage runs (looked up in
        ActiveData).
    :return: None
    '''
    while not please_stop:
        # Get all known files and their latest revisions on the frontier
        files_n_revs = self.conn.get("SELECT file, revision FROM latestFileMod")

        # Split these files into groups of revisions to make it
        # easier to update them. If we group them together, we
        # may end up updating groups that are new back to older
        # revisions.
        revs = {rev: [] for rev in set([file_n_rev[1] for file_n_rev in files_n_revs])}
        for file_n_rev in files_n_revs:
            revs[file_n_rev[1]].append(file_n_rev[0])

        # Go through each frontier and update it
        ran_changesets = False
        coverage_revisions = None
        for frontier in revs:
            if please_stop:
                return

            files = revs[frontier]

            # Go through changeset logs until we find the last
            # known frontier for this revision group.
            csets = []
            final_rev = ''
            found_last_frontier = False
            Log.note("Searching for frontier: {{frontier}} ", frontier=frontier)
            Log.note("HG URL: {{url}}", url=self.hg_url / self.config.hg.branch / 'rev' / frontier)
            while not found_last_frontier:
                # Get a changelog page; final_rev pages backwards in time.
                clog_url = self.hg_url / self.config.hg.branch / 'json-log' / final_rev
                try:
                    clog_obj = http.get_json(clog_url, retry=RETRY)
                except Exception as e:
                    Log.error("Unexpected error getting changset-log for {{url}}", url=clog_url, error=e)

                cset = ''
                still_looking = True
                # For each changeset/node
                for clog_cset in clog_obj['changesets']:
                    cset = clog_cset['node'][:12]
                    if cset == frontier:
                        still_looking = False
                        break
                    csets.append(cset)

                if not still_looking:
                    found_last_frontier = True
                # Continue paging from the last changeset we saw.
                final_rev = cset

            # No csets found means that we are already
            # at the latest revisions.
            if len(csets) == 0:
                continue

            # Get all the latest ccov and jsdcov revisions
            # (queried lazily, once per daemon pass).
            if (not coverage_revisions) and only_coverage_revisions:
                active_data_url = 'http://activedata.allizom.org/query'
                query_json = {
                    "limit": 1000,
                    "from": "task",
                    "where": {"and": [
                        {"in": {"build.type": ["ccov", "jsdcov"]}},
                        {"gte": {"run.timestamp": {"date": "today-day"}}},
                        {"eq": {"repo.branch.name": self.config.hg.branch}}
                    ]},
                    "select": [
                        {"aggregate": "min", "value": "run.timestamp"},
                        {"aggregate": "count"}
                    ],
                    "groupby": ["repo.changeset.id12"]
                }
                coverage_revisions_resp = http.post_json(active_data_url, retry=RETRY, data=query_json)
                coverage_revisions = [rev_arr[0] for rev_arr in coverage_revisions_resp.data]

            # Reverse changeset list and for each code coverage revision
            # found by going through the list from oldest to newest,
            # update _all known_ file frontiers to that revision.
            csets.reverse()
            prev_cset = frontier
            for cset in csets:
                if please_stop:
                    return
                if only_coverage_revisions:
                    if cset not in coverage_revisions:
                        continue
                if DEBUG:
                    Log.note("Moving frontier {{frontier}} forward to {{cset}}.", frontier=prev_cset, cset=cset)

                # Update files
                self.get_tuids_from_files(files, cset, going_forward=True)

                ran_changesets = True
                prev_cset = cset

        if not ran_changesets:
            # Nothing moved this pass: sleep until stopped or timeout.
            (please_stop | Till(seconds=DAEMON_WAIT_AT_NEWEST.seconds)).wait()
def listen_now_dismissed_items(self):
    """Get a listing of items dismissed from Listen Now tab."""
    response = self._call(mc_calls.ListenNowGetDismissedItems)
    # A missing 'items' key means nothing has been dismissed.
    return response.body.get('items', [])
def annotation_wrapper(annotation, doc=None):
    """Defines an annotation, which can be applied to attributes in a
    database model."""
    def decorator(attr):
        __cache__.setdefault(attr, []).append(annotation)
        # Also mark the annotation on the object itself. This will
        # fail if the object has a restrictive __slots__, but it's
        # required for some objects like Column because SQLAlchemy copies
        # them in subclasses, changing their hash and making them
        # undiscoverable via the cache.
        try:
            if not hasattr(attr, '_coaster_annotations'):
                setattr(attr, '_coaster_annotations', [])
            attr._coaster_annotations.append(annotation)
        except AttributeError:
            pass
        return attr

    # Expose the annotation's name on the decorator itself.
    decorator.__name__ = decorator.name = annotation
    decorator.__doc__ = doc
    return decorator
def deposit(
        self,
        beneficiary: Address,
        total_deposit: TokenAmount,
        block_identifier: BlockSpecification,
) -> None:
    """Deposit provided amount into the user-deposit contract
    to the beneficiary's account.

    :raises RaidenUnrecoverableError: when gas estimation fails or the
        transaction reverts on-chain.
    """
    token_address = self.token_address(block_identifier)
    token = Token(
        jsonrpc_client=self.client,
        token_address=token_address,
        contract_manager=self.contract_manager,
    )

    log_details = {
        'beneficiary': pex(beneficiary),
        'contract': pex(self.address),
        'total_deposit': total_deposit,
    }

    checking_block = self.client.get_checking_block()
    error_prefix = 'Call to deposit will fail'
    # Serialize deposits so concurrent calls cannot race on allowance.
    with self.deposit_lock:
        # Validate balances/allowance and compute the delta to deposit.
        amount_to_deposit, log_details = self._deposit_preconditions(
            total_deposit=total_deposit,
            beneficiary=beneficiary,
            token=token,
            block_identifier=block_identifier,
        )

        gas_limit = self.proxy.estimate_gas(
            checking_block,
            'deposit',
            to_checksum_address(beneficiary),
            total_deposit,
        )

        if gas_limit:
            # Gas estimation succeeded: actually send the transaction.
            error_prefix = 'Call to deposit failed'
            log.debug('deposit called', **log_details)
            transaction_hash = self.proxy.transact(
                'deposit',
                safe_gas_limit(gas_limit),
                to_checksum_address(beneficiary),
                total_deposit,
            )
            self.client.poll(transaction_hash)
            receipt_or_none = check_transaction_threw(self.client, transaction_hash)

        transaction_executed = gas_limit is not None
        # Short-circuit keeps receipt_or_none unread when estimation failed.
        if not transaction_executed or receipt_or_none:
            # Either estimation failed or the transaction reverted:
            # diagnose the cause and raise.
            if transaction_executed:
                block = receipt_or_none['blockNumber']
            else:
                block = checking_block

            self.proxy.jsonrpc_client.check_for_insufficient_eth(
                transaction_name='deposit',
                transaction_executed=transaction_executed,
                required_gas=GAS_REQUIRED_FOR_UDC_DEPOSIT,
                block_identifier=block,
            )

            msg = self._check_why_deposit_failed(
                token=token,
                amount_to_deposit=amount_to_deposit,
                total_deposit=total_deposit,
                block_identifier=block,
            )

            error_msg = f'{error_prefix}. {msg}'
            log.critical(error_msg, **log_details)
            raise RaidenUnrecoverableError(error_msg)

        log.info('deposit successful', **log_details)
def Restore(cls, state):
    """Unserialize this object."""
    # The binary payload is stored base64-encoded in the state dict.
    return ConfigEntry(
        SlotIdentifier.FromString(state.get('target')),
        state.get('var_id'),
        base64.b64decode(state.get('data')),
        state.get('valid'),
    )
def startproject ( project_name ) : """build a full status project"""
# the destination path dst_path = os . path . join ( os . getcwd ( ) , project_name ) start_init_info ( dst_path ) # create dst path _mkdir_p ( dst_path ) # create project tree os . chdir ( dst_path ) # create files init_code ( 'manage.py' , _manage_admin_code ) init_code ( 'requirement.txt' , _requirement_admin_code ) init_code ( 'config.py' , _config_sql_code ) # create app / app_path = os . path . join ( dst_path , 'app' ) _mkdir_p ( app_path ) # create files os . chdir ( app_path ) init_code ( 'models.py' , _models_admin_code ) init_code ( '__init__.py' , _init_admin_code ) # create templates and static css_path , templates_path = create_templates_static_files ( app_path ) # create css files os . chdir ( css_path ) init_code ( 'sign.css' , _auth_login_css_code ) # create main blueprint create_blueprint ( app_path , 'main' , _views_blueprint_code % ( 'main' , 'main' ) , _forms_basic_code , templates_path ) # create auth blueprint auth_templates_path = create_blueprint ( app_path , 'auth' , _auth_views_code , _auth_forms_code , templates_path ) # create auth templates files os . chdir ( auth_templates_path ) init_code ( 'login.html' , _auth_login_html_code ) # create admin site admin_path = os . path . join ( app_path , 'admin' ) _mkdir_p ( admin_path ) # create admin files os . chdir ( admin_path ) init_code ( '__init__.py' , '' ) init_code ( 'views.py' , _admin_views_code ) # create admin templates os . chdir ( templates_path ) admin_templates_path = os . path . join ( templates_path , 'admin' ) _mkdir_p ( admin_templates_path ) # create admin templates files os . chdir ( admin_templates_path ) init_code ( 'index.html' , _admin_index_html_code ) init_code ( 'logout.html' , _admin_logout_html_code ) init_done_info ( )
def Validate(self):
    """Check the method is well constructed."""
    check_name = self.check_id
    if not check_name:
        raise DefinitionError("Check has missing check_id value")
    if not self.method:
        raise DefinitionError("Check %s has no methods" % check_name)
    # Each method definition is validated in turn.
    ValidateMultiple(self.method, "Check %s has invalid method definitions" % check_name)
def upload_file(self, path=None, stream=None, name=None, **kwargs):
    """
    Uploads file to WeedFS

    I takes either path or stream and name and upload it
    to WeedFS server.

    Returns fid of the uploaded file.

    :param string path:
    :param string stream:
    :param string name:
    :rtype: string or None
    """
    # Extra kwargs are forwarded verbatim as query params on the assign
    # request. NOTE(review): values are not URL-encoded -- confirm callers
    # only pass URL-safe values.
    params = "&".join(["%s=%s" % (k, v) for k, v in kwargs.items()])
    url = "http://{master_addr}:{master_port}/dir/assign{params}".format(
        master_addr=self.master_addr,
        master_port=self.master_port,
        params="?" + params if params else '')
    # Ask the master to assign a fid and a volume server to post to.
    data = json.loads(self.conn.get_data(url))
    if data.get("error") is not None:
        # Master refused to assign a fid.
        return None
    post_url = "http://{url}/{fid}".format(
        url=data['publicUrl' if self.use_public_url else 'url'],
        fid=data['fid'])
    if path is not None:
        filename = os.path.basename(path)
        with open(path, "rb") as file_stream:
            res = self.conn.post_file(post_url, filename, file_stream)
    # we have file like object and filename
    elif stream is not None and name is not None:
        res = self.conn.post_file(post_url, name, stream)
    else:
        raise ValueError(
            "If `path` is None then *both* `stream` and `name` must not"
            " be None ")
    response_data = json.loads(res)
    # A 'size' field in the volume server's reply signals success.
    if "size" in response_data:
        return data.get('fid')
    return None
def transpose(self, *dims) -> 'DataArray':
    """Return a new DataArray object with transposed dimensions.

    Parameters
    ----------
    *dims : str, optional
        By default, reverse the dimensions. Otherwise, reorder the
        dimensions to this order.

    Returns
    -------
    transposed : DataArray
        The returned DataArray's array is transposed.

    Notes
    -----
    This operation returns a view of this array's data. It is lazy for
    dask-backed DataArrays but not for numpy-backed DataArrays -- the
    data will be fully loaded.

    See Also
    --------
    numpy.transpose
    Dataset.transpose
    """
    # Transpose the underlying Variable, then wrap it back up unchanged.
    return self._replace(self.variable.transpose(*dims))
def _fetch_atari_metrics(base_env):
    """Atari games have multiple logical episodes, one per life.

    However, for metrics reporting we count full episodes, all lives
    included. Returns a list of RolloutMetrics for completed full
    episodes, or None when any sub-env is not wrapped in MonitorEnv.
    """
    unwrapped = base_env.get_unwrapped()
    if not unwrapped:
        return None
    atari_out = []
    for u in unwrapped:
        monitor = get_wrapper_by_cls(u, MonitorEnv)
        if not monitor:
            # Not a monitored env: caller should fall back to defaults.
            return None
        for eps_rew, eps_len in monitor.next_episode_results():
            atari_out.append(RolloutMetrics(eps_len, eps_rew, {}, {}, {}))
    return atari_out
def record_state(serv_name, state, conn=None):
    """Persist a plugin instance's state in the RPC table.

    :param serv_name: <str> service name of plugin instance
    :param state: <dictionary> plugin state
    :param conn: <rethinkdb.DefaultConnection>
    :return: result of the update query
    """
    # Match rows for this service and overwrite their plugin state.
    return RPC.filter({SERVICE_KEY: serv_name}).update({PLUGIN_STATE_KEY: state}).run(conn)
def register(cls):
    """Register a given model in the registry."""
    entry = RegistryEntry(category=cls.category, namespace=cls.namespace, name=cls.name, cls=cls)
    # Guard against duplicates both by entry equality and by name lookup.
    already_known = entry in registry or exists_in_registry(cls.category, cls.namespace, cls.name)
    if already_known:
        log.warn("Class {0} already in registry".format(cls))
    else:
        registry.append(entry)
def insert_sort(node, target):
    """Insert node into sorted position in target tree.

    Uses sort function and language from target.
    """
    sort = target.sort
    lang = target.lang
    # Locale-aware comparison via ICU; fall back to the default locale
    # when the target has no language set.
    collator = Collator.createInstance(Locale(lang) if lang else Locale())
    for child in target.tree:
        # First child that sorts after node: insert right before it.
        if collator.compare(sort(child) or '', sort(node) or '') > 0:
            child.addprevious(node)
            break
    else:
        # No larger sibling found: node goes at the end.
        target.tree.append(node)
def replace_namespaced_horizontal_pod_autoscaler(self, name, namespace, body, **kwargs):
    """replace the specified HorizontalPodAutoscaler

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.replace_namespaced_horizontal_pod_autoscaler(name, namespace, body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: name of the HorizontalPodAutoscaler (required)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param V2beta1HorizontalPodAutoscaler body: (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :param str dry_run: When present, indicates that modifications should not be persisted.
    :param str field_manager: fieldManager is a name associated with the actor or entity that is making these changes.
    :return: V2beta1HorizontalPodAutoscaler
             If the method is called asynchronously,
             returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    # Both the async and sync paths delegate to the *_with_http_info
    # variant, which returns a request thread when async_req is set and
    # the parsed object otherwise.
    return self.replace_namespaced_horizontal_pod_autoscaler_with_http_info(name, namespace, body, **kwargs)
def run ( cmd , * , args = '' , timeout = 600 ) : """Execute a process : param cmd ( str ) : name of the executable : param args ( str , optional ) : arbitrary arguments : param timeout ( int , optional ) : Execution timeout : raises OSError : if the execution of cmd fails"""
# type checks utils . chkstr ( cmd , 'cmd' ) utils . chkstr ( args , 'args' ) # execute the command r = envoy . run ( "{cmd} {args}" . format ( cmd = cmd , args = args ) , timeout = timeout ) # log stdout log . msg_debug ( "{cmd} > {stdout}" . format ( cmd = cmd , stdout = r . std_out . strip ( ) ) ) # In this way , we will know what went wrong on execution if r . status_code : log . msg_err ( "{cmd} > {stderr}" . format ( cmd = cmd , stderr = r . std_err . strip ( ) ) ) raise OSError ( "[{cmd}] execution failed!" . format ( cmd = cmd ) )
def i2m(self, pkt, i):
    """"Internal" (IP as bytes, mask as int) to "machine" representation."""
    mask, ip = i
    packed_ip = socket.inet_aton(ip)
    # One prefix-length byte, followed by only as many address bytes as
    # the mask requires.
    return struct.pack(">B", mask) + packed_ip[:self.mask2iplen(mask)]
def auth_add_creds(self, username, password, pwtype='plain'):
    """
    Add a valid set of credentials to be accepted for authentication.
    Calling this function will automatically enable requiring
    authentication. Passwords can be provided in either plaintext or as
    a hash by specifying the hash type in the *pwtype* argument.

    :param str username: The username of the credentials to be added.
    :param password: The password data of the credentials to be added.
    :type password: bytes, str
    :param str pwtype: The type of the *password* data, (plain, md5, sha1, etc.).
    """
    if not isinstance(password, (bytes, str)):
        raise TypeError("auth_add_creds() argument 2 must be bytes or str, not {0}".format(type(password).__name__))
    pwtype = pwtype.lower()
    if not pwtype in ('plain', 'md5', 'sha1', 'sha256', 'sha384', 'sha512'):
        raise ValueError('invalid password type, must be \'plain\', or supported by hashlib')
    if self.__config.get('basic_auth') is None:
        # First credential added: switch basic authentication on.
        self.__config['basic_auth'] = {}
        self.logger.info('basic authentication has been enabled')
    if pwtype != 'plain':
        # hashlib's attribute name differs across Python versions.
        algorithms_available = getattr(hashlib, 'algorithms_available', ()) or getattr(hashlib, 'algorithms', ())
        if pwtype not in algorithms_available:
            raise ValueError('hashlib does not support the desired algorithm')
        # only md5 and sha1 hex for backwards compatibility
        if pwtype == 'md5' and len(password) == 32:
            password = binascii.unhexlify(password)
        elif pwtype == 'sha1' and len(password) == 40:
            password = binascii.unhexlify(password)
        if not isinstance(password, bytes):
            password = password.encode('UTF-8')
        # A digest of the wrong length could never match at auth time.
        if len(hashlib.new(pwtype, b'foobar').digest()) != len(password):
            raise ValueError('the length of the password hash does not match the type specified')
    self.__config['basic_auth'][username] = {'value': password, 'type': pwtype}
def _find_entry_call(self, frames):
    """Attempt to auto-discover the correct frame.

    Scans *frames* and remembers the index of the last frame whose file path
    matches this module's source file, defaulting to the first frame when no
    match is found.
    """
    src_path = self._get_src_file(self.modname)
    match_index = 0
    for index, frame in enumerate(frames):
        # frame[1] is the frame's file path; keep the last match
        if frame[1] == src_path:
            match_index = index
    return Call(frames[match_index])
def get_list_qtype(array):
    '''Finds out a corresponding qtype for a specified `QList`/`numpy.ndarray` instance.

    :Parameters:
     - `array` (`QList` or `numpy.ndarray`) - array to be checked

    :returns: `integer` - qtype matching the specified array object
    '''
    if not isinstance(array, numpy.ndarray):
        raise ValueError('array parameter is expected to be of type: numpy.ndarray, got: %s' % type(array))

    # a QList already carries its qtype in metadata
    if isinstance(array, QList):
        return -abs(array.meta.qtype)

    # single-character dtypes map to QCHAR
    qtype = QCHAR if str(array.dtype) in ('|S1', '<U1', '>U1', '|U1') else None
    if qtype is None:
        qtype = Q_TYPE.get(array.dtype.type, None)
    if qtype is None and array.dtype.type in (numpy.datetime64, numpy.timedelta64):
        qtype = TEMPORAL_PY_TYPE.get(str(array.dtype), None)
    if qtype is None:
        # determine type based on first element of the numpy array
        qtype = Q_TYPE.get(type(array[0]), QGENERAL_LIST)
    return qtype
def loglike(self, y, f):
    r"""Poisson log likelihood.

    Parameters
    ----------
    y: ndarray
        array of integer targets
    f: ndarray
        latent function from the GLM prior (:math:`\mathbf{f} = \boldsymbol\Phi \mathbf{w}`)

    Returns
    -------
    logp: ndarray
        the log likelihood of each y given each f under this likelihood.
    """
    y, f = np.broadcast_arrays(y, f)
    # choose the transfer (inverse-link) function: exp or softplus
    if self.tranfcn == 'exp':
        rate = np.exp(f)
        lograte = f
    else:
        rate = softplus(f)
        lograte = np.log(rate)
    # Poisson log pmf: y*log(g) - g - log(y!)
    return y * lograte - rate - gammaln(y + 1)
def iteritems(self, key_type=None, return_all_keys=False):
    """Returns an iterator over the dictionary's (key, value) pairs.

    @param key_type if specified, iterator will be returning only
        (key, value) pairs for this type of key. Otherwise (if not
        specified) ((keys, ...), value) i.e. (tuple of keys, values)
        pairs for all items in this dictionary will be generated.
    @param return_all_keys if set to True - tuple of keys is returned
        instead of a key of this type.
    """
    # No type filter: yield every (key-tuple, value) pair directly.
    if key_type is None:
        for item in self.items_dict.items():
            yield item
        return
    used_keys = set()
    # Per-type registries appear to be stored in __dict__ under
    # str(key_type) — TODO confirm against the class constructor.
    key = str(key_type)
    if key in self.__dict__:
        # NOTE: the loop rebinds `key` and `keys`; `keys` is the full
        # key-tuple shared by several typed lookups, so de-duplicate it.
        for key, keys in self.__dict__[key].items():
            if keys in used_keys:
                continue
            used_keys.add(keys)
            value = self.items_dict[keys]
            if not return_all_keys:
                # Reduce the tuple to only the keys of the requested type.
                keys = tuple(k for k in keys if isinstance(k, key_type))
            yield keys, value
def search(self, q, search_handler=None, **kwargs):
    """Performs a search and returns the results.

    Requires a ``q`` for a string version of the query to run.

    Optionally accepts ``**kwargs`` for additional options to be passed
    through the Solr URL.

    Returns ``self.results_cls`` class object (defaults to
    ``pysolr.Results``)

    Usage::

        # All docs.
        results = solr.search('*:*')

        # Search with highlighting.
        results = solr.search('ponies', **{'hl': 'true', 'hl.fragsize': 10,
    """
    query_params = dict(kwargs)
    query_params['q'] = q
    raw_response = self._select(query_params, handler=search_handler)
    decoded = self.decoder.decode(raw_response)
    # cover both cases: there is no response key or value is None
    num_found = (decoded.get('response', {}) or {}).get('numFound', 0)
    self.log.debug("Found '%s' search results.", num_found)
    return self.results_cls(decoded)
def access_token(self):
    """Stores always valid OAuth2 access token.

    Note:
        Accessing this property may result in HTTP request.

    Returns:
        str
    """
    # Fast path: a token exists and has not yet expired.
    # (Token check comes first to preserve short-circuit behavior.)
    if self._access_token is not None and self.expiration_time > int(time.time()):
        return self._access_token
    # Otherwise fetch a fresh token from the authorization server.
    response = self.make_access_request()
    self._access_token = response.json()['access_token']
    return self._access_token
def parse_pylint_output(output):
    """Parses pylint output, counting number of errors, conventions, etc

    Each entry's first character (the pylint message category, e.g. 'E',
    'W', 'C') is tallied.

    :param output: output list generated by run_pylint()
    :return: collections.Counter mapping category character to count
    """
    return Counter(line[0] for line in output)
def pk_field(self):
    '''Name of the primary key field as retrieved from rethinkdb table
    metadata, 'id' by default. Should not be overridden. Override
    `table_create` if you want to use a nonstandard field as the primary key.
    '''
    # Lazily resolve and cache the primary key name on first access.
    if not self._pk:
        try:
            pk = self.rr.db('rethinkdb').table('table_config').filter({
                'db': self.rr.dbname, 'name': self.table
            }).get_field('primary_key')[0].run()
            self._pk = pk
        except Exception as e:
            # bug fix: the arguments were previously passed logging-style
            # (comma-separated), so the %s placeholders were never
            # interpolated into the exception message.
            raise Exception(
                'problem determining primary key for table %s.%s: %s' % (
                    self.rr.dbname, self.table, e))
    return self._pk
def cache_meta(request, cache_key, start_index=0):
    """Inspect request for objects in _ultracache and set appropriate entries
    in Django's cache.

    For every (content-type id, object pk) recorded on ``request._ultracache``
    (from ``start_index`` onward) this maintains four reverse indexes in the
    cache, so that later object changes can invalidate the right entries:
    per-object cache keys, per-object paths, per-content-type cache keys and
    per-content-type paths. Finally the list of contributing objects is stored
    under ``cache_key + "-objs"``.
    """
    path = request.get_full_path()

    # todo: cache headers on the request since they never change during the
    # request.

    # Reduce headers to the subset as defined by the settings
    headers = OrderedDict()
    for k, v in sorted(request.META.items()):
        if (k == "HTTP_COOKIE") and CONSIDER_COOKIES:
            # Only cookies listed in CONSIDER_COOKIES participate in the key
            cookie = SimpleCookie()
            cookie.load(v)
            headers["cookie"] = "; ".join([
                "%s=%s" % (k, morsel.value) for k, morsel
                in sorted(cookie.items()) if k in CONSIDER_COOKIES
            ])
        elif k.startswith("HTTP_"):
            # Normalize HTTP_X_FOO -> x-foo before filtering
            k = k[5:].replace("_", "-").lower()
            if k in CONSIDER_HEADERS:
                headers[k] = v

    # Lists needed for cache.get_many
    to_set_get_keys = []
    to_set_paths_get_keys = []
    to_set_content_types_get_keys = []
    to_set_content_types_paths_get_keys = []

    # Dictionaries needed for cache.set_many
    to_set = {}
    to_set_paths = {}
    to_set_content_types = {}
    to_set_content_types_paths = {}

    to_delete = []
    to_set_objects = []

    for ctid, obj_pk in request._ultracache[start_index:]:
        # The object appears in these cache entries. If the object is modified
        # then these cache entries are deleted.
        key = "ucache-%s-%s" % (ctid, obj_pk)
        if key not in to_set_get_keys:
            to_set_get_keys.append(key)

        # The object appears in these paths. If the object is modified then any
        # caches that are read from when browsing to this path are cleared.
        key = "ucache-pth-%s-%s" % (ctid, obj_pk)
        if key not in to_set_paths_get_keys:
            to_set_paths_get_keys.append(key)

        # The content type appears in these cache entries. If an object of this
        # content type is created then these cache entries are cleared.
        key = "ucache-ct-%s" % ctid
        if key not in to_set_content_types_get_keys:
            to_set_content_types_get_keys.append(key)

        # The content type appears in these paths. If an object of this content
        # type is created then any caches that are read from when browsing to
        # this path are cleared.
        key = "ucache-ct-pth-%s" % ctid
        if key not in to_set_content_types_paths_get_keys:
            to_set_content_types_paths_get_keys.append(key)

        # A list of objects that contribute to a cache entry
        tu = (ctid, obj_pk)
        if tu not in to_set_objects:
            to_set_objects.append(tu)

    # todo: rewrite to handle absence of get_many

    # Pass 1: per-object cache-key index; append cache_key if missing.
    di = cache.get_many(to_set_get_keys)
    for key in to_set_get_keys:
        v = di.get(key, None)
        keep = []
        if v is not None:
            # reduce_list_size trims oversized lists; tossed keys are deleted
            keep, toss = reduce_list_size(v)
            if toss:
                to_set[key] = keep
                to_delete.extend(toss)
        if cache_key not in keep:
            if key not in to_set:
                to_set[key] = keep
            to_set[key] = to_set[key] + [cache_key]
    # Nothing actually changed: avoid a redundant set_many
    if to_set == di:
        to_set = {}

    # Pass 2: per-object path index; append [path, headers] if missing.
    di = cache.get_many(to_set_paths_get_keys)
    for key in to_set_paths_get_keys:
        v = di.get(key, None)
        keep = []
        if v is not None:
            keep, toss = reduce_list_size(v)
            if toss:
                to_set_paths[key] = keep
        if [path, headers] not in keep:
            if key not in to_set_paths:
                to_set_paths[key] = keep
            to_set_paths[key] = to_set_paths[key] + [[path, headers]]
    if to_set_paths == di:
        to_set_paths = {}

    # Pass 3: per-content-type cache-key index.
    di = cache.get_many(to_set_content_types_get_keys)
    for key in to_set_content_types_get_keys:
        v = di.get(key, None)
        keep = []
        if v is not None:
            keep, toss = reduce_list_size(v)
            if toss:
                to_set_content_types[key] = keep
                to_delete.extend(toss)
        if cache_key not in keep:
            if key not in to_set_content_types:
                to_set_content_types[key] = keep
            to_set_content_types[key] = to_set_content_types[key] + [cache_key]
    if to_set_content_types == di:
        to_set_content_types = {}

    # Pass 4: per-content-type path index.
    di = cache.get_many(to_set_content_types_paths_get_keys)
    for key in to_set_content_types_paths_get_keys:
        v = di.get(key, None)
        keep = []
        if v is not None:
            keep, toss = reduce_list_size(v)
            if toss:
                to_set_content_types_paths[key] = keep
        if [path, headers] not in keep:
            if key not in to_set_content_types_paths:
                to_set_content_types_paths[key] = keep
            to_set_content_types_paths[key] = to_set_content_types_paths[key] + [[path, headers]]
    if to_set_content_types_paths == di:
        to_set_content_types_paths = {}

    # Deletion must happen first because set may set some of these keys
    if to_delete:
        try:
            cache.delete_many(to_delete)
        except NotImplementedError:
            # backend without delete_many: fall back to one-by-one
            for k in to_delete:
                cache.delete(k)

    # Do one set_many
    di = {}
    di.update(to_set)
    del to_set
    di.update(to_set_paths)
    del to_set_paths
    di.update(to_set_content_types)
    del to_set_content_types
    di.update(to_set_content_types_paths)
    del to_set_content_types_paths

    if to_set_objects:
        di[cache_key + "-objs"] = to_set_objects

    if di:
        try:
            # 86400 seconds = 24h TTL for all index entries
            cache.set_many(di, 86400)
        except NotImplementedError:
            for k, v in di.items():
                cache.set(k, v, 86400)
def initialize_simulation(components: List, input_config: Mapping = None,
                          plugin_config: Mapping = None) -> InteractiveContext:
    """Construct a simulation from a list of components, component
    configuration, and a plugin configuration.

    The simulation context returned by this method still needs to be setup by
    calling its setup method. It is mostly useful for testing and debugging.

    Parameters
    ----------
    components
        A list of initialized simulation components. Corresponds to the
        components block of a model specification.
    input_config
        A nested dictionary with any additional simulation configuration
        information needed. Corresponds to the configuration block of a model
        specification.
    plugin_config
        A dictionary containing a description of any simulation plugins to
        include in the simulation. If you're using this argument, you're
        either deep in the process of simulation development or the
        maintainers have done something wrong. Corresponds to the plugins
        block of a model specification.

    Returns
    -------
        An initialized (but not set up) simulation context.
    """
    # Start from the default configuration and layer the user's config on top.
    configuration = build_simulation_configuration()
    configuration.update(input_config)
    plugins = PluginManager(plugin_config)
    return InteractiveContext(configuration, components, plugins)
def _ParseLayerConfigJSON(self, parser_mediator, file_object):
    """Extracts events from a Docker filesystem layer configuration file.

    The path of each filesystem layer config file is:
    DOCKER_DIR/graph/<layer_id>/json

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      file_object (dfvfs.FileIO): a file-like object.

    Raises:
      UnableToParseFile: when the file is not a valid layer config file.
    """
    # Read and decode the whole file, then parse it as JSON.
    file_content = file_object.read()
    file_content = codecs.decode(file_content, self._ENCODING)

    json_dict = json.loads(file_content)

    # 'docker_version' is used as the signature of a layer config file.
    if 'docker_version' not in json_dict:
        raise errors.UnableToParseFile(
            'not a valid Docker layer configuration file, missing '
            '\'docker_version\' key.')

    if 'created' in json_dict:
        # NOTE(review): assumes 'container_config' with a 'Cmd' list is
        # present whenever 'created' is — a missing/None 'Cmd' would raise
        # here; TODO confirm against the Docker layer config schema.
        layer_creation_command_array = [
            x.strip() for x in json_dict['container_config']['Cmd']]
        layer_creation_command = ' '.join(
            layer_creation_command_array).replace('\t', '')

        event_data = DockerJSONLayerEventData()
        event_data.command = layer_creation_command
        # The layer identifier is derived from the file's path.
        event_data.layer_id = self._GetIdentifierFromPath(parser_mediator)

        timestamp = timelib.Timestamp.FromTimeString(json_dict['created'])
        event = time_events.TimestampEvent(
            timestamp, definitions.TIME_DESCRIPTION_ADDED)
        parser_mediator.ProduceEventWithEventData(event, event_data)
def combine_slices(self, slices, tensor_shape, device=None):
    """Turns a set of slices into a single tensor.

    Args:
      slices: list of tf.Tensor with length self.size.
      tensor_shape: Shape.
      device: optional str. If absent, we use the devices of the slices.

    Returns:
      tf.Tensor.
    """
    # A scalar is fully replicated; any slice is the whole value.
    if tensor_shape.ndims == 0:
        return slices[0]

    ret = slices[:]
    tensor_layout = self.tensor_layout(tensor_shape)
    # Walk mesh dimensions, collapsing the slice list one mesh axis at a time.
    for mesh_dim, tensor_axis in zip(
            self.shape, tensor_layout.mesh_axis_to_tensor_axis(self.ndims)):
        slice_size = len(ret) // mesh_dim.size
        if tensor_axis is None:
            # This mesh dimension is replicated — keep one representative
            # per group and drop the duplicates.
            ret = ret[:slice_size]
        else:
            if device:
                devices = [device] * slice_size
            else:
                devices = [ret[i].device for i in xrange(slice_size)]
            # Gather, for each surviving position i, the mesh_dim.size
            # slices that must be concatenated along tensor_axis.
            concat_inputs = []
            for i in xrange(slice_size):
                concat_inputs.append(
                    [ret[i + slice_size * j] for j in xrange(mesh_dim.size)])
            ret = parallel(
                devices, tf.concat, concat_inputs,
                axis=[tensor_axis] * len(devices))
    # After processing every mesh dimension exactly one tensor remains.
    assert len(ret) == 1
    return ret[0]
def determine_actions(self, request, view):
    """For generic class based views we return information about
    the fields that are accepted for 'PUT' and 'POST' methods.
    """
    actions = {}
    writable_methods = {'PUT', 'POST'} & set(view.allowed_methods)
    for method in writable_methods:
        view.request = clone_request(request, method)
        try:
            # Test global permissions
            if hasattr(view, 'check_permissions'):
                view.check_permissions(view.request)
            # Test object permissions
            if method == 'PUT' and hasattr(view, 'get_object'):
                view.get_object()
        except (exceptions.APIException, PermissionDenied, Http404):
            # Not permitted: omit this method from the metadata.
            pass
        else:
            # If user has appropriate permissions for the view, include
            # appropriate metadata about the fields that should be supplied.
            serializer = view.get_serializer()
            actions[method] = self.get_serializer_info(serializer)
        finally:
            # Always restore the original request on the view.
            view.request = request
    return actions
def make_pairs_for_model(model_num=0):
    """Create a list of pairs of model nums; play every model nearby, then
    every other model after that, then every fifth, etc.

    The gaps between paired models follow the Fibonacci sequence
    (1, 2, 3, 5, 8, 13, ..., capped at 233), stopping once a gap would
    reach or exceed ``model_num``.

    Returns a list like [[N, N-1], [N, N-2], ..., [N, N-12], ..., [N, N-50]]
    (an empty list when ``model_num`` is 0).
    """
    if model_num == 0:
        # bug fix: previously returned None, breaking the documented
        # "returns a list" contract; [] is equally falsy for callers.
        return []
    pairs = []
    # Iterative Fibonacci gaps replace the original opaque
    # matrix-power formulation; same values: 1, 2, 3, 5, ..., 233.
    gap, next_gap = 1, 2
    for _ in range(2, 14):  # Max gap is 233
        if gap >= model_num:
            break
        pairs.append([model_num, model_num - gap])
        gap, next_gap = next_gap, gap + next_gap
    return pairs
def update_userpass_password(self, username, password, mount_point='userpass'):
    """POST /auth/<mount point>/users/<username>/password

    :param username: User whose password is updated.
    :type username:
    :param password: New password value.
    :type password:
    :param mount_point: Mount point of the userpass auth backend.
    :type mount_point:
    :return:
    :rtype:
    """
    url = '/v1/auth/{}/users/{}/password'.format(mount_point, username)
    payload = {'password': password}
    return self._adapter.post(url, json=payload)
def dump ( self ) : """Prints the project attributes ."""
id = self . get ( "id" ) if not id : id = "(none)" else : id = id [ 0 ] parent = self . get ( "parent" ) if not parent : parent = "(none)" else : parent = parent [ 0 ] print "'%s'" % id print "Parent project:%s" , parent print "Requirements:%s" , self . get ( "requirements" ) print "Default build:%s" , string . join ( self . get ( "debuild-build" ) ) print "Source location:%s" , string . join ( self . get ( "source-location" ) ) print "Projects to build:%s" , string . join ( self . get ( "projects-to-build" ) . sort ( ) ) ;
def count(self, **kwargs):
    """Counts the number of non-NaN objects for each column or row.

    Return:
        A new QueryCompiler object containing counts of non-NaN objects from
        each column or row.
    """
    if self._is_transposed:
        # Flip the axis and delegate to the transposed frame.
        kwargs["axis"] = kwargs.get("axis", 0) ^ 1
        return self.transpose().count(**kwargs)
    axis = kwargs.get("axis", 0)
    # Map counts per partition, then reduce with a sum across partitions.
    mapper = self._build_mapreduce_func(pandas.DataFrame.count, **kwargs)
    reducer = self._build_mapreduce_func(pandas.DataFrame.sum, **kwargs)
    return self._full_reduce(axis, mapper, reducer)
def insertSite(self, businput):
    """Insert a site row inside a transaction.

    Input dictionary has to have the following keys:
    site_name
    it builds the correct dictionary for dao input and executes the dao.

    A unique-constraint / duplicate error is logged and swallowed
    (the site already exists); any other error rolls back and re-raises.
    """
    conn = self.dbi.connection()
    tran = conn.begin()
    try:
        siteobj = {
            # FIXME: unused?
            "site_name": businput["site_name"]
        }
        # Allocate the next id from the SEQ_SI sequence, then insert.
        businput["site_id"] = self.sm.increment(conn, "SEQ_SI", tran)
        self.sitein.execute(conn, businput, tran)
        tran.commit()
    except Exception as ex:
        # Duplicate insert: treat as benign (row already present).
        if (str(ex).lower().find("unique constraint") != -1 or
                str(ex).lower().find("duplicate") != -1):
            # already exists, lets fetch the ID
            self.logger.warning("Ignoring unique constraint violation")
            self.logger.warning(ex)
        else:
            if tran:
                tran.rollback()
            self.logger.exception(ex)
            raise
    finally:
        # Always release the transaction and connection.
        if tran:
            tran.close()
        if conn:
            conn.close()
def label(self, t):
    """Get the label of the song at a given time in seconds.

    Returns the name of the last label whose time is <= t, or None when
    there are no labels or t precedes the first label.
    """
    if self.labels is None:
        return None
    current = None
    # Labels are scanned in order; stop at the first one past t.
    for entry in self.labels:
        if entry.time > t:
            break
        current = entry
    return current.name if current is not None else None
def download_encrypted_file(job, input_args, name):
    """Downloads encrypted files from S3 via header injection.

    input_args: dict        Input dictionary defined in main()
    name: str               Symbolic name associated with file

    Reads the SSE-C master key from ``input_args['ssec']``, derives a
    per-URL key, and fetches the file with curl using the
    x-amz-server-side-encryption-customer-* headers, then stores the
    downloaded file in the Toil file store and returns its file ID.
    """
    work_dir = job.fileStore.getLocalTempDir()
    key_path = input_args['ssec']
    file_path = os.path.join(work_dir, name)
    url = input_args[name]
    # Validate the master key length (SSE-C requires a 32-byte key).
    with open(key_path, 'r') as f:
        key = f.read()
    if len(key) != 32:
        raise RuntimeError('Invalid Key! Must be 32 bytes: {}'.format(key))
    # NOTE(review): the key read above is only used for validation; the
    # actual key is re-derived per URL here — confirm generate_unique_key
    # also returns a 32-byte key.
    key = generate_unique_key(key_path, url)
    # Standard SSE-C headers: base64 key and base64 MD5-of-key.
    encoded_key = base64.b64encode(key)
    encoded_key_md5 = base64.b64encode(hashlib.md5(key).digest())
    h1 = 'x-amz-server-side-encryption-customer-algorithm:AES256'
    h2 = 'x-amz-server-side-encryption-customer-key:{}'.format(encoded_key)
    h3 = 'x-amz-server-side-encryption-customer-key-md5:{}'.format(encoded_key_md5)
    try:
        # -f fails on HTTP errors; retry up to 5 times on transient failures.
        subprocess.check_call(['curl', '-fs', '--retry', '5', '-H', h1,
                               '-H', h2, '-H', h3, url, '-o', file_path])
    except OSError:
        raise RuntimeError('Failed to find "curl". Install via "apt-get install curl"')
    assert os.path.exists(file_path)
    return job.fileStore.writeGlobalFile(file_path)
def _authenticate(self):
    '''Authenticate with the master, this method breaks the functional
    paradigm, it will update the master information from a fresh sign
    in, signing in can occur as often as needed to keep up with the
    revolving master AES key.

    :rtype: Crypticle
    :returns: A crypticle used for encryption operations
    '''
    # Back-off parameters for repeated sign-in attempts.
    acceptance_wait_time = self.opts['acceptance_wait_time']
    acceptance_wait_time_max = self.opts['acceptance_wait_time_max']
    if not acceptance_wait_time_max:
        acceptance_wait_time_max = acceptance_wait_time
    creds = None
    # Sign-in happens over a clear (unencrypted) channel.
    channel = salt.transport.client.AsyncReqChannel.factory(
        self.opts, crypt='clear', io_loop=self.io_loop)
    try:
        error = None
        while True:
            try:
                creds = yield self.sign_in(channel=channel)
            except SaltClientError as exc:
                error = exc
                break
            # 'retry' means the master has not (yet) accepted our key.
            if creds == 'retry':
                if self.opts.get('detect_mode') is True:
                    error = SaltClientError('Detect mode is on')
                    break
                if self.opts.get('caller'):
                    # We have a list of masters, so we should break
                    # and try the next one in the list.
                    if self.opts.get('local_masters', None):
                        error = SaltClientError('Minion failed to authenticate'
                                                ' with the master, has the '
                                                'minion key been accepted?')
                        break
                    else:
                        print('Minion failed to authenticate with the master, '
                              'has the minion key been accepted?')
                        sys.exit(2)
                if acceptance_wait_time:
                    log.info('Waiting %s seconds before retry.', acceptance_wait_time)
                    yield tornado.gen.sleep(acceptance_wait_time)
                # Exponential-style back-off, capped by the max wait time.
                if acceptance_wait_time < acceptance_wait_time_max:
                    acceptance_wait_time += acceptance_wait_time
                    log.debug('Authentication wait time is %s', acceptance_wait_time)
                continue
            break
        # A successful sign-in must yield a dict containing the AES key.
        if not isinstance(creds, dict) or 'aes' not in creds:
            if self.opts.get('detect_mode') is True:
                error = SaltClientError('-|RETRY|-')
            # Drop any stale cached credentials for this master.
            try:
                del AsyncAuth.creds_map[self.__key(self.opts)]
            except KeyError:
                pass
            if not error:
                error = SaltClientError(
                    'Attempt to authenticate with the salt master failed')
            self._authenticate_future.set_exception(error)
        else:
            # Cache the fresh credentials and build the crypticle used for
            # all further encrypted traffic.
            key = self.__key(self.opts)
            AsyncAuth.creds_map[key] = creds
            self._creds = creds
            self._crypticle = Crypticle(self.opts, creds['aes'])
            self._authenticate_future.set_result(True)  # mark the sign-in as complete
            # Notify the bus about creds change
            if self.opts.get('auth_events') is True:
                event = salt.utils.event.get_event(
                    self.opts.get('__role'), opts=self.opts, listen=False)
                event.fire_event(
                    {'key': key, 'creds': creds},
                    salt.utils.event.tagify(prefix='auth', suffix='creds'))
    finally:
        channel.close()
def write_block_data(self, i2c_addr, register, data, force=None):
    """Write a block of byte data to a given register.

    :param i2c_addr: i2c address
    :type i2c_addr: int
    :param register: Start register
    :type register: int
    :param data: List of bytes
    :type data: list
    :param force:
    :type force: Boolean
    :rtype: None
    """
    count = len(data)
    if count > I2C_SMBUS_BLOCK_MAX:
        raise ValueError("Data length cannot exceed %d bytes" % I2C_SMBUS_BLOCK_MAX)
    self._set_address(i2c_addr, force=force)
    request = i2c_smbus_ioctl_data.create(
        read_write=I2C_SMBUS_WRITE, command=register, size=I2C_SMBUS_BLOCK_DATA)
    # SMBus block layout: byte 0 is the length, bytes 1..count the payload.
    block = request.data.contents.block
    block[0] = count
    block[1:count + 1] = data
    ioctl(self.fd, I2C_SMBUS, request)
def asn1_generaltime_to_seconds(timestr):
    """The given string has one of the following formats
        YYYYMMDDhhmmssZ
        YYYYMMDDhhmmss+hhmm
        YYYYMMDDhhmmss-hhmm
    @return: a datetime object or None on error
    """
    base_format = "%Y%m%d%H%M%S"
    # Try the literal-'Z' form first, then a numeric UTC offset (%z).
    for suffix in ('Z', '%z'):
        try:
            return datetime.strptime(timestr, base_format + suffix)
        except ValueError:
            continue
    return None
def set_multiple(self, **kwargs):
    """Configure multiple app key/value pairs"""
    quiet = False
    if not kwargs:
        return
    cmd = ["heroku", "config:set"]
    for key in sorted(kwargs):
        cmd.append("{}={}".format(key, quote(str(kwargs[key]))))
        # Any sensitive key silences the whole command's output.
        if self._is_sensitive_key(key):
            quiet = True
    cmd.extend(["--app", self.name])
    if quiet:
        self._run_quiet(cmd)
    else:
        self._run(cmd)
def _reindex_output ( self , result ) : """If we have categorical groupers , then we want to make sure that we have a fully reindex - output to the levels . These may have not participated in the groupings ( e . g . may have all been nan groups ) ; This can re - expand the output space"""
# we need to re - expand the output space to accomodate all values # whether observed or not in the cartesian product of our groupes groupings = self . grouper . groupings if groupings is None : return result elif len ( groupings ) == 1 : return result # if we only care about the observed values # we are done elif self . observed : return result # reindexing only applies to a Categorical grouper elif not any ( isinstance ( ping . grouper , ( Categorical , CategoricalIndex ) ) for ping in groupings ) : return result levels_list = [ ping . group_index for ping in groupings ] index , _ = MultiIndex . from_product ( levels_list , names = self . grouper . names ) . sortlevel ( ) if self . as_index : d = { self . obj . _get_axis_name ( self . axis ) : index , 'copy' : False } return result . reindex ( ** d ) # GH 13204 # Here , the categorical in - axis groupers , which need to be fully # expanded , are columns in ` result ` . An idea is to do : # result = result . set _ index ( self . grouper . names ) # . reindex ( index ) . reset _ index ( ) # but special care has to be taken because of possible not - in - axis # groupers . # So , we manually select and drop the in - axis grouper columns , # reindex ` result ` , and then reset the in - axis grouper columns . # Select in - axis groupers in_axis_grps = ( ( i , ping . name ) for ( i , ping ) in enumerate ( groupings ) if ping . in_axis ) g_nums , g_names = zip ( * in_axis_grps ) result = result . drop ( labels = list ( g_names ) , axis = 1 ) # Set a temp index and reindex ( possibly expanding ) result = result . set_index ( self . grouper . result_index ) . reindex ( index , copy = False ) # Reset in - axis grouper columns # ( using level numbers ` g _ nums ` because level names may not be unique ) result = result . reset_index ( level = g_nums ) return result . reset_index ( drop = True )
def close_all_files(self):
    """Close all open files (so that we can open more).

    Every entry moves from ``open_file_infos`` to ``closed_file_infos``
    with its handle closed and cleared; finally flags that new files may
    be opened again.
    """
    while self.open_file_infos:
        info = self.open_file_infos.pop(0)
        info.file_handle.close()
        info.file_handle = None
        self.closed_file_infos.append(info)
    self.can_open_more_files = True
def should_see_id(self, element_id):
    """Assert an element with the given ``id`` is visible."""
    matches = ElementSelector(
        world.browser,
        'id("%s")' % element_id,
        filter_displayed=True,
    )
    if not matches:
        raise AssertionError("Expected element with given id.")
async def _drop_databases(cls) -> None:
    """Tries to drop all databases provided in config passed to ``.init()``
    method.

    Normally should be used only for testing purposes.
    """
    if not cls._inited:
        raise ConfigurationError("You have to call .init() first before deleting schemas")
    # Close each connection before dropping its database.
    for conn in cls._connections.values():
        await conn.close()
        await conn.db_delete()
    cls._connections = {}
    await cls._reset_apps()
def get_ancestors(self):
    """
    :returns: A queryset containing the current node object's ancestors,
        starting by the root node and descending to the parent.
    """
    # A root node has no ancestors.
    if self.is_root():
        return get_result_class(self.__class__).objects.none()
    # Every proper prefix of the materialized path (in steplen chunks,
    # excluding the empty prefix) identifies one ancestor.
    ancestor_paths = [
        self.path[:pos]
        for pos in range(0, len(self.path), self.steplen)[1:]
    ]
    return get_result_class(self.__class__).objects.filter(
        path__in=ancestor_paths).order_by('depth')