signature
stringlengths
29
44.1k
implementation
stringlengths
0
85.2k
def frames(self, skip_registration=False):
    """Grab the next Kinect frame as color, depth, and IR images.

    Parameters
    ----------
    skip_registration : bool
        If True, the registration step is skipped.

    Returns
    -------
    :obj:`tuple` of :obj:`ColorImage`, :obj:`DepthImage`, :obj:`IrImage`
        The ColorImage, DepthImage, and IrImage of the current frame.

    Raises
    ------
    RuntimeError
        If the Kinect stream is not running.
    """
    # The index map (fourth element) is not part of this method's contract.
    frame_data = self._frames_and_index_map(skip_registration=skip_registration)
    color_im, depth_im, ir_im = frame_data[:3]
    return color_im, depth_im, ir_im
def set_position(self, x, y, speed=None):
    '''Move chuck to absolute position in um.

    Parameters
    ----------
    x, y : float
        Target position in micrometers.
    speed : int, optional
        Movement speed passed to the instrument; when None the
        instrument's default speed is used.
    '''
    # Bug fix: compare against None explicitly. A speed of 0 is falsy, so
    # the original ``if speed:`` silently dropped it and used the default.
    if speed is not None:
        self._intf.write('MoveChuckSubsite %1.1f %1.1f R Y %d' % (x, y, speed))
    else:
        self._intf.write('MoveChuckSubsite %1.1f %1.1f R Y' % (x, y))
def load_pip_addons(_globals):
    '''Load all known fabsetup addons which are installed as pypi
    pip-packages.

    Args:
        _globals(dict): the globals() namespace of the fabric script.

    Return: None
    '''
    for package_name in known_pip_addons:
        _, username = package_username(package_name)
        module_name = package_name.replace('-', '_')
        try:
            load_addon(username, module_name, _globals)
        except ImportError:
            # Addon is known but not installed; skip it silently.
            pass
def create_keytype_item_node(field, state):
    """Create a definition list item node that describes the key type of a
    dict-type config field.

    Parameters
    ----------
    field : ``lsst.pex.config.Field``
        A ``lsst.pex.config.DictField`` or ``lsst.pex.config.DictConfigField``.
    state : ``docutils.statemachine.State``
        Usually the directive's ``state`` attribute.

    Returns
    -------
    ``docutils.nodes.definition_list_item``
        Definition list item that describes the key type for the field.
    """
    # Bug fix: the original rebound ``keytype_node`` to the term node,
    # discarding the freshly-created definition_list_item and returning a
    # term with a definition appended to it (invalid docutils structure).
    # Build the proper hierarchy: definition_list_item > (term, definition).
    item_node = nodes.definition_list_item()
    item_node += nodes.term(text='Key type')
    keytype_def = nodes.definition()
    keytype_def += make_python_xref_nodes_for_type(
        field.keytype, state, hide_namespace=False)
    item_node += keytype_def
    return item_node
def train_epoch(self, epoch_info, source: 'vel.api.Source', interactive=True):
    """Run a single training epoch.

    Iterates over the source's train loader, running ``train_batch`` on
    every (data, target) pair. When ``interactive`` is True the loader is
    wrapped in a tqdm progress bar that also shows the running loss.
    """
    self.train()

    if interactive:
        iterator = tqdm.tqdm(source.train_loader(), desc="Training",
                             unit="iter", file=sys.stdout)
    else:
        iterator = source.train_loader()

    for batch_idx, (data, target) in enumerate(iterator):
        batch_info = BatchInfo(epoch_info, batch_idx)
        batch_info.on_batch_begin()
        self.train_batch(batch_info, data, target)
        batch_info.on_batch_end()

        # Bug fix: only the tqdm wrapper has set_postfix; calling it on the
        # raw loader (interactive=False) raised AttributeError.
        if interactive:
            iterator.set_postfix(
                loss=epoch_info.result_accumulator.intermediate_value('loss'))
def parse_fields(attributes):
    """Parse model fields."""
    # Collect every attribute that is a Field, bound to its attribute name.
    bound_fields = []
    for name, value in six.iteritems(attributes):
        if isinstance(value, fields.Field):
            bound_fields.append(value.bind_name(name))
    return tuple(bound_fields)
def get_outliers(self):
    '''Performs iterative sigma clipping to get outliers.

    Repeatedly computes the model, applies a Savitzky-Golay filter
    (``SavGol``) to the masked flux, and flags points more than
    ``self.osigma`` scaled-MAD units from the median. Iterates until the
    outlier mask converges, ``self.oiter`` iterations are exceeded, or the
    mask starts cycling. Updates ``self.outmask`` in place; returns None.
    '''
    log.info("Clipping outliers...")
    log.info('Iter %d/%d: %d outliers' % (0, self.oiter, len(self.outmask)))

    # Helper: remove NaN/bad/transit cadences from an array along axis 0.
    def M(x):
        return np.delete(x, np.concatenate([self.nanmask, self.badmask,
                                            self.transitmask]), axis=0)

    t = M(self.time)
    # Seed with a sentinel so the first while-test always passes.
    outmask = [np.array([-1]), np.array(self.outmask)]
    # Loop as long as the last two outlier arrays aren't equal
    while not np.array_equal(outmask[-2], outmask[-1]):
        # Check if we've done this too many times
        if len(outmask) - 1 > self.oiter:
            log.error('Maximum number of iterations in ' +
                      '``get_outliers()`` exceeded. Skipping...')
            break
        # Check if we're going in circles
        if np.any([np.array_equal(outmask[-1], i) for i in outmask[:-1]]):
            log.error('Function ``get_outliers()`` ' +
                      'is going in circles. Skipping...')
            break
        # Compute the model to get the flux
        self.compute()
        # Get the outliers
        f = SavGol(M(self.flux))
        med = np.nanmedian(f)
        # 1.4826 scales the MAD to an estimate of the standard deviation.
        MAD = 1.4826 * np.nanmedian(np.abs(f - med))
        inds = np.where((f > med + self.osigma * MAD) |
                        (f < med - self.osigma * MAD))[0]
        # Project onto unmasked time array
        inds = np.array([np.argmax(self.time == t[i]) for i in inds])
        self.outmask = np.array(inds, dtype=int)
        # Add them to the running list
        outmask.append(np.array(inds))
        # Log
        log.info('Iter %d/%d: %d outliers' % (len(outmask) - 2, self.oiter,
                                              len(self.outmask)))
def get_http_json(self, url=None, retry_count=3, rate_limit_timeout=120,
                  headers=None):
    """The function for retrieving a json result via HTTP.

    Args:
        url (:obj:`str`): The URL to retrieve (required).
        retry_count (:obj:`int`): The number of times to retry in case
            socket errors, timeouts, connection resets, etc. are
            encountered. Defaults to 3.
        rate_limit_timeout (:obj:`int`): The number of seconds to wait
            before retrying when a rate limit notice is returned via
            rdap+json or HTTP error 429. Defaults to 120.
        headers (:obj:`dict`): The HTTP headers. The Accept header
            defaults to 'application/rdap+json'.

    Returns:
        dict: The data in json format.

    Raises:
        HTTPLookupError: The HTTP lookup failed.
        HTTPRateLimitError: The HTTP request rate limited and retries
            were exhausted.
    """
    if headers is None:
        headers = {'Accept': 'application/rdap+json'}
    try:
        # Create the connection for the whois query.
        log.debug('HTTP query for {0} at {1}'.format(self.address_str, url))
        conn = Request(url, headers=headers)
        data = self.opener.open(conn, timeout=self.timeout)
        try:
            # readall() only exists on some response objects; fall back
            # to read() below when it is missing.
            d = json.loads(data.readall().decode('utf-8', 'ignore'))
        except AttributeError:  # pragma: no cover
            d = json.loads(data.read().decode('utf-8', 'ignore'))
        try:
            # Tests written but commented out. I do not want to send a
            # flood of requests on every test.
            # Scan RDAP notices for an explicit rate-limit message.
            for tmp in d['notices']:  # pragma: no cover
                if tmp['title'] == 'Rate Limit Notice':
                    log.debug('RDAP query rate limit exceeded.')
                    if retry_count > 0:
                        log.debug('Waiting {0} seconds...'.format(
                            str(rate_limit_timeout)))
                        sleep(rate_limit_timeout)
                        return self.get_http_json(
                            url=url, retry_count=retry_count - 1,
                            rate_limit_timeout=rate_limit_timeout,
                            headers=headers)
                    else:
                        raise HTTPRateLimitError(
                            'HTTP lookup failed for {0}. Rate limit '
                            'exceeded, wait and try again (possibly a '
                            'temporary block).'.format(url))
        except (KeyError, IndexError):  # pragma: no cover
            # No 'notices' key (or empty) -- not rate limited.
            pass
        return d
    except HTTPError as e:  # pragma: no cover
        # RIPE is producing this HTTP error rather than a JSON error.
        if e.code == 429:
            log.debug('HTTP query rate limit exceeded.')
            if retry_count > 0:
                log.debug('Waiting {0} seconds...'.format(
                    str(rate_limit_timeout)))
                sleep(rate_limit_timeout)
                return self.get_http_json(
                    url=url, retry_count=retry_count - 1,
                    rate_limit_timeout=rate_limit_timeout,
                    headers=headers)
            else:
                raise HTTPRateLimitError(
                    'HTTP lookup failed for {0}. Rate limit '
                    'exceeded, wait and try again (possibly a '
                    'temporary block).'.format(url))
        else:
            raise HTTPLookupError(
                'HTTP lookup failed for {0} with error '
                'code {1}.'.format(url, str(e.code)))
    except (URLError, socket.timeout, socket.error) as e:
        # Transient network problem: retry until retry_count is exhausted.
        log.debug('HTTP query socket error: {0}'.format(e))
        if retry_count > 0:
            log.debug('HTTP query retrying (count: {0})'.format(
                str(retry_count)))
            return self.get_http_json(
                url=url, retry_count=retry_count - 1,
                rate_limit_timeout=rate_limit_timeout,
                headers=headers)
        else:
            raise HTTPLookupError('HTTP lookup failed for {0}.'.format(url))
    except (HTTPLookupError, HTTPRateLimitError) as e:  # pragma: no cover
        raise e
    except:  # pragma: no cover
        # NOTE(review): bare except re-raised as HTTPLookupError -- broad
        # (also catches KeyboardInterrupt); preserved as-is.
        raise HTTPLookupError('HTTP lookup failed for {0}.'.format(url))
def get_dim_indexers(data_obj, indexers):
    """Given a xarray data object and label based indexers, return a mapping
    of label indexers with only dimension names as keys.

    Multiple level indexers given on a multi-index dimension are grouped
    into a single dictionary indexer for that dimension (a ValueError is
    raised if that is not possible).
    """
    bad_keys = [key for key in indexers
                if key not in data_obj.dims
                and key not in data_obj._level_coords]
    if bad_keys:
        raise ValueError(
            "dimensions or multi-index levels %r do not exist" % bad_keys)

    grouped_levels = defaultdict(dict)
    result = {}
    for key, label in indexers.items():
        dim, = data_obj[key].dims
        if key == dim:
            result[key] = label
        else:
            # A multi-index level indexer: group it under its dimension.
            grouped_levels[dim][key] = label

    for dim, level_labels in grouped_levels.items():
        if result.get(dim, False):
            raise ValueError("cannot combine multi-index level indexers "
                            "with an indexer for dimension %s" % dim)
        result[dim] = level_labels

    return result
def get_execution_info(self, driver_id, function_descriptor):
    """Get the FunctionExecutionInfo of a remote function.

    Args:
        driver_id: ID of the driver that the function belongs to.
        function_descriptor: The FunctionDescriptor of the function to get.

    Returns:
        A FunctionExecutionInfo object.

    Raises:
        KeyError: If no execution info is registered for the
            (driver_id, function_id) pair after loading.
    """
    if self._worker.load_code_from_local:
        # Load function from local code.
        # Currently, we don't support isolating code by drivers,
        # thus always set driver ID to NIL here.
        driver_id = ray.DriverID.nil()
        if not function_descriptor.is_actor_method():
            self._load_function_from_local(driver_id, function_descriptor)
    else:
        # Load function from GCS.
        # Wait until the function to be executed has actually been
        # registered on this worker. We will push warnings to the user if
        # we spend too long in this loop.
        # The driver function may not be found in sys.path. Try to load
        # the function from GCS.
        with profiling.profile("wait_for_function"):
            self._wait_for_function(function_descriptor, driver_id)
    try:
        function_id = function_descriptor.function_id
        info = self._function_execution_info[driver_id][function_id]
    except KeyError as e:
        # Re-raise with context so the failing lookup is identifiable.
        message = ("Error occurs in get_execution_info: "
                   "driver_id: %s, function_descriptor: %s. Message: %s"
                   % (driver_id, function_descriptor, e))
        raise KeyError(message)
    return info
def asm_app(parser, cmd, args):  # pragma: no cover
    """Assemble code from commandline or stdin.

    Please note that all semi-colons are replaced with carriage returns
    unless source is read from stdin.
    """
    parser.add_argument(
        'source',
        help='the code to assemble, read from stdin if omitted',
        nargs='?',
    )
    pwnypack.main.add_target_arguments(parser)
    parser.add_argument(
        '--syntax', '-s',
        choices=AsmSyntax.__members__.keys(),
        default=None,
    )
    parser.add_argument(
        '--address', '-o',
        type=lambda v: int(v, 0),
        default=0,
        help='the address where the code is expected to run',
    )

    args = parser.parse_args(args)
    target = pwnypack.main.target_from_arguments(args)

    # Resolve the syntax enum member, if one was requested.
    if args.syntax is None:
        syntax = None
    else:
        syntax = AsmSyntax.__members__[args.syntax]

    # Stdin input is taken verbatim; inline input uses ';' as a separator.
    if args.source is None:
        args.source = sys.stdin.read()
    else:
        args.source = args.source.replace(';', '\n')

    return asm(args.source, syntax=syntax, target=target, addr=args.address)
def openTypeHeadCreatedFallback(info):
    """Fallback to the environment variable SOURCE_DATE_EPOCH if set,
    otherwise now.
    """
    epoch = os.environ.get("SOURCE_DATE_EPOCH")
    if epoch is None:
        return dateStringForNow()
    # Reproducible-build path: format the fixed timestamp instead of now.
    created = datetime.utcfromtimestamp(int(epoch))
    return created.strftime(_date_format)
def mass_3d(self, R, Rs, rho0, r_core):
    """Evaluate the analytic 3D mass integral of this cored profile at R.

    :param R: projected distance
    :param Rs: scale radius
    :param rho0: central core density
    :param r_core: core radius
    """
    Rs = float(Rs)
    # Work in units of the scale radius.
    beta = r_core * Rs ** -1
    x = R * Rs ** -1
    M0 = 4 * np.pi * Rs ** 3 * rho0
    bracket = (0.5 * np.log(1 + x ** 2)
               + beta ** 2 * np.log(x * beta ** -1 + 1)
               - beta * np.arctan(x))
    return M0 * (1 + beta ** 2) ** -1 * bracket
def delete_expired_requests():
    """Delete expired inclusion requests."""
    # Bug fix: ``filter_by`` only accepts keyword arguments, so passing a
    # SQL expression positionally raised a TypeError; use ``filter``.
    # Also, an *expired* request has an expiry date in the past, so the
    # comparison must be "<=", not ">" (the original would have deleted
    # the still-valid requests instead).
    InclusionRequest.query.filter(
        InclusionRequest.expiry_date <= datetime.utcnow()
    ).delete()
    db.session.commit()
def add_file_handler(logger, level, tags):
    """Create and add a file handler (`logging.FileHandler` instance) to
    the specified logger.

    Args:
        logger: The `logging.Logger` instance to add the new file handler
            to.
        level: `str`. The logging level for which the handler accepts
            messages, i.e. `logging.INFO`.
        tags: `list` of tags to append to the log file name. Each tag will
            be '_' delimited and added in the order provided.
    """
    # Append to the tag-derived log file rather than truncating it.
    handler = logging.FileHandler(filename=get_logfile_name(tags), mode="a")
    handler.setLevel(level)
    handler.setFormatter(
        logging.Formatter('%(asctime)s:%(name)s:\t%(message)s'))
    logger.addHandler(handler)
def turbulent_von_Karman(Re, Pr, fd):
    r'''Calculates internal convection Nusselt number for turbulent flows
    in pipe according to [2]_ as in [1]_.

    .. math::
        Nu = \frac{(f/8)Re Pr}{1 + 5(f/8)^{0.5}\left[Pr - 1
        + \ln\left(\frac{5Pr+1}{6}\right)\right]}

    Parameters
    ----------
    Re : float
        Reynolds number, [-]
    Pr : float
        Prandtl number, [-]
    fd : float
        Darcy friction factor, [-]

    Returns
    -------
    Nu : float
        Nusselt number, [-]

    Notes
    -----
    Range according to [1]_ is 0.5 <= Pr <= 3 and 10^4 <= Re <= 10^5.

    Examples
    --------
    >>> turbulent_von_Karman(Re=1E5, Pr=1.2, fd=0.0185)
    255.7243541243272

    References
    ----------
    .. [1] Rohsenow, Warren and James Hartnett and Young Cho. Handbook of
       Heat Transfer, 3E. New York: McGraw-Hill, 1998.
    .. [2] T. von Karman, "The Analogy Between Fluid Friction and Heat
       Transfer," Trans. ASME, (61):705-710, 1939.
    '''
    f8 = fd / 8.
    numerator = f8 * Re * Pr
    denominator = 1 + 5 * f8 ** 0.5 * (Pr - 1 + log((5 * Pr + 1) / 6.))
    return numerator / denominator
def flash(function, links, thread_count):
    """Process the URLs and uses a threadpool to execute a function.

    Args:
        function: callable applied to each link.
        links: iterable of URLs (materialized to a list internally).
        thread_count: number of worker threads.
    """
    # Convert links (set) to list so len() works for progress reporting.
    links = list(links)
    # Bug fix: use the executor as a context manager so its worker threads
    # are always joined; the original never called shutdown().
    with concurrent.futures.ThreadPoolExecutor(
            max_workers=thread_count) as threadpool:
        futures = (threadpool.submit(function, link) for link in links)
        for i, _ in enumerate(concurrent.futures.as_completed(futures)):
            # Update progress once per batch of thread_count, and at the end.
            if i + 1 == len(links) or (i + 1) % thread_count == 0:
                print('%s Progress: %i/%i' % (info, i + 1, len(links)),
                      end='\r')
    print('')
def analyze_async(output_dir, dataset, cloud=False, project_id=None):
    """Analyze data locally or in the cloud with BigQuery.

    Produce analysis used by training. This can take a while, even for
    small datasets. For small datasets, it may be faster to use
    local_analysis.

    Args:
        output_dir: The output directory to use.
        dataset: only CsvDataSet is supported currently.
        cloud: If False, runs analysis locally with Pandas. If True, runs
            analysis in the cloud with BigQuery.
        project_id: Uses BigQuery with this project id. Default is
            datalab's default project id.

    Returns:
        A google.datalab.utils.Job object that can be used to query state
        from or wait.
    """
    import google.datalab.utils as du

    with warnings.catch_warnings():
        warnings.simplefilter("ignore")

        def _run():
            return _analyze(output_dir, dataset, cloud, project_id)

        return du.LambdaJob(_run, job_id=None)
def available_delegations(self):
    """Instance depends on the API version:

    * 2018-08-01: :class:`AvailableDelegationsOperations<azure.mgmt.network.v2018_08_01.operations.AvailableDelegationsOperations>`
    """
    api_version = self._get_api_version('available_delegations')
    # Guard clause: only one API version is supported for this group.
    if api_version != '2018-08-01':
        raise NotImplementedError(
            "APIVersion {} is not available".format(api_version))
    from .v2018_08_01.operations import \
        AvailableDelegationsOperations as OperationClass
    return OperationClass(
        self._client, self.config,
        Serializer(self._models_dict(api_version)),
        Deserializer(self._models_dict(api_version)))
def setLocked(self, state):
    """Sets the locked state for this view widget. When locked, a user no
    longer has control over editing views and layouts. A view panel with a
    single entry will hide its tab bar, and if it has multiple views, it
    will simply hide the editing buttons.

    :param      state | <bool>
    """
    toggled = state != self._locked
    self._locked = state

    # Propagate the lock state to every child panel.
    for panel in self.panels():
        panel.setLocked(state)

    # Notify listeners only on an actual transition, unless signals are off.
    if toggled and not self.signalsBlocked():
        self.lockToggled.emit(state)
def SelectFromListId(LId, Val=None, Crit='Name', PreExp=None, PostExp=None,
                     Log='any', InOut='In', Out=bool):
    """Return the indices or instances of all LOS matching criteria.

    The selection can be done according to 2 different mechanisms.

    Mechanism (1): provide the value (Val) a criterion (Crit) should match.
    The criteria are typically attributes of :class:`~tofu.pathfile.ID`
    (i.e.: name, or user-defined attributes like the camera head...).

    Mechanism (2): (used if Val=None) provide a str expression (or a list
    of such) to be fed to eval(). Used to check on quantitative criteria.
        - PreExp: placed before the criterion value (e.g.: 'not ' or '<=')
        - PostExp: placed after the criterion value
        - you can use both

    Other parameters are used to specify logical operators for the
    selection (match any or all the criteria...) and the type of output.

    Parameters
    ----------
    LId : list
        Objects carrying an ID whose attributes / USRdict entries are
        matched against Crit.
    Crit : str
        Flag indicating which criterion to use for discrimination.
        Can be set to:
            - any attribute of :class:`~tofu.pathfile.ID`
              (e.g.: 'Name', 'SaveName', 'SavePath'...)
            - any key of ID.USRdict (e.g.: 'Exp'...)
    Val : None / list / str
        The value to match for the chosen criterion, can be a list.
        Used for selection mechanism (1).
    PreExp : None / list / str
        A str (or list of such) expression to be fed to eval(),
        placed before the criterion value. Used for mechanism (2).
    PostExp : None / list / str
        A str (or list of such) expression to be fed to eval(),
        placed after the criterion value. Used for mechanism (2).
    Log : str
        Flag indicating whether the criterion shall match:
            - 'all': all provided values
            - 'any': at least one of them
    InOut : str
        Flag indicating whether the returned indices are:
            - 'In': the ones matching the criterion
            - 'Out': the ones not matching it
    Out : type / str
        Flag indicating in which form to return the result:
            - int: as an array of integer indices
            - bool: as an array of boolean indices
            - 'Name': as a list of names
            - 'LOS': as a list of :class:`~tofu.geom.LOS` instances

    Returns
    -------
    ind : list / np.ndarray
        The computed output, of nature defined by parameter Out.
    """
    # --- Input validation -------------------------------------------------
    C0 = type(Crit) is str
    C1 = type(Crit) is list and all([type(cc) is str for cc in Crit])
    assert C0 or C1, "Arg Crit must be a str or list of str !"
    for rr in [PreExp, PostExp]:
        if rr is not None:
            C0 = type(rr) is str
            C1 = type(rr) is list and all([type(ee) is str for ee in rr])
            # NOTE(review): '%S' is not a valid format code -- building this
            # message would itself raise if the assert ever fired.
            assert C0 or C1, "Args %S must be a str or list of str !" % rr
    assert Log in ['any', 'all'], "Arg Log must be in ['any','all'] !"
    assert InOut in ['In', 'Out'], "Arg InOut must be in ['In','Out'] !"

    if Val is None and PreExp is None and PostExp is None:
        # No criterion at all: everything matches.
        ind = np.ones((1, len(LId)), dtype=bool)
    elif not Val is None:
        # Mechanism (1): equality against one or several values.
        if type(Val) is str:
            Val = [Val]
        N = len(Val)
        ind = np.zeros((N, len(LId)), dtype=bool)
        if Crit in dir(ID):
            for ii in range(0, N):
                ind[ii, :] = np.asarray([getattr(iid, Crit) == Val[ii]
                                         for iid in LId], dtype=bool)
        else:
            for ii in range(0, N):
                ind[ii, :] = np.asarray([iid.USRdict[Crit] == Val[ii]
                                         for iid in LId], dtype=bool)
    else:
        # Mechanism (2): build "<PreExp> <criterion> <PostExp>" strings and
        # eval() them. SECURITY NOTE: eval() of caller-provided expressions
        # is only acceptable with trusted input.
        if type(PreExp) is str:
            PreExp = [PreExp]
        if type(PostExp) is str:
            PostExp = [PostExp]
        if PreExp is None:
            PreExp = ["" for ss in PostExp]
        if PostExp is None:
            PostExp = ["" for ss in PreExp]
        assert len(PreExp) == len(PostExp), "len(PreExp) should be =len(PostExp)"
        N = len(PreExp)
        ind = np.zeros((N, len(LId)), dtype=bool)
        if Crit in dir(ID):
            for ii in range(0, N):
                List = [eval(PreExp[ii] + " getattr(iid,'%s') " % Crit
                             + PostExp[ii]) for iid in LId]
                ind[ii, :] = np.array(List, dtype=bool)
        else:
            for ii in range(0, N):
                List = [eval(PreExp[ii] + " iid.USRdict['%s'] " % Crit
                             + PostExp[ii]) for iid in LId]
                ind[ii, :] = np.asarray(List, dtype=bool)

    # Combine per-value rows with the requested logical operator, then
    # invert when the complement ('Out') is requested.
    ind = np.any(ind, axis=0) if Log == 'any' else np.all(ind, axis=0)
    if InOut == 'Out':
        ind = ~ind

    # Convert the boolean mask to the requested output form.
    if Out == int:
        ind = ind.nonzero()[0]
    elif Out is not bool and hasattr(ID, Out):
        ind = [getattr(LId[ii], Out) for ii in ind.nonzero()[0]]
    elif Out is not bool and Out in LId[0].USRdict.keys():
        ind = [LId[ii].USRdict[Out] for ii in ind.nonzero()[0]]
    return ind
def iter_following(username, number=-1, etag=None):
    """List the people ``username`` follows.

    :param str username: (required), login of the user
    :param int number: (optional), number of users being followed by
        username to return. Default: -1, return all of them
    :param str etag: (optional), ETag from a previous request to the same
        endpoint
    :returns: generator of :class:`User <github3.users.User>`
    """
    # Guard clause: nothing to iterate without a username.
    if not username:
        return []
    return gh.iter_following(username, number, etag)
def onAIOCompletion(self):
    """Call when eventfd notified events are available."""
    pending = self.eventfd.read()
    trace('eventfd reports %i events' % pending)
    # Even though eventfd signaled activity and may report some number of
    # pending events, some events seem to have been processed already
    # (maybe during an io_cancel call?). So do not trust the eventfd
    # value -- do not even assume there is at least one event to process.
    self._aio_context.getEvents(0)
def extract_detections(detections, templates, archive, arc_type,
                       extract_len=90.0, outdir=None, extract_Z=True,
                       additional_stations=[]):
    """Extract waveforms associated with detections.

    Takes a list of detections for the template, template. Waveforms will
    be returned as a list of :class:`obspy.core.stream.Stream` containing
    segments of extract_len. They will also be saved if outdir is set.
    The default is unset. The default extract_len is 90 seconds per
    channel.

    :type detections: list
    :param detections:
        List of :class:`eqcorrscan.core.match_filter.Detection`.
    :type templates: list
    :param templates:
        A list of tuples of the template name and the template Stream used
        to detect detections.
    :type archive: str
    :param archive:
        Either name of archive or path to continuous data, see
        :func:`eqcorrscan.utils.archive_read` for details.
    :type arc_type: str
    :param arc_type: Type of archive, either seishub, FDSN, day_vols.
    :type extract_len: float
    :param extract_len:
        Length to extract around the detection (will be equally cut around
        the detection time) in seconds. Default is 90.0.
    :type outdir: str
    :param outdir:
        Default is None, with None set, no files will be saved, if set
        each detection will be saved into this directory with files named
        according to the detection time, NOT than the waveform start time.
        Detections will be saved into template subdirectories. Files
        written will be multiplexed miniseed files, the encoding will be
        chosen automatically and will likely be float.
    :type extract_Z: bool
    :param extract_Z:
        Set to True to also extract Z channels for detections, delays will
        be the same as horizontal channels, only applies if only
        horizontal channels were used in the template.
    :type additional_stations: list
    :param additional_stations:
        List of tuples of (station, channel) to also extract data for
        using an average delay.

    :returns: list of :class:`obspy.core.stream.Stream`
        (empty / None when ``outdir`` is set, since data are written to
        disk instead).
    :rtype: list

    .. note::
        ``additional_stations`` uses a mutable default argument; the
        default list is only iterated here, never mutated, so this is
        safe as written -- but callers should not rely on it.
    """
    # Sort the template according to start-times, needed so that stachan[i]
    # corresponds to delays[i]
    all_delays = []  # List of tuples of template name, delays
    all_stachans = []
    for template in templates:
        templatestream = template[1].sort(['starttime'])
        stachans = [(tr.stats.station, tr.stats.channel)
                    for tr in templatestream]
        mintime = templatestream[0].stats.starttime
        delays = [tr.stats.starttime - mintime for tr in templatestream]
        all_delays.append((template[0], delays))
        all_stachans.append((template[0], stachans))
    # Sort the detections and group by day
    detections.sort(key=lambda d: d.detect_time)
    detection_days = [detection.detect_time.date
                      for detection in detections]
    detection_days = list(set(detection_days))
    detection_days.sort()
    detection_days = [UTCDateTime(d) for d in detection_days]
    # Initialize output list
    detection_wavefiles = []
    # Also include Z channels when extracting detections
    if extract_Z:
        new_all_stachans = []
        new_all_delays = []
        for t, template in enumerate(all_stachans):
            stachans = template[1]
            delays = all_delays[t][1]
            new_stachans = []
            new_delays = []
            j = 0
            for i, stachan in enumerate(stachans):
                if j == 1:
                    # Insert a Z-channel twin before the second horizontal
                    # of each pair, re-using the same delay.
                    new_stachans.append((stachan[0], stachan[1][0] + 'Z'))
                    new_delays.append(delays[i])
                    new_stachans.append(stachan)
                    new_delays.append(delays[i])
                    j = 0
                else:
                    new_stachans.append(stachan)
                    new_delays.append(delays[i])
                    j += 1
            new_all_stachans.append((template[0], new_stachans))
            new_all_delays.append((template[0], new_delays))
        all_delays = new_all_delays
        all_stachans = new_all_stachans
    if not len(additional_stations) == 0:
        print('Adding additional stations')
        for t, template in enumerate(all_stachans):
            # Extra stations get the template's average delay.
            av_delay = np.mean(all_delays[t][1])
            for sta in additional_stations:
                if sta not in template[1]:
                    print('Added station ' + '.'.join(sta))
                    template[1].append(sta)
                    all_delays[t][1].append(av_delay)
    del stachans
    # Loop through the days
    for detection_day in detection_days:
        print('Working on detections for day: ' + str(detection_day))
        # NOTE(review): 'stachans' here shadows the loop variable inside the
        # comprehension and only uses the FIRST template's station list --
        # presumably all templates share stations; verify against callers.
        stachans = list(set([stachans[1] for stachans in all_stachans][0]))
        # List of all unique stachans - read in all data
        st = read_data(archive=archive, arc_type=arc_type,
                       day=detection_day, stachans=stachans)
        st.merge(fill_value='interpolate')
        day_detections = [detection for detection in detections
                          if UTCDateTime(detection.detect_time.date) ==
                          detection_day]
        del stachans, delays
        for detection in day_detections:
            print('Cutting for detections at: ' +
                  detection.detect_time.strftime('%Y/%m/%d %H:%M:%S'))
            detect_wav = st.copy()
            for tr in detect_wav:
                # Trim symmetrically around the detection time.
                t1 = UTCDateTime(detection.detect_time) - extract_len / 2
                t2 = UTCDateTime(detection.detect_time) + extract_len / 2
                tr.trim(starttime=t1, endtime=t2)
            if outdir:
                if not os.path.isdir(os.path.join(
                        outdir, detection.template_name)):
                    os.makedirs(os.path.join(
                        outdir, detection.template_name))
                detect_wav.write(os.path.join(
                    outdir, detection.template_name,
                    detection.detect_time.strftime('%Y-%m-%d_%H-%M-%S') +
                    '.ms'), format='MSEED')
                print('Written file: %s' % '/'.join(
                    [outdir, detection.template_name,
                     detection.detect_time.strftime('%Y-%m-%d_%H-%M-%S') +
                     '.ms']))
            if not outdir:
                detection_wavefiles.append(detect_wav)
            del detect_wav
        del st
        if outdir:
            detection_wavefiles = []
    if not outdir:
        return detection_wavefiles
    else:
        return
def get_next_logs(self, n):
    """Gets next n objects from list."""
    # Implemented from kitosid template for -
    # osid.resource.ResourceList.get_next_resources
    if n > self.available():
        # !!! This is not quite as specified (see method docs) !!!
        raise IllegalState('not enough elements available in this list')
    results = []
    for _ in range(n):
        try:
            results.append(next(self))
        except StopIteration:
            # Underlying iterator exhausted early; return what we have.
            break
    return results
def get_queryset(self, **kwargs):
    """Build the queryset for this view.

    Derives the (possibly filtered) queryset, then applies ordering.
    """
    return self.order_queryset(self.derive_queryset(**kwargs))
def extract_value(self, string, key, inline_code=True):
    """Extract a keyed value from a docstring.

    .. code-block:: text

        * synopsis: ``%shorten{text, max_size}``
        * example: ``%shorten{$title, 32}``
        * description: Shorten "text" on word boundarys.

    :param string: the docstring to search
    :param key: the bullet label to look for (e.g. ``synopsis``)
    :param inline_code: expect the value wrapped in double backticks
    :return: the extracted value, or ``False`` when the key is absent
    """
    capture = '``(.*)``' if inline_code else '(.*)'
    pattern = r'\* ' + key + ': ' + capture
    matches = re.findall(pattern, string)
    if not matches:
        return False
    # Strip any stray backticks captured by the non-inline pattern.
    return matches[0].replace('``', '')
def get_context(self, language):
    """Get the context (description) of this task as a ParsableText."""
    if self._context:
        context = self.gettext(language, self._context)
    else:
        context = ""
    # Hooks may override the context; the raw context is offered as default.
    vals = self._hook_manager.call_hook(
        'task_context', course=self.get_course(), task=self, default=context)
    translation = self._translations.get(language, gettext.NullTranslations())
    # Prefer the first hook-provided value when any hook answered.
    text = vals[0] if len(vals) else context
    return ParsableText(text, "rst", translation)
def _check_file_io(self):
    """Create and define logging directory.

    Builds the restore and logging paths from the SAVE/MODEL directory
    flags, creates the logging directory, and redirects ``sys.stdout`` to
    a log file so later ``print()`` calls are captured.
    """
    # Per-run folder names, keyed on the run / restore model numbers.
    folder = 'Model' + str(self.flags['RUN_NUM']) + '/'
    folder_restore = 'Model' + str(self.flags['MODEL_RESTORE']) + '/'
    self.flags['RESTORE_DIRECTORY'] = self.flags['SAVE_DIRECTORY'] + self.flags['MODEL_DIRECTORY'] + folder_restore
    self.flags['LOGGING_DIRECTORY'] = self.flags['SAVE_DIRECTORY'] + self.flags['MODEL_DIRECTORY'] + folder
    self.make_directory(self.flags['LOGGING_DIRECTORY'])
    # NOTE(review): replaces the process-wide stdout; presumably Logger
    # tees to both the console and the log file -- confirm.
    sys.stdout = Logger(self.flags['LOGGING_DIRECTORY'] + 'ModelInformation.log')
    # Record the full flag dict at the top of the log.
    print(self.flags)
def GetLocations():
    """Return all cloud locations available to the calling alias.

    Also caches the location aliases on ``clc.LOCATIONS`` for later use.

    Raises:
        Exception: if the API call reports failure.
    """
    r = clc.v1.API.Call('post', 'Account/GetLocations', {})
    if r['Success'] != True:
        # API-level failure: optionally report through the CLI status
        # helper (when running as a CLI), then raise.
        if clc.args:
            clc.v1.output.Status('ERROR', 3, 'Error calling %s. Status code %s. %s' % ('Account/GetLocations', r['StatusCode'], r['Message']))
        raise Exception('Error calling %s. Status code %s. %s' % ('Account/GetLocations', r['StatusCode'], r['Message']))
    elif int(r['StatusCode']) == 0:
        # Cache the aliases module-wide for other calls.
        clc.LOCATIONS = [x['Alias'] for x in r['Locations']]
        # NOTE(review): when 'Success' is true but StatusCode != 0 the
        # function falls through and returns None -- confirm intended.
        return (r['Locations'])
def main():
    '''Calculate the distance of an object in inches using a HCSR04 sensor
    and a Raspberry Pi.'''
    # BCM GPIO pin numbers wired to the sensor's TRIG and ECHO lines.
    trig_pin = 17
    echo_pin = 27
    # Default values
    # unit = 'metric'
    # temperature = 20
    # round_to = 1
    # Create a distance reading with the hcsr04 sensor module
    # and override the default values for temp, unit and rounding.
    # NOTE(review): temperature=68 appears to be Fahrenheit to match the
    # imperial unit -- confirm against the sensor module docs.
    value = sensor.Measurement(trig_pin, echo_pin, temperature=68, unit='imperial', round_to=2)
    raw_measurement = value.raw_distance()
    # Calculate the distance in inches
    imperial_distance = value.distance_imperial(raw_measurement)
    print("The Distance = {} inches".format(imperial_distance))
def _packet_manager(self):
    """Watch packet list for timeouts.

    Runs forever; intended to execute on a dedicated background thread.
    """
    while True:
        if self._packets:
            with self._packet_lock:
                now = time.time()
                # Keep only packets for which _packet_timeout() is truthy.
                # Slice assignment mutates the list in place so other
                # threads holding a reference see the filtered contents.
                self._packets[:] = [packet for packet in self._packets if self._packet_timeout(packet, now)]
        # c.f. nyquist -- poll twice per resend interval.
        time.sleep(ACK_RESEND / 2)
def get_compute_usage(access_token, subscription_id, location):
    '''List compute usage and limits for a location.

    Args:
        access_token (str): A valid Azure authentication token.
        subscription_id (str): Azure subscription id.
        location (str): Azure data center location. E.g. westus.

    Returns:
        HTTP response. JSON body of Compute usage and limits data.
    '''
    # Assemble the ARM REST URL for the compute usages endpoint.
    endpoint = (get_rm_endpoint()
                + '/subscriptions/' + subscription_id
                + '/providers/Microsoft.compute/locations/' + location
                + '/usages?api-version=' + COMP_API)
    return do_get(endpoint, access_token)
def start(self, name: str, increment_count: bool = True) -> None:
    """Start a named timer.

    Args:
        name: name of the timer
        increment_count: increment the start count for this timer
    """
    if not self._timing:
        return
    now = get_now_utc_pendulum()
    # Pause whatever was previously being timed, crediting it with the
    # time elapsed since it started.
    if self._stack:
        current = self._stack[-1]
        self._totaldurations[current] += now - self._starttimes[current]
    # First use of this timer: initialize its accumulators.
    if name not in self._starttimes:
        self._totaldurations[name] = datetime.timedelta()
        self._count[name] = 0
    self._starttimes[name] = now
    if increment_count:
        self._count[name] += 1
    self._stack.append(name)
def acquire_lock(self):
    """Acquire lock before scheduling jobs to prevent another scheduler
    from scheduling jobs at the same time.

    This function returns True if a lock is acquired. False otherwise.
    """
    lock_key = '%s_lock' % self.scheduler_key
    timestamp = time.time()
    # Let the lock expire slightly after the scheduling interval so a
    # crashed scheduler cannot hold it forever.
    ttl = int(self._interval) + 10
    # SET with NX succeeds only when the key does not already exist.
    self._lock_acquired = self.connection.set(lock_key, timestamp, ex=ttl, nx=True)
    return self._lock_acquired
def _from_dict ( cls , _dict ) : """Initialize a Expansions object from a json dictionary ."""
args = { } if 'expansions' in _dict : args [ 'expansions' ] = [ Expansion . _from_dict ( x ) for x in ( _dict . get ( 'expansions' ) ) ] else : raise ValueError ( 'Required property \'expansions\' not present in Expansions JSON' ) return cls ( ** args )
def instance(self, *args, **kwargs):
    """Create an instance of the specified class in the initializer.

    :param args: the arguments given to the initializer of the new class
    :param kwargs: the keyword arguments given to the initializer of the new class
    """
    # The module itself is not needed here, only the class object.
    _module, target_cls = self.get_module_class()
    inst = target_cls(*args, **kwargs)
    logger.debug(f'inst: {inst}')
    return inst
def pressision_try(orbitals, U, beta, step):
    """Perform a better initial guess of lambda (no improvement).

    :param orbitals: number of orbitals
    :param U: interaction strength
    :param beta: inverse temperature
    :param step: number of grid points for the chemical-potential sweep
    """
    mu, lam = main(orbitals, U, beta, step)
    mu2, lam2 = linspace(0, U * orbitals, step), zeros(step)
    # Iterate over the whole allocated grid. The original hard-coded
    # range(99), which raised IndexError whenever step < 100 and left the
    # tail of lam2 unsolved when step > 100.
    for i in range(step - 1):
        lam2[i + 1] = fsolve(restriction, lam2[i], (mu2[i + 1], orbitals, U, beta))
    plot(mu2, 2 * orbitals * fermi_dist(-(mu2 + lam2), beta), label='Test guess')
    legend(loc=0)
def matchingFilePaths(targetfilename, directory, targetFileExtension=None, selector=None):
    """Search for files in all subfolders of specified directory, return
    filepaths of all matching instances.

    :param targetfilename: filename to search for, only the string before the
        last "." is used for filename matching. Ignored if a selector
        function is specified.
    :param directory: search directory, including all subdirectories
    :param targetFileExtension: string after the last "." in the filename,
        has to be identical if specified. "." in targetFileExtension are
        ignored, thus ".txt" is treated equal to "txt".
    :param selector: a function which is called with the value of
        targetfilename and has to return True (include value) or False
        (discard value). If no selector is specified, equality to
        targetfilename is used.

    :returns: list of matching file paths (str)
    """
    targetFilePaths = list()
    targetfilename = os.path.splitext(targetfilename)[0]
    matchExtensions = targetFileExtension is not None
    if matchExtensions:
        # Normalize ".txt" and "txt" to the same form. The original called
        # .replace() before the None check, so the documented default of
        # targetFileExtension=None raised AttributeError.
        targetFileExtension = targetFileExtension.replace('.', '')
    if selector is None:
        selector = functools.partial(operator.eq, targetfilename)
    for dirpath, dirnames, filenames in os.walk(directory):
        for filename in filenames:
            filenameNoextension = os.path.splitext(filename)[0]
            if selector(filenameNoextension):
                if matchExtensions and not filename.endswith('.' + targetFileExtension):
                    continue
                targetFilePaths.append(os.path.join(dirpath, filename))
    return targetFilePaths
def _rsassa_pss_sign(self, M, h=None, mgf=None, sLen=None):
    """Implements RSASSA-PSS-SIGN() function described in Sect. 8.1.1 of
    RFC 3447.

    Input:
       M: message to be signed, an octet string

    Output:
       signature, an octet string of length k, where k is the length in
       octets of the RSA modulus n.

    On error, None is returned.
    """
    # Set default parameters if not provided
    if h is None:  # By default, sha1
        h = "sha1"
    if h not in _hashFuncParams:
        warning("Key._rsassa_pss_sign(): unknown hash function "
                "provided (%s)" % h)
        return None
    if mgf is None:
        # use mgf1 with underlying hash function
        mgf = lambda x, y: pkcs_mgf1(x, y, h)
    if sLen is None:
        # use Hash output length (A.2.3 of RFC 3447)
        hLen = _hashFuncParams[h][0]
        sLen = hLen

    # 1) EMSA-PSS encoding
    modBits = self.modulusLen
    # Integer division: plain '/' yields a float under Python 3, which
    # would break pkcs_i2osp() below -- it expects an integer length.
    k = modBits // 8
    EM = pkcs_emsa_pss_encode(M, modBits - 1, h, mgf, sLen)
    if EM is None:
        warning("Key._rsassa_pss_sign(): unable to encode")
        return None

    # 2) RSA signature
    m = pkcs_os2ip(EM)   # 2.a)
    s = self._rsasp1(m)  # 2.b)
    S = pkcs_i2osp(s, k) # 2.c)
    return S
def get_log_entry_log_assignment_session(self, proxy):
    """Gets the session for assigning log entry to log mappings.

    arg:    proxy (osid.proxy.Proxy): a proxy
    return: (osid.logging.LogEntryLogAssignmentSession) - a
            ``LogEntryLogAssignmentSession``
    raise:  NullArgument - ``proxy`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_log_entry_log_assignment()`` is
            ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_log_entry_log_assignment()`` is ``true``.*
    """
    if self.supports_log_entry_log_assignment():
        # pylint: disable=no-member
        return sessions.LogEntryLogAssignmentSession(proxy=proxy, runtime=self._runtime)
    raise errors.Unimplemented()
def indexableText(self, include_properties=True):
    """Words found in the various texts of the document.

    :param include_properties: Adds values from properties
    :return: Space separated words of the document.
    """
    collected = set()
    for extractor in self._text_extractors:
        if extractor.content_type not in self.content_types.overrides:
            continue
        for tree in self.content_types.getTreesFor(self, extractor.content_type):
            collected.update(extractor.indexableText(tree))
    if include_properties:
        # NOTE(review): whole property values are added, not split into
        # words -- presumably they are short strings; confirm.
        collected.update(v for v in self.allProperties.values() if v is not None)
    return u' '.join(collected)
def branchings_segments(self):
    """Detect branchings and partition the data into corresponding segments.

    Detect all branchings up to `n_branchings`.

    Writes
    ------
    segs : np.ndarray
        Array of dimension (number of segments) × (number of data points).
        Each row stores a mask array that defines a segment.
    segs_tips : np.ndarray
        Array of dimension (number of segments) × 2. Each row stores the
        indices of the two tip points of each segment.
    segs_names : np.ndarray
        Array of dimension (number of data points). Stores an integer label
        for each segment.
    """
    # The pipeline phases must run in exactly this order: detection
    # produces the raw segments that the later steps refine, label and
    # order along pseudotime.
    for phase in (self.detect_branchings,
                  self.postprocess_segments,
                  self.set_segs_names,
                  self.order_pseudotime):
        phase()
def detach(self, *items):
    """Unlinks all of the specified items from the tree.

    The items and all of their descendants are still present, and may be
    reinserted at another point in the tree, but will not be displayed.
    The root item may not be detached.

    :param items: list of item identifiers
    :type items: sequence[str]
    """
    # Keep the hidden drag-preview treeview in sync with the visible one
    # before delegating to the standard ttk implementation.
    self._visual_drag.detach(*items)
    ttk.Treeview.detach(self, *items)
def create(name, packages=None, user=None):
    """Create a conda env.

    :param name: name of the environment to create
    :param packages: optional comma-separated string of packages to install
    :param user: user to run the conda command as
    :return: result dict with 'result' and 'comment' (or 'error') keys
    """
    # Split the comma-separated spec, dropping empty entries: the original
    # ''.split(',') produced [''] and injected an empty argument into the
    # conda command line when no packages were given.
    packages = [p for p in (packages or '').split(',') if p]
    packages.append('pip')
    args = packages + ['--yes', '-q']
    cmd = _create_conda_cmd('create', args=args, env=name, user=user)
    ret = _execcmd(cmd, user=user, return0=True)
    if ret['retcode'] == 0:
        ret['result'] = True
        # Message typo fixed: 'enviroment' -> 'environment'.
        ret['comment'] = 'Virtual environment "%s" successfully created' % name
    else:
        if ret['stderr'].startswith('Error: prefix already exists:'):
            # conda reports an existing prefix: treat as success.
            ret['result'] = True
            ret['comment'] = 'Virtual environment "%s" already exists' % name
        else:
            ret['result'] = False
            ret['error'] = salt.exceptions.CommandExecutionError(ret['stderr'])
    return ret
def parse_component_config(self, component_config: Dict[str, Union[Dict, List]]) -> List[str]:
    """Parses a hierarchical component specification into a list of
    standardized component definitions.

    This default parser expects component configurations as a list of dicts.
    Each dict at the top level corresponds to a different package and has a
    single key. This key may be just the name of the package or a Python
    style import path to the module in which components live. The values of
    the top level dicts are a list of dicts or strings. If dicts, the keys
    are another step along the import path. If strings, the strings are
    representations of calls to the class constructor of components to be
    generated. This pattern may be arbitrarily nested.

    Parameters
    ----------
    component_config
        A hierarchical component specification blob.

    Returns
    -------
    List
        A list of standardized component definitions. Component definition
        strings are specified as
        ``'absolute.import.path.ClassName("argument1", "argument2", ...)'``.
    """
    # Delegate to the module-level helper that implements the recursion.
    return _parse_component_config(component_config)
def scalloping_loss(wnd):
    """Positive number with the scalloping loss in dB."""
    # Window response at a half-bin frequency offset, relative to the
    # coherent gain sum(wnd); the loss is that ratio expressed in dB.
    half_bin_phasor = cexp(line(len(wnd), 0, -1j * pi))
    peak_ratio = abs(sum(wnd * half_bin_phasor)) / sum(wnd)
    return -dB20(peak_ratio)
def inception_v3_base(inputs,
                      final_endpoint='Mixed_7c',
                      min_depth=16,
                      depth_multiplier=1.0,
                      scope=None):
  """Inception model from http://arxiv.org/abs/1512.00567.

  Constructs an Inception v3 network from inputs to the given final
  endpoint. This method can construct the network up to the final
  inception block Mixed_7c.

  Note that the names of the layers in the paper do not correspond to the
  names of the endpoints registered by this function although they build
  the same network. Here is a mapping from the old_names to the new names:

    Old name          | New name
    conv0             | Conv2d_1a_3x3
    conv1             | Conv2d_2a_3x3
    conv2             | Conv2d_2b_3x3
    pool1             | MaxPool_3a_3x3
    conv3             | Conv2d_3b_1x1
    conv4             | Conv2d_4a_3x3
    pool2             | MaxPool_5a_3x3
    mixed_35x35x256a  | Mixed_5b
    mixed_35x35x288a  | Mixed_5c
    mixed_35x35x288b  | Mixed_5d
    mixed_17x17x768a  | Mixed_6a
    mixed_17x17x768b  | Mixed_6b
    mixed_17x17x768c  | Mixed_6c
    mixed_17x17x768d  | Mixed_6d
    mixed_17x17x768e  | Mixed_6e
    mixed_8x8x1280a   | Mixed_7a
    mixed_8x8x2048a   | Mixed_7b
    mixed_8x8x2048b   | Mixed_7c

  Args:
    inputs: a tensor of size [batch_size, height, width, channels].
    final_endpoint: specifies the endpoint to construct the network up to.
      It can be one of ['Conv2d_1a_3x3', 'Conv2d_2a_3x3', 'Conv2d_2b_3x3',
      'MaxPool_3a_3x3', 'Conv2d_3b_1x1', 'Conv2d_4a_3x3', 'MaxPool_5a_3x3',
      'Mixed_5b', 'Mixed_5c', 'Mixed_5d', 'Mixed_6a', 'Mixed_6b',
      'Mixed_6c', 'Mixed_6d', 'Mixed_6e', 'Mixed_7a', 'Mixed_7b',
      'Mixed_7c'].
    min_depth: Minimum depth value (number of channels) for all convolution
      ops. Enforced when depth_multiplier < 1, and not an active constraint
      when depth_multiplier >= 1.
    depth_multiplier: Float multiplier for the depth (number of channels)
      for all convolution ops. The value must be greater than zero. Typical
      usage will be to set this value in (0, 1) to reduce the number of
      parameters or computation cost of the model.
    scope: Optional variable_scope.

  Returns:
    tensor_out: output tensor corresponding to the final_endpoint.
    end_points: a set of activations for external use, for example
      summaries or losses.

  Raises:
    ValueError: if final_endpoint is not set to one of the predefined
      values, or depth_multiplier <= 0
  """
  # end_points will collect relevant activations for external use, for example
  # summaries or losses.
  end_points = {}
  if depth_multiplier <= 0:
    raise ValueError('depth_multiplier is not greater than zero.')
  # Scale a nominal channel count, never dropping below min_depth.
  def depth(d):
    return max(int(d * depth_multiplier), min_depth)
  with tf.variable_scope(scope, 'InceptionV3', [inputs]):
    # Stem: plain conv/pool layers, mostly VALID padding.
    with slim.arg_scope([slim.conv2d, slim.max_pool2d, slim.avg_pool2d],
                        stride=1, padding='VALID'):
      # 299 x 299 x 3
      end_point = 'Conv2d_1a_3x3'
      net = slim.conv2d(inputs, depth(32), [3, 3], stride=2, scope=end_point)
      end_points[end_point] = net
      if end_point == final_endpoint: return net, end_points
      # 149 x 149 x 32
      end_point = 'Conv2d_2a_3x3'
      net = slim.conv2d(net, depth(32), [3, 3], scope=end_point)
      end_points[end_point] = net
      if end_point == final_endpoint: return net, end_points
      # 147 x 147 x 32
      end_point = 'Conv2d_2b_3x3'
      net = slim.conv2d(net, depth(64), [3, 3], padding='SAME', scope=end_point)
      end_points[end_point] = net
      if end_point == final_endpoint: return net, end_points
      # 147 x 147 x 64
      end_point = 'MaxPool_3a_3x3'
      net = slim.max_pool2d(net, [3, 3], stride=2, scope=end_point)
      end_points[end_point] = net
      if end_point == final_endpoint: return net, end_points
      # 73 x 73 x 64
      end_point = 'Conv2d_3b_1x1'
      net = slim.conv2d(net, depth(80), [1, 1], scope=end_point)
      end_points[end_point] = net
      if end_point == final_endpoint: return net, end_points
      # 73 x 73 x 80.
      end_point = 'Conv2d_4a_3x3'
      net = slim.conv2d(net, depth(192), [3, 3], scope=end_point)
      end_points[end_point] = net
      if end_point == final_endpoint: return net, end_points
      # 71 x 71 x 192.
      end_point = 'MaxPool_5a_3x3'
      net = slim.max_pool2d(net, [3, 3], stride=2, scope=end_point)
      end_points[end_point] = net
      if end_point == final_endpoint: return net, end_points
      # 35 x 35 x 192.

    # Inception blocks
    with slim.arg_scope([slim.conv2d, slim.max_pool2d, slim.avg_pool2d],
                        stride=1, padding='SAME'):
      # mixed: 35 x 35 x 256.
      end_point = 'Mixed_5b'
      with tf.variable_scope(end_point):
        with tf.variable_scope('Branch_0'):
          branch_0 = slim.conv2d(net, depth(64), [1, 1], scope='Conv2d_0a_1x1')
        with tf.variable_scope('Branch_1'):
          branch_1 = slim.conv2d(net, depth(48), [1, 1], scope='Conv2d_0a_1x1')
          branch_1 = slim.conv2d(branch_1, depth(64), [5, 5],
                                 scope='Conv2d_0b_5x5')
        with tf.variable_scope('Branch_2'):
          branch_2 = slim.conv2d(net, depth(64), [1, 1], scope='Conv2d_0a_1x1')
          branch_2 = slim.conv2d(branch_2, depth(96), [3, 3],
                                 scope='Conv2d_0b_3x3')
          branch_2 = slim.conv2d(branch_2, depth(96), [3, 3],
                                 scope='Conv2d_0c_3x3')
        with tf.variable_scope('Branch_3'):
          branch_3 = slim.avg_pool2d(net, [3, 3], scope='AvgPool_0a_3x3')
          branch_3 = slim.conv2d(branch_3, depth(32), [1, 1],
                                 scope='Conv2d_0b_1x1')
        net = tf.concat([branch_0, branch_1, branch_2, branch_3], 3)
      end_points[end_point] = net
      if end_point == final_endpoint: return net, end_points

      # mixed_1: 35 x 35 x 288.
      end_point = 'Mixed_5c'
      with tf.variable_scope(end_point):
        with tf.variable_scope('Branch_0'):
          branch_0 = slim.conv2d(net, depth(64), [1, 1], scope='Conv2d_0a_1x1')
        with tf.variable_scope('Branch_1'):
          # NOTE(review): scope names 'Conv2d_0b_1x1'/'Conv_1_0c_5x5' are
          # irregular but kept -- they match released checkpoints.
          branch_1 = slim.conv2d(net, depth(48), [1, 1], scope='Conv2d_0b_1x1')
          branch_1 = slim.conv2d(branch_1, depth(64), [5, 5],
                                 scope='Conv_1_0c_5x5')
        with tf.variable_scope('Branch_2'):
          branch_2 = slim.conv2d(net, depth(64), [1, 1], scope='Conv2d_0a_1x1')
          branch_2 = slim.conv2d(branch_2, depth(96), [3, 3],
                                 scope='Conv2d_0b_3x3')
          branch_2 = slim.conv2d(branch_2, depth(96), [3, 3],
                                 scope='Conv2d_0c_3x3')
        with tf.variable_scope('Branch_3'):
          branch_3 = slim.avg_pool2d(net, [3, 3], scope='AvgPool_0a_3x3')
          branch_3 = slim.conv2d(branch_3, depth(64), [1, 1],
                                 scope='Conv2d_0b_1x1')
        net = tf.concat([branch_0, branch_1, branch_2, branch_3], 3)
      end_points[end_point] = net
      if end_point == final_endpoint: return net, end_points

      # mixed_2: 35 x 35 x 288.
      end_point = 'Mixed_5d'
      with tf.variable_scope(end_point):
        with tf.variable_scope('Branch_0'):
          branch_0 = slim.conv2d(net, depth(64), [1, 1], scope='Conv2d_0a_1x1')
        with tf.variable_scope('Branch_1'):
          branch_1 = slim.conv2d(net, depth(48), [1, 1], scope='Conv2d_0a_1x1')
          branch_1 = slim.conv2d(branch_1, depth(64), [5, 5],
                                 scope='Conv2d_0b_5x5')
        with tf.variable_scope('Branch_2'):
          branch_2 = slim.conv2d(net, depth(64), [1, 1], scope='Conv2d_0a_1x1')
          branch_2 = slim.conv2d(branch_2, depth(96), [3, 3],
                                 scope='Conv2d_0b_3x3')
          branch_2 = slim.conv2d(branch_2, depth(96), [3, 3],
                                 scope='Conv2d_0c_3x3')
        with tf.variable_scope('Branch_3'):
          branch_3 = slim.avg_pool2d(net, [3, 3], scope='AvgPool_0a_3x3')
          branch_3 = slim.conv2d(branch_3, depth(64), [1, 1],
                                 scope='Conv2d_0b_1x1')
        net = tf.concat([branch_0, branch_1, branch_2, branch_3], 3)
      end_points[end_point] = net
      if end_point == final_endpoint: return net, end_points

      # mixed_3: 17 x 17 x 768.
      end_point = 'Mixed_6a'
      with tf.variable_scope(end_point):
        with tf.variable_scope('Branch_0'):
          branch_0 = slim.conv2d(net, depth(384), [3, 3], stride=2,
                                 padding='VALID', scope='Conv2d_1a_1x1')
        with tf.variable_scope('Branch_1'):
          branch_1 = slim.conv2d(net, depth(64), [1, 1], scope='Conv2d_0a_1x1')
          branch_1 = slim.conv2d(branch_1, depth(96), [3, 3],
                                 scope='Conv2d_0b_3x3')
          branch_1 = slim.conv2d(branch_1, depth(96), [3, 3], stride=2,
                                 padding='VALID', scope='Conv2d_1a_1x1')
        with tf.variable_scope('Branch_2'):
          branch_2 = slim.max_pool2d(net, [3, 3], stride=2, padding='VALID',
                                     scope='MaxPool_1a_3x3')
        net = tf.concat([branch_0, branch_1, branch_2], 3)
      end_points[end_point] = net
      if end_point == final_endpoint: return net, end_points

      # mixed4: 17 x 17 x 768.
      end_point = 'Mixed_6b'
      with tf.variable_scope(end_point):
        with tf.variable_scope('Branch_0'):
          branch_0 = slim.conv2d(net, depth(192), [1, 1], scope='Conv2d_0a_1x1')
        with tf.variable_scope('Branch_1'):
          # Factorized 7x7 convolution: 1x7 followed by 7x1.
          branch_1 = slim.conv2d(net, depth(128), [1, 1], scope='Conv2d_0a_1x1')
          branch_1 = slim.conv2d(branch_1, depth(128), [1, 7],
                                 scope='Conv2d_0b_1x7')
          branch_1 = slim.conv2d(branch_1, depth(192), [7, 1],
                                 scope='Conv2d_0c_7x1')
        with tf.variable_scope('Branch_2'):
          branch_2 = slim.conv2d(net, depth(128), [1, 1], scope='Conv2d_0a_1x1')
          branch_2 = slim.conv2d(branch_2, depth(128), [7, 1],
                                 scope='Conv2d_0b_7x1')
          branch_2 = slim.conv2d(branch_2, depth(128), [1, 7],
                                 scope='Conv2d_0c_1x7')
          branch_2 = slim.conv2d(branch_2, depth(128), [7, 1],
                                 scope='Conv2d_0d_7x1')
          branch_2 = slim.conv2d(branch_2, depth(192), [1, 7],
                                 scope='Conv2d_0e_1x7')
        with tf.variable_scope('Branch_3'):
          branch_3 = slim.avg_pool2d(net, [3, 3], scope='AvgPool_0a_3x3')
          branch_3 = slim.conv2d(branch_3, depth(192), [1, 1],
                                 scope='Conv2d_0b_1x1')
        net = tf.concat([branch_0, branch_1, branch_2, branch_3], 3)
      end_points[end_point] = net
      if end_point == final_endpoint: return net, end_points

      # mixed_5: 17 x 17 x 768.
      end_point = 'Mixed_6c'
      with tf.variable_scope(end_point):
        with tf.variable_scope('Branch_0'):
          branch_0 = slim.conv2d(net, depth(192), [1, 1], scope='Conv2d_0a_1x1')
        with tf.variable_scope('Branch_1'):
          branch_1 = slim.conv2d(net, depth(160), [1, 1], scope='Conv2d_0a_1x1')
          branch_1 = slim.conv2d(branch_1, depth(160), [1, 7],
                                 scope='Conv2d_0b_1x7')
          branch_1 = slim.conv2d(branch_1, depth(192), [7, 1],
                                 scope='Conv2d_0c_7x1')
        with tf.variable_scope('Branch_2'):
          branch_2 = slim.conv2d(net, depth(160), [1, 1], scope='Conv2d_0a_1x1')
          branch_2 = slim.conv2d(branch_2, depth(160), [7, 1],
                                 scope='Conv2d_0b_7x1')
          branch_2 = slim.conv2d(branch_2, depth(160), [1, 7],
                                 scope='Conv2d_0c_1x7')
          branch_2 = slim.conv2d(branch_2, depth(160), [7, 1],
                                 scope='Conv2d_0d_7x1')
          branch_2 = slim.conv2d(branch_2, depth(192), [1, 7],
                                 scope='Conv2d_0e_1x7')
        with tf.variable_scope('Branch_3'):
          branch_3 = slim.avg_pool2d(net, [3, 3], scope='AvgPool_0a_3x3')
          branch_3 = slim.conv2d(branch_3, depth(192), [1, 1],
                                 scope='Conv2d_0b_1x1')
        net = tf.concat([branch_0, branch_1, branch_2, branch_3], 3)
      end_points[end_point] = net
      if end_point == final_endpoint: return net, end_points

      # mixed_6: 17 x 17 x 768.
      end_point = 'Mixed_6d'
      with tf.variable_scope(end_point):
        with tf.variable_scope('Branch_0'):
          branch_0 = slim.conv2d(net, depth(192), [1, 1], scope='Conv2d_0a_1x1')
        with tf.variable_scope('Branch_1'):
          branch_1 = slim.conv2d(net, depth(160), [1, 1], scope='Conv2d_0a_1x1')
          branch_1 = slim.conv2d(branch_1, depth(160), [1, 7],
                                 scope='Conv2d_0b_1x7')
          branch_1 = slim.conv2d(branch_1, depth(192), [7, 1],
                                 scope='Conv2d_0c_7x1')
        with tf.variable_scope('Branch_2'):
          branch_2 = slim.conv2d(net, depth(160), [1, 1], scope='Conv2d_0a_1x1')
          branch_2 = slim.conv2d(branch_2, depth(160), [7, 1],
                                 scope='Conv2d_0b_7x1')
          branch_2 = slim.conv2d(branch_2, depth(160), [1, 7],
                                 scope='Conv2d_0c_1x7')
          branch_2 = slim.conv2d(branch_2, depth(160), [7, 1],
                                 scope='Conv2d_0d_7x1')
          branch_2 = slim.conv2d(branch_2, depth(192), [1, 7],
                                 scope='Conv2d_0e_1x7')
        with tf.variable_scope('Branch_3'):
          branch_3 = slim.avg_pool2d(net, [3, 3], scope='AvgPool_0a_3x3')
          branch_3 = slim.conv2d(branch_3, depth(192), [1, 1],
                                 scope='Conv2d_0b_1x1')
        net = tf.concat([branch_0, branch_1, branch_2, branch_3], 3)
      end_points[end_point] = net
      if end_point == final_endpoint: return net, end_points

      # mixed_7: 17 x 17 x 768.
      end_point = 'Mixed_6e'
      with tf.variable_scope(end_point):
        with tf.variable_scope('Branch_0'):
          branch_0 = slim.conv2d(net, depth(192), [1, 1], scope='Conv2d_0a_1x1')
        with tf.variable_scope('Branch_1'):
          branch_1 = slim.conv2d(net, depth(192), [1, 1], scope='Conv2d_0a_1x1')
          branch_1 = slim.conv2d(branch_1, depth(192), [1, 7],
                                 scope='Conv2d_0b_1x7')
          branch_1 = slim.conv2d(branch_1, depth(192), [7, 1],
                                 scope='Conv2d_0c_7x1')
        with tf.variable_scope('Branch_2'):
          branch_2 = slim.conv2d(net, depth(192), [1, 1], scope='Conv2d_0a_1x1')
          branch_2 = slim.conv2d(branch_2, depth(192), [7, 1],
                                 scope='Conv2d_0b_7x1')
          branch_2 = slim.conv2d(branch_2, depth(192), [1, 7],
                                 scope='Conv2d_0c_1x7')
          branch_2 = slim.conv2d(branch_2, depth(192), [7, 1],
                                 scope='Conv2d_0d_7x1')
          branch_2 = slim.conv2d(branch_2, depth(192), [1, 7],
                                 scope='Conv2d_0e_1x7')
        with tf.variable_scope('Branch_3'):
          branch_3 = slim.avg_pool2d(net, [3, 3], scope='AvgPool_0a_3x3')
          branch_3 = slim.conv2d(branch_3, depth(192), [1, 1],
                                 scope='Conv2d_0b_1x1')
        net = tf.concat([branch_0, branch_1, branch_2, branch_3], 3)
      end_points[end_point] = net
      if end_point == final_endpoint: return net, end_points

      # mixed_8: 8 x 8 x 1280.
      end_point = 'Mixed_7a'
      with tf.variable_scope(end_point):
        with tf.variable_scope('Branch_0'):
          branch_0 = slim.conv2d(net, depth(192), [1, 1], scope='Conv2d_0a_1x1')
          branch_0 = slim.conv2d(branch_0, depth(320), [3, 3], stride=2,
                                 padding='VALID', scope='Conv2d_1a_3x3')
        with tf.variable_scope('Branch_1'):
          branch_1 = slim.conv2d(net, depth(192), [1, 1], scope='Conv2d_0a_1x1')
          branch_1 = slim.conv2d(branch_1, depth(192), [1, 7],
                                 scope='Conv2d_0b_1x7')
          branch_1 = slim.conv2d(branch_1, depth(192), [7, 1],
                                 scope='Conv2d_0c_7x1')
          branch_1 = slim.conv2d(branch_1, depth(192), [3, 3], stride=2,
                                 padding='VALID', scope='Conv2d_1a_3x3')
        with tf.variable_scope('Branch_2'):
          branch_2 = slim.max_pool2d(net, [3, 3], stride=2, padding='VALID',
                                     scope='MaxPool_1a_3x3')
        net = tf.concat([branch_0, branch_1, branch_2], 3)
      end_points[end_point] = net
      if end_point == final_endpoint: return net, end_points

      # mixed_9: 8 x 8 x 2048.
      end_point = 'Mixed_7b'
      with tf.variable_scope(end_point):
        with tf.variable_scope('Branch_0'):
          branch_0 = slim.conv2d(net, depth(320), [1, 1], scope='Conv2d_0a_1x1')
        with tf.variable_scope('Branch_1'):
          branch_1 = slim.conv2d(net, depth(384), [1, 1], scope='Conv2d_0a_1x1')
          # Expanded-filter-bank: parallel 1x3 and 3x1 outputs concatenated.
          branch_1 = tf.concat([
              slim.conv2d(branch_1, depth(384), [1, 3], scope='Conv2d_0b_1x3'),
              slim.conv2d(branch_1, depth(384), [3, 1], scope='Conv2d_0b_3x1')], 3)
        with tf.variable_scope('Branch_2'):
          branch_2 = slim.conv2d(net, depth(448), [1, 1], scope='Conv2d_0a_1x1')
          branch_2 = slim.conv2d(branch_2, depth(384), [3, 3],
                                 scope='Conv2d_0b_3x3')
          branch_2 = tf.concat([
              slim.conv2d(branch_2, depth(384), [1, 3], scope='Conv2d_0c_1x3'),
              slim.conv2d(branch_2, depth(384), [3, 1], scope='Conv2d_0d_3x1')], 3)
        with tf.variable_scope('Branch_3'):
          branch_3 = slim.avg_pool2d(net, [3, 3], scope='AvgPool_0a_3x3')
          branch_3 = slim.conv2d(branch_3, depth(192), [1, 1],
                                 scope='Conv2d_0b_1x1')
        net = tf.concat([branch_0, branch_1, branch_2, branch_3], 3)
      end_points[end_point] = net
      if end_point == final_endpoint: return net, end_points

      # mixed_10: 8 x 8 x 2048.
      end_point = 'Mixed_7c'
      with tf.variable_scope(end_point):
        with tf.variable_scope('Branch_0'):
          branch_0 = slim.conv2d(net, depth(320), [1, 1], scope='Conv2d_0a_1x1')
        with tf.variable_scope('Branch_1'):
          branch_1 = slim.conv2d(net, depth(384), [1, 1], scope='Conv2d_0a_1x1')
          branch_1 = tf.concat([
              slim.conv2d(branch_1, depth(384), [1, 3], scope='Conv2d_0b_1x3'),
              slim.conv2d(branch_1, depth(384), [3, 1], scope='Conv2d_0c_3x1')], 3)
        with tf.variable_scope('Branch_2'):
          branch_2 = slim.conv2d(net, depth(448), [1, 1], scope='Conv2d_0a_1x1')
          branch_2 = slim.conv2d(branch_2, depth(384), [3, 3],
                                 scope='Conv2d_0b_3x3')
          branch_2 = tf.concat([
              slim.conv2d(branch_2, depth(384), [1, 3], scope='Conv2d_0c_1x3'),
              slim.conv2d(branch_2, depth(384), [3, 1], scope='Conv2d_0d_3x1')], 3)
        with tf.variable_scope('Branch_3'):
          branch_3 = slim.avg_pool2d(net, [3, 3], scope='AvgPool_0a_3x3')
          branch_3 = slim.conv2d(branch_3, depth(192), [1, 1],
                                 scope='Conv2d_0b_1x1')
        net = tf.concat([branch_0, branch_1, branch_2, branch_3], 3)
      end_points[end_point] = net
      if end_point == final_endpoint: return net, end_points
    raise ValueError('Unknown final endpoint %s' % final_endpoint)
def get(self, **kwargs):
    """Return the full specification of the remote app object.

    Fetches the app contents via the API ``/app-xxxx[/yyyy]/get`` method,
    addressing the app either by its ID (when known) or by its
    name and alias.

    :returns: Full specification of the remote app object
    :rtype: dict
    """
    if self._dxid is None:
        return dxpy.api.app_get('app-' + self._name, alias=self._alias, **kwargs)
    return dxpy.api.app_get(self._dxid, **kwargs)
def set(self, name, value):
    """Assign *value* to the option called *name*.

    A name that has not been registered yet is first registered with an
    option produced by the option generator, then assigned.

    Args:
        name (str): The name of the option.
        value: The value to set the option to.

    Raises:
        TypeError: If the value is not a string or appropriate native type.
        ValueError: If the value is a string but cannot be coerced.
    """
    if name not in self._options:
        self.register(name, self._generator())
    option = self._options[name]
    return option.__set__(self, value)
def parse_arguments(argv: Optional[Sequence[str]] = None) -> argparse.Namespace:
    """Parse the command line arguments.

    Args:
        argv:
            If not ``None``, use the provided command line arguments for
            parsing. Otherwise, extract them automatically.

    Returns:
        The argparse object representing the parsed arguments.
    """
    arg_parser = argparse.ArgumentParser(
        description='Git credential helper using pass as the data source.',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    arg_parser.add_argument(
        '-m', '--mapping',
        type=argparse.FileType('r'),
        metavar='MAPPING_FILE',
        default=None,
        help='A mapping file to be used, specifying how hosts '
             'map to pass entries. Overrides the default mapping files from '
             'XDG config locations, usually: {config_file}'.format(
                 config_file=DEFAULT_CONFIG_FILE))
    arg_parser.add_argument(
        '-l', '--logging',
        action='store_true',
        default=False,
        help='Print debug messages on stderr. '
             'Might include sensitive information')
    arg_parser.add_argument(
        'action',
        type=str,
        metavar='ACTION',
        help='Action to preform as specified in the git credential API')
    return arg_parser.parse_args(argv)
def execute(options):
    """Execute the sync tool with the given (docopt-style) options dict."""
    # The service-account key referenced here is the PKCS 12 key downloaded
    # from the Google APIs Console when the Service account was created.
    package_name = options['<package>']
    source_directory = options['<output_dir>']
    upstream = options['upload'] is True
    sub_tasks = {
        'images': options['--images'],
        'listings': options['--listings'],
        'inapp': options['--inapp'],
    }
    # No explicit sub-task flags means "run everything".
    if sub_tasks == {'images': False, 'listings': False, 'inapp': False}:
        sub_tasks = {'images': True, 'listings': True, 'inapp': True}
    credentials = create_credentials(
        credentials_file=options['--credentials'],
        service_email=options['--service-email'],
        service_key=options['--key'])
    command = SyncCommand(package_name, source_directory, upstream,
                          credentials, **sub_tasks)
    command.execute()
def file_uptodate(fname, cmp_fname):
    """Check if a file exists, is non-empty and is more recent than cmp_fname."""
    try:
        if not (file_exists(fname) and file_exists(cmp_fname)):
            return False
        return os.path.getmtime(fname) >= os.path.getmtime(cmp_fname)
    except OSError:
        # Races with file removal between the existence check and stat.
        return False
def import_from_chain(cls, name, certificate_file, private_key=None):
    """Import the server certificate, intermediate and optionally private key
    from a certificate chain file.

    The expected format of the chain file follows RFC 4346: the server
    certificate should come first, followed by any intermediate
    certificates, optionally followed by the root trusted authority. The
    private key can be anywhere in this order.

    .. note::
        There is no validation done on the certificates, therefore the
        order is assumed to be true. The root certificate is not imported
        here and should be imported separately as a trusted root CA.

    With only two certificate entries they are assumed to be the server
    certificate and the root certificate (no intermediate); with 3 or more,
    the first is the server certificate, the 2nd the intermediate, and the
    rest are ignored.

    .. warning::
        A private key is required to create a valid TLS Server Credential.

    :param str name: name of TLS Server Credential
    :param str certificate_file: fully qualified path to chain file or file
        object
    :param str private_key: fully qualified path to key file or file object
        (only needed when the key is not within the chain file)
    :raises IOError: error occurred reading or finding specified file
    :raises ValueError: Format issues with chain file or empty
    :rtype: TLSServerCredential
    """
    # contents is a list of (PEM header, PEM payload) pairs in file order.
    contents = load_cert_chain(certificate_file)
    # Pull the private key (if present) out of the chain; iterate over a
    # copy since entries are removed while iterating.
    for pem in list(contents):
        if b'PRIVATE KEY' in pem[0]:
            private_key = pem[1]
            contents.remove(pem)
    if not private_key:
        raise ValueError(
            'Private key was not found in chain file and '
            'was not provided. The private key is required to create a '
            'TLS Server Credential.')
    if contents:
        if len(contents) == 1:
            # Only the server certificate remains; nothing intermediate.
            certificate = contents[0][1]
            intermediate = None
        else:
            # First entry is the server cert, second the intermediate;
            # further entries (e.g. the root CA) are intentionally ignored.
            certificate = contents[0][1]
            intermediate = contents[1][1]
    else:
        raise ValueError(
            'No certificates found in certificate chain file. Did you '
            'provide only a private key?')
    tls = TLSServerCredential.create(name)
    try:
        tls.import_certificate(certificate)
        tls.import_private_key(private_key)
        if intermediate is not None:
            tls.import_intermediate_certificate(intermediate)
    except CertificateImportError:
        # Roll back the partially created credential before re-raising.
        tls.delete()
        raise
    return tls
def send_stdout(cls, sock, payload):
    """Write *payload* to *sock* as a chunk of type STDOUT."""
    chunk_type = ChunkType.STDOUT
    cls.write_chunk(sock, chunk_type, payload)
def _parse_flags ( element ) : """Parse OSM XML element for generic data . Args : element ( etree . Element ) : Element to parse Returns : tuple : Generic OSM data for object instantiation"""
visible = True if element . get ( 'visible' ) else False user = element . get ( 'user' ) timestamp = element . get ( 'timestamp' ) if timestamp : timestamp = utils . Timestamp . parse_isoformat ( timestamp ) tags = { } try : for tag in element [ 'tag' ] : key = tag . get ( 'k' ) value = tag . get ( 'v' ) tags [ key ] = value except AttributeError : pass return visible , user , timestamp , tags
def stageContent(self, configFiles, dateTimeFormat=None):
    """Parses a JSON configuration file to stage content.

    Args:
        configFiles (list): A list of JSON files on disk containing
            configuration data for staging content.
        dateTimeFormat (str): A valid date formatting directive, as
            understood by :py:meth:`datetime.datetime.strftime`. Defaults to
            ``None``, i.e., ``'%Y-%m-%d %H:%M'``.
    """
    # Pre-declare the working variables so the finally block below can
    # safely reset and del them even when an exception fires early.
    results = None
    groups = None
    items = None
    group = None
    content = None
    contentInfo = None
    startTime = None
    orgTools = None
    if dateTimeFormat is None:
        dateTimeFormat = '%Y-%m-%d %H:%M'
    scriptStartTime = datetime.datetime.now()
    try:
        print("********************Stage Content Started********************")
        print("Script started at %s" % scriptStartTime.strftime(dateTimeFormat))
        if self.securityhandler.valid == False:
            print("Login required")
        else:
            orgTools = orgtools.orgtools(securityinfo=self)
            if orgTools is None:
                print("Error creating org tools")
            else:
                for configFile in configFiles:
                    config = common.init_config_json(config_file=configFile)
                    if config is not None:
                        if 'ContentItems' in config:
                            startTime = datetime.datetime.now()
                            print("Processing config %s, starting at: %s" % (configFile, startTime.strftime(dateTimeFormat)))
                            contentInfo = config['ContentItems']
                            for cont in contentInfo:
                                content = cont['Content']
                                group = cont['ShareToGroup']
                                print("Sharing content to: %s" % group)
                                if os.path.isfile(content):
                                    with open(content, 'rb') as csvfile:
                                        items = []
                                        groups = []
                                        # Each CSV row holds an id; route it
                                        # to either the group or item list.
                                        for row in csv.DictReader(csvfile, dialect='excel'):
                                            if cont['Type'] == "Group":
                                                groups.append(row['id'])
                                            elif cont['Type'] == "Items":
                                                items.append(row['id'])
                                        results = orgTools.shareItemsToGroup(shareToGroupName=group, items=items, groups=groups)
                            print("Config %s completed, time to complete: %s" % (configFile, str(datetime.datetime.now() - startTime)))
                        else:
                            print("Config file missing ContentItems section")
                    else:
                        print("Config %s not found" % configFile)
    except (TypeError, ValueError, AttributeError) as e:
        print(e)
    except (common.ArcRestHelperError) as e:
        # ArcRestHelperError packs structured error info into e[0].
        print("error in function: %s" % e[0]['function'])
        print("error on line: %s" % e[0]['line'])
        print("error in file name: %s" % e[0]['filename'])
        print("with error message: %s" % e[0]['synerror'])
        if 'arcpyError' in e[0]:
            print("with arcpy message: %s" % e[0]['arcpyError'])
    except Exception as e:
        if (reportToolsInstalled):
            if isinstance(e, (ReportTools.ReportToolsError, DataPrep.DataPrepError)):
                print("error in function: %s" % e[0]['function'])
                print("error on line: %s" % e[0]['line'])
                print("error in file name: %s" % e[0]['filename'])
                print("with error message: %s" % e[0]['synerror'])
                if 'arcpyError' in e[0]:
                    print("with arcpy message: %s" % e[0]['arcpyError'])
            else:
                line, filename, synerror = trace()
                print("error on line: %s" % line)
                print("error in file name: %s" % filename)
                print("with error message: %s" % synerror)
        else:
            line, filename, synerror = trace()
            print("error on line: %s" % line)
            print("error in file name: %s" % filename)
            print("with error message: %s" % synerror)
    finally:
        print("Script complete, time to complete: %s" % str(datetime.datetime.now() - scriptStartTime))
        print("###############Stage Content Completed#################")
        print("")
        # if orgTools is not None:
        #     orgTools.dispose()
        results = None
        groups = None
        items = None
        group = None
        content = None
        contentInfo = None
        startTime = None
        orgTools = None
        del results
        del groups
        del items
        del group
        del content
        del contentInfo
        del startTime
        del orgTools
        gc.collect()
def register_workflow(self, name, workflow):
    """Register a workflow under *name* so it shows up in the workflows list."""
    registry = self.workflows
    assert name not in registry
    registry[name] = workflow
def _ReadStructureFromByteStream(self, byte_stream, file_offset, data_type_map, description, context=None):
    """Reads a structure from a byte stream.

    Args:
        byte_stream (bytes): byte stream.
        file_offset (int): offset of the data relative from the start of
            the file-like object.
        data_type_map (dtfabric.DataTypeMap): data type map of the structure.
        description (str): description of the structure.
        context (Optional[dtfabric.DataTypeMapContext]): data type map
            context.

    Returns:
        object: structure values object.

    Raises:
        FileFormatError: if the structure cannot be read.
        ValueError: if file-like object or date type map are invalid.
    """
    if not byte_stream:
        raise ValueError('Invalid byte stream.')
    if not data_type_map:
        raise ValueError('Invalid data type map.')
    try:
        return data_type_map.MapByteStream(byte_stream, context=context)
    except dtfabric_errors.MappingError as exception:
        error_message = (
            'Unable to map {0:s} data at offset: 0x{1:08x} with error: '
            '{2!s}').format(description, file_offset, exception)
        raise errors.FileFormatError(error_message)
def _to_cwlfile_with_indexes(val, get_retriever):
    """Convert reads with ready to go indexes into the right CWL object.

    Identifies the top level directory and creates a tarball, avoiding
    trying to handle complex secondary setups which are not cross platform.

    Skips doing this for reference files and standard setups like bwa,
    which take up too much time and space to unpack multiple times.
    """
    # Drop index files known to cause problems downstream.
    val["indexes"] = _index_blacklist(val["indexes"])
    # Work on local (remote-prefix stripped) paths for the layout checks.
    tval = {"base": _remove_remote_prefix(val["base"]),
            "indexes": [_remove_remote_prefix(f) for f in val["indexes"]]}
    # Standard named set of indices, like bwa
    # Do not include snpEff, which we need to isolate inside a nested directory
    # hisat2 indices do also not localize cleanly due to compilicated naming
    cp_dir, cp_base = os.path.split(os.path.commonprefix([tval["base"]] + tval["indexes"]))
    if (cp_base and cp_dir == os.path.dirname(tval["base"])
            and not ("/snpeff/" in cp_dir or "/hisat2" in cp_dir)):
        # Flat same-directory index set: pass indexes through as
        # secondary files of the base item.
        return _item_to_cwldata(val["base"], get_retriever, val["indexes"])
    else:
        # Nested layout: ship the whole containing directory as a tarball.
        dirname = os.path.dirname(tval["base"])
        assert all([x.startswith(dirname) for x in tval["indexes"]])
        return {"class": "File", "path": directory_tarball(dirname)}
def israw(self):
    """Return True if the PTY should operate in raw mode.

    False when the container was not started with tty=True. The answer is
    computed once and cached on ``self.raw``.
    """
    if self.raw is not None:
        return self.raw
    info = self.container_info()
    self.raw = self.stdout.isatty() and info['Config']['Tty']
    return self.raw
async def helo(self, from_host=None):
    """Send the SMTP ``HELO`` command, identifying the client and starting
    the session.

    When ``from_host`` is None, the client FQDN is used instead. See
    `RFC 5321 § 4.1.1.1`_ for details.

    Args:
        from_host (str or None): Name to use to identify the client.

    Raises:
        ConnectionResetError: If the connection with the server is
            unexpectedely lost.
        SMTPCommandFailedError: If the server refuses our HELO greeting.

    Returns:
        (int, str): A (code, message) 2-tuple containing the server
            response.

    .. _`RFC 5321 § 4.1.1.1`: https://tools.ietf.org/html/rfc5321#section-4.1.1.1
    """
    client_name = self.fqdn if from_host is None else from_host
    reply = await self.do_cmd("HELO", client_name)
    self.last_helo_response = reply
    return reply
def host_is_trusted(hostname, trusted_list):
    """Checks if a host is trusted against a list.  This also takes care
    of port normalization.

    .. versionadded:: 0.9

    :param hostname: the hostname to check
    :param trusted_list: a list of hostnames to check against.  If a
                         hostname starts with a dot it will match against
                         all subdomains as well.
    """
    if not hostname:
        return False
    if isinstance(trusted_list, string_types):
        trusted_list = [trusted_list]

    def _normalize(host):
        # Strip an optional :port suffix before IDNA-encoding.
        if ':' in host:
            host = host.rsplit(':', 1)[0]
        return _encode_idna(host)

    hostname = _normalize(hostname)
    for entry in trusted_list:
        # A leading dot means "this domain and any subdomain of it".
        suffix_match = entry.startswith('.')
        if suffix_match:
            entry = entry[1:]
        entry = _normalize(entry)
        if entry == hostname:
            return True
        if suffix_match and hostname.endswith('.' + entry):
            return True
    return False
def get_choices_file_urls_map(self):
    """Return the choices with asset IDs replaced by ortho-view-set URLs."""
    file_urls_map = []
    for raw_choice in self.get_choices():
        choice = dict(raw_choice)
        small_content = self._get_asset_content(
            Id(choice['assetId']), OV_SET_SMALL_ASSET_CONTENT_TYPE)
        choice['smallOrthoViewSet'] = small_content.get_url()
        large_content = self._get_asset_content(
            Id(choice['assetId']), OV_SET_LARGE_ASSET_CONTENT_TYPE)
        choice['largeOrthoViewSet'] = large_content.get_url()
        # The raw asset ID is internal; callers only get the URLs.
        del choice['assetId']
        file_urls_map.append(choice)
    return file_urls_map
def _load32(ins):
    """Load a 32 bit value from a memory address.

    If the 2nd arg. starts with '*', it is always treated as an indirect
    value.
    """
    output = _32bit_oper(ins.quad[2])
    output.extend(['push de', 'push hl'])
    return output
def register_layouts(layouts, app, url="/api/props/", brand="Pyxley"):
    """Register UILayout objects with the flask app.

    Installs two view functions that serve the props for each UILayout.

    Args:
        layouts (dict): dict of UILayout objects by name
        app (object): flask app
        url (string): address of props; default is /api/props/
        brand (string): brand label returned with the navlinks
    """
    def props(name):
        if name not in layouts:
            # cast as list for python3
            name = list(layouts.keys())[0]
        return jsonify({"layouts": layouts[name]["layout"]})

    def apps():
        entries = []
        for idx, key in enumerate(layouts.keys()):
            label = layouts[key].get("title", key)
            # The first layout also serves as the root path.
            if idx == 0:
                entries.append({"path": "/", "label": label})
            entries.append({"path": "/" + key, "label": label})
        return jsonify({"brand": brand, "navlinks": entries})

    app.add_url_rule(url + "<string:name>/", view_func=props)
    app.add_url_rule(url, view_func=apps)
def error_state(self):
    """Record when the error occurred and return the stored error condition.

    Updates ``state.lasttime`` to now, commits the build state so the
    timestamp is persisted, and returns the stored error value.
    """
    # Stamp the time of the failure so it is saved with the state.
    self.buildstate.state.lasttime = time()
    self.buildstate.commit()
    return self.buildstate.state.error
def loads(s, encoding=None, cls=JSONTreeDecoder, object_hook=None,
          parse_float=None, parse_int=None, parse_constant=None,
          object_pairs_hook=None, **kargs):
    """JSON load from string that defaults the decoding class to
    JSONTreeDecoder.

    Mirrors :func:`json.loads`.  ``encoding`` is accepted for backward
    compatibility but ignored: it was removed from ``json.loads`` in
    Python 3.9 and had no effect on ``str`` input before that.

    Raises:
        json.JSONDecodeError: If ``s`` is not valid JSON.
    """
    # json.loads accepts everything after ``s`` only as keyword arguments in
    # Python 3, so the previous positional pass-through raised TypeError on
    # every call; forward the options by name instead.
    return json.loads(s, cls=cls, object_hook=object_hook,
                      parse_float=parse_float, parse_int=parse_int,
                      parse_constant=parse_constant,
                      object_pairs_hook=object_pairs_hook, **kargs)
def get_grid_point_from_address(grid_address, mesh):
    """Return the grid point index obtained by translating a grid address."""
    _set_no_error()
    address_arr = np.array(grid_address, dtype='intc')
    mesh_arr = np.array(mesh, dtype='intc')
    return spg.grid_point_from_address(address_arr, mesh_arr)
def set_font_size(self, pt=None, px=None):
    """Set the font size to the desired size, in pt or px.

    Args:
        pt: the size in points, or None.
        px: the size in pixels, or None.
    """
    self.font.set_size(pt, px)
    # Re-render so the new size takes effect immediately.
    self._render()
def generate_permissions(urlpatterns, permissions=None):
    """Generate names for permissions reachable from *urlpatterns*.

    URL resolvers are recursed into, sharing one accumulator dict.

    Args:
        urlpatterns: iterable of URL patterns/resolvers to walk.
        permissions (dict): optional accumulator mapping permission codes to
            names; a fresh dict is created per call when omitted.

    Returns:
        dict: mapping of prefixed permission code to prefixed name.
    """
    # The previous mutable default argument ({}) was shared between calls,
    # leaking permissions from one invocation into the next; create the
    # accumulator per call instead.
    if permissions is None:
        permissions = {}
    for pattern in urlpatterns:
        if isinstance(pattern, urlresolvers.RegexURLPattern):
            perm = generate_perm_name(pattern.callback)
            if is_allowed_view(perm) and perm not in permissions:
                permissions[ACL_CODE_PREFIX + perm] = ACL_NAME_PREFIX + perm
        elif isinstance(pattern, urlresolvers.RegexURLResolver):
            # Recurse into nested URL configurations, sharing the accumulator.
            generate_permissions(pattern.url_patterns, permissions)
    return permissions
def check(self):
    """Check whether the set of tracked terms has changed.

    Refreshes the terms via ``update_tracking_terms()`` and compares the
    result against the previously stored set, then stores the new set.

    Returns:
        bool: True if any term was added or removed, otherwise False.
    """
    new_tracking_terms = self.update_tracking_terms()
    # The previous proper-superset/subset comparison missed the case where
    # terms were simultaneously added AND removed (neither < nor > holds);
    # explicit set differences catch every change.
    removed = self._tracking_terms_set - new_tracking_terms
    added = new_tracking_terms - self._tracking_terms_set
    if removed:
        logging.debug("Some tracking terms removed")
    if added:
        logging.debug("Some tracking terms added")
    terms_changed = bool(removed or added)
    # Go ahead and store for later
    self._tracking_terms_set = new_tracking_terms
    # If the terms changed, we need to restart the stream
    return terms_changed
def _getPayload(self, record):
    """Build the payload dict sent to loggly, with tags attached."""
    data = super(LogglyHandler, self)._getPayload(record)
    data['tags'] = self._implodeTags()
    return data
def import_from_json(self, data, *, override=False):
    """Import a JSON dictionary in the format produced by :meth:`export`.

    If *override* is true, the existing data in the pin store is replaced
    by the data from `data`.  Otherwise, `data` is merged into the store.
    """
    decoded = {
        hostname: set(self._decode_key(key) for key in pins)
        for hostname, pins in data.items()
    }
    if override:
        self._storage = decoded
    else:
        for hostname, pins in decoded.items():
            self._storage.setdefault(hostname, set()).update(pins)
def get(self, now):
    """Pop and return a bucket key ready for compaction, or None.

    Runs a Lua script so the pop off the sorted set is atomic; only keys
    older than ``min_age`` (relative to *now*) are considered quiescent.

    :param now: The current time, as a float.
    :returns: A bucket key ready for compaction, or None if no bucket keys
              are available or none have aged sufficiently.
    """
    cutoff = now - self.min_age
    popped = self.script(keys=[self.key], args=[cutoff])
    if popped:
        return popped[0]
    return None
def _compute_ticks ( self , element , edges , widths , lims ) : """Compute the ticks either as cyclic values in degrees or as roughly evenly spaced bin centers ."""
if self . xticks is None or not isinstance ( self . xticks , int ) : return None if self . cyclic : x0 , x1 , _ , _ = lims xvals = np . linspace ( x0 , x1 , self . xticks ) labels = [ "%.0f" % np . rad2deg ( x ) + '\N{DEGREE SIGN}' for x in xvals ] elif self . xticks : dim = element . get_dimension ( 0 ) inds = np . linspace ( 0 , len ( edges ) , self . xticks , dtype = np . int ) edges = list ( edges ) + [ edges [ - 1 ] + widths [ - 1 ] ] xvals = [ edges [ i ] for i in inds ] labels = [ dim . pprint_value ( v ) for v in xvals ] return [ xvals , labels ]
def _scrub_participant_table(path_to_data):
    """Scrub PII from the given participant table."""
    path = os.path.join(path_to_data, "participant.csv")
    scratch = "{}.0".format(path)
    with open_for_csv(path, "r") as src, open(scratch, "w") as dst:
        reader = csv.reader(src)
        writer = csv.writer(dst)
        headers = next(reader)
        writer.writerow(headers)
        id_col = headers.index("id")
        worker_col = headers.index("worker_id")
        unique_col = headers.index("unique_id")
        assignment_col = headers.index("assignment_id")
        for row in reader:
            # Replace worker identifiers with the participant's own id so no
            # platform worker ID survives in the exported data.
            row[worker_col] = row[id_col]
            row[unique_col] = "{}:{}".format(row[id_col], row[assignment_col])
            writer.writerow(row)
    os.rename(scratch, path)
def bind_context(context_filename):
    """Load the product context from file and bind to it.

    Called by featuredjango.startup.select_product prior to selecting the
    individual features.

    :param context_filename: absolute path of the context file
    """
    global PRODUCT_CONTEXT
    if PRODUCT_CONTEXT is not None:
        # Already bound: a harmless rebind with the same file is ignored,
        # binding different data is a serious error.
        if PRODUCT_CONTEXT.PRODUCT_CONTEXT_FILENAME != context_filename:
            raise ContextBindingError('product context bound multiple times using different data!')
        return
    with open(context_filename) as contextfile:
        try:
            context = json.loads(contextfile.read())
        except ValueError as e:
            raise ContextParseError('Error parsing %s: %s' % (context_filename, str(e)))
    context['PRODUCT_CONTEXT_FILENAME'] = context_filename
    # Mirror the relevant environment into the context.
    for env_key in ('PRODUCT_EQUATION_FILENAME', 'PRODUCT_NAME',
                    'CONTAINER_NAME', 'PRODUCT_DIR', 'CONTAINER_DIR',
                    'APE_ROOT_DIR', 'APE_GLOBAL_DIR'):
        context[env_key] = os.environ[env_key]
    PRODUCT_CONTEXT = ContextAccessor(context)
def _preprocess_and_rename_grid_attrs(func, grid_attrs=None, **kwargs):
    """Call a custom preprocessing method first, then rename grid attrs.

    Produces a single callable suitable as the ``preprocess`` argument of
    ``xr.open_mfdataset``: the user-specified ``func`` runs on each loaded
    Dataset before ``grid_attrs_to_aospy_names`` is applied (e.g. to add a
    CF-compliant units attribute that some model output lacks).

    Parameters
    ----------
    func : function
        Arbitrary function called before ``grid_attrs_to_aospy_names``;
        must accept an xr.Dataset plus ``**kwargs``.
    grid_attrs : dict (optional)
        Overriding dictionary mapping aospy internal names to the grid
        attribute names used by a particular model.

    Returns
    -------
    function
        Wrapper applying ``func`` and then the grid-attr renaming.
    """
    def func_wrapper(ds):
        preprocessed = func(ds, **kwargs)
        return grid_attrs_to_aospy_names(preprocessed, grid_attrs)
    return func_wrapper
def SLT(self, o):
    """Signed less than.

    :param o: The other operand
    :return: TrueResult(), FalseResult(), or MaybeResult()
    """
    outcomes = []
    # Compare every pair of signed intervals from both operands.
    for lb_1, ub_1 in self._signed_bounds():
        for lb_2, ub_2 in o._signed_bounds():
            if ub_1 < lb_2:
                outcomes.append(TrueResult())
            elif lb_1 >= ub_2:
                outcomes.append(FalseResult())
            else:
                outcomes.append(MaybeResult())
    # Only a unanimous verdict yields a definite result.
    if all(r.identical(TrueResult()) for r in outcomes):
        return TrueResult()
    if all(r.identical(FalseResult()) for r in outcomes):
        return FalseResult()
    return MaybeResult()
def AddNIC(self, network_id, ip=''):
    """Add a NIC from the provided network to the server and, if provided,
    assign the given IP address.

    https://www.ctl.io/api-docs/v2/#servers-add-secondary-network

    Reinstantiate the server object after execution completes to see the
    assigned IP address.

    :param network_id: ID associated with the network to add
    :param ip: Explicit IP address to assign (optional)
    """
    body = json.dumps({'networkId': network_id, 'ipAddress': ip})
    api_call = clc.v2.API.Call(
        'POST',
        'servers/%s/%s/networks' % (self.alias, self.id),
        body,
        session=self.session)
    return clc.v2.Requests(api_call, alias=self.alias, session=self.session)
def set_tcp_port(self, tcp_port):
    """Define the TCP port number.

    :type tcp_port: int
    :param tcp_port: The TCP port number, or None to unset it.
    """
    self.tcp_port = None if tcp_port is None else int(tcp_port)
def ensure_int_vector(I, require_order=False):
    """Checks if the argument can be converted to an array of ints and does
    that.

    Parameters
    ----------
    I : int or iterable of int
    require_order : bool
        If False (default), an unordered set is accepted. If True, a set is
        not accepted.

    Returns
    -------
    arr : ndarray(n)
        numpy array with the integers contained in the argument
    """
    if is_int_vector(I):
        # Already an int array; pass through unchanged.
        return I
    elif is_int(I):
        # Promote a single scalar to a length-1 array.
        return np.array([I])
    elif is_list_of_int(I):
        return np.array(I)
    elif is_tuple_of_int(I):
        return np.array(I)
    elif isinstance(I, set):
        # Sets have no defined ordering, so they are only acceptable when
        # the caller does not require one.
        if require_order:
            raise TypeError('Argument is an unordered set, but I require an ordered array of integers')
        else:
            lI = list(I)
            if is_list_of_int(lI):
                return np.array(lI)
    else:
        raise TypeError('Argument is not of a type that is convertible to an array of integers.')
def set_compression_pool_size(pool_size):
    """Set the size of the compression workers thread pool.

    If the pool is already created, it waits until all jobs are finished,
    and then proceeds with setting the new size.

    Parameters
    ----------
    pool_size : `int`
        The size of the pool (must be a positive integer)

    Returns
    -------
    `None`
    """
    size = int(pool_size)
    if size < 1:
        raise ValueError("The compression thread pool size cannot be of size {}".format(size))
    global _compress_thread_pool
    if _compress_thread_pool is not None:
        # Drain all outstanding jobs before replacing the pool.
        _compress_thread_pool.close()
        _compress_thread_pool.join()
    _compress_thread_pool = ThreadPool(size)
def list_storage_accounts(call=None):
    '''List storage accounts within the subscription.'''
    if call == 'action':
        raise SaltCloudSystemExit(
            'The list_storage_accounts function must be called with '
            '-f or --function'
        )
    storconn = get_conn(client_type='storage')
    try:
        listing = __utils__['azurearm.paged_object_to_list'](
            storconn.storage_accounts.list())
        result = {account['name']: account for account in listing}
    except CloudError as exc:
        __utils__['azurearm.log_cloud_error']('storage', exc.message)
        result = {'Error': exc.message}
    return result
def generate_repr(*members):
    """Decorator that binds an auto-generated ``__repr__()`` function to a
    class.

    The generated ``__repr__()`` function prints in the following format:
    <ClassName object(field1=1, field2='A string', field3=[1, 2, 3]) at 0xAAAA>

    Note that this decorator modifies the given class in place!

    :param members: An iterable of member names to include into the
                    representation-string. Providing no members yields to
                    inclusion of all member variables and properties in
                    alphabetical order (except if they start with an
                    underscore). Each member may also be a
                    (member, repr-function) tuple; None as repr-function
                    means the built-in ``repr()``.
    :raises ValueError: Raised when a passed (member, repr-function) tuple
                        does not have a length of 2.
    :raises AttributeError: Raised when a given member/attribute was not
                            found in class.
    :raises TypeError: Raised when a provided member is a bound method that
                       is not a getter-like function (must accept no
                       parameters).
    :return: The class armed with an auto-generated __repr__ function.
    """
    def decorator(cls):
        # Bind the __repr__ closure selected below onto the class in place.
        cls.__repr__ = __repr__
        return cls

    if members:
        # Prepare members list.
        members_to_print = list(members)
        for i, member in enumerate(members_to_print):
            if isinstance(member, tuple):
                # Check tuple dimensions.
                length = len(member)
                if length == 2:
                    # None as repr-function falls back to builtin repr().
                    members_to_print[i] = (member[0],
                                           member[1] if member[1] else repr)
                else:
                    raise ValueError("Passed tuple " + repr(member) +
                                     " needs to be 2-dimensional, but has " +
                                     str(length) + " dimensions.")
            else:
                members_to_print[i] = (member, repr)

        def __repr__(self):
            return _construct_repr_string(self, members_to_print)
    else:
        def __repr__(self):
            # Need to fetch member variables every time since they are unknown
            # until class instantation.
            members_to_print = get_public_members(self)
            member_repr_list = ((member, repr) for member in
                                sorted(members_to_print, key=str.lower))
            return _construct_repr_string(self, member_repr_list)

    return decorator
def _set_chain_sid(chain_model, sid):
    """Set or update the SID for a chain.

    If the chain already has a SID, ``sid`` must either be None or match
    the existing SID.
    """
    if not sid:
        return
    existing = chain_model.sid
    # An existing, different SID must never be silently replaced.
    if existing and existing.did != sid:
        raise d1_common.types.exceptions.ServiceFailure(
            0,
            'Attempted to modify existing SID. '
            'existing_sid="{}", new_sid="{}"'.format(existing.did, sid),
        )
    chain_model.sid = d1_gmn.app.did.get_or_create_did(sid)
    chain_model.save()
def invert(self, points):
    """Invert the distortion.

    Parameters
    ----------
    points : ndarray
        Input image points (row 0 holds x, row 1 holds y).

    Returns
    -------
    ndarray
        Undistorted points, same shape as the (possibly reshaped) input.
    """
    # Promote a flat vector to a single column so rows 0/1 hold x/y.
    if points.ndim == 1:
        pts = points.reshape((points.size, 1))
    else:
        pts = points
    wx, wy = self.wc
    dx = pts[0, :] - wx
    dy = pts[1, :] - wy
    # Switch to polar coordinates around the distortion centre.
    radius = np.sqrt(dx ** 2 + dy ** 2)
    angle = np.arctan2(dy, dx)
    # 'atan' method: recover the undistorted radius.
    undist = np.tan(radius * self.lgamma) / self.lgamma
    # Switch back to rectangular coordinates; any extra rows beyond the
    # first two keep the value 1, as in the original implementation.
    out = np.ones(pts.shape)
    out[0, :] = wx + undist * np.cos(angle)
    out[1, :] = wy + undist * np.sin(angle)
    return out
def verify_signature(message, signature, address):
    """This function verifies a bitcoin signed message.

    :param message: the plain text of the message to verify
    :param signature: the signature in base64 format
    :param address: the signing address
    :return: True when the signature is valid for ``address``, else False.
    """
    if len(signature) != SIGNATURE_LENGTH:
        return False
    try:
        binsig = base64.b64decode(signature)
    except (TypeError, ValueError):
        # Malformed base64 cannot be a valid signature.  binascii.Error
        # (raised on bad padding) is a ValueError subclass, so this is
        # covered without the original bare ``except:`` that swallowed
        # everything, including KeyboardInterrupt/SystemExit.
        return False
    # A compact signature is one header byte followed by two 32-byte
    # big-endian integers (r, s).
    r = intbytes.from_bytes(binsig[1:33])
    s = intbytes.from_bytes(binsig[33:65])
    val = intbytes.from_bytes(bitcoin_sig_hash(message.encode()))
    pubpairs = possible_public_pairs_for_signature(
        generator_secp256k1, val, (r, s))
    addr_hash160 = bitcoin_address_to_hash160_sec(address)
    # Try both compressed and uncompressed encodings of every candidate key.
    for pair in pubpairs:
        if (public_pair_to_hash160_sec(pair, True) == addr_hash160):
            return True
        if (public_pair_to_hash160_sec(pair, False) == addr_hash160):
            return True
    return False
def find(self, id):
    """breadth-first sprite search by ID"""
    # Direct children are checked first...
    for child in self.sprites:
        if child.id == id:
            return child
    # ...then each child's subtree is searched recursively.
    for child in self.sprites:
        match = child.find(id)
        if match:
            return match
def get_param_info(self, obj, include_super=True):
    """Get the parameter dictionary, the list of modifed parameters and the
    dictionary or parameter values. If include_super is True, parameters
    are also collected from the super classes.
    """
    params = dict(obj.param.objects('existing'))
    if isinstance(obj, type):
        # A Parameterized *class*: nothing has been modified yet and the
        # values are just the declared defaults.
        changed = []
        val_dict = {name: p.default for name, p in params.items()}
        self_class = obj
    else:
        # A Parameterized *instance*: ask param which values differ from
        # their defaults.
        changed = [
            name for name, _ in obj.param.get_param_values(onlychanged=True)
        ]
        val_dict = dict(obj.param.get_param_values())
        self_class = obj.__class__
    if not include_super:
        # Keep only parameters declared directly on this class.
        params = {
            name: p for name, p in params.items()
            if name in self_class.__dict__
        }
    params.pop('name')  # Already displayed in the title.
    return (params, val_dict, changed)
def training_base_config(estimator, inputs=None, job_name=None, mini_batch_size=None):
    """Export Airflow base training config from an estimator.

    Args:
        estimator (sagemaker.estimator.EstimatorBase): The estimator to export
            training config from. Can be a BYO estimator, Framework estimator
            or Amazon algorithm estimator.
        inputs: Information about the training data. Please refer to the
            ``fit()`` method of the associated estimator, as this can take any
            of the following forms:

            * (str) - The S3 location where training data is saved.
            * (dict[str, str] or dict[str, sagemaker.session.s3_input]) - If
              using multiple channels for training data, you can specify a
              dict mapping channel names to strings or
              :func:`~sagemaker.session.s3_input` objects.
            * (sagemaker.session.s3_input) - Channel configuration for S3 data
              sources that can provide additional information about the
              training dataset. See :func:`sagemaker.session.s3_input` for
              full details.
            * (sagemaker.amazon.amazon_estimator.RecordSet) - A collection of
              Amazon :class:~`Record` objects serialized and stored in S3.
              For use with an estimator for an Amazon algorithm.
            * (list[sagemaker.amazon.amazon_estimator.RecordSet]) - A list of
              :class:~`sagemaker.amazon.amazon_estimator.RecordSet` objects,
              where each instance is a different channel of training data.
        job_name (str): Specify a training job name if needed.
        mini_batch_size (int): Specify this argument only when estimator is a
            built-in estimator of an Amazon algorithm. For other estimators,
            batch size should be specified in the estimator.

    Returns:
        dict: Training config that can be directly used by
            SageMakerTrainingOperator in Airflow.
    """
    default_bucket = estimator.sagemaker_session.default_bucket()
    s3_operations = {}

    # Fix the job name first so every derived resource refers to the same one.
    if job_name is not None:
        estimator._current_job_name = job_name
    else:
        base_name = estimator.base_job_name or utils.base_name_from_image(
            estimator.train_image())
        estimator._current_job_name = utils.name_from_base(base_name)

    if estimator.output_path is None:
        estimator.output_path = 's3://{}/'.format(default_bucket)

    # Framework estimators need their source uploaded; Amazon built-in
    # algorithms need record-set specific preparation.
    if isinstance(estimator, sagemaker.estimator.Framework):
        prepare_framework(estimator, s3_operations)
    elif isinstance(estimator, amazon_estimator.AmazonAlgorithmEstimatorBase):
        prepare_amazon_algorithm_estimator(estimator, inputs, mini_batch_size)

    job_config = job._Job._load_config(
        inputs, estimator, expand_role=False, validate_uri=False)

    train_config = {
        'AlgorithmSpecification': {
            'TrainingImage': estimator.train_image(),
            'TrainingInputMode': estimator.input_mode
        },
        'OutputDataConfig': job_config['output_config'],
        'StoppingCondition': job_config['stop_condition'],
        'ResourceConfig': job_config['resource_config'],
        'RoleArn': job_config['role'],
    }

    # Optional sections are only emitted when actually configured.
    if job_config['input_config'] is not None:
        train_config['InputDataConfig'] = job_config['input_config']
    if job_config['vpc_config'] is not None:
        train_config['VpcConfig'] = job_config['vpc_config']

    hyperparams = estimator.hyperparameters()
    if hyperparams is not None:
        stringified = {str(k): str(v) for k, v in hyperparams.items()}
        if stringified:
            train_config['HyperParameters'] = stringified

    if s3_operations:
        train_config['S3Operations'] = s3_operations

    return train_config
def from_binary(self, d):
    """Given a binary payload d, update the appropriate payload fields of
    the message.
    """
    parsed = MsgEphemerisGPSDepF._parser.parse(d)
    # Copy every declared slot from the parsed construct onto this message.
    for field in self.__class__.__slots__:
        setattr(self, field, getattr(parsed, field))
def remove_service_checks(self, service_id):
    """Remove all checks from a service."""
    # Imported locally, matching the original, presumably to avoid a
    # circular import at module load time -- TODO confirm.
    from hypermap.aggregator.models import Service
    service = Service.objects.get(id=service_id)
    # Drop the service's own checks, then the checks of each of its layers.
    service.check_set.all().delete()
    for layer in service.layer_set.all():
        layer.check_set.all().delete()
def dew_point(self, db):
    """Get the dew point (C), which is constant throughout the day (except at
    saturation).

    args:
        db: The maximum dry bulb temperature over the day.
    """
    hum_type = self._hum_type
    if hum_type == 'Dewpoint':
        # Humidity is already expressed as a dew point; return it directly.
        return self._hum_value
    if hum_type == 'Wetbulb':
        return dew_point_from_db_wb(
            db, self._hum_value, self._barometric_pressure)
    if hum_type == 'HumidityRatio':
        return dew_point_from_db_hr(
            db, self._hum_value, self._barometric_pressure)
    if hum_type == 'Enthalpy':
        # Value is divided by 1000 before conversion -- presumably stored in
        # J/kg while the helper expects kJ/kg; TODO confirm units.
        return dew_point_from_db_enth(
            db, self._hum_value / 1000, self._barometric_pressure)
    # NOTE(review): any other _hum_type silently yields None, matching the
    # original implicit fall-through.
def generate_lambda_functions():
    """Create the Blockade lambda functions.

    Functions that already exist are skipped.  Returns the list of responses
    from the ``create_function`` calls that were actually made.
    """
    logger.debug("[#] Setting up the Lambda functions")
    aws_lambda = boto3.client('lambda', region_name=PRIMARY_REGION)
    functions = aws_lambda.list_functions().get('Functions')
    existing_funcs = [x['FunctionName'] for x in functions]
    iam = boto3.resource('iam')
    # ARN format: arn:aws:iam::<account-id>:user/<name>; field 4 is the id.
    account_id = iam.CurrentUser().arn.split(':')[4]
    responses = []
    for label in LAMBDA_FUNCTIONS:
        if label in existing_funcs:
            logger.debug("[*] Lambda function %s already exists" % (label))
            continue
        dir_path = os.path.dirname(os.path.realpath(__file__))
        dir_path = dir_path.replace('/cli', '/aws')
        # Read the deployment package with a context manager so the file
        # handle is always closed (the original leaked it via a bare
        # open(...).read()).
        zip_path = "{0}/lambda-zips/{1}.zip".format(dir_path, label)
        with open(zip_path, 'rb') as zip_file:
            zip_bytes = zip_file.read()
        kwargs = {
            'Runtime': 'python2.7',
            'Role': 'arn:aws:iam::{0}:role/{1}'.format(
                account_id, BLOCKADE_ROLE),
            'Timeout': 3,
            'MemorySize': 128,
            'Publish': True,
            'Code': {'ZipFile': zip_bytes},
        }
        # Per-function overrides (handler, description, ...) win over the
        # shared defaults above.
        kwargs.update(LAMBDA_SCHEMA[label])
        logger.debug("[#] Setting up the %s Lambda function" % (label))
        response = aws_lambda.create_function(**kwargs)
        responses.append(response)
        logger.debug("[#] Successfully setup Lambda function %s" % (label))
    logger.info("[#] Successfully setup Lambda functions")
    return responses
def is_paired(text, open='(', close=')'):
    """Check if the text only contains:
    1. blackslash escaped parentheses, or
    2. parentheses paired.
    """
    depth = 0
    escaped = False
    for ch in text:
        if escaped:
            # Previous character was a backslash: this one is literal.
            escaped = False
            continue
        if ch == '\\':
            escaped = True
        elif ch == open:
            depth += 1
        elif ch == close:
            # A closer with no pending opener can never be paired.
            if depth == 0:
                return False
            depth -= 1
    return depth == 0
def timetuple(self):
    "Return local time tuple compatible with time.localtime()."
    # Map the tri-state dst() result (None / zero / non-zero offset) onto
    # the -1/0/1 convention used by time.struct_time's tm_isdst field.
    offset = self.dst()
    if offset is None:
        isdst = -1
    elif offset:
        isdst = 1
    else:
        isdst = 0
    return _build_struct_time(self.year, self.month, self.day,
                              self.hour, self.minute, self.second, isdst)
def push(self, item):
    """Push an item onto the sorting stack.

    Duplicate pushes are ignored: an item that was already pushed is
    skipped so each node is visited at most once.

    @param item: An item to push; ``item[1]`` must be iterable.
    @type item: I{item}
    @return: None.  (The previous docstring claimed an int count was
        returned, but neither code path ever returned a value.)
    """
    if item in self.pushed:
        # Already scheduled; nothing to do.
        return
    # Pair the item with an iterator over its children so traversal can
    # resume where it left off.
    frame = (item, iter(item[1]))
    self.stack.append(frame)
    self.pushed.add(item)
def getEntityType(self, found=None):
    """Method to recover the value of the entity in case it may vary.

    :param found: The expression to be analysed.
    :return: The entity type returned will be an s 'i3visio.email' for
        foo@bar.com and an 'i3visio.text' for foo[at]bar[dot]com.
    """
    # Each key (e.g. '@' or '.') maps to the textual stand-ins that may
    # replace it, such as '[at]' or '[dot]'.
    for replacements in self.substitutionValues.values():
        for candidate in replacements:
            if candidate in found:
                return "i3visio.text"
    # No obfuscated token was found: return the usual entity name.
    return self.name