signature
stringlengths
29
44.1k
implementation
stringlengths
0
85.2k
def get_tweet(self, id):
    """Fetch an existing tweet by its ID.

    :param id: ID of the tweet in question.
    :return: ``Tweet`` instance, or ``None`` when the tweet does not exist.
    """
    try:
        return Tweet(self._client.get_status(id=id)._json)
    except TweepError as err:
        # Only a "tweet not found" API code maps to None; everything
        # else propagates to the caller.
        if err.api_code != TWITTER_TWEET_NOT_FOUND_ERROR:
            raise
        return None
def pack(self, value=None):
    """Pack the struct into its binary representation.

    Merges the pcp/cfi/vid fields into the TCI before packing. When none
    of the fields are set, that is interpreted as absence of VLAN
    information and an empty binary string is returned.

    Returns:
        bytes: Binary representation of this instance.
    """
    if isinstance(value, type(self)):
        # Delegate to the instance that was passed in.
        return value.pack()
    if self.pcp is None and self.cfi is None and self.vid is None:
        # No VLAN information at all -> empty binary string.
        return b''
    # Default any unset field to zero before merging into the TCI.
    self.pcp = 0 if self.pcp is None else self.pcp
    self.cfi = 0 if self.cfi is None else self.cfi
    self.vid = 0 if self.vid is None else self.vid
    self._tci = (self.pcp << 13) | (self.cfi << 12) | self.vid
    return super().pack()
def colorize(img, heatmap):
    """Blend a heatmap over an image.

    Args:
        img: BGR image with values in [0, 255].
        heatmap: single-channel map with values in [0, 1].

    Returns:
        50/50 blend of ``img`` and the jet-colormapped heatmap (BGR).
    """
    # intensity_to_rgb returns RGB; [..., ::-1] flips channels to BGR.
    colored = viz.intensity_to_rgb(heatmap, cmap='jet')[:, :, ::-1]
    return img * 0.5 + colored * 0.5
def requests_retry_session(retries=3, backoff_factor=0.3,
                           status_forcelist=(500, 502, 504), session=None):
    """Create a requests session that handles errors by retrying.

    Parameters
    ----------
    retries : int, optional
        Number of retries to attempt.
    backoff_factor : float, optional
        Backoff factor between retries.
    status_forcelist : sequence of int, optional
        HTTP status codes that must be retried.
    session : requests.Session, optional
        An existing session to configure; a new one is created when None.

    Returns
    -------
    requests.Session
        Session whose ``http://`` and ``https://`` requests are retried.

    Notes
    -----
    Based on
    https://www.peterbe.com/plog/best-practice-with-retries-with-requests
    by Peter Bengtsson.
    """
    if session is None:
        session = requests.Session()
    retry_policy = Retry(
        total=retries,
        read=retries,
        connect=retries,
        backoff_factor=backoff_factor,
        status_forcelist=status_forcelist,
    )
    adapter = HTTPAdapter(max_retries=retry_policy)
    # The same retrying adapter serves both schemes.
    for prefix in ('http://', 'https://'):
        session.mount(prefix, adapter)
    return session
def survey_basis(self, keys=None, alias=None, step=None):
    """Look at the basis of all the curves in ``well.data`` and return a
    basis with the minimum start, maximum depth, and minimum step.

    Args:
        keys (list): List of strings: the keys of the data items to
            survey, if not all of them.
        alias (dict): a dictionary mapping mnemonics to lists of mnemonics.
        step (float): a new step, if you want to change it.

    Returns:
        ndarray. The most complete common basis, or None when no curve
        contributed a usable basis.
    """
    if keys is None:
        keys = [k for k, v in self.data.items() if isinstance(v, Curve)]
    else:
        keys = utils.flatten_list(keys)

    starts, stops, steps = [], [], []
    for k in keys:
        d = self.get_curve(k, alias=alias)
        # Fix: the original guarded `if keys and (d is None)` — `keys` is
        # always truthy inside this loop, so only the None check matters.
        if d is None:
            continue
        try:
            starts.append(d.basis[0])
            stops.append(d.basis[-1])
            steps.append(d.basis[1] - d.basis[0])
        except Exception:
            # Best effort: curves without a usable basis are skipped.
            pass

    if starts and stops and steps:
        step = step or min(steps)
        # 1e-9 nudge so the maximum stop survives float error in arange.
        return np.arange(min(starts), max(stops) + 1e-9, step)
    return None
def build_js(ctx, force=False):
    """Build all javascript files."""
    src_template = '{pkg.source_js}/'
    dst_template = '{pkg.django_static}/{pkg.name}/js/'
    for fname in JSX_FILENAMES:
        # Each JSX source compiles to a sibling .js under the static dir.
        jstools.babel(ctx,
                      src_template + fname,
                      dst_template + fname + '.js',
                      force=force)
def generate_filename(self, mark, **kwargs):
    """Comes up with a good filename for the watermarked image.

    Encodes the watermark parameters (opacity, greyscale, rotation,
    source mtime/size, position, optional scale and tiling) into the
    generated name so distinct settings produce distinct files.
    """
    kwargs = dict(kwargs)
    kwargs['opacity'] = int(kwargs['opacity'] * 100)
    kwargs['st_mtime'] = kwargs['fstat'].st_mtime
    kwargs['st_size'] = kwargs['fstat'].st_size

    params = [
        '%(original_basename)s',
        'wm',
        'w%(watermark)i',
        'o%(opacity)i',
        'gs%(greyscale)i',
        'r%(rotation)i',
        'fm%(st_mtime)i',
        'fz%(st_size)i',
        'p%(position)s',
    ]

    scale = kwargs.get('scale')
    if scale and scale != mark.size:
        ratio_pct = float(kwargs['scale'][0]) / mark.size[0] * 100
        params.append('_s%i' % ratio_pct)
    if kwargs.get('tile'):
        params.append('_tiled')

    # Assemble the template, then substitute the kwargs into it.
    template = '%s%s' % ('_'.join(params), kwargs['ext'])
    return template % kwargs
def __get_agent_host_port(self):
    """Resolve the Instana host agent's host and port.

    Precedence: INSTANA_AGENT_HOST env var, then the deprecated
    INSTANA_AGENT_IP env var, then sensor options, then the defaults.
    """
    host = AGENT_DEFAULT_HOST
    port = AGENT_DEFAULT_PORT
    env = os.environ

    if "INSTANA_AGENT_HOST" in env:
        host = env["INSTANA_AGENT_HOST"]
        if "INSTANA_AGENT_PORT" in env:
            port = int(env["INSTANA_AGENT_PORT"])
    elif "INSTANA_AGENT_IP" in env:
        # Deprecated: INSTANA_AGENT_IP environment variable.
        # To be removed in a future version.
        host = env["INSTANA_AGENT_IP"]
        if "INSTANA_AGENT_PORT" in env:
            port = int(env["INSTANA_AGENT_PORT"])
    elif self.agent.sensor.options.agent_host != "":
        host = self.agent.sensor.options.agent_host
        if self.agent.sensor.options.agent_port != 0:
            port = self.agent.sensor.options.agent_port

    return host, port
def get_tool_filepath(self, tool_alias):
    """Given a visible tool alias, return the full path to the executable.

    Args:
        tool_alias (str): Tool alias to search for.

    Returns:
        (str): Filepath of executable, or None if the tool is not in the
        suite. May also return None because this suite has not been saved
        to disk, so a filepath hasn't yet been established.
    """
    if tool_alias not in self.get_tools():
        return None
    if self.tools_path is None:
        # Suite not saved to disk yet -> no filepath exists.
        return None
    return os.path.join(self.tools_path, tool_alias)
def _select_broker_pair(self, rg_destination, victim_partition):
    """Select best-fit source and destination brokers for a partition move.

    Source: the broker holding the victim partition with the most
    partitions. Destination: a broker in ``rg_destination`` without the
    victim partition and with the fewest partitions. This keeps
    topic-partitions spread across brokers and partition counts balanced
    across replication groups.
    """
    source = self._elect_source_broker(victim_partition)
    destination = rg_destination._elect_dest_broker(victim_partition)
    return source, destination
def QA_indicator_MA(DataFrame, *args, **kwargs):
    """Compute moving averages of the 'close' column.

    Arguments:
        DataFrame: input frame containing a 'close' column.
        *args: window lengths; one 'MA{N}' column is produced per N.

    Returns:
        pd.DataFrame with one moving-average column per requested window.
    """
    close = DataFrame['close']
    columns = {'MA{}'.format(window): MA(close, window) for window in args}
    return pd.DataFrame(columns)
def vector_normalize(mat, max_vec_norm=1.):
    """Clamp each column vector of ``mat`` to length ``max_vec_norm``.

    Columns longer than ``max_vec_norm`` are rescaled in place on the GPU;
    shorter columns are left untouched. ``mat`` must be C-contiguous.
    """
    assert mat.flags.c_contiguous
    n, m = mat.shape
    # One CUDA block of 32 threads per column.
    grid, block = (m, 1, 1), (32, 1, 1)
    vector_normalize_kernel.prepared_call(
        grid,
        block,
        mat.gpudata,
        np.float32(max_vec_norm),
        np.int32(m),
        np.int32(n),
    )
def derivative(self, t=None, n=1):
    """Return the nth derivative of the segment at t.

    A line segment has constant first derivative (end - start) and zero
    higher derivatives, so ``t`` is ignored.
    """
    assert self.end != self.start
    if n < 1:
        raise ValueError("n should be a positive integer.")
    return self.end - self.start if n == 1 else 0
def ensure_directory(path):
    """Ensure the parent directory exists for a given file path.

    Fixes two defects of the exists-then-makedirs original: it raced with
    concurrent creators (exists/makedirs TOCTOU) and it crashed with
    FileNotFoundError on bare filenames, where dirname is ''.
    """
    dirname = os.path.dirname(path)
    if dirname:
        os.makedirs(dirname, exist_ok=True)
def get_emails(self, mailinglist_dir, all, exclude_lists):
    """Generator yielding messages from each mailing-list dump directory.

    When ``all`` is True every message in each mbox is imported; otherwise
    importing resumes after the last previously imported message. Lists
    named in ``exclude_lists`` are skipped.

    Yield:
        Tuples of (mailing list name, email message, index).
    """
    self.log("Getting emails dumps from: %s" % mailinglist_dir)

    # Only entries ending with .mbox are considered dumps.
    mbox_names = (name for name in os.listdir(mailinglist_dir)
                  if name.endswith('.mbox'))

    for mbox_name in mbox_names:
        mbox_path = os.path.join(mailinglist_dir, mbox_name, mbox_name)
        list_name = mbox_name.split('.')[0]

        # Skip lists explicitly excluded from the import.
        if exclude_lists and list_name in exclude_lists:
            continue

        # Resume after the last imported message unless a full import
        # was requested.
        if all:
            n_msgs = 0
        else:
            try:
                mailinglist = MailingList.objects.get(name=list_name)
                n_msgs = mailinglist.last_imported_index
            except MailingList.DoesNotExist:
                n_msgs = 0

        for index, msg in self.parse_emails(mbox_path, n_msgs):
            yield list_name, msg, index
def plot_roc(y_true, y_probas, title='ROC Curves',
             plot_micro=True, plot_macro=True, classes_to_plot=None,
             ax=None, figsize=None, cmap='nipy_spectral',
             title_fontsize="large", text_fontsize="medium"):
    """Generates the ROC curves from labels and predicted scores/probabilities.

    Args:
        y_true (array-like, shape (n_samples)): Ground truth (correct)
            target values.
        y_probas (array-like, shape (n_samples, n_classes)): Prediction
            probabilities for each class returned by a classifier.
        title (string, optional): Title of the generated plot.
            Defaults to "ROC Curves".
        plot_micro (boolean, optional): Plot the micro average ROC curve.
            Defaults to ``True``.
        plot_macro (boolean, optional): Plot the macro average ROC curve.
            Defaults to ``True``.
        classes_to_plot (list-like, optional): Classes for which the ROC
            curve should be plotted, e.g. [0, 'cold']. Unknown classes are
            ignored. If ``None``, all classes are plotted. Defaults to
            ``None``.
        ax (:class:`matplotlib.axes.Axes`, optional): The axes upon which
            to plot the curve. If None, the plot is drawn on a new set of
            axes.
        figsize (2-tuple, optional): Figure size, e.g. (6, 6).
            Defaults to ``None``.
        cmap (string or :class:`matplotlib.colors.Colormap`, optional):
            Colormap used for plotting.
        title_fontsize (string or int, optional): Matplotlib-style
            fontsize. Defaults to "large".
        text_fontsize (string or int, optional): Matplotlib-style
            fontsize. Defaults to "medium".

    Returns:
        ax (:class:`matplotlib.axes.Axes`): The axes on which the plot
        was drawn.
    """
    y_true = np.array(y_true)
    y_probas = np.array(y_probas)

    classes = np.unique(y_true)
    probas = y_probas

    if classes_to_plot is None:
        classes_to_plot = classes

    if ax is None:
        fig, ax = plt.subplots(1, 1, figsize=figsize)

    ax.set_title(title, fontsize=title_fontsize)

    fpr_dict = dict()
    tpr_dict = dict()

    # Boolean mask, aligned with `classes`, of the curves to draw.
    indices_to_plot = np.in1d(classes, classes_to_plot)
    for i, to_plot in enumerate(indices_to_plot):
        # Per-class curves are always computed (macro averaging below
        # needs them all), but only the requested ones are drawn.
        fpr_dict[i], tpr_dict[i], _ = roc_curve(y_true, probas[:, i],
                                                pos_label=classes[i])
        if to_plot:
            roc_auc = auc(fpr_dict[i], tpr_dict[i])
            color = plt.cm.get_cmap(cmap)(float(i) / len(classes))
            ax.plot(fpr_dict[i], tpr_dict[i], lw=2, color=color,
                    label='ROC curve of class {0} (area = {1:0.2f})'
                          ''.format(classes[i], roc_auc))

    if plot_micro:
        binarized_y_true = label_binarize(y_true, classes=classes)
        if len(classes) == 2:
            # label_binarize yields a single column in the binary case;
            # expand to one-hot so ravel() pairs with probas.ravel().
            binarized_y_true = np.hstack(
                (1 - binarized_y_true, binarized_y_true))
        fpr, tpr, _ = roc_curve(binarized_y_true.ravel(), probas.ravel())
        roc_auc = auc(fpr, tpr)
        ax.plot(fpr, tpr,
                label='micro-average ROC curve '
                      '(area = {0:0.2f})'.format(roc_auc),
                color='deeppink', linestyle=':', linewidth=4)

    if plot_macro:
        # Compute macro-average ROC curve and ROC area.
        # First aggregate all false positive rates.
        all_fpr = np.unique(np.concatenate(
            [fpr_dict[x] for x in range(len(classes))]))

        # Then interpolate all ROC curves at this points.
        mean_tpr = np.zeros_like(all_fpr)
        for i in range(len(classes)):
            mean_tpr += interp(all_fpr, fpr_dict[i], tpr_dict[i])

        # Finally average it and compute AUC.
        mean_tpr /= len(classes)
        roc_auc = auc(all_fpr, mean_tpr)

        ax.plot(all_fpr, mean_tpr,
                label='macro-average ROC curve '
                      '(area = {0:0.2f})'.format(roc_auc),
                color='navy', linestyle=':', linewidth=4)

    ax.plot([0, 1], [0, 1], 'k--', lw=2)
    ax.set_xlim([0.0, 1.0])
    ax.set_ylim([0.0, 1.05])
    ax.set_xlabel('False Positive Rate', fontsize=text_fontsize)
    ax.set_ylabel('True Positive Rate', fontsize=text_fontsize)
    ax.tick_params(labelsize=text_fontsize)
    ax.legend(loc='lower right', fontsize=text_fontsize)
    return ax
def fetch(self):
    """Fetch a AvailablePhoneNumberCountryInstance.

    :returns: Fetched AvailablePhoneNumberCountryInstance
    :rtype: twilio.rest.api.v2010.account.available_phone_number.AvailablePhoneNumberCountryInstance
    """
    payload = self._version.fetch('GET', self._uri, params=values.of({}))
    return AvailablePhoneNumberCountryInstance(
        self._version,
        payload,
        account_sid=self._solution['account_sid'],
        country_code=self._solution['country_code'],
    )
def get_version(cls):
    """Return the version number of the tool.

    Returns an empty string when the tool wrote anything to stderr.
    """
    process = Popen([cls.tool, '--version'], stdout=PIPE, stderr=PIPE)
    out, err = process.communicate()
    if err:
        return ''
    # NOTE(review): under Python 3 communicate() yields bytes, so this
    # returns the first stdout line as bytes — confirm callers expect that.
    return out.splitlines()[0].strip()
def get_authorization_lookup_session_for_vault(self, vault_id, proxy):
    """Gets the ``OsidSession`` for the authorization lookup service of a vault.

    arg:    vault_id (osid.id.Id): the ``Id`` of the vault
    arg:    proxy (osid.proxy.Proxy): a proxy
    return: (osid.authorization.AuthorizationLookupSession) - an
            authorization lookup session
    raise:  NotFound - ``vault_id`` not found
    raise:  NullArgument - ``vault_id`` or ``proxy`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_authorization_lookup()`` or
            ``supports_visible_federation()`` is ``false``
    """
    if not self.supports_authorization_lookup():
        raise errors.Unimplemented()
    # TODO: also verify that vault_id exists, raising errors.NotFound
    # otherwise.
    # pylint: disable=no-member
    return sessions.AuthorizationLookupSession(vault_id, proxy, self._runtime)
def rand_str(length, allowed=CHARSET_ALPHA_DIGITS):
    """Generate a fixed-length random string from an allowed character pool.

    :param length: total length of the returned string.
    :param allowed: allowed charset.

    Example::

        >>> rand_str(32)
        H6ExQPNLzb4Vp3YZtfpyzLNPFwdfnwz6
    """
    # join over a generator replaces the manual append loop (PERF401).
    return "".join(random.choice(allowed) for _ in range(length))
def physical_cpus():
    """Count physical CPU sockets.

    On Linux this counts distinct 'physical id' entries in /proc/cpuinfo.
    On Darwin it returns sysctl hw.ncpu — NOTE(review): hw.ncpu looks like
    a logical-core count, not sockets; confirm against callers.

    :return: Number of physical CPUs available
    :rtype: int
    """
    if platform.system() == 'Darwin':
        out = subprocess.check_output(
            ['/usr/sbin/sysctl', '-n', 'hw.ncpu'], shell=False)
        return int(out.strip())

    with open('/proc/cpuinfo') as cpuinfo:
        sockets = {line.split(':')[-1].strip()
                   for line in cpuinfo
                   if line.startswith('physical id')}
    return len(sockets)
def this_month(self):
    """Access the this_month list resource (constructed lazily).

    :returns: twilio.rest.api.v2010.account.usage.record.this_month.ThisMonthList
    :rtype: twilio.rest.api.v2010.account.usage.record.this_month.ThisMonthList
    """
    # Cache the list object on first access.
    if self._this_month is None:
        self._this_month = ThisMonthList(
            self._version,
            account_sid=self._solution['account_sid'],
        )
    return self._this_month
def find_smallest(num1, num2):
    """Return the smaller of two numbers (num1 on ties).

    Arguments:
        num1, num2: Two numbers to compare.

    Returns:
        The smallest number among num1 and num2.

    Examples:
        >>> find_smallest(1, 2)
        1
    """
    if num1 <= num2:
        return num1
    return num2
def get_latex(self):
    """Bibliographic entry in LaTeX format."""
    if len(self.authors) > 1:
        authors = _list_authors(self.authors)
    else:
        # NOTE(review): this reads attributes off self.authors itself, not
        # off its single element — verify the upstream type before relying
        # on this branch.
        author = self.authors
        authors = ' '.join([author.given_name, author.surname])

    if self.volume and self.issueIdentifier:
        volissue = '\\textbf{{{}({})}}'.format(self.volume, self.issueIdentifier)
    elif self.volume:
        volissue = '\\textbf{{{}}}'.format(self.volume)
    else:
        volissue = 'no volume'

    pages = _parse_pages(self)

    entry = '{auth}, \\textit{{{title}}}, {jour}, {vol}, {pages} ({year}).'.format(
        auth=authors, title=self.title, jour=self.publicationName,
        vol=volissue, pages=pages, year=self.coverDate[:4])
    if self.doi is not None:
        entry += ' \\href{{https://doi.org/{0}}}{{doi:{0}}}, '.format(self.doi)
    entry += '\\href{{{0}}}{{scopus:{1}}}.'.format(self.scopus_link, self.eid)
    return entry
def save_connection_settings(self):
    """Save the user's kernel connection settings.

    Settings go to the Spyder CONF store; the SSH password/passphrase is
    stored via ``keyring`` on a best-effort basis.
    """
    if not self.save_layout.isChecked():
        return

    is_ssh_key = bool(self.kf_radio.isChecked())
    connection_settings = {
        "json_file_path": self.cf.text(),
        "is_remote": self.rm_group.isChecked(),
        "username": self.un.text(),
        "hostname": self.hn.text(),
        "port": self.pn.text(),
        "is_ssh_keyfile": is_ssh_key,
        "ssh_key_file_path": self.kf.text(),
    }
    CONF.set("existing-kernel", "settings", connection_settings)

    try:
        import keyring
        if is_ssh_key:
            keyring.set_password("spyder_remote_kernel",
                                 "ssh_key_passphrase",
                                 self.kfp.text())
        else:
            keyring.set_password("spyder_remote_kernel",
                                 "ssh_password",
                                 self.pw.text())
    except Exception:
        # keyring may be missing or broken; the settings above are still
        # saved, only the secret is skipped.
        pass
def _adjust_axis(self, axis):
    """Return raw axis/axes corresponding to apparent axis/axes.

    This method adjusts the user-provided 'axis' parameter for some of
    the cube operations, mainly 'margin'. The user never sees the MR
    selections dimension and treats all MRs as single dimensions, so the
    values of axis (to sum across) are shifted to what the user would
    have specified had they known about the MR selections dimension.
    The adjustment is needed because margin calculations operate on an
    internal array containing all the data (together with all
    selections).
    """
    if not self._is_axis_allowed(axis):
        ca_error_msg = "Direction {} not allowed (items dimension)"
        raise ValueError(ca_error_msg.format(axis))

    if isinstance(axis, int):
        # If single axis was provided, create a list out of it, so that
        # we can do the subsequent iteration.
        axis = list([axis])
    elif axis is None:
        # If axis was None, create what the user would expect in terms of
        # finding the Total(s). For a 2D cube this is the axis of all the
        # dimensions the user can see, i.e. (0, 1), because the selections
        # dimension is invisible to the user. For a 3D cube this is the
        # "total" across each slice, so drop the 0th dimension and take
        # only the last two, (1, 2).
        axis = range(self.ndim)[-2:]
    else:
        # In case of a tuple, just keep it as a list.
        axis = list(axis)
    axis = np.array(axis)

    # Create new array for storing updated values of axis. It's necessary
    # because it's hard to update the values in place.
    new_axis = np.array(axis)

    # Iterate over user-visible dimensions, and update axis when MR is
    # detected. For each detected MR, increment all subsequent axes (that
    # were provided by the user); axes "behind" the current MR stay as-is.
    for i, dim in enumerate(self.dimensions):
        if dim.dimension_type == DT.MR_SUBVAR:
            # This formula updates only the axes that come "after" the
            # current MR (items) dimension.
            new_axis[axis >= i] += 1

    return tuple(new_axis)
def scenario_risk(riskinputs, riskmodel, param, monitor):
    """Core function for a scenario computation.

    :param riskinputs:
        a list of :class:`openquake.risklib.riskinput.RiskInput` objects
    :param riskmodel:
        a :class:`openquake.risklib.riskinput.CompositeRiskModel` instance
    :param param: dictionary of extra parameters
    :param monitor:
        :class:`openquake.baselib.performance.Monitor` instance
    :returns:
        a dictionary {'agg': array of shape (E, L), 'avg': list of tuples
        (lt_idx, rlz_idx, asset_ordinal, statistics)} where E is the
        number of simulated events, L the number of loss types, and
        statistics is an array with 'mean' and 'stddev' fields
    """
    E = param['E']
    L = len(riskmodel.loss_types)
    result = dict(agg=numpy.zeros((E, L), F32), avg=[],
                  all_losses=AccumDict(accum={}))
    for ri in riskinputs:
        for out in riskmodel.gen_outputs(ri, monitor, param['epspath']):
            r = out.rlzi
            weight = param['weights'][r]
            slc = param['event_slice'](r)
            for l, loss_type in enumerate(riskmodel.loss_types):
                losses = out[loss_type]
                if numpy.product(losses.shape) == 0:
                    # happens for all NaNs
                    continue
                # Per-asset mean/stddev over the simulated events.
                stats = numpy.zeros(len(ri.assets), stat_dt)  # mean, stddev
                for a, asset in enumerate(ri.assets):
                    stats['mean'][a] = losses[a].mean()
                    stats['stddev'][a] = losses[a].std(ddof=1)
                    result['avg'].append((l, r, asset['ordinal'], stats[a]))
                # Aggregate losses across assets, weighted by realization.
                agglosses = losses.sum(axis=0)  # shape num_gmfs
                result['agg'][slc, l] += agglosses * weight
                if param['asset_loss_table']:
                    aids = ri.assets['ordinal']
                    result['all_losses'][l, r] += AccumDict(zip(aids, losses))
    return result
def _pop_async_request(self, msg_id, msg_name):
    """Pop the set of callbacks for a request.

    Returns a tuple of five Nones when the callbacks were already popped
    or never existed.
    """
    # Must only be called from the ioloop thread.
    assert get_thread_ident() == self.ioloop_thread_id
    if msg_id is None:
        msg_id = self._msg_id_for_name(msg_name)
    callback_tuple = self._async_queue.pop(msg_id, None)
    if callback_tuple is None:
        return None, None, None, None, None
    self._async_id_stack[callback_tuple[0].name].remove(msg_id)
    return callback_tuple
def create(self, table_id, schema):
    """Create a table in Google BigQuery given a table and schema.

    Parameters
    ----------
    table : str
        Name of table to be written.
    schema : str
        Use the generate_bq_schema to generate your table schema from a
        dataframe.
    """
    from google.cloud.bigquery import SchemaField
    from google.cloud.bigquery import Table

    if self.exists(table_id):
        raise TableCreationError(
            "Table {0} already exists".format(table_id))

    # Lazily create the parent dataset when it does not exist yet.
    if not _Dataset(self.project_id,
                    credentials=self.credentials).exists(self.dataset_id):
        _Dataset(
            self.project_id,
            credentials=self.credentials,
            location=self.location,
        ).create(self.dataset_id)

    table_ref = self.client.dataset(self.dataset_id).table(table_id)
    table = Table(table_ref)

    # Manually create the schema objects, adding NULLABLE mode as a
    # workaround for
    # https://github.com/GoogleCloudPlatform/google-cloud-python/issues/4456
    for field in schema["fields"]:
        field.setdefault("mode", "NULLABLE")
    table.schema = [SchemaField.from_api_repr(field)
                    for field in schema["fields"]]

    try:
        self.client.create_table(table)
    except self.http_error as ex:
        self.process_http_error(ex)
def kill(timeout=15):
    '''Kill the salt minion.

    timeout
        int seconds to wait for the minion to die.

    If you have a monitor that restarts ``salt-minion`` when it dies then
    this is a great way to restart after a minion upgrade.

    CLI example::

        >$ salt minion[12] minion.kill

    The result of the salt command shows the process ID of the minions and
    the results of a kill signal to the minion as the ``retcode`` value:
    ``0`` is success, anything else is a failure.
    '''
    ret = {
        'killed': None,
        'retcode': 1,
    }
    comment = []
    pid = __grains__.get('pid')
    if not pid:
        comment.append('Unable to find "pid" in grains')
        ret['retcode'] = salt.defaults.exitcodes.EX_SOFTWARE
    else:
        if 'ps.kill_pid' not in __salt__:
            comment.append('Missing command: ps.kill_pid')
            ret['retcode'] = salt.defaults.exitcodes.EX_SOFTWARE
        else:
            # The retcode status comes from the first kill signal.
            ret['retcode'] = int(not __salt__['ps.kill_pid'](pid))

            # If the signal was successfully delivered then wait for the
            # process to die - check by sending signals until signal
            # delivery fails.
            if ret['retcode']:
                comment.append('ps.kill_pid failed')
            else:
                for _ in range(timeout):
                    time.sleep(1)
                    signaled = __salt__['ps.kill_pid'](pid)
                    if not signaled:
                        ret['killed'] = pid
                        break
                else:
                    # The process did not exit before the timeout.
                    comment.append('Timed out waiting for minion to exit')
                    ret['retcode'] = salt.defaults.exitcodes.EX_TEMPFAIL
    if comment:
        ret['comment'] = comment
    return ret
def _check(self, mode=None):
    """Verify the TarFile is open and its mode allows this operation.

    Raises IOError when the file is closed or when ``mode`` is given and
    the TarFile's mode is not one of its characters.
    """
    if self.closed:
        raise IOError("%s is closed" % self.__class__.__name__)
    if mode is None:
        return
    if self.mode not in mode:
        raise IOError("bad operation for mode %r" % self.mode)
def verify_components(components):
    """Verify values returned from :meth:`make_components`.

    Used internally during the :meth:`build` process.

    :param components: value returned from :meth:`make_components`
    :type components: :class:`dict`
    :raises ValueError: if verification fails
    """
    # The user-defined function must return a dict.
    if not isinstance(components, dict):
        raise ValueError(
            "invalid type returned by make_components(): %r (must be a dict)"
            % components)

    for name, component in components.items():
        # Keys must be strings.
        if not isinstance(name, str):
            raise ValueError(
                ("invalid name from make_components(): (%r, %r) "
                 "(must be a (str, Component))") % (name, component))
        # Values must be Component instances.
        if not isinstance(component, Component):
            raise ValueError(
                ("invalid component type from make_components(): (%r, %r) "
                 "(must be a (str, Component))") % (name, component))
        # Names may only use the allowed character set.
        invalid_chars = set(name) - VALID_NAME_CHARS
        if invalid_chars:
            raise ValueError(
                "component name {!r} invalid; cannot include {!r}".format(
                    name, invalid_chars))
def errtrapz(x, yerr):
    """Propagated error of the trapezoid integration formula.

    Inputs:
        x: the abscissa
        yerr: the error of the dependent variable

    Outputs:
        the error of the integral
    """
    x = np.array(x)
    assert isinstance(x, np.ndarray)
    yerr = np.array(yerr)
    # Endpoint terms carry single-interval weights; interior points span
    # two intervals, hence the x[i+1] - x[i-1] differences.
    first = (x[1] - x[0]) ** 2 * yerr[0] ** 2
    middle = np.sum((x[2:] - x[:-2]) ** 2 * yerr[1:-1] ** 2)
    last = (x[-1] - x[-2]) ** 2 * yerr[-1] ** 2
    return 0.5 * np.sqrt(first + middle + last)
def indent_xml(elem, level=0, more_sibs=False):
    """Indent an xml element object to prepare for pretty printing.

    To avoid changing the contents of the original Element, it is
    recommended that a copy is made to send to this function.

    Args:
        elem: Element to indent (modified in place).
        level: Int indent level (default is 0).
        more_sibs: Bool, whether to anticipate further siblings.
    """
    i = "\n"
    pad = " "
    if level:
        i += (level - 1) * pad
    num_kids = len(elem)
    if num_kids:
        # Element has children: indent its text and recurse into kids.
        if not elem.text or not elem.text.strip():
            elem.text = i + pad
            if level:
                elem.text += pad
        count = 0
        for kid in elem:
            # "data" payloads are elided from the pretty-printed output.
            if kid.tag == "data":
                kid.text = "*DATA*"
            indent_xml(kid, level + 1, count < num_kids - 1)
            count += 1
        if not elem.tail or not elem.tail.strip():
            elem.tail = i
            if more_sibs:
                elem.tail += pad
    else:
        # Leaf element: only adjust its tail.
        if level and (not elem.tail or not elem.tail.strip()):
            elem.tail = i
            if more_sibs:
                elem.tail += pad
def make_http_credentials(username=None, password=None):
    """Build the ``user:password@`` auth part for an api_url.

    Returns '' when no username is given or when the username itself
    contains ':' (which would corrupt the URL).
    """
    if username is None or ':' in username:
        return ''
    credentials = username
    if credentials and password is not None:
        credentials += ":%s" % password
    return "%s@" % credentials
def configure_defaults():
    """This function is executed immediately after ROOT's finalSetup."""
    log.debug("configure_defaults()")
    global initialized
    initialized = True
    if use_rootpy_handler:
        # Need to do it again here, since it is overridden by ROOT.
        set_error_handler(python_logging_error_handler)
    if os.environ.get('ROOTPY_BATCH', False) or IN_NOSETESTS:
        ROOT.gROOT.SetBatch(True)
        log.debug('ROOT is running in batch mode')
    ROOT.gErrorIgnoreLevel = 0
    this_dll = C.CDLL(None)
    try:
        EnableAutoDictionary = C.c_int.in_dll(
            this_dll, "G__EnableAutoDictionary")
    except ValueError:
        pass
    else:
        # Disable automatic dictionary generation
        EnableAutoDictionary.value = 0
    # TODO(pwaller): idea, `execfile("userdata/initrc.py")` here?
    # note: that wouldn't allow the user to override the default
    # canvas size, for example.
    for init in _initializations:
        init()
def stop(self):
    """Stop the camera process.

    Raises:
        Exception: if the recorder was never started.
    """
    if not self._started:
        raise Exception("Cannot stop a video recorder before starting it!")
    self._started = False
    if self._actual_camera.is_running:
        self._actual_camera.stop()
    if self._camera is not None:
        try:
            self._camera.terminate()
        except Exception:
            # Best-effort teardown. The original bare `except:` also
            # swallowed KeyboardInterrupt/SystemExit; Exception does not.
            pass
def wfdb_strptime(time_string):
    """Given a time string in an acceptable wfdb format, return a
    datetime.time object.

    Valid formats: SS, MM:SS, HH:MM:SS, all with and without microsec.

    Raises:
        ValueError: if the string contains more than two ':' separators
            (the original fell through to an UnboundLocalError here).
    """
    formats = ('%S', '%M:%S', '%H:%M:%S')
    n_colons = time_string.count(':')
    if n_colons >= len(formats):
        raise ValueError('invalid time string: %r' % time_string)
    time_fmt = formats[n_colons]
    if '.' in time_string:
        time_fmt += '.%f'
    return datetime.datetime.strptime(time_string, time_fmt).time()
def _update_pods_metrics(self, instance, pods):
    """Reports the number of running pods on this node, tagged by service and creator.

    We go through all the pods, extract tags then count them by tag list,
    sorted and serialized in a pipe-separated string (it is an illegal
    character for tags).
    """
    # Count pods per unique tag set; frozenset makes the tag list hashable
    # and order-insensitive.
    tags_map = defaultdict(int)
    for pod in pods['items']:
        pod_meta = pod.get('metadata', {})
        pod_tags = self.kubeutil.get_pod_creator_tags(pod_meta, legacy_rep_controller_tag=True)
        services = self.kubeutil.match_services_for_pod(pod_meta)
        if isinstance(services, list):
            for service in services:
                pod_tags.append('kube_service:%s' % service)
        if 'namespace' in pod_meta:
            pod_tags.append('kube_namespace:%s' % pod_meta['namespace'])
        tags_map[frozenset(pod_tags)] += 1
    # Instance-level tags are appended to every emitted gauge.
    commmon_tags = instance.get('tags', [])
    # NOTE: iteritems() — this module targets Python 2.
    for pod_tags, pod_count in tags_map.iteritems():
        tags = list(pod_tags)
        tags.extend(commmon_tags)
        self.publish_gauge(self, NAMESPACE + '.pods.running', pod_count, tags)
def table_dataset_database_table(
    table=None,
    include_attributes=None,
    rows_limit=None,
    print_progress=False,
):
    """Create a pyprel table contents list from a database table of the
    module dataset.

    Attributes to be included in the table can be specified; by default, all
    attributes are included. A limit on the number of rows included can be
    specified. Progress on building the table can be reported.

    :param table: dataset table (iterable of mapping rows with a ``columns``
        attribute and a length)
    :param include_attributes: optional list of column names to include
    :param rows_limit: optional maximum number of rows to include
    :param print_progress: if True, print progress via shijian
    :return: list of rows, the first row being the column headers
    """
    if print_progress:
        # shijian is only needed for progress reporting; import lazily.
        import shijian
        progress = shijian.Progress()
        progress.engage_quick_calculation_mode()
    number_of_rows = len(table)
    columns = include_attributes if include_attributes else table.columns
    table_contents = [columns]
    for index_row, row in enumerate(table):
        if rows_limit is not None and index_row >= rows_limit:
            break
        row_contents = []
        for column in columns:
            try:
                string_representation = str(row[column])
            except Exception:
                # Fall back to an explicit UTF-8 encoding for values whose
                # str() conversion fails (legacy Python 2 unicode handling).
                # (Was a bare `except:` which also swallowed system exits.)
                string_representation = str(row[column].encode("utf-8"))
            row_contents.append(string_representation)
        table_contents.append(row_contents)
        if print_progress:
            print(progress.add_datum(fraction=float(index_row) / float(number_of_rows)))
    return table_contents
def _try_reconnect(self):
    """Try to recover an interrupted connection.

    Re-establishes the direct connection and reopens any streaming/tracing
    interfaces that were open before the interruption.

    Raises:
        HardwareError: if the device could not be reconnected.
    """
    try:
        if self.connection_interrupted:
            # Force a fresh direct connection using the stored connection string.
            self.connect_direct(self.connection_string, force=True)
            self.connection_interrupted = False
            self.connected = True
            # Reenable streaming interface if that was open before as well
            if self._reports is not None:
                self._loop.run_coroutine(self.adapter.open_interface(0, 'streaming'))
            # Reenable tracing interface if that was open before as well
            if self._traces is not None:
                self._loop.run_coroutine(self.adapter.open_interface(0, 'tracing'))
    except HardwareError as exc:
        self._logger.exception("Error reconnecting to device after an unexpected disconnect")
        # Chain the original failure so callers can inspect the root cause.
        raise HardwareError("Device disconnected unexpectedly and we could not reconnect", reconnect_error=exc) from exc
def _create_data_loader ( self , data , ** kwargs ) : """Converts input data into a DataLoader"""
if data is None : return None # Set DataLoader config # NOTE : Not applicable if data is already a DataLoader config = { ** self . config [ "train_config" ] [ "data_loader_config" ] , ** kwargs , "pin_memory" : self . config [ "device" ] != "cpu" , } # Return data as DataLoader if isinstance ( data , DataLoader ) : return data elif isinstance ( data , Dataset ) : return DataLoader ( data , ** config ) elif isinstance ( data , ( tuple , list ) ) : return DataLoader ( self . _create_dataset ( * data ) , ** config ) else : raise ValueError ( "Input data type not recognized." )
def verify_webhook ( signature , body ) : '''Verify the webhook signature from travisci signature The signature header from the webhook header body The full payload body from the webhook post . . note : : The body needs to be the urlencoded version of the body . CLI Example : . . code - block : : bash salt ' * ' travisci . verify _ webhook ' M6NucCX5722bxisQs7e . . . ' ' payload = % 7B % 22id % 22%3A183791261%2C % 22repository . . . ' '''
# get public key setup public_key = __utils__ [ 'http.query' ] ( 'https://api.travis-ci.org/config' ) [ 'config' ] [ 'notifications' ] [ 'webhook' ] [ 'public_key' ] pkey_public_key = OpenSSL . crypto . load_publickey ( OpenSSL . crypto . FILETYPE_PEM , public_key ) certificate = OpenSSL . crypto . X509 ( ) certificate . set_pubkey ( pkey_public_key ) # decode signature signature = base64 . b64decode ( signature ) # parse the urlencoded payload from travis payload = salt . utils . json . loads ( parse_qs ( body ) [ 'payload' ] [ 0 ] ) try : OpenSSL . crypto . verify ( certificate , signature , payload , six . text_type ( 'sha1' ) ) except OpenSSL . crypto . Error : return False return True
def set_page_load_timeout(self, time_to_wait):
    """Set the amount of time to wait for a page load to complete
    before throwing an error.

    :Args:
     - time_to_wait: The amount of time to wait (in seconds)

    :Usage:
        driver.set_page_load_timeout(30)
    """
    try:
        # W3C-style timeout command: milliseconds under the 'pageLoad' key.
        self.execute(Command.SET_TIMEOUTS, {
            'pageLoad': int(float(time_to_wait) * 1000)})
    except WebDriverException:
        # Fall back to the legacy JSON wire protocol form for older drivers.
        self.execute(Command.SET_TIMEOUTS, {
            'ms': float(time_to_wait) * 1000,
            'type': 'page load'})
def getIds(self, query='*:*', fq=None, start=0, rows=1000):
    """Returns a dictionary of:
        matches: number of matches
        failed: if true, then an exception was thrown
        start: starting index
        ids: [id, id, ...]

    See also the SOLRSearchResponseIterator class
    """
    params = {'q': query, 'start': str(start), 'rows': str(rows), 'wt': 'python'}
    if fq is not None:
        params['fq'] = fq
    request = urllib.parse.urlencode(params, doseq=True)
    data = None
    # Default response assumes failure until the SOLR reply parses cleanly.
    response = {'matches': 0, 'start': start, 'failed': True, 'ids': []}
    try:
        rsp = self.doPost(self.solrBase + '', request, self.formheaders)
        # SECURITY NOTE(review): eval() on the raw 'wt=python' SOLR response
        # executes arbitrary code if the server is compromised — consider
        # ast.literal_eval instead.
        data = eval(rsp.read())
    except Exception:
        # Any failure (network, parse) leaves 'failed': True in the response.
        pass
    if data is None:
        return response
    response['failed'] = False
    response['matches'] = data['response']['numFound']
    for doc in data['response']['docs']:
        # assumes the 'id' field is multi-valued; takes the first — TODO confirm
        response['ids'].append(doc['id'][0])
    return response
def makeadistu_inlets(data, commdct):
    """Build the adistu_inlets dict mapping each air distribution unit
    object name to the list of its air-inlet field names."""
    # Assume that inlet-node fields contain the words "Air Inlet Node Name".
    inlet_marker = "Air Inlet Node Name"
    adistu_inlets = {}
    for adistu in getadistus(data, commdct):
        comments = commdct[data.dtls.index(adistu.upper())]
        inlet_fields = []
        for comment in comments:
            try:
                field_name = comment['field'][0]
            except KeyError:
                # Comment entry has no 'field' key; skip it.
                continue
            if field_name.find(inlet_marker) != -1:
                inlet_fields.append(field_name)
        adistu_inlets[adistu] = inlet_fields
    return adistu_inlets
def _parse_in_batches(cmd_array):
    """Find patterns that match to `in_batches_pat` and replace them into
    `STDIN` or `TMPFILE`.

    :param cmd_array: `shlex.split`-ed command
    :rtype: ([cmd_array], (batch_to_file, batch_to_file, ...))
    :returns: Modified `cmd_array` and tuple to show how each IN_BATCH is
        instantiated (TMPFILE or STDIN).
        Returned `cmd_array` drops IN_BATCH related tokens.
    :raises: `IndexError` if IN_BATCHes don't have sequential ID starting from 0
    """
    res_cmd_array = cmd_array[:]
    res_batch_to_file_s = []
    # Positions of IN_BATCHx tokens, indexed against the ORIGINAL cmd_array.
    in_batches_cmdidx = BatchCommand._in_batches_cmdidx(cmd_array)
    # NOTE(review): the indices come from the original cmd_array but deletions
    # below mutate res_cmd_array; with more than one STDIN-style batch the
    # later indices would be off by the number of removed tokens — presumably
    # at most one STDIN redirection is allowed. Verify against callers.
    for batch_id, cmdidx in enumerate(in_batches_cmdidx):
        if cmdidx > 0 and cmd_array[cmdidx - 1] == '<':
            # e.g. `< IN_BATCH0` — the batch is fed via standard input;
            # drop both the '<' token and the IN_BATCHx token.
            res_batch_to_file_s.append(BatchToFile('STDIN'))
            del res_cmd_array[cmdidx], res_cmd_array[cmdidx - 1]
        else:
            # IN_BATCHx is TMPFILE — substitute the token with a temp-file path.
            batch_to_file = BatchToFile('TMPFILE')
            res_batch_to_file_s.append(batch_to_file)
            res_cmd_array[cmdidx] = batch_to_file.tmpfile_path()
    return (res_cmd_array, tuple(res_batch_to_file_s))
def _check_type_and_load_cert(self, msg, key_type, cert_type):
    """Perform message type-checking & optional certificate loading.

    This includes fast-forwarding cert ``msg`` objects past the nonce, so
    that the subsequent fields are the key numbers; thus the caller may
    expect to treat the message as key material afterwards either way.

    The obtained key type is returned for classes which need to know what
    it was (e.g. ECDSA.)

    :raises SSHException: if ``msg`` is None or its type is neither a
        recognized key type nor a recognized certificate type.
    """
    # Normalization; most classes have a single key type and give a string,
    # but eg ECDSA is a 1:N mapping.
    key_types = key_type
    cert_types = cert_type
    if isinstance(key_type, string_types):
        key_types = [key_types]
    if isinstance(cert_types, string_types):
        cert_types = [cert_types]
    # Can't do much with no message, that should've been handled elsewhere
    if msg is None:
        raise SSHException("Key object may not be empty")
    # First field is always key type, in either kind of object. (make sure
    # we rewind before grabbing it - sometimes caller had to do their own
    # introspection first!)
    msg.rewind()
    type_ = msg.get_text()
    # Regular public key - nothing special to do besides the implicit
    # type check.
    if type_ in key_types:
        pass
    # OpenSSH-compatible certificate - store full copy as .public_blob
    # (so signing works correctly) and then fast-forward past the nonce.
    elif type_ in cert_types:
        # This seems the cleanest way to 'clone' an already-being-read
        # message; they're *IO objects at heart and their .getvalue()
        # always returns the full value regardless of pointer position.
        self.load_certificate(Message(msg.asbytes()))
        # Read out nonce as it comes before the public numbers.
        # TODO: usefully interpret it & other non-public-number fields
        # (requires going back into per-type subclasses.)
        msg.get_string()
    else:
        # BUGFIX: the original format string lacked the closing parenthesis.
        err = "Invalid key (class: {}, data type: {})"
        raise SSHException(err.format(self.__class__.__name__, type_))
    # BUGFIX: the docstring promises the obtained key type is returned, but
    # the original body never returned it.
    return type_
def master(cls, cluster_id_label):
    """Show the details of the master of the cluster with id/label
    `cluster_id_label`."""
    cluster_status = cls.status(cluster_id_label)
    if cluster_status.get("state") != 'UP':
        # Cluster is not running; report its raw status instead.
        return cluster_status
    masters = [node for node in cluster_status.get("nodes") if node["role"] == "master"]
    return masters[0]
def _mm(n_items, data, initial_params, alpha, max_iter, tol, mm_fun):
    """Iteratively refine MM estimates until convergence.

    Starts from ``initial_params`` (or zeros) and repeatedly applies the
    minorization-maximization update ``mm_fun`` until the L1 norm of the
    parameter change falls below ``tol``.

    Raises
    ------
    RuntimeError
        If the algorithm does not converge after `max_iter` iterations.
    """
    params = np.zeros(n_items) if initial_params is None else initial_params
    converged = NormOfDifferenceTest(tol=tol, order=1)
    for _ in range(max_iter):
        numerators, denominators = mm_fun(n_items, data, params)
        # alpha acts as a smoothing prior on both numerator and denominator.
        params = log_transform((numerators + alpha) / (denominators + alpha))
        if converged(params):
            return params
    raise RuntimeError("Did not converge after {} iterations".format(max_iter))
def com_google_fonts_check_family_equal_unicode_encodings(ttFonts):
    """Fonts have equal unicode encodings?

    Compares the platform encoding ID of the first format 4 cmap subtable
    across all fonts in the family.
    """
    encoding = None
    failed = False
    for ttFont in ttFonts:
        cmap = None
        for table in ttFont['cmap'].tables:
            if table.format == 4:
                cmap = table
                break
        if cmap is None:
            # BUGFIX: a font without a format 4 cmap subtable previously
            # crashed this check with an AttributeError on `cmap.platEncID`;
            # report it as a failure instead (as the original TODO suggested).
            failed = True
            yield FAIL, "Font lacks a format 4 cmap table."
            continue
        if not encoding:
            # Remember the first font's encoding as the family reference.
            encoding = cmap.platEncID
        if encoding != cmap.platEncID:
            failed = True
    if failed:
        yield FAIL, "Fonts have different unicode encodings."
    else:
        yield PASS, "Fonts have equal unicode encodings."
def validate_union(datum, schema, parent_ns=None, raise_errors=True):
    """Check that the data is a list type with possible options to
    validate as True.

    Parameters
    ----------
    datum: Any
        Data being validated
    schema: dict
        Schema
    parent_ns: str
        parent namespace
    raise_errors: bool
        If true, raises ValidationError on invalid data
    """
    if isinstance(datum, tuple):
        # A (name, datum) tuple targets one specific record branch of the union.
        (name, datum) = datum
        for candidate in schema:
            if extract_record_type(candidate) == 'record':
                if name == candidate["name"]:
                    return validate(datum, schema=candidate,
                                    field=parent_ns,
                                    raise_errors=raise_errors)
        else:
            # for-else: no record branch matched the given name.
            return False
    errors = []
    for s in schema:
        try:
            ret = validate(datum, schema=s, field=parent_ns,
                           raise_errors=raise_errors)
            if ret:
                # We exit on the first passing type in Unions
                return True
        except ValidationError as e:
            # Accumulate branch errors so they can all be reported at once.
            errors.extend(e.errors)
    if raise_errors:
        raise ValidationError(*errors)
    return False
def convertdistmethod(method_str):
    """Convert a distance method name to its single-letter code:
    'h' (horizontal), 'v' (vertical), 'p' (pythagoras), or 's' (surface)."""
    name_to_code = {
        'Horizontal': 'h',
        'Vertical': 'v',
        'Pythagoras': 'p',
        'Surface': 's',
    }
    for name, code in name_to_code.items():
        if StringClass.string_match(method_str, name):
            return code
    lowered = method_str.lower()
    if lowered in ['h', 'v', 'p', 's']:
        return lowered
    # Default to surface distance for unrecognized inputs.
    return 's'
def zmq_version():
    '''
    ZeroMQ python bindings >= 2.1.9 are required
    '''
    try:
        import zmq
    except Exception:
        # Return True for local mode
        return True
    ver = zmq.__version__
    # The last matched group can be None if the version
    # is something like 3.1 and that will work properly
    match = re.match(r'^(\d+)\.(\d+)(?:\.(\d+))?', ver)
    # Fallthrough and hope for the best
    if not match:
        msg = "Using untested zmq python bindings version: '{0}'".format(ver)
        if is_console_configured():
            log.warning(msg)
        else:
            sys.stderr.write("WARNING {0}\n".format(msg))
        return True
    major, minor, point = match.groups()
    # Version components arrive as strings; convert the numeric ones.
    if major.isdigit():
        major = int(major)
    if minor.isdigit():
        minor = int(minor)
    # point very well could be None
    if point and point.isdigit():
        point = int(point)
    if major == 2 and minor == 1:
        # zmq 2.1dev could be built against a newer libzmq
        if "dev" in ver and not point:
            msg = 'Using dev zmq module, please report unexpected results'
            if is_console_configured():
                log.warning(msg)
            else:
                sys.stderr.write("WARNING: {0}\n".format(msg))
            return True
        elif point and point >= 9:
            return True
    elif major > 2 or (major == 2 and minor > 1):
        return True
    # If all else fails, gracefully croak and warn the user
    log.critical('ZeroMQ python bindings >= 2.1.9 are required')
    if 'salt-master' in sys.argv[0]:
        msg = ('The Salt Master is unstable using a ZeroMQ version '
               'lower than 2.1.11 and requires this fix: http://lists.zeromq.'
               'org/pipermail/zeromq-dev/2011-June/012094.html')
        if is_console_configured():
            log.critical(msg)
        else:
            sys.stderr.write('CRITICAL {0}\n'.format(msg))
    return False
def account_list(self, wallet):
    """Lists all the accounts inside **wallet**

    :param wallet: Wallet to get account list for
    :type wallet: str

    :raises: :py:exc:`nano.rpc.RPCException`

    >>> rpc.account_list(
    ...     wallet="000D1BAEC8EC208142C99059B393051BAC8380F9B5A2E6B2489A277D81789F3F"
    ... )
    ["xrb_3e3j5tkog48pnny9dmfzj1r16pg8t1e76dz5tmac6iq689wyjfpi00000"]
    """
    payload = {"wallet": self._process_value(wallet, 'wallet')}
    resp = self.call('account_list', payload)
    # An empty wallet yields no 'accounts' key; normalize to an empty list.
    return resp.get('accounts') or []
def snapped_speed_limits(client, path):
    """Returns the posted speed limit (in km/h) for given road segments.

    The provided points will first be snapped to the most likely roads the
    vehicle was traveling along.

    :param path: The path of points to be snapped.
    :type path: a single location, or a list of locations, where a
        location is a string, dict, list, or tuple

    :rtype: dict with a list of speed limits and a list of the snapped points.
    """
    params = {"path": convert.location_list(path)}
    # The Roads API lives on its own host and authenticates with an API key
    # rather than a client ID.
    return client._request("/v1/speedLimits", params,
                           base_url=_ROADS_BASE_URL,
                           accepts_clientid=False,
                           extract_body=_roads_extract)
def islice_extended(iterable, *args):
    """An extension of :func:`itertools.islice` that supports negative values
    for *stop*, *start*, and *step*.

        >>> iterable = iter('abcdefgh')
        >>> list(islice_extended(iterable, -4, -1))
        ['e', 'f', 'g']

    Slices with negative values require some caching of *iterable*, but this
    function takes care to minimize the amount of memory required.

    For example, you can use a negative step with an infinite iterator:

        >>> from itertools import count
        >>> list(islice_extended(count(), 110, 99, -2))
        [110, 108, 106, 104, 102, 100]
    """
    s = slice(*args)
    start = s.start
    stop = s.stop
    if s.step == 0:
        raise ValueError('step argument must be a non-zero integer or None.')
    step = s.step or 1
    it = iter(iterable)
    if step > 0:
        start = 0 if (start is None) else start
        if (start < 0):
            # Consume all but the last -start items
            cache = deque(enumerate(it, 1), maxlen=-start)
            # cache entries are (1-based index, item); the last index is the
            # total length of the iterable.
            len_iter = cache[-1][0] if cache else 0
            # Adjust start to be positive
            i = max(len_iter + start, 0)
            # Adjust stop to be positive
            if stop is None:
                j = len_iter
            elif stop >= 0:
                j = min(stop, len_iter)
            else:
                j = max(len_iter + stop, 0)
            # Slice the cache
            n = j - i
            if n <= 0:
                return
            for index, item in islice(cache, 0, n, step):
                yield item
        elif (stop is not None) and (stop < 0):
            # Advance to the start position
            next(islice(it, start, start), None)
            # When stop is negative, we have to carry -stop items while
            # iterating
            cache = deque(islice(it, -stop), maxlen=-stop)
            for index, item in enumerate(it):
                cached_item = cache.popleft()
                if index % step == 0:
                    yield cached_item
                cache.append(item)
        else:
            # When both start and stop are positive we have the normal case
            yield from islice(it, start, stop, step)
    else:
        # Negative step: iterate backwards.
        start = -1 if (start is None) else start
        if (stop is not None) and (stop < 0):
            # Consume all but the last items
            n = -stop - 1
            cache = deque(enumerate(it, 1), maxlen=n)
            len_iter = cache[-1][0] if cache else 0
            # If start and stop are both negative they are comparable and
            # we can just slice. Otherwise we can adjust start to be negative
            # and then slice.
            if start < 0:
                i, j = start, stop
            else:
                i, j = min(start - len_iter, -1), None
            for index, item in list(cache)[i:j:step]:
                yield item
        else:
            # Advance to the stop position
            if stop is not None:
                m = stop + 1
                next(islice(it, m, m), None)
            # stop is positive, so if start is negative they are not comparable
            # and we need the rest of the items.
            if start < 0:
                i = start
                n = None
            # stop is None and start is positive, so we just need items up to
            # the start index.
            elif stop is None:
                i = None
                n = start + 1
            # Both stop and start are positive, so they are comparable.
            else:
                i = None
                n = start - stop
                if n <= 0:
                    return
            cache = list(islice(it, n))
            yield from cache[i::step]
def _FormatIPCPermToken ( self , token_data ) : """Formats an IPC permissions token as a dictionary of values . Args : token _ data ( bsm _ token _ data _ ipc _ perm ) : AUT _ IPC _ PERM token data . Returns : dict [ str , str ] : token values ."""
return { 'user_id' : token_data . user_identifier , 'group_id' : token_data . group_identifier , 'creator_user_id' : token_data . creator_user_identifier , 'creator_group_id' : token_data . creator_group_identifier , 'access' : token_data . access_mode }
def resolve_path(self, path, root_id='0', objects=False):
    '''Return id (or metadata) of an object, specified by chain (iterable
        or fs-style path string) of "name" attributes of it's ancestors, or
        raises DoesNotExists error.

        Requires a lot of calls to resolve each name in path, so use with care.
        root_id parameter allows to specify path relative to some folder_id
        (default: 0).'''
    # NOTE: Python 2 / Twisted inlineCallbacks-style coroutine — uses
    # `yield` on deferreds and defer.returnValue() instead of `return`.
    if path:
        if isinstance(path, types.StringTypes):
            # Split fs-style string path into components, dropping empties.
            path = filter(None, path.split(os.sep))
        if path:
            try:
                for i, name in enumerate(path):
                    # Look up each component among the children of root_id;
                    # KeyError here means the name does not exist.
                    root_id = dict(it.imap(
                        op.itemgetter('name', 'id'),
                        (yield self.listdir(root_id))))[name]
            except (KeyError, ProtocolError) as err:
                # Only treat HTTP 404 (and missing names) as "does not exist";
                # re-raise other protocol errors.
                if isinstance(err, ProtocolError) and err.code != 404:
                    raise
                raise DoesNotExists(root_id, path[i:])
    defer.returnValue(root_id if not objects else (yield self.info(root_id)))
def _create_clone(self, parent, part, **kwargs):
    """Create a new `Part` clone under the `Parent`.

    .. versionadded:: 2.3

    :param parent: parent part
    :type parent: :class:`models.Part`
    :param part: part to be cloned
    :type part: :class:`models.Part`
    :param kwargs: (optional) additional keyword=value arguments
    :type kwargs: dict
    :return: cloned :class:`models.Part`
    :raises APIError: if the `Part` could not be cloned
    """
    # Models and instances are cloned through different API actions.
    if part.category == Category.MODEL:
        select_action = 'clone_model'
    else:
        select_action = 'clone_instance'
    data = {
        "part": part.id,
        "parent": parent.id,
        # pop() so suppress_kevents is not also sent as a query parameter.
        "suppress_kevents": kwargs.pop('suppress_kevents', None)
    }
    # prepare url query parameters
    query_params = kwargs
    query_params['select_action'] = select_action
    response = self._request('POST', self._build_url('parts'),
                             params=query_params,
                             data=data)
    if response.status_code != requests.codes.created:
        raise APIError("Could not clone part, {}: {}".format(str(response), response.content))
    return Part(response.json()['results'][0], client=self)
def trimmomatic_barplot(self):
    """Make the HighCharts HTML to plot the trimmomatic rates."""
    # Categories in display order, with fixed colours for each outcome.
    keys = OrderedDict([
        ('surviving', {'color': '#437bb1', 'name': 'Surviving Reads'}),
        ('both_surviving', {'color': '#f7a35c', 'name': 'Both Surviving'}),
        ('forward_only_surviving', {'color': '#e63491', 'name': 'Forward Only Surviving'}),
        ('reverse_only_surviving', {'color': '#b1084c', 'name': 'Reverse Only Surviving'}),
        ('dropped', {'color': '#7f0000', 'name': 'Dropped'}),
    ])
    # Config for the plot
    pconfig = {
        'id': 'trimmomatic_plot',
        'title': 'Trimmomatic: Surviving Reads',
        'ylab': '# Reads',
        'cpswitch_counts_label': 'Number of Reads',
    }
    self.add_section(plot=bargraph.plot(self.trimmomatic, keys, pconfig))
def issueQueingServiceJobs(self):
    """Issues any queuing service jobs up to the limit of the maximum allowed."""
    # Regular service jobs, bounded by config.maxServiceJobs.
    while self.serviceJobsToBeIssued and self.serviceJobsIssued < self.config.maxServiceJobs:
        self.issueJob(self.serviceJobsToBeIssued.pop())
        self.serviceJobsIssued += 1
    # Preemptable service jobs have their own, separate limit.
    while (self.preemptableServiceJobsToBeIssued
           and self.preemptableServiceJobsIssued < self.config.maxPreemptableServiceJobs):
        self.issueJob(self.preemptableServiceJobsToBeIssued.pop())
        self.preemptableServiceJobsIssued += 1
def inheritance_patch(attrs):
    """Patch tango objects before they are processed by the metaclass."""
    for name, candidate in attrs.items():
        if not isinstance(candidate, attribute):
            continue
        if getattr(candidate, 'attr_write', None) != AttrWriteType.READ_WRITE:
            continue
        if getattr(candidate, 'fset', None):
            continue
        # READ_WRITE attribute with no explicit setter: attach the
        # conventionally-named write method from the class namespace.
        write_name = candidate.write_method_name or "write_" + name
        candidate.fset = attrs.get(write_name)
def get_alignment_summary_metrics ( self , barcode ) : """Parses the metrics in a $ { barcode } alignment _ summary _ metrics file in the DNAnexus project ( usually in the qc folder ) . This contains metrics produced by Picard Tools ' s CollectAlignmentSummaryMetrics program ."""
filename = barcode + ".alignment_summary_metrics" # In the call to dxpy . find _ one _ data _ object ( ) below , I ' d normally set the # more _ ok parameter to False , but this blows - up in Python 3.7 - giving me a RuntimeError . # So , I just won ' t set it for now . I think dxpy is still mainly a Python 2.7 library and # can break in later version of Python3. try : file_id = dxpy . find_one_data_object ( zero_ok = False , project = self . dx_project_id , name = filename ) [ "id" ] except dxpy . exceptions . DXSearchError as err : msg = "Picard alignment summary metrics for barcode {} in DX project {} not found." . format ( barcode , self . dx_project_id ) debug_logger . error ( msg ) raise DxMissingAlignmentSummaryMetrics ( msg ) fh = StringIO ( dxpy . open_dxfile ( file_id ) . read ( ) ) asm = picard . CollectAlignmentSummaryMetrics ( fh ) return asm . metrics
def clear(self):
    """Clear and reset to original state."""
    # Reset base query state first, then this class's own fields.
    WhereQuery.clear(self)
    self._sql = None
    self._parameters = []
    self._table = None
def _handle_tag_defineshape4(self):
    """Handle the DefineShape4 tag.

    Reads the shape id, bounds, flags and shape records from the source
    stream and returns a populated DefineShape4 object.
    """
    obj = _make_object("DefineShape4")
    obj.ShapeId = unpack_ui16(self._src)
    obj.ShapeBounds = self._get_struct_rect()
    obj.EdgeBounds = self._get_struct_rect()
    # Flags are packed into a single byte.
    bc = BitConsumer(self._src)
    bc.u_get(5)  # reserved
    obj.UsesFillWindingRule = bc.u_get(1)
    obj.UsesNonScalingStrokes = bc.u_get(1)
    obj.UsesScalingStrokes = bc.u_get(1)
    # Version 4 shape-with-style structure follows the flags.
    obj.Shapes = self._get_struct_shapewithstyle(4)
    return obj
def connect(self, protocol=None, mode=None, disposition=None):
    """Connect to card.

    @param protocol: a bit mask of the protocols to use, from
        L{CardConnection.T0_protocol}, L{CardConnection.T1_protocol},
        L{CardConnection.RAW_protocol}, L{CardConnection.T15_protocol}
    @param mode: SCARD_SHARE_SHARED (default), SCARD_SHARE_EXCLUSIVE or
        SCARD_SHARE_DIRECT
    @param disposition: SCARD_LEAVE_CARD (default), SCARD_RESET_CARD,
        SCARD_UNPOWER_CARD or SCARD_EJECT_CARD
    """
    # NOTE(review): the parameters are unused here — this base implementation
    # only notifies observers of the 'connect' event; presumably subclasses
    # override this to perform the actual connection. Verify against
    # subclass implementations.
    Observable.setChanged(self)
    Observable.notifyObservers(self, CardConnectionEvent('connect'))
def commit_state_create(self, nameop, current_block_number):
    """Commit a state-creation operation (works for name_registration,
    namespace_reveal, name_import).

    Returns the sequence of dicts of fields to serialize.

    DO NOT CALL THIS DIRECTLY

    NOTE: Python 2 code (uses `except Exception, e` syntax). Any invariant
    violation aborts the process (os.abort) since the DB would otherwise be
    left inconsistent.
    """
    # have to have read-write disposition
    if self.disposition != DISPOSITION_RW:
        log.error("FATAL: borrowing violation: not a read-write connection")
        traceback.print_stack()
        os.abort()
    cur = self.db.cursor()
    opcode = nameop.get('opcode', None)
    try:
        assert state_create_is_valid(nameop), "Invalid state-creation"
        assert opcode is not None, "BUG: did not set opcode"
        preorder = state_create_get_preorder(nameop)
    except Exception, e:
        log.exception(e)
        log.error("FATAL: missing preorder and/or prior history and/or opcode")
        os.abort()
    initial_state = self.sanitize_op(nameop)
    table = state_create_get_table(nameop)
    history_id_key = state_create_get_history_id_key(nameop)
    history_id = nameop[history_id_key]
    constraints_ignored = state_create_get_always_set(nameop)
    # cannot have collided
    if BlockstackDB.nameop_is_collided(nameop):
        # TODO: is this reachable?
        log.debug("Not commiting '%s' since we're collided" % history_id)
        self.log_reject(current_block_number, nameop['vtxindex'], nameop['op'], nameop)
        return {}
    self.log_accept(current_block_number, nameop['vtxindex'], nameop['op'], nameop)
    canonical_opdata = None
    if preorder is not None:
        # preordered a name or a namespace, possibly not for the first time even.
        try:
            assert 'preorder_hash' in preorder, 'BUG: missing preorder-hash'
        except Exception as e:
            log.exception(e)
            log.error("FATAL: invalid preorder")
            os.abort()
        canonical_opdata = namedb_state_create(cur, opcode, initial_state, current_block_number,
                                               initial_state['vtxindex'], initial_state['txid'],
                                               history_id, preorder, table,
                                               constraints_ignored=constraints_ignored)
        if not canonical_opdata:
            # Creation failed: roll back and abort rather than continue with
            # a half-committed state.
            log.error("FATAL: failed to create '{}'".format(history_id))
            self.db.rollback()
            os.abort()
        self.db.commit()
    else:
        # importing a name
        try:
            assert opcode in OPCODE_NAME_STATE_IMPORTS, "BUG: not an import operation"
        except Exception, e:
            log.exception(e)
            log.error("FATAL: invalid import operation")
            os.abort()
        canonical_opdata = namedb_state_create_as_import(self.db, opcode, initial_state,
                                                         current_block_number, initial_state['vtxindex'], initial_state['txid'],
                                                         history_id, table,
                                                         constraints_ignored=constraints_ignored)
        if not canonical_opdata:
            log.error("FATAL: failed to create '{}' as import".format(history_id))
            self.db.rollback()
            os.abort()
        self.db.commit()
    return canonical_opdata
def add_rect(self, width, height, rid=None):
    """Add rectangle of widthxheight dimensions.

    Arguments:
        width (int, float): Rectangle width
        height (int, float): Rectangle height
        rid: Optional rectangle user id

    Returns:
        Rectangle: Rectangle with placement coordinates
        None: If the rectangle couldn't be placed.
    """
    assert (width > 0 and height > 0)
    # Find the best free position and orientation for this size.
    best_rect, _ = self._select_position(width, height)
    if not best_rect:
        return None
    # Subdivide every maximal free rectangle intersecting the placement,
    # then prune free rectangles fully contained in another.
    self._split(best_rect)
    self._remove_duplicates()
    # Record the placement and hand it back to the caller.
    best_rect.rid = rid
    self.rectangles.append(best_rect)
    return best_rect
def draw_build_target(self, surf):
    """Draw the build target."""
    # Snap to half-tile positions when the footprint diameter is odd so the
    # preview circle aligns to the build grid.
    round_half = lambda v, cond: round(v - 0.5) + 0.5 if cond else round(v)
    queued_action = self._queued_action
    if queued_action:
        radius = queued_action.footprint_radius
        if radius:
            pos = self.get_mouse_pos()
            if pos:
                pos = point.Point(round_half(pos.world_pos.x, (radius * 2) % 2),
                                  round_half(pos.world_pos.y, (radius * 2) % 2))
                # Draw in the current player's colour.
                surf.draw_circle(
                    colors.PLAYER_ABSOLUTE_PALETTE[
                        self._obs.observation.player_common.player_id],
                    pos, radius)
def retrieve_old_notifications(self):
    """Retrieve notifications older than X days, where X is specified in
    settings (DELETE_OLD)."""
    # Cut-off timestamp: anything added at or before this is "old".
    date = ago(days=DELETE_OLD)
    return Notification.objects.filter(added__lte=date)
def _process_content_streams(*, pdf, container, shorthand=None):
    """Find all individual instances of images drawn in the container.

    Usually the container is a page, but it may also be a Form XObject.

    On a typical page images are stored inline or as regular images in an
    XObject.

    Form XObjects may include inline images, XObject images, and
    recursively, other Form XObjects; and also vector graphic objects.

    Every instance of an image being drawn somewhere is flattened and
    treated as a unique image, since if the same image is drawn multiple
    times on one page it may be drawn at differing resolutions, and our
    objective is to find the resolution at which the page can be rastered
    without downsampling.
    """
    if container.get('/Type') == '/Page' and '/Contents' in container:
        # Pages start from the given CTM, or the unit square by default.
        initial_shorthand = shorthand or UNIT_SQUARE
    elif container.get('/Type') == '/XObject' and container['/Subtype'] == '/Form':
        # Set the CTM to the state it was when the "Do" operator was
        # encountered that is drawing this instance of the Form XObject
        ctm = PdfMatrix(shorthand) if shorthand else PdfMatrix.identity()
        # A Form XObject may provide its own matrix to map form space into
        # user space. Get this if one exists
        form_shorthand = container.get('/Matrix', PdfMatrix.identity())
        form_matrix = PdfMatrix(form_shorthand)
        # Concatenate form matrix with CTM to ensure CTM is correct for
        # drawing this instance of the XObject
        ctm = form_matrix @ ctm
        initial_shorthand = ctm.shorthand
    else:
        # Neither a page nor a form: nothing to scan.
        return
    contentsinfo = _interpret_contents(container, initial_shorthand)
    if contentsinfo.found_vector:
        yield VectorInfo()
    yield from _find_inline_images(contentsinfo)
    yield from _find_regular_images(container, contentsinfo)
    yield from _find_form_xobject_images(pdf, container, contentsinfo)
def getApplicationsTransitionStateNameFromEnum(self, state):
    """Return the display string for an application transition state enum."""
    return self.function_table.getApplicationsTransitionStateNameFromEnum(state)
def _ensure_panel_ids(dashboard):
    '''Assign every panel a unique auto-incrementing ``id``, starting at 1.

    Panels are numbered row by row, in the order they appear in the
    dashboard dict.  Missing ``rows``/``panels`` keys are treated as empty.
    '''
    all_panels = (panel
                  for row in dashboard.get('rows', [])
                  for panel in row.get('panels', []))
    for number, panel in enumerate(all_panels, start=1):
        panel['id'] = number
def pop(self, sexp):
    '''Match the given list against every term of the sequence.

    Notes: Sequence works a bit differently than other nodes.  This
    method (like others) expects a list.  However, the sequence threads
    the whole list through each term in turn, whereas other nodes try to
    match against elements of the list.
    '''
    remaining = sexp
    for term in self.terms:
        remaining = term.pop(remaining)
    return remaining
def get_relationship_query_session(self, proxy=None):
    """Gets the ``OsidSession`` associated with the relationship query service.

    arg:    proxy (osid.proxy.Proxy): a proxy
    return: (osid.relationship.RelationshipQuerySession) - a
            ``RelationshipQuerySession``
    raise:  NullArgument - ``proxy`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_relationship_query()`` is ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_relationship_query()`` is ``true``.*
    """
    if not self.supports_relationship_query():
        raise Unimplemented()
    try:
        from . import sessions
    except ImportError:
        raise OperationFailed()
    proxy = self._convert_proxy(proxy)
    try:
        # The session class may be absent from older provider builds.
        return sessions.RelationshipQuerySession(proxy=proxy, runtime=self._runtime)
    except AttributeError:
        raise OperationFailed()
def select_by_name(self, name):
    """Show the tab whose label text equals *name* (first match wins).

    Does nothing when no tab carries that label.
    """
    for anchor, item, holder in self._tabs.values():
        if anchor.children['text'] != name:
            continue
        self._on_tab_pressed(anchor, item, holder)
        return
def FloatProperty(name, default=0.0, readonly=False, docs=None):
    ''':name:     string - property name
    :default:  float - property default value
    :readonly: boolean - if True, setter method is NOT generated
    :docs:     optional docstring for the generated property

    Returns a property object that can be used to initialize a class
    instance variable as a property.

    The setter accepts, in order of preference: a plain number (anything
    ``float()`` accepts), a mapping containing *name*, an iterable whose
    first element is the value, an object whose ``vars()`` contain
    *name*, or ``None`` (which restores *default*).  Values within
    epsilon of zero are normalized to 0.0 (see ``nearly_zero``).
    '''
    # Bug fix: ``collections.Mapping``/``collections.Iterable`` were
    # removed in Python 3.10; the ABCs live in ``collections.abc``.
    from collections.abc import Iterable, Mapping

    private_name = '_' + name

    def getf(self):
        # Lazily install the default so instances need no explicit init.
        if not hasattr(self, private_name):
            setattr(self, private_name, default)
        return getattr(self, private_name)

    if readonly:
        setf = None
    else:
        def setf(self, newValue):
            def epsilon_set(v):
                # epsilon_set: creates a float from v unless that
                # float is less than epsilon, which will
                # be considered effectively zero.
                fv = float(v)
                return 0.0 if nearly_zero(fv) else fv

            # 1) Plain numeric value (or anything float() accepts).
            try:
                setattr(self, private_name, epsilon_set(newValue))
                return
            except TypeError:
                pass
            # 2) Mapping keyed by the property name; a missing key is
            #    silently ignored (value left unchanged).
            if isinstance(newValue, Mapping):
                try:
                    setattr(self, private_name, epsilon_set(newValue[name]))
                except KeyError:
                    pass
                return
            # 3) Iterable: use the first element if it converts.
            if isinstance(newValue, Iterable):
                try:
                    setattr(self, private_name, epsilon_set(newValue[0]))
                    return
                except (IndexError, TypeError):
                    pass
            # 4) Arbitrary object carrying a matching attribute.
            try:
                mapping = vars(newValue)
                setattr(self, private_name, epsilon_set(mapping[name]))
                return
            except (TypeError, KeyError):
                pass
            # 5) None resets the property to its default.
            if newValue is None:
                setattr(self, private_name, epsilon_set(default))
                return
            raise ValueError(newValue)

    return property(getf, setf, None, docs)
def build ( self , parent_step = None , force_sequence = None ) : """Build a factory instance ."""
# TODO : Handle " batch build " natively pre , post = parse_declarations ( self . extras , base_pre = self . factory_meta . pre_declarations , base_post = self . factory_meta . post_declarations , ) if force_sequence is not None : sequence = force_sequence elif self . force_init_sequence is not None : sequence = self . force_init_sequence else : sequence = self . factory_meta . next_sequence ( ) step = BuildStep ( builder = self , sequence = sequence , parent_step = parent_step , ) step . resolve ( pre ) args , kwargs = self . factory_meta . prepare_arguments ( step . attributes ) instance = self . factory_meta . instantiate ( step = step , args = args , kwargs = kwargs , ) postgen_results = { } for declaration_name in post . sorted ( ) : declaration = post [ declaration_name ] unrolled_context = declaration . declaration . unroll_context ( instance = instance , step = step , context = declaration . context , ) postgen_context = PostGenerationContext ( value_provided = '' in unrolled_context , value = unrolled_context . get ( '' ) , extra = { k : v for k , v in unrolled_context . items ( ) if k != '' } , ) postgen_results [ declaration_name ] = declaration . declaration . call ( instance = instance , step = step , context = postgen_context , ) self . factory_meta . use_postgeneration_results ( instance = instance , step = step , results = postgen_results , ) return instance
def _read_oem(string):
    """Parse the text of an OEM (Orbit Ephemeris Message).

    Args:
        string (str): String containing the OEM
    Return:
        Ephem: a single ``Ephem`` when the OEM describes one object,
        otherwise a list of ``Ephem`` objects
    Raise:
        ValueError: when a mandatory metadata field is missing or an
        ephemeris block contains no data lines
    """
    ephems = []
    required = ('REF_FRAME', 'CENTER_NAME', 'TIME_SYSTEM', 'OBJECT_ID', 'OBJECT_NAME')

    # Simple line-oriented state machine: ``mode`` is None before the
    # first META_START, "meta" inside a metadata block, "data" after
    # META_STOP while reading state-vector lines.
    mode = None
    for line in string.splitlines():
        if not line or line.startswith("COMMENT"):  # pragma: no cover
            continue
        elif line.startswith("META_START"):
            mode = "meta"
            ephem = {'orbits': []}
            ephems.append(ephem)
        elif line.startswith("META_STOP"):
            mode = "data"

            # Check for required fields
            for k in required:
                if k not in ephem:
                    raise ValueError("Missing field '{}'".format(k))

            # Conversion to be compliant with beyond.env.jpl dynamic reference
            # frames naming convention.
            if ephem['CENTER_NAME'].lower() != "earth":
                ephem['REF_FRAME'] = ephem['CENTER_NAME'].title().replace(" ", "")
        elif mode == "meta":
            # Metadata lines are "KEY = VALUE" pairs.
            key, _, value = line.partition("=")
            ephem[key.strip()] = value.strip()
        elif mode == "data":
            date, *state_vector = line.split()
            date = Date.strptime(date, "%Y-%m-%dT%H:%M:%S.%f", scale=ephem['TIME_SYSTEM'])

            # Conversion from km to m, from km/s to m/s
            # and discard acceleration if present
            state_vector = np.array([float(x) for x in state_vector[:6]]) * 1000

            ephem['orbits'].append(Orbit(date, state_vector, 'cartesian', ephem['REF_FRAME'], None))

    # Replace each raw dict with a proper Ephem object, in place.
    for i, ephem_dict in enumerate(ephems):
        if not ephem_dict['orbits']:
            raise ValueError("Empty ephemeris")
        # In case there is no recommendation for interpolation
        # default to a Lagrange 8th order
        method = ephem_dict.get('INTERPOLATION', 'Lagrange').lower()
        order = int(ephem_dict.get('INTERPOLATION_DEGREE', 7)) + 1
        ephem = Ephem(ephem_dict['orbits'], method=method, order=order)

        ephem.name = ephem_dict['OBJECT_NAME']
        ephem.cospar_id = ephem_dict['OBJECT_ID']
        ephems[i] = ephem

    if len(ephems) == 1:
        return ephems[0]

    return ephems
def build_default_link(self):
    '''Called when 'link' is not defined in the settings.

    Builds a ``<link rel="stylesheet">`` tag pointing at this provider's
    static file, with the version id appended as a hex cache-buster.
    '''
    # Force forward slashes so the URL is valid on Windows too.
    href = '{}?{:x}'.format(
        os.path.join(settings.STATIC_URL, self.filepath).replace(os.path.sep, '/'),
        self.version_id,
    )
    attrs = {'rel': 'stylesheet', 'href': href}
    attrs.update(self.options['link_attrs'])
    attrs['data-context'] = self.provider_run.uid  # can't be overridden
    return '<link{} />'.format(flatatt(attrs))
def translate(self, tx, ty):
    """Applies a translation by :obj:`tx`, :obj:`ty` to the transformation
    in this matrix.

    The effect of the new transformation is to first translate the
    coordinates by :obj:`tx` and :obj:`ty`, then apply the original
    transformation to the coordinates.

    .. note::
        This changes the matrix in-place.

    :param tx: Amount to translate in the X direction.
    :param ty: Amount to translate in the Y direction.
    :type tx: float
    :type ty: float
    """
    matrix_ptr = self._pointer
    cairo.cairo_matrix_translate(matrix_ptr, tx, ty)
def get_passphrase(self, prompt='Passphrase:'):
    """Ask the user for the passphrase, using the cache when available.

    The (possibly freshly entered) passphrase is always written back to
    the cache when one is configured.
    """
    cache = self.cached_passphrase_ack
    passphrase = cache.get() if cache else None
    if passphrase is None:
        # Nothing cached: prompt the user interactively.
        passphrase = interact(
            title='{} passphrase'.format(self.device_name),
            prompt=prompt,
            description=None,
            binary=self.passphrase_entry_binary,
            options=self.options_getter())
    if cache:
        cache.set(passphrase)
    return passphrase
def parse_variable_definition(lexer: Lexer) -> VariableDefinitionNode:
    """VariableDefinition: Variable: Type DefaultValue? Directives[Const]?"""
    start = lexer.token
    variable = parse_variable(lexer)
    # The colon is mandatory; expect_token raises on mismatch, and its
    # truthy return lets us chain straight into the type reference.
    var_type = expect_token(lexer, TokenKind.COLON) and parse_type_reference(lexer)
    if expect_optional_token(lexer, TokenKind.EQUALS):
        default_value = parse_value_literal(lexer, True)
    else:
        default_value = None
    directives = parse_directives(lexer, True)
    return VariableDefinitionNode(
        variable=variable,
        type=var_type,
        default_value=default_value,
        directives=directives,
        loc=loc(lexer, start),
    )
def find_applications_on_system():
    """Collect Maya versions from the Autodesk folder on the system PATH.

    Walks every entry of the ``PATH`` environment variable; for each
    existing directory whose name ends with ``DEVELOPER_NAME``, merges in
    the {version: executable} mapping returned by
    ``get_version_exec_mapping_from_path``.

    :return: dict mapping version strings to executable paths
    """
    # First we collect maya versions from the Autodesk folder we presume
    # is added to the system environment "PATH".
    # Bug fix: os.getenv('PATH') can be None in a stripped environment;
    # default to '' instead of crashing on .split().
    path_env = os.getenv('PATH', '').split(os.pathsep)

    versions = {}
    for entry in path_env:
        path = Path(os.path.expandvars(entry))
        if not path.exists():
            continue
        # (The original re-checked path.exists() inside this branch;
        # that second check was redundant and has been removed.)
        if path.name.endswith(DEVELOPER_NAME):
            versions.update(get_version_exec_mapping_from_path(path))
    return versions
def get_bank_ids_by_item(self, item_id):
    """Gets the list of ``Bank`` ``Ids`` mapped to an ``Item``.

    arg:    item_id (osid.id.Id): ``Id`` of an ``Item``
    return: (osid.id.IdList) - list of bank ``Ids``
    raise:  NotFound - ``item_id`` is not found
    raise:  NullArgument - ``item_id`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  PermissionDenied - assessment failure
    *compliance: mandatory -- This method must be implemented.*
    """
    # Implemented from template for
    # osid.resource.ResourceBinSession.get_bin_ids_by_resource
    mgr = self._get_provider_manager('ASSESSMENT', local=True)
    lookup_session = mgr.get_item_lookup_session(proxy=self._proxy)
    lookup_session.use_federated_bank_view()
    item = lookup_session.get_item(item_id)
    return IdList([Id(idstr) for idstr in item._my_map['assignedBankIds']])
def _deck_from_smoothie(
        self, smoothie_pos: Dict[str, float]) -> Dict[Axis, float]:
    """Build a deck-abs position store from the smoothie's position.

    This should take the smoothie style position {'X': float, etc}
    and turn it into the position dict used here {Axis.X: float} in
    deck-absolute coordinates. It runs the reverse deck transformation
    for the axes that require it.

    One piece of complexity is that if the gantry transformation includes
    a transition between non parallel planes, the z position of the left
    mount would depend on its actual position in deck frame, so we have
    to apply the mount offset.

    TODO: Figure out which frame the mount offset is measured in, because
          if it's measured in the deck frame (e.g. by touching off points
          on the deck) it has to go through the reverse transform to be
          added to the smoothie coordinates here.
    """
    # Re-key from smoothie axis letters to Axis enum members.
    with_enum = {Axis[k]: v for k, v in smoothie_pos.items()}
    # Plunger axes are not part of the gantry and bypass the transform.
    plunger_axes = {k: v for k, v in with_enum.items()
                    if k not in Axis.gantry_axes()}
    right = (with_enum[Axis.X], with_enum[Axis.Y],
             with_enum[Axis.by_mount(top_types.Mount.RIGHT)])
    # Tell apply_transform to just do the change of base part of the
    # transform rather than the full affine transform, because this is
    # an offset
    left = (with_enum[Axis.X], with_enum[Axis.Y],
            with_enum[Axis.by_mount(top_types.Mount.LEFT)])
    # Run both mounts through the reverse gantry calibration transform;
    # X/Y are taken from the right-mount result below.
    right_deck = linal.apply_reverse(self.config.gantry_calibration, right)
    left_deck = linal.apply_reverse(self.config.gantry_calibration, left)
    deck_pos = {Axis.X: right_deck[0],
                Axis.Y: right_deck[1],
                Axis.by_mount(top_types.Mount.RIGHT): right_deck[2],
                Axis.by_mount(top_types.Mount.LEFT): left_deck[2]}
    deck_pos.update(plunger_axes)
    return deck_pos
def set_authentication_predicate(predicate, params=()):
    """Assign a new authentication predicate to an RPC method.

    This is the most generic decorator used to implement authentication.
    Predicate is a standard function with the following signature:

    .. code:: python

       def my_predicate(request, *params):
           # Inspect request and extract required information
           if <condition>:
               # The condition to execute the method are met
               return True
           return False

    :param predicate: callable evaluated against the request
    :param params: extra positional arguments passed to the predicate
    :return: a decorator that appends the predicate to the RPC method
    """
    def wrapper(rpc_method):
        # Initialise the predicate lists on first decoration, then append.
        if not hasattr(rpc_method, 'modernrpc_auth_predicates'):
            rpc_method.modernrpc_auth_predicates = []
            rpc_method.modernrpc_auth_predicates_params = []
        rpc_method.modernrpc_auth_predicates.append(predicate)
        rpc_method.modernrpc_auth_predicates_params.append(params)
        return rpc_method
    return wrapper
def save(self):
    """Write this PlayerRecord's settings to its JSON file on disk."""
    payload = json.dumps(self.simpleAttrs, indent=4, sort_keys=True)
    # Written as bytes (UTF-8) to match the binary file mode.
    with open(self.filename, "wb") as handle:
        handle.write(payload.encode())
def main():
    """Mainloop for the application."""
    logging.basicConfig(level=logging.INFO)
    application = RunSnakeRunApp(0)
    application.MainLoop()
async def _runJob(self, user, appt):
    '''Actually run the storm query, updating the appropriate statistics
    and results on the appointment (start/finish times, run state, result
    summary), persisting them via ``_storeAppt``.
    '''
    count = 0
    # Mark the appointment running and persist before executing, so the
    # state survives a crash mid-run.
    appt.isrunning = True
    appt.laststarttime = time.time()
    appt.startcount += 1
    await self._storeAppt(appt)

    # Attribute all nodes produced by this run to the cron job.
    with s_provenance.claim('cron', iden=appt.iden):
        logger.info('Agenda executing for iden=%s, user=%s, query={%s}',
                    appt.iden, user.name, appt.query)
        starttime = time.time()
        try:
            # Drain the query; we only need the node count.
            async for _ in self.core.eval(appt.query, user=user):  # NOQA
                count += 1
        except asyncio.CancelledError:
            # Record the cancellation but let it propagate.
            result = 'cancelled'
            raise
        except Exception as e:
            result = f'raised exception {e}'
            logger.exception('Agenda job %s raised exception', appt.iden)
        else:
            result = f'finished successfully with {count} nodes'
        finally:
            # NOTE(review): ``result`` would be unbound here if a
            # BaseException other than CancelledError escaped the body —
            # presumably that cannot happen in practice; verify.
            finishtime = time.time()
            logger.info('Agenda completed query for iden=%s with result "%s" took %0.3fs',
                        appt.iden, result, finishtime - starttime)
            appt.lastfinishtime = finishtime
            appt.isrunning = False
            appt.lastresult = result
            # Skip persistence during teardown.
            if not self.isfini:
                await self._storeAppt(appt)
def add_hypermedia(self, obj):
    '''Adds HATEOAS links to the resource.

    Adds an href link to self. Override in subclasses to include
    additional functionality.
    '''
    if not hasattr(self, 'pk'):
        return
    href = '{}{}/'.format(self.get_resource_uri(), obj[self.pk])
    obj['_links'] = {'self': {'href': href}}
def reverse_ip(self, domain=None, limit=None, **kwargs):
    """Pass in a domain name.

    Delegates to ``_results`` with the reverse-ip endpoint for *domain*.
    """
    endpoint = '/v1/{0}/reverse-ip'.format(domain)
    return self._results('reverse-ip', endpoint, limit=limit, **kwargs)
def get_elements(self, json_string, expr):
    """Get list of elements from _json_string_, matching
    [http://goessner.net/articles/JsonPath/|JSONPath] expression.

    *Args:*\n
        _json_string_ - JSON string;\n
        _expr_ - JSONPath expression;

    *Returns:*\n
        List of found elements or ``None`` if no elements were found
    """
    document = self.string_to_json(json_string)
    # parsing jsonpath
    jsonpath_expr = parse(expr)
    matches = [found.value for found in jsonpath_expr.find(document)]
    return matches if matches else None
def GetZoneGroupState(self, *args, **kwargs):
    """Overrides default handling to use the global shared zone group
    state cache, unless another cache is specified.
    """
    # Fall back to the shared cache only when the caller did not supply one.
    kwargs.setdefault('cache', zone_group_state_shared_cache)
    return self.send_command('GetZoneGroupState', *args, **kwargs)
def post_save_update_cache(sender, instance, created, raw, **kwargs):
    """Update the cache when an instance is created or modified.

    Skips fixture loads (``raw``), models not registered for caching,
    and instances flagged with ``_delay_cache``.
    """
    if raw:
        return
    model_name = sender.__name__
    if model_name not in cached_model_names:
        return
    if getattr(instance, '_delay_cache', False):
        return
    # Imported lazily to avoid a circular import at module load time.
    from .tasks import update_cache_for_instance
    update_cache_for_instance(model_name, instance.pk, instance)
def create(self, name, **params):
    """Creates a new role.

    This function makes two roundtrips to the server, plus at most
    two more if the ``autologin`` field of :func:`connect` is set to
    ``True``.

    :param name: Name for the role.
    :type name: ``string``
    :param params: Additional arguments (optional). For a list of available
        parameters, see `Roles parameters
        <http://dev.splunk.com/view/SP-CAAAEJ6#rolesparams>`_
        on Splunk Developer Portal.
    :type params: ``dict``
    :return: The new role.
    :rtype: :class:`Role`
    """
    if not isinstance(name, six.string_types):
        raise ValueError("Invalid role name: %s" % str(name))
    name = name.lower()
    self.post(name=name, **params)
    # splunkd doesn't return the user in the POST response body,
    # so we have to make a second round trip to fetch it.
    response = self.get(name)
    entry = _load_atom(response, XNAME_ENTRY).entry
    state = _parse_atom_entry(entry)
    return self.item(
        self.service,
        urllib.parse.unquote(state.links.alternate),
        state=state)
def stats_for_satellite_image(self, metaimage):
    """Retrieve statistics for the satellite image described by *metaimage*.

    Only the 'EVI' and 'NDVI' presets are currently supported.

    :param metaimage: the satellite image's metadata, in the form of a
        `MetaImage` subtype instance
    :type metaimage: a `pyowm.agroapi10.imagery.MetaImage` subtype
    :return: dict
    """
    if metaimage.preset not in (PresetEnum.EVI, PresetEnum.NDVI):
        raise ValueError("Unsupported image preset: should be EVI or NDVI")
    if metaimage.stats_url is None:
        raise ValueError("URL for image statistics is not defined")
    _status, data = self.http_client.get_json(metaimage.stats_url, params={})
    return data
def create_unsigned_transaction(
        cls, *,
        nonce: int,
        gas_price: int,
        gas: int,
        to: Address,
        value: int,
        data: bytes) -> 'BaseUnsignedTransaction':
    """Create an unsigned transaction.

    Abstract factory hook: concrete transaction classes must override
    this and return an unsigned transaction built from the given fields.
    """
    raise NotImplementedError("Must be implemented by subclasses")
def main(args, stop=False):
    """Arguments parsing, etc..

    Builds the antivirus AMQP daemon from settings and runs it either in
    the foreground or daemonized, depending on *args* and *stop*.
    """
    daemon = AMQPDaemon(
        con_param=getConParams(settings.RABBITMQ_ANTIVIRUS_VIRTUALHOST),
        queue=settings.RABBITMQ_ANTIVIRUS_INPUT_QUEUE,
        out_exch=settings.RABBITMQ_ANTIVIRUS_EXCHANGE,
        out_key=settings.RABBITMQ_ANTIVIRUS_OUTPUT_KEY,
        react_fn=reactToAMQPMessage,
        glob=globals()  # used in deserializer
    )
    run_in_foreground = (not stop) and args.foreground
    if run_in_foreground:
        daemon.run()
    else:
        daemon.run_daemon()