signature
stringlengths
29
44.1k
implementation
stringlengths
0
85.2k
def parse(cls, gvid, exception=True):
    """Parse a string value into the geoid of this class.

    :param gvid: String value to parse.
    :param exception: If true (default) raise an exception on parse errors.
        If False, return a 'null' geoid.
    :return: an instance of the summary-level class encoded in the gvid.
    """
    # Sentinel value used elsewhere for unparseable geoids.
    if gvid == 'invalid':
        return cls.get_class('null')(0)
    if not bool(gvid):
        return None
    if not isinstance(gvid, six.string_types):
        raise TypeError("Can't parse; not a string. Got a '{}' ".format(type(gvid)))
    try:
        if not cls.sl:
            # Civick and ACS include the SL, so can call from base type.
            # On py2, classmethod access yields a bound method; __func__ unwraps it.
            if six.PY3:
                fn = cls.decode
            else:
                fn = cls.decode.__func__
            sl = fn(gvid[0:cls.sl_width])
        else:
            sl = cls.sl  # Otherwise must use derived class.
    except ValueError as e:
        if exception:
            raise ValueError("Failed to parse gvid '{}': {}".format(gvid, str(e)))
        else:
            return cls.get_class('null')(0)
    try:
        # Rebind cls to the concrete class for the parsed summary level.
        cls = cls.sl_map[sl]
    except KeyError:
        if exception:
            raise ValueError(
                "Failed to parse gvid '{}': Unknown summary level '{}' ".format(gvid, sl))
        else:
            return cls.get_class('null')(0)
    m = cls.regex.match(gvid)
    if not m:
        raise ValueError("Failed to match '{}' to '{}' ".format(gvid, cls.regex_str))
    d = m.groupdict()
    if not d:
        return None
    if six.PY3:
        fn = cls.decode
    else:
        fn = cls.decode.__func__
    # Decode every captured group from its string form.
    d = {k: fn(v) for k, v in d.items()}
    # The summary level is implied by the class; drop it from the kwargs.
    try:
        del d['sl']
    except KeyError:
        pass
    return cls(**d)
def base_concrete_modeladmin(self):
    """The class inheriting directly from ContentModelAdmin."""
    # Walk the inheritance graph depth-first until we find a class whose
    # direct bases include ContentTypedAdmin.
    pending = [type(self)]
    while pending:
        klass = pending.pop()
        bases = klass.__bases__
        if ContentTypedAdmin in bases:
            return klass
        pending.extend(bases)
    raise Exception("Can't find base concrete ModelAdmin class.")
async def on_shutdown(self):
    """Cleans up any outstanding subscriptions."""
    # Drop server-side subscription state first, then stop accepting new ones.
    await self._unregister_subscriptions()
    self._accepting = False
    # Tell every connected websocket client that the server is going away.
    for websocket, _unused in self._subscribers:
        await websocket.close(code=aiohttp.WSCloseCode.GOING_AWAY,
                              message='Server shutdown')
def get_cases(self, skip_ws=False):
    """Returns a list of 2-tuples (condition, value).

    If an ELSE exists condition is None.
    """
    COND, VAL = 1, 2
    cases = []
    mode = COND
    for token in self.tokens:
        # Set mode from the current statement.
        if token.match(T.Keyword, 'CASE'):
            continue
        if skip_ws and token.ttype in T.Whitespace:
            continue
        if token.match(T.Keyword, 'WHEN'):
            cases.append(([], []))
            mode = COND
        elif token.match(T.Keyword, 'THEN'):
            mode = VAL
        elif token.match(T.Keyword, 'ELSE'):
            cases.append((None, []))
            mode = VAL
        elif token.match(T.Keyword, 'END'):
            mode = None
        # A condition may start without a preceding WHEN keyword.
        if mode and not cases:
            cases.append(([], []))
        # The keyword tokens themselves are kept in the current bucket.
        if mode == COND:
            cases[-1][0].append(token)
        elif mode == VAL:
            cases[-1][1].append(token)
    return cases
def thunder_decorator(func):
    """Decorator for functions so they could get as input a thunder.Images /
    thunder.Series object, while they are expecting an rdd. Also will return
    the data from rdd to the appropriate type.

    Assumes only one input object of type Images/Series, and up to one output
    object of type RDD.

    :param func: function to decorate
    :return: decorated function
    """
    @wraps(func)
    def dec(*args, **kwargs):
        # find Images / Series object in args (boolean masks per position)
        result = None
        args = list(args)
        image_args = list(map(lambda x: isinstance(x, td.images.Images), args))
        series_args = list(map(lambda x: isinstance(x, td.series.Series), args))
        rdd_args = list(map(lambda x: isinstance(x, RDD), args))
        # find Images / Series object in kwargs (keyword names per type)
        image_kwargs = []
        series_kwargs = []
        rdd_kwargs = []
        for key, value in iteritems(kwargs):
            if isinstance(value, td.images.Images):
                image_kwargs.append(key)
            if isinstance(value, td.series.Series):
                series_kwargs.append(key)
            if isinstance(value, RDD):
                rdd_kwargs.append(key)
        # make sure there is only one distributed input in total
        count = sum(image_args) + sum(series_args) + sum(rdd_args) + \
            len(image_kwargs) + len(series_kwargs) + len(rdd_kwargs)
        if count == 0:
            raise ValueError('Wrong data type, expected [RDD, Images, Series] got None')
        if count > 1:
            raise ValueError('Expecting on input argument of type Series / Images, got: %d'
                             % count)
        # bypass for RDD: already the type func expects, call through unchanged
        if sum(rdd_args) or len(rdd_kwargs):
            return func(*args, **kwargs)
        # image_flag records whether the input was Images (True) or Series
        # (False) so the output can be converted back to the same type.
        image_flag = None
        # convert to rdd and send
        if sum(image_args) > 0:
            image_flag = True
            index = np.where(image_args)[0][0]
            args[index] = args[index].tordd()
            result = func(*args, **kwargs)
        if sum(series_args) > 0:
            image_flag = False
            index = np.where(series_args)[0][0]
            args[index] = args[index].tordd()
            result = func(*args, **kwargs)
        if len(image_kwargs) > 0:
            image_flag = True
            kwargs[image_kwargs[0]] = kwargs[image_kwargs[0]].tordd()
            result = func(*args, **kwargs)
        if len(series_kwargs) > 0:
            image_flag = False
            kwargs[series_kwargs[0]] = kwargs[series_kwargs[0]].tordd()
            result = func(*args, **kwargs)
        if image_flag is None:
            raise RuntimeError('Target function did not run')
        # handle output: normalize to a tuple so we can scan for an RDD
        if not isinstance(result, tuple):
            result = (result,)
        result_len = len(result)
        rdd_index = np.where(list(map(lambda x: isinstance(x, RDD), result)))[0]
        # no RDD as output: return the raw result(s) unchanged
        if len(rdd_index) == 0:
            logging.getLogger('pySparkUtils').debug('No RDDs found in output')
            if result_len == 1:
                return result[0]
            else:
                return result
        if len(rdd_index) > 1:
            raise ValueError('Expecting one RDD as output got: %d' % len(rdd_index))
        result = list(result)
        rdd_index = rdd_index[0]
        # handle type of output: wrap the RDD back into the input's type
        if image_flag:
            result[rdd_index] = td.images.fromrdd(result[rdd_index])
        else:
            result[rdd_index] = td.series.fromrdd(result[rdd_index])
        if result_len == 1:
            return result[0]
        else:
            return result
    return dec
def get_nx_graph(ea):
    """Convert an IDA flowchart to a NetworkX graph.

    :param ea: effective address inside the function to convert
    :returns: a ``networkx.DiGraph`` with one node per basic block start EA
    """
    graph = networkx.DiGraph()
    flowchart = FlowChart(idaapi.get_func(ea))
    for basic_block in flowchart:
        # Add the node explicitly so edge-less blocks are kept too.
        graph.add_node(basic_block.startEA)
        for predecessor in basic_block.preds():
            graph.add_edge(predecessor.startEA, basic_block.startEA)
        for successor in basic_block.succs():
            graph.add_edge(basic_block.startEA, successor.startEA)
    return graph
def mouse_out(self):
    """Performs a mouse out the element.

    Currently works only on Chrome driver.
    """
    # Bring the element into view before moving the pointer off it.
    self.scroll_to()
    chain = ActionChains(self.parent.driver)
    chain.move_by_offset(0, 0).click().perform()
def main(args):
    '''register_retinotopy.main(args) can be given a list of arguments, such as
    sys.argv[1:]; these arguments may include any options and must include at
    least one subject id. All subjects whose ids are given are registered to a
    retinotopy model, and the resulting registration, as well as the
    predictions made by the model in the registration, are exported.
    '''
    plan = register_retinotopy_plan(args=args)
    # Reading 'files' forces the lazy plan to complete.
    exported = plan['files']
    if not exported:
        print('Error: No files exported.', file=sys.stderr)
        return 1
    return 0
def validate_params_match(method, parameters):
    """Validates that the given parameters are exactly the method's declared parameters.

    :param method: The method to be called
    :type method: function
    :param parameters: The parameters to use in the call
    :type parameters: dict[str, object] | list[object]
    :raises InvalidParamsError: if the parameters do not satisfy the signature
    """
    # inspect.getargspec() was removed in Python 3.11; getfullargspec() is the
    # drop-in replacement ('varkw' plays the role of the old 'keywords').
    argspec = inspect.getfullargspec(method)
    default_length = len(argspec.defaults) if argspec.defaults is not None else 0

    if isinstance(parameters, list):
        # Positional call: too many args only allowed with *varargs.
        if len(parameters) > len(argspec.args) and argspec.varargs is None:
            raise InvalidParamsError("Too many parameters")
        # Unfilled positions must all be covered by declared defaults.
        remaining_parameters = len(argspec.args) - len(parameters)
        if remaining_parameters > default_length:
            raise InvalidParamsError("Not enough parameters")
    elif isinstance(parameters, dict):
        missing_parameters = [key for key in argspec.args if key not in parameters]
        # The last 'default_length' declared args carry default values.
        default_parameters = set(argspec.args[len(argspec.args) - default_length:])
        for key in missing_parameters:
            if key not in default_parameters:
                raise InvalidParamsError("Parameter {} has not been satisfied".format(key))
        # Unknown keywords only allowed when the method declares **kwargs.
        extra_params = [key for key in parameters if key not in argspec.args]
        if len(extra_params) > 0 and argspec.varkw is None:
            raise InvalidParamsError("Too many parameters")
def _parse_extra_features(node, NHX_string):
    """Reads node's extra data from its NHX string.

    NHX uses this format: [&&NHX:prop1=value1:prop2=value2]

    :param node: node receiving the features via ``node.add_feature``
    :param NHX_string: raw NHX annotation, brackets included
    :raises NewickError: if a field lacks an ``=`` separator
    """
    NHX_string = NHX_string.replace("[&&NHX:", "")
    NHX_string = NHX_string.replace("]", "")
    for field in NHX_string.split(":"):
        try:
            # maxsplit=1 keeps values that legitimately contain '=' intact;
            # a plain split() raised ValueError on such fields.
            pname, pvalue = field.split("=", 1)
        except ValueError as e:
            raise NewickError('Invalid NHX format %s' % field) from e
        node.add_feature(pname, pvalue)
def list_blobs(kwargs=None, storage_conn=None, call=None):
    '''
    .. versionadded:: 2015.8.0

    List blobs associated with the container

    CLI Example:

    .. code-block:: bash

        salt-cloud -f list_blobs my-azure container=mycontainer

    container:
        The name of the storage container
    prefix:
        Optional. Filters the results to return only blobs whose names
        begin with the specified prefix.
    marker:
        Optional. A string value that identifies the portion of the list
        to be returned with the next list operation. The operation returns
        a marker value within the response body if the list returned was
        not complete. The marker value may then be used in a subsequent
        call to request the next set of list items. The marker value is
        opaque to the client.
    maxresults:
        Optional. Specifies the maximum number of blobs to return,
        including all BlobPrefix elements. If the request does not specify
        maxresults or specifies a value greater than 5,000, the server will
        return up to 5,000 items. Setting maxresults to a value less than
        or equal to zero results in error response code 400 (Bad Request).
    include:
        Optional. Specifies one or more datasets to include in the
        response. To specify more than one of these options on the URI,
        you must separate each option with a comma. Valid values are:

        snapshots:
            Specifies that snapshots should be included in the
            enumeration. Snapshots are listed from oldest to newest in
            the response.
        metadata:
            Specifies that blob metadata be returned in the response.
        uncommittedblobs:
            Specifies that blobs for which blocks have been uploaded,
            but which have not been committed using Put Block List
            (REST API), be included in the response.
        copy:
            Version 2012-02-12 and newer. Specifies that metadata
            related to any current or previous Copy Blob operation
            should be included in the response.
    delimiter:
        Optional. When the request includes this parameter, the operation
        returns a BlobPrefix element in the response body that acts as a
        placeholder for all blobs whose names begin with the same substring
        up to the appearance of the delimiter character. The delimiter may
        be a single character or a string.
    '''
    # Salt cloud "function" entry points must be invoked with -f/--function.
    if call != 'function':
        raise SaltCloudSystemExit(
            'The list_blobs function must be called with -f or --function.'
        )
    if kwargs is None:
        kwargs = {}
    if 'container' not in kwargs:
        raise SaltCloudSystemExit(
            'An storage container name must be specified as "container"'
        )
    # Lazily build a storage connection from the same kwargs when not given.
    if not storage_conn:
        storage_conn = get_storage_conn(conn_kwargs=kwargs)
    return salt.utils.msazure.list_blobs(storage_conn=storage_conn, **kwargs)
def HashFilePath(self, path, byte_count):
    """Updates underlying hashers with file on a given path.

    Args:
      path: A path to the file that is going to be fed to the hashers.
      byte_count: A maximum number of bytes that are going to be processed.
    """
    with open(path, "rb") as source:
        self.HashFile(source, byte_count)
def get_cat_model(model):
    """Return a model class from a ``"app.Model"`` string or a class.

    :param model: a dotted ``"app.Model"`` string or a ``CategoryBase`` subclass
    :raises TemplateSyntaxError: if the value cannot be resolved to a model
    """
    try:
        if isinstance(model, string_types):
            model_class = apps.get_model(*model.split("."))
        elif isinstance(model, type) and issubclass(model, CategoryBase):
            model_class = model
        else:
            # Anything else (non-CategoryBase class, instance, None, ...) is
            # invalid. Previously this path left model_class unbound and
            # crashed with UnboundLocalError instead of the intended error.
            raise TypeError
        if model_class is None:
            raise TypeError
    except TypeError:
        raise TemplateSyntaxError("Unknown model submitted: %s" % model)
    return model_class
def tabulate(self, n=None, headers=(), tablefmt="simple", floatfmt="g",
             numalign="decimal", stralign="left", missingval=""):
    """Return pretty string table of first n rows of sequence or everything
    if n is None.

    See https://bitbucket.org/astanin/python-tabulate for details on
    tabulate parameters.

    :param n: Number of rows to show, if set to None return all rows
    :param headers: Passed to tabulate
    :param tablefmt: Passed to tabulate
    :param floatfmt: Passed to tabulate
    :param numalign: Passed to tabulate
    :param stralign: Passed to tabulate
    :param missingval: Passed to tabulate
    """
    self.cache()
    total = self.len()
    if total == 0 or not is_tabulatable(self[0]):
        return None
    if n is not None and n < total:
        # Truncated view: note how many rows are hidden (format-dependent).
        rows = self.take(n).list()
        if tablefmt == 'simple':
            footer = '\nShowing {} of {} rows'.format(n, total)
        elif tablefmt == 'html':
            footer = '<p>Showing {} of {} rows'.format(n, total)
        else:
            footer = ''
    else:
        rows = self.list()
        footer = ''
    # Derive headers from namedtuple fields when none were supplied.
    if len(headers) == 0 and is_namedtuple(rows[0]):
        headers = rows[0]._fields
    # Note: this resolves to the module-level tabulate function, not this method.
    table = tabulate(rows, headers=headers, tablefmt=tablefmt,
                     floatfmt=floatfmt, numalign=numalign,
                     stralign=stralign, missingval=missingval)
    return table + footer
def add_or_update_record(
        self, dst_ase, src_block_list, offset, chunk_size, total_chunks,
        completed_chunks, completed):
    # type: (SyncCopyResumeManager,
    #        blobxfer.models.azure.StorageEntity, list, int, int, int,
    #        int, bool) -> None
    """Add or update a resume record.

    :param SyncCopyResumeManager self: this
    :param blobxfer.models.azure.StorageEntity dst_ase: Storage Entity
    :param list src_block_list: source block list
    :param int offset: offset
    :param int chunk_size: chunk size in bytes
    :param int total_chunks: total chunks
    :param int completed_chunks: completed chunks bitarray
    :param bool completed: if completed
    """
    key = blobxfer.operations.resume._BaseResumeManager.generate_record_key(dst_ase)
    with self.datalock():
        # look up an existing record; lock=False because we already hold it
        sc = self.get_record(dst_ase, key=key, lock=False)
        if sc is None:
            # first sighting: create a fresh resume record
            sc = blobxfer.models.resume.SyncCopy(
                length=dst_ase._size,
                src_block_list=src_block_list,
                offset=offset,
                chunk_size=chunk_size,
                total_chunks=total_chunks,
                completed_chunks=completed_chunks,
                completed=completed,
            )
        else:
            # skip the write if already finished or no new progress was made
            if sc.completed or completed_chunks == sc.completed_chunks:
                return
            sc.offset = offset
            sc.completed_chunks = completed_chunks
            # completed only transitions False -> True; never cleared here
            if completed:
                sc.completed = completed
        self._data[key] = sc
        # persist the updated record to the backing store
        self._data.sync()
def itemData(self, item, column, role=Qt.DisplayRole):
    """Returns the data stored under the given role for the item.

    The column parameter may be used to differentiate behavior per column.
    The default implementation does nothing. Descendants should typically
    override this function instead of data().

    Note: If you do not have a value to return, return an invalid QVariant
    instead of returning 0. (This means returning None in Python)
    """
    if role == Qt.DecorationRole:
        # Only the decoration column carries an icon/decoration.
        if column == self.COL_DECORATION:
            return item.decoration
    elif role == Qt.FontRole:
        return item.font
    elif role == Qt.ForegroundRole:
        return item.foregroundBrush
    elif role == Qt.BackgroundRole:
        return item.backgroundBrush
    elif role == Qt.SizeHintRole:
        # Fall back to the table-wide cell size hint when the item has none.
        return self.cellSizeHint if item.sizeHint is None else item.sizeHint
    return None
def _build_mesh ( self , mesh_method , ** kwargs ) : """this function takes mesh _ method and kwargs that came from the generic Body . intialize _ mesh and returns the grid . . . intialize mesh then takes care of filling columns and rescaling to the correct units , etc"""
# need the sma to scale between Roche and real units sma = kwargs . get ( 'sma' , self . sma ) # Rsol ( same units as coordinates ) mesh_args = self . instantaneous_mesh_args if mesh_method == 'marching' : # TODO : do this during mesh initialization only and then keep delta fixed in time ? ? ntriangles = kwargs . get ( 'ntriangles' , self . ntriangles ) # we need the surface area of the lobe to estimate the correct value # to pass for delta to marching . We will later need the volume to # expose its value logger . debug ( "libphoebe.roche_area_volume{}" . format ( mesh_args ) ) av = libphoebe . roche_area_volume ( * mesh_args , choice = 2 , larea = True , lvolume = True ) delta = _estimate_delta ( ntriangles , av [ 'larea' ] ) logger . debug ( "libphoebe.roche_marching_mesh{}" . format ( mesh_args ) ) new_mesh = libphoebe . roche_marching_mesh ( * mesh_args , delta = delta , choice = 2 , full = True , max_triangles = ntriangles * 2 , vertices = True , triangles = True , centers = True , vnormals = True , tnormals = True , cnormals = False , vnormgrads = True , cnormgrads = False , areas = True , volume = False , init_phi = self . mesh_init_phi ) # In addition to the values exposed by the mesh itself , let ' s report # the volume and surface area of the lobe . The lobe area is used # if mesh _ offseting is required , and the volume is optionally exposed # to the user . new_mesh [ 'volume' ] = av [ 'lvolume' ] # * sma * * 3 new_mesh [ 'area' ] = av [ 'larea' ] # * sma * * 2 scale = sma elif mesh_method == 'wd' : N = int ( kwargs . get ( 'gridsize' , self . gridsize ) ) # unpack mesh _ args q , F , d , Phi = mesh_args the_grid = mesh_wd . discretize_wd_style ( N , q , F , d , Phi ) new_mesh = mesh . wd_grid_to_mesh_dict ( the_grid , q , F , d ) scale = sma else : raise NotImplementedError ( "mesh_method '{}' is not supported" . format ( mesh_method ) ) return new_mesh , scale
def error(self, buf, newline=True):
    """Similar to `write`, except it writes buffer to error stream.
    If coloring enabled, adds error coloring.

    `buf`
        Data buffer to write.
    `newline`
        Append newline character to buffer before writing.
    """
    text = buf if buf else ''
    if self._colored:
        text = self.ESCAPE_RED + text + self.ESCAPE_CLEAR
    if newline:
        text = text + os.linesep
    try:
        stream = self._error
        stream.write(text)
        if hasattr(stream, 'flush'):
            stream.flush()
    except IOError as exc:
        # EPIPE just means the reader went away; anything else is re-raised.
        if exc.errno != errno.EPIPE:
            raise
def paste(**kwargs):
    """Returns system clipboard contents.

    :return: the clipboard text as unicode (CF_UNICODETEXT)
    """
    # NOTE(review): if GetClipboardData raises, the clipboard is left open —
    # a try/finally around CloseClipboard would be safer; left as-is to
    # preserve behavior.
    clip.OpenClipboard()
    d = clip.GetClipboardData(win32con.CF_UNICODETEXT)
    clip.CloseClipboard()
    return d
def get_pending_bios_settings(self, only_allowed_settings=True):
    """Get current BIOS settings.

    :param: only_allowed_settings: True when only allowed BIOS settings
            are to be returned. If False, All the BIOS settings supported
            by iLO are returned.
    :return: a dictionary of pending BIOS settings. Depending on the
             'only_allowed_settings', either only the allowed settings are
             returned or all the supported settings are returned.
    :raises: IloError, on an error from iLO.
    :raises: IloCommandNotSupportedError, if the command is not supported
             on the server.
    """
    headers, bios_uri, bios_settings = self._check_bios_resource()
    # The pending-settings resource is linked from the BIOS resource.
    try:
        settings_config_uri = bios_settings['links']['Settings']['href']
    except KeyError:
        msg = ("Settings resource not found. Couldn't get pending BIOS "
               "Settings.")
        raise exception.IloCommandNotSupportedError(msg)
    status, headers, config = self._rest_get(settings_config_uri)
    if status != 200:
        msg = self._get_extended_error(config)
        raise exception.IloError(msg)
    # Remove the "links" section
    config.pop("links", None)
    if only_allowed_settings:
        # Filter down to the whitelisted, supported properties.
        return utils.apply_bios_properties_filter(
            config, constants.SUPPORTED_BIOS_PROPERTIES)
    return config
def stats(self, request):
    '''Get stats for the provided request.

    :param request dict: A search request that also contains the 'interval'
        property.
    :returns: :py:class:`planet.api.models.JSON`
    :raises planet.api.exceptions.APIException: On API error.
    '''
    # work-around for API bug
    patched = _patch_stats_request(request)
    req = models.Request(self._url('data/v1/stats'),
                         self.auth,
                         body_type=models.JSON,
                         data=json.dumps(patched),
                         method='POST')
    return self.dispatcher.response(req).get_body()
def prune(self, whole=False, keys=None, names=None, filters=None):
    """Filter tree nodes based on given criteria.

    :param whole: passed through to ``self.climb`` to control traversal
    :param keys: keys that must all be present in ``node.data``
    :param names: regular expressions; a node name must match at least one
    :param filters: advanced filter expressions evaluated by ``utils.filter``
    :returns: generator of nodes meeting all criteria
    """
    # None defaults instead of the previous mutable [] defaults, which are
    # shared across calls (classic Python pitfall); behavior is unchanged.
    keys = keys or []
    names = names or []
    filters = filters or []
    for node in self.climb(whole):
        # Select only nodes with key content
        if not all(key in node.data for key in keys):
            continue
        # Select nodes with name matching regular expression
        if names and not any(re.search(name, node.name) for name in names):
            continue
        # Apply advanced filters if given
        try:
            if not all(utils.filter(filtr, node.data, regexp=True)
                       for filtr in filters):
                continue
        # Handle missing attribute as if filter failed
        except utils.FilterError:
            continue
        # All criteria met, thus yield the node
        yield node
def acctradinginfo_query(self, order_type, code, price, order_id=None,
                         adjust_limit=0, trd_env=TrdEnv.REAL, acc_id=0,
                         acc_index=0):
    """Query the account's maximum tradable quantities for a security.

    :param order_type: order type, see OrderType
    :param code: security code, e.g. 'HK.00700'
    :param price: quoted price, 3 decimal places
    :param order_id: order id; None for a new order, or the order id when
        amending an existing order
    :param adjust_limit: price adjustment direction and percentage limit;
        positive adjusts upward, negative downward, the magnitude is the
        limit (e.g. 0.015 = adjust up by at most 1.5%; -0.01 = adjust down
        by at most 1%). Default 0 means no adjustment.
    :param trd_env: trading environment, see TrdEnv
    :param acc_id: business account id; default 0 means the first one
    :param acc_index: int, index into the list of trading sub-account ids,
        default 0 (the first one)
    :return: (ret, data). When ret == RET_OK, data is a pd.DataFrame with
        columns:

        ======================= ====== ==========================================
        max_cash_buy            float  max lots buyable with cash only (no margin)
        max_cash_and_margin_buy float  max lots buyable with cash plus margin
        max_position_sell       float  max lots sellable from holdings (no short)
        max_sell_short          float  max lots that can be sold short,
                                       excluding long positions
        max_buy_back            float  max lots that must be bought back after
                                       shorting, before buying long again
        ======================= ====== ==========================================

        When ret != RET_OK, data is an error message.
    """
    # Validate the trading environment and resolve the effective account id.
    ret, msg = self._check_trd_env(trd_env)
    if ret != RET_OK:
        return ret, msg
    ret, msg, acc_id = self._check_acc_id_and_acc_index(trd_env, acc_id, acc_index)
    if ret != RET_OK:
        return ret, msg
    ret, content = self._split_stock_code(code)
    if ret != RET_OK:
        return ret, content
    market_str, stock_code = content
    query_processor = self._get_sync_query_processor(
        AccTradingInfoQuery.pack_req, AccTradingInfoQuery.unpack_rsp)
    kargs = {
        'order_type': order_type,
        'code': str(stock_code),
        'price': price,
        'order_id': order_id,
        'adjust_limit': adjust_limit,
        'trd_mkt': self.__trd_mkt,
        'sec_mkt_str': market_str,
        'trd_env': trd_env,
        'acc_id': acc_id,
        'conn_id': self.get_sync_conn_id()
    }
    ret_code, msg, data = query_processor(**kargs)
    if ret_code != RET_OK:
        return RET_ERROR, msg
    col_list = ['max_cash_buy', 'max_cash_and_margin_buy',
                'max_position_sell', 'max_sell_short', 'max_buy_back']
    acctradinginfo_table = pd.DataFrame(data, columns=col_list)
    return RET_OK, acctradinginfo_table
def syncView(self, recursive=False):
    """Syncs the information from this item to the view.

    :param recursive: when True, also sync all descendant items
    """
    # update the view widget
    gantt = self.ganttWidget()
    tree = self.treeWidget()
    if not gantt:
        return
    vwidget = gantt.viewWidget()
    scene = vwidget.scene()
    cell_w = gantt.cellWidth()
    tree_offset_y = tree.header().height() + 1
    tree_offset_y += tree.verticalScrollBar().value()
    # collect the items to work on
    items = [self]
    if recursive:
        items += list(self.children(recursive=True))
    for item in items:
        # grab the view item from the gantt item
        vitem = item.viewItem()
        if not vitem.scene():
            scene.addItem(vitem)
        # make sure the item should be visible
        if item.isHidden() or not tree:
            vitem.hide()
            continue
        vitem.show()
        tree_rect = tree.visualItemRect(item)
        tree_y = tree_rect.y() + tree_offset_y
        tree_h = tree_rect.height()
        # check to see if this item is hidden
        if tree_rect.height() == 0:
            vitem.hide()
            continue
        if gantt.timescale() in (gantt.Timescale.Minute,
                                 gantt.Timescale.Hour,
                                 gantt.Timescale.Day):
            dstart = item.dateTimeStart()
            dend = item.dateTimeEnd()
            view_x = scene.datetimeXPos(dstart)
            view_r = scene.datetimeXPos(dend)
            view_w = view_r - view_x
        else:
            view_x = scene.dateXPos(item.dateStart())
            view_w = item.duration() * cell_w
            # determine the % off from the length based on this items time
            if not item.isAllDay():
                full_day = 24 * 60 * 60  # full days worth of seconds
                # determine the start offset
                start = item.timeStart()
                start_day = (start.hour() * 60 * 60)
                start_day += (start.minute() * 60)
                start_day += (start.second())
                offset_start = (start_day / float(full_day)) * cell_w
                # determine the end offset
                # BUGFIX: the end-of-day seconds previously mixed in
                # start.minute()/start.second() (copy-paste), which skewed
                # the right edge of the item; use the end time throughout.
                end = item.timeEnd()
                end_day = (end.hour() * 60 * 60)
                end_day += (end.minute() * 60)
                end_day += (end.second() + 1)  # forces at least 1 sec
                offset_end = ((full_day - end_day) / float(full_day))
                offset_end *= cell_w
                # update the xpos and widths
                view_x += offset_start
                view_w -= (offset_start + offset_end)
        view_w = max(view_w, 5)
        vitem.setSyncing(True)
        vitem.setPos(view_x, tree_y)
        vitem.setRect(0, 0, view_w, tree_h)
        vitem.setSyncing(False)
        # setup standard properties
        flags = vitem.ItemIsSelectable
        flags |= vitem.ItemIsFocusable
        if item.flags() & Qt.ItemIsEditable:
            flags |= vitem.ItemIsMovable
        vitem.setFlags(flags)
        item.syncDependencies()
def render_message(self):
    """Render the notification message.

    :return: the rendered message text
    """
    parts = []
    if self.title:
        parts.append('标题:{0}'.format(self.title))
    if self.message_time:
        # BUGFIX: previously the time line was formatted as
        # '{message}\n时间:...' even when no title had been set, which
        # rendered a literal "None" prefix.
        parts.append('时间:{0}'.format(self.time))
    if parts:
        parts.append('内容:{0}'.format(self.content))
        return '\n'.join(parts)
    # No title and no time: the message is just the raw content.
    return self.content
def __getIp6Address(self, addressType):
    """get specific type of IPv6 address configured on OpenThread_WpanCtl

    Args:
        addressType: the specific type of IPv6 address
            link local: link local unicast IPv6 address that's within
                one-hop scope
            global: global unicast IPv6 address
            rloc: mesh local unicast IPv6 address for routing in thread
                network
            mesh EID: mesh Endpoint Identifier

    Returns:
        IPv6 address string
    """
    addrType = ['link local', 'global', 'rloc', 'mesh EID']
    addrs = []
    globalAddr = []
    linkLocal64Addr = ''
    rlocAddr = ''
    meshEIDAddr = ''
    addrs = self.__sendCommand(WPANCTL_CMD + 'getprop -v IPv6:AllAddresses')
    for ip6AddrItem in addrs:
        # skip the bracket lines that delimit the address list
        # (raw strings for regex; py2-only print statements converted to
        # the function form, which is valid on both py2 and py3)
        if re.match(r'\[|\]', ip6AddrItem):
            continue
        # stop at the shell prompt that terminates the command output
        if re.match(WPAN_CARRIER_PROMPT, ip6AddrItem, re.M | re.I):
            break
        ip6AddrItem = ip6AddrItem.strip()
        ip6Addr = self.__stripValue(ip6AddrItem).split(' ')[0]
        ip6AddrPrefix = ip6Addr.split(':')[0]
        if ip6AddrPrefix == 'fe80':
            # link local address
            if ip6Addr.split(':')[4] != '0':
                linkLocal64Addr = ip6Addr
        elif ip6Addr.startswith(self.meshLocalPrefix):
            # mesh local address
            if ip6Addr.split(':')[4] == '0':
                # rloc
                rlocAddr = ip6Addr
            else:
                # mesh EID
                meshEIDAddr = ip6Addr
                print('meshEIDAddr:' + meshEIDAddr)
        else:
            # global ipv6 address
            if ip6Addr:
                print('globalAddr: ' + ip6Addr)
                globalAddr.append(ip6Addr)
            else:
                pass
    if addressType == addrType[0]:
        return linkLocal64Addr
    elif addressType == addrType[1]:
        return globalAddr
    elif addressType == addrType[2]:
        return rlocAddr
    elif addressType == addrType[3]:
        return meshEIDAddr
    else:
        pass
def update_with_diff(self, identifier, new_instance):
    """Update an existing model with a new one.

    :raises `ModelNotFoundError` if there is no existing model
    :returns: tuple of the merged instance and the version diff
    """
    with self.flushing():
        existing = self.retrieve(identifier)
        # Snapshot before and after the merge so callers get the delta.
        snapshot_before = Version(existing)
        self.merge(existing, new_instance)
        existing.updated_at = existing.new_timestamp()
        snapshot_after = Version(existing)
        return existing, snapshot_before - snapshot_after
def _get_datapoints(self, params):
    """Will make a direct REST call with the given json body payload to get
    datapoints.
    """
    endpoint = '{0}/v1/datapoints'.format(self.query_uri)
    return self.service._get(endpoint, params=params)
def tr(self, args, color=None):
    """Method to print ASCII patterns to terminal"""
    width = self._term_size()[1]
    if not args:
        # Full-width banner of '#'; defaults to green when no color given.
        banner_color = color if color is not None else "green"
        print(self._echo("#" * width, banner_color))
        return
    for symbol in args:
        repeats = width // len(symbol)
        if color is not None:
            print(self._echo(symbol * repeats, color))
        else:
            # No color requested: emit the raw pattern without echo styling.
            print(symbol * repeats)
def _log_future_exception(future, logger):
    """Log any exception raised by future."""
    if not future.done():
        # Still running: nothing to report yet.
        return
    try:
        # result() re-raises whatever the future captured.
        future.result()
    except:  # pylint: disable=bare-except; This is a background logging helper
        logger.warning("Exception in ignored future: %s", future, exc_info=True)
def make_app():
    """Helper function that creates a plnt app."""
    from plnt import Plnt
    # Fall back to a local sqlite database when no URI is configured.
    uri = os.environ.get("PLNT_DATABASE_URI") or "sqlite:////tmp/plnt.db"
    app = Plnt(uri)
    app.bind_to_context()
    return app
def fetch_option_taskfileinfos(self, typ, element):
    """Fetch the options for possible files to load, replace etc for the
    given element.

    This delegates to :meth:`ReftypeInterface.fetch_option_taskfileinfos`.

    :param typ: the typ of options. E.g. Asset, Alembic, Camera etc
    :type typ: str
    :param element: The element for which the options should be fetched.
    :type element: :class:`jukeboxcore.djadapter.models.Asset` |
        :class:`jukeboxcore.djadapter.models.Shot`
    :returns: The options
    :rtype: list of :class:`TaskFileInfo`
    """
    interface = self.get_typ_interface(typ)
    return interface.fetch_option_taskfileinfos(element)
def diff ( name_a , name_b = None , ** kwargs ) : '''Display the difference between a snapshot of a given filesystem and another snapshot of that filesystem from a later time or the current contents of the filesystem . name _ a : string name of snapshot name _ b : string ( optional ) name of snapshot or filesystem show _ changetime : boolean display the path ' s inode change time as the first column of output . ( default = True ) show _ indication : boolean display an indication of the type of file . ( default = True ) parsable : boolean if true we don ' t parse the timestamp to a more readable date ( default = True ) . . versionadded : : 2016.3.0 CLI Example : . . code - block : : bash salt ' * ' zfs . diff myzpool / mydataset @ yesterday myzpool / mydataset'''
# # Configure command # NOTE : initialize the defaults flags = [ '-H' ] target = [ ] # NOTE : set extra config from kwargs if kwargs . get ( 'show_changetime' , True ) : flags . append ( '-t' ) if kwargs . get ( 'show_indication' , True ) : flags . append ( '-F' ) # NOTE : update target target . append ( name_a ) if name_b : target . append ( name_b ) # # Diff filesystem / snapshot res = __salt__ [ 'cmd.run_all' ] ( __utils__ [ 'zfs.zfs_command' ] ( command = 'diff' , flags = flags , target = target , ) , python_shell = False , ) if res [ 'retcode' ] != 0 : return __utils__ [ 'zfs.parse_command_result' ] ( res ) else : if not kwargs . get ( 'parsable' , True ) and kwargs . get ( 'show_changetime' , True ) : ret = OrderedDict ( ) for entry in res [ 'stdout' ] . splitlines ( ) : entry = entry . split ( ) entry_timestamp = __utils__ [ 'dateutils.strftime' ] ( entry [ 0 ] , '%Y-%m-%d.%H:%M:%S.%f' ) entry_data = "\t\t" . join ( entry [ 1 : ] ) ret [ entry_timestamp ] = entry_data else : ret = res [ 'stdout' ] . splitlines ( ) return ret
def _validate_number_of_layers ( self , number_of_layers ) : """Makes sure that the specified number of layers to squash is a valid number"""
# Only positive numbers are correct if number_of_layers <= 0 : raise SquashError ( "Number of layers to squash cannot be less or equal 0, provided: %s" % number_of_layers ) # Do not squash if provided number of layer to squash is bigger # than number of actual layers in the image if number_of_layers > len ( self . old_image_layers ) : raise SquashError ( "Cannot squash %s layers, the %s image contains only %s layers" % ( number_of_layers , self . image , len ( self . old_image_layers ) ) )
def _round(self, **kwargs):
    """Round this object's numeric data in place via its math-info form.

    Subclasses may override this method.
    """
    info = self._toMathInfo(guidelines=False)
    rounded = info.round()
    self._fromMathInfo(rounded, guidelines=False)
def sinkhorn2(a, b, M, reg, method='sinkhorn', numItermax=1000,
              stopThr=1e-9, verbose=False, log=False, **kwargs):
    r"""Solve the entropic regularization optimal transport problem and
    return the loss.

    The function solves the following optimization problem:

    .. math::
        W = \min_\gamma <\gamma, M>_F + reg \cdot \Omega(\gamma)

        s.t. \gamma 1 = a

             \gamma^T 1 = b

             \gamma \geq 0

    where:

    - M is the (ns, nt) metric cost matrix
    - :math:`\Omega` is the entropic regularization term
      :math:`\Omega(\gamma)=\sum_{i,j}\gamma_{i,j}\log(\gamma_{i,j})`
    - a and b are source and target weights (sum to 1)

    The algorithm used for solving the problem is the Sinkhorn-Knopp matrix
    scaling algorithm as proposed in [2]_.

    Parameters
    ----------
    a : np.ndarray (ns,)
        samples weights in the source domain
    b : np.ndarray (nt,) or np.ndarray (nt, nbb)
        samples in the target domain; compute sinkhorn with multiple targets
        and fixed M if b is a matrix (return OT loss + dual variables in log)
    M : np.ndarray (ns, nt)
        loss matrix
    reg : float
        Regularization term > 0
    method : str
        method used for the solver, either 'sinkhorn', 'sinkhorn_stabilized'
        or 'sinkhorn_epsilon_scaling'; see those functions for specific
        parameters. An unknown method falls back to classic Sinkhorn-Knopp
        with a warning.
    numItermax : int, optional
        Max number of iterations
    stopThr : float, optional
        Stop threshold on error (> 0)
    verbose : bool, optional
        Print information along iterations
    log : bool, optional
        record log if True

    Returns
    -------
    W : (nt) ndarray or float
        Optimal transportation loss for the given parameters
    log : dict
        log dictionary returned only if log == True in parameters

    Examples
    --------
    >>> import ot
    >>> a = [.5, .5]
    >>> b = [.5, .5]
    >>> M = [[0., 1.], [1., 0.]]
    >>> ot.sinkhorn2(a, b, M, 1)
    array([ 0.26894142])

    References
    ----------
    .. [2] M. Cuturi, Sinkhorn Distances: Lightspeed Computation of Optimal
       Transport, NIPS 26, 2013
    .. [9] Schmitzer, B. (2016). Stabilized Sparse Scaling Algorithms for
       Entropy Regularized Transport Problems. arXiv:1610.06519
    .. [10] Chizat, L., Peyre, G., Schmitzer, B., & Vialard, F.X. (2016).
       Scaling algorithms for unbalanced transport problems. arXiv:1607.05816

    See Also
    --------
    ot.lp.emd : Unregularized OT
    ot.optim.cg : General regularized OT
    ot.bregman.sinkhorn_knopp : Classic Sinkhorn [2]
    ot.bregman.sinkhorn_stabilized : Stabilized sinkhorn [9][10]
    ot.bregman.sinkhorn_epsilon_scaling : Sinkhorn with epsilon scaling [9][10]
    """
    # Select the solver implementation by method name.
    method_lower = method.lower()
    if method_lower == 'sinkhorn':
        solver = sinkhorn_knopp
    elif method_lower == 'sinkhorn_stabilized':
        solver = sinkhorn_stabilized
    elif method_lower == 'sinkhorn_epsilon_scaling':
        solver = sinkhorn_epsilon_scaling
    else:
        print('Warning : unknown method using classic Sinkhorn Knopp')
        solver = sinkhorn_knopp

    b = np.asarray(b, dtype=np.float64)
    if len(b.shape) < 2:
        # 1-D b: treat as a single target distribution (column vector).
        b = b.reshape((-1, 1))

    # Fix: the unknown-method fallback previously called sinkhorn_knopp with
    # only **kwargs, silently dropping numItermax/stopThr/verbose/log and
    # running with the solver's defaults. All branches now forward the same
    # parameters consistently.
    return solver(a, b, M, reg, numItermax=numItermax, stopThr=stopThr,
                  verbose=verbose, log=log, **kwargs)
def flatten(iterables):
    """Yield the elements of each item in *iterables*, one level deep.

    String items are yielded whole instead of being split into characters.
    """
    for item in iterables:
        if isinstance(item, str):
            yield item
        else:
            yield from item
def _generate_shape ( word : str ) -> str : """Recreate shape from a token input by user Args : word : str Returns : str"""
def counting_stars ( w ) -> List [ int ] : count = [ 1 ] for i in range ( 1 , len ( w ) ) : if w [ i - 1 ] == w [ i ] : count [ - 1 ] += 1 else : count . append ( 1 ) return count shape = "" p = 0 for c in counting_stars ( word ) : if c > 4 : shape += word [ p : p + 4 ] else : shape += word [ p : p + c ] p = p + c return shape
def _compute_distance_fast(self):
    """Compute distance and matches via :func:`edit_distance` and cache them.

    If cached values for distance/matches already exist, assert that the
    freshly computed values agree before storing.
    """
    dist, matches = edit_distance(self.seq1, self.seq2,
                                  action_function=self.action_function,
                                  test=self.test)
    if self.dist:
        assert dist == self.dist
    if self._matches:
        assert matches == self._matches
    self.dist = dist
    self._matches = matches
def info(self, remote_path):
    """Return information about a resource on the WebDAV server.

    Issues a PROPFIND request; see
    http://webdav.org/specs/rfc4918.html#METHOD_PROPFIND

    :param remote_path: the path to the remote resource.
    :return: a dictionary of information attributes with the keys
        `created` (date of resource creation), `name`, `size` and
        `modified` (date of resource modification).
    :raises RemoteResourceNotFound: if the resource does not exist.
    """
    urn = Urn(remote_path)
    # Check both the plain path and its directory form before giving up.
    if not self.check(urn.path()) and \
            not self.check(Urn(remote_path, directory=True).path()):
        raise RemoteResourceNotFound(remote_path)

    response = self.execute_request(action='info', path=urn.quote())
    full_path = self.get_full_path(urn)
    return WebDavXmlUtils.parse_info_response(content=response.content,
                                              path=full_path,
                                              hostname=self.webdav.hostname)
async def shutdown(self, connmark=-1):
    '''Can call without delegate

    Emit a SHUTDOWN control event for this connection on the scheduler.

    :param connmark: connection mark identifying which connection instance
        to shut down; if ``None`` is passed explicitly, the current
        ``self.connmark`` is used.
        NOTE(review): the default is ``-1`` (not ``None``), so the fallback
        below never triggers for the default call -- confirm ``-1`` is the
        intended "match any/current" sentinel for ConnectionControlEvent.
    '''
    if connmark is None:
        connmark = self.connmark
    # Fire-and-forget: emergesend queues the control event immediately.
    self.scheduler.emergesend(ConnectionControlEvent(self, ConnectionControlEvent.SHUTDOWN, True, connmark))
def export(self, filename, fmt="DER"):
    """Export the certificate in 'fmt' format (DER or PEM) to file 'filename'.

    :param filename: path of the output file
    :param fmt: output format, either ``"DER"`` or ``"PEM"``
    :raises ValueError: if *fmt* is neither ``"DER"`` nor ``"PEM"``
    """
    # Validate the format before opening the file: previously an unknown
    # format silently truncated the target file to zero bytes.
    if fmt == "DER":
        data = self.der
    elif fmt == "PEM":
        data = self.pem
    else:
        raise ValueError("unknown export format: %r" % (fmt,))
    # Context manager guarantees the handle is closed even if write fails
    # (the original leaked the handle on exceptions).
    with open(filename, "wb") as f:
        f.write(data)
def parent_images(self):
    """:return: list of parent images -- one image per each stage's FROM instruction"""
    images = []
    from_instructions = (instr for instr in self.structure
                         if instr['instruction'] == 'FROM')
    for instr in from_instructions:
        image, _ = image_from(instr['value'])
        if image is not None:
            images.append(image)
    return images
def ensure_hexadecimal_string(self, value, command=None):
    """Make sure the given value is a hexadecimal string.

    :param value: The value to check (a string).
    :param command: The command that produced the value (a string or
        :data:`None`).
    :returns: The validated hexadecimal string.
    :raises: :exc:`~exceptions.ValueError` when `value` is not a hexadecimal
        string.
    """
    if HEX_PATTERN.match(value):
        return value
    # Build the error message, optionally naming the offending command.
    msg = "Expected a hexadecimal string, got '%s' instead!"
    if command:
        msg += " ('%s' gave unexpected output)"
        msg %= (value, command)
    else:
        msg %= value
    raise ValueError(msg)
def from_dict(d):
    """Transform the dict *d* into a response object and return it."""
    def build_records(items):
        # Convert a list of record dicts, leaving falsy values untouched.
        return [Record.from_dict(item) for item in items] if items else items

    query = d.get('query') or None
    person = d.get('person') or None
    return SearchAPIResponse(
        query=Person.from_dict(query) if query else query,
        person=Person.from_dict(person) if person else person,
        records=build_records(d.get('records')),
        suggested_searches=build_records(d.get('suggested_searches')),
        warnings_=d.get('warnings', []),
    )
def content(self, file_relpath):
    """Return the content of the file at *file_relpath*.

    :param file_relpath: relative path of the file to read.
    :returns: the raw file content.
    :raises: an access-ignored error if the path is ignored.
    """
    if self.isignored(file_relpath):
        self._raise_access_ignored(file_relpath)
    return self._content_raw(file_relpath)
def reconstitute_path(drive, folders):
    """Revert a tuple from `get_path_components` into a path.

    :param drive: A drive (eg 'c:'). Only applicable for NT systems.
    :param folders: A list of folder names.
    :return: A path comprising the drive and list of folder names. The path
        terminates with an `os.path.sep` *only* if it is a root directory.
    """
    return os.path.join(drive, os.path.sep, *folders)
def confirm_destructive_query(queries):
    """Check whether *queries* is destructive and, if so, prompt the user.

    Returns:
        * None if the query is non-destructive or we can't prompt the user.
        * True if the query is destructive and the user wants to proceed.
        * False if the query is destructive and the user doesn't want to
          proceed.
    """
    message = ("You're about to run a destructive command.\n"
               "Do you want to proceed? (y/n)")
    # Only prompt when stdin is an interactive terminal.
    if is_destructive(queries) and sys.stdin.isatty():
        return prompt(message, type=bool)
def _ParseLogonApplications(self, parser_mediator, registry_key):
    """Parses the registered logon applications.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      registry_key (dfwinreg.WinRegistryKey): Windows Registry key.
    """
    for application in self._LOGON_APPLICATIONS:
        command_value = registry_key.GetValueByName(application)
        if not command_value:
            continue

        event_data = windows_events.WindowsRegistryEventData()
        event_data.key_path = registry_key.path
        event_data.offset = registry_key.offset
        event_data.regvalue = {
            'Application': application,
            'Command': command_value.GetDataAsObject(),
            'Trigger': 'Logon'}
        event_data.source_append = ': Winlogon'

        event = time_events.DateTimeValuesEvent(
            registry_key.last_written_time,
            definitions.TIME_DESCRIPTION_WRITTEN)
        parser_mediator.ProduceEventWithEventData(event, event_data)
def setup(app):
    """Configure setup for Sphinx extension.

    :param app: Sphinx application context.
    """
    # Register every sphinxmark config value with its default, all scoped
    # to the 'html' rebuild condition.
    config_defaults = (
        ('sphinxmark_enable', False),
        ('sphinxmark_div', 'default'),
        ('sphinxmark_border', None),
        ('sphinxmark_repeat', True),
        ('sphinxmark_fixed', False),
        ('sphinxmark_image', 'default'),
        ('sphinxmark_text', 'default'),
        ('sphinxmark_text_color', (255, 0, 0)),
        ('sphinxmark_text_size', 100),
        ('sphinxmark_text_width', 1000),
        ('sphinxmark_text_opacity', 20),
        ('sphinxmark_text_spacing', 400),
        ('sphinxmark_text_rotation', 0),
    )
    for name, default in config_defaults:
        app.add_config_value(name, default, 'html')
    app.connect('env-updated', watermark)
    return {
        'version': '0.1.18',
        'parallel_read_safe': True,
        'parallel_write_safe': True,
    }
async def change_presence(self, *, activity=None, status=None, afk=False, shard_id=None):
    """|coro|

    Changes the client's presence.

    The activity parameter is a :class:`Activity` object (not a string) that
    represents the activity being done currently. This could also be the
    slimmed down versions, :class:`Game` and :class:`Streaming`.

    Example::

        game = discord.Game("with the API")
        await client.change_presence(status=discord.Status.idle, activity=game)

    Parameters
    ----------
    activity: Optional[Union[:class:`Game`, :class:`Streaming`, :class:`Activity`]]
        The activity being done. ``None`` if no currently active activity is done.
    status: Optional[:class:`Status`]
        Indicates what status to change to. If None, then
        :attr:`Status.online` is used.
    afk: :class:`bool`
        Indicates if you are going AFK. This allows the discord client to know
        how to handle push notifications better for you in case you are
        actually idle and not lying.
    shard_id: Optional[:class:`int`]
        The shard_id to change the presence to. If not specified or ``None``,
        then it will change the presence of every shard the bot can see.

    Raises
    ------
    InvalidArgument
        If the ``activity`` parameter is not of proper type.
    """
    # Map the requested status to the wire string sent to the gateway and the
    # Status enum cached on guild member objects below.
    if status is None:
        status = 'online'
        status_enum = Status.online
    elif status is Status.offline:
        # The gateway expects 'invisible' rather than 'offline'.
        status = 'invisible'
        status_enum = Status.offline
    else:
        status_enum = status
        status = str(status)

    if shard_id is None:
        # No shard given: push the presence update to every shard.
        for shard in self.shards.values():
            await shard.ws.change_presence(activity=activity, status=status, afk=afk)
        guilds = self._connection.guilds
    else:
        # Update only the requested shard and the guilds it serves.
        shard = self.shards[shard_id]
        await shard.ws.change_presence(activity=activity, status=status, afk=afk)
        guilds = [g for g in self._connection.guilds if g.shard_id == shard_id]

    # Keep the locally cached member state consistent with what was sent.
    for guild in guilds:
        me = guild.me
        if me is None:
            continue
        # NOTE(review): when activity is None this caches (None,) rather than
        # an empty tuple -- confirm downstream consumers expect that.
        me.activities = (activity,)
        me.status = status_enum
def storage_record2pairwise_info(storec: StorageRecord) -> PairwiseInfo:
    """Convert an indy-sdk non_secrets pairwise storage record into a PairwiseInfo.

    :param storec: (non-secret) storage record to convert to PairwiseInfo
    :return: PairwiseInfo on record DIDs, verkeys, metadata
    """
    # Metadata: all tags with any single leading '~' stripped from the name.
    metadata = {
        (name[1:] if name.startswith('~') else name): value
        for name, value in (storec.tags or {}).items()
    }
    return PairwiseInfo(
        storec.id,                    # = their did
        storec.value,                 # = their verkey
        storec.tags['~my_did'],
        storec.tags['~my_verkey'],
        metadata)
def CloseCHM(self):
    '''Closes the CHM archive.

    Closes the CHM file if one is open and resets all member variables.
    '''
    if self.filename is None:
        return
    chmlib.chm_close(self.file)
    self.file = None
    self.filename = ''
    self.title = ""
    self.home = "/"
    self.index = None
    self.topics = None
    self.encoding = None
def _resolve_readme ( self , path = None , silent = False ) : """Returns the path if it ' s a file ; otherwise , looks for a compatible README file in the directory specified by path . If path is None , the current working directory is used . If silent is set , the default relative filename will be returned if path is a directory or None if it does not exist . Raises ReadmeNotFoundError if no compatible README file can be found and silent is False ."""
# Default to current working directory if path is None : path = '.' # Normalize the path path = os . path . normpath ( path ) # Resolve README file if path is a directory if os . path . isdir ( path ) : return self . _find_file ( path , silent ) # Return path if file exists or if silent if silent or os . path . exists ( path ) : return path raise ReadmeNotFoundError ( path , 'File not found: ' + path )
def add_filter(self, methods=None, endpoints=None):
    """Adds a filter.

    :param methods: The HTTP methods to be filtered.
    :param endpoints: The endpoints to be filtered.
    :return Filter: The filter added.
    :raises ValueError: if neither methods nor endpoints are given.
    """
    if not (methods or endpoints):
        raise ValueError('Filter cannot be added with no criteria.')
    # Local renamed from 'filter' to avoid shadowing the builtin.
    trace_filter = TraceFilter(methods, endpoints)
    self.filters.append(trace_filter)
    return trace_filter
def trmm(L, B, alpha=1.0, trans='N', nrhs=None, offsetB=0, ldB=None):
    r"""Multiplication with sparse triangular matrix.

    Computes

    .. math::
        B &:= \alpha L B \text{ if trans is 'N'}

        B &:= \alpha L^T B \text{ if trans is 'T'}

    where :math:`L` is a :py:class:`cspmatrix` factor.

    :param L: :py:class:`cspmatrix` factor
    :param B: matrix
    :param alpha: float (default: 1.0)
    :param trans: 'N' or 'T' (default: 'N')
    :param nrhs: number of right-hand sides (default: number of columns
        in :math:`B`)
    :param offsetB: integer (default: 0)
    :param ldB: leading dimension of :math:`B` (default: number of rows
        in :math:`B`)
    """
    assert isinstance(L, cspmatrix) and L.is_factor is True, \
        "L must be a cspmatrix factor"
    assert isinstance(B, matrix), "B must be a matrix"

    if ldB is None:
        ldB = B.size[0]
    if nrhs is None:
        nrhs = B.size[1]
    assert trans in ['N', 'T']

    # Supernodal symbolic factorization data.
    n = L.symb.n
    snpost = L.symb.snpost
    snptr = L.symb.snptr
    snode = L.symb.snode
    chptr = L.symb.chptr
    chidx = L.symb.chidx
    relptr = L.symb.relptr
    relidx = L.symb.relidx
    blkptr = L.symb.blkptr
    blkval = L.blkval

    # Permutation (identity when p is None).
    p = L.symb.p
    if p is None:
        p = range(n)

    stack = []

    # Fix: the original compared strings with 'is' ("trans is 'N'"), which
    # relies on CPython string interning and raises a SyntaxWarning on
    # Python >= 3.8; use equality instead.
    if trans == 'N':
        for k in snpost:
            nn = snptr[k + 1] - snptr[k]    # |Nk|
            na = relptr[k + 1] - relptr[k]  # |Ak|
            nj = na + nn

            # extract and scale block from rhs
            Uk = matrix(0.0, (nj, nrhs))
            for j in range(nrhs):
                for i, ir in enumerate(snode[snptr[k]:snptr[k + 1]]):
                    Uk[i, j] = alpha * B[offsetB + j * ldB + p[ir]]

            blas.trmm(blkval, Uk, m=nn, n=nrhs, offsetA=blkptr[k], ldA=nj)

            if na > 0:
                # compute new contribution (to be stacked)
                blas.gemm(blkval, Uk, Uk, m=na, n=nrhs, k=nn, alpha=1.0,
                          offsetA=blkptr[k] + nn, ldA=nj, offsetC=nn)

            # add contributions from children
            for _ in range(chptr[k], chptr[k + 1]):
                Ui, i = stack.pop()
                r = relidx[relptr[i]:relptr[i + 1]]
                Uk[r, :] += Ui

            # if k is not a root node
            if na > 0:
                stack.append((Uk[nn:, :], k))

            # copy block to rhs
            for j in range(nrhs):
                for i, ir in enumerate(snode[snptr[k]:snptr[k + 1]]):
                    B[offsetB + j * ldB + p[ir]] = Uk[i, j]
    else:  # trans == 'T'
        for k in reversed(list(snpost)):
            nn = snptr[k + 1] - snptr[k]    # |Nk|
            na = relptr[k + 1] - relptr[k]  # |Ak|
            nj = na + nn

            # extract and scale block from rhs
            Uk = matrix(0.0, (nj, nrhs))
            for j in range(nrhs):
                for i, ir in enumerate(snode[snptr[k]:snptr[k + 1]]):
                    Uk[i, j] = alpha * B[offsetB + j * ldB + p[ir]]

            # if k is not a root node
            if na > 0:
                Uk[nn:, :] = stack.pop()

            # stack contributions for children
            for ii in range(chptr[k], chptr[k + 1]):
                i = chidx[ii]
                stack.append(Uk[relidx[relptr[i]:relptr[i + 1]], :])

            if na > 0:
                blas.gemm(blkval, Uk, Uk, alpha=1.0, beta=1.0, m=nn, n=nrhs,
                          k=na, transA='T', offsetA=blkptr[k] + nn, ldA=nj,
                          offsetB=nn)

            # scale and copy block to rhs
            blas.trmm(blkval, Uk, transA='T', m=nn, n=nrhs,
                      offsetA=blkptr[k], ldA=nj)
            for j in range(nrhs):
                for i, ir in enumerate(snode[snptr[k]:snptr[k + 1]]):
                    B[offsetB + j * ldB + p[ir]] = Uk[i, j]

    return
def _validate_min_version(min_version):
    """Validates the extension version matches the requested version.

    Args:
      min_version: Minimum version passed as a query param when establishing
          the connection, or None.

    Returns:
      An ExtensionVersionResult indicating validation status. If there is a
      problem, the error_reason field will be non-empty.
    """
    # No minimum requested: nothing to validate.
    if min_version is None:
        return ExtensionVersionResult(
            error_reason=None,
            requested_extension_version=None)

    try:
        parsed = version.StrictVersion(min_version)
    except ValueError:
        return ExtensionVersionResult(
            error_reason=ExtensionValidationError.UNPARSEABLE_REQUESTED_VERSION,
            requested_extension_version=min_version)

    if parsed > HANDLER_VERSION:
        return ExtensionVersionResult(
            error_reason=ExtensionValidationError.OUTDATED_VERSION,
            requested_extension_version=str(parsed))

    return ExtensionVersionResult(
        error_reason=None,
        requested_extension_version=min_version)
def multi_exposure_analysis_question_extractor(impact_report,
                                               component_metadata):
    """Extracting analysis question from the impact layer.

    :param impact_report: the impact report that acts as a proxy to fetch
        all the data that extractor needed
    :type impact_report: safe.report.impact_report.ImpactReport

    :param component_metadata: the component metadata. Used to obtain
        information about the component we want to render
    :type component_metadata:
        safe.report.report_metadata.ReportComponentsMetadata

    :return: context for rendering phase
    :rtype: dict

    .. versionadded:: 4.3
    """
    extra_args = component_metadata.extra_args
    provenance = impact_report.multi_exposure_impact_function.provenance
    return {
        'component_key': component_metadata.key,
        'header': resolve_from_dictionary(extra_args, 'header'),
        'analysis_questions': [provenance['analysis_question']],
    }
def install_templates(srcroot, destroot, prefix='', excludes=None,
                      includes=None, path_prefix=None):
    # pylint: disable=too-many-arguments,too-many-statements
    """Expand link to compiled assets all templates in *srcroot* and its
    subdirectories.

    Templates found under *srcroot* are copied into *destroot* with
    ``{% assets %}`` blocks replaced by links to compiled assets. *excludes*
    and *includes* are lists of regular-expression patterns: a path matching
    an exclude pattern is skipped unless it also matches an include pattern.
    *path_prefix* is forwarded to the URL rewriter used for generated links.
    """
    # pylint: disable=too-many-locals
    if excludes is None:
        excludes = []
    if includes is None:
        includes = []
    if not os.path.exists(os.path.join(prefix, destroot)):
        os.makedirs(os.path.join(prefix, destroot))
    for pathname in os.listdir(os.path.join(srcroot, prefix)):
        pathname = os.path.join(prefix, pathname)
        # Exclude patterns win unless an include pattern re-admits the path.
        excluded = False
        for pat in excludes:
            if re.match(pat, pathname):
                excluded = True
                break
        if excluded:
            for pat in includes:
                if re.match(pat, pathname):
                    excluded = False
                    break
        if excluded:
            LOGGER.debug("skip %s", pathname)
            continue
        source_name = os.path.join(srcroot, pathname)
        dest_name = os.path.join(destroot, pathname)
        if os.path.isfile(source_name) and not os.path.exists(dest_name):
            # We don't want to overwrite specific theme files by generic ones.
            with open(source_name) as source:
                template_string = source.read()
            try:
                template_string = force_text(template_string)
                lexer = DebugLexer(template_string)
                tokens = lexer.tokenize()
                if not os.path.isdir(os.path.dirname(dest_name)):
                    os.makedirs(os.path.dirname(dest_name))
                engine, libraries, builtins = get_html_engine()
                if isinstance(engine, DjangoTemplates):
                    # Django template engine: AssetsParser rewrites the token
                    # stream directly into the destination file.
                    with open(dest_name, 'w') as dest:
                        parser = AssetsParser(
                            tokens,
                            URLRewriteWrapper(dest, path_prefix),
                            libraries=libraries,
                            builtins=builtins,
                            origin=None)
                        parser.parse_through()
                else:
                    # Jinja2-style engine: scan the token stream with a small
                    # state machine and replace {% assets %}...{% endassets %}
                    # blocks with rendered asset links.
                    # NOTE(review): STATE_* constants are defined elsewhere
                    # in this module -- confirm their ordering assumptions.
                    template_name = None
                    tokens = Lexer(engine.env).tokeniter(
                        template_string, template_name, filename=source_name)
                    buffered_tokens = []
                    state = None
                    with open(dest_name, 'w') as dest:
                        for token in tokens:
                            if state is None:
                                if token[1] == 'block_begin':
                                    state = STATE_BLOCK_BEGIN
                            elif state == STATE_BLOCK_BEGIN:
                                if token[1] == 'name':
                                    if token[2] == 'assets':
                                        state = STATE_ASSETS_BEGIN
                                    else:
                                        buffered_tokens += [token]
                                        state = None
                            elif state == STATE_ASSETS_BEGIN:
                                if (token[1] == 'name'
                                        and token[2] == 'endassets'):
                                    state = STATE_ASSETS_END
                            elif state == STATE_ASSETS_END:
                                if token[1] == 'block_end':
                                    buffered_tokens += [token]
                                    state = None
                            if state is None:
                                if buffered_tokens:
                                    # Flush the buffer: either render the
                                    # assets block, or emit raw tokens.
                                    for tok in buffered_tokens:
                                        if (tok[1] == 'name'
                                                and tok[2] == 'assets'):
                                            dest.write(_render_assets(
                                                buffered_tokens,
                                                engine.env.assets_environment))
                                            buffered_tokens = []
                                            break
                                    if buffered_tokens:
                                        dest.write("%s" % ''.join(
                                            [token[2]
                                             for token in buffered_tokens]))
                                        buffered_tokens = []
                                else:
                                    dest.write("%s" % str(token[2]))
                            else:
                                buffered_tokens += [token]
                        if buffered_tokens:
                            # Flush any tokens still buffered at end of file.
                            dest.write("%s" % ''.join(
                                [token[2] for token in buffered_tokens]))
                            buffered_tokens = []
                        dest.write("\n")
                cmdline = ['diff', '-u', source_name, dest_name]
                cmd = subprocess.Popen(cmdline, stdout=subprocess.PIPE)
                lines = cmd.stdout.readlines()
                cmd.wait()
                # Non-zero error codes are ok here. That's how diff
                # indicates the files are different.
                if lines:
                    verb = 'compile'
                else:
                    verb = 'install'
                dest_multitier_name = dest_name.replace(
                    destroot, '*MULTITIER_TEMPLATES_ROOT*')
                LOGGER.debug(
                    "%s %s to %s", verb,
                    source_name.replace(django_settings.BASE_DIR,
                                        '*APP_ROOT*'),
                    dest_multitier_name)
            except UnicodeDecodeError:
                LOGGER.warning(
                    "%s: Templates can only be constructed "
                    "from unicode or UTF-8 strings.", source_name)
        elif os.path.isdir(source_name):
            # Recurse into subdirectories, carrying the filters along.
            install_templates(srcroot, destroot, prefix=pathname,
                              excludes=excludes, includes=includes,
                              path_prefix=path_prefix)
def euler_trans_matrix(etheta, elongan, eincl):
    """Return the 3x3 transformation matrix to translate/rotate a mesh
    according to euler angles.

    The matrix is R(long, incl, theta) =
    Rz(pi).Rz(long).Rx(incl).Rz(theta) =
    Rz(long).Rx(-incl).Rz(theta).Rz(pi), where Rx/Ry/Rz are the standard
    rotation matrices about the x/y/z axes and Rz(pi) is a reflection
    across the z-axis. Note R(0, 0, 0) = diag(-1, -1, 1).

    :parameter float etheta: euler theta angle
    :parameter float elongan: euler long of asc node angle
    :parameter float eincl: euler inclination angle
    :return: matrix with size 3x3
    """
    si, ci = sin(eincl), cos(eincl)
    so, co = sin(elongan), cos(elongan)
    st, ct = sin(etheta), cos(etheta)
    return np.array([
        [-co * ct + so * ci * st, co * st + so * ci * ct, -so * si],
        [-so * ct - co * ci * st, so * st - co * ci * ct, co * si],
        [si * st, si * ct, ci],
    ])
def name_usage(key=None, name=None, data='all', language=None,
               datasetKey=None, uuid=None, sourceId=None, rank=None,
               shortname=None, limit=100, offset=None, **kwargs):
    '''Lookup details for specific names in all taxonomies in GBIF.

    :param key: [fixnum] A GBIF key for a taxon
    :param name: [str] Filters by a case insensitive, canonical namestring,
        e.g. 'Puma concolor'
    :param data: [str] The type of data to get. Default: ``all``. Options:
        ``all``, ``verbatim``, ``name``, ``parents``, ``children``,
        ``related``, ``synonyms``, ``descriptions``, ``distributions``,
        ``media``, ``references``, ``speciesProfiles``, ``vernacularNames``,
        ``typeSpecimens``, ``root``
    :param language: [str] Language, default is english
    :param datasetKey: [str] Filters by the dataset's key (a uuid)
    :param uuid: [str] A uuid for a dataset. Should give exact same results
        as datasetKey.
    :param sourceId: [fixnum] Filters by the source identifier.
    :param rank: [str] Taxonomic rank, e.g. ``CLASS``, ``FAMILY``, ``GENUS``,
        ``SPECIES``, ``SUBSPECIES``, ``VARIETY``, ``UNRANKED``, ...
    :param shortname: [str] A short name.. need more info on this?
    :param limit: [fixnum] Number of records to return. Default: ``100``.
        Maximum: ``1000``. (optional)
    :param offset: [fixnum] Record number to start at. (optional)

    References: http://www.gbif.org/developer/species#nameUsages

    Usage::

        from pygbif import species
        species.name_usage(key=1)
        species.name_usage(name='Puma', rank="GENUS")
        species.name_usage(key=2435099, data='references')
        species.name_usage(key=3119195, data='vernacularNames', limit=3)
    '''
    args = {'language': language, 'name': name, 'datasetKey': datasetKey,
            'rank': rank, 'sourceId': sourceId, 'limit': limit,
            'offset': offset}
    data_choices = ['all', 'verbatim', 'name', 'parents', 'children',
                    'related', 'synonyms', 'descriptions', 'distributions',
                    'media', 'references', 'speciesProfiles',
                    'vernacularNames', 'typeSpecimens', 'root']
    check_data(data, data_choices)
    # A single data entry yields one result; a list yields one per entry.
    if len2(data) == 1:
        return name_usage_fetch(data, key, shortname, uuid, args, **kwargs)
    return [name_usage_fetch(x, key, shortname, uuid, args, **kwargs)
            for x in data]
def _normalize_coerce(self, mapping, schema):
    """ {'oneof': [
            {'type': 'callable'},
            {'type': 'list',
             'schema': {'oneof': [{'type': 'callable'},
                                  {'type': 'string'}]}},
            {'type': 'string'}
            ]} """
    # NOTE: Cerberus introspects this docstring as the validation schema for
    # the 'coerce' rule -- it is behavior-bearing, not prose documentation.
    error = errors.COERCION_FAILED
    # Apply coercion per field: a field's own schema wins; otherwise fall
    # back to a coerce rule on allow_unknown (when it is a mapping).
    for field in mapping:
        if field in schema and 'coerce' in schema[field]:
            mapping[field] = self.__normalize_coerce(
                schema[field]['coerce'], field, mapping[field],
                schema[field].get('nullable', False), error)
        elif isinstance(self.allow_unknown, Mapping) and \
                'coerce' in self.allow_unknown:
            mapping[field] = self.__normalize_coerce(
                self.allow_unknown['coerce'], field, mapping[field],
                self.allow_unknown.get('nullable', False), error)
def download_binaries(package_dir=False):
    """Download all binaries for the current platform.

    Parameters
    ----------
    package_dir: bool
        If set to `True`, the binaries will be downloaded to the
        `resources` directory of the qpsphere package instead of to the
        user's application data directory. Note that this might require
        administrative rights if qpsphere is installed in a system
        directory.

    Returns
    -------
    paths: list of pathlib.Path
        List of paths to binaries. This will always return binaries in
        the `resources` directory of the qpsphere package (if binaries
        are present there), in disregard of the parameter `package_dir`.
    """
    # Make sure the bhfield binaries are available on the system.
    paths = _bhfield.fetch.get_binaries()
    if not package_dir:
        return paths
    # Copy each binary into the package's `resources` directory,
    # skipping any that are already present.
    destination = RESCR_PATH
    copied = []
    for binary in paths:
        target = destination / binary.name
        if not target.exists():
            shutil.copy(binary, target)
        copied.append(target)
    return copied
def meta_changed_notify_after(self, state_machine_m, _, info):
    """Handle notification about the change of a state's meta data

    The meta data of the affected state(s) are read and the view updated accordingly.

    :param StateMachineModel state_machine_m: Always the state machine model belonging to this editor
    :param str _: Always "state_meta_signal"
    :param dict info: Information about the change, contains the MetaSignalMessage in the 'arg' key value
    """
    meta_signal_message = info['arg']
    # Ignore changes caused by ourself
    if meta_signal_message.origin == "graphical_editor_gaphas":
        return
    # Meta data can't be applied, as the view has not yet been created
    if meta_signal_message.origin == "load_meta_data":
        return
    notification = meta_signal_message.notification
    # For changes applied to the root state, there are always two notifications;
    # ignore the one with less information
    if not notification:
        return
    # While a complex action (e.g. group/ungroup) is running, skip updates.
    if self.model.ongoing_complex_actions:
        return

    model = notification.model
    view = self.canvas.get_view_for_model(model)

    if meta_signal_message.change == 'show_content':
        # The 'show_content' change toggles visibility of a LibraryState's
        # internal state machine copy.
        library_state_m = model
        library_state_v = view
        if library_state_m.meta['gui']['show_content'] is not library_state_m.show_content():
            logger.warning("The content of the LibraryState won't be shown, because "
                           "MAX_VISIBLE_LIBRARY_HIERARCHY is 1.")
        if library_state_m.show_content():
            if not library_state_m.state_copy_initialized:
                logger.warning("Show library content without initialized state copy does not work {0}"
                               "".format(library_state_m))
            logger.debug("Show content of {}".format(library_state_m.state))
            # Scale the inner state machine to fit the library state's view.
            gui_helper_meta_data.scale_library_content(library_state_m)
            self.add_state_view_for_model(library_state_m.state_copy, view,
                                          hierarchy_level=library_state_v.hierarchy_level + 1)
        else:
            logger.debug("Hide content of {}".format(library_state_m.state))
            state_copy_v = self.canvas.get_view_for_model(library_state_m.state_copy)
            if state_copy_v:
                state_copy_v.remove()
    else:
        # Any other meta change: re-apply the stored meta data to the view.
        if isinstance(view, StateView):
            view.apply_meta_data(recursive=meta_signal_message.affects_children)
        else:
            view.apply_meta_data()

    # Trigger a redraw of the affected view and block until it is done.
    self.canvas.request_update(view, matrix=True)
    self.canvas.wait_for_update()
def _send_kex_init(self):
    """
    announce to the other side that we'd like to negotiate keys, and what
    kind of key negotiation we support.
    """
    # Block outgoing user traffic while key (re-)negotiation is in progress.
    self.clear_to_send_lock.acquire()
    try:
        self.clear_to_send.clear()
    finally:
        self.clear_to_send_lock.release()
    self.in_kex = True
    if self.server_mode:
        if (self._modulus_pack is None) and ('diffie-hellman-group-exchange-sha1' in self._preferred_kex):
            # can't do group-exchange if we don't have a pack of potential primes
            pkex = list(self.get_security_options().kex)
            pkex.remove('diffie-hellman-group-exchange-sha1')
            self.get_security_options().kex = pkex
        # Offer only the host-key types we actually have keys for.
        available_server_keys = filter(self.server_key_dict.keys().__contains__,
                                       self._preferred_keys)
    else:
        available_server_keys = self._preferred_keys
    # Build the KEXINIT packet; the paired lists appear to be the
    # client-to-server / server-to-client algorithm lists of the SSH
    # KEXINIT message (presumably per RFC 4253 section 7.1 -- confirm).
    m = Message()
    m.add_byte(chr(MSG_KEXINIT))
    m.add_bytes(rng.read(16))  # 16 random cookie bytes
    m.add_list(self._preferred_kex)
    m.add_list(available_server_keys)
    m.add_list(self._preferred_ciphers)
    m.add_list(self._preferred_ciphers)
    m.add_list(self._preferred_macs)
    m.add_list(self._preferred_macs)
    m.add_list(self._preferred_compression)
    m.add_list(self._preferred_compression)
    m.add_string('')   # languages client->server (unused)
    m.add_string('')   # languages server->client (unused)
    m.add_boolean(False)  # no guessed kex packet follows
    m.add_int(0)       # reserved
    # save a copy for later (needed to compute a hash)
    self.local_kex_init = str(m)
    self._send_message(m)
def get_sns_subscriptions(app_name, env, region):
    """List SNS lambda subscriptions.

    Args:
        app_name (str): Name of the Lambda application.
        env (str): AWS account/profile name used for the boto3 session.
        region (str): AWS region to query.

    Returns:
        list: List of SNS subscription ARNs whose endpoint is the Lambda
            alias of the given application.
    """
    session = boto3.Session(profile_name=env, region_name=region)
    sns_client = session.client('sns')

    lambda_alias_arn = get_lambda_alias_arn(app=app_name, account=env, region=region)

    lambda_subscriptions = []
    # BUG FIX: ``list_subscriptions`` returns at most one page of results.
    # Use a paginator so subscriptions past the first page (NextToken)
    # are not silently missed.
    paginator = sns_client.get_paginator('list_subscriptions')
    for page in paginator.paginate():
        for subscription in page['Subscriptions']:
            if subscription['Protocol'] == "lambda" and subscription['Endpoint'] == lambda_alias_arn:
                lambda_subscriptions.append(subscription['SubscriptionArn'])

    if not lambda_subscriptions:
        LOG.debug('SNS subscription for function %s not found', lambda_alias_arn)

    return lambda_subscriptions
def get_feature_by_query(self, **kwargs):
    """Retrieve an enumerated sequence feature.

    By default this performs a synchronous HTTP request. Pass a
    ``callback`` function to perform the request asynchronously; the
    callback is invoked with the response and the request thread is
    returned instead of the data.

    :param callback function: The callback function for asynchronous request. (optional)
    :param str locus: locus name or URI
    :param str term: Sequence Ontology (SO) term name, accession, or URI
    :param int rank: feature rank, must be at least 1
    :param int accession: accession, must be at least 1
    :return: Feature, or the request thread when called asynchronously.
    """
    kwargs['_return_http_data_only'] = True
    if kwargs.get('callback'):
        # Asynchronous: the helper spawns a worker thread and returns it.
        return self.get_feature_by_query_with_http_info(**kwargs)
    # Synchronous: return the response data directly.
    return self.get_feature_by_query_with_http_info(**kwargs)
def product(self, *bases):
    """Compute the tensor product with one or more other bases.

    :param bases: One or more additional bases to form the product with.
    :return (OperatorBasis): The tensor product basis as an OperatorBasis object.
    """
    assert len(bases) >= 1
    # Fold the remaining bases right-to-left into a single basis.
    if len(bases) == 1:
        tail = bases[0]
    else:
        tail = bases[0].product(*bases[1:])
    combined = []
    for (left_label, left_op), (right_label, right_op) in itertools.product(self, tail):
        combined.append((left_label + right_label, qt.tensor(left_op, right_op)))
    return OperatorBasis(combined)
def _generate_current_command(self):
    '''
    Build the GCode string that applies this driver's per-axis current
    settings, followed by a short dwell so the new currents take effect.
    '''
    axis_settings = ' '.join(
        '{}{}'.format(axis, value)
        for axis, value in sorted(self.current.items()))
    command = '{} {} {}P{}'.format(
        GCODES['SET_CURRENT'], axis_settings,
        GCODES['DWELL'], CURRENT_CHANGE_DELAY)
    log.debug("_generate_current_command: {}".format(command))
    return command
def _storage_init(self):
    """Initialize the storage backend if it has not been set up yet."""
    storage = self._storage
    if storage.initialized:
        return
    storage.init(self._module._py3_wrapper, self._is_python_2)
def fill_parentidid2obj_r1(self, id2obj_user, child_obj):
    """Recursively add all parent/relationship objects of *child_obj* to
    *id2obj_user*, keyed by their item IDs."""
    for parent in self._getobjs_higher(child_obj):
        if parent.item_id in id2obj_user:
            continue  # already collected; also stops repeated walks up shared ancestors
        id2obj_user[parent.item_id] = parent
        self.fill_parentidid2obj_r1(id2obj_user, parent)
def append_from_list(self, content, fill_title=False):
    """Append rows built from a list of tuples of strings.

    Args:
        content (list): list of tuples of strings. Each tuple is a row.
        fill_title (bool): if true, the first tuple in the list is
            rendered with TableTitle cells instead of TableItem cells.
    """
    for row_index, row in enumerate(content):
        tr = TableRow()
        use_title_cells = fill_title and row_index == 0
        for column_index, item in enumerate(row):
            cell = TableTitle(item) if use_title_cells else TableItem(item)
            tr.append(cell, str(column_index))
        self.append(tr, str(row_index))
def getTotalPrice(self):
    """Return the price with VAT added (VAT is given as a percentage)."""
    # Missing/None values are treated as zero.
    price = float(self.getPrice() or 0)
    vat = float(self.getVAT() or 0)
    return price + price * vat / 100
def xross_listener(http_method=None, **xross_attrs):
    """Instructs xross to handle AJAX calls from the moment it is called.

    Must be placed in a view decorated with `@xross_view()`; the handler
    is fetched from the calling view's local `request` variable.

    :param str http_method: GET or POST. To be used as a source of data for xross.

    :param dict xross_attrs: xross handler attributes.
        Those attributes will be available in operation functions
        in `xross` keyword argument.
    """
    # The decorated view stashed the handler on its local `request`;
    # reach into the caller's frame to get it.
    caller_locals = currentframe().f_back.f_locals
    handler = caller_locals['request']._xross_handler
    handler.set_attrs(**xross_attrs)
    if http_method is not None:
        handler.http_method = http_method
    handler.dispatch()
def get_port_profile_for_intf_output_interface_interface_type(self, **kwargs):
    """Auto Generated Code

    Builds the <get_port_profile_for_intf> XML request with the given
    ``interface_type`` and passes it to the callback (``self._callback``
    unless a ``callback`` keyword is supplied).
    """
    request = ET.Element("get_port_profile_for_intf")
    output = ET.SubElement(request, "output")
    interface = ET.SubElement(output, "interface")
    interface_type = ET.SubElement(interface, "interface-type")
    interface_type.text = kwargs.pop('interface_type')
    callback = kwargs.pop('callback', self._callback)
    return callback(request)
def get_iss(self, key):
    """Return the Issuer ID stored under *key* in the state database.

    :param key: Key to the information in the state database
    :raises KeyError: If no state is stored under *key*
    :return: The issuer ID
    """
    state = self.get_state(key)
    if state:
        return state['iss']
    raise KeyError(key)
def after(self, value):
    """Return a copy of this query with the Query.Op.After operator and
    the given comparison value.

    Functionally the same as the lessThan operation, but reads better in
    visual queries for things like dates.

    :param value: <variant>
    :return: <Query>

    :usage |>>> from orb import Query as Q
           |>>> query = Q('dateStart').after(date.today())
           |>>> print query
           |dateStart after 2011-10-10
    """
    result = self.copy()
    result.setOp(Query.Op.After)
    result.setValue(value)
    return result
def huge_words_and_punctuation_challenge():
    """Build a challenge from two random dictionary words joined by punctuation.

    Mostly used to test Issue 39 -
    http://code.google.com/p/django-simple-captcha/issues/detail?id=39
    """
    with open(settings.CAPTCHA_WORDS_DICTIONARY, 'rb') as fd:
        lines = fd.readlines()
    # Keep drawing word pairs until the combined length fits the bounds.
    while True:
        first = random.choice(lines).strip()
        second = random.choice(lines).strip()
        joiner = random.choice(settings.CAPTCHA_PUNCTUATION)
        word = '%s%s%s' % (first, joiner, second)
        if settings.CAPTCHA_DICTIONARY_MIN_LENGTH <= len(word) <= settings.CAPTCHA_DICTIONARY_MAX_LENGTH:
            break
    return word.upper(), word.lower()
def backup(path, password_file=None):
    """Replace a file's contents with its decrypted counterpart, stashing
    the original ciphertext and a SHA-1 of the plaintext for later
    retrieval."""
    vault = VaultLib(get_vault_password(password_file))
    with open(path, 'r') as source:
        encrypted_data = source.read()
    # An `is_encrypted` check is used instead of try/except: the
    # exception raised on decrypt failure is just `AnsibleError`, too
    # broad to catch without suppressing unrelated problems.
    if not vault.is_encrypted(encrypted_data):
        return
    decrypted_data = vault.decrypt(encrypted_data)
    # Create the atk vault directory for this file ...
    atk_path = os.path.join(ATK_VAULT, path)
    mkdir_p(atk_path)
    # ... stash the original ciphertext ...
    with open(os.path.join(atk_path, 'encrypted'), 'wb') as out:
        out.write(encrypted_data)
    # ... and a fingerprint of the plaintext.
    with open(os.path.join(atk_path, 'hash'), 'wb') as out:
        out.write(hashlib.sha1(decrypted_data).hexdigest())
    # Finally replace the encrypted file with the decrypted one.
    with open(path, 'wb') as out:
        out.write(decrypted_data)
def send_single_image(self, sender, receiver, media_id):
    """Send a one-to-one (single chat) image message.

    :param sender: Sender ID.
    :param receiver: Receiving member ID.
    :param media_id: Media ID of the image, obtainable via the media
        upload API.
    :return: Parsed JSON response.
    """
    receiver_type = 'single'
    return self.send_image(sender, receiver_type, receiver, media_id)
def update(tgt, tgt_type='glob', clear=False, mine_functions=None):
    '''
    .. versionadded:: 2017.7.0

    Update the mine data on a certain group of minions.

    tgt
        Which minions to target for the execution.

    tgt_type: ``glob``
        The type of ``tgt``.

    clear: ``False``
        Boolean flag specifying whether updating will clear the existing
        mines, or will update. Default: ``False`` (update).

    mine_functions
        Update the mine data on certain functions only.
        This feature can be used when updating the mine for functions
        that require refresh at different intervals than the rest of
        the functions specified under ``mine_functions`` in the
        minion/master config or pillar.

    CLI Example:

    .. code-block:: bash

        salt-run mine.update '*'
        salt-run mine.update 'juniper-edges' tgt_type='nodegroup'
    '''
    # Delegate to the execution module on the targeted minions.
    return __salt__['salt.execute'](tgt,
                                    'mine.update',
                                    tgt_type=tgt_type,
                                    clear=clear,
                                    mine_functions=mine_functions)
def docgraph2freqt(docgraph, root=None, include_pos=False,
                   escape_func=FREQT_ESCAPE_FUNC):
    """Convert a document graph into a FREQT string.

    When *root* is None, every sentence of the graph is converted and the
    results are joined by newlines; otherwise only the subtree under
    *root* is converted.
    """
    def convert(sentence_root):
        return sentence2freqt(docgraph, sentence_root,
                              include_pos=include_pos,
                              escape_func=escape_func)

    if root is None:
        return u"\n".join(convert(sentence) for sentence in docgraph.sentences)
    return convert(root)
def _max(self):
    """Getter for the maximum series value."""
    # An explicit upper bound on the range wins over the data itself.
    if self.range and self.range[1] is not None:
        return self.range[1]
    return max(self._values) if self._values else None
def relative_path(sub_directory='', function_index=1):
    """Resolve a path relative to the *caller's* source file.

    :param sub_directory: str (slash-separated) or list of path segments
        relative to the caller's directory
    :param function_index: int, number of stack frames to walk back to
        find the caller
    :return: str of the full path
    """
    frame = inspect.currentframe()
    for _ in range(function_index):
        frame = frame.f_back
    # Interactive shells inject a 'run_code' frame; skip past it.
    if frame.f_code.co_name == 'run_code':
        frame = frame.f_back
    segments = sub_directory
    if not isinstance(segments, list):
        segments = segments.replace('\\', '/').split('/')
    path = os.path.split(frame.f_code.co_filename)[0]
    if segments:
        path = os.path.abspath(os.path.join(path, *segments))
    return path
def get_console_width() -> int:
    """Return the current console window's width in characters.

    Falls back to 80 columns whenever the real width cannot be
    determined.

    :return: The current console window's width.
    """
    global _IN_QT
    # Probe for a QT console only once -- the check is expensive
    # (ImportErrors + isinstance) and would slow down frequent calls.
    if _IN_QT is None:
        _IN_QT = _in_qtconsole()
    try:
        if _IN_QT:
            # QTConsole determines and handles the max line length itself.
            return sys.maxsize
        if os.name == 'nt':
            width = _get_windows_console_width()
        else:
            width = _get_linux_console_width()
        return width if width > 0 else 80
    except Exception:
        # Default value.
        return 80
def getRandomSequence(length=500):
    """Generate a random fasta-style header and a random nucleotide sequence."""
    header_chars = ['A', 'C', '0', '9', ' ', '\t']
    fastaHeader = ""
    for _ in xrange(int(random.random() * 100)):
        fastaHeader += random.choice(header_chars)
    # A/C/T/G are weighted 5x relative to the single 'N'.
    bases = ['A', 'C', 'T', 'G'] * 5 + ['N']
    sequence = "".join(random.choice(bases)
                       for _ in xrange(int(random.random() * length)))
    return (fastaHeader, sequence)
def construct_item_args(self, domain_event):
    """Construct the attributes of a sequenced item from *domain_event*."""
    event_attrs = domain_event.__dict__
    # The sequence this event belongs to.
    sequence_id = event_attrs[self.sequence_id_attr_name]
    # Position of the event within the sequence (may be absent).
    position = getattr(domain_event, self.position_attr_name, None)
    # Topic string and serialised state of the event.
    topic, state = self.get_item_topic_and_state(domain_event.__class__, event_attrs)
    # Extra fields derived from other event attributes; these populate
    # database columns only and deliberately don't affect the hash.
    other_args = tuple(getattr(domain_event, name) for name in self.other_attr_names)
    return (sequence_id, position, topic, state) + other_args
def _archive_single_dir(archive):
    """Check whether every member of *archive* lives under one top-level directory.

    :param archive:
        An archive from _open_archive()

    :return:
        None if not a single top level directory in archive, otherwise a
        unicode string of the top level directory name
    """
    common_root = None
    for info in _list_archive_members(archive):
        name = _info_name(info)
        if name in ('.', '/'):
            continue
        # Members may use either separator style; split on whichever appears.
        if '/' in name:
            top = name.split('/', 1)[0]
        elif '\\' in name:
            top = name.split('\\', 1)[0]
        else:
            top = name
        if common_root is None:
            common_root = top
        elif common_root != top:
            return None
    return common_root
def update_port_statuses_cfg(self, context, port_ids, status):
    """Update the operational statuses of a list of router ports.

    This is called by the Cisco cfg agent to push the status of a batch
    of ports into the L3 plugin.

    :param context: contains user information
    :param port_ids: list of ids of all the ports for the given status
    :param status: PORT_STATUS_ACTIVE/PORT_STATUS_DOWN.
    """
    l3plugin = self._l3plugin
    l3plugin.update_router_port_statuses(context, port_ids, status)
def _TemplateNamesToFiles(self, template_str):
    """Parse a colon-separated string of template names into open file handles.

    If any open fails, every handle opened so far is closed before the
    error propagates, so no file handles leak.
    """
    template_files = []
    try:
        for name in template_str.split(":"):
            template_files.append(
                open(os.path.join(self.template_dir, name), "r"))
    except:  # noqa
        for handle in template_files:
            handle.close()
        raise
    return template_files
def _overwrite_special_dates(midnight_utcs,
                             opens_or_closes,
                             special_opens_or_closes):
    """Overwrite dates in *opens_or_closes* with corresponding dates in
    *special_opens_or_closes*, using *midnight_utcs* for alignment.

    :param midnight_utcs: index of midnights, aligned with opens_or_closes
    :param opens_or_closes: index of open/close times to be overwritten in place
    :param special_opens_or_closes: series of special open/close times,
        indexed by date
    :raises ValueError: if the inputs are misaligned or a special date is
        not a trading day
    """
    # Short circuit when nothing to apply.
    if not len(special_opens_or_closes):
        return

    len_m, len_oc = len(midnight_utcs), len(opens_or_closes)
    if len_m != len_oc:
        # BUG FIX: the format string has two %d placeholders, so both
        # lengths must be supplied as a tuple; previously `% len_m, len_oc`
        # formatted only one value and raised TypeError instead of the
        # intended ValueError.
        raise ValueError(
            "Found misaligned dates while building calendar.\n"
            "Expected midnight_utcs to be the same length as open_or_closes,\n"
            "but len(midnight_utcs)=%d, len(open_or_closes)=%d"
            % (len_m, len_oc)
        )

    # Find the array indices corresponding to each special date.
    indexer = midnight_utcs.get_indexer(special_opens_or_closes.index)

    # -1 indicates that no corresponding entry was found. If any -1s are
    # present, then we have special dates that doesn't correspond to any
    # trading day.
    if -1 in indexer:
        bad_dates = list(special_opens_or_closes[indexer == -1])
        raise ValueError("Special dates %s are not trading days." % bad_dates)

    # NOTE: This is a slightly dirty hack. We're in-place overwriting the
    # internal data of an Index, which is conceptually immutable. Since we're
    # maintaining sorting, this should be ok, but this is a good place to
    # sanity check if things start going haywire with calendar computations.
    opens_or_closes.values[indexer] = special_opens_or_closes.values
def best_trial_tid(self, rank=0):
    """Return the tid of the trial ranked *rank* by loss.

    rank=0 means the best model,
    rank=1 means second best, and so on.
    """
    ok_trials = [t for t in self.trials if t['result']['status'] == STATUS_OK]
    if not ok_trials:
        return None
    losses = [float(t['result']['loss']) for t in ok_trials]
    assert not np.any(np.isnan(losses))
    # argsort().argsort() yields each trial's rank; pick the requested one.
    position = np.where(np.argsort(losses).argsort() == rank)[0][0]
    return ok_trials[position]["tid"]
def delta(self, local=False):
    """Return the difference (number of days) between the end and start values."""
    start, end = self.get(local)
    return end - start
def validate_cidr(s):
    """Validate a CIDR notation ip address.

    The string is considered a valid CIDR address if it consists of a
    valid IPv6 address in hextet format followed by a forward slash (/)
    and a bit mask length (0-128).

    >>> validate_cidr('::/128')
    True
    >>> validate_cidr('::/0')
    True
    >>> validate_cidr('fc00::/7')
    True
    >>> validate_cidr('::ffff:0:0/96')
    True
    >>> validate_cidr('::')
    False
    >>> validate_cidr('::/129')
    False
    >>> validate_cidr(None) # doctest: +IGNORE_EXCEPTION_DETAIL
    Traceback (most recent call last):
    ...
    TypeError: expected string or buffer

    :param s: String to validate as a CIDR notation ip address.
    :type s: str
    :returns: ``True`` if a valid CIDR address, ``False`` otherwise.
    :raises: TypeError
    """
    if not _CIDR_RE.match(s):
        return False
    ip, mask = s.split('/')
    if not validate_ip(ip):
        return False
    # The regex only bounds the digit count, so enforce the 0-128 range here.
    return int(mask) <= 128
def register_sub(self, o):
    """Register *o* as a suboption handler for `self`.

    :raises OptionConflictError: if a handler is already registered for
        ``o.subopt``.
    """
    if o.subopt in self.subopt_map:
        raise OptionConflictError(
            "conflicting suboption handlers for `%s'" % o.subopt, o)
    self.subopt_map[o.subopt] = o
def format_args(options):
    """Convert a dict of options into a CLI arguments list.

    Keys with underscores become dashed options, ``True`` values become
    bare flags, sequences become comma-joined values, and everything
    else becomes ``--key=value``.
    """
    args = list()
    for key, value in options.items():
        # convert foo_bar key into --foo-bar option
        flag = key.replace('_', '-')
        if value is True:
            # key: True            -> --key
            args.append('--{}'.format(flag))
        elif is_sequence(value):
            # key: ['foo', 'bar']  -> --key=foo,bar
            joined = ','.join(str(item) for item in value)
            args.append('--{}={}'.format(flag, joined))
        else:
            # key: 'foo'           -> --key=foo
            args.append('--{}={}'.format(flag, value))
    return args
def delete(self, req, driver):
    """Delete a network on the cloud selected by the request.

    NOTE(review): the body below references the bare name ``id``, which is
    never defined here and therefore resolves to the builtin ``id``
    function -- almost certainly a missing ``id`` parameter or a lookup
    from ``req``; confirm against the router that dispatches to this
    controller before relying on this method.

    :param req: Type object Request
    :param driver: cloud driver that performs the actual network deletion
    :return: dict describing the performed action and the driver response
    """
    # ``id`` here is the *builtin* function, not a network id -- see NOTE above.
    response = driver.delete_network(req.params, id)
    data = {'action': "delete",
            'controller': "network",
            'id': id,
            'cloud': req.environ['calplus.cloud'],
            'response': response}
    return data
def _logfile_sigterm_handler(signum, *_):
    # type: (...) -> None
    """Handle exit signals and write out a log file.

    :param signum: signal number delivered by the interpreter (first
        positional argument of a signal handler).

    Raises:
        SystemExit: Contains the signal number as the return code.
    """
    logging.error('Received SIGTERM.')
    write_logfile()
    print('Received signal. Please see the log file for more information.',
          file=sys.stderr)
    # BUG FIX: previously this called ``sys.exit(signal)``, passing the
    # ``signal`` *module* object (exit status 1 plus the module repr on
    # stderr). Exit with the actual signal number as documented.
    sys.exit(signum)
def CacheStorage_requestEntries(self, cacheId, skipCount, pageSize):
    """Function path: CacheStorage.requestEntries
        Domain: CacheStorage
        Method name: requestEntries

        Parameters:
            Required arguments:
                'cacheId' (type: CacheId) -> ID of cache to get entries from.
                'skipCount' (type: integer) -> Number of records to skip.
                'pageSize' (type: integer) -> Number of records to fetch.
        Returns:
            'cacheDataEntries' (type: array) -> Array of object store data entries.
            'hasMore' (type: boolean) -> If true, there are more entries to fetch in the given range.

        Description: Requests data from cache.
    """
    assert isinstance(skipCount, (int,)
        ), "Argument 'skipCount' must be of type '['int']'. Received type: '%s'" % type(
        skipCount)
    assert isinstance(pageSize, (int,)
        ), "Argument 'pageSize' must be of type '['int']'. Received type: '%s'" % type(
        pageSize)
    return self.synchronous_command('CacheStorage.requestEntries',
                                    cacheId=cacheId,
                                    skipCount=skipCount,
                                    pageSize=pageSize)
def _split_source_page(self, path):
    """Split a source file into metadata lines and content lines.

    The file must begin with a triple-dashed line; metadata runs up to
    the next triple-dashed line, and everything after that is content.
    Exits the process with an error when either delimiter is missing.
    """
    with codecs.open(path, "rb", "utf-8") as fd:
        lines = fd.readlines()

    delimiter = "---\n"
    if lines[0] != delimiter:
        logging.error("{} first line must be triple-dashed!".format(path))
        sys.exit(1)

    # Collect lines after the opening delimiter until the closing one is
    # found; bail out if the file ends first.
    metadata_textlist = []
    idx = 1
    total = len(lines)
    while True:
        metadata_textlist.append(lines[idx])
        idx += 1
        if idx >= total:
            logging.error("{} doesn't have end triple-dashed!".format(path))
            sys.exit(1)
        if lines[idx] == delimiter:
            break

    content = lines[idx + 1:]
    return metadata_textlist, content