signature
stringlengths
29
44.1k
implementation
stringlengths
0
85.2k
def determine_bpi ( data , frames , EMPTY = b"\x00" * 10 ) : """Takes id3v2.4 frame data and determines if ints or bitpaddedints should be used for parsing . Needed because iTunes used to write normal ints for frame sizes ."""
# count number of tags found as BitPaddedInt and how far past o = 0 asbpi = 0 while o < len ( data ) - 10 : part = data [ o : o + 10 ] if part == EMPTY : bpioff = - ( ( len ( data ) - o ) % 10 ) break name , size , flags = struct . unpack ( '>4sLH' , part ) size = BitPaddedInt ( size ) o += 10 + size if PY3 : try : name = name . decode ( "ascii" ) except UnicodeDecodeError : continue if name in frames : asbpi += 1 else : bpioff = o - len ( data ) # count number of tags found as int and how far past o = 0 asint = 0 while o < len ( data ) - 10 : part = data [ o : o + 10 ] if part == EMPTY : intoff = - ( ( len ( data ) - o ) % 10 ) break name , size , flags = struct . unpack ( '>4sLH' , part ) o += 10 + size if PY3 : try : name = name . decode ( "ascii" ) except UnicodeDecodeError : continue if name in frames : asint += 1 else : intoff = o - len ( data ) # if more tags as int , or equal and bpi is past and int is not if asint > asbpi or ( asint == asbpi and ( bpioff >= 1 and intoff <= 1 ) ) : return int return BitPaddedInt
async def joinTeleLayer(self, url, indx=None):
    '''
    Convenience function to join a remote telepath layer
    into this cortex and default view.
    '''
    # Describe the remote layer, then register it with the cortex.
    layrconf = {
        'type': 'remote',
        'owner': 'root',
        'config': {'url': url},
    }
    layr = await self.addLayer(**layrconf)
    # Attach the new layer to the default view at the requested position.
    await self.view.addLayer(layr, indx=indx)
    return layr.iden
def refresh(self, only_closed=False):
    """Refresh ports status.

    Args:
        only_closed: check status only for currently closed ports.
    """
    if only_closed:
        # BUG FIX: `filter` returns a lazy iterator on Python 3. The original
        # code consumed it inside `difference()` and then re-used the already
        # exhausted iterator in `union()`, so reopened ports were removed from
        # the closed set but never added to the open set. Materialize once so
        # both set operations see the same elements.
        opened = set(filter(self.__check_port, self.__closed))
        self.__closed = self.__closed.difference(opened)
        self.__ports = self.__ports.union(opened)
    else:
        # Re-check every known port, open or closed.
        ports = self.__closed.union(self.__ports)
        self.__ports = set(filter(self.__check_port, ports))
        self.__closed = ports.difference(self.__ports)
def _check_seed(seed):
    """If possible, convert `seed` into a valid form for Stan (an integer
    between 0 and MAX_UINT, inclusive). If not possible, use a random seed
    instead and raise a warning if `seed` was not provided as `None`."""
    def _warn_and_discard(message):
        # Log the problem and signal that a random fallback is needed.
        logger.warning(message)
        return None

    if isinstance(seed, (Number, string_types)):
        try:
            candidate = int(seed)
        except ValueError:
            seed = _warn_and_discard("`seed` must be castable to an integer")
        else:
            if candidate < 0:
                seed = _warn_and_discard("`seed` may not be negative")
            elif candidate > MAX_UINT:
                raise ValueError('`seed` is too large; max is {}'.format(MAX_UINT))
            else:
                seed = candidate
    elif isinstance(seed, np.random.RandomState):
        # Draw a concrete integer seed from the provided RandomState.
        seed = seed.randint(0, MAX_UINT)
    elif seed is not None:
        seed = _warn_and_discard('`seed` has unexpected type')
    # Fall back to a random seed whenever nothing usable was supplied.
    return random.randint(0, MAX_UINT) if seed is None else seed
def _autodiscover(self):
    """Discovers panels to register from the current dashboard module."""
    # Guard: discovery only ever runs once per dashboard.
    if getattr(self, "_autodiscover_complete", False):
        return
    panels_to_discover = []
    panel_groups = []
    # If we have a flat iterable of panel names, wrap it again so
    # we have a consistent structure for the next step.
    if all([isinstance(i, six.string_types) for i in self.panels]):
        self.panels = [self.panels]
    # Now iterate our panel sets.
    default_created = False
    for panel_set in self.panels:
        # Instantiate PanelGroup classes.
        if not isinstance(panel_set, collections.Iterable) and \
                issubclass(panel_set, PanelGroup):
            panel_group = panel_set(self)
        # Check for nested tuples, and convert them to PanelGroups
        elif not isinstance(panel_set, PanelGroup):
            panel_group = PanelGroup(self, panels=panel_set)
        # NOTE(review): if panel_set is already a PanelGroup *instance*,
        # neither branch assigns panel_group and the previous iteration's
        # value is reused — confirm this is intended upstream.
        # Put our results into their appropriate places
        panels_to_discover.extend(panel_group.panels)
        panel_groups.append((panel_group.slug, panel_group))
        if panel_group.slug == DEFAULT_PANEL_GROUP:
            default_created = True
    # Plugin panels can be added to a default panel group. Make sure such a
    # default group exists.
    if not default_created:
        default_group = PanelGroup(self)
        panel_groups.insert(0, (default_group.slug, default_group))
    self._panel_groups = collections.OrderedDict(panel_groups)
    # Do the actual discovery
    package = '.'.join(self.__module__.split('.')[:-1])
    mod = import_module(package)
    for panel in panels_to_discover:
        try:
            # Snapshot the registry so a failed import rolls back cleanly.
            before_import_registry = copy.copy(self._registry)
            import_module('.%s.panel' % panel, package)
        except Exception:
            self._registry = before_import_registry
            # Re-raise only when the submodule exists but failed to import;
            # a genuinely missing submodule is tolerated.
            if module_has_submodule(mod, panel):
                raise
    self._autodiscover_complete = True
def _teardown ( self ) : "Handles the restoration of any potential global state set ."
self . example . after ( self . context ) if self . is_root_runner : run . after_all . execute ( self . context ) # self . context = self . context . _ parent self . has_ran = True
def send_confirmation_email(self, confirmation_id, email_dict):
    """Sends a confirmation by email.

    If you want to send your email to more than one person do:
    'recipients': {'to': ['bykof@me.com', 'mbykovski@seibert-media.net']}}

    :param confirmation_id: the confirmation id
    :param email_dict: the email dict
    :return dict
    """
    # Delegate to the shared POST helper with the EMAIL command.
    request_kwargs = dict(
        resource=CONFIRMATIONS,
        billomat_id=confirmation_id,
        send_data=email_dict,
        command=EMAIL,
    )
    return self._create_post_request(**request_kwargs)
def auth_keys(user=None, config='.ssh/authorized_keys', fingerprint_hash_type=None):
    '''
    Return the authorized keys for users

    CLI Example:

    .. code-block:: bash

        salt '*' ssh.auth_keys
        salt '*' ssh.auth_keys root
        salt '*' ssh.auth_keys user=root
        salt '*' ssh.auth_keys user="[user1, user2]"
    '''
    single_user = False
    if not user:
        user = __salt__['user.list_users']()
    if not isinstance(user, list):
        # Keep the historical flat-dict output when a single user was given.
        user = [user]
        single_user = True
    keys = {}
    for username in user:
        path = None
        try:
            path = _get_config_file(username, config)
        except CommandExecutionError:
            # Unresolvable home/config for this user: skip it silently.
            pass
        if path and os.path.isfile(path):
            keys[username] = _validate_keys(path, fingerprint_hash_type)
    if single_user:
        return keys.get(user[0], {})
    return keys
def log_exception(
    self,
    typ: "Optional[Type[BaseException]]",
    value: Optional[BaseException],
    tb: Optional[TracebackType],
) -> None:
    """Override to customize logging of uncaught exceptions.

    By default logs instances of `HTTPError` as warnings without stack
    traces (on the ``tornado.general`` logger), and all other exceptions
    as errors with stack traces (on the ``tornado.application`` logger).

    .. versionadded:: 3.1
    """
    if isinstance(value, HTTPError):
        # Expected HTTP errors: warn without a traceback.
        if value.log_message:
            fmt = "%d %s: " + value.log_message
            gen_log.warning(fmt, value.status_code,
                            self._request_summary(), *value.args)
    else:
        # Anything else is a genuine bug: log with full traceback.
        app_log.error(  # type: ignore
            "Uncaught exception %s\n%r",
            self._request_summary(),
            self.request,
            exc_info=(typ, value, tb),
        )
def draw_line(self, data, coordinates, style, label, mplobj=None):
    """Draw a line.

    By default, draw the line via the draw_path() command. Some renderers
    might wish to override this and provide more fine-grained behavior.

    In matplotlib, lines are generally created via the plt.plot() command,
    though this command also can create marker collections.

    Parameters
    ----------
    data : array_like
        A shape (N, 2) array of datapoints.
    coordinates : string
        A string code, which should be either 'data' for data coordinates,
        or 'figure' for figure (pixel) coordinates.
    style : dictionary
        a dictionary specifying the appearance of the line.
    mplobj : matplotlib object
        the matplotlib plot element which generated this line
    """
    # One move-to, then a line-to for each remaining vertex.
    vertex_count = data.shape[0]
    pathcodes = ['M'] + ['L'] * (vertex_count - 1)
    # Translate line styling keys into the equivalent path styling keys.
    pathstyle = dict(facecolor='none', **style)
    pathstyle['edgecolor'] = pathstyle.pop('color')
    pathstyle['edgewidth'] = pathstyle.pop('linewidth')
    self.draw_path(data=data, coordinates=coordinates,
                   pathcodes=pathcodes, style=pathstyle, mplobj=mplobj)
def save(self):
    """Save the state to the JSON file in the config dir."""
    logger.debug("Save the GUI state to `%s`.", self.path)
    # These keys describe where the state lives, not the state itself.
    excluded = ('config_dir', 'name')
    state = {key: val for key, val in self.items() if key not in excluded}
    _save_json(self.path, state)
async def clean_up_clients_async(self):
    """Resets the pump; swallows all exceptions."""
    # Nothing to clean if no receiver was ever attached.
    if not self.partition_receiver:
        return
    if self.eh_client:
        await self.eh_client.stop_async()
    self.partition_receiver = None
    self.partition_receive_handler = None
    self.eh_client = None
def get_json_argument(self, name, default=None):
    """Find and return the argument with key 'name' from JSON request data.

    Similar to Tornado's get_argument() method.

    :param str name: The name of the json key you want to get the value for
    :param bool default: The default value if nothing is found
    :returns: value of the argument name request
    """
    # `None` means "no default": substitute the missing-argument sentinel.
    if default is None:
        default = self._ARG_DEFAULT
    if not self.request.arguments:
        self.load_json()
    arguments = self.request.arguments
    if name in arguments:
        return arguments[name]
    if default is self._ARG_DEFAULT:
        msg = "Missing argument '%s'" % name
        self.logger.debug(msg)
        self.raise_error(400, msg)
    self.logger.debug(
        "Returning default argument %s, as we couldn't find '%s' in %s"
        % (default, name, self.request.arguments))
    return default
def moderate_view(self, request, object_id, extra_context=None):
    """Handles moderate object tool through a somewhat hacky changelist view
    whose queryset is altered via CommentAdmin.get_changelist to only list
    comments for the object under review."""
    opts = self.model._meta
    app_label = opts.app_label
    # Build a throw-away CommentAdmin; its changelist serves as the
    # moderation UI.
    view = CommentAdmin(model=Comment, admin_site=self.admin_site)
    view.list_filter = ()
    view.list_display = (
        'comment_text',
        'moderator_reply',
        '_user',
        'submit_date',
    )
    model = self.model
    obj = get_object_or_404(model, pk=unquote(object_id))
    # Stash the object on the request so CommentAdmin.get_changelist can
    # restrict the queryset to this object's comments.
    request.obj = obj
    # Template resolution order: model-specific, app-specific, global.
    view.change_list_template = self.change_list_template or [
        'admin/%s/%s/moderate.html' % (app_label, opts.object_name.lower()),
        'admin/%s/moderate.html' % app_label,
        'admin/moderate.html'
    ]
    orig_has_change_permission = self.has_change_permission(request, obj)
    if not orig_has_change_permission:
        raise PermissionDenied
    # NOTE(review): the incoming extra_context argument is discarded here —
    # confirm callers never pass a context they expect to be merged.
    extra_context = {
        'opts': opts,
        'original': obj,
        'orig_has_change_permission': orig_has_change_permission,
    }
    return view.changelist_view(request, extra_context)
def add_timeline_callback(self, timeline_events, callback):
    """Register a callback for a specific timeline event."""
    if not timeline_events:
        return False
    # Accept a single event dict as well as a tuple/list of them.
    if not isinstance(timeline_events, (tuple, list)):
        timeline_events = [timeline_events]
    for event in timeline_events:
        if not isinstance(event, dict):
            raise AbodeException((ERROR.EVENT_CODE_MISSING))
        code = event.get('event_code')
        if not code:
            raise AbodeException((ERROR.EVENT_CODE_MISSING))
        _LOGGER.debug("Subscribing to timeline event: %s", event)
        self._timeline_callbacks[code].append((callback))
    return True
def get_nt_system_uid():
    r"""Get the MachineGuid from
    HKEY_LOCAL_MACHINE\Software\Microsoft\Cryptography\MachineGuid
    """
    # Python 2 exposes the module as _winreg; Python 3 as winreg.
    try:
        import _winreg as winreg
    except ImportError:
        import winreg
    registry = winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE)
    try:
        crypto_key = winreg.OpenKey(registry, r"Software\Microsoft\Cryptography")
        try:
            # QueryValueEx returns (value, type); only the value is wanted.
            value, _ = winreg.QueryValueEx(crypto_key, "MachineGuid")
            return value
        finally:
            crypto_key.Close()
    finally:
        registry.Close()
def _g_2 ( self ) : """omega2 < omega < omega3"""
return 3 / ( self . _vertices_omegas [ 3 ] - self . _vertices_omegas [ 0 ] ) * ( self . _f ( 1 , 2 ) * self . _f ( 2 , 0 ) + self . _f ( 2 , 1 ) * self . _f ( 1 , 3 ) )
def r_oauth_authorized(self):
    """Route for OAuth2 Authorization callback

    :return: {"template"}
    """
    resp = self.authobj.authorized_response()
    if resp is None:
        # Provider rejected or user cancelled the authorization.
        return 'Access denied: reason=%s error=%s' % (
            request.args['error'],
            request.args['error_description'])
    session['oauth_token'] = (resp['access_token'], '')
    user = self.authobj.get('user')
    ## TODO this is too specific to Perseids' api model. We should externalize.
    user_info = user.data['user']
    session['oauth_user_uri'] = user_info['uri']
    session['oauth_user_name'] = user_info['full_name']
    # Resume any interrupted navigation, otherwise render the landing page.
    if session.get('next') is not None:
        return redirect(session['next'])
    return {
        "template": "nemo_oauth_plugin::authorized.html",
        "username": session['oauth_user_name'],
    }
def get_cost_per_mol(self, comp):
    """Get best estimate of minimum cost/mol based on known data.

    Args:
        comp: Composition as a pymatgen.core.structure.Composition

    Returns:
        float of cost/mol
    """
    # Accept either a Composition or anything Composition() can parse.
    if not isinstance(comp, Composition):
        comp = Composition(comp)
    decomp = self.get_lowest_decomposition(comp)
    return sum(entry.energy_per_atom * amount * comp.num_atoms
               for entry, amount in decomp.items())
def build_db_cmd(self, fname):
    """Return database format/build command."""
    # db_func yields a sequence; only the leading command is needed here.
    db_info = self.funcs.db_func(fname, self.outdir, self.exes.format_exe)
    return db_info[0]
def process_data(self):
    """Attempt to extract a report from the current data stream contents

    Returns:
        bool: True if further processing is required and process_data
        should be called again.
    """
    further_processing = False
    # Stage 1: a single type byte tells us how big the header will be.
    if self.state == self.WaitingForReportType and len(self.raw_data) > 0:
        self.current_type = self.raw_data[0]
        try:
            self.current_header_size = self.calculate_header_size(self.current_type)
            self.state = self.WaitingForReportHeader
            further_processing = True
        except Exception as exc:
            # Error policy: report through the callback when one is set,
            # otherwise propagate to the caller.
            self.state = self.ErrorState
            if self.error_callback:
                self.error_callback(self.ErrorFindingReportType, str(exc), self.context)
            else:
                raise
    # Stage 2: the full header tells us how big the whole report will be.
    if self.state == self.WaitingForReportHeader and len(self.raw_data) >= self.current_header_size:
        try:
            self.current_report_size = self.calculate_report_size(self.current_type, self.raw_data[:self.current_header_size])
            self.state = self.WaitingForCompleteReport
            further_processing = True
        except Exception as exc:
            self.state = self.ErrorState
            if self.error_callback:
                self.error_callback(self.ErrorParsingReportHeader, str(exc), self.context)
            else:
                raise
    # Stage 3: the whole report is buffered; consume, parse and dispatch it,
    # then return to waiting for the next type byte.
    if self.state == self.WaitingForCompleteReport and len(self.raw_data) >= self.current_report_size:
        try:
            report_data = self.raw_data[:self.current_report_size]
            self.raw_data = self.raw_data[self.current_report_size:]
            report = self.parse_report(self.current_type, report_data)
            self._handle_report(report)
            self.state = self.WaitingForReportType
            further_processing = True
        except Exception as exc:
            self.state = self.ErrorState
            if self.error_callback:
                self.error_callback(self.ErrorParsingCompleteReport, str(exc), self.context)
            else:
                raise
    return further_processing
def _get_kind_and_names(attributes):
    """Gets kind and possible names for :tl:`DocumentAttribute`."""
    kind = 'document'
    possible_names = []
    for attr in attributes:
        if isinstance(attr, types.DocumentAttributeFilename):
            # The filename is the most reliable name, so it goes first.
            possible_names.insert(0, attr.file_name)
        elif isinstance(attr, types.DocumentAttributeAudio):
            kind = 'audio'
            if attr.performer and attr.title:
                possible_names.append('{} - {}'.format(attr.performer, attr.title))
            elif attr.performer:
                possible_names.append(attr.performer)
            elif attr.title:
                possible_names.append(attr.title)
            elif attr.voice:
                # Only classified as a voice note when no performer/title
                # metadata is present (elif chain is deliberate).
                kind = 'voice'
    return kind, possible_names
def flip(f):
    """Flip the order of positional arguments of given function."""
    ensure_callable(f)

    def flipped(*args, **kwargs):
        # Positional arguments are reversed; keyword arguments pass through.
        return f(*reversed(args), **kwargs)

    # Carry over identity metadata so the wrapper reports as `f`.
    functools.update_wrapper(flipped, f, ('__name__', '__module__'))
    return flipped
def ticket_satisfaction_rating_create(self, ticket_id, data, **kwargs):
    "https://developer.zendesk.com/rest_api/docs/core/satisfaction_ratings#create-a-satisfaction-rating"
    # Interpolate the ticket id into the endpoint, then POST the rating.
    api_path = "/api/v2/tickets/{ticket_id}/satisfaction_rating.json".format(
        ticket_id=ticket_id)
    return self.call(api_path, method="POST", data=data, **kwargs)
def get_member_ideas(self, *args, **kwargs):
    """
    :allowed_param: 'memberId'
    :pagination_param: 'page_number', 'page_size'
    """
    # Build the bound endpoint first, then invoke it with the caller's args.
    endpoint = bind_api(
        api=self,
        path='/members/{memberId}/ideas',
        payload_type='idea',
        payload_list=True,
        allowed_param=['memberId'],
        pagination_param=['page_number', 'page_size'])
    return endpoint(*args, **kwargs)
def IntegerSum(input_vertex: vertex_constructor_param_types, label: Optional[str] = None) -> Vertex:
    """Performs a sum across all dimensions

    :param input_vertex: the vertex to have its values summed
    """
    # Coerce the input before handing it to the JVM-side vertex constructor.
    summed_input = cast_to_integer_vertex(input_vertex)
    return Integer(context.jvm_view().IntegerSumVertex, label, summed_input)
def get_nodes(self, request):
    """Generates the nodelist

    :param request:
    :return: list of nodes
    """
    nodes = []
    language = get_language_from_request(request, check_path=True)
    current_site = get_current_site(request)
    page_site = self.instance.node.site
    # Menus attached to a page on a different site render nothing.
    if self.instance and page_site != current_site:
        return []
    categories_menu = False
    posts_menu = False
    config = False
    if self.instance:
        # Cache the BlogConfig lookup per application namespace.
        if not self._config.get(self.instance.application_namespace, False):
            self._config[self.instance.application_namespace] = BlogConfig.objects.get(namespace=self.instance.application_namespace)
        config = self._config[self.instance.application_namespace]
        # Outside toolbar edit mode show only public pages; inside edit
        # mode show only draft pages.
        if not getattr(request, 'toolbar', False) or not request.toolbar.edit_mode_active:
            if self.instance == self.instance.get_draft_object():
                return []
        else:
            if self.instance == self.instance.get_public_object():
                return []
    if config and config.menu_structure in (MENU_TYPE_COMPLETE, MENU_TYPE_CATEGORIES):
        categories_menu = True
    if config and config.menu_structure in (MENU_TYPE_COMPLETE, MENU_TYPE_POSTS):
        posts_menu = True
    if config and config.menu_structure in (MENU_TYPE_NONE,):
        return nodes
    used_categories = []
    if posts_menu:
        posts = Post.objects
        if hasattr(self, 'instance') and self.instance:
            posts = posts.namespace(self.instance.application_namespace).on_site()
        posts = posts.active_translations(language).distinct().select_related('app_config').prefetch_related('translations', 'categories')
        for post in posts:
            post_id = None
            parent = None
            used_categories.extend(post.categories.values_list('pk', flat=True))
            if categories_menu:
                # Nest the post under its first category when categories
                # are also shown.
                category = post.categories.first()
                if category:
                    parent = '{0}-{1}'.format(category.__class__.__name__, category.pk)
                    # NOTE(review): the trailing comma below makes post_id a
                    # 1-tuple rather than a string — presumably unintended;
                    # confirm NavigationNode tolerates a tuple id.
                    post_id = '{0}-{1}'.format(post.__class__.__name__, post.pk),
            else:
                post_id = '{0}-{1}'.format(post.__class__.__name__, post.pk),
            if post_id:
                node = NavigationNode(post.get_title(), post.get_absolute_url(language), post_id, parent)
                nodes.append(node)
    if categories_menu:
        categories = BlogCategory.objects
        if config:
            categories = categories.namespace(self.instance.application_namespace)
        if config and not config.menu_empty_categories:
            # Hide categories with no posts in the current selection.
            categories = categories.active_translations(language).filter(pk__in=used_categories).distinct()
        else:
            categories = categories.active_translations(language).distinct()
        categories = categories.order_by('parent__id', 'translations__name').select_related('app_config').prefetch_related('translations')
        added_categories = []
        for category in categories:
            if category.pk not in added_categories:
                node = NavigationNode(
                    category.name,
                    category.get_absolute_url(),
                    '{0}-{1}'.format(category.__class__.__name__, category.pk),
                    ('{0}-{1}'.format(category.__class__.__name__, category.parent.id) if category.parent else None)
                )
                nodes.append(node)
                added_categories.append(category.pk)
    return nodes
def _format_base_path ( self , api_name ) : """Format the base path name ."""
name = self . app_name if self . app_name != api_name : name = '{0}-{1}' . format ( self . app_name , api_name ) return name
def to_json(self, value):
    """Subclasses should override this method for JSON encoding."""
    # Validation failure is fatal; valid values pass through unchanged.
    if self.is_valid(value):
        return value
    raise ex.SerializeException('Invalid value: {}'.format(value))
def _to_dict ( self ) : '''Returns a dictionary representation of this object'''
return dict ( minimum = self . minimum . _to_dict ( ) , maximum = self . maximum . _to_dict ( ) )
def __prepare_raw_data(self):
    "Format internal __raw_data storage according to usages setting"
    # pre-parsed data should exist
    if not self.__hid_object.ptr_preparsed_data:
        raise HIDError("HID object close or unable to request pre parsed "
            "report data")
    # make sure pre-memory allocation already done
    self.__alloc_raw_data()
    try:
        HidStatus(hid_dll.HidP_InitializeReportForID(self.__report_kind,
            self.__report_id, self.__hid_object.ptr_preparsed_data,
            byref(self.__raw_data), self.__raw_report_size))
    except HIDError:
        # Fallback: at least stamp the report id into the first byte.
        self.__raw_data[0] = self.__report_id
    # check if we have pre-allocated usage storage
    if not self.__usage_data_list:
        # create HIDP_DATA buffer
        max_items = hid_dll.HidP_MaxDataListLength(self.__report_kind,
            self.__hid_object.ptr_preparsed_data)
        if not max_items:
            raise HIDError("Internal error while requesting usage length")
        data_list_type = winapi.HIDP_DATA * max_items
        self.__usage_data_list = data_list_type()
    # reference HIDP_DATA buffer
    data_list = self.__usage_data_list
    # set buttons and values usages first
    n_total_usages = 0
    single_usage = USAGE()
    single_usage_len = c_ulong()
    for data_index, report_item in self.__idx_items.items():
        # NOTE(review): `!= None` (rather than `is not None`) is kept
        # verbatim — confirm report_item.value never overloads __ne__.
        if (not report_item.is_value_array()) and report_item.value != None:
            # set by user, include in request
            if report_item.is_button() and report_item.value:
                # windows just can't handle button arrays!, we just don't
                # know if usage is button array or plain single usage, so
                # we set all usages at once
                single_usage.value = report_item.usage_id
                single_usage_len.value = 1
                HidStatus(hid_dll.HidP_SetUsages(self.__report_kind,
                    report_item.page_id, 0,
                    byref(single_usage), byref(single_usage_len),
                    self.__hid_object.ptr_preparsed_data,
                    byref(self.__raw_data), self.__raw_report_size))
                continue
            elif report_item.is_value() and not report_item.is_value_array():
                data_list[n_total_usages].value.raw_value = report_item.value
            else:
                continue # do nothing
            data_list[n_total_usages].reserved = 0 # reset
            data_list[n_total_usages].data_index = data_index # reference
            n_total_usages += 1
    # set data if any usage is not 'none' (and not any value array)
    if n_total_usages:
        # some usages set
        usage_len = c_ulong(n_total_usages)
        HidStatus(hid_dll.HidP_SetData(self.__report_kind,
            byref(data_list), byref(usage_len),
            self.__hid_object.ptr_preparsed_data,
            byref(self.__raw_data), self.__raw_report_size))
    # set values based on value arrays
    for report_item in self.__value_array_items:
        HidStatus(hid_dll.HidP_SetUsageValueArray(self.__report_kind,
            report_item.page_id,
            0, # all link collections
            report_item.usage_id,
            byref(report_item.value_array),
            len(report_item.value_array),
            self.__hid_object.ptr_preparsed_data,
            byref(self.__raw_data),
            len(self.__raw_data)))
def _getOccurs ( self , e ) : '''return a 3 item tuple'''
minOccurs = maxOccurs = '1' nillable = True return minOccurs , maxOccurs , nillable
def compareTaggers(model1, model2, string_list, module_name):
    """Compare two models. Given a list of strings, prints out tokens & tags
    whenever the two taggers parse a string differently. This is for
    spot-checking models.

    :param model1: a .crfsuite filename
    :param model2: another .crfsuite filename
    :param string_list: a list of strings to be checked
    :param module_name: name of a parser module
    """
    module = __import__(module_name)

    def _open_tagger(model_file):
        # Each model lives under the parser module's directory.
        tagger = pycrfsuite.Tagger()
        tagger.open(module_name + '/' + model_file)
        return tagger

    tagger1 = _open_tagger(model1)
    tagger2 = _open_tagger(model2)
    count_discrepancies = 0
    for string in string_list:
        tokens = module.tokenize(string)
        if not tokens:
            continue
        features = module.tokens2features(tokens)
        tags1 = tagger1.tag(features)
        tags2 = tagger2.tag(features)
        if tags1 == tags2:
            continue
        # The two taggers disagree: print a side-by-side comparison.
        count_discrepancies += 1
        print('\n')
        print("%s. %s" % (count_discrepancies, string))
        print('-' * 75)
        print_spaced('token', model1, model2)
        print('-' * 75)
        for token, tag1, tag2 in zip(tokens, tags1, tags2):
            print_spaced(token, tag1, tag2)
    print("\n\n%s of %s strings were labeled differently" % (count_discrepancies, len(string_list)))
def apply_mflist_budget_obs(list_filename, flx_filename="flux.dat",
                            vol_filename="vol.dat",
                            start_datetime="1-1-1970"):
    """Process a MODFLOW list file to extract flux and volume water budget
    entries.

    Parameters
    ----------
    list_filename : str
        the modflow list file
    flx_filename : str
        the name of the output file with water budget flux information.
        Default is "flux.dat"
    vol_filename : str
        the name of the output file with water budget volume information.
        Default is "vol.dat"
    start_datetime : str
        an str that can be cast to a pandas.TimeStamp. Used to give
        observations a meaningful name

    Returns
    -------
    flx : pandas.DataFrame
        the flux dataframe
    vol : pandas.DataFrame
        the volume dataframe

    Note
    ----
    requires flopy
    """
    try:
        import flopy
    except Exception as e:
        raise Exception("error import flopy: {0}".format(str(e)))
    budget = flopy.utils.MfListBudget(list_filename)
    flx, vol = budget.get_dataframes(start_datetime=start_datetime, diff=True)
    # Write both frames with identical formatting.
    for frame, out_name in ((flx, flx_filename), (vol, vol_filename)):
        frame.to_csv(out_name, sep=' ', index_label="datetime",
                     date_format="%Y%m%d")
    return flx, vol
def autoremove(list_only=False, purge=False):
    '''
    .. versionadded:: 2015.5.0

    Remove packages not required by another package using
    ``apt-get autoremove``.

    list_only : False
        Only retrieve the list of packages to be auto-removed, do not actually
        perform the auto-removal.

    purge : False
        Also remove package config data when autoremoving packages.

        .. versionadded:: 2015.8.0

    CLI Example:

    .. code-block:: bash

        salt '*' pkg.autoremove
        salt '*' pkg.autoremove list_only=True
        salt '*' pkg.autoremove purge=True
    '''
    if list_only:
        # Dry run: answer "no" so apt only prints what it would remove.
        cmd = ['apt-get', '--assume-no']
        if purge:
            cmd.append('--purge')
        cmd.append('autoremove')
        out = _call_apt(cmd, ignore_retcode=True)['stdout']
        packages = []
        in_removal_block = False
        for line in out.splitlines():
            if in_removal_block:
                # Package names are listed on indented continuation lines.
                if line.startswith(' '):
                    packages.extend(line.split())
                else:
                    in_removal_block = False
            elif 'The following packages will be REMOVED:' in line:
                in_removal_block = True
        packages.sort()
        return packages
    # Real removal: diff the installed-package list before and after.
    old = list_pkgs()
    cmd = ['apt-get', '--assume-yes']
    if purge:
        cmd.append('--purge')
    cmd.append('autoremove')
    _call_apt(cmd, ignore_retcode=True)
    __context__.pop('pkg.list_pkgs', None)
    new = list_pkgs()
    return salt.utils.data.compare_dicts(old, new)
def do_set_device(self, args):
    """Set the PLM OS device.

    Device defaults to /dev/ttyUSB0

    Usage:
        set_device device
    Arguments:
        device: Required - INSTEON PLM device
    """
    tokens = args.split()
    if not tokens:
        # No device supplied: explain usage instead of setting anything.
        _LOGGING.error('Device name required.')
        self.do_help('set_device')
        return
    self.tools.device = tokens[0]
def until_synced(self, timeout=None):
    """Return a tornado Future; resolves when all subordinate clients are synced"""
    # Gather one sync future per child client and wait on them together.
    child_futures = [child.until_synced(timeout)
                     for child in dict.values(self.children)]
    yield tornado.gen.multi(child_futures,
                            quiet_exceptions=tornado.gen.TimeoutError)
def sample_prob(probs, rand):
    """Get samples from a tensor of probabilities.

    :param probs: tensor of probabilities
    :param rand: tensor (of the same shape as probs) of random values
    :return: binary sample of probabilities
    """
    # sign() yields -1/0/+1; relu() clamps negatives to 0, giving a
    # binary sample (1 where probs > rand).
    signed_diff = tf.sign(probs - rand)
    return tf.nn.relu(signed_diff)
def _create_hashes ( self , count ) : """Breaks up our hash into slots , so we can pull them out later . Essentially , it splits our SHA / MD5 / etc into X parts ."""
for i in range ( 0 , count ) : # Get 1 / numblocks of the hash blocksize = int ( len ( self . hexdigest ) / count ) currentstart = ( 1 + i ) * blocksize - blocksize currentend = ( 1 + i ) * blocksize self . hasharray . append ( int ( self . hexdigest [ currentstart : currentend ] , 16 ) ) # Workaround for adding more sets in 2019. # We run out of blocks , because we use some for each set , whether it ' s called or not . # I can ' t easily change this without invalidating every hash so far : / # This shouldn ' t reduce the security since it should only draw from one set of these in practice . self . hasharray = self . hasharray + self . hasharray
def image_from_name(name, images):
    """Return an image from a list of images.

    If the name is an exact match, return the last exactly matching image.
    Otherwise, among images whose name starts with `name`, return the one
    with the largest embedded version number.

    see: http://code.activestate.com/recipes/285264-natural-string-sorting/
    """
    prefixed_images = [i for i in images if i.name.startswith(name)]
    exact = [i for i in prefixed_images if i.name == name]
    if exact:
        return exact[-1]

    def _version(image):
        # FIX: the original used a non-raw '\d+' pattern and crashed with
        # AttributeError on names containing no digits; it also built
        # (int, image) tuples for sorting, which can raise TypeError when
        # two images share the same number. Use a key function instead and
        # rank digit-less names lowest.
        match = re.search(r'\d+', image.name)
        return int(match.group(0)) if match else -1

    return max(prefixed_images, key=_version)
def get_ytvideos(query, ilogger):
    """Gets either a list of videos from a playlist or a single video, using
    the first result of a YouTube search.

    Args:
        query (str): The YouTube search query
        ilogger (logging.logger): The logger to log API calls to

    Returns:
        queue (list): The items obtained from the YouTube search
    """
    # Ask the API for the single best match across videos and playlists.
    search_result = ytdiscoveryapi.search().list(
        q=query,
        part="id,snippet",
        maxResults=1,
        type="video,playlist"
    ).execute()
    items = search_result["items"]
    if not items:
        return []
    top = items[0]
    title = top["snippet"]["title"]
    ilogger.info("Queueing {}".format(title))
    result_kind = top["id"]["kind"]
    if result_kind == "youtube#video":
        video_url = "https://www.youtube.com/watch?v={}".format(top["id"]["videoId"])
        return [[video_url, title]]
    if result_kind == "youtube#playlist":
        # Expand the playlist into individual queue entries.
        return get_queue_from_playlist(top["id"]["playlistId"])
    return []
def contains(self, key):
    "Exact matching."
    # follow_bytes returns None when the key path falls off the trie.
    node = self.follow_bytes(key, self.ROOT)
    return False if node is None else self.has_value(node)
def libs ( package , static = False ) :
    """Return the LDFLAGS string reported by pkg-config for ``package``.

    When ``static`` is true, the result also includes libraries needed
    for static linking (i.e., any private libraries).
    """
    _raise_if_not_exists ( package )
    options = _build_options ( '--libs' , static = static )
    return _query ( package , * options )
def execute_by_options ( args ) :
    """Dispatch a parsed command-line argument dict to the matching action.

    Args:
        args (dict): command line argument dictionary
    """
    subcommand = args [ 'subcommand' ]
    if subcommand == 'sphinx' :
        sphinx = Sphinx ( proj_info )
        if args [ 'quickstart' ] :
            sphinx . quickstart ( )
        elif args [ 'gen_code_api' ] :
            sphinx . gen_code_api ( )
        elif args [ 'rst2html' ] :
            sphinx . rst2html ( )
    elif subcommand == 'offline_dist' :
        dist = PyOfflineDist ( )
        if args [ 'freeze_deps' ] :
            dist . freeze_deps ( )
        elif args [ 'download_deps' ] :
            dist . download_deps ( )
        elif args [ 'install_deps' ] :
            dist . install_deps ( )
        elif args [ 'clean_deps' ] :
            dist . clean_deps ( )
        elif args [ 'mkbinary' ] :
            dist . pyinstaller_mkbinary ( args [ 'mkbinary' ] )
        elif args [ 'clean_binary' ] :
            dist . clean_binary ( )
def cudnnGetConvolutionForwardWorkspaceSize ( handle , srcDesc , wDesc , convDesc , destDesc , algo ) :
    """Query the GPU workspace size needed by cudnnConvolutionForward.

    Parameters
    ----------
    handle : cudnnHandle
        Handle to a previously created cuDNN context.
    srcDesc : cudnnTensorDescriptor
        Handle to a previously initialized tensor descriptor.
    wDesc : cudnnFilterDescriptor
        Handle to a previously initialized filter descriptor.
    convDesc : cudnnConvolutionDescriptor
        Previously initialized convolution descriptor.
    destDesc : cudnnTensorDescriptor
        Handle to a previously initialized tensor descriptor.
    algo : cudnnConvolutionFwdAlgo
        Enumerant that specifies the chosen convolution algorithm.

    Returns
    -------
    sizeInBytes : c_size_t
        Amount of GPU memory needed as workspace to execute a forward
        convolution with the specified algo.
    """
    workspace_size = ctypes . c_size_t ( )
    status = _libcudnn . cudnnGetConvolutionForwardWorkspaceSize (
        handle , srcDesc , wDesc , convDesc , destDesc , algo ,
        ctypes . byref ( workspace_size ) )
    cudnnCheckStatus ( status )
    return workspace_size
def check_handle_syntax ( string ) :
    '''Check the syntax of a handle without an index (are prefix and
    suffix there, are there too many slashes?).

    :string: The handle without index, as string prefix/suffix.
    :raise: :exc:`~b2handle.handleexceptions.handleexceptions.HandleSyntaxError`
    :return: True. If it's not ok, exceptions are raised.
    '''
    expected = 'prefix/suffix'
    try :
        parts = string . split ( '/' )
    except AttributeError :
        # A non-string (e.g. None) cannot be split.
        raise handleexceptions . HandleSyntaxError (
            msg = 'The provided handle is None' ,
            expected_syntax = expected )
    if len ( parts ) < 2 :
        raise handleexceptions . HandleSyntaxError (
            msg = 'No slash' , handle = string , expected_syntax = expected )
    if not parts [ 0 ] :
        raise handleexceptions . HandleSyntaxError (
            msg = 'Empty prefix' , handle = string , expected_syntax = expected )
    if not parts [ 1 ] :
        raise handleexceptions . HandleSyntaxError (
            msg = 'Empty suffix' , handle = string , expected_syntax = expected )
    if ':' in string :
        # Handles with an index get the extended check.
        check_handle_syntax_with_index ( string , base_already_checked = True )
    return True
def _cut_hypernodes ( hypergraph ) :
    """Return the cut-nodes of the given hypergraph.

    @type  hypergraph: hypergraph
    @param hypergraph: Hypergraph

    @rtype:  list
    @return: List of cut-nodes.
    """
    # cut_nodes runs on the auxiliary bipartite graph, whose vertices are
    # tagged pairs; keep only those tagged 'n' (plain nodes).
    tagged = cut_nodes ( hypergraph . graph )
    return [ each [ 0 ] for each in tagged if each [ 1 ] == 'n' ]
def main ( ) :
    """Program entry point: build the CLI parser, parse argv, and run it."""
    command_line = cli . Cli ( )
    command_line . parse ( sys . argv [ 1 : ] )
    return command_line . run ( )
def get ( self , key , lang = None ) :
    """Yield triples related to this node, optionally filtered by language.

    :param key: Predicate of the triple
    :param lang: Language of the triple if applicable
    :rtype: Literal or BNode or URIRef
    """
    for obj in self . graph . objects ( self . asNode ( ) , key ) :
        # With no language requested, every object passes through.
        if lang is None or obj . language == lang :
            yield obj
def _make_request ( self , type , path , args , noRetry = False ) :
    """Issue an HTTP request to Blot're.

    Retries the request once after refreshing the access token if it
    failed due to an expired token.
    """
    method = getattr ( requests , type )
    response = method ( path , headers = self . _add_auth_headers ( _JSON_HEADERS ) , ** args )
    if response . status_code in ( 200 , 201 ) :
        return response . json ( )

    can_refresh = ( not noRetry
                   and self . _is_expired_response ( response )
                   and 'refresh_token' in self . creds )
    if can_refresh :
        try :
            self . exchange_refresh_token ( )
        except TokenEndpointError :
            raise _rest_error_from_response ( response )
        # Retry exactly once with the fresh token.
        return self . _make_request ( type , path , args , noRetry = True )
    raise _rest_error_from_response ( response )
def payload ( self ) :
    """Render this resource as a request payload.

    :returns: a dict with ``type`` and ``attributes`` keys, plus ``id``
        when the resource already has one
    """
    body = {
        'type' : self . resource_type ( ) ,
        'attributes' : self . attributes ,
    }
    if self . id :
        body [ 'id' ] = self . id
    return body
def putCtrlConf ( self , eleobj , ctrlkey , val , type = 'raw' ) :
    """Write ``val`` to an element's control PV field.

    :param eleobj: element object in lattice
    :param ctrlkey: element control property, PV name
    :param val: new value for ctrlkey
    :param type: 'raw' (default) writes ``val`` as given; 'real' first
        translates it with the element's unit conversion
    :return: True when the PV was written, False if ``ctrlkey`` is unknown
    """
    if ctrlkey not in eleobj . ctrlkeys :
        return False
    if type == 'raw' :
        value_to_put = val
    else :
        # Translate 'real' values back into raw PV units.
        value_to_put = eleobj . unitTrans ( val , direction = '-' )
    epics . caput ( eleobj . ctrlinfo [ ctrlkey ] [ 'pv' ] , value_to_put )
    return True
def PDBasXMLwithSymwithPolarH ( self , id ) :
    """Adds Hydrogen Atoms to a Structure.

    Fetches the protonated structure for PDB ``id`` from the WHATIF web
    service as XML, keeps the raw response on ``self.raw``, and parses it
    with ``self.parser`` into an SMCRA structure object.

    NOTE(review): Python 2 only -- uses the ``print`` statement and
    ``urllib.urlopen``.

    :param id: PDB identifier to protonate
    :return: the parsed structure returned by ``self.parser.read``
    """
    # _WARNING is presumably a module-level notice -- TODO confirm.
    print _WARNING
    # Protonated Structure in XML Format (remote WHATIF REST call).
    h_s_xml = urllib . urlopen ( "http://www.cmbi.ru.nl/wiwsd/rest/PDBasXMLwithSymwithPolarH/id/" + id )
    # Keep the raw response handle for callers wanting the untouched XML.
    self . raw = h_s_xml
    p = self . parser
    h_s_smcra = p . read ( h_s_xml , 'WHATIF_Output' )
    return h_s_smcra
async def RelationById ( self , relation_ids ) :
    '''Call the Uniter facade's RelationById RPC.

    relation_ids : typing.Sequence[int]
    Returns -> typing.Sequence[~RelationResult]
    '''
    # map input types to rpc msg
    params = { 'relation-ids' : relation_ids }
    msg = dict ( type = 'Uniter' ,
                 request = 'RelationById' ,
                 version = 5 ,
                 params = params )
    return await self . rpc ( msg )
def _load_words ( self ) :
    """Load the list of profane words from ``self._words_file``.

    Populates ``self._censor_list`` with one stripped word per line.
    """
    with open ( self . _words_file , 'r' ) as words_file :
        self . _censor_list = [ word . strip ( ) for word in words_file ]
def get ( self , keyword ) :
    """Return the element of the list after the given keyword.

    Parameters
    ----------
    keyword : str
        The keyword parameter to find in the list. A leading colon is
        optional; it is added automatically (e.g. "keyword" is looked up
        as ":keyword"). Matching is case-insensitive.

    Returns
    -------
    obj : KQMLObject
        The object following the keyword parameter, or None if the
        keyword is absent or is the last element.

    Example:
        kl = KQMLList.from_string('(FAILURE :reason INVALID_PARAMETER)')
        kl.get('reason') # KQMLToken('INVALID_PARAMETER')
    """
    if not keyword . startswith ( ':' ) :
        keyword = ':' + keyword
    target = keyword . upper ( )
    for position , element in enumerate ( self . data ) :
        if element . to_string ( ) . upper ( ) == target :
            try :
                return self . data [ position + 1 ]
            except IndexError :
                # Keyword was the last element: nothing follows it.
                return None
    return None
def GetCacheSize ( self ) :
    """Determines the size of the uncompressed cached data.

    Returns:
      int: number of cached bytes, or 0 when the cache range is not set.
    """
    # Bug fix: compare against None explicitly. A start offset of 0 is a
    # valid cache position but is falsy, so the old truthiness test wrongly
    # reported an empty cache for data cached from the start of the stream.
    if self . _cache_start_offset is None or self . _cache_end_offset is None :
        return 0
    return self . _cache_end_offset - self . _cache_start_offset
def get_num_commenters ( self , item ) :
    """Return how many distinct users commented on the issue/PR."""
    unique_logins = { comment [ 'user' ] [ 'login' ]
                      for comment in item [ 'comments_data' ] }
    return len ( unique_logins )
def __init ( self ) :
    """Fetch the site's item properties and cache them on this instance.

    Retrieves ``<url>/<itemId>.json``, stores the raw dict and its JSON
    serialization, and mirrors every key of the response's ``data`` dict
    as an attribute of ``self``.
    """
    url = "%s/%s.json" % ( self . _url , self . _itemId )
    params = { "f" : "json" }
    json_dict = self . _get ( url , params ,
                           securityHandler = self . _securityHandler ,
                           proxy_port = self . _proxy_port ,
                           proxy_url = self . _proxy_url )
    self . _json_dict = json_dict
    self . _json = json . dumps ( json_dict )
    # Bug fix: only touch json_dict['data'] inside the guard; the old code
    # read it unconditionally and raised KeyError when 'data' was missing.
    if 'data' in json_dict :
        setattr ( self , "data" , json_dict [ 'data' ] )
        for key , value in json_dict [ 'data' ] . items ( ) :
            setattr ( self , key , value )
def _print_sql_with_error ( self , sql , error_line ) :
    """Write a SQL statement to the output, highlighting the error line.

    Multi-line statements are printed with right-aligned line numbers and
    the offending line wrapped in ``<error>`` tags; single-line statements
    are echoed as-is.

    :param str sql: The SQL statement.
    :param int error_line: The line where the error occurs.
    """
    if os . linesep not in sql :
        self . _io . text ( sql )
        return

    lines = sql . split ( os . linesep )
    # Width of the widest line number, so numbers line up in a column.
    digits = math . ceil ( math . log ( len ( lines ) + 1 , 10 ) )
    for number , line in enumerate ( lines , start = 1 ) :
        if number == error_line :
            self . _io . text ( '<error>{0:{width}} {1}</error>' . format (
                number , line , width = digits ) )
        else :
            self . _io . text ( '{0:{width}} {1}' . format (
                number , line , width = digits ) )
def create_equipamento_roteiro ( self ) :
    """Get an instance of equipamento_roteiro services facade.

    :return: an ``EquipamentoRoteiro`` client bound to this object's
        ``networkapi_url`` and credentials.
    """
    return EquipamentoRoteiro (
        self . networkapi_url ,
        self . user ,
        self . password ,
        self . user_ldap )
def tls_session_update ( self , msg_str ) :
    """Either for parsing or building, we store the client_random along
    with the raw string representing this handshake message.

    Also records the advertised TLS version on the session and, when a
    TLS 1.3 SupportedVersions extension is present, triggers the one-time
    computation of the TLS 1.3 early secrets.
    """
    super ( TLSClientHello , self ) . tls_session_update ( msg_str )
    self . tls_session . advertised_tls_version = self . version
    # Bytes 10:38 of the raw message are sliced out as the random bytes
    # that follow the 4-byte gmt_unix_time field.
    self . random_bytes = msg_str [ 10 : 38 ]
    # client_random = gmt_unix_time (big-endian u32) || random_bytes
    self . tls_session . client_random = ( struct . pack ( '!I' ,
                                                     self . gmt_unix_time ) +
                                        self . random_bytes )
    if self . ext :
        for e in self . ext :
            if isinstance ( e , TLS_Ext_SupportedVersions ) :
                if self . tls_session . tls13_early_secret is None :
                    # this is not recomputed if there was a TLS 1.3 HRR
                    self . tls_session . compute_tls13_early_secrets ( )
                break
def create ( self ) :
    """Create item under file system with its path.

    Returns:
        True if its path does not exist, False otherwise.
    """
    # NOTE(review): the nested condition is unsatisfiable --
    # os.path.isfile() only returns True for an *existing* regular file,
    # so "isfile and not exists" never holds and the file-writing branch
    # is dead code; every non-existing path falls through to
    # os.makedirs(). Nothing is returned despite the docstring. Confirm
    # the intended file-vs-directory behavior before relying on this.
    if os . path . isfile ( self . path ) :
        if not os . path . exists ( self . path ) :
            with open ( self . path , 'w' ) as fileobj :
                fileobj . write ( '' )
    else :
        os . makedirs ( self . path )
def delete_asset_content ( self , asset_content_id = None ) :
    """Deletes content from an ``Asset``.

    :param asset_content_id: the ``Id`` of the ``AssetContent``
    :type asset_content_id: ``osid.id.Id``
    :raise: ``NotFound`` -- ``asset_content_id`` is not found
    :raise: ``NullArgument`` -- ``asset_content_id`` is ``null``
    :raise: ``OperationFailed`` -- unable to complete request
    :raise: ``PermissionDenied`` -- authorization failure
    *compliance: mandatory -- This method must be implemented.*
    """
    if asset_content_id is None :
        raise NullArgument ( )
    asset = None
    # Scan every asset in the repository for the content with this id.
    for a in AssetLookupSession ( self . _repository_id , proxy = self . _proxy , runtime = self . _runtime ) . get_assets ( ) :
        i = 0  # might want to set plenary view
        # to assure ordering?
        for ac in a . get_asset_contents ( ) :
            if ac . get_id ( ) == asset_content_id :
                asset = a
                asset_content = ac
                index = i
            i += 1
    if asset is None :
        raise NotFound ( )
    # Remove the matched content from the owning asset's map and push the
    # updated asset back to the service.
    # NOTE(review): `asset_content` and `result` are assigned but never
    # read, and the except clause below only re-raises -- presumably a
    # placeholder for mapping to OperationFailed; confirm intent.
    asset . _my_map [ 'assetContents' ] . pop ( index )
    url_path = construct_url ( 'assets' , bank_id = self . _catalog_idstr )
    try :
        result = self . _put_request ( url_path , asset . _my_map )
    except Exception :
        raise
def partial_derivative_scalar ( self , U , V , y = 0 ) :
    """Compute partial derivative :math:`C(u|v)` of cumulative density
    for a single (U, V) pair by delegating to ``partial_derivative``."""
    self . check_fit ( )
    pair = np . column_stack ( ( U , V ) )
    return self . partial_derivative ( pair , y )
def _view ( self , filepath , format ) :
    """Start the right viewer based on file format and platform."""
    # Prefer the format-specific viewer, then the generic platform one.
    candidates = ( '_view_%s_%s' % ( format , backend . PLATFORM ) ,
                   '_view_%s' % backend . PLATFORM )
    for candidate in candidates :
        view_method = getattr ( self , candidate , None )
        if view_method is not None :
            view_method ( filepath )
            return
    raise RuntimeError ( '%r has no built-in viewer support for %r '
                        'on %r platform' % ( self . __class__ , format ,
                                             backend . PLATFORM ) )
def parse_failed_targets ( test_registry , junit_xml_path , error_handler ) :
    """Parses junit xml reports and maps targets to the individual tests that failed.

    Targets with no failed tests are omitted from the returned mapping and
    failed tests with no identifiable owning target are keyed under `None`.

    :param test_registry: A registry of tests that were run.
    :type test_registry: :class:`RegistryOfTests`
    :param string junit_xml_path: A path to a file or directory containing test
                                  junit xml reports to analyze.
    :param error_handler: An error handler that will be called with any junit
                          xml parsing errors.
    :type error_handler: callable that accepts a single :class:`ParseError`
                         argument.
    :returns: A mapping from targets to the set of individual tests that
              failed. Failed tests that belong to no identifiable target are
              mapped to `None`.
    :rtype: dict from :class:`pants.build_graph.target.Target` to a set of
            :class:`Test`
    """
    failed_targets = defaultdict ( set )

    def process_report ( path ) :
        # Attribute every failed/errored testcase in one report to its target.
        try :
            xml = XmlParser . from_file ( path )
            num_failures = int ( xml . get_attribute ( 'testsuite' , 'failures' ) )
            num_errors = int ( xml . get_attribute ( 'testsuite' , 'errors' ) )
            if not ( num_failures or num_errors ) :
                return
            for testcase in xml . parsed . getElementsByTagName ( 'testcase' ) :
                went_wrong = ( testcase . getElementsByTagName ( 'failure' ) or
                               testcase . getElementsByTagName ( 'error' ) )
                if went_wrong :
                    test = Test ( classname = testcase . getAttribute ( 'classname' ) ,
                                 methodname = testcase . getAttribute ( 'name' ) )
                    owning_target = test_registry . get_owning_target ( test )
                    failed_targets [ owning_target ] . add ( test )
        except ( XmlParser . XmlError , ValueError ) as e :
            error_handler ( ParseError ( path , e ) )

    if os . path . isdir ( junit_xml_path ) :
        for root , _ , files in safe_walk ( junit_xml_path ) :
            for report_name in fnmatch . filter ( files , 'TEST-*.xml' ) :
                process_report ( os . path . join ( root , report_name ) )
    else :
        process_report ( junit_xml_path )

    return dict ( failed_targets )
def initialize_eigenanatomy ( initmat , mask = None , initlabels = None , nreps = 1 , smoothing = 0 ) :
    """InitializeEigenanatomy is a helper function to initialize sparseDecom
    and sparseDecom2. Can be used to estimate sparseness parameters per
    eigenvector. The user then only chooses nvecs and optional
    regularization parameters.

    Arguments
    ---------
    initmat : np.ndarray or ANTsImage
        input matrix where rows provide initial vector values.
        alternatively, this can be an antsImage which contains labeled regions.
    mask : ANTsImage
        mask if available
    initlabels : list/tuple of integers
        which labels in initmat to use as initial components
    nreps : integer
        nrepetitions to use
    smoothing : float
        if using an initial label image, optionally smooth each roi

    Returns
    -------
    dict w/ the following key/value pairs:
        `initlist` : list of ANTsImage types
            initialization list(s) for sparseDecom(2)
        `mask` : ANTsImage
            mask(s) for sparseDecom(2)
        `enames` : list of strings
            string names of components for sparseDecom(2)

    Example
    -------
    >>> import ants
    >>> import numpy as np
    >>> mat = np.random.randn(4,100).astype('float32')
    >>> init = ants.initialize_eigenanatomy(mat)
    """
    if isinstance ( initmat , iio . ANTsImage ) :
        # create initmat from each of the unique labels
        if mask is not None :
            selectvec = mask > 0
        else :
            selectvec = initmat > 0
        initmatvec = initmat [ selectvec ]
        if initlabels is None :
            # use all positive labels present in the selected voxels
            ulabs = np . sort ( np . unique ( initmatvec ) )
            ulabs = ulabs [ ulabs > 0 ]
        else :
            ulabs = initlabels
        nvox = len ( initmatvec )
        temp = np . zeros ( ( len ( ulabs ) , nvox ) )
        for x in range ( len ( ulabs ) ) :
            # isolate one label with a narrow threshold band around it
            timg = utils . threshold_image ( initmat , ulabs [ x ] - 1e-4 , ulabs [ x ] + 1e-4 )
            if smoothing > 0 :
                timg = utils . smooth_image ( timg , smoothing )
            temp [ x , : ] = timg [ selectvec ]
        initmat = temp
    nclasses = initmat . shape [ 0 ]
    classlabels = [ 'init%i' % i for i in range ( nclasses ) ]
    initlist = [ ]
    if mask is None :
        # fallback mask: a one-row-wide strip matching initmat's shape
        maskmat = np . zeros ( initmat . shape )
        maskmat [ 0 , : ] = 1
        mask = core . from_numpy ( maskmat . astype ( 'float32' ) )
    eanatnames = [ 'A' ] * ( nclasses * nreps )
    ct = 0
    for i in range ( nclasses ) :
        vecimg = mask . clone ( 'float' )
        initf = initmat [ i , : ]
        vecimg [ mask == 1 ] = initf
        for nr in range ( nreps ) :
            initlist . append ( vecimg )
            # NOTE(review): `ct + nr - 1` looks off-by-one (writes index -1
            # on the first iteration, wrapping to the last slot) -- confirm
            # the intended name ordering before changing it.
            eanatnames [ ct + nr - 1 ] = str ( classlabels [ i ] )
            ct = ct + 1
    return { 'initlist' : initlist , 'mask' : mask , 'enames' : eanatnames }
def atlas_get_peer ( peer_hostport , peer_table = None ) :
    """Return the given peer's info, or None if it is unknown."""
    with AtlasPeerTableLocked ( peer_table ) as table :
        return table . get ( peer_hostport , None )
async def request_offline_members ( self , * guilds ) :
    r"""|coro|

    Requests previously offline members from the guild to be filled up
    into the :attr:`.Guild.members` cache.

    This function is usually not called. It should only be used if you
    have the ``fetch_offline_members`` parameter set to ``False``. When
    the client logs on and connects to the websocket, Discord does not
    provide the library with offline members if the number of members in
    the guild is larger than 250. You can check if a guild is large via
    :attr:`.Guild.large`.

    Parameters
    -----------
    \*guilds: :class:`Guild`
        An argument list of guilds to request offline members for.

    Raises
    -------
    InvalidArgument
        If any guild is unavailable or not large in the collection.
    """
    for guild in guilds :
        if guild . unavailable or not guild . large :
            raise InvalidArgument ( 'An unavailable or non-large guild was passed.' )
    await self . _connection . request_offline_members ( guilds )
def geometry_linestring ( lat , lon , elev ) :
    """GeoJSON Linestring. Latitude and Longitude have 2 values each.

    When both coordinate pairs match, the shape collapses to a point and
    is delegated to ``geometry_point`` (note: this pops one element off
    the caller's ``lat`` and ``lon`` lists). Otherwise every lon/lat
    combination becomes a coordinate of the linestring.

    :param list lat: Latitude values
    :param list lon: Longitude values
    :return dict:
    """
    logger_excel . info ( "enter geometry_linestring" )
    if lat [ 0 ] == lat [ 1 ] and lon [ 0 ] == lon [ 1 ] :
        # Point type, matching pairs.
        logger_excel . info ( "matching geo coordinate" )
        lat . pop ( )
        lon . pop ( )
        geometry = geometry_point ( lat , lon , elev )
    else :
        logger_excel . info ( "unique geo coordinates" )
        # Cross product of longitudes and latitudes, [lon, lat] order.
        coordinates = [ [ x , y ] for x in lon for y in lat ]
        if elev :
            for coordinate in coordinates :
                coordinate . append ( elev )
        geometry = OrderedDict ( )
        geometry [ 'type' ] = 'Linestring'
        geometry [ 'coordinates' ] = coordinates
    logger_excel . info ( "exit geometry_linestring" )
    return geometry
def AskFileForOpen ( message = None , typeList = None , version = None , defaultLocation = None , dialogOptionFlags = None , location = None , clientName = None , windowTitle = None , actionButtonLabel = None , cancelButtonLabel = None , preferenceKey = None , popupExtension = None , eventProc = None , previewProc = None , filterProc = None , wanted = None , multiple = None ) :
    """Original doc: Display a dialog asking the user for a file to open.

    wanted is the return type wanted: FSSpec, FSRef, unicode or string (default)
    the other arguments can be looked up in Apple's Navigation Services documentation

    NOTE(review): this psidialogs-backed shim only forwards ``message``;
    every other Navigation Services argument is accepted for API
    compatibility but ignored.
    """
    return psidialogs . ask_file ( message = message )
def detag_string ( self , string ) :
    """Extract tags from ``string``.

    returns (string, list) where
    string: input with each tag replaced by an index marker
            (<BR>... => <0>, <1>, <2>, etc.)
    list:   the removed tags ('<BR>', '<I>', '</I>')
    """
    counter = itertools . count ( 0 )

    def number_tag ( match ) :
        # Each replacement consumes the next index, regardless of the match.
        return '<%s>' % next ( counter )

    found = [ '' . join ( tag ) for tag in self . tag_pattern . findall ( string ) ]
    detagged , n_replaced = self . tag_pattern . subn ( number_tag , string )
    if len ( found ) != n_replaced :
        raise Exception ( 'tags dont match:' + string )
    return ( detagged , found )
def collides ( self , other ) :
    """Returns collision with axis aligned rect"""
    if self . angle == 0 :
        # Not tilted: defer to the plain rect-vs-rect test on our
        # center-origin rectangle.
        w = self . width
        h = self . height
        return other . collides ( Rect ( - 0.5 * w , - 0.5 * h , w , h ) )
    # Phase 1:
    # Form the bounding box of the tilted rectangle and test it against
    # `other`. If even the bounding box misses, the rects cannot intersect.
    if self . is_bbox_not_intersecting ( other ) :
        return False
    # Phase 2:
    # The rects intersect unless some edge of self puts every vertex of
    # `other` on its outside.
    return not self . is_edge_not_excluding_vertices ( other )
def delete ( self , email_to_addresses = None , email_cc_addresses = None , email_insert = None ) :
    """Delete this storage volume on the HMC, and optionally send emails to
    storage administrators requesting deletion of the storage volume on
    the storage subsystem and cleanup of related resources (e.g. LUN mask
    definitions on a storage subsystem).

    This method performs the "Modify Storage Group Properties" operation,
    requesting deletion of the volume.

    Authorization requirements:

    * Object-access permission to the storage group owning this storage
      volume.
    * Task permission to the "Configure Storage - System Programmer" task.

    Parameters:

      email_to_addresses (:term:`iterable` of :term:`string`):
        Email addresses of one or more storage administrators to be
        notified. If `None` or empty, no email will be sent.

      email_cc_addresses (:term:`iterable` of :term:`string`):
        Email addresses to be copied on the notification email.
        Must be `None` or empty if `email_to_addresses` is `None` or empty.

      email_insert (:term:`string`):
        Additional text (may include HTML tags) to be inserted in the
        notification email. Must be `None` or empty if
        `email_to_addresses` is `None` or empty.

    Raises:

      :exc:`~zhmcclient.HTTPError`
      :exc:`~zhmcclient.ParseError`
      :exc:`~zhmcclient.AuthError`
      :exc:`~zhmcclient.ConnectionError`
    """
    body = {
        'storage-volumes' : [
            { 'operation' : 'delete' , 'element-uri' : self . uri }
        ] ,
    }
    if email_to_addresses :
        body [ 'email-to-addresses' ] = email_to_addresses
        if email_cc_addresses :
            body [ 'email-cc-addresses' ] = email_cc_addresses
        if email_insert :
            body [ 'email-insert' ] = email_insert
    else :
        if email_cc_addresses :
            raise ValueError ( "email_cc_addresses must not be specified if "
                              "there is no email_to_addresses: %r" %
                              email_cc_addresses )
        if email_insert :
            raise ValueError ( "email_insert must not be specified if "
                              "there is no email_to_addresses: %r" %
                              email_insert )
    self . manager . session . post (
        self . manager . storage_group . uri + '/operations/modify' ,
        body = body )
    # Drop the (possibly stale) name-to-uri cache entry for this volume.
    self . manager . _name_uri_cache . delete (
        self . properties . get ( self . manager . _name_prop , None ) )
def make_gaussian_kernel ( sigma , npix = 501 , cdelt = 0.01 , xpix = None , ypix = None ) :
    """Make kernel for a 2D gaussian.

    Parameters
    ----------
    sigma : float
        Standard deviation in degrees.
    """
    # Convert the width from degrees to pixel units.
    sigma_pix = sigma / cdelt
    dxy = make_pixel_distance ( npix , xpix , ypix )
    # 2D gaussian profile evaluated at each pixel distance.
    kernel = ( np . exp ( - dxy ** 2 / ( 2.0 * sigma_pix ** 2 ) ) /
              ( 2 * np . pi * sigma_pix ** 2 ) )
    # Normalize to unit integral over solid angle.
    kernel /= ( np . sum ( kernel ) * np . radians ( cdelt ) ** 2 )
    return kernel
def check_lock ( i ) :
    """Check (and possibly release) the lock on a repository entry.

    Input:  {
              path         - path to be locked
              (unlock_uid) - UID of the lock to release it
            }

    Output: {
              return  - return code = 0, if successful
                                    = 32, lock UID is not matching
                                    > 0, if error
              (error) - error text if return > 0
            }
    """
    p = i [ 'path' ]
    uuid = i . get ( 'unlock_uid' , '' )
    # Lock file lives under the entry's CK metadata subdirectory.
    pl = os . path . join ( p , cfg [ 'subdir_ck_ext' ] , cfg [ 'file_for_lock' ] )
    luid = ''
    if os . path . isfile ( pl ) :
        import time
        # Read lock file: first line is the lock UID, second the expiry
        # window in seconds (non-positive values are clamped to 1).
        try :
            f = open ( pl )
            luid = f . readline ( ) . strip ( )
            exp = float ( f . readline ( ) . strip ( ) )
            if exp < 0 :
                exp = 1
            f . close ( )
        except Exception as e :
            return { 'return' : 1 , 'error' : 'problem reading lock file' }
        # Check if lock has expired (mtime + expiry window vs. now).
        dt = os . path . getmtime ( pl ) + exp - time . time ( )
        if dt < 0 :
            # Expired: anyone may clear it, but a caller presenting a
            # mismatched UID is still rejected.
            if uuid == '' or uuid == luid :
                os . remove ( pl )
            else :
                return { 'return' : 32 , 'error' : 'entry lock UID is not matching' }
        else :
            # Still live: only the holder's UID passes through.
            if uuid == '' :
                return { 'return' : 32 , 'error' : 'entry is locked' }
            elif uuid != luid :
                return { 'return' : 32 , 'error' : 'entry is locked with different UID' }
    elif uuid != '' :
        # Caller expected a lock to release, but none exists anymore.
        return { 'return' : 32 , 'error' : 'lock was removed or expired' }
    return { 'return' : 0 }
def process ( self , context , internal_response ) :
    """Manage consent and attribute filtering

    :type context: satosa.context.Context
    :type internal_response: satosa.internal.InternalData
    :rtype: satosa.response.Response

    :param context: response context
    :param internal_response: the response
    :return: response
    """
    consent_state = context . state [ STATE_KEY ]
    # Drop attributes not allowed by this flow's configured filter.
    internal_response . attributes = self . _filter_attributes ( internal_response . attributes , consent_state [ "filter" ] )
    id_hash = self . _get_consent_id ( internal_response . requester , internal_response . subject_id , internal_response . attributes )
    try :
        # Check if consent is already given
        consent_attributes = self . _verify_consent ( id_hash )
    except requests . exceptions . ConnectionError as e :
        # Fail closed: with the consent service unreachable, release no
        # attributes at all.
        satosa_logging ( logger , logging . ERROR , "Consent service is not reachable, no consent given." , context . state )
        # Send an internal_response without any attributes
        internal_response . attributes = { }
        return self . _end_consent ( context , internal_response )
    # Previous consent was given
    if consent_attributes is not None :
        satosa_logging ( logger , logging . DEBUG , "Previous consent was given" , context . state )
        # Release only the attributes the user previously consented to.
        internal_response . attributes = self . _filter_attributes ( internal_response . attributes , consent_attributes )
        return self . _end_consent ( context , internal_response )
    # No previous consent, request consent by user
    return self . _approve_new_consent ( context , internal_response , id_hash )
def reset ( self , label = None ) :
    """Clear all measurements, allowing the object to be reused.

    Args:
        label (str, optional): optionally change the label

    Example:
        >>> from timerit import Timerit
        >>> import math
        >>> ti = Timerit(num=10, unit='us', verbose=True)
        >>> _ = ti.reset(label='10!').call(math.factorial, 10)
        Timed best=...s, mean=...s for 10!
        >>> _ = ti.reset(label='20!').call(math.factorial, 20)
        Timed best=...s, mean=...s for 20!
        >>> _ = ti.reset().call(math.factorial, 20)
        Timed best=...s, mean=...s for 20!
    """
    # Discard previous measurements; totals are lazily recomputed.
    self . times , self . n_loops , self . total_time = [ ] , None , None
    if label :
        self . label = label
    return self
def load ( handle ) :
    """Loads a module from a handle.

    Currently this method only works with Tensorflow 2.x and can only load
    modules created by calling tensorflow.saved_model.save(). The method
    works in both eager and graph modes.

    Depending on the type of handle used, the call may involve downloading
    a Tensorflow Hub module to a local cache location specified by the
    TFHUB_CACHE_DIR environment variable. If a copy of the module is
    already present in the TFHUB_CACHE_DIR, the download step is skipped.

    Currently, three types of module handles are supported:
      1) Smart URL resolvers such as tfhub.dev, e.g.:
         https://tfhub.dev/google/nnlm-en-dim128/1.
      2) A directory on a file system supported by Tensorflow containing
         module files. This may include a local directory (e.g.
         /usr/local/mymodule) or a Google Cloud Storage bucket
         (gs://mymodule).
      3) A URL pointing to a TGZ archive of a module, e.g.
         https://example.com/mymodule.tar.gz.

    Args:
      handle: (string) the Module handle to resolve.

    Returns:
      A trackable object (see tf.saved_model.load() documentation for
      details).

    Raises:
      NotImplementedError: If the code is running against incompatible
        (1.x) version of TF.
    """
    if hasattr ( tf_v1 . saved_model , "load_v2" ) :
        module_handle = resolve ( handle )
        return tf_v1 . saved_model . load_v2 ( module_handle )
    # Bug fix: interpolate the version into the message with %. The old
    # code passed tf.__version__ as a second constructor argument, leaving
    # the "%s" placeholder unfilled in the raised error text.
    raise NotImplementedError ( "hub.load() is not implemented for "
                               "TF < 1.14.x, Current version: %s"
                               % tf . __version__ )
def real_time_scheduling ( self , availability , oauth , event , target_calendars = ( ) ) :
    """Generates an real time scheduling link to start the OAuth process with
    an event to be automatically upserted

    :param dict availability: - A dict describing the availability details for the event:
        :participants      - A dict stating who is required for the availability call
        :required_duration - A dict stating the length of time the event will last for
        :available_periods - A dict stating the available periods for the event
        :start_interval    - A Integer representing the start_interval of the event
        :buffer            - A dict representing the buffer for the event
    :param dict oauth: - A dict describing the OAuth flow required:
        :scope        - A String representing the scopes to ask for within the OAuth flow
        :redirect_uri - A String containing a url to redirect the user to after
                        completing the OAuth flow.
        :scope        - A String representing additional state to be passed within
                        the OAuth flow.
    :param dict event: - A dict describing the event
    :param list target_calendars: - A list of dicts stating into which calendars
                                    to insert the created event

    See http://www.cronofy.com/developers/api#upsert-event for reference.
    """
    args = { 'oauth' : oauth , 'event' : event , 'target_calendars' : target_calendars }
    if availability :
        options = { }
        options [ 'participants' ] = self . map_availability_participants ( availability . get ( 'participants' , None ) )
        options [ 'required_duration' ] = self . map_availability_required_duration ( availability . get ( 'required_duration' , None ) )
        # NOTE(review): start_interval is mapped with the required-duration
        # mapper -- looks like copy/paste; confirm a dedicated mapper is
        # not intended here.
        options [ 'start_interval' ] = self . map_availability_required_duration ( availability . get ( 'start_interval' , None ) )
        options [ 'buffer' ] = self . map_availability_buffer ( availability . get ( 'buffer' , None ) )
        # Mutates availability['available_periods'] in place before copying
        # it into the request options.
        self . translate_available_periods ( availability [ 'available_periods' ] )
        options [ 'available_periods' ] = availability [ 'available_periods' ]
        args [ 'availability' ] = options
    return self . request_handler . post ( endpoint = 'real_time_scheduling' , data = args , use_api_key = True ) . json ( )
def to_scaled_dtype ( val ) :
    """Parse *val* to return a dtype.

    Each entry of *val* is a tuple ``(name, dtype_str, ..., sample)``.
    String fields ("S...") keep their declared dtype; other fields take
    the dtype of their trailing sample value (or its Python type when the
    sample has no ``dtype`` attribute). The trailing sample itself is
    dropped from the emitted dtype entries.
    """
    entries = [ ]
    for field in val :
        name , declared = field [ 0 ] , field [ 1 ]
        middle = field [ 2 : - 1 ]
        if declared . startswith ( "S" ) :
            entries . append ( ( name , declared ) + middle )
        else :
            sample = field [ - 1 ]
            try :
                inferred = sample . dtype
            except AttributeError :
                # Plain Python scalars carry no dtype; use their type.
                inferred = type ( sample )
            entries . append ( ( name , inferred ) + middle )
    return np . dtype ( entries )
def threshold_monitor_hidden_threshold_monitor_Cpu_limit ( self , ** kwargs ) :
    """Auto Generated Code

    Builds the brocade-threshold-monitor config XML for the Cpu limit
    leaf and passes it to the callback.
    """
    config = ET . Element ( "config" )
    monitor_hidden = ET . SubElement (
        config , "threshold-monitor-hidden" ,
        xmlns = "urn:brocade.com:mgmt:brocade-threshold-monitor" )
    monitor = ET . SubElement ( monitor_hidden , "threshold-monitor" )
    cpu = ET . SubElement ( monitor , "Cpu" )
    limit = ET . SubElement ( cpu , "limit" )
    limit . text = kwargs . pop ( 'limit' )
    callback = kwargs . pop ( 'callback' , self . _callback )
    return callback ( config )
def GET_blockchain_num_names(self, path_info, blockchain_name):
    """Handle GET /blockchains/:blockchainID/name_count

    Takes ``all=true`` in the query string to include expired names.
    Replies JSON with the number of names on this blockchain.

    :param path_info: request info dict; ``path_info['qs_values']`` holds the
        parsed query-string values
    :param blockchain_name: blockchain identifier; only 'bitcoin' is supported
    """
    if blockchain_name != 'bitcoin':
        # not supported
        self._reply_json({'error': 'Unsupported blockchain'}, status_code=404)
        return

    include_expired = False

    # `all` accepts '1' or 'true' (case-insensitive).
    qs_values = path_info['qs_values']
    if qs_values.get('all', '').lower() in ['1', 'true']:
        include_expired = True

    blockstackd_url = get_blockstackd_url()
    num_names = blockstackd_client.get_num_names(include_expired=include_expired, hostport=blockstackd_url)
    if json_is_error(num_names):
        # error from the daemon; propagate its HTTP status when given,
        # otherwise report 502 Bad Gateway
        status_code = num_names.get('http_status', 502)
        return self._reply_json({'error': num_names['error']}, status_code=status_code)

    self._reply_json({'names_count': num_names})
    return
def zen(self):
    """Returns a quote from the Zen of GitHub.

    Yet another API Easter Egg.

    :returns: str
    """
    response = self._get(self._build_url('zen'))
    if response.status_code == 200:
        return response.content
    return ''
def set_terminal_title(title, kernel32=None):
    """Set the terminal title.

    :param title: The title to set (string, unicode, bytes accepted).
    :param kernel32: Optional mock kernel32 object. For testing.

    :return: If title changed successfully (Windows only, always True on Linux/OSX).
    :rtype: bool
    """
    try:
        title_bytes = title.encode('utf-8')
    except AttributeError:
        title_bytes = title  # Already bytes.

    if IS_WINDOWS:
        kernel32 = kernel32 or ctypes.windll.kernel32
        try:
            is_ascii = all(ord(c) < 128 for c in title)  # str/unicode.
        except TypeError:
            is_ascii = all(c < 128 for c in title)  # bytes.
        if is_ascii:
            return kernel32.SetConsoleTitleA(title_bytes) != 0
        else:
            return kernel32.SetConsoleTitleW(title) != 0

    # Linux/OSX: emit the xterm title escape sequence.
    # BUGFIX: on Python 3 sys.stdout is a text stream and raises TypeError when
    # given bytes; write through the underlying binary buffer when one exists
    # (Python 2 file objects have no .buffer and accept bytes directly).
    stream = getattr(sys.stdout, 'buffer', sys.stdout)
    stream.write(b'\033]0;' + title_bytes + b'\007')
    return True
def get_restart_freeze():
    '''Displays whether 'restart on freeze' is on or off if supported

    :return: True if "restart on freeze" is enabled, otherwise False
    :rtype: bool

    CLI Example:

    .. code-block:: bash

        salt '*' power.get_restart_freeze
    '''
    # Query the setting via systemsetup, then parse and validate the output.
    raw_output = salt.utils.mac_utils.execute_return_result('systemsetup -getrestartfreeze')
    parsed = salt.utils.mac_utils.parse_return(raw_output)
    return salt.utils.mac_utils.validate_enabled(parsed) == 'on'
def activate_axes(self, axes):
    '''Sets motors to a high current, for when they are moving and/or must
    hold position.

    Activating axes happens before both HOMING and MOVING.

    axes:
        String containing the axes to set to high current (eg: 'XYZABC')
    '''
    # Keep only axes that are both known (AXES) and not disabled.
    usable = (set(axes) & set(AXES)) - set(DISABLE_AXES)
    currents_to_apply = {}
    for axis in ''.join(usable):
        # Skip axes that are already marked active.
        if self._active_axes[axis] is False:
            currents_to_apply[axis] = self._active_current_settings['now'][axis]
    if currents_to_apply:
        self._save_current(currents_to_apply, axes_active=True)
def hover(self, target=None):
    """Moves the cursor to the target location"""
    if target is None:
        target = self._lastMatch or self  # Whichever one is not None

    # Resolve the target into a Location, depending on its type.
    if isinstance(target, (Pattern, basestring)):
        # Patterns and strings are searched for on screen first.
        move_to = self.find(target).getTarget()
    elif isinstance(target, Match):
        move_to = target.getTarget()
    elif isinstance(target, Region):
        move_to = target.getCenter()
    elif isinstance(target, Location):
        move_to = target
    else:
        raise TypeError("hover expected Pattern, String, Match, Region, or Location object")

    Mouse.moveSpeed(move_to, Settings.MoveMouseDelay)
def packages(self, name=None, memory=None, disk=None, swap=None, version=None, vcpus=None, group=None):
    """GET /:login/packages

    :param name: the label associated with the resource package
    :type name: :py:class:`basestring`

    :param memory: amount of RAM (in MiB) that the package provisions
    :type memory: :py:class:`int`

    :param disk: amount of disk storage (in MiB) the package provisions
    :type disk: :py:class:`int`

    :param swap: amount of swap (in MiB) the package provisions
    :type swap: :py:class:`int`

    :param version: the version identifier associated with the package
    :type version: :py:class:`basestring`

    :param vcpus: the number of virtual CPUs provisioned with the package
    :type vcpus: :py:class:`int`

    :param group: the group to which the package belongs
    :type group: :py:class:`basestring`

    :Returns: packages (machine "sizes", with resource types and values)
        available in this datacenter
    :rtype: :py:class:`list` of :py:class:`dict`\\s
    """
    candidates = (('name', name), ('memory', memory), ('disk', disk),
                  ('swap', swap), ('version', version), ('vcpus', vcpus),
                  ('group', group))
    # Only truthy filters are forwarded as query parameters.
    params = {key: value for key, value in candidates if value}
    j, _ = self.request('GET', '/packages', params=params)
    return j
def process_lipisha_payment(request):
    """Handle payment received and respond with a dictionary.

    Validates the incoming Lipisha IPN POST against the schema matching its
    ``api_type`` (initiate vs. acknowledge). For initiate requests a receipt
    payload is returned; acknowledge requests return an empty dict.
    """
    log.debug(request.POST)
    # Pick the validation schema based on the reported api_type;
    # the initiate schema is the default.
    schema = LipishaInitiateSchema
    api_type = request.POST.get('api_type')
    if api_type == TYPE_ACKNOWLEDGE:
        schema = LipishaAcknowledgeSchema
    form = Form(request, schema())
    # Assume success unless validation fails below.
    transaction_status_code = STATUS_SUCCESS
    transaction_status = 'Processed'
    transaction_status_description = 'Processed'
    if form.validate():
        if api_type == TYPE_INITIATE:
            # Process new payment
            pass
        elif api_type == TYPE_ACKNOWLEDGE:
            if form.data.get('transaction_status_code') == STATUS_SUCCESS:
                # Process successful acknowledgement
                pass
            else:
                # Acknowledgement reporting a non-success status: log it.
                log.error('Invalid payment acknowledgement')
                log.error(request.POST)
    else:
        # Validation failed: log each error and report failure back to Lipisha.
        log.error("Error while processing payment")
        for error in form.all_errors():
            log.error(error)
        transaction_status_code = STATUS_INITIATE_FAILURE
        transaction_status = 'Error'
        transaction_status_description = 'Error while processing'
    if api_type == TYPE_INITIATE:
        # Initiate requests expect a signed receipt response echoing the
        # transaction reference and the computed status fields.
        data = request.POST
        return dict(
            api_key=LIPISHA_API_KEY,
            api_signature=LIPISHA_API_SIGNATURE,
            api_version=data.get('api_version'),
            api_type=TYPE_RECEIPT,
            transaction_reference=data.get('transaction_reference'),
            transaction_status_code=transaction_status_code,
            transaction_status=transaction_status,
            transaction_status_description=transaction_status_description,
        )
    # Acknowledge (and any other) requests get an empty response body.
    return {}
def node_transmit(node_id):
    """Transmit to another node.

    The sender's node id must be specified in the url. As with node.transmit()
    the key parameters are what and to_whom. However, the values these accept
    are more limited than for the back end due to the necessity of
    serialization.

    If what and to_whom are not specified they will default to None.
    Alternatively you can pass an int (e.g. '5') or a class name (e.g. 'Info'
    or 'Agent'). Passing an int will get that info/node, passing a class name
    will pass the class. Note that if the class you are specifying is a custom
    class it will need to be added to the dictionary of known_classes in your
    experiment code.

    You may also pass the values property1, property2, property3, property4,
    property5 and details. If passed this will fill in the relevant values of
    the transmissions created with the values you specified.

    For example, to transmit all infos of type Meme to the node with id 10:
        dallinger.post("/node/" + my_node_id + "/transmit",
                       {what: "Meme", to_whom: 10})
    """
    exp = Experiment(session)
    what = request_parameter(parameter="what", optional=True)
    to_whom = request_parameter(parameter="to_whom", optional=True)

    # check the node exists
    node = models.Node.query.get(node_id)
    if node is None:
        return error_response(error_type="/node/transmit, node does not exist")

    # create what: an int selects a specific Info by id; anything that fails
    # int() coercion (or a failed lookup raising) is treated as a class name
    # and resolved through the experiment's known_classes registry.
    if what is not None:
        try:
            what = int(what)
            what = models.Info.query.get(what)
            if what is None:
                return error_response(
                    error_type="/node/transmit POST, info does not exist",
                    participant=node.participant,
                )
        except Exception:
            try:
                what = exp.known_classes[what]
            except KeyError:
                msg = "/node/transmit POST, {} not in experiment.known_classes"
                return error_response(error_type=msg.format(what), participant=node.participant)

    # create to_whom: same int-id-or-class-name resolution, but against Nodes.
    if to_whom is not None:
        try:
            to_whom = int(to_whom)
            to_whom = models.Node.query.get(to_whom)
            if to_whom is None:
                return error_response(
                    error_type="/node/transmit POST, recipient Node does not exist",
                    participant=node.participant,
                )
        except Exception:
            try:
                to_whom = exp.known_classes[to_whom]
            except KeyError:
                msg = "/node/transmit POST, {} not in experiment.known_classes"
                return error_response(error_type=msg.format(to_whom), participant=node.participant)

    # execute the request; any failure rolls up into a generic server error
    # response carrying the participant for debugging.
    try:
        transmissions = node.transmit(what=what, to_whom=to_whom)
        for t in transmissions:
            # fill property1..property5/details from the request, if supplied
            assign_properties(t)
        session.commit()
        # ping the experiment
        exp.transmission_post_request(node=node, transmissions=transmissions)
        session.commit()
    except Exception:
        return error_response(error_type="/node/transmit POST, server error", participant=node.participant)

    # return the data
    return success_response(transmissions=[t.__json__() for t in transmissions])
def enable_ap_port(self, apid, port):
    """Enable an access point port.

    Re-opens a temporarily closed access point port. Only effective for
    public domains and public IPs.

    Args:
        - apid: access point ID
        - port: port number to enable

    Returns:
        A tuple ``(<result>, <ResponseInfo>)``:
        - result: empty dict ``{}`` on success,
          ``{"error": "<errMsg string>"}`` on failure
        - ResponseInfo: response information for the request
    """
    endpoint = '/v3/aps/{0}/{1}/enable'.format(apid, port)
    return self.__post(self.host + endpoint)
def delete(self, **kwargs):
    """Deletes a member from an unmanaged license pool.

    You need to be careful with this method. When you use it, and it
    succeeds on the remote BIG-IP, the configuration of the BIG-IP will be
    reloaded. During this process, you will not be able to access the REST
    interface.

    This method overrides the Resource class's method because it requires
    that extra json kwargs be supplied. This is not a behavior that is part
    of the normal Resource class's delete method.

    :param kwargs: request parameters; ``uuid`` defaults to this resource's
        own uuid, ``force`` (default True) skips the generation check
    :return: None; on HTTP 200 the instance state is replaced with
        ``{'deleted': True}``
    """
    # Default the member uuid to this resource's own identifier.
    if 'uuid' not in kwargs:
        kwargs['uuid'] = str(self.uuid)
    # Split transport-level options out, then sanitise the JSON payload.
    requests_params = self._handle_requests_params(kwargs)
    kwargs = self._check_for_python_keywords(kwargs)
    kwargs = self._prepare_request_json(kwargs)
    delete_uri = self._meta_data['uri']
    session = self._meta_data['bigip']._meta_data['icr_session']
    # Check the generation for match before delete
    force = self._check_force_arg(kwargs.pop('force', True))
    if not force:
        self._check_generation()
    response = session.delete(delete_uri, json=kwargs, **requests_params)
    if response.status_code == 200:
        # Drop all local state; the remote object no longer exists.
        self.__dict__ = {'deleted': True}
def _font(size):
    """Returns a PIL ImageFont instance.

    :param size: size of the avatar, in pixels
    """
    # Use the CJK-capable WenQuanYi Micro Hei font bundled with this module.
    font_path = os.path.join(os.path.dirname(__file__), 'data', "wqy-microhei.ttc")
    # Glyphs are rendered at ~65% of the avatar size.
    font_size = int(0.65 * size)
    return ImageFont.truetype(font_path, size=font_size, index=0)
def organization_create(self, data, **kwargs):
    """Create an organization.

    https://developer.zendesk.com/rest_api/docs/core/organizations#create-organization
    """
    endpoint = "/api/v2/organizations.json"
    return self.call(endpoint, method="POST", data=data, **kwargs)
def create(model_config, model, source, storage, phases, callbacks=None, restart=True):
    """Vel factory function.

    Assembles a :class:`PhaseTrainCommand` from the supplied configuration,
    model factory, data source, storage backend and training phases.
    """
    settings = dict(
        model_config=model_config,
        model_factory=model,
        source=source,
        storage=storage,
        phases=phases,
        callbacks=callbacks,
        restart=restart,
    )
    return PhaseTrainCommand(**settings)
def get_resource_by_id(resource_id, api_version, extract_value=None):
    '''Get an AzureARM resource by id.

    resource_id: full ARM resource ID to fetch
    api_version: API version string to use for the lookup
    extract_value: optional key; when given, only that entry of the resource
        dict is returned

    On a cloud error the error is logged and ``{'Error': <message>}`` is
    returned instead.
    '''
    ret = {}
    try:
        conn = get_conn(client_type='resource')
        raw = conn.resources.get_by_id(resource_id=resource_id, api_version=api_version)
        resource = raw.as_dict()
        ret = resource if extract_value is None else resource[extract_value]
    except CloudError as exc:
        __utils__['azurearm.log_cloud_error']('resource', exc.message)
        ret = {'Error': exc.message}
    return ret
def get_config(self):
    """Return the current configuration as a dict of location, language and topic."""
    return {
        'location': self.location,
        'language': self.language,
        'topic': self.topic,
    }
def set_logyticks_for_all(self, row_column_list=None, logticks=None):
    """Manually specify the y-axis log tick values.

    :param row_column_list: a list containing (row, column) tuples to
        specify the subplots, or None to indicate *all* subplots.
    :type row_column_list: list or None
    :param logticks: logarithm of the locations for the ticks along the
        axis. For example, if you specify [1, 2, 3], ticks will be placed
        at 10, 100 and 1000.
    """
    if row_column_list is None:
        # Apply to every subplot at once: store the positions as powers of ten.
        self.ticks['y'] = ['1e%d' % exponent for exponent in logticks]
        return
    for row, column in row_column_list:
        self.set_logyticks(row, column, logticks)