idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
25,100
def logged_in ( self ) : try : self . _proxy . User . get ( { 'ids' : [ ] } ) return True except Fault as e : if e . faultCode == 505 or e . faultCode == 32000 : return False raise e
This is True if this instance is logged in else False .
25,101
def _getbugfields ( self ) : r = self . _proxy . Bug . fields ( { 'include_fields' : [ 'name' ] } ) return [ f [ 'name' ] for f in r [ 'fields' ] ]
Get the list of valid fields for Bug objects
25,102
def getbugfields ( self , force_refresh = False ) : if force_refresh or not self . _cache . bugfields : log . debug ( "Refreshing bugfields" ) self . _cache . bugfields = self . _getbugfields ( ) self . _cache . bugfields . sort ( ) log . debug ( "bugfields = %s" , self . _cache . bugfields ) return self . _cache . bug...
Calls getBugFields which returns a list of fields in each bug for this bugzilla instance . This can be used to set the list of attrs on the Bug object .
25,103
def refresh_products ( self , ** kwargs ) : for product in self . product_get ( ** kwargs ) : updated = False for current in self . _cache . products [ : ] : if ( current . get ( "id" , - 1 ) != product . get ( "id" , - 2 ) and current . get ( "name" , - 1 ) != product . get ( "name" , - 2 ) ) : continue _nested_update...
Refresh a product s cached info . Basically calls product_get with the passed arguments and tries to intelligently update our product cache .
25,104
def getproducts ( self , force_refresh = False , ** kwargs ) : if force_refresh or not self . _cache . products : self . refresh_products ( ** kwargs ) return self . _cache . products
Query all products and return the raw dict info . Takes all the same arguments as product_get .
25,105
def getcomponentdetails ( self , product , component , force_refresh = False ) : d = self . getcomponentsdetails ( product , force_refresh ) return d [ component ]
Helper for accessing a single component s info . This is a wrapper around getcomponentsdetails see that for explanation
25,106
def getcomponents ( self , product , force_refresh = False ) : proddict = self . _lookup_product_in_cache ( product ) product_id = proddict . get ( "id" , None ) if ( force_refresh or product_id is None or product_id not in self . _cache . component_names ) : self . refresh_products ( names = [ product ] , include_fiel...
Return a list of component names for the passed product .
25,107
def _process_include_fields ( self , include_fields , exclude_fields , extra_fields ) : def _convert_fields ( _in ) : if not _in : return _in for newname , oldname in self . _get_api_aliases ( ) : if oldname in _in : _in . remove ( oldname ) if newname not in _in : _in . append ( newname ) return _in ret = { } if self ...
Internal helper to process include_fields lists
25,108
def _getbugs ( self , idlist , permissive , include_fields = None , exclude_fields = None , extra_fields = None ) : oldidlist = idlist idlist = [ ] for i in oldidlist : try : idlist . append ( int ( i ) ) except ValueError : idlist . append ( i ) extra_fields = self . _listify ( extra_fields or [ ] ) extra_fields += se...
Return a list of dicts of full bug info for each given bug id . bug ids that couldn t be found will return None instead of a dict .
25,109
def _getbug ( self , objid , ** kwargs ) : return self . _getbugs ( [ objid ] , permissive = False , ** kwargs ) [ 0 ]
Thin wrapper around _getbugs to handle the slight argument tweaks for fetching a single bug . The main bit is permissive = False which will tell bugzilla to raise an explicit error if we can t fetch that bug .
25,110
def getbug ( self , objid , include_fields = None , exclude_fields = None , extra_fields = None ) : data = self . _getbug ( objid , include_fields = include_fields , exclude_fields = exclude_fields , extra_fields = extra_fields ) return Bug ( self , dict = data , autorefresh = self . bug_autorefresh )
Return a Bug object with the full complement of bug data already loaded .
25,111
def getbugs ( self , idlist , include_fields = None , exclude_fields = None , extra_fields = None , permissive = True ) : data = self . _getbugs ( idlist , include_fields = include_fields , exclude_fields = exclude_fields , extra_fields = extra_fields , permissive = permissive ) return [ ( b and Bug ( self , dict = b ,...
Return a list of Bug objects with the full complement of bug data already loaded . If there s a problem getting the data for a given id the corresponding item in the returned list will be None .
25,112
def update_tags ( self , idlist , tags_add = None , tags_remove = None ) : tags = { } if tags_add : tags [ "add" ] = self . _listify ( tags_add ) if tags_remove : tags [ "remove" ] = self . _listify ( tags_remove ) d = { "ids" : self . _listify ( idlist ) , "tags" : tags , } return self . _proxy . Bug . update_tags ( d...
Updates the tags field for a bug .
25,113
def _attachment_uri ( self , attachid ) : att_uri = self . url . replace ( 'xmlrpc.cgi' , 'attachment.cgi' ) att_uri = att_uri + '?id=%s' % attachid return att_uri
Returns the URI for the given attachment ID .
25,114
def attachfile ( self , idlist , attachfile , description , ** kwargs ) : if isinstance ( attachfile , str ) : f = open ( attachfile , "rb" ) elif hasattr ( attachfile , 'read' ) : f = attachfile else : raise TypeError ( "attachfile must be filename or file-like object" ) if "contenttype" in kwargs : kwargs [ "content_...
Attach a file to the given bug IDs . Returns the ID of the attachment or raises XMLRPC Fault if something goes wrong .
25,115
def openattachment ( self , attachid ) : attachments = self . get_attachments ( None , attachid ) data = attachments [ "attachments" ] [ str ( attachid ) ] xmlrpcbinary = data [ "data" ] ret = BytesIO ( ) ret . write ( xmlrpcbinary . data ) ret . name = data [ "file_name" ] ret . seek ( 0 ) return ret
Get the contents of the attachment with the given attachment ID . Returns a file - like object .
25,116
def get_attachments ( self , ids , attachment_ids , include_fields = None , exclude_fields = None ) : params = { "ids" : self . _listify ( ids ) or [ ] , "attachment_ids" : self . _listify ( attachment_ids ) or [ ] , } if include_fields : params [ "include_fields" ] = self . _listify ( include_fields ) if exclude_field...
Wrapper for Bug . attachments . One of ids or attachment_ids is required
25,117
def createbug ( self , * args , ** kwargs ) : data = self . _validate_createbug ( * args , ** kwargs ) rawbug = self . _proxy . Bug . create ( data ) return Bug ( self , bug_id = rawbug [ "id" ] , autorefresh = self . bug_autorefresh )
Create a bug with the given info . Returns a new Bug object . Check bugzilla API documentation for valid values at least product component summary version and description need to be passed .
25,118
def _getusers ( self , ids = None , names = None , match = None ) : params = { } if ids : params [ 'ids' ] = self . _listify ( ids ) if names : params [ 'names' ] = self . _listify ( names ) if match : params [ 'match' ] = self . _listify ( match ) if not params : raise BugzillaError ( '_get() needs one of ids, ' ' nam...
Return a list of users that match criteria .
25,119
def getusers ( self , userlist ) : userobjs = [ User ( self , ** rawuser ) for rawuser in self . _getusers ( names = userlist ) . get ( 'users' , [ ] ) ] ret = [ ] for u in userlist : for uobj in userobjs [ : ] : if uobj . email == u : userobjs . remove ( uobj ) ret . append ( uobj ) break ret += userobjs return ret
Return a list of Users from .
25,120
def searchusers ( self , pattern ) : return [ User ( self , ** rawuser ) for rawuser in self . _getusers ( match = pattern ) . get ( 'users' , [ ] ) ]
Return a bugzilla User for the given list of patterns
25,121
def createuser ( self , email , name = '' , password = '' ) : self . _proxy . User . create ( email , name , password ) return self . getuser ( email )
Return a bugzilla User for the given username
25,122
def refresh ( self , include_fields = None , exclude_fields = None , extra_fields = None ) : r = self . bugzilla . _getbug ( self . bug_id , include_fields = include_fields , exclude_fields = exclude_fields , extra_fields = self . _bug_fields + ( extra_fields or [ ] ) ) self . _update_dict ( r )
Refresh the bug with the latest data from bugzilla
25,123
def _update_dict ( self , newdict ) : if self . bugzilla : self . bugzilla . post_translation ( { } , newdict ) aliases = self . bugzilla . _get_bug_aliases ( ) for newname , oldname in aliases : if oldname not in newdict : continue if newname not in newdict : newdict [ newname ] = newdict [ oldname ] elif newdict [ ne...
Update internal dictionary in a way that ensures no duplicate entries are stored WRT field aliases
25,124
def deletecc ( self , cclist , comment = None ) : vals = self . bugzilla . build_update ( comment = comment , cc_remove = cclist ) log . debug ( "deletecc: update=%s" , vals ) return self . bugzilla . update_bugs ( self . bug_id , vals )
Removes the given email addresses from the CC list for this bug .
25,125
def addcomment ( self , comment , private = False ) : vals = self . bugzilla . build_update ( comment = comment , comment_private = private ) log . debug ( "addcomment: update=%s" , vals ) return self . bugzilla . update_bugs ( self . bug_id , vals )
Add the given comment to this bug . Set private to True to mark this comment as private .
25,126
def getcomments ( self ) : comment_list = self . bugzilla . get_comments ( [ self . bug_id ] ) return comment_list [ 'bugs' ] [ str ( self . bug_id ) ] [ 'comments' ]
Returns an array of comment dictionaries for this bug
25,127
def get_flag_status ( self , name ) : f = self . get_flags ( name ) if not f : return None assert len ( f ) <= 1 return f [ 0 ] [ 'status' ]
Return a flag status field
25,128
def get_attachments ( self , include_fields = None , exclude_fields = None ) : if "attachments" in self . __dict__ : return self . attachments data = self . bugzilla . get_attachments ( [ self . bug_id ] , None , include_fields , exclude_fields ) return data [ "bugs" ] [ str ( self . bug_id ) ]
Helper call to Bugzilla . get_attachments . If you want to fetch specific attachment IDs use that function instead
25,129
def refresh ( self ) : newuser = self . bugzilla . getuser ( self . email ) self . __dict__ . update ( newuser . __dict__ )
Update User object with latest info from bugzilla
25,130
def pre_translation ( self , query ) : old = query . copy ( ) if 'bug_id' in query : if not isinstance ( query [ 'bug_id' ] , list ) : query [ 'id' ] = query [ 'bug_id' ] . split ( ',' ) else : query [ 'id' ] = query [ 'bug_id' ] del query [ 'bug_id' ] if 'component' in query : if not isinstance ( query [ 'component' ]...
Translates the query for possible aliases
25,131
def post_translation ( self , query , bug ) : ignore = query if 'component' in bug and "components" not in bug : val = bug [ 'component' ] bug [ 'components' ] = isinstance ( val , list ) and val or [ val ] bug [ 'component' ] = bug [ 'components' ] [ 0 ] if 'version' in bug and "versions" not in bug : val = bug [ 'ver...
Convert the results of getbug back to the ancient RHBZ value formats
25,132
def delete ( self , mail ) : self . stats [ 'mail_deleted' ] += 1 if self . conf . dry_run : logger . info ( "Skip deletion of {!r}." . format ( mail ) ) return logger . debug ( "Deleting {!r}..." . format ( mail ) ) os . unlink ( mail . path ) logger . info ( "{} deleted." . format ( mail . path ) )
Delete a mail from the filesystem .
25,133
def check_differences ( self ) : logger . info ( "Check that mail differences are within the limits." ) if self . conf . size_threshold < 0 : logger . info ( "Skip checking for size differences." ) if self . conf . content_threshold < 0 : logger . info ( "Skip checking for content differences." ) if self . conf . size_...
In - depth check of mail differences .
25,134
def diff ( self , mail_a , mail_b ) : return len ( '' . join ( unified_diff ( mail_a . body_lines , mail_b . body_lines , fromfile = 'a' , tofile = 'b' , fromfiledate = '' , tofiledate = '' , n = 0 , lineterm = '\n' ) ) )
Return difference in bytes between two mails normalized body .
25,135
def pretty_diff ( self , mail_a , mail_b ) : return '' . join ( unified_diff ( mail_a . body_lines , mail_b . body_lines , fromfile = 'Normalized body of {}' . format ( mail_a . path ) , tofile = 'Normalized body of {}' . format ( mail_b . path ) , fromfiledate = '{:0.2f}' . format ( mail_a . timestamp ) , tofiledate =...
Returns a verbose unified diff between two mails normalized body .
25,136
def apply_strategy ( self ) : method_id = self . conf . strategy . replace ( '-' , '_' ) if not hasattr ( DuplicateSet , method_id ) : raise NotImplementedError ( "DuplicateSet.{}() method." . format ( method_id ) ) return getattr ( self , method_id ) ( )
Apply deduplication with the configured strategy .
25,137
def dedupe ( self ) : if len ( self . pool ) == 1 : logger . debug ( "Ignore set: only one message found." ) self . stats [ 'mail_unique' ] += 1 self . stats [ 'set_ignored' ] += 1 return try : self . check_differences ( ) self . apply_strategy ( ) except UnicodeDecodeError as expt : self . stats [ 'set_rejected_encodi...
Performs the deduplication and its preliminary checks .
25,138
def delete_older ( self ) : logger . info ( "Deleting all mails strictly older than the {} timestamp..." "" . format ( self . newest_timestamp ) ) candidates = [ mail for mail in self . pool if mail . timestamp < self . newest_timestamp ] if len ( candidates ) == self . size : logger . warning ( "Skip deletion: all {} ...
Delete all older duplicates .
25,139
def delete_oldest ( self ) : logger . info ( "Deleting all mails sharing the oldest {} timestamp..." . format ( self . oldest_timestamp ) ) candidates = [ mail for mail in self . pool if mail . timestamp == self . oldest_timestamp ] if len ( candidates ) == self . size : logger . warning ( "Skip deletion: all {} mails ...
Delete all the oldest duplicates .
25,140
def delete_bigger ( self ) : logger . info ( "Deleting all mails strictly bigger than {} bytes..." . format ( self . smallest_size ) ) candidates = [ mail for mail in self . pool if mail . size > self . smallest_size ] if len ( candidates ) == self . size : logger . warning ( "Skip deletion: all {} mails share the same...
Delete all bigger duplicates .
25,141
def delete_biggest ( self ) : logger . info ( "Deleting all mails sharing the biggest size of {} bytes..." "" . format ( self . biggest_size ) ) candidates = [ mail for mail in self . pool if mail . size == self . biggest_size ] if len ( candidates ) == self . size : logger . warning ( "Skip deletion: all {} mails shar...
Delete all the biggest duplicates .
25,142
def delete_matching_path ( self ) : logger . info ( "Deleting all mails with file path matching the {} regexp..." "" . format ( self . conf . regexp . pattern ) ) candidates = [ mail for mail in self . pool if re . search ( self . conf . regexp , mail . path ) ] if len ( candidates ) == self . size : logger . warning (...
Delete all duplicates whose file path match the regexp .
25,143
def canonical_path ( path ) : return os . path . normcase ( os . path . realpath ( os . path . abspath ( os . path . expanduser ( path ) ) ) )
Return a normalized canonical path to a file or folder .
25,144
def add_maildir ( self , maildir_path ) : maildir_path = self . canonical_path ( maildir_path ) logger . info ( "Opening maildir at {} ..." . format ( maildir_path ) ) maildir = Maildir ( str ( maildir_path ) , factory = None , create = False ) logger . info ( "{} mails found." . format ( len ( maildir ) ) ) if self . ...
Load up a maildir and compute hash for each mail found .
25,145
def run ( self ) : logger . info ( "The {} strategy will be applied on each duplicate set." . format ( self . conf . strategy ) ) self . stats [ 'set_total' ] = len ( self . mails ) for hash_key , mail_path_set in self . mails . items ( ) : logger . info ( '---' ) duplicates = DuplicateSet ( hash_key , mail_path_set , ...
Run the deduplication process .
25,146
def report ( self ) : table = [ [ "Mails" , "Metric" ] ] table . append ( [ "Found" , self . stats [ 'mail_found' ] ] ) table . append ( [ "Skipped" , self . stats [ 'mail_skipped' ] ] ) table . append ( [ "Rejected" , self . stats [ 'mail_rejected' ] ] ) table . append ( [ "Kept" , self . stats [ 'mail_kept' ] ] ) tab...
Print user - friendly statistics and metrics .
25,147
def cli ( ctx ) : level = logger . level try : level_to_name = logging . _levelToName except AttributeError : level_to_name = logging . _levelNames level_name = level_to_name . get ( level , level ) logger . debug ( 'Verbosity set to {}.' . format ( level_name ) ) if ctx . invoked_subcommand is None : click . echo ( ct...
CLI for maildirs content analysis and deletion .
25,148
def validate_regexp ( ctx , param , value ) : if value : try : value = re . compile ( value ) except ValueError : raise click . BadParameter ( 'invalid regular expression.' ) return value
Validate and compile regular expression .
25,149
def validate_maildirs ( ctx , param , value ) : for path in value : for subdir in MD_SUBDIRS : if not os . path . isdir ( os . path . join ( path , subdir ) ) : raise click . BadParameter ( '{} is not a maildir (missing {!r} sub-directory).' . format ( path , subdir ) ) return value
Check that folders are maildirs .
25,150
def deduplicate ( ctx , strategy , time_source , regexp , dry_run , message_id , size_threshold , content_threshold , show_diff , maildirs ) : if not maildirs : click . echo ( ctx . get_help ( ) ) ctx . exit ( ) requirements = [ ( time_source , '-t/--time-source' , [ DELETE_OLDER , DELETE_OLDEST , DELETE_NEWER , DELETE...
Deduplicate mails from a set of maildir folders .
25,151
def hash ( ctx , message_id , message ) : conf = Config ( message_id = message_id ) mail = Mail ( message , conf ) logger . info ( mail . header_text ) logger . info ( '-' * 70 ) logger . info ( 'Hash: {}' . format ( mail . hash_key ) )
Take a single mail message and show its canonicalised form and hash .
25,152
def read_file ( * relative_path_elements ) : file_path = path . join ( path . dirname ( __file__ ) , * relative_path_elements ) return io . open ( file_path , encoding = 'utf8' ) . read ( ) . strip ( )
Return content of a file relative to this setup . py .
25,153
def message ( self ) : logger . debug ( "Parsing mail at {} ..." . format ( self . path ) ) with open ( self . path , 'rb' ) as mail_file : if PY2 : message = email . message_from_file ( mail_file ) else : message = email . message_from_binary_file ( mail_file ) return message
Read mail parse it and return a Message instance .
25,154
def timestamp ( self ) : if self . conf . time_source == CTIME : return os . path . getctime ( self . path ) return email . utils . mktime_tz ( email . utils . parsedate_tz ( self . message . get ( 'Date' ) ) )
Compute the normalized canonical timestamp of the mail .
25,155
def body_lines ( self ) : if not self . message . is_multipart ( ) : body = self . message . get_payload ( None , decode = True ) else : _ , _ , body = self . message . as_string ( ) . partition ( "\n\n" ) if isinstance ( body , bytes ) : for enc in [ 'ascii' , 'utf-8' ] : try : body = body . decode ( enc ) break excep...
Return a normalized list of lines from message s body .
25,156
def subject ( self ) : subject = self . message . get ( 'Subject' , '' ) subject , _ = re . subn ( r'\s+' , ' ' , subject ) return subject
Normalized subject .
25,157
def hash_key ( self ) : if self . conf . message_id : message_id = self . message . get ( 'Message-Id' ) if message_id : return message_id . strip ( ) logger . error ( "No Message-ID in {}: {}" . format ( self . path , self . header_text ) ) raise MissingMessageID return hashlib . sha224 ( self . canonical_headers ) . ...
Returns the canonical hash of a mail .
25,158
def canonical_headers ( self ) : canonical_headers = '' for header in HEADERS : if header not in self . message : continue for value in self . message . get_all ( header ) : canonical_value = self . canonical_header_value ( header , value ) if re . search ( r'\S' , canonical_value ) : canonical_headers += '{}: {}\n' . ...
Copy selected headers into a new string .
25,159
def enumerate ( cls ) : devices = { } for d in hid . enumerate ( 0 , 0 ) : vendor_id = d [ 'vendor_id' ] product_id = d [ 'product_id' ] serial_number = d [ 'serial_number' ] interface_number = d [ 'interface_number' ] path = d [ 'path' ] if devices . get ( serial_number ) != None and devices [ serial_number ] [ 0 ] ==...
Return a list of available KeepKey devices .
25,160
def is_connected ( self ) : for d in hid . enumerate ( 0 , 0 ) : if d [ 'path' ] == self . device : return True return False
Check if the device is still connected .
25,161
def session_end ( self ) : self . session_depth -= 1 self . session_depth = max ( 0 , self . session_depth ) if self . session_depth == 0 : self . _session_end ( )
End a session . See session_begin for an in depth description of TREZOR sessions .
25,162
def read ( self ) : if not self . ready_to_read ( ) : return None data = self . _read ( ) if data is None : return None return self . _parse_message ( data )
If there is data available to be read from the transport reads the data and tries to parse it as a protobuf message . If the parsing succeeds return a protobuf object . Otherwise returns None .
25,163
def read_blocking ( self ) : while True : data = self . _read ( ) if data != None : break return self . _parse_message ( data )
Same as read except blocks until data is available to be read .
25,164
def _get_cache_name ( function ) : module_name = _inspect . getfile ( function ) module_name = _os . path . abspath ( module_name ) cache_name = module_name cache_name = cache_name . replace ( '<' , '_lt_' ) cache_name = cache_name . replace ( '>' , '_gt_' ) tmpdir = _os . getenv ( 'TMPDIR' ) or _os . getenv ( 'TEMP' )...
returns a name for the module s cache db .
25,165
def filecache ( seconds_of_validity = None , fail_silently = False ) : def filecache_decorator ( function ) : @ _functools . wraps ( function ) def function_with_cache ( * args , ** kwargs ) : try : key = _args_key ( function , args , kwargs ) if key in function . _db : rv = function . _db [ key ] if seconds_of_validit...
filecache is called and the decorator should be returned .
25,166
def entity_data ( self , entity_type , entity_id , history_index ) : return self . entity_history ( entity_type , entity_id ) [ history_index ]
Return the data dict for an entity at a specific index of its history .
25,167
def get_entity ( self , entity_type , entity_id , history_index = - 1 , connected = True ) : if history_index < 0 and history_index != - 1 : history_index += len ( self . entity_history ( entity_type , entity_id ) ) if history_index < 0 : return None try : self . entity_data ( entity_type , entity_id , history_index ) ...
Return an object instance for the given entity_type and id .
25,168
def on_change ( self , callable_ ) : self . model . add_observer ( callable_ , self . entity_type , 'change' , self . entity_id )
Add a change observer to this entity .
25,169
def on_remove ( self , callable_ ) : self . model . add_observer ( callable_ , self . entity_type , 'remove' , self . entity_id )
Add a remove observer to this entity .
25,170
def dead ( self ) : return ( self . data is None or self . model . state . entity_data ( self . entity_type , self . entity_id , - 1 ) is None )
Returns True if this entity no longer exists in the underlying model .
25,171
def previous ( self ) : return self . model . state . get_entity ( self . entity_type , self . entity_id , self . _history_index - 1 , connected = False )
Return a copy of this object as was at its previous state in history .
25,172
def next ( self ) : if self . _history_index == - 1 : return None new_index = self . _history_index + 1 connected = ( new_index == len ( self . model . state . entity_history ( self . entity_type , self . entity_id ) ) - 1 ) return self . model . state . get_entity ( self . entity_type , self . entity_id , self . _hist...
Return a copy of this object at its next state in history .
25,173
async def connect ( self , * args , ** kwargs ) : await self . disconnect ( ) if 'endpoint' not in kwargs and len ( args ) < 2 : if args and 'model_name' in kwargs : raise TypeError ( 'connect() got multiple values for model_name' ) elif args : model_name = args [ 0 ] else : model_name = kwargs . pop ( 'model_name' , N...
Connect to a juju model .
25,174
async def add_local_charm_dir ( self , charm_dir , series ) : fh = tempfile . NamedTemporaryFile ( ) CharmArchiveGenerator ( charm_dir ) . make_archive ( fh . name ) with fh : func = partial ( self . add_local_charm , fh , series , os . stat ( fh . name ) . st_size ) charm_url = await self . _connector . loop . run_in_...
Upload a local charm to the model .
25,175
def add_local_charm ( self , charm_file , series , size = None ) : conn , headers , path_prefix = self . connection ( ) . https_connection ( ) path = "%s/charms?series=%s" % ( path_prefix , series ) headers [ 'Content-Type' ] = 'application/zip' if size : headers [ 'Content-Length' ] = size conn . request ( "POST" , pa...
Upload a local charm archive to the model .
25,176
def all_units_idle ( self ) : for unit in self . units . values ( ) : unit_status = unit . data [ 'agent-status' ] [ 'current' ] if unit_status != 'idle' : return False return True
Return True if all units are idle .
25,177
async def reset ( self , force = False ) : log . debug ( 'Resetting model' ) for app in self . applications . values ( ) : await app . destroy ( ) for machine in self . machines . values ( ) : await machine . destroy ( force = force ) await self . block_until ( lambda : len ( self . machines ) == 0 )
Reset the model to a clean state .
25,178
async def get_info ( self ) : facade = client . ClientFacade . from_connection ( self . connection ( ) ) self . _info = await facade . ModelInfo ( ) log . debug ( 'Got ModelInfo: %s' , vars ( self . info ) ) return self . info
Return a client . ModelInfo object for this Model .
25,179
def add_observer ( self , callable_ , entity_type = None , action = None , entity_id = None , predicate = None ) : observer = _Observer ( callable_ , entity_type , action , entity_id , predicate ) self . _observers [ observer ] = callable_
Register an on - model - change callback
25,180
def _watch ( self ) : async def _all_watcher ( ) : try : allwatcher = client . AllWatcherFacade . from_connection ( self . connection ( ) ) while not self . _watch_stopping . is_set ( ) : try : results = await utils . run_with_interrupt ( allwatcher . Next ( ) , self . _watch_stopping , loop = self . _connector . loop ...
Start an asynchronous watch against this model .
25,181
async def _notify_observers ( self , delta , old_obj , new_obj ) : if new_obj and not old_obj : delta . type = 'add' log . debug ( 'Model changed: %s %s %s' , delta . entity , delta . type , delta . get_id ( ) ) for o in self . _observers : if o . cares_about ( delta ) : asyncio . ensure_future ( o ( delta , old_obj , ...
Call observing callbacks notifying them of a change in model state
25,182
async def _wait ( self , entity_type , entity_id , action , predicate = None ) : q = asyncio . Queue ( loop = self . _connector . loop ) async def callback ( delta , old , new , model ) : await q . put ( delta . get_id ( ) ) self . add_observer ( callback , entity_type , action , entity_id , predicate ) entity_id = awa...
Block the calling routine until a given action has happened to the given entity
25,183
async def _wait_for_new ( self , entity_type , entity_id ) : if entity_id in self . state . _live_entity_map ( entity_type ) : return self . state . _live_entity_map ( entity_type ) [ entity_id ] return await self . _wait ( entity_type , entity_id , None )
Wait for a new object to appear in the Model and return it .
25,184
async def wait_for_action ( self , action_id ) : if action_id . startswith ( "action-" ) : action_id = action_id [ 7 : ] def predicate ( delta ) : return delta . data [ 'status' ] in ( 'completed' , 'failed' ) return await self . _wait ( 'action' , action_id , None , predicate )
Given an action wait for it to complete .
25,185
async def add_machine ( self , spec = None , constraints = None , disks = None , series = None ) : params = client . AddMachineParams ( ) if spec : if spec . startswith ( "ssh:" ) : placement , target , private_key_path = spec . split ( ":" ) user , host = target . split ( "@" ) sshProvisioner = provisioner . SSHProvis...
Start a new empty machine and optionally a container or add a container to a machine .
25,186
async def add_relation ( self , relation1 , relation2 ) : connection = self . connection ( ) app_facade = client . ApplicationFacade . from_connection ( connection ) log . debug ( 'Adding relation %s <-> %s' , relation1 , relation2 ) def _find_relation ( * specs ) : for rel in self . relations : if rel . matches ( * sp...
Add a relation between two applications .
25,187
async def add_ssh_key ( self , user , key ) : key_facade = client . KeyManagerFacade . from_connection ( self . connection ( ) ) return await key_facade . AddKeys ( [ key ] , user )
Add a public SSH key to this model .
25,188
def debug_log ( self , no_tail = False , exclude_module = None , include_module = None , include = None , level = None , limit = 0 , lines = 10 , replay = False , exclude = None ) : raise NotImplementedError ( )
Get log messages for this model .
25,189
async def _deploy ( self , charm_url , application , series , config , constraints , endpoint_bindings , resources , storage , channel = None , num_units = None , placement = None , devices = None ) : log . info ( 'Deploying %s' , charm_url ) config = { k : str ( v ) for k , v in config . items ( ) } config = yaml . du...
Logic shared between Model . deploy and BundleHandler . deploy .
25,190
async def destroy_unit ( self , * unit_names ) : connection = self . connection ( ) app_facade = client . ApplicationFacade . from_connection ( connection ) log . debug ( 'Destroying unit%s %s' , 's' if len ( unit_names ) == 1 else '' , ' ' . join ( unit_names ) ) return await app_facade . DestroyUnits ( list ( unit_na...
Destroy units by name .
25,191
async def get_config ( self ) : config_facade = client . ModelConfigFacade . from_connection ( self . connection ( ) ) result = await config_facade . ModelGet ( ) config = result . config for key , value in config . items ( ) : config [ key ] = ConfigValue . from_json ( value ) return config
Return the configuration settings for this model .
25,192
async def get_constraints ( self ) : constraints = { } client_facade = client . ClientFacade . from_connection ( self . connection ( ) ) result = await client_facade . GetModelConstraints ( ) if result . constraints : constraint_types = [ a for a in dir ( result . constraints ) if a in Value . _toSchema . keys ( ) ] fo...
Return the machine constraints for this model .
25,193
def restore_backup ( self , bootstrap = False , constraints = None , archive = None , backup_id = None , upload_tools = False ) : raise NotImplementedError ( )
Restore a backup archive to a new controller .
25,194
async def set_config ( self , config ) : config_facade = client . ModelConfigFacade . from_connection ( self . connection ( ) ) for key , value in config . items ( ) : if isinstance ( value , ConfigValue ) : config [ key ] = value . value await config_facade . ModelSet ( config )
Set configuration keys on this model .
25,195
async def set_constraints ( self , constraints ) : client_facade = client . ClientFacade . from_connection ( self . connection ( ) ) await client_facade . SetModelConstraints ( application = '' , constraints = constraints )
Set machine constraints on this model .
25,196
async def get_action_output ( self , action_uuid , wait = None ) : action_facade = client . ActionFacade . from_connection ( self . connection ( ) ) entity = [ { 'tag' : tag . action ( action_uuid ) } ] async def _wait_for_action_status ( ) : while True : action_output = await action_facade . Actions ( entity ) if acti...
Get the results of an action by ID .
25,197
async def get_action_status ( self , uuid_or_prefix = None , name = None ) : results = { } action_results = [ ] action_facade = client . ActionFacade . from_connection ( self . connection ( ) ) if name : name_results = await action_facade . FindActionsByNames ( [ name ] ) action_results . extend ( name_results . action...
Get the status of all actions filtered by ID ID prefix or name .
25,198
async def get_status ( self , filters = None , utc = False ) : client_facade = client . ClientFacade . from_connection ( self . connection ( ) ) return await client_facade . FullStatus ( filters )
Return the status of the model .
25,199
def sync_tools ( self , all_ = False , destination = None , dry_run = False , public = False , source = None , stream = None , version = None ) : raise NotImplementedError ( )
Copy Juju tools into this model .