idx
int64
0
251k
question
stringlengths
53
3.53k
target
stringlengths
5
1.23k
len_question
int64
20
893
len_target
int64
3
238
28,500
def deserialize_instance ( model , data = { } ) : ret = model ( ) for k , v in data . items ( ) : if v is not None : try : f = model . _meta . get_field ( k ) if isinstance ( f , DateTimeField ) : v = dateparse . parse_datetime ( v ) elif isinstance ( f , TimeField ) : v = dateparse . parse_time ( v ) elif isinstance ( f , DateField ) : v = dateparse . parse_date ( v ) except FieldDoesNotExist : pass setattr ( ret , k , v ) return ret
Translate raw data into a model instance .
138
9
28,501
def get_media_urls ( tweet ) : media = get_media_entities ( tweet ) urls = [ m . get ( "media_url_https" ) for m in media ] if media else [ ] return urls
Gets the https links to each media entity in the tweet .
51
13
28,502
def get_hashtags ( tweet ) : entities = get_entities ( tweet ) hashtags = entities . get ( "hashtags" ) hashtags = [ tag [ "text" ] for tag in hashtags ] if hashtags else [ ] return hashtags
Get a list of hashtags in the Tweet Note that in the case of a quote - tweet this does not return the hashtags in the quoted status .
55
31
28,503
def get_embedded_tweet ( tweet ) : if tweet . retweeted_tweet is not None : return tweet . retweeted_tweet elif tweet . quoted_tweet is not None : return tweet . quoted_tweet else : return None
Get the retweeted Tweet OR the quoted Tweet and return it as a dictionary
55
15
28,504
def get_bio ( tweet ) : if is_original_format ( tweet ) : bio_or_none = tweet [ "user" ] . get ( "description" , "" ) else : bio_or_none = tweet [ "actor" ] . get ( "summary" , "" ) if bio_or_none is None : return "" else : return bio_or_none
Get the bio text of the user who posted the Tweet
82
11
28,505
def is_original_format ( tweet ) : # deleted due to excess checking; it's a key lookup and does not need any # operational optimization if "created_at" in tweet : original_format = True elif "postedTime" in tweet : original_format = False else : raise NotATweetError ( "This dict has neither 'created_at' or 'postedTime' as keys" ) return original_format
Simple checker to flag the format of a tweet .
89
11
28,506
def get_all_keys ( tweet , parent_key = '' ) : items = [ ] for k , v in tweet . items ( ) : new_key = parent_key + " " + k if isinstance ( v , dict ) : items . extend ( get_all_keys ( v , parent_key = new_key ) ) else : items . append ( new_key . strip ( " " ) ) return items
Takes a tweet object and recursively returns a list of all keys contained in this level and all nexstted levels of the tweet .
91
30
28,507
def key_validation_check ( tweet_keys_list , superset_keys , minset_keys ) : # check for keys that must be present tweet_keys = set ( tweet_keys_list ) minset_overlap = tweet_keys & minset_keys if minset_overlap != minset_keys : raise UnexpectedFormatError ( "keys ({}) missing from Tweet (Public API data is not supported)" . format ( minset_keys - tweet_keys ) ) # check for keys that could be present unexpected_keys = tweet_keys - superset_keys if len ( unexpected_keys ) > 0 : raise UnexpectedFormatError ( "Unexpected keys ({}) are in this Tweet" . format ( unexpected_keys ) ) return 0
Validates the keys present in a Tweet .
162
9
28,508
def check_tweet ( tweet , validation_checking = False ) : if "id" not in tweet : raise NotATweetError ( "This text has no 'id' key" ) original_format = is_original_format ( tweet ) if original_format : _check_original_format_tweet ( tweet , validation_checking = validation_checking ) else : _check_activity_streams_tweet ( tweet , validation_checking = validation_checking ) return original_format
Ensures a tweet is valid and determines the type of format for the tweet .
104
17
28,509
def get_lang ( tweet ) : if is_original_format ( tweet ) : lang_field = "lang" else : lang_field = "twitter_lang" if tweet [ lang_field ] is not None and tweet [ lang_field ] != "und" : return tweet [ lang_field ] else : return None
Get the language that the Tweet is written in .
69
10
28,510
def get_poll_options ( tweet ) : if is_original_format ( tweet ) : try : poll_options_text = [ ] for p in tweet [ "entities" ] [ "polls" ] : for o in p [ "options" ] : poll_options_text . append ( o [ "text" ] ) return poll_options_text except KeyError : return [ ] else : raise NotAvailableError ( "Gnip activity-streams format does not" + " return poll options" )
Get the text in the options of a poll as a list - If there is no poll in the Tweet return an empty list - If the Tweet is in activity - streams format raise NotAvailableError
111
39
28,511
def remove_links ( text ) : tco_link_regex = re . compile ( "https?://t.co/[A-z0-9].*" ) generic_link_regex = re . compile ( "(https?://)?(\w*[.]\w+)+([/?=&]+\w+)*" ) remove_tco = re . sub ( tco_link_regex , " " , text ) remove_generic = re . sub ( generic_link_regex , " " , remove_tco ) return remove_generic
Helper function to remove the links from the input text
124
10
28,512
def get_matching_rules ( tweet ) : if is_original_format ( tweet ) : rules = tweet . get ( "matching_rules" ) else : gnip = tweet . get ( "gnip" ) rules = gnip . get ( "matching_rules" ) if gnip else None return rules
Retrieves the matching rules for a tweet with a gnip field enrichment .
69
16
28,513
def get_profile_location ( tweet ) : if is_original_format ( tweet ) : try : return tweet [ "user" ] [ "derived" ] [ "locations" ] [ 0 ] except KeyError : return None else : try : location = tweet [ "gnip" ] [ "profileLocations" ] [ 0 ] reconstructed_original_format = { } if location [ "address" ] . get ( "country" , None ) is not None : reconstructed_original_format [ "country" ] = location [ "address" ] [ "country" ] if location [ "address" ] . get ( "countryCode" , None ) is not None : reconstructed_original_format [ "country_code" ] = location [ "address" ] [ "countryCode" ] if location [ "address" ] . get ( "locality" , None ) is not None : reconstructed_original_format [ "locality" ] = location [ "address" ] [ "locality" ] if location [ "address" ] . get ( "region" , None ) is not None : reconstructed_original_format [ "region" ] = location [ "address" ] [ "region" ] if location [ "address" ] . get ( "subRegion" , None ) is not None : reconstructed_original_format [ "sub_region" ] = location [ "address" ] [ "subRegion" ] if location . get ( "displayName" , None ) is not None : reconstructed_original_format [ "full_name" ] = location [ "displayName" ] if location . get ( "geo" , None ) is not None : reconstructed_original_format [ "geo" ] = location [ "geo" ] return reconstructed_original_format except KeyError : return None
Get user s derived location data from the profile location enrichment If unavailable returns None .
384
16
28,514
def get_generator ( tweet ) : if is_original_format ( tweet ) : if sys . version_info [ 0 ] == 3 and sys . version_info [ 1 ] >= 4 : parser = GeneratorHTMLParser ( convert_charrefs = True ) else : parser = GeneratorHTMLParser ( ) parser . feed ( tweet [ "source" ] ) return { "link" : parser . generator_link , "name" : parser . generator_name } else : return { "link" : tweet [ "generator" ] [ "link" ] , "name" : tweet [ "generator" ] [ "displayName" ] }
Get information about the application that generated the Tweet
137
9
28,515
def lazy_property ( fn ) : attr_name = '_lazy_' + fn . __name__ @ property @ wraps ( fn ) def _lazy_property ( self ) : if not hasattr ( self , attr_name ) : setattr ( self , attr_name , fn ( self ) ) return getattr ( self , attr_name ) return _lazy_property
Decorator that makes a property lazy - evaluated whilst preserving docstrings .
87
15
28,516
def quoted_tweet ( self ) : quote_tweet = tweet_embeds . get_quoted_tweet ( self ) if quote_tweet is not None : try : return Tweet ( quote_tweet ) except NotATweetError as nate : raise ( NotATweetError ( "The quote-tweet payload appears malformed." + " Failed with '{}'" . format ( nate ) ) ) else : return None
The quoted Tweet as a Tweet object If the Tweet is not a quote Tweet return None If the quoted Tweet payload cannot be loaded as a Tweet this will raise a NotATweetError
95
36
28,517
def retweeted_tweet ( self ) : retweet = tweet_embeds . get_retweeted_tweet ( self ) if retweet is not None : try : return Tweet ( retweet ) except NotATweetError as nate : raise ( NotATweetError ( "The retweet payload appears malformed." + " Failed with '{}'" . format ( nate ) ) ) else : return None
The retweeted Tweet as a Tweet object If the Tweet is not a Retweet return None If the Retweet payload cannot be loaded as a Tweet this will raise a NotATweetError
85
37
28,518
def embedded_tweet ( self ) : embedded_tweet = tweet_embeds . get_embedded_tweet ( self ) if embedded_tweet is not None : try : return Tweet ( embedded_tweet ) except NotATweetError as nate : raise ( NotATweetError ( "The embedded tweet payload {} appears malformed." + " Failed with '{}'" . format ( embedded_tweet , nate ) ) ) else : return None
Get the retweeted Tweet OR the quoted Tweet and return it as a Tweet object
99
16
28,519
def is_applicable ( cls , conf ) : return all ( ( URLPromoter . is_applicable ( conf ) , not cls . needs_firefox ( conf ) , ) )
Return whether this promoter is applicable for given conf
44
9
28,520
def xpath_selector ( selector , html , select_all ) : from defusedxml import lxml as dlxml from lxml import etree import re # lxml requires argument to be bytes # see https://github.com/kibitzr/kibitzr/issues/47 encoded = html . encode ( 'utf-8' ) root = dlxml . fromstring ( encoded , parser = etree . HTMLParser ( ) ) xpath_results = root . xpath ( selector ) if not xpath_results : logger . warning ( 'XPath selector not found: %r' , selector ) return False , html if isinstance ( xpath_results , list ) : if select_all is False : xpath_results = xpath_results [ 0 : 1 ] else : xpath_results = [ xpath_results ] # Serialize xpath_results # see https://lxml.de/xpathxslt.html#xpath-return-values results = [ ] for r in xpath_results : # namespace declarations if isinstance ( r , tuple ) : results . append ( "%s=\"%s\"" % ( r [ 0 ] , r [ 1 ] ) ) # an element elif hasattr ( r , 'tag' ) : results . append ( re . sub ( r'\s+' , ' ' , dlxml . tostring ( r , method = 'html' , encoding = 'unicode' ) ) ) else : results . append ( r ) return True , u"\n" . join ( six . text_type ( x ) . strip ( ) for x in results )
Returns Xpath match for selector within html .
353
9
28,521
def register ( ) : registry = { key : bake_html ( key ) for key in ( 'css' , 'css-all' , 'tag' , 'text' ) } registry [ 'xpath' ] = bake_parametrized ( xpath_selector , select_all = False ) registry [ 'xpath-all' ] = bake_parametrized ( xpath_selector , select_all = True ) return registry
Return dictionary of tranform factories
97
7
28,522
def reread ( self ) : logger . debug ( "Loading settings from %s" , os . path . abspath ( self . filename ) ) conf = self . read_conf ( ) changed = self . creds . reread ( ) checks = self . parser . parse_checks ( conf ) if self . checks != checks : self . checks = checks return True else : return changed
Read configuration file and substitute references into checks conf
81
9
28,523
def reread ( self ) : logger . debug ( "Loading credentials from %s" , os . path . abspath ( self . creds_filename ) ) creds = { } try : with self . open_creds ( ) as fp : creds = yaml . safe_load ( fp ) except IOError : logger . info ( "No credentials file found at %s" , os . path . abspath ( self . creds_filename ) ) except : logger . exception ( "Error loading credentials file" ) if creds != self . creds : self . creds = creds return True return False
Read and parse credentials file . If something goes wrong log exception and continue .
134
15
28,524
def parse_checks ( self , conf ) : checks = conf . get ( 'checks' , conf . get ( 'pages' , [ ] ) ) checks = list ( self . unpack_batches ( checks ) ) checks = list ( self . unpack_templates ( checks , conf . get ( 'templates' , { } ) ) ) self . inject_missing_names ( checks ) for check in checks : self . inject_scenarios ( check , conf . get ( 'scenarios' , { } ) ) self . inject_notifiers ( check , conf . get ( 'notifiers' , { } ) ) self . expand_schedule ( check ) return checks
Unpack configuration from human - friendly form to strict check definitions .
147
13
28,525
def create_boilerplate ( ) : if not os . path . exists ( 'kibitzr.yml' ) : with open ( 'kibitzr.yml' , 'wt' ) as fp : logger . info ( "Saving sample check in kibitzr.yml" ) fp . write ( KIBITZR_YML ) else : logger . info ( "kibitzr.yml already exists. Skipping" ) if not os . path . exists ( 'kibitzr-creds.yml' ) : with open ( 'kibitzr-creds.yml' , 'wt' ) as fp : logger . info ( "Creating kibitzr-creds.yml" ) fp . write ( KIBITZR_CREDS_YML ) os . chmod ( 'kibitzr-creds.yml' , stat . S_IRUSR | stat . S_IWUSR ) else : logger . info ( "kibitzr-creds.yml already exists. Skipping" )
Create kibitzr . yml and kibitzr - creds . yml in current directory if they do not exist .
249
28
28,526
def cleanup ( ) : temp_dirs = [ ] for key in ( 'headless' , 'headed' ) : if FIREFOX_INSTANCE [ key ] is not None : if FIREFOX_INSTANCE [ key ] . profile : temp_dirs . append ( FIREFOX_INSTANCE [ key ] . profile . profile_dir ) try : FIREFOX_INSTANCE [ key ] . quit ( ) FIREFOX_INSTANCE [ key ] = None except : logger . exception ( "Exception occurred in browser cleanup" ) for temp_dir in temp_dirs : shutil . rmtree ( temp_dir , ignore_errors = True )
Must be called before exit
138
5
28,527
def firefox ( headless = True ) : from selenium import webdriver from selenium . webdriver . firefox . options import Options if headless : driver_key = 'headless' firefox_options = Options ( ) firefox_options . add_argument ( '-headless' ) else : driver_key = 'headed' firefox_options = None # Load profile, if it exists: if os . path . isdir ( PROFILE_DIR ) : firefox_profile = webdriver . FirefoxProfile ( PROFILE_DIR ) else : firefox_profile = None if FIREFOX_INSTANCE [ driver_key ] is None : FIREFOX_INSTANCE [ driver_key ] = webdriver . Firefox ( firefox_profile = firefox_profile , firefox_options = firefox_options , ) yield FIREFOX_INSTANCE [ driver_key ]
Context manager returning Selenium webdriver . Instance is reused and must be cleaned up on exit .
189
20
28,528
def fetcher_factory ( conf ) : global PROMOTERS applicable = [ ] if not PROMOTERS : PROMOTERS = load_promoters ( ) for promoter in PROMOTERS : if promoter . is_applicable ( conf ) : applicable . append ( ( promoter . PRIORITY , promoter ) ) if applicable : best_match = sorted ( applicable , reverse = True ) [ 0 ] [ 1 ] return best_match ( conf ) else : raise ConfigurationError ( 'No fetcher is applicable for "{0}"' . format ( conf [ 'name' ] ) )
Return initialized fetcher capable of processing given conf .
127
10
28,529
def fetch ( self , conf ) : url = conf [ 'url' ] # If Firefox is broken, it will raise here, causing kibitzr restart: self . driver . set_window_size ( 1366 , 800 ) self . driver . implicitly_wait ( 2 ) self . driver . get ( url ) try : self . _run_automation ( conf ) html = self . _get_html ( ) except : logger . exception ( "Exception occurred while fetching" ) return False , traceback . format_exc ( ) finally : self . _close_tab ( ) return True , html
1 . Fetch URL 2 . Run automation . 3 . Return HTML . 4 . Close the tab .
129
21
28,530
def _run_automation ( self , conf ) : self . _fill_form ( self . _find_form ( conf ) ) self . _run_scenario ( conf ) self . _delay ( conf )
1 . Fill form . 2 . Run scenario . 3 . Delay .
47
14
28,531
def _fill_form ( form ) : clicked = False last_element = None for field in form : if field [ 'text' ] : field [ 'element' ] . clear ( ) field [ 'element' ] . send_keys ( field [ 'text' ] ) if field [ 'click' ] : field [ 'element' ] . click ( ) clicked = True last_element = field [ 'element' ] if last_element : if not clicked : last_element . submit ( )
Fill all inputs with provided Jinja2 templates . If no field had click key submit last element .
105
20
28,532
def _find_element ( self , selector , selector_type , check_displayed = False ) : if selector_type == 'css' : elements = self . driver . find_elements_by_css_selector ( selector ) elif selector_type == 'xpath' : elements = self . driver . find_elements_by_xpath ( selector ) elif selector_type == 'id' : elements = self . driver . find_elements_by_css_selector ( '#' + selector ) else : raise RuntimeError ( "Unknown selector_type: %s for selector: %s" % ( selector_type , selector ) ) for element in elements : if check_displayed : if not element . is_displayed ( ) or sum ( element . size . values ( ) ) <= 0 : continue return element
Return first matching displayed element of non - zero size or None if nothing found
181
15
28,533
def _close_tab ( self ) : old_tab = self . driver . current_window_handle self . driver . execute_script ( '''window.open("about:blank", "_blank");''' ) self . driver . switch_to . window ( old_tab ) self . driver . close ( ) self . driver . switch_to . window ( self . driver . window_handles [ 0 ] )
Create a new tab and close the old one to avoid idle page resource usage
89
15
28,534
def write ( self , content ) : with io . open ( self . target , 'w' , encoding = 'utf-8' ) as fp : fp . write ( content ) if not content . endswith ( u'\n' ) : fp . write ( u'\n' )
Save content on disk
66
4
28,535
def commit ( self ) : self . git . add ( '-A' , '.' ) try : self . git . commit ( '-m' , self . commit_msg ) return True except sh . ErrorReturnCode_1 : return False
git commit and return whether there were changes
52
8
28,536
def ensure_repo_exists ( self ) : if not os . path . isdir ( self . cwd ) : os . makedirs ( self . cwd ) if not os . path . isdir ( os . path . join ( self . cwd , ".git" ) ) : self . git . init ( ) self . git . config ( "user.email" , "you@example.com" ) self . git . config ( "user.name" , "Your Name" )
Create git repo if one does not exist yet
108
9
28,537
def word ( self ) : try : output = ensure_unicode ( self . git . diff ( '--no-color' , '--word-diff=plain' , 'HEAD~1:content' , 'HEAD:content' , ) . stdout ) except sh . ErrorReturnCode_128 : result = ensure_unicode ( self . git . show ( "HEAD:content" ) . stdout ) else : ago = ensure_unicode ( self . git . log ( '-2' , '--pretty=format:last change was %cr' , 'content' ) . stdout ) . splitlines ( ) lines = output . splitlines ( ) result = u'\n' . join ( itertools . chain ( itertools . islice ( itertools . dropwhile ( lambda x : not x . startswith ( '@@' ) , lines [ 1 : ] , ) , 1 , None , ) , itertools . islice ( ago , 1 , None ) , ) ) return result
Return last changes with word diff
221
6
28,538
def default ( self ) : output = ensure_unicode ( self . git . log ( '-1' , '-p' , '--no-color' , '--format=%s' , ) . stdout ) lines = output . splitlines ( ) return u'\n' . join ( itertools . chain ( lines [ : 1 ] , itertools . islice ( itertools . dropwhile ( lambda x : not x . startswith ( '+++' ) , lines [ 1 : ] , ) , 1 , None , ) , ) )
Return last changes in truncated unified diff format
124
9
28,539
def temp_file ( self ) : with tempfile . NamedTemporaryFile ( suffix = '.bat' , delete = False ) as fp : try : logger . debug ( "Saving code to %r" , fp . name ) fp . write ( self . code . encode ( 'utf-8' ) ) fp . close ( ) yield fp . name finally : os . remove ( fp . name )
Create temporary file with code and yield its path . Works both on Windows and Linux
91
16
28,540
def once ( ctx , name ) : from kibitzr . app import Application app = Application ( ) sys . exit ( app . run ( once = True , log_level = ctx . obj [ 'log_level' ] , names = name ) )
Run kibitzr checks once and exit
56
9
28,541
def subtract_and_intersect_circle ( self , center , radius ) : # Check whether the target circle intersects us center = np . asarray ( center , float ) d = np . linalg . norm ( center - self . center ) if d > ( radius + self . radius - tol ) : return [ self , VennEmptyRegion ( ) ] # The circle does not intersect us elif d < tol : if radius > self . radius - tol : # We are completely covered by that circle or we are the same circle return [ VennEmptyRegion ( ) , self ] else : # That other circle is inside us and smaller than us - we can't deal with it raise VennRegionException ( "Invalid configuration of circular regions (holes are not supported)." ) else : # We *must* intersect the other circle. If it is not the case, then it is inside us completely, # and we'll complain. intersections = circle_circle_intersection ( self . center , self . radius , center , radius ) if intersections is None : raise VennRegionException ( "Invalid configuration of circular regions (holes are not supported)." ) elif np . all ( abs ( intersections [ 0 ] - intersections [ 1 ] ) < tol ) and self . radius < radius : # There is a single intersection point (i.e. we are touching the circle), # the circle to be subtracted is not outside of us (this was checked before), and is larger than us. # This is a particular corner case that is not dealt with correctly by the general-purpose code below and must # be handled separately return [ VennEmptyRegion ( ) , self ] else : # Otherwise the subtracted region is a 2-arc-gon # Before we need to convert the intersection points as angles wrt each circle. a_1 = vector_angle_in_degrees ( intersections [ 0 ] - self . center ) a_2 = vector_angle_in_degrees ( intersections [ 1 ] - self . center ) b_1 = vector_angle_in_degrees ( intersections [ 0 ] - center ) b_2 = vector_angle_in_degrees ( intersections [ 1 ] - center ) # We must take care of the situation where the intersection points happen to be the same if ( abs ( b_1 - b_2 ) < tol ) : b_1 = b_2 - tol / 2 if ( abs ( a_1 - a_2 ) < tol ) : a_2 = a_1 + tol / 2 # The subtraction is a 2-arc-gon [(AB, B-), (BA, A+)] s_arc1 = Arc ( center , radius , b_1 , b_2 , False ) s_arc2 = Arc ( self . center , self . radius , a_2 , a_1 , True ) subtraction = VennArcgonRegion ( [ s_arc1 , s_arc2 ] ) # .. and the intersection is a 2-arc-gon [(AB, A+), (BA, B+)] i_arc1 = Arc ( self . center , self . radius , a_1 , a_2 , True ) i_arc2 = Arc ( center , radius , b_2 , b_1 , True ) intersection = VennArcgonRegion ( [ i_arc1 , i_arc2 ] ) return [ subtraction , intersection ]
Will throw a VennRegionException if the circle to be subtracted is completely inside and not touching the given region .
728
24
28,542
def size ( self ) : polygon_area = 0 for a in self . arcs : polygon_area += box_product ( a . start_point ( ) , a . end_point ( ) ) polygon_area /= 2.0 return polygon_area + sum ( [ a . sign * a . segment_area ( ) for a in self . arcs ] )
Return the area of the patch . The area can be computed using the standard polygon area formula + signed segment areas of each arc .
81
27
28,543
def make_patch ( self ) : path = [ self . arcs [ 0 ] . start_point ( ) ] for a in self . arcs : if a . direction : vertices = Path . arc ( a . from_angle , a . to_angle ) . vertices else : vertices = Path . arc ( a . to_angle , a . from_angle ) . vertices vertices = vertices [ np . arange ( len ( vertices ) - 1 , - 1 , - 1 ) ] vertices = vertices * a . radius + a . center path = path + list ( vertices [ 1 : ] ) codes = [ 1 ] + [ 4 ] * ( len ( path ) - 1 ) # NB: We could also add a CLOSEPOLY code (and a random vertex) to the end return PathPatch ( Path ( path , codes ) )
Retuns a matplotlib PathPatch representing the current region .
186
13
28,544
def label_position ( self ) : reg_sizes = [ ( r . size ( ) , r ) for r in self . pieces ] reg_sizes . sort ( ) return reg_sizes [ - 1 ] [ 1 ] . label_position ( )
Find the largest region and position the label in that .
56
11
28,545
def make_patch ( self ) : paths = [ p . make_patch ( ) . get_path ( ) for p in self . pieces ] vertices = np . concatenate ( [ p . vertices for p in paths ] ) codes = np . concatenate ( [ p . codes for p in paths ] ) return PathPatch ( Path ( vertices , codes ) )
Currently only works if all the pieces are Arcgons . In this case returns a multiple - piece path . Otherwise throws an exception .
81
27
28,546
def mid_point ( self ) : midpoint_angle = self . from_angle + self . sign * self . length_degrees ( ) / 2 return self . angle_as_point ( midpoint_angle )
Returns the midpoint of the arc as a 1x2 numpy array .
47
16
28,547
def hide_zeroes ( self ) : for v in self . subset_labels : if v is not None and v . get_text ( ) == '0' : v . set_visible ( False )
Sometimes it makes sense to hide the labels for subsets whose size is zero . This utility method does this .
45
22
28,548
def calculate_dict_diff ( old_params : dict , new_params : dict ) : # Ignore all None values as those cannot be saved. old_params = remove_None ( old_params ) new_params = remove_None ( new_params ) params_diff = { } for key , value in old_params . items ( ) : if key in new_params : if value != new_params [ key ] : params_diff [ key ] = new_params [ key ] else : params_diff [ key ] = '' for key , value in new_params . items ( ) : if key not in old_params : params_diff [ key ] = value return params_diff
Return the parameters based on the difference .
147
8
28,549
def validate_file ( parser , arg ) : if not os . path . isfile ( arg ) : parser . error ( "%s is not a file." % arg ) return arg
Validates that arg is a valid file .
38
9
28,550
def register ( parser ) : cmd_machines . register ( parser ) cmd_machine . register ( parser ) cmd_allocate . register ( parser ) cmd_deploy . register ( parser ) cmd_commission . register ( parser ) cmd_release . register ( parser ) cmd_abort . register ( parser ) cmd_mark_fixed . register ( parser ) cmd_mark_broken . register ( parser ) cmd_power_off . register ( parser ) cmd_power_on . register ( parser ) cmd_ssh . register ( parser )
Register commands with the given parser .
116
7
28,551
def perform_action ( self , action , machines , params , progress_title , success_title ) : if len ( machines ) == 0 : return 0 with utils . Spinner ( ) as context : return self . _async_perform_action ( context , action , list ( machines ) , params , progress_title , success_title )
Perform the action on the set of machines .
74
10
28,552
def get_machines ( self , origin , hostnames ) : hostnames = { hostname : True for hostname in hostnames } machines = origin . Machines . read ( hostnames = hostnames ) machines = [ machine for machine in machines if hostnames . pop ( machine . hostname , False ) ] if len ( hostnames ) > 0 : raise CommandError ( "Unable to find %s %s." % ( "machines" if len ( hostnames ) > 1 else "machine" , ',' . join ( hostnames ) ) ) return machines
Return a set of machines based on hostnames .
121
10
28,553
def add_ssh_options ( self , parser ) : parser . add_argument ( "--username" , metavar = 'USER' , help = ( "Username for the SSH connection." ) ) parser . add_argument ( "--boot-only" , action = "store_true" , help = ( "Only use the IP addresses on the machine's boot interface." ) )
Add the SSH arguments to the parser .
83
8
28,554
def get_ip_addresses ( self , machine , * , boot_only = False , discovered = False ) : boot_ips = [ link . ip_address for link in machine . boot_interface . links if link . ip_address ] if boot_only : if boot_ips : return boot_ips elif discovered : return [ link . ip_address for link in machine . boot_interface . discovered if link . ip_address ] else : return [ ] else : other_ips = [ link . ip_address for interface in machine . interfaces for link in interface . links if ( interface . id != machine . boot_interface . id and link . ip_address ) ] ips = boot_ips + other_ips if ips : return ips elif discovered : return [ link . ip_address for link in machine . boot_interface . discovered if link . ip_address ] + [ link . ip_address for interface in machine . interfaces for link in interface . discovered if ( interface . id != machine . boot_interface . id and link . ip_address ) ] else : return [ ]
Return all IP address for machine .
234
7
28,555
async def _async_get_sshable_ips ( self , ip_addresses ) : async def _async_ping ( ip_address ) : try : reader , writer = await asyncio . wait_for ( asyncio . open_connection ( ip_address , 22 ) , timeout = 5 ) except ( OSError , TimeoutError ) : return None try : line = await reader . readline ( ) finally : writer . close ( ) if line . startswith ( b'SSH-' ) : return ip_address ssh_ips = await asyncio . gather ( * [ _async_ping ( ip_address ) for ip_address in ip_addresses ] ) return [ ip_address for ip_address in ssh_ips if ip_address is not None ]
Return list of all IP address that could be pinged .
171
12
28,556
def _check_ssh ( self , * args ) : ssh = subprocess . Popen ( args , stdin = subprocess . DEVNULL , stdout = subprocess . DEVNULL , stderr = subprocess . DEVNULL ) ssh . wait ( ) return ssh . returncode == 0
Check if SSH connection can be made to IP with username .
65
12
28,557
def _determine_username ( self , ip ) : ssh = subprocess . Popen ( [ "ssh" , "-o" , "UserKnownHostsFile=/dev/null" , "-o" , "StrictHostKeyChecking=no" , "root@%s" % ip ] , stdin = subprocess . DEVNULL , stdout = subprocess . PIPE , stderr = subprocess . DEVNULL ) first_line = ssh . stdout . readline ( ) ssh . kill ( ) ssh . wait ( ) if first_line : match = re . search ( r"Please login as the user \"(\w+)\" rather than " r"the user \"root\"." , first_line . decode ( 'utf-8' ) ) if match : return match . groups ( ) [ 0 ] else : return None
SSH in as root and determine the username .
186
10
28,558
def ssh ( self , machine , * , username = None , command = None , boot_only = False , discovered = False , wait = 300 ) : start_time = time . monotonic ( ) with utils . Spinner ( ) as context : context . msg = colorized ( "{autoblue}Determining{/autoblue} best IP for %s" % ( machine . hostname ) ) ip_addresses = self . get_ip_addresses ( machine , boot_only = boot_only , discovered = discovered ) if len ( ip_addresses ) > 0 : pingable_ips = self . _async_get_sshable_ips ( ip_addresses ) while ( len ( pingable_ips ) == 0 and ( time . monotonic ( ) - start_time ) < wait ) : time . sleep ( 5 ) pingable_ips = self . _async_get_sshable_ips ( ip_addresses ) if len ( pingable_ips ) == 0 : raise CommandError ( "No IP addresses on %s can be reached." % ( machine . hostname ) ) else : ip = pingable_ips [ 0 ] else : raise CommandError ( "%s has no IP addresses." % machine . hostname ) if username is None : context . msg = colorized ( "{autoblue}Determining{/autoblue} SSH username on %s" % ( machine . hostname ) ) username = self . _determine_username ( ip ) while ( username is None and ( time . monotonic ( ) - start_time ) < wait ) : username = self . _determine_username ( ip ) if username is None : raise CommandError ( "Failed to determine the username for SSH." ) conn_str = "%s@%s" % ( username , ip ) args = [ "ssh" , "-o" , "UserKnownHostsFile=/dev/null" , "-o" , "StrictHostKeyChecking=no" , conn_str ] context . msg = colorized ( "{automagenta}Waiting{/automagenta} for SSH on %s" % ( machine . hostname ) ) check_args = args + [ "echo" ] connectable = self . _check_ssh ( * check_args ) while not connectable and ( time . monotonic ( ) - start_time ) < wait : time . sleep ( 5 ) connectable = self . _check_ssh ( * check_args ) if not connectable : raise CommandError ( "SSH never started on %s using IP %s." % ( machine . hostname , ip ) ) if command is not None : args . append ( command ) ssh = subprocess . Popen ( args , stdin = sys . stdin , stdout = sys . stdout , stderr = sys . stderr ) ssh . wait ( ) return ssh . returncode
SSH into machine .
634
5
28,559
def _get_deploy_options ( self , options ) : user_data = None if options . user_data and options . b64_user_data : raise CommandError ( "Cannot provide both --user-data and --b64-user-data." ) if options . b64_user_data : user_data = options . b64_user_data if options . user_data : user_data = base64_file ( options . user_data ) . decode ( "ascii" ) return utils . remove_None ( { 'distro_series' : options . image , 'hwe_kernel' : options . hwe_kernel , 'user_data' : user_data , 'comment' : options . comment , 'wait' : False , } )
Return the deployment options based on command line .
171
9
28,560
def _handle_abort ( self , machine , allocated ) : abort = yes_or_no ( "Abort deployment?" ) if abort : with utils . Spinner ( ) as context : if allocated : context . msg = colorized ( "{autoblue}Releasing{/autoblue} %s" ) % ( machine . hostname ) machine . release ( ) context . print ( colorized ( "{autoblue}Released{/autoblue} %s" ) % ( machine . hostname ) ) else : context . msg = colorized ( "{autoblue}Aborting{/autoblue} %s" ) % ( machine . hostname ) machine . abort ( ) context . print ( colorized ( "{autoblue}Aborted{/autoblue} %s" ) % ( machine . hostname ) )
Handle the user aborting mid deployment .
182
8
28,561
def register ( parser ) : cmd_login . register ( parser ) cmd_logout . register ( parser ) cmd_switch . register ( parser ) cmd_profiles . register ( parser )
Register profile commands with the given parser .
40
8
28,562
def print_whats_next ( profile ) : what_next = [ "{{autogreen}}Congratulations!{{/autogreen}} You are logged in " "to the MAAS server at {{autoblue}}{profile.url}{{/autoblue}} " "with the profile name {{autoblue}}{profile.name}{{/autoblue}}." , "For help with the available commands, try:" , " maas help" , ] for message in what_next : message = message . format ( profile = profile ) print ( colorized ( message ) ) print ( )
Explain what to do next .
128
7
28,563
def obtain_credentials ( credentials ) : if credentials == "-" : credentials = sys . stdin . readline ( ) . strip ( ) elif credentials is None : credentials = try_getpass ( "API key (leave empty for anonymous access): " ) # Ensure that the credentials have a valid form. if credentials and not credentials . isspace ( ) : return Credentials . parse ( credentials ) else : return None
Prompt for credentials if possible .
90
7
28,564
async def fromURL ( cls , url , * , credentials = None , insecure = False ) : try : description = await helpers . fetch_api_description ( url , insecure = insecure ) except helpers . RemoteError as error : # For now just re-raise as SessionError. raise SessionError ( str ( error ) ) else : session = cls ( description , credentials ) session . insecure = insecure return session
Return a SessionAPI for a given MAAS instance .
87
11
28,565
def fromProfileName ( cls , name ) : with profiles . ProfileStore . open ( ) as config : return cls . fromProfile ( config . load ( name ) )
Return a SessionAPI from a given configuration profile name .
37
11
28,566
async def login ( cls , url , * , username = None , password = None , insecure = False ) : profile = await helpers . login ( url = url , username = username , password = password , insecure = insecure ) session = cls ( profile . description , profile . credentials ) session . insecure = insecure return profile , session
Make a SessionAPI by logging - in with a username and password .
70
14
28,567
async def connect ( cls , url , * , apikey = None , insecure = False ) : profile = await helpers . connect ( url = url , apikey = apikey , insecure = insecure ) session = cls ( profile . description , profile . credentials ) session . insecure = insecure return profile , session
Make a SessionAPI by connecting with an apikey .
68
12
28,568
def rebind ( self , * * params ) : new_params = self . __params . copy ( ) new_params . update ( params ) return self . __class__ ( new_params , self . __action )
Rebind the parameters into the URI .
47
8
28,569
def call ( self , * * data ) : uri , body , headers = self . prepare ( data ) return self . dispatch ( uri , body , headers )
Issue the call .
35
4
28,570
def prepare ( self , data ) : def expand ( data ) : for name , value in data . items ( ) : if isinstance ( value , Iterable ) : for value in value : yield name , value else : yield name , value # `data` must be an iterable yielding 2-tuples. if self . action . method in ( "GET" , "DELETE" ) : # MAAS does not expect an entity-body for GET or DELETE. data = expand ( data ) else : # MAAS expects and entity-body for PUT and POST. data = data . items ( ) # Bundle things up ready to throw over the wire. uri , body , headers = utils . prepare_payload ( self . action . op , self . action . method , self . uri , data ) # Headers are returned as a list, but they must be a dict for # the signing machinery. headers = dict ( headers ) # Sign request if credentials have been provided. credentials = self . action . handler . session . credentials if credentials is not None : utils . sign ( uri , headers , credentials ) return uri , body , headers
Prepare the call payload .
247
6
28,571
async def dispatch ( self , uri , body , headers ) : insecure = self . action . handler . session . insecure connector = aiohttp . TCPConnector ( verify_ssl = ( not insecure ) ) session = aiohttp . ClientSession ( connector = connector ) async with session : response = await session . request ( self . action . method , uri , data = body , headers = _prefer_json ( headers ) ) async with response : # Fetch the raw body content. content = await response . read ( ) # Debug output. if self . action . handler . session . debug : print ( response ) # 2xx status codes are all okay. if response . status // 100 != 2 : request = { "body" : body , "headers" : headers , "method" : self . action . method , "uri" : uri , } raise CallError ( request , response , content , self ) # Decode from JSON if that's what it's declared as. if response . content_type is None : data = await response . read ( ) elif response . content_type . endswith ( '/json' ) : data = await response . json ( ) else : data = await response . read ( ) if response . content_type is None : data = content elif response . content_type . endswith ( '/json' ) : # JSON should always be UTF-8. data = json . loads ( content . decode ( "utf-8" ) ) else : data = content return CallResult ( response , content , data )
Dispatch the call via HTTP .
330
6
28,572
async def connect ( url , * , apikey = None , insecure = False ) : from . facade import Client # Lazy. from . viscera import Origin # Lazy. profile , origin = await Origin . connect ( url , apikey = apikey , insecure = insecure ) return Client ( origin )
Connect to MAAS at url using a previously obtained API key .
68
13
28,573
async def login ( url , * , username = None , password = None , insecure = False ) : from . facade import Client # Lazy. from . viscera import Origin # Lazy. profile , origin = await Origin . login ( url , username = username , password = password , insecure = insecure ) return Client ( origin )
Connect to MAAS at url with a user name and password .
70
13
28,574
async def read ( cls , * , hostnames : typing . Sequence [ str ] = None ) : params = { } if hostnames : params [ "hostname" ] = [ normalize_hostname ( hostname ) for hostname in hostnames ] data = await cls . _handler . read ( * * params ) return cls ( map ( cls . _object , data ) )
List nodes .
86
3
28,575
def as_machine ( self ) : if self . node_type != NodeType . MACHINE : raise ValueError ( 'Cannot convert to `Machine`, node_type is not a machine.' ) return self . _origin . Machine ( self . _data )
Convert to a Machine object .
56
7
28,576
def as_device ( self ) : if self . node_type != NodeType . DEVICE : raise ValueError ( 'Cannot convert to `Device`, node_type is not a device.' ) return self . _origin . Device ( self . _data )
Convert to a Device object .
55
7
28,577
def as_rack_controller ( self ) : if self . node_type not in [ NodeType . RACK_CONTROLLER , NodeType . REGION_AND_RACK_CONTROLLER ] : raise ValueError ( 'Cannot convert to `RackController`, node_type is not a ' 'rack controller.' ) return self . _origin . RackController ( self . _data )
Convert to a RackController object .
89
8
28,578
def as_region_controller ( self ) : if self . node_type not in [ NodeType . REGION_CONTROLLER , NodeType . REGION_AND_RACK_CONTROLLER ] : raise ValueError ( 'Cannot convert to `RegionController`, node_type is not a ' 'region controller.' ) return self . _origin . RegionController ( self . _data )
Convert to a RegionController object .
88
8
28,579
async def get_power_parameters ( self ) : data = await self . _handler . power_parameters ( system_id = self . system_id ) return data
Get the power paramters for this node .
38
9
28,580
async def set_power ( self , power_type : str , power_parameters : typing . Mapping [ str , typing . Any ] = { } ) : data = await self . _handler . update ( system_id = self . system_id , power_type = power_type , power_parameters = power_parameters ) self . power_type = data [ 'power_type' ]
Set the power type and power parameters for this node .
88
11
28,581
async def create ( cls , boot_source , os , release , * , arches = None , subarches = None , labels = None ) : if not isinstance ( boot_source , BootSource ) : raise TypeError ( "boot_source must be a BootSource, not %s" % type ( boot_source ) . __name__ ) if arches is None : arches = [ '*' ] if subarches is None : subarches = [ '*' ] if labels is None : labels = [ '*' ] data = await cls . _handler . create ( boot_source_id = boot_source . id , os = os , release = release , arches = arches , subarches = subarches , labels = labels ) return cls . _object ( data , { "boot_source_id" : boot_source . id } )
Create a new BootSourceSelection .
193
8
28,582
async def read ( cls , boot_source , id ) : if isinstance ( boot_source , int ) : boot_source_id = boot_source elif isinstance ( boot_source , BootSource ) : boot_source_id = boot_source . id else : raise TypeError ( "boot_source must be a BootSource or int, not %s" % type ( boot_source ) . __name__ ) data = await cls . _handler . read ( boot_source_id = boot_source_id , id = id ) return cls ( data , { "boot_source_id" : boot_source_id } )
Get BootSourceSelection by id .
142
8
28,583
async def delete ( self ) : await self . _handler . delete ( boot_source_id = self . boot_source . id , id = self . id )
Delete boot source selection .
36
5
28,584
def calc_size_and_sha265 ( content : io . IOBase , chunk_size : int ) : size = 0 sha256 = hashlib . sha256 ( ) content . seek ( 0 , io . SEEK_SET ) while True : buf = content . read ( chunk_size ) length = len ( buf ) size += length sha256 . update ( buf ) if length != chunk_size : break return size , sha256 . hexdigest ( )
Calculates the size and the sha2566 value of the content .
103
16
28,585
async def create ( cls , name : str , architecture : str , content : io . IOBase , * , title : str = "" , filetype : BootResourceFileType = BootResourceFileType . TGZ , chunk_size = ( 1 << 22 ) , progress_callback = None ) : if '/' not in name : raise ValueError ( "name must be in format os/release; missing '/'" ) if '/' not in architecture : raise ValueError ( "architecture must be in format arch/subarch; missing '/'" ) if not content . readable ( ) : raise ValueError ( "content must be readable" ) elif not content . seekable ( ) : raise ValueError ( "content must be seekable" ) if chunk_size <= 0 : raise ValueError ( "chunk_size must be greater than 0, not %d" % chunk_size ) size , sha256 = calc_size_and_sha265 ( content , chunk_size ) resource = cls . _object ( await cls . _handler . create ( name = name , architecture = architecture , title = title , filetype = filetype . value , size = str ( size ) , sha256 = sha256 ) ) newest_set = max ( resource . sets , default = None ) assert newest_set is not None resource_set = resource . sets [ newest_set ] assert len ( resource_set . files ) == 1 rfile = list ( resource_set . files . values ( ) ) [ 0 ] if rfile . complete : # Already created and fully up-to-date. return resource else : # Upload in chunks and reload boot resource. await cls . _upload_chunks ( rfile , content , chunk_size , progress_callback ) return cls . _object . read ( resource . id )
Create a BootResource .
392
5
28,586
async def _upload_chunks ( cls , rfile : BootResourceFile , content : io . IOBase , chunk_size : int , progress_callback = None ) : content . seek ( 0 , io . SEEK_SET ) upload_uri = urlparse ( cls . _handler . uri ) . _replace ( path = rfile . _data [ 'upload_uri' ] ) . geturl ( ) uploaded_size = 0 insecure = cls . _handler . session . insecure connector = aiohttp . TCPConnector ( verify_ssl = ( not insecure ) ) session = aiohttp . ClientSession ( connector = connector ) async with session : while True : buf = content . read ( chunk_size ) length = len ( buf ) if length > 0 : uploaded_size += length await cls . _put_chunk ( session , upload_uri , buf ) if progress_callback is not None : progress_callback ( uploaded_size / rfile . size ) if length != chunk_size : break
Upload the content to rfile in chunks using chunk_size .
220
13
28,587
async def _put_chunk ( cls , session : aiohttp . ClientSession , upload_uri : str , buf : bytes ) : # Build the correct headers. headers = { 'Content-Type' : 'application/octet-stream' , 'Content-Length' : '%s' % len ( buf ) , } credentials = cls . _handler . session . credentials if credentials is not None : utils . sign ( upload_uri , headers , credentials ) # Perform upload of chunk. async with await session . put ( upload_uri , data = buf , headers = headers ) as response : if response . status != 200 : content = await response . read ( ) request = { "body" : buf , "headers" : headers , "method" : "PUT" , "uri" : upload_uri , } raise CallError ( request , response , content , None )
Upload one chunk to upload_uri .
190
8
28,588
async def read ( cls , id : int ) : data = await cls . _handler . read ( id = id ) return cls ( data )
Get BootResource by id .
34
6
28,589
async def read ( cls , node , id ) : if isinstance ( node , str ) : system_id = node elif isinstance ( node , Node ) : system_id = node . system_id else : raise TypeError ( "node must be a Node or str, not %s" % type ( node ) . __name__ ) return cls ( await cls . _handler . read ( system_id = system_id , id = id ) )
Get Bcache by id .
101
6
28,590
async def delete ( self ) : await self . _handler . delete ( system_id = self . node . system_id , id = self . id )
Delete this Bcache .
34
5
28,591
async def read ( cls , node ) : if isinstance ( node , str ) : system_id = node elif isinstance ( node , Node ) : system_id = node . system_id else : raise TypeError ( "node must be a Node or str, not %s" % type ( node ) . __name__ ) data = await cls . _handler . read ( system_id = system_id ) return cls ( cls . _object ( item , local_data = { "node_system_id" : system_id } ) for item in data )
Get list of Bcache s for node .
127
9
28,592
async def create ( cls , node : Union [ Node , str ] , name : str , backing_device : Union [ BlockDevice , Partition ] , cache_set : Union [ BcacheCacheSet , int ] , cache_mode : CacheMode , * , uuid : str = None ) : params = { 'name' : name , } if isinstance ( node , str ) : params [ 'system_id' ] = node elif isinstance ( node , Node ) : params [ 'system_id' ] = node . system_id else : raise TypeError ( 'node must be a Node or str, not %s' % ( type ( node ) . __name__ ) ) if isinstance ( backing_device , BlockDevice ) : params [ 'backing_device' ] = backing_device . id elif isinstance ( backing_device , Partition ) : params [ 'backing_partition' ] = backing_device . id else : raise TypeError ( "backing_device must be a BlockDevice or Partition, " "not %s" % type ( backing_device ) . __name__ ) if isinstance ( cache_set , BcacheCacheSet ) : params [ 'cache_set' ] = cache_set . id elif isinstance ( cache_set , int ) : params [ 'cache_set' ] = cache_set else : raise TypeError ( "cache_set must be a BcacheCacheSet or int, " "not %s" % type ( cache_set ) . __name__ ) if isinstance ( cache_mode , CacheMode ) : params [ 'cache_mode' ] = cache_mode . value else : raise TypeError ( "cache_mode must be a CacheMode, " "not %s" % type ( cache_mode ) . __name__ ) if uuid is not None : params [ 'uuid' ] = uuid return cls . _object ( await cls . _handler . create ( * * params ) )
Create a Bcache on a Node .
431
8
28,593
def get_param_arg ( param , idx , klass , arg , attr = 'id' ) : if isinstance ( arg , klass ) : return getattr ( arg , attr ) elif isinstance ( arg , ( int , str ) ) : return arg else : raise TypeError ( "%s[%d] must be int, str, or %s, not %s" % ( param , idx , klass . __name__ , type ( arg ) . __name__ ) )
Return the correct value for a fabric from arg .
110
10
28,594
async def create ( cls , architecture : str , mac_addresses : typing . Sequence [ str ] , power_type : str , power_parameters : typing . Mapping [ str , typing . Any ] = None , * , subarchitecture : str = None , min_hwe_kernel : str = None , hostname : str = None , domain : typing . Union [ int , str ] = None ) : params = { "architecture" : architecture , "mac_addresses" : mac_addresses , "power_type" : power_type , } if power_parameters is not None : params [ "power_parameters" ] = json . dumps ( power_parameters , sort_keys = True ) if subarchitecture is not None : params [ "subarchitecture" ] = subarchitecture if min_hwe_kernel is not None : params [ "min_hwe_kernel" ] = min_hwe_kernel if hostname is not None : params [ "hostname" ] = hostname if domain is not None : params [ "domain" ] = domain return cls . _object ( await cls . _handler . create ( * * params ) )
Create a new machine .
264
5
28,595
async def save ( self ) : orig_owner_data = self . _orig_data [ 'owner_data' ] new_owner_data = dict ( self . _data [ 'owner_data' ] ) self . _changed_data . pop ( 'owner_data' , None ) await super ( Machine , self ) . save ( ) params_diff = calculate_dict_diff ( orig_owner_data , new_owner_data ) if len ( params_diff ) > 0 : params_diff [ 'system_id' ] = self . system_id await self . _handler . set_owner_data ( * * params_diff ) self . _data [ 'owner_data' ] = self . _data [ 'owner_data' ]
Save the machine in MAAS .
164
7
28,596
async def abort ( self , * , comment : str = None ) : params = { "system_id" : self . system_id } if comment : params [ "comment" ] = comment self . _data = await self . _handler . abort ( * * params ) return self
Abort the current action .
61
6
28,597
async def clear_default_gateways ( self ) : self . _data = await self . _handler . clear_default_gateways ( system_id = self . system_id ) return self
Clear default gateways .
43
5
28,598
async def commission ( self , * , enable_ssh : bool = None , skip_networking : bool = None , skip_storage : bool = None , commissioning_scripts : typing . Sequence [ str ] = None , testing_scripts : typing . Sequence [ str ] = None , wait : bool = False , wait_interval : int = 5 ) : params = { "system_id" : self . system_id } if enable_ssh is not None : params [ "enable_ssh" ] = enable_ssh if skip_networking is not None : params [ "skip_networking" ] = skip_networking if skip_storage is not None : params [ "skip_storage" ] = skip_storage if ( commissioning_scripts is not None and len ( commissioning_scripts ) > 0 ) : params [ "commissioning_scripts" ] = "," . join ( commissioning_scripts ) if testing_scripts is not None : if len ( testing_scripts ) == 0 or testing_scripts == "none" : params [ "testing_scripts" ] = [ "none" ] else : params [ "testing_scripts" ] = "," . join ( testing_scripts ) self . _data = await self . _handler . commission ( * * params ) if not wait : return self else : # Wait for the machine to be fully commissioned. while self . status in [ NodeStatus . COMMISSIONING , NodeStatus . TESTING ] : await asyncio . sleep ( wait_interval ) self . _data = await self . _handler . read ( system_id = self . system_id ) if self . status == NodeStatus . FAILED_COMMISSIONING : msg = "{hostname} failed to commission." . format ( hostname = self . hostname ) raise FailedCommissioning ( msg , self ) if self . status == NodeStatus . FAILED_TESTING : msg = "{hostname} failed testing." . format ( hostname = self . hostname ) raise FailedTesting ( msg , self ) return self
Commission this machine .
439
4
28,599
async def deploy ( self , * , user_data : typing . Union [ bytes , str ] = None , distro_series : str = None , hwe_kernel : str = None , comment : str = None , wait : bool = False , wait_interval : int = 5 ) : params = { "system_id" : self . system_id } if user_data is not None : if isinstance ( user_data , bytes ) : params [ "user_data" ] = base64 . encodebytes ( user_data ) else : # Already base-64 encoded. Convert to a byte string in # preparation for multipart assembly. params [ "user_data" ] = user_data . encode ( "ascii" ) if distro_series is not None : params [ "distro_series" ] = distro_series if hwe_kernel is not None : params [ "hwe_kernel" ] = hwe_kernel if comment is not None : params [ "comment" ] = comment self . _data = await self . _handler . deploy ( * * params ) if not wait : return self else : # Wait for the machine to be fully deployed while self . status == NodeStatus . DEPLOYING : await asyncio . sleep ( wait_interval ) self . _data = await self . _handler . read ( system_id = self . system_id ) if self . status == NodeStatus . FAILED_DEPLOYMENT : msg = "{hostname} failed to deploy." . format ( hostname = self . hostname ) raise FailedDeployment ( msg , self ) return self
Deploy this machine .
350
4