idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
39,500
def wait_for_task(service, task, timeout_sec=120):
    """Wait until the named task of a service has been launched.

    Returns the elapsed wait time (see time_wait); raises on timeout.
    """
    def _launched():
        return task_predicate(service, task)
    return time_wait(_launched, timeout_seconds=timeout_sec)
Waits for a launched task to be reported by the service
39,501
def wait_for_task_property(service, task, prop, timeout_sec=120):
    """Wait until the given task exposes the specified property.

    Returns the elapsed wait time (see time_wait); raises on timeout.
    """
    def _has_property():
        return task_property_present_predicate(service, task, prop)
    return time_wait(_has_property, timeout_seconds=timeout_sec)
Waits for a task to have the specified property
39,502
def copy_file(host, file_path, remote_path='.', username=None, key_path=None, action='put'):
    """Copy a file via SCP, proxied through the mesos master.

    :param host: host to copy to/from
    :param file_path: local file path
    :param remote_path: remote path (default: remote cwd)
    :param action: 'put' uploads file_path to host; 'get' downloads remote_path
    :return: True on success, False when SSH authentication fails
    """
    # Fall back to the CLI-configured SSH credentials when none are given.
    if not username:
        username = shakedown.cli.ssh_user
    if not key_path:
        key_path = shakedown.cli.ssh_key_file
    key = validate_key(key_path)
    transport = get_transport(host, username, key)
    transport = start_transport(transport, username, key)
    if transport.is_authenticated():
        start = time.time()
        channel = scp.SCPClient(transport)
        if action == 'get':
            print("\n{}scp {}:{} {}\n".format(shakedown.cli.helpers.fchr('>>'), host, remote_path, file_path))
            channel.get(remote_path, file_path)
        else:
            print("\n{}scp {} {}:{}\n".format(shakedown.cli.helpers.fchr('>>'), file_path, host, remote_path))
            channel.put(file_path, remote_path)
        # file_path is the local side in both directions, so getsize is valid
        # after a 'get' as well as before a 'put'.
        print("{} bytes copied in {} seconds.".format(str(os.path.getsize(file_path)), str(round(time.time() - start, 2))))
        try_close(channel)
        try_close(transport)
        return True
    else:
        print("error: unable to authenticate {}@{} with key {}".format(username, host, key_path))
        return False
Copy a file via SCP proxied through the mesos master
39,503
def __metadata_helper(json_path):
    """Return parsed json for a dcos-metadata endpoint, or None.

    The metadata endpoints were introduced in DC/OS 1.9; older clusters
    (or missing metadata) yield None.

    Fix: the original bare ``except:`` also swallowed KeyboardInterrupt
    and SystemExit; narrowed to ``except Exception``.
    """
    url = shakedown.dcos_url_path('dcos-metadata/{}'.format(json_path))
    try:
        response = dcos.http.request('get', url)
        if response.status_code == 200:
            return response.json()
    except Exception:
        # Best-effort lookup: any HTTP/parse failure is treated as "missing".
        pass
    return None
Returns json for specific cluster metadata. Note that this was introduced in dcos-1.9; clusters prior to 1.9, or with missing metadata, will return None.
39,504
def log_decl_method(func):
    """Decorate do_* declaration methods to emit a debug log line per call."""
    from functools import wraps

    @wraps(func)
    def with_logging(*args, **kwargs):
        instance = args[0]
        declaration = args[2]
        message = u" {}: {} {}".format(
            instance.state['current_step'],
            declaration.name,
            serialize(declaration.value).strip())
        log(DEBUG, message.encode('utf-8'))
        return func(*args, **kwargs)
    return with_logging
Decorate do_declaration methods for debug logging.
39,505
def css_to_func(css, flags, css_namespaces, lang):
    """Convert a css selector to an xpath-evaluating function.

    Supports the attr()/first-letter pseudo elements. Returns None when
    css is empty; otherwise returns func(elem) -> extracted string (or the
    raw xpath result), upper-cased when 'nocase' is in flags.
    """
    from cssselect import parse, HTMLTranslator
    from cssselect.parser import FunctionalPseudoElement
    if not (css):
        return None
    sel = parse(css.strip('" '))[0]
    xpath = HTMLTranslator().selector_to_xpath(sel)
    first_letter = False
    if sel.pseudo_element is not None:
        if type(sel.pseudo_element) == FunctionalPseudoElement:
            if sel.pseudo_element.name in ('attr', 'first-letter'):
                # Pull out the named attribute instead of the element itself.
                xpath += '/@' + sel.pseudo_element.arguments[0].value
                if sel.pseudo_element.name == 'first-letter':
                    first_letter = True
        elif isinstance(sel.pseudo_element, type(u'')):
            if sel.pseudo_element == 'first-letter':
                first_letter = True
    xp = etree.XPath(xpath, namespaces=css_namespaces)

    def toupper(u):
        # Locale-aware uppercasing via ICU.
        loc = Locale(lang) if lang else Locale()
        return UnicodeString(u).toUpper(loc).encode('utf-8').decode('utf-8')

    def func(elem):
        res = xp(elem)
        if res:
            if etree.iselement(res[0]):
                res_str = etree.tostring(res[0], encoding='unicode', method="text")
            else:
                res_str = res[0]
            if first_letter:
                if res_str:
                    if flags and 'nocase' in flags:
                        return toupper(res_str[0])
                    else:
                        return res_str[0]
                else:
                    return res_str
            else:
                if flags and 'nocase' in flags:
                    return toupper(res_str)
                else:
                    return res_str
    return func
Convert a css selector to an xpath supporting pseudo elements .
39,506
def append_string(t, string):
    """Append a string to the target node, as its text or as the tail of
    its last child (empty strings are ignored)."""
    node = t.tree
    if not string:
        return
    if len(node) == 0:
        # No children: extend (or set) the node's own text.
        node.text = string if node.text is None else node.text + string
    else:
        # Children present: extend (or set) the tail of the last child.
        last_child = list(node)[-1]
        last_child.tail = string if last_child.tail is None else last_child.tail + string
Append a string to a node as text or tail of last child .
39,507
def prepend_string(t, string):
    """Prepend a string to a target node as text.

    Fix: the original did ``node.text += string``, which *appended* the
    string after any existing text, contradicting both the function name
    and its documented purpose; the string now goes before existing text.
    """
    node = t.tree
    if node.text is not None:
        node.text = string + node.text
    else:
        node.text = string
Prepend a string to a target node as text .
39,508
def grouped_insert(t, value):
    """Insert value into the target tree t with correct grouping.

    Handles four target shapes: grouped+sorted (with or without group-by
    labels), plain sorted, and the location modes 'inside', 'outside',
    'before' and default append.
    """
    collator = Collator.createInstance(Locale(t.lang) if t.lang else Locale())
    if value.tail is not None:
        # Detach value's tail text and re-attach it to whatever precedes
        # value (previous sibling's tail, else parent's text), so moving
        # the element does not drag trailing text along with it.
        val_prev = value.getprevious()
        if val_prev is not None:
            val_prev.tail = (val_prev.tail or '') + value.tail
        else:
            val_parent = value.getparent()
            if val_parent is not None:
                val_parent.text = (val_parent.text or '') + value.tail
        value.tail = None
    if t.isgroup and t.sort(value) is not None:
        if t.groupby:
            # Walk existing group wrappers in collation order.
            for child in t.tree:
                if child.get('class') == 'group-by':
                    # child[1] is the first member after the group label span.
                    order = collator.compare(t.groupby(child[1]) or '', t.groupby(value) or '')
                    if order == 0:
                        # Same group: insert sorted within it.
                        c_target = Target(child, sort=t.sort, lang=t.lang)
                        insert_group(value, c_target)
                        break
                    elif order > 0:
                        # Passed the slot: start a new group before this one.
                        group = create_group(t.groupby(value))
                        group.append(value)
                        child.addprevious(group)
                        break
            else:
                # No existing group matched or sorted later: append new group.
                group = create_group(t.groupby(value))
                group.append(value)
                t.tree.append(group)
        else:
            insert_group(value, t)
    elif t.sort and t.sort(value) is not None:
        insert_sort(value, t)
    elif t.location == 'inside':
        # Wrap the target's current children (and text) inside value.
        for child in t.tree:
            value.append(child)
        value.text = t.tree.text
        t.tree.text = None
        t.tree.append(value)
    elif t.location == 'outside':
        # Wrap the target itself inside value, preserving its tail.
        value.tail = t.tree.tail
        t.tree.tail = None
        target_parent_descendants = ([n.getparent() for n in t.parent.iterdescendants() if n == t.tree])
        try:
            parent = target_parent_descendants[0]
            parent.insert(parent.index(t.tree), value)
            value.append(t.tree)
        except IndexError as e:
            logger.error('Target of outside has been moved or deleted')
            raise e
    elif t.location == 'before':
        value.tail = t.tree.text
        t.tree.text = None
        t.tree.insert(0, value)
    else:
        t.tree.append(value)
Insert value into the target tree t with correct grouping .
39,509
def insert_sort(node, target):
    """Insert node at its collation-sorted position within target.tree."""
    sort_key = target.sort
    locale = Locale(target.lang) if target.lang else Locale()
    collator = Collator.createInstance(locale)
    for sibling in target.tree:
        # The first existing child that sorts after the new node marks the slot.
        if collator.compare(sort_key(sibling) or '', sort_key(node) or '') > 0:
            sibling.addprevious(node)
            break
    else:
        # Nothing sorts later, so the node goes at the end.
        target.tree.append(node)
Insert node into sorted position in target tree .
39,510
def insert_group(node, target):
    """Insert node into the target tree at its group position, merging its
    members into an existing group when the labels collate equal."""
    group_key = target.sort
    locale = Locale(target.lang) if target.lang else Locale()
    collator = Collator.createInstance(locale)
    for sibling in target.tree:
        order = collator.compare(group_key(sibling) or '', group_key(node) or '')
        if order == 0:
            # Same group label: move node's members (skipping its label
            # child at index 0) into the existing group.
            for member in node[1:]:
                sibling.append(member)
            break
        if order > 0:
            sibling.addprevious(node)
            break
    else:
        target.tree.append(node)
Insert node into the appropriate group in the target tree.
39,511
def create_group(value):
    """Create a group wrapper div whose first child is a labelled span."""
    label = etree.Element('span', attrib={'class': 'group-label'})
    label.text = value
    wrapper = etree.Element('div', attrib={'class': 'group-by'})
    wrapper.append(label)
    return wrapper
Create the group wrapper node .
39,512
def _extract_sel_info(sel):
    """Recurse down a parsed selector tree, returning (steps, extras)
    pseudo-class info: pass() arguments and the 'deferred' marker."""
    from cssselect2.parser import (CombinedSelector, CompoundSelector,
                                   PseudoClassSelector,
                                   FunctionalPseudoClassSelector)
    steps = []
    extras = []
    if isinstance(sel, CombinedSelector):
        # Gather left side first, then right, preserving document order.
        for part in (sel.left, sel.right):
            part_steps, part_extras = _extract_sel_info(part)
            steps = steps + part_steps
            extras = extras + part_extras
    elif isinstance(sel, CompoundSelector):
        for simple in sel.simple_selectors:
            part_steps, part_extras = _extract_sel_info(simple)
            steps.extend(part_steps)
            extras.extend(part_extras)
    elif isinstance(sel, FunctionalPseudoClassSelector):
        if sel.name == 'pass':
            steps.append(serialize(sel.arguments).strip('"\''))
    elif isinstance(sel, PseudoClassSelector):
        if sel.name == 'deferred':
            extras.append('deferred')
    return (steps, extras)
Recurse down parsed tree return pseudo class info
39,513
def _to_roman ( num ) : roman_numeral_map = ( ( 'M' , 1000 ) , ( 'CM' , 900 ) , ( 'D' , 500 ) , ( 'CD' , 400 ) , ( 'C' , 100 ) , ( 'XC' , 90 ) , ( 'L' , 50 ) , ( 'XL' , 40 ) , ( 'X' , 10 ) , ( 'IX' , 9 ) , ( 'V' , 5 ) , ( 'IV' , 4 ) , ( 'I' , 1 ) ) if not ( 0 < num < 5000 ) : log ( WARN , 'Number out of range for roman (must be 1..4999)' ) return str ( num ) result = '' for numeral , integer in roman_numeral_map : while num >= integer : result += numeral num -= integer return result
Convert integer to roman numerals .
39,514
def copy_w_id_suffix(elem, suffix="_copy"):
    """Return a deep copy of the tree with suffix appended to every id
    attribute, so the copy does not collide with the original."""
    duplicate = deepcopy(elem)
    for node_with_id in duplicate.xpath('//*[@id]'):
        node_with_id.set('id', node_with_id.get('id') + suffix)
    return duplicate
Make a deep copy of the provided tree altering ids .
39,515
def generate_id(self):
    """Generate a fresh element id.

    Repeatable mode yields deterministic 'autobaked-N' ids; otherwise a
    random uuid4 string is returned.
    """
    if not self.use_repeatable_ids:
        return str(uuid4())
    self.repeatable_id_counter += 1
    return 'autobaked-{}'.format(self.repeatable_id_counter)
Generate a fresh id
39,516
def clear_state(self):
    """Reset all recipe state, including per-matcher step stores."""
    self.state = {
        'steps': [],
        'current_step': None,
        'scope': [],
        'counters': {},
        'strings': {},
    }
    for step in self.matchers:
        self.state[step] = {
            'pending': {},
            'actions': [],
            'counters': {},
            'strings': {},
            'recipe': False,
        }
Clear the recipe state .
39,517
def record_coverage_zero(self, rule, offset):
    """Record an lcov 'DA' entry marking this selector's source line as
    parsed but never matched (hit count 0)."""
    line_number = rule.source_line + offset
    self.coverage_lines.append('DA:{},0'.format(line_number))
Add entry to coverage saying this selector was parsed
39,518
def record_coverage(self, rule):
    """Record an lcov 'DA' entry marking this selector as matched once,
    logging the rule at debug level. rule is a (line, text) pair."""
    log(DEBUG, u'Rule ({}): {}'.format(*rule).encode('utf-8'))
    entry = 'DA:{},1'.format(rule[0])
    self.coverage_lines.append(entry)
Add entry to coverage saying this selector was matched
39,519
def push_target_elem(self, element, pseudo=None):
    """Push a Target for element onto the action stack, replacing any
    target currently on top so only the latest target is live."""
    actions = self.state[self.state['current_step']]['actions']
    if actions and actions[-1][0] == 'target':
        actions.pop()
    new_target = Target(element.etree_element, pseudo, element.parent.etree_element)
    actions.append(('target', new_target))
Place target element onto action stack .
39,520
def push_pending_elem(self, element, pseudo):
    """Create a pending wrapper div and push it onto the action stack as
    both a 'move' entry and the new current target."""
    self.push_target_elem(element, pseudo)
    wrapper = etree.Element('div')
    actions = self.state[self.state['current_step']]['actions']
    actions.append(('move', wrapper))
    actions.append(('target', Target(wrapper)))
Create and place pending target element onto stack .
39,521
def pop_pending_if_empty(self, element):
    """Remove an unused pending wrapper from the action stack.

    If the wrapper is still the top target (nothing was ever added to
    it), drop its target entry, its 'move' entry, and the original
    target entry beneath them.
    """
    actions = self.state[self.state['current_step']]['actions']
    current_tree = self.current_target().tree
    top_kind, top_value = actions[-1]
    if top_kind == 'target' and top_value.tree == current_tree:
        del actions[-3:]
Remove empty wrapper element .
39,522
def current_target(self):
    """Return the most recently pushed target on the action stack, or
    None when no target has been pushed."""
    actions = self.state[self.state['current_step']]['actions']
    for kind, value in reversed(actions):
        if kind == 'target':
            return value
Return current target .
39,523
def find_method(self, decl):
    """Find the do_* method implementing a declaration, by name.

    Falls back to the generic data-*/attr-* handlers; returns a no-op
    callable (and logs a warning) when no handler exists. Coverage is
    recorded whenever a real handler is found.
    """
    name = decl.name
    method = None
    try:
        method = getattr(self, u'do_{}'.format((name).replace('-', '_')))
    except AttributeError:
        if name.startswith('data-'):
            method = getattr(self, 'do_data_any')
        elif name.startswith('attr-'):
            method = getattr(self, 'do_attr_any')
        else:
            log(WARN, u'Missing method {}'.format((name).replace('-', '_')).encode('utf-8'))
    if method is None:
        return lambda x, y, z: None
    self.record_coverage_line(decl.source_line)
    return method
Find class method to call for declaration based on name .
39,524
def lookup(self, vtype, vname, target_id=None):
    """Return the value of vname from the variable store vtype.

    vtype is one of 'strings', 'counters', 'pending'. For counters,
    vname is a sequence: (name,) or (name, display_style). Lookup walks
    the steps in scope (or the steps recorded for target_id) and returns
    the first hit; missing names yield the vtype's null value.
    Pending lookups return (value, owning_step).
    """
    nullvals = {'strings': '', 'counters': 0, 'pending': (None, None)}
    nullval = nullvals[vtype]
    vstyle = None
    if vtype == 'counters':
        # Counter names may carry an optional display style.
        if len(vname) > 1:
            vname, vstyle = vname
        else:
            vname = vname[0]
    if target_id is not None:
        try:
            state = self.state[vtype][target_id]
            steps = self.state[vtype][target_id].keys()
        except KeyError:
            log(WARN, u'Bad ID target lookup {}'.format(target_id).encode('utf-8'))
            return nullval
    else:
        state = self.state
        steps = self.state['scope']
    for step in steps:
        if vname in state[step][vtype]:
            if vtype == 'pending':
                return (state[step][vtype][vname], step)
            else:
                val = state[step][vtype][vname]
                if vstyle is not None:
                    # Render counters through the requested list style.
                    return self.counter_style(val, vstyle)
                return val
    else:
        # for-else: no step in scope held the name.
        return nullval
Return value of vname from the variable store vtype .
39,525
def counter_style(self, val, style):
    """Render a counter value in the given CSS list-style.

    Out-of-range latin values and unknown styles fall back to decimal
    (with a warning).
    """
    if style == 'decimal-leading-zero':
        return "0{}".format(val) if val < 10 else str(val)
    if style == 'lower-roman':
        return _to_roman(val).lower()
    if style == 'upper-roman':
        return _to_roman(val)
    if style in ('lower-latin', 'lower-alpha', 'upper-latin', 'upper-alpha'):
        # a..z are chr(97..122); A..Z are chr(65..90).
        base = 96 if style.startswith('lower') else 64
        if 1 <= val <= 26:
            return chr(val + base)
        log(WARN, 'Counter out of range for latin (must be 1...26)')
        return str(val)
    if style == 'decimal':
        return str(val)
    log(WARN, u"ERROR: Counter numbering not supported for"
        u" list type {}. Using decimal.".format(style).encode('utf-8'))
    return str(val)
Return counter value in given style .
39,526
def do_counter_reset(self, element, decl, pseudo):
    """Clear (or set) the counters named in a counter-reset declaration.

    The value is a token stream of "name [value]" pairs: an ident opens a
    counter name, an optional following number sets it explicitly,
    otherwise the counter resets to 0.
    """
    step = self.state[self.state['current_step']]
    counter_name = ''
    for term in decl.value:
        if type(term) is ast.WhitespaceToken:
            continue
        elif type(term) is ast.IdentToken:
            # New name begins: flush the previous pending name as reset-to-0.
            if counter_name:
                step['counters'][counter_name] = 0
            counter_name = term.value
        elif type(term) is ast.LiteralToken:
            if counter_name:
                step['counters'][counter_name] = 0
            counter_name = ''
        elif type(term) is ast.NumberToken:
            # Explicit starting value for the pending name.
            if counter_name:
                step['counters'][counter_name] = int(term.value)
            counter_name = ''
        else:
            log(WARN, u"Unrecognized counter-reset term {}".format(type(term)).encode('utf-8'))
    # Flush a trailing name with no explicit value.
    if counter_name:
        step['counters'][counter_name] = 0
Clear specified counters .
39,527
def do_node_set(self, element, decl, pseudo):
    """Implement the node-set declaration: store the current target
    element under the named pending slot (overwriting any prior value)."""
    target = serialize(decl.value).strip()
    step = self.state[self.state['current_step']]
    elem = self.current_target().tree
    _, valstep = self.lookup('pending', target)
    # Overwrite in the step that already owns the name, else this step.
    if valstep:
        self.state[valstep]['pending'][target] = [('nodeset', elem)]
    else:
        step['pending'][target] = [('nodeset', elem)]
Implement node - set declaration .
39,528
def do_move_to(self, element, decl, pseudo):
    """Implement the move-to declaration.

    Queues the current target element to be moved to the named pending
    slot, removing any in-stack 'move' action for the same element so it
    is only moved once.
    """
    target = serialize(decl.value).strip()
    step = self.state[self.state['current_step']]
    elem = self.current_target().tree
    actions = step['actions']
    # Scan the stack from the top for an existing 'move' of this element
    # and excise just that entry (negative index into the original list).
    for pos, action in enumerate(reversed(actions)):
        if action[0] == 'move' and action[1] == elem:
            target_index = -pos - 1
            actions[target_index:] = actions[target_index + 1:]
            break
    _, valstep = self.lookup('pending', target)
    if not valstep:
        step['pending'][target] = [('move', elem)]
    else:
        # Name already owned by an earlier step: append there instead.
        self.state[valstep]['pending'][target].append(('move', elem))
Implement move - to declaration .
39,529
def do_container(self, element, decl, pseudo):
    """Implement the container declaration: record the tag to use for a
    new wrapper element, resolving an optional prefix|tag namespace."""
    value = serialize(decl.value).strip()
    if '|' in value:
        namespace, tag = value.split('|', 1)
        try:
            namespace = self.css_namespaces[namespace]
        except KeyError:
            # Unknown prefix: warn and fall back to the bare tag name.
            log(WARN, u'undefined namespace prefix: {}'.format(namespace).encode('utf-8'))
            value = tag
        else:
            value = etree.QName(namespace, tag)
    step = self.state[self.state['current_step']]
    step['actions'].append(('tag', value))
Implement setting tag for new wrapper element .
39,530
def do_class(self, element, decl, pseudo):
    """Implement the class declaration (pre-match): queue a class
    attribute assignment on the action stack."""
    class_value = self.eval_string_value(element, decl.value)
    current_step = self.state[self.state['current_step']]
    current_step['actions'].append(('attrib', ('class', class_value)))
Implement class declaration - pre - match .
39,531
def do_attr_any(self, element, decl, pseudo):
    """Implement generic attribute setting for attr-<name> declarations:
    queue an attribute assignment on the action stack."""
    attr_value = self.eval_string_value(element, decl.value)
    attr_name = decl.name[5:]  # strip the 'attr-' prefix
    current_step = self.state[self.state['current_step']]
    current_step['actions'].append(('attrib', (attr_name, attr_value)))
Implement generic attribute setting .
39,532
def do_group_by(self, element, decl, pseudo):
    """Implement the group-by declaration (pre-match).

    Value forms: "sort", "sort, groupby", "sort, groupby, flags" — and
    the two-value form may actually be "sort, nocase" (flags, no groupby).
    Stores sort/groupby functions on the current target and on the stored
    target action for this element.
    """
    sort_css = groupby_css = flags = ''
    if ',' in decl.value:
        if decl.value.count(',') == 2:
            sort_css, groupby_css, flags = map(serialize, split(decl.value, ','))
        else:
            sort_css, groupby_css = map(serialize, split(decl.value, ','))
    else:
        sort_css = serialize(decl.value)
    # Two-value form may be "sort, flags" rather than "sort, groupby".
    if groupby_css.strip() == 'nocase':
        flags = groupby_css
        groupby_css = ''
    sort = css_to_func(sort_css, flags, self.css_namespaces, self.state['lang'])
    groupby = css_to_func(groupby_css, flags, self.css_namespaces, self.state['lang'])
    step = self.state[self.state['current_step']]
    target = self.current_target()
    target.sort = sort
    target.lang = self.state['lang']
    target.isgroup = True
    target.groupby = groupby
    # Also update the stored target action for this element, if present.
    for pos, action in enumerate(reversed(step['actions'])):
        if action[0] == 'target' and action[1].tree == element.etree_element:
            action[1].sort = sort
            action[1].isgroup = True
            action[1].groupby = groupby
            break
Implement group - by declaration - pre - match .
39,533
def do_sort_by(self, element, decl, pseudo):
    """Implement the sort-by declaration (pre-match).

    Value forms: "css" or "css, flags". Stores a sort function (and
    clears any grouping) on the current target and on the stored target
    action for this element.
    """
    if ',' in decl.value:
        css, flags = split(decl.value, ',')
    else:
        css = decl.value
        flags = None
    sort = css_to_func(serialize(css), serialize(flags or ''), self.css_namespaces, self.state['lang'])
    step = self.state[self.state['current_step']]
    target = self.current_target()
    target.sort = sort
    target.lang = self.state['lang']
    target.isgroup = False
    target.groupby = None
    # Also update the stored target action for this element, if present.
    for pos, action in enumerate(reversed(step['actions'])):
        if action[0] == 'target' and action[1].tree == element.etree_element:
            action[1].sort = sort
            action[1].isgroup = False
            action[1].groupby = None
            break
Implement sort - by declaration - pre - match .
39,534
def do_pass(self, element, decl, pseudo):
    """Warn that old-style pass-as-declaration is no longer valid.

    Fix: the original called ``.encpde('utf-8')`` (a typo), so every
    invocation raised AttributeError instead of logging the warning.
    """
    log(WARN, u"Old-style pass as declaration not allowed.{}".format(decl.value).encode('utf-8'))
No longer valid way to set processing pass .
39,535
def connection_cache(func: callable):
    """Cache SSH connections per (host, username).

    Prevents opening a new, expensive connection for every command run.
    Stale/dead connections are closed and replaced on lookup. The
    decorated function gains ``get_cache()`` and ``purge(key=None)``.

    Fix: the wrapper now takes the same RLock that purge() uses, so
    concurrent callers cannot race on the shared cache dict (the
    original only locked inside purge()).
    """
    cache = dict()
    lock = RLock()

    @wraps(func)
    def func_wrapper(host: str, username: str, *args, **kwargs):
        key = "{h}-{u}".format(h=host, u=username)
        with lock:
            if key in cache:
                conn = cache[key]
                if conn and conn.is_active() and conn.is_authenticated():
                    return conn
                else:
                    # Dead or unauthenticated: drop it and reconnect below.
                    if conn:
                        try_close(conn)
                    del cache[key]
            if key not in cache:
                conn = func(host, username, *args, **kwargs)
                if conn is not None:
                    cache[key] = conn
                    return conn
        return None

    def get_cache() -> dict:
        return cache

    def purge(key: str = None):
        # Close and forget one keyed connection, or all of them.
        with lock:
            if key is None:
                conns = [(k, v) for k, v in cache.items()]
            elif key in cache:
                conns = ((key, cache[key]),)
            else:
                conns = list()
            for k, v in conns:
                try_close(v)
                del cache[k]

    func_wrapper.get_cache = get_cache
    func_wrapper.purge = purge
    return func_wrapper
Connection cache for SSH sessions . This is to prevent opening a new expensive connection on every command run .
39,536
def _get_connection(host, username: str, key_path: str) -> paramiko.Transport or None:
    """Return an authenticated SSH transport to host, or None on failure.

    Failures (no transport, or authentication rejected) are printed,
    not raised.
    """
    # Fall back to the CLI-configured SSH credentials when none are given.
    if not username:
        username = shakedown.cli.ssh_user
    if not key_path:
        key_path = shakedown.cli.ssh_key_file
    key = validate_key(key_path)
    transport = get_transport(host, username, key)
    if transport:
        transport = start_transport(transport, username, key)
        if transport.is_authenticated():
            return transport
        else:
            print("error: unable to authenticate {}@{} with key {}".format(username, host, key_path))
    else:
        print("error: unable to connect to {}".format(host))
    return None
Return an authenticated SSH connection .
39,537
def run_command(host, command, username=None, key_path=None, noisy=True):
    """Run a command via SSH, proxied through the mesos master.

    :return: (success, output) — success is True when the exit code is 0.
    """
    with HostSession(host, username, key_path, noisy) as s:
        if noisy:
            print("\n{}{} $ {}\n".format(shakedown.fchr('>>'), host, command))
        s.run(command)
    # NOTE(review): results are read after the session context exits,
    # which presumably finalizes the command — confirm against HostSession.
    ec, output = s.get_result()
    return ec == 0, output
Run a command via SSH proxied through the mesos master
39,538
def run_command_on_master(command, username=None, key_path=None, noisy=True):
    """Run a command on the Mesos master; returns (success, output)."""
    master = shakedown.master_ip()
    return run_command(master, command, username, key_path, noisy)
Run a command on the Mesos master
39,539
def run_command_on_leader(command, username=None, key_path=None, noisy=True):
    """Run a command on the leading Mesos master (important with
    multi-master clusters); returns (success, output)."""
    leader = shakedown.master_leader_ip()
    return run_command(leader, command, username, key_path, noisy)
Run a command on the Mesos leader . Important for Multi - Master .
39,540
def run_command_on_marathon_leader(command, username=None, key_path=None, noisy=True):
    """Run a command on the Marathon leader; returns (success, output)."""
    leader = shakedown.marathon_leader_ip()
    return run_command(leader, command, username, key_path, noisy)
Run a command on the Marathon leader
39,541
def run_command_on_agent(host, command, username=None, key_path=None, noisy=True):
    """Run a command on a Mesos agent, proxied through the master;
    returns (success, output)."""
    return run_command(host, command,
                       username=username, key_path=key_path, noisy=noisy)
Run a command on a Mesos agent proxied through the master
39,542
def get_all_masters():
    """Return the parsed json object for each master registered in zookeeper."""
    return [
        json.loads(get_zk_node_data(node)['str'])
        for node in __master_zk_nodes_keys()
    ]
Returns the json object that represents each of the masters .
39,543
def get_public_agents_public_ip():
    """Return the list of public IPs of the cluster's public agents.

    Each IP is discovered by running detect_ip_public on the agent itself.
    """
    detect_cmd = "/opt/mesosphere/bin/detect_ip_public"
    ips = []
    for agent in get_public_agents():
        _, public_ip = shakedown.run_command_on_agent(agent, detect_cmd)
        ips.append(public_ip)
    return ips
Provides a list public IPs for public agents in the cluster
39,544
def partition_agent(host):
    """Partition a node from all network traffic except SSH and loopback."""
    # Save the existing iptables rules before flushing, then rebuild the
    # restrictive rule set in order.
    network.save_iptables(host)
    network.flush_all_rules(host)
    network.allow_all_traffic(host)
    for rule in (ALLOW_SSH, ALLOW_PING, DISALLOW_MESOS, DISALLOW_INPUT):
        network.run_iptables(host, rule)
Partition a node from all network traffic except for SSH and loopback
39,545
def kill_process_on_host(hostname, pattern):
    """Kill every process on hostname whose `ps` line matches pattern.

    Fix: corrected the failure message grammar
    ("Unable to killed pid" -> "Unable to kill pid").
    """
    # grep -v grep excludes the grep process itself from the match.
    status, stdout = run_command_on_agent(hostname, "ps aux | grep -v grep | grep '{}'".format(pattern))
    # Column 2 of `ps aux` output is the PID.
    pids = [p.strip().split()[1] for p in stdout.splitlines()]
    for pid in pids:
        status, stdout = run_command_on_agent(hostname, "sudo kill -9 {}".format(pid))
        if status:
            print("Killed pid: {}".format(pid))
        else:
            print("Unable to kill pid: {}".format(pid))
Kill the process matching pattern at ip
39,546
def kill_process_from_pid_file_on_host(hostname, pid_file='app.pid'):
    """Read a PID from pid_file on hostname, kill that process, and remove
    the pid file on success.

    Fix: corrected the failure message grammar
    ("Unable to killed pid" -> "Unable to kill pid").
    """
    status, pid = run_command_on_agent(hostname, 'cat {}'.format(pid_file))
    status, stdout = run_command_on_agent(hostname, "sudo kill -9 {}".format(pid))
    if status:
        print("Killed pid: {}".format(pid))
        # Clean up the stale pid file only after a successful kill.
        run_command_on_agent(hostname, 'rm {}'.format(pid_file))
    else:
        print("Unable to kill pid: {}".format(pid))
Retrieves the PID of a process from a pid file on host and kills it .
39,547
def wait_for(predicate, timeout_seconds=120, sleep_seconds=1, ignore_exceptions=True, inverse_predicate=False, noisy=False, required_consecutive_success_count=1):
    """Spin on a predicate, returning its (truthy) result.

    The predicate must succeed required_consecutive_success_count times
    in a row (inverse_predicate flips the success test). Exceptions are
    ignored unless ignore_exceptions is False. Raises TimeoutExpired
    when timeout_seconds elapses first.
    """
    count = 0
    start_time = time_module.time()
    timeout = Deadline.create_deadline(timeout_seconds)
    while True:
        try:
            result = predicate()
        except Exception as e:
            if ignore_exceptions:
                if noisy:
                    logger.exception("Ignoring error during wait.")
            else:
                # NOTE(review): count resets only before re-raising; a
                # falsy result does not reset the consecutive-success
                # count — confirm whether that is intended.
                count = 0
                raise
        else:
            if (not inverse_predicate and result) or (inverse_predicate and not result):
                count = count + 1
                if count >= required_consecutive_success_count:
                    return result
        if timeout.is_expired():
            funname = __stringify_predicate(predicate)
            raise TimeoutExpired(timeout_seconds, funname)
        if noisy:
            header = '{}[{}/{}]'.format(shakedown.cli.helpers.fchr('>>'), pretty_duration(time_module.time() - start_time), pretty_duration(timeout_seconds))
            if required_consecutive_success_count > 1:
                header = '{} [{} of {} times]'.format(header, count, required_consecutive_success_count)
            print('{} spinning...'.format(header))
        time_module.sleep(sleep_seconds)
waits or spins for a predicate returning the result . Predicate is a function that returns a truthy or falsy value . An exception in the function will be returned . A timeout will throw a TimeoutExpired Exception .
39,548
def __stringify_predicate(predicate):
    """Reflect the function name and parameters of a wait predicate, for
    use in timeout error messages.

    NOTE(review): this is best-effort text scraping of the call-site
    source and stack frames; it is highly dependent on how the caller
    formatted the predicate expression.
    """
    # Third whitespace-separated token of the call-site source is assumed
    # to be the predicate expression (e.g. the lambda body's call).
    funname = getsource(predicate).strip().split(' ')[2].rstrip(',')
    params = 'None'
    if '()' not in funname:
        # Walk outer frames looking for one that mentions the function,
        # and report that frame's argument values.
        stack = getouterframes(currentframe())
        for frame in range(0, len(stack)):
            if funname in str(stack[frame]):
                _, _, _, params = getargvalues(stack[frame][0])
    return "function: {} params: {}".format(funname, params)
Reflection of function name and parameters of the predicate being used .
39,549
def time_wait(predicate, timeout_seconds=120, sleep_seconds=1, ignore_exceptions=True, inverse_predicate=False, noisy=True, required_consecutive_success_count=1):
    """Spin on a predicate and return how long the wait took (seconds).

    Raises TimeoutExpired when the timeout elapses first; see wait_for
    for the meaning of the remaining parameters.
    """
    started_at = time_module.time()
    wait_for(
        predicate,
        timeout_seconds,
        sleep_seconds,
        ignore_exceptions,
        inverse_predicate,
        noisy,
        required_consecutive_success_count)
    return elapse_time(started_at)
waits or spins for a predicate and returns the time of the wait . An exception in the function will be returned . A timeout will throw a TimeoutExpired Exception .
39,550
def wait_while_exceptions(predicate, timeout_seconds=120, sleep_seconds=1, noisy=False):
    """Spin on a predicate until it stops raising, returning its result.

    Any non-exception result (truthy or falsy) is returned immediately;
    exceptions trigger sleep-and-retry. Raises TimeoutExpired when
    timeout_seconds elapses first.
    """
    start_time = time_module.time()
    timeout = Deadline.create_deadline(timeout_seconds)
    while True:
        try:
            result = predicate()
            return result
        except Exception as e:
            if noisy:
                logger.exception("Ignoring error during wait.")
        if timeout.is_expired():
            funname = __stringify_predicate(predicate)
            raise TimeoutExpired(timeout_seconds, funname)
        if noisy:
            # Progress header: elapsed / total budget.
            header = '{}[{}/{}]'.format(shakedown.cli.helpers.fchr('>>'), pretty_duration(time_module.time() - start_time), pretty_duration(timeout_seconds))
            print('{} spinning...'.format(header))
        time_module.sleep(sleep_seconds)
waits for a predicate ignoring exceptions returning the result . Predicate is a function . Exceptions will trigger the sleep and retry ; any non - exception result will be returned . A timeout will throw a TimeoutExpired Exception .
39,551
def elapse_time(start, end=None, precision=3):
    """Return the seconds elapsed between start and end (now when end is
    omitted), rounded to the given precision."""
    finish = time_module.time() if end is None else end
    return round(finish - start, precision)
Simple time calculation utility . Given a start time it will provide an elapse time .
39,552
def set_config_defaults(args):
    """Fill falsy configuration values with their defaults and return args."""
    defaults = {'fail': 'fast', 'stdout': 'fail'}
    for key, fallback in defaults.items():
        if not args[key]:
            args[key] = fallback
    return args
Set configuration defaults
39,553
def banner():
    """Display the product banner.

    Renders block-character art (via %-substitution of styled fragments)
    on terminals known to support it; otherwise falls back to a one-line
    plain banner.
    """
    # Each key maps to a styled character fragment used by banner_map;
    # suffix 0 = normal weight, 1 = bold. z0 is a plain space.
    banner_dict = {
        'a0': click.style(chr(9601), fg='magenta'),
        'a1': click.style(chr(9601), fg='magenta', bold=True),
        'b0': click.style(chr(9616), fg='magenta'),
        'c0': click.style(chr(9626), fg='magenta'),
        'c1': click.style(chr(9626), fg='magenta', bold=True),
        'd0': click.style(chr(9622), fg='magenta'),
        'd1': click.style(chr(9622), fg='magenta', bold=True),
        'e0': click.style(chr(9623), fg='magenta'),
        'e1': click.style(chr(9623), fg='magenta', bold=True),
        'f0': click.style(chr(9630), fg='magenta'),
        'f1': click.style(chr(9630), fg='magenta', bold=True),
        'g1': click.style(chr(9612), fg='magenta', bold=True),
        'h0': click.style(chr(9624), fg='magenta'),
        'h1': click.style(chr(9624), fg='magenta', bold=True),
        'i0': click.style(chr(9629), fg='magenta'),
        'i1': click.style(chr(9629), fg='magenta', bold=True),
        'j0': click.style(fchr('>>'), fg='magenta'),
        'k0': click.style(chr(9473), fg='magenta'),
        'l0': click.style('_', fg='magenta'),
        'l1': click.style('_', fg='magenta', bold=True),
        'v0': click.style('mesosphere', fg='magenta'),
        'x1': click.style('shakedown', fg='magenta', bold=True),
        'y0': click.style('v' + shakedown.VERSION, fg='magenta'),
        'z0': chr(32)
    }
    banner_map = [
        " %(z0)s%(z0)s%(l0)s%(l0)s%(l1)s%(l0)s%(l1)s%(l1)s%(l1)s%(l1)s%(l1)s%(l1)s%(l1)s%(l1)s",
        " %(z0)s%(b0)s%(z0)s%(c0)s%(z0)s%(d0)s%(z0)s%(z0)s%(z0)s%(z0)s%(e1)s%(z0)s%(f1)s%(z0)s%(g1)s",
        " %(z0)s%(b0)s%(z0)s%(z0)s%(c0)s%(z0)s%(h0)s%(e0)s%(d1)s%(i1)s%(z0)s%(f1)s%(z0)s%(z0)s%(g1)s%(z0)s%(j0)s%(v0)s %(x1)s %(y0)s",
        " %(z0)s%(b0)s%(z0)s%(z0)s%(f0)s%(c0)s%(i0)s%(z0)s%(z0)s%(h1)s%(f1)s%(c1)s%(z0)s%(z0)s%(g1)s%(z0)s%(k0)s%(k0)s%(k0)s%(k0)s%(k0)s%(k0)s%(k0)s%(k0)s%(k0)s%(k0)s%(k0)s%(k0)s%(k0)s%(k0)s%(k0)s%(k0)s%(k0)s%(k0)s%(k0)s%(k0)s%(k0)s%(k0)s%(k0)s%(k0)s%(k0)s%(k0)s%(k0)s%(k0)s%(k0)s%(z0)s%(k0)s%(k0)s%(z0)s%(z0)s%(k0)s",
        " %(z0)s%(i0)s%(f0)s%(h0)s%(z0)s%(z0)s%(c0)s%(z0)s%(z0)s%(f0)s%(z0)s%(z0)s%(i1)s%(c1)s%(h1)s",
        " %(z0)s%(z0)s%(z0)s%(z0)s%(z0)s%(z0)s%(z0)s%(c0)s%(f0)s",
    ]
    # Only render the art on terminals known to handle the glyphs.
    if 'TERM' in os.environ and os.environ['TERM'] in ('velocity', 'xterm', 'xterm-256color', 'xterm-color'):
        return echo("\n".join(banner_map) % banner_dict)
    else:
        return echo(fchr('>>') + 'mesosphere shakedown v' + shakedown.VERSION, b=True)
Display a product banner
39,554
def decorate(text, style):
    """Apply a named console decoration style to ``text``.

    Returns the styled string, or '' for an unknown style name.
    """
    quote_head = "\n" + chr(9485) + chr(9480) * 2 + ' ' + text
    styles = {
        'step-maj': click.style("\n" + '> ' + text, fg='yellow', bold=True),
        'step-min': click.style(' - ' + text + ' ', bold=True),
        'item-maj': click.style(' - ' + text + ' '),
        'item-min': click.style(' - ' + text + ' '),
    }
    # fail/pass/skip families share a structure and differ only in color.
    for outcome, color in (('fail', 'red'), ('pass', 'green'), ('skip', 'yellow')):
        styles['quote-head-' + outcome] = click.style(quote_head, fg=color)
        styles['quote-' + outcome] = re.sub(
            '^', click.style(chr(9482) + ' ', fg=color), text, flags=re.M)
        styles[outcome] = click.style(text + ' ', fg=color)
    return styles.get(style, '')
Console decoration style definitions
39,555
def echo(text, **kwargs):
    """Print ``text`` to the console.

    Keyword options: ``n`` (append newline, default True) and ``d``
    (decorate with the named style). Suppressed entirely in quiet mode.
    """
    if shakedown.cli.quiet:
        return
    kwargs.setdefault('n', True)
    if 'd' in kwargs:
        text = decorate(text, kwargs['d'])
    # The 'velocity' terminal gets raw, flushed output instead of click.echo.
    if os.environ.get('TERM') == 'velocity':
        if text:
            print(text, end="", flush=True)
        if kwargs.get('n'):
            print()
    else:
        click.echo(text, nl=kwargs.get('n'))
Print results to the console
39,556
def add_user(uid, password, desc=None):
    """Create a user in DC/OS Enterprise.

    The uid doubles as the description when none is given. An already
    existing user (HTTP 409) is treated as success.
    """
    description = uid if desc is None else desc
    url = urljoin(_acl_url(), 'users/{}'.format(uid))
    try:
        response = http.put(url, json={"description": description,
                                       "password": password})
        assert response.status_code == 201
    except DCOSHTTPException as e:
        # 409 Conflict: the user already exists; anything else is unexpected.
        if e.response.status_code != 409:
            raise
Adds a user to DC/OS Enterprise. If no description is provided, the uid is used as the description.
39,557
def get_user(uid):
    """Return the DC/OS Enterprise user record, or None if it does not exist."""
    url = urljoin(_acl_url(), 'users/{}'.format(uid))
    try:
        return http.get(url).json()
    except DCOSHTTPException as e:
        if e.response.status_code == 400:
            # The ACL service signals "no such user" with HTTP 400.
            return None
        raise
Returns a user from the DCOS Enterprise . It returns None if none exists .
39,558
def remove_user(uid):
    """Delete a DC/OS Enterprise user; a missing user (HTTP 400) is ignored."""
    url = urljoin(_acl_url(), 'users/{}'.format(uid))
    try:
        response = http.delete(url)
        assert response.status_code == 204
    except DCOSHTTPException as e:
        if e.response.status_code != 400:
            raise
Removes a user from the DCOS Enterprise .
39,559
def remove_user_permission(rid, uid, action='full'):
    """Revoke a user's permission (default 'full') on a resource.

    A missing grant (HTTP 400) is silently ignored.
    """
    # Resource ids may embed '/', which must be double-escaped in the path.
    escaped_rid = rid.replace('/', '%252F')
    url = urljoin(_acl_url(),
                  'acls/{}/users/{}/{}'.format(escaped_rid, uid, action))
    try:
        response = http.delete(url)
        assert response.status_code == 204
    except DCOSHTTPException as e:
        if e.response.status_code != 400:
            raise
Removes user permission on a given resource .
39,560
def no_user():
    """Context helper: temporarily clear the ACS token (no logged-in user)."""
    saved_token = dcos_acs_token()
    dcos.config.set_val('core.dcos_acs_token', '')
    yield
    # Restore the original token when the context exits.
    dcos.config.set_val('core.dcos_acs_token', saved_token)
Provides a context with no logged in user .
39,561
def new_dcos_user(user_id, password):
    """Context helper: create ``user_id``, log in as it, and clean up after.

    On exit the previous token is restored and the user is deleted.
    """
    saved_token = dcos_acs_token()
    shakedown.add_user(user_id, password, user_id)
    new_token = shakedown.authenticate(user_id, password)
    dcos.config.set_val('core.dcos_acs_token', new_token)
    yield
    dcos.config.set_val('core.dcos_acs_token', saved_token)
    shakedown.remove_user(user_id)
Provides a context with a newly created user .
39,562
def dcos_user(user_id, password):
    """Context helper: authenticate as an existing (non-super) user."""
    saved_token = dcos_acs_token()
    new_token = shakedown.authenticate(user_id, password)
    dcos.config.set_val('core.dcos_acs_token', new_token)
    yield
    dcos.config.set_val('core.dcos_acs_token', saved_token)
Provides a context authenticated as a user other than the superuser.
39,563
def add_group(id, description=None):
    """Create a group in DC/OS Enterprise.

    The id doubles as the description when none is given. An already
    existing group (HTTP 409) is treated as success.
    """
    payload = {'description': description or id}
    url = urljoin(_acl_url(), 'groups/{}'.format(id))
    try:
        response = http.put(url, json=payload)
        assert response.status_code == 201
    except DCOSHTTPException as e:
        if e.response.status_code != 409:
            raise
Adds a group to DC/OS Enterprise. If no description is provided, the id is used as the description.
39,564
def get_group(id):
    """Return the DC/OS Enterprise group record, or None (HTTP 400) if absent."""
    url = urljoin(_acl_url(), 'groups/{}'.format(id))
    try:
        return http.get(url).json()
    except DCOSHTTPException as e:
        if e.response.status_code != 400:
            raise
    # HTTP 400 falls through here: the group does not exist -> implicit None.
Returns a group from the DCOS Enterprise . It returns None if none exists .
39,565
def remove_group(id):
    """Remove a group from DC/OS Enterprise.

    The group is removed regardless of associated users. A bad request
    (HTTP 400, e.g. missing group) is silently ignored.
    """
    acl_url = urljoin(_acl_url(), 'groups/{}'.format(id))
    try:
        r = http.delete(acl_url)
        # NOTE(review): sibling helpers assert status 204 here; this print
        # looks like leftover debug output -- confirm before changing.
        print(r.status_code)
    except DCOSHTTPException as e:
        if e.response.status_code != 400:
            raise
Removes a group from the DCOS Enterprise . The group is removed regardless of associated users .
39,566
def add_user_to_group(uid, gid, exist_ok=True):
    """Add user ``uid`` to group ``gid``; both must already exist.

    With exist_ok (default), an existing membership (HTTP 409) is ignored.
    """
    url = urljoin(_acl_url(), 'groups/{}/users/{}'.format(gid, uid))
    try:
        response = http.put(url)
        assert response.status_code == 204
    except DCOSHTTPException as e:
        if not (e.response.status_code == 409 and exist_ok):
            raise
Adds a user to a group within DCOS Enterprise . The group and user must exist .
39,567
def remove_user_from_group(uid, gid):
    """Remove user ``uid`` from group ``gid``.

    A bad request (e.g. membership did not exist) is silently ignored.
    """
    url = urljoin(_acl_url(), 'groups/{}/users/{}'.format(gid, uid))
    try:
        response = http.delete(url)
        assert response.status_code == 204
    except dcos.errors.DCOSBadRequest:
        # Nothing to remove.
        pass
Removes a user from a group within DCOS Enterprise .
39,568
def easybake(css_in, html_in=sys.stdin, html_out=sys.stdout, last_step=None,
             coverage_file=None, use_repeatable_ids=False):
    """Bake the HTML stream ``html_in`` with the CSS recipe ``css_in``.

    Writes the baked XML to ``html_out``; optionally appends an lcov-style
    coverage record to ``coverage_file``.
    """
    tree = etree.parse(html_in)
    oven = Oven(css_in, use_repeatable_ids)
    oven.bake(tree, last_step)
    print(etree.tostring(tree, method="xml").decode('utf-8'), file=html_out)
    if coverage_file:
        # lcov record: source-file header, coverage lines, terminator.
        print('SF:{}'.format(css_in.name), file=coverage_file)
        print(oven.get_coverage_report(), file=coverage_file)
        print('end_of_record', file=coverage_file)
Process the given HTML file stream with the css stream .
39,569
def main(argv=None):
    """Command-line entry point wrapping :func:`easybake`.

    Parses arguments, configures stderr logging, runs the bake, and
    closes all file arguments afterwards.
    """
    parser = argparse.ArgumentParser(description="Process raw HTML to baked"
                                                 " (embedded numbering and"
                                                 " collation)")
    parser.add_argument('-v', '--version', action="version",
                        version=__version__,
                        help='Report the library version')
    parser.add_argument("css_rules", type=argparse.FileType('rb'),
                        help="CSS3 ruleset stylesheet recipe")
    parser.add_argument("html_in", nargs="?", type=argparse.FileType('r'),
                        help="raw HTML file to bake (default stdin)",
                        default=sys.stdin)
    parser.add_argument("html_out", nargs="?", type=argparse.FileType('w'),
                        help="baked HTML file output (default stdout)",
                        default=sys.stdout)
    parser.add_argument('-s', '--stop-at', action='store', metavar='<pass>',
                        help='Stop baking just before given pass name')
    parser.add_argument('-d', '--debug', action='store_true',
                        help='Send debugging info to stderr')
    parser.add_argument('-q', '--quiet', action='store_true',
                        help="Quiet all on stderr except errors")
    parser.add_argument('-c', '--coverage-file', metavar='coverage.lcov',
                        type=FileTypeExt('w'),
                        help="output coverage file (lcov format). If "
                             "filename starts with '+', append coverage info.")
    parser.add_argument('--use-repeatable-ids', action='store_true',
                        help="use repeatable id attributes instead of uuids "
                             "which is useful for diffing")
    args = parser.parse_args(argv)

    # Route log output to stderr; --debug takes precedence over --quiet.
    handler = logging.StreamHandler(sys.stderr)
    handler.setFormatter(logging.Formatter('%(name)s %(levelname)s %(message)s'))
    logger.addHandler(handler)
    if args.debug:
        logger.setLevel(logging.DEBUG)
    elif args.quiet:
        logger.setLevel(logging.ERROR)
    else:
        logger.setLevel(logging.WARNING)

    try:
        easybake(args.css_rules, args.html_in, args.html_out, args.stop_at,
                 args.coverage_file, args.use_repeatable_ids)
    finally:
        # Close every stream argument that was actually opened.
        for stream in (args.css_rules, args.html_in, args.html_out,
                       args.coverage_file):
            if stream:
                stream.close()
Commandline script wrapping Baker .
39,570
def mom_version(name='marathon-user'):
    """Return the version of the named Marathon-on-Marathon instance.

    Returns None (with a warning) when the MoM service is not available.
    """
    if not service_available_predicate(name):
        print('WARN: {} MoM not found. Version is None'.format(name))
        return None
    with marathon_on_marathon(name):
        return marathon_version()
Returns the version of marathon on marathon .
39,571
def get_transport(host, username, key):
    """Create a paramiko transport to ``host``.

    Connections to non-master hosts are tunneled through the mesos master.
    Returns False when the master tunnel cannot be established.
    """
    if host == shakedown.master_ip():
        return paramiko.Transport(host)

    # Tunnel through the master: authenticate there first, then open a
    # direct-tcpip channel to the target host's SSH port.
    master_transport = paramiko.Transport(shakedown.master_ip())
    master_transport = start_transport(master_transport, username, key)
    if not master_transport.is_authenticated():
        print("error: unable to authenticate {}@{} with key {}".format(
            username, shakedown.master_ip(), key))
        return False
    try:
        tunnel = master_transport.open_channel(
            'direct-tcpip', (host, 22), ('127.0.0.1', 0))
    except paramiko.SSHException:
        print("error: unable to connect to {}".format(host))
        return False
    return paramiko.Transport(tunnel)
Create a transport object
39,572
def start_transport(transport, username, key):
    """Start a transport client and authenticate with the first usable key.

    Tries the explicit ``key`` (if any), then every key offered by the
    local SSH agent. Raises ValueError when none authenticates.
    """
    transport.start_client()
    candidates = []
    if key:
        candidates.append(key)
    candidates.extend(paramiko.agent.Agent().get_keys())
    for candidate in candidates:
        try:
            transport.auth_publickey(username, candidate)
            break
        except paramiko.AuthenticationException:
            continue
    else:
        raise ValueError('No valid key supplied')
    return transport
Begin a transport client and authenticate it
39,573
def validate_key(key_path):
    """Validate an RSA private-key file.

    Returns the loaded paramiko RSAKey, or False when the path (after
    ~-expansion) is not a regular file.
    """
    expanded = os.path.expanduser(key_path)
    if not os.path.isfile(expanded):
        return False
    return paramiko.RSAKey.from_private_key_file(expanded)
Validate a key
39,574
def get_marathon_task(task_name, inactive=False, completed=False):
    """Get a dictionary describing a named marathon task.

    Thin wrapper around get_service_task() fixed to the 'marathon' service.
    """
    return get_service_task('marathon', task_name, inactive, completed)
Get a dictionary describing a named marathon task
39,575
def get_mesos_task(task_name):
    """Return the first mesos task whose name matches, or None."""
    all_tasks = get_mesos_tasks() or []
    return next((task for task in all_tasks if task['name'] == task_name), None)
Get a mesos task with a specific task name
39,576
def service_healthy(service_name, app_id=None):
    """Check whether a named service (optionally a single app) is healthy.

    Healthy means: running and healthy tasks present, with no staged or
    unhealthy tasks.
    """
    client = marathon.create_client()
    apps = client.get_apps_for_framework(service_name)
    if not apps:
        return False
    for app in apps:
        # When an app_id filter is given, skip non-matching apps.
        if (app_id is not None) and (app['id'] != "/{}".format(str(app_id))):
            continue
        healthy = (app['tasksHealthy'] and app['tasksRunning']
                   and not app['tasksStaged'] and not app['tasksUnhealthy'])
        if healthy:
            return True
    return False
Check whether a named service is healthy
39,577
def delete_persistent_data(role, zk_node):
    """Delete any persistent data for the given role and/or zookeeper node.

    Either argument may be falsy to skip that cleanup step.
    """
    if role:
        # Volumes must be destroyed before the role's reservations are freed.
        destroy_volumes(role)
        unreserve_resources(role)
    if zk_node:
        delete_zk_node(zk_node)
Deletes any persistent data associated with the specified role and zk node .
39,578
def destroy_volumes(role):
    """Destroy the role's volumes on every slave in the cluster.

    Returns True only when every per-agent deletion succeeded; False also
    when the agent state cannot be fetched.
    """
    state = dcos_agents_state()
    if not state or 'slaves' not in state:
        return False
    # Materialize the list so EVERY agent is attempted, even after a failure.
    results = [destroy_volume(agent, role) for agent in state['slaves']]
    return all(results)
Destroys all volumes on all the slaves in the cluster for the role .
39,579
def destroy_volume(agent, role):
    """Delete the persistent volumes reserved for ``role`` on one agent.

    Collects the agent's persistent disk reservations for the role and
    posts a destroy-volumes request to the master. Returns True when
    there is nothing to delete or the master accepted the request.
    """
    volumes = []
    agent_id = agent['id']
    reserved_resources_full = agent.get('reserved_resources_full', None)
    if not reserved_resources_full:
        # Nothing reserved on this agent at all.
        return True
    reserved_resources = reserved_resources_full.get(role, None)
    if not reserved_resources:
        # Nothing reserved for this role.
        return True
    for reserved_resource in reserved_resources:
        name = reserved_resource.get('name', None)
        disk = reserved_resource.get('disk', None)
        # Only persistent disk reservations are deletable volumes.
        if name == 'disk' and disk is not None and 'persistence' in disk:
            volumes.append(reserved_resource)
    req_url = urljoin(master_url(), 'destroy-volumes')
    data = {
        'slaveId': agent_id,
        'volumes': json.dumps(volumes)
    }
    success = False
    try:
        response = http.post(req_url, data=data)
        success = 200 <= response.status_code < 300
        if response.status_code == 409:
            # 409: the volume is still in use (e.g. by a running task).
            # NOTE(review): original code printed a blank line here; kept.
            print()
    except DCOSHTTPException as e:
        # Fixed typo in the error message ("Unabled" -> "Unable").
        print("HTTP {}: Unable to delete volume based on: {}".format(
            e.response.status_code, e.response.text))
    return success
Deletes the volumes on the specific agent for the role
39,580
def unreserve_resources(role):
    """Unreserve the role's resources on every slave in the cluster.

    Returns True only when every per-agent unreserve succeeded; False also
    when the agent state cannot be fetched.
    """
    state = dcos_agents_state()
    if not state or 'slaves' not in state:
        return False
    # Materialize the list so EVERY agent is attempted, even after a failure.
    results = [unreserve_resource(agent, role) for agent in state['slaves']]
    return all(results)
Unreserves all the resources for all the slaves for the role .
39,581
def wait_for_service_endpoint(service_name, timeout_sec=120):
    """Wait until the service endpoint is available.

    The check must succeed consecutively once per master node before the
    wait is considered complete.
    """
    def available():
        return service_available_predicate(service_name)

    master_count = len(get_all_masters())
    return time_wait(available,
                     timeout_seconds=timeout_sec,
                     required_consecutive_success_count=master_count)
Checks the service URL; returns True once the service is available, or False when the timeout expires.
39,582
def task_states_predicate(service_name, expected_task_count, expected_task_states):
    """Return True when the service has at least ``expected_task_count``
    tasks in any of ``expected_task_states`` (e.g. TASK_RUNNING)."""
    try:
        tasks = get_service_tasks(service_name)
    except (DCOSConnectionError, DCOSHTTPException):
        # Treat a flaky cluster API as "no tasks yet".
        tasks = []
    matched = []
    others = []
    for task in tasks:
        task_name = task.get('name', 'UNKNOWN_NAME')
        task_state = task.get('state', None)
        if task_state and task_state in expected_task_states:
            matched.append(task_name)
        else:
            others.append('{}={}'.format(task_name, task_state))
    print('expected {} tasks in {}:\n- {} in expected {}: {}\n- {} in other states: {}'.format(
        expected_task_count, ', '.join(expected_task_states),
        len(matched), ', '.join(expected_task_states), ', '.join(matched),
        len(others), ', '.join(others)))
    return len(matched) >= expected_task_count
Returns whether the provided service_names s tasks have expected_task_count tasks in any of expected_task_states . For example if service foo has 5 tasks which are TASK_STAGING or TASK_RUNNING .
39,583
def tasks_all_replaced_predicate(service_name, old_task_ids, task_predicate=None):
    """Return True once every id in ``old_task_ids`` has been replaced and
    the task count has not shrunk."""
    try:
        current_ids = get_service_task_ids(service_name, task_predicate)
    except DCOSHTTPException:
        print('failed to get task ids for service {}'.format(service_name))
        current_ids = []
    print('waiting for all task ids in "{}" to change:\n- old tasks: {}\n- current tasks: {}'.format(
        service_name, old_task_ids, current_ids))
    # Any surviving old id means replacement is not complete.
    if any(task_id in old_task_ids for task_id in current_ids):
        return False
    return len(current_ids) >= len(old_task_ids)
Returns whether ALL of old_task_ids have been replaced with new tasks
39,584
def tasks_missing_predicate(service_name, old_task_ids, task_predicate=None):
    """Return True if any id in ``old_task_ids`` is no longer present."""
    try:
        current_ids = get_service_task_ids(service_name, task_predicate)
    except DCOSHTTPException:
        print('failed to get task ids for service {}'.format(service_name))
        current_ids = []
    print('checking whether old tasks in "{}" are missing:\n- old tasks: {}\n- current tasks: {}'.format(
        service_name, old_task_ids, current_ids))
    return any(task_id not in current_ids for task_id in old_task_ids)
Returns whether any of old_task_ids are no longer present
39,585
def wait_for_service_tasks_all_changed(service_name, old_task_ids,
                                       task_predicate=None, timeout_sec=120):
    """Block until every id in ``old_task_ids`` has been replaced by a new task."""
    def all_replaced():
        return tasks_all_replaced_predicate(service_name, old_task_ids,
                                            task_predicate)

    return time_wait(all_replaced, timeout_seconds=timeout_sec)
Returns once ALL of old_task_ids have been replaced with new tasks
39,586
def wait_for_service_tasks_all_unchanged(service_name, old_task_ids,
                                         task_predicate=None, timeout_sec=30):
    """Verify that NONE of ``old_task_ids`` disappear within the window.

    Returns ``timeout_sec`` when the full window elapses with all tasks
    intact; raises DCOSException as soon as any old task goes missing.
    """
    def any_missing():
        return tasks_missing_predicate(service_name, old_task_ids,
                                       task_predicate)

    try:
        time_wait(any_missing, timeout_seconds=timeout_sec)
    except TimeoutExpired:
        # The timeout firing is the SUCCESS case: nothing went missing.
        return timeout_sec
    raise DCOSException(
        "One or more of the following tasks were no longer found: {}".format(
            old_task_ids))
Returns after verifying that NONE of old_task_ids have been removed or replaced from the service
39,587
def distribute_docker_credentials_to_private_agents(username, password, file_name='docker.tar.gz'):
    """Create a docker credentials archive and distribute it to all private agents.

    The local archive is always removed afterwards, even on failure.
    """
    create_docker_credentials_file(username, password, file_name)
    try:
        # NOTE(review): the helper takes no filename -- presumably it assumes
        # the default 'docker.tar.gz'; confirm behavior for custom file_name.
        __distribute_docker_credentials_file()
    finally:
        os.remove(file_name)
Create and distributes a docker credentials file to all private agents
39,588
def prefetch_docker_image_on_private_agents(
        image, timeout=timedelta(minutes=5).total_seconds()):
    """Prefetch ``image`` onto every private agent.

    Scales a trivial docker app with one instance per private agent so each
    node pulls the image, then removes the app again.
    """
    instance_count = len(shakedown.get_private_agents())
    app_def = {
        "id": "/prefetch",
        "instances": instance_count,
        "container": {
            "type": "DOCKER",
            "docker": {"image": image}
        },
        "cpus": 0.1,
        "mem": 128
    }
    client = marathon.create_client()
    client.add_app(app_def)
    shakedown.deployment_wait(timeout)
    # The app only exists to trigger the image pull; remove it afterwards.
    shakedown.delete_all_apps()
    shakedown.deployment_wait(timeout)
Given a docker image, an app using that image is scaled across the private agents to ensure the image is prefetched onto every node.
39,589
def _get_options ( options_file = None ) : if options_file is not None : with open ( options_file , 'r' ) as opt_file : options = json . loads ( opt_file . read ( ) ) else : options = { } return options
Read in options_file as JSON .
39,590
def package_installed(package_name, service_name=None):
    """Check whether ``package_name`` is currently installed.

    True when either an app instance exists or a CLI subcommand for the
    package is installed.
    """
    package_manager = _get_package_manager()
    app_installed = bool(package_manager.installed_apps(package_name, service_name))
    # Inspect every installed subcommand's package metadata.
    subcommand_names = [subcmd.package_json()['name']
                        for subcmd in package.installed_subcommands()]
    return app_installed or package_name in subcommand_names
Check whether the package package_name is currently installed .
39,591
def add_package_repo(repo_name, repo_url, index=None, wait_for_package=None,
                     expect_prev_version=None):
    """Add a repository to the list of package sources.

    When ``wait_for_package`` is given, also wait until that package's
    advertised version changes. Returns False on failure or wait timeout.
    """
    package_manager = _get_package_manager()
    prev_version = None
    if wait_for_package:
        # Snapshot the version before the repo change so we can detect it.
        prev_version = package_manager.get_package_version(wait_for_package, None)
    if not package_manager.add_repo(repo_name, repo_url, index):
        return False
    if not wait_for_package:
        return True
    try:
        spinner.time_wait(lambda: package_version_changed_predicate(
            package_manager, wait_for_package, prev_version))
    except TimeoutExpired:
        return False
    return True
Add a repository to the list of package sources
39,592
def remove_package_repo(repo_name, wait_for_package=None):
    """Remove a repository from the list of package sources.

    When ``wait_for_package`` is given, also wait until that package's
    advertised version changes. Returns False on failure or wait timeout.
    """
    package_manager = _get_package_manager()
    prev_version = None
    if wait_for_package:
        # Snapshot the version before the repo change so we can detect it.
        prev_version = package_manager.get_package_version(wait_for_package, None)
    if not package_manager.remove_repo(repo_name):
        return False
    if not wait_for_package:
        return True
    try:
        spinner.time_wait(lambda: package_version_changed_predicate(
            package_manager, wait_for_package, prev_version))
    except TimeoutExpired:
        return False
    return True
Remove a repository from the list of package sources
39,593
def tile(self, z, x, y):
    """Download the specified tile from ``tiles_url`` and return its bytes.

    Raises DownloadError on an unknown URL placeholder, a non-200 status,
    or after exhausting the retry budget on connection errors.
    """
    logger.debug(_("Download tile %s") % ((z, x, y),))
    size = self.tilesize
    # Round-robin subdomain selection based on tile coordinates.
    s = self.tiles_subdomains[(x + y) % len(self.tiles_subdomains)]
    try:
        # The URL template is filled from local variable NAMES (z, x, y,
        # size, s, ...) -- renaming any local here would change behavior.
        url = self.tiles_url.format(**locals())
    except KeyError as e:
        raise DownloadError(_("Unknown keyword %s in URL") % e)
    logger.debug(_("Retrieve tile at %s") % url)
    r = DOWNLOAD_RETRIES
    sleeptime = 1
    while r > 0:
        try:
            request = requests.get(url, headers=self.headers)
            if request.status_code == 200:
                return request.content
            # Non-200 is NOT retried: this DownloadError escapes the
            # surrounding try (only ConnectionError is caught below).
            raise DownloadError(_("Status code : %s, url : %s") % (request.status_code, url))
        except requests.exceptions.ConnectionError as e:
            logger.debug(_("Download error, retry (%s left). (%s)") % (r, e))
            r -= 1
            time.sleep(sleeptime)
            # Gentle backoff: grow the sleep (capped around 10s) every
            # other remaining-retry count.
            if (sleeptime <= 10) and (r % 2 == 0):
                sleeptime += 1
    raise DownloadError(_("Cannot download URL %s") % url)
Download the specified tile from tiles_url
39,594
def project(self, lng_lat):
    """Project a WGS84 (lng, lat) pair to spherical-mercator meters.

    Latitude is clamped to the mercator-valid range before projection.
    """
    lng, lat = lng_lat
    lat = max(min(MAX_LATITUDE, lat), -MAX_LATITUDE)
    x_meters = lng * DEG_TO_RAD * EARTH_RADIUS
    y_meters = log(tan((pi / 4) + (lat * DEG_TO_RAD / 2))) * EARTH_RADIUS
    return (x_meters, y_meters)
Returns the coordinates in meters from WGS84
39,595
def add_filter(self, filter_):
    """Register an image filter for tile post-processing.

    The filter's basename is appended to the cache basename so filtered
    tiles are cached separately.
    """
    assert has_pil, _("Cannot add filters without python PIL")
    self._filters.append(filter_)
    self.cache.basename += filter_.basename
Add an image filter for post - processing
39,596
def grid(self, z_x_y):
    """Return the UTFGrid content for tile ``(z, x, y)``."""
    z, x, y = z_x_y
    return self.reader.grid(z, x, y, self.grid_fields, self.grid_layer)
Return the UTFGrid content
39,597
def _blend_layers(self, imagecontent, z_x_y):
    """Composite every registered overlay layer onto the base tile content.

    Layers that fail to download/extract are skipped with a warning.
    Returns the merged tile serialized back to bytes.
    """
    z, x, y = z_x_y
    base = self._tile_image(imagecontent)
    for layer, opacity in self._layers:
        try:
            overlay = self._tile_image(layer.tile((z, x, y)))
        except (IOError, DownloadError, ExtractionError) as err:
            logger.warn(err)
            continue
        # Scale the overlay's alpha channel by the configured opacity,
        # then paste using that alpha as the mask.
        overlay = overlay.convert("RGBA")
        red, green, blue, alpha = overlay.split()
        overlay = Image.merge("RGB", (red, green, blue))
        alpha = ImageEnhance.Brightness(alpha).enhance(opacity)
        overlay.putalpha(alpha)
        mask = Image.merge("L", (alpha,))
        base.paste(overlay, (0, 0), mask)
    return self._image_tile(base)
Merge tiles of all layers into the specified tile path
39,598
def _tile_image(self, data):
    """Decode raw tile bytes into an RGBA PIL Image."""
    return Image.open(BytesIO(data)).convert('RGBA')
Tile binary content as PIL Image .
39,599
def zoomlevels(self):
    """Return the sorted list of distinct zoom levels across all coverages."""
    return sorted({zoom for coverage in self._bboxes for zoom in coverage[1]})
Return the list of covered zoom levels in ascending order