idx int64 0 252k | question stringlengths 48 5.28k | target stringlengths 5 1.23k |
|---|---|---|
245,500 | def dict_keys_without_hyphens ( a_dict ) : return dict ( ( key . replace ( '-' , '_' ) , val ) for key , val in a_dict . items ( ) ) | Return a new dict with underscores instead of hyphens in keys . |
245,501 | def update_relations ( context , namespace_separator = ':' ) : relation_type = charmhelpers . core . hookenv . relation_type ( ) relations = [ ] context [ 'current_relation' ] = { } if relation_type is not None : relation_data = charmhelpers . core . hookenv . relation_get ( ) context [ 'current_relation' ] = relation_... | Update the context with the relation data . |
245,502 | def juju_state_to_yaml ( yaml_path , namespace_separator = ':' , allow_hyphens_in_keys = True , mode = None ) : config = charmhelpers . core . hookenv . config ( ) config [ 'charm_dir' ] = charm_dir config [ 'local_unit' ] = charmhelpers . core . hookenv . local_unit ( ) config [ 'unit_private_address' ] = charmhelpers... | Update the juju config and state in a yaml file . |
245,503 | def get_audits ( ) : if subprocess . call ( [ 'which' , 'apache2' ] , stdout = subprocess . PIPE ) != 0 : log ( "Apache server does not appear to be installed on this node - " "skipping apache hardening" , level = INFO ) return [ ] context = ApacheConfContext ( ) settings = utils . get_settings ( 'apache' ) audits = [ ... | Get Apache hardening config audits . |
245,504 | def get_audits ( ) : audits = [ ] settings = utils . get_settings ( 'os' ) path_folders = { '/usr/local/sbin' , '/usr/local/bin' , '/usr/sbin' , '/usr/bin' , '/bin' } extra_user_paths = settings [ 'environment' ] [ 'extra_user_paths' ] path_folders . update ( extra_user_paths ) audits . append ( ReadOnly ( path_folders... | Get OS hardening access audits . |
245,505 | def harden ( overrides = None ) : if overrides is None : overrides = [ ] def _harden_inner1 ( f ) : _logged = { 'done' : False } def _harden_inner2 ( * args , ** kwargs ) : if _DISABLE_HARDENING_FOR_UNIT_TEST : return f ( * args , ** kwargs ) if not _logged [ 'done' ] : log ( "Hardening function '%s'" % ( f . __name__ ... | Hardening decorator . |
245,506 | def parse_mappings ( mappings , key_rvalue = False ) : parsed = { } if mappings : mappings = mappings . split ( ) for m in mappings : p = m . partition ( ':' ) if key_rvalue : key_index = 2 val_index = 0 if not p [ 1 ] : continue else : key_index = 0 val_index = 2 key = p [ key_index ] . strip ( ) parsed [ key ] = p [ ... | By default mappings are lvalue keyed . |
245,507 | def parse_data_port_mappings ( mappings , default_bridge = 'br-data' ) : _mappings = parse_mappings ( mappings , key_rvalue = True ) if not _mappings or list ( _mappings . values ( ) ) == [ '' ] : if not mappings : return { } _mappings = { mappings . split ( ) [ 0 ] : default_bridge } ports = _mappings . keys ( ) if le... | Parse data port mappings . |
245,508 | def parse_vlan_range_mappings ( mappings ) : _mappings = parse_mappings ( mappings ) if not _mappings : return { } mappings = { } for p , r in six . iteritems ( _mappings ) : mappings [ p ] = tuple ( r . split ( ':' ) ) return mappings | Parse vlan range mappings . |
245,509 | def extract_tarfile ( archive_name , destpath ) : "Unpack a tar archive, optionally compressed" archive = tarfile . open ( archive_name ) archive . extractall ( destpath ) | Unpack a tar archive optionally compressed |
245,510 | def extract_zipfile ( archive_name , destpath ) : "Unpack a zip file" archive = zipfile . ZipFile ( archive_name ) archive . extractall ( destpath ) | Unpack a zip file |
245,511 | def get_address_in_network ( network , fallback = None , fatal = False ) : if network is None : if fallback is not None : return fallback if fatal : no_ip_found_error_out ( network ) else : return None networks = network . split ( ) or [ network ] for network in networks : _validate_cidr ( network ) network = netaddr .... | Get an IPv4 or IPv6 address within the network from the host . |
245,512 | def is_ipv6 ( address ) : try : address = netaddr . IPAddress ( address ) except netaddr . AddrFormatError : return False return address . version == 6 | Determine whether provided address is IPv6 or not . |
245,513 | def is_address_in_network ( network , address ) : try : network = netaddr . IPNetwork ( network ) except ( netaddr . core . AddrFormatError , ValueError ) : raise ValueError ( "Network (%s) is not in CIDR presentation format" % network ) try : address = netaddr . IPAddress ( address ) except ( netaddr . core . AddrForm... | Determine whether the provided address is within a network range . |
245,514 | def _get_for_address ( address , key ) : address = netaddr . IPAddress ( address ) for iface in netifaces . interfaces ( ) : addresses = netifaces . ifaddresses ( iface ) if address . version == 4 and netifaces . AF_INET in addresses : addr = addresses [ netifaces . AF_INET ] [ 0 ] [ 'addr' ] netmask = addresses [ neti... | Retrieve an attribute of or the physical interface that the IP address provided could be bound to . |
245,515 | def resolve_network_cidr ( ip_address ) : netmask = get_netmask_for_address ( ip_address ) return str ( netaddr . IPNetwork ( "%s/%s" % ( ip_address , netmask ) ) . cidr ) | Resolves the full address cidr of an ip_address based on configured network interfaces |
245,516 | def get_iface_addr ( iface = 'eth0' , inet_type = 'AF_INET' , inc_aliases = False , fatal = True , exc_list = None ) : if '/' in iface : iface = iface . split ( '/' ) [ - 1 ] if not exc_list : exc_list = [ ] try : inet_num = getattr ( netifaces , inet_type ) except AttributeError : raise Exception ( "Unknown inet type ... | Return the assigned IP address for a given interface if any . |
245,517 | def get_iface_from_addr ( addr ) : for iface in netifaces . interfaces ( ) : addresses = netifaces . ifaddresses ( iface ) for inet_type in addresses : for _addr in addresses [ inet_type ] : _addr = _addr [ 'addr' ] ll_key = re . compile ( "(.+)%.*" ) raw = re . match ( ll_key , _addr ) if raw : _addr = raw . group ( 1... | Work out on which interface the provided address is configured . |
245,518 | def sniff_iface ( f ) : def iface_sniffer ( * args , ** kwargs ) : if not kwargs . get ( 'iface' , None ) : kwargs [ 'iface' ] = get_iface_from_addr ( unit_get ( 'private-address' ) ) return f ( * args , ** kwargs ) return iface_sniffer | Ensure decorated function is called with a value for iface . |
245,519 | def get_ipv6_addr ( iface = None , inc_aliases = False , fatal = True , exc_list = None , dynamic_only = True ) : addresses = get_iface_addr ( iface = iface , inet_type = 'AF_INET6' , inc_aliases = inc_aliases , fatal = fatal , exc_list = exc_list ) if addresses : global_addrs = [ ] for addr in addresses : key_scope_li... | Get assigned IPv6 address for a given interface . |
245,520 | def get_bridges ( vnic_dir = '/sys/devices/virtual/net' ) : b_regex = "%s/*/bridge" % vnic_dir return [ x . replace ( vnic_dir , '' ) . split ( '/' ) [ 1 ] for x in glob . glob ( b_regex ) ] | Return a list of bridges on the system . |
245,521 | def get_bridge_nics ( bridge , vnic_dir = '/sys/devices/virtual/net' ) : brif_regex = "%s/%s/brif/*" % ( vnic_dir , bridge ) return [ x . split ( '/' ) [ - 1 ] for x in glob . glob ( brif_regex ) ] | Return a list of nics comprising a given bridge on the system . |
245,522 | def is_ip ( address ) : try : address = netaddr . IPAddress ( address ) return True except ( netaddr . AddrFormatError , ValueError ) : return False | Returns True if address is a valid IP address . |
245,523 | def get_host_ip ( hostname , fallback = None ) : if is_ip ( hostname ) : return hostname ip_addr = ns_query ( hostname ) if not ip_addr : try : ip_addr = socket . gethostbyname ( hostname ) except Exception : log ( "Failed to resolve hostname '%s'" % ( hostname ) , level = WARNING ) return fallback return ip_addr | Resolves the IP for a given hostname or returns the input if it is already an IP . |
245,524 | def get_hostname ( address , fqdn = True ) : if is_ip ( address ) : try : import dns . reversename except ImportError : if six . PY2 : apt_install ( "python-dnspython" , fatal = True ) else : apt_install ( "python3-dnspython" , fatal = True ) import dns . reversename rev = dns . reversename . from_address ( address ) r... | Resolves hostname for given IP or returns the input if it is already a hostname . |
245,525 | def get_relation_ip ( interface , cidr_network = None ) : try : address = network_get_primary_address ( interface ) except NotImplementedError : address = get_host_ip ( unit_get ( 'private-address' ) ) except NoNetworkBinding : log ( "No network binding for {}" . format ( interface ) , WARNING ) address = get_host_ip (... | Return this unit s IP for the given interface . |
245,526 | def ensure_compliance ( self ) : for p in self . paths : if os . path . exists ( p ) : if self . is_compliant ( p ) : continue log ( 'File %s is not in compliance.' % p , level = INFO ) else : if not self . always_comply : log ( "Non-existent path '%s' - skipping compliance check" % ( p ) , level = INFO ) continue if s... | Ensure that the all registered files comply to registered criteria . |
245,527 | def is_compliant ( self , path ) : stat = self . _get_stat ( path ) user = self . user group = self . group compliant = True if stat . st_uid != user . pw_uid or stat . st_gid != group . gr_gid : log ( 'File %s is not owned by %s:%s.' % ( path , user . pw_name , group . gr_name ) , level = INFO ) compliant = False perm... | Checks if the path is in compliance . |
245,528 | def comply ( self , path ) : utils . ensure_permissions ( path , self . user . pw_name , self . group . gr_name , self . mode ) | Issues a chown and chmod to the file paths specified . |
245,529 | def is_compliant ( self , path ) : if not os . path . isdir ( path ) : log ( 'Path specified %s is not a directory.' % path , level = ERROR ) raise ValueError ( "%s is not a directory." % path ) if not self . recursive : return super ( DirectoryPermissionAudit , self ) . is_compliant ( path ) compliant = True for root ... | Checks if the directory is compliant . |
245,530 | def is_compliant ( self , path ) : same_templates = self . templates_match ( path ) same_content = self . contents_match ( path ) same_permissions = self . permissions_match ( path ) if same_content and same_permissions and same_templates : return True return False | Determines if the templated file is compliant . |
245,531 | def run_service_actions ( self ) : if not self . service_actions : return for svc_action in self . service_actions : name = svc_action [ 'service' ] actions = svc_action [ 'actions' ] log ( "Running service '%s' actions '%s'" % ( name , actions ) , level = DEBUG ) for action in actions : cmd = [ 'service' , name , acti... | Run any actions on services requested . |
245,532 | def comply ( self , path ) : dirname = os . path . dirname ( path ) if not os . path . exists ( dirname ) : os . makedirs ( dirname ) self . pre_write ( ) render_and_write ( self . template_dir , path , self . context ( ) ) utils . ensure_permissions ( path , self . user , self . group , self . mode ) self . run_servic... | Ensures the contents and the permissions of the file . |
245,533 | def templates_match ( self , path ) : template_path = get_template_path ( self . template_dir , path ) key = 'hardening:template:%s' % template_path template_checksum = file_hash ( template_path ) kv = unitdata . kv ( ) stored_tmplt_checksum = kv . get ( key ) if not stored_tmplt_checksum : kv . set ( key , template_ch... | Determines if the template files are the same . |
245,534 | def contents_match ( self , path ) : checksum = file_hash ( path ) kv = unitdata . kv ( ) stored_checksum = kv . get ( 'hardening:%s' % path ) if not stored_checksum : log ( 'Checksum for %s has not been calculated.' % path , level = DEBUG ) return False elif stored_checksum != checksum : log ( 'Checksum mismatch for %... | Determines if the file content is the same . |
245,535 | def permissions_match ( self , path ) : audit = FilePermissionAudit ( path , self . user , self . group , self . mode ) return audit . is_compliant ( path ) | Determines if the file owner and permissions match . |
245,536 | def save_checksum ( self , path ) : checksum = file_hash ( path ) kv = unitdata . kv ( ) kv . set ( 'hardening:%s' % path , checksum ) kv . flush ( ) | Calculates and saves the checksum for the path specified . |
245,537 | def bool_from_string ( value ) : if isinstance ( value , six . string_types ) : value = six . text_type ( value ) else : msg = "Unable to interpret non-string value '%s' as boolean" % ( value ) raise ValueError ( msg ) value = value . strip ( ) . lower ( ) if value in [ 'y' , 'yes' , 'true' , 't' , 'on' ] : return True... | Interpret string value as boolean . |
245,538 | def bytes_from_string ( value ) : BYTE_POWER = { 'K' : 1 , 'KB' : 1 , 'M' : 2 , 'MB' : 2 , 'G' : 3 , 'GB' : 3 , 'T' : 4 , 'TB' : 4 , 'P' : 5 , 'PB' : 5 , } if isinstance ( value , six . string_types ) : value = six . text_type ( value ) else : msg = "Unable to interpret non-string value '%s' as bytes" % ( value ) raise... | Interpret human readable string value as bytes . |
245,539 | def audit ( * args ) : def wrapper ( f ) : test_name = f . __name__ if _audits . get ( test_name ) : raise RuntimeError ( "Test name '{}' used more than once" . format ( test_name ) ) non_callables = [ fn for fn in args if not callable ( fn ) ] if non_callables : raise RuntimeError ( "Configuration includes non-callabl... | Decorator to register an audit . |
245,540 | def is_audit_type ( * args ) : def _is_audit_type ( audit_options ) : if audit_options . get ( 'audit_type' ) in args : return True else : return False return _is_audit_type | This audit is included in the specified kinds of audits . |
245,541 | def run ( audit_options ) : errors = { } results = { } for name , audit in sorted ( _audits . items ( ) ) : result_name = name . replace ( '_' , '-' ) if result_name in audit_options . get ( 'excludes' , [ ] ) : print ( "Skipping {} because it is" "excluded in audit config" . format ( result_name ) ) continue if all ( ... | Run the configured audits with the specified audit_options . |
245,542 | def action_parse_results ( result ) : passed = True for test , result in result . items ( ) : if result [ 'success' ] : hookenv . action_set ( { test : 'PASS' } ) else : hookenv . action_set ( { test : 'FAIL - {}' . format ( result [ 'message' ] ) } ) passed = False if not passed : hookenv . action_fail ( "One or more ... | Parse the result of run in the context of an action . |
245,543 | def generate_selfsigned ( keyfile , certfile , keysize = "1024" , config = None , subject = None , cn = None ) : cmd = [ ] if config : cmd = [ "/usr/bin/openssl" , "req" , "-new" , "-newkey" , "rsa:{}" . format ( keysize ) , "-days" , "365" , "-nodes" , "-x509" , "-keyout" , keyfile , "-out" , certfile , "-config" , co... | Generate selfsigned SSL keypair |
245,544 | def ssh_directory_for_unit ( application_name , user = None ) : if user : application_name = "{}_{}" . format ( application_name , user ) _dir = os . path . join ( NOVA_SSH_DIR , application_name ) for d in [ NOVA_SSH_DIR , _dir ] : if not os . path . isdir ( d ) : os . mkdir ( d ) for f in [ 'authorized_keys' , 'known... | Return the directory used to store ssh assets for the application . |
245,545 | def ssh_known_host_key ( host , application_name , user = None ) : cmd = [ 'ssh-keygen' , '-f' , known_hosts ( application_name , user ) , '-H' , '-F' , host ] try : output = subprocess . check_output ( cmd ) except subprocess . CalledProcessError as e : if e . returncode == 1 : output = e . output else : raise output ... | Return the first entry in known_hosts for host . |
245,546 | def remove_known_host ( host , application_name , user = None ) : log ( 'Removing SSH known host entry for compute host at %s' % host ) cmd = [ 'ssh-keygen' , '-f' , known_hosts ( application_name , user ) , '-R' , host ] subprocess . check_call ( cmd ) | Remove the entry in known_hosts for host . |
245,547 | def is_same_key ( key_1 , key_2 ) : k_1 = key_1 . split ( '= ' ) [ 1 ] k_2 = key_2 . split ( '= ' ) [ 1 ] return k_1 == k_2 | Extract the key from two host entries and compare them . |
245,548 | def add_known_host ( host , application_name , user = None ) : cmd = [ 'ssh-keyscan' , '-H' , '-t' , 'rsa' , host ] try : remote_key = subprocess . check_output ( cmd ) . strip ( ) except Exception as e : log ( 'Could not obtain SSH host key from %s' % host , level = ERROR ) raise e current_key = ssh_known_host_key ( h... | Add the given host key to the known hosts file . |
245,549 | def ssh_authorized_key_exists ( public_key , application_name , user = None ) : with open ( authorized_keys ( application_name , user ) ) as keys : return ( '%s' % public_key ) in keys . read ( ) | Check if given key is in the authorized_key file . |
245,550 | def add_authorized_key ( public_key , application_name , user = None ) : with open ( authorized_keys ( application_name , user ) , 'a' ) as keys : keys . write ( "{}\n" . format ( public_key ) ) | Add given key to the authorized_key file . |
245,551 | def ssh_known_hosts_lines ( application_name , user = None ) : known_hosts_list = [ ] with open ( known_hosts ( application_name , user ) ) as hosts : for hosts_line in hosts : if hosts_line . rstrip ( ) : known_hosts_list . append ( hosts_line . rstrip ( ) ) return ( known_hosts_list ) | Return contents of known_hosts file for given application . |
245,552 | def ssh_authorized_keys_lines ( application_name , user = None ) : authorized_keys_list = [ ] with open ( authorized_keys ( application_name , user ) ) as keys : for authkey_line in keys : if authkey_line . rstrip ( ) : authorized_keys_list . append ( authkey_line . rstrip ( ) ) return ( authorized_keys_list ) | Return contents of authorized_keys file for given application . |
245,553 | def ssh_compute_remove ( public_key , application_name , user = None ) : if not ( os . path . isfile ( authorized_keys ( application_name , user ) ) or os . path . isfile ( known_hosts ( application_name , user ) ) ) : return keys = ssh_authorized_keys_lines ( application_name , user = None ) keys = [ k . strip ( ) for... | Remove given public key from authorized_keys file . |
245,554 | def apt_cache ( in_memory = True , progress = None ) : from apt import apt_pkg apt_pkg . init ( ) if in_memory : apt_pkg . config . set ( "Dir::Cache::pkgcache" , "" ) apt_pkg . config . set ( "Dir::Cache::srcpkgcache" , "" ) return apt_pkg . Cache ( progress ) | Build and return an apt cache . |
245,555 | def apt_mark ( packages , mark , fatal = False ) : log ( "Marking {} as {}" . format ( packages , mark ) ) cmd = [ 'apt-mark' , mark ] if isinstance ( packages , six . string_types ) : cmd . append ( packages ) else : cmd . extend ( packages ) if fatal : subprocess . check_call ( cmd , universal_newlines = True ) else ... | Flag one or more packages using apt - mark . |
245,556 | def import_key ( key ) : key = key . strip ( ) if '-' in key or '\n' in key : log ( "PGP key found (looks like ASCII Armor format)" , level = DEBUG ) if ( '-----BEGIN PGP PUBLIC KEY BLOCK-----' in key and '-----END PGP PUBLIC KEY BLOCK-----' in key ) : log ( "Writing provided PGP key in the binary format" , level = DEB... | Import an ASCII Armor key . |
245,557 | def _dearmor_gpg_key ( key_asc ) : ps = subprocess . Popen ( [ 'gpg' , '--dearmor' ] , stdout = subprocess . PIPE , stderr = subprocess . PIPE , stdin = subprocess . PIPE ) out , err = ps . communicate ( input = key_asc ) if six . PY3 : err = err . decode ( 'utf-8' ) if 'gpg: no valid OpenPGP data found.' in err : rais... | Converts a GPG key in the ASCII armor format to the binary format . |
245,558 | def _write_apt_gpg_keyfile ( key_name , key_material ) : with open ( '/etc/apt/trusted.gpg.d/{}.gpg' . format ( key_name ) , 'wb' ) as keyf : keyf . write ( key_material ) | Writes GPG key material into a file at a provided path . |
245,559 | def _add_apt_repository ( spec ) : if '{series}' in spec : series = get_distrib_codename ( ) spec = spec . replace ( '{series}' , series ) _run_with_retries ( [ 'add-apt-repository' , '--yes' , spec ] , cmd_env = env_proxy_settings ( [ 'https' ] ) ) | Add the spec using add_apt_repository |
245,560 | def _add_cloud_distro_check ( cloud_archive_release , openstack_release ) : _verify_is_ubuntu_rel ( cloud_archive_release , openstack_release ) _add_cloud_pocket ( "{}-{}" . format ( cloud_archive_release , openstack_release ) ) | Add the cloud pocket but also check the cloud_archive_release against the current distro and use the openstack_release as the full lookup . |
245,561 | def _verify_is_ubuntu_rel ( release , os_release ) : ubuntu_rel = get_distrib_codename ( ) if release != ubuntu_rel : raise SourceConfigError ( 'Invalid Cloud Archive release specified: {}-{} on this Ubuntu' 'version ({})' . format ( release , os_release , ubuntu_rel ) ) | Verify that the release is the same as the current ubuntu release . |
245,562 | def _run_with_retries ( cmd , max_retries = CMD_RETRY_COUNT , retry_exitcodes = ( 1 , ) , retry_message = "" , cmd_env = None ) : env = None kwargs = { } if cmd_env : env = os . environ . copy ( ) env . update ( cmd_env ) kwargs [ 'env' ] = env if not retry_message : retry_message = "Failed executing '{}'" . format ( "... | Run a command and retry until success or max_retries is reached . |
245,563 | def _run_apt_command ( cmd , fatal = False ) : cmd_env = { 'DEBIAN_FRONTEND' : os . environ . get ( 'DEBIAN_FRONTEND' , 'noninteractive' ) } if fatal : _run_with_retries ( cmd , cmd_env = cmd_env , retry_exitcodes = ( 1 , APT_NO_LOCK , ) , retry_message = "Couldn't acquire DPKG lock" ) else : env = os . environ . copy ... | Run an apt command with optional retries . |
245,564 | def get_upstream_version ( package ) : import apt_pkg cache = apt_cache ( ) try : pkg = cache [ package ] except Exception : return None if not pkg . current_ver : return None return apt_pkg . upstream_version ( pkg . current_ver . ver_str ) | Determine upstream version based on installed package |
245,565 | def get_bcache_fs ( ) : cachesetroot = "{}/fs/bcache" . format ( SYSFS ) try : dirs = os . listdir ( cachesetroot ) except OSError : log ( "No bcache fs found" ) return [ ] cacheset = set ( [ Bcache ( '{}/{}' . format ( cachesetroot , d ) ) for d in dirs if not d . startswith ( 'register' ) ] ) return cacheset | Return all cache sets |
245,566 | def get_stats_action ( cachespec , interval ) : if cachespec == 'global' : caches = get_bcache_fs ( ) else : caches = [ Bcache . fromdevice ( cachespec ) ] res = dict ( ( c . cachepath , c . get_stats ( interval ) ) for c in caches ) return json . dumps ( res , indent = 4 , separators = ( ',' , ': ' ) ) | Action for getting bcache statistics for a given cachespec . Cachespec can either be a device name eg . sdb which will retrieve cache stats for the given device or global which will retrieve stats for all cachesets |
245,567 | def get_stats ( self , interval ) : intervaldir = 'stats_{}' . format ( interval ) path = "{}/{}" . format ( self . cachepath , intervaldir ) out = dict ( ) for elem in os . listdir ( path ) : out [ elem ] = open ( '{}/{}' . format ( path , elem ) ) . read ( ) . strip ( ) return out | Get cache stats |
245,568 | def update_dns_ha_resource_params ( resources , resource_params , relation_id = None , crm_ocf = 'ocf:maas:dns' ) : _relation_data = { 'resources' : { } , 'resource_params' : { } } update_hacluster_dns_ha ( charm_name ( ) , _relation_data , crm_ocf ) resources . update ( _relation_data [ 'resources' ] ) resource_params... | Configure DNS - HA resources based on provided configuration and update resource dictionaries for the HA relation . |
245,569 | def expect_ha ( ) : ha_related_units = [ ] try : ha_related_units = list ( expected_related_units ( reltype = 'ha' ) ) except ( NotImplementedError , KeyError ) : pass return len ( ha_related_units ) > 0 or config ( 'vip' ) or config ( 'dns-ha' ) | Determine if the unit expects to be in HA |
245,570 | def generate_ha_relation_data ( service , extra_settings = None ) : _haproxy_res = 'res_{}_haproxy' . format ( service ) _relation_data = { 'resources' : { _haproxy_res : 'lsb:haproxy' , } , 'resource_params' : { _haproxy_res : 'op monitor interval="5s"' } , 'init_services' : { _haproxy_res : 'haproxy' } , 'clones' : {... | Generate relation data for ha relation |
245,571 | def update_hacluster_dns_ha ( service , relation_data , crm_ocf = 'ocf:maas:dns' ) : assert_charm_supports_dns_ha ( ) settings = [ 'os-admin-hostname' , 'os-internal-hostname' , 'os-public-hostname' , 'os-access-hostname' ] hostname_group = [ ] for setting in settings : hostname = config ( setting ) if hostname is None... | Configure DNS - HA resources based on provided configuration |
245,572 | def get_vip_settings ( vip ) : iface = get_iface_for_address ( vip ) netmask = get_netmask_for_address ( vip ) fallback = False if iface is None : iface = config ( 'vip_iface' ) fallback = True if netmask is None : netmask = config ( 'vip_cidr' ) fallback = True return iface , netmask , fallback | Calculate which nic is on the correct network for the given vip . |
245,573 | def update_hacluster_vip ( service , relation_data ) : cluster_config = get_hacluster_config ( ) vip_group = [ ] vips_to_delete = [ ] for vip in cluster_config [ 'vip' ] . split ( ) : if is_ipv6 ( vip ) : res_vip = 'ocf:heartbeat:IPv6addr' vip_params = 'ipv6addr' else : res_vip = 'ocf:heartbeat:IPaddr2' vip_params = 'i... | Configure VIP resources based on provided configuration |
245,574 | def _add_services ( self , this_service , other_services ) : if this_service [ 'name' ] != os . path . basename ( os . getcwd ( ) ) : s = this_service [ 'name' ] msg = "The charm's root directory name needs to be {}" . format ( s ) amulet . raise_status ( amulet . FAIL , msg = msg ) if 'units' not in this_service : thi... | Add services . |
245,575 | def _add_relations ( self , relations ) : for k , v in six . iteritems ( relations ) : self . d . relate ( k , v ) | Add all of the relations for the services . |
245,576 | def _configure_services ( self , configs ) : for service , config in six . iteritems ( configs ) : self . d . configure ( service , config ) | Configure all of the services . |
245,577 | def _deploy ( self ) : timeout = int ( os . environ . get ( 'AMULET_SETUP_TIMEOUT' , 900 ) ) try : self . d . setup ( timeout = timeout ) self . d . sentry . wait ( timeout = timeout ) except amulet . helpers . TimeoutError : amulet . raise_status ( amulet . FAIL , msg = "Deployment timed out ({}s)" . format ( timeout ... | Deploy environment and wait for all hooks to finish executing . |
245,578 | def _init_ca ( self ) : if not exists ( path_join ( self . ca_dir , 'ca.cnf' ) ) : with open ( path_join ( self . ca_dir , 'ca.cnf' ) , 'w' ) as fh : fh . write ( CA_CONF_TEMPLATE % ( self . get_conf_variables ( ) ) ) if not exists ( path_join ( self . ca_dir , 'signing.cnf' ) ) : with open ( path_join ( self . ca_dir ... | Generate the root ca s cert and key . |
245,579 | def format_endpoint ( schema , addr , port , api_version ) : return '{}://{}:{}/{}/' . format ( schema , addr , port , get_api_suffix ( api_version ) ) | Return a formatted keystone endpoint |
245,580 | def get_keystone_manager ( endpoint , api_version , ** kwargs ) : if api_version == 2 : return KeystoneManager2 ( endpoint , ** kwargs ) if api_version == 3 : return KeystoneManager3 ( endpoint , ** kwargs ) raise ValueError ( 'No manager found for api version {}' . format ( api_version ) ) | Return a keystonemanager for the correct API version |
245,581 | def get_keystone_manager_from_identity_service_context ( ) : context = IdentityServiceContext ( ) ( ) if not context : msg = "Identity service context cannot be generated" log ( msg , level = ERROR ) raise ValueError ( msg ) endpoint = format_endpoint ( context [ 'service_protocol' ] , context [ 'service_host' ] , cont... | Return a keystonemanager generated from an instance of charmhelpers . contrib . openstack . context . IdentityServiceContext |
245,582 | def resolve_service_id ( self , service_name = None , service_type = None ) : services = [ s . _info for s in self . api . services . list ( ) ] service_name = service_name . lower ( ) for s in services : name = s [ 'name' ] . lower ( ) if service_type and service_name : if ( service_name == name and service_type == s ... | Find the service_id of a given service |
245,583 | def deactivate_lvm_volume_group ( block_device ) : vg = list_lvm_volume_group ( block_device ) if vg : cmd = [ 'vgchange' , '-an' , vg ] check_call ( cmd ) | Deactivate any volume group associated with an LVM physical volume . |
245,584 | def remove_lvm_physical_volume ( block_device ) : p = Popen ( [ 'pvremove' , '-ff' , block_device ] , stdin = PIPE ) p . communicate ( input = 'y\n' ) | Remove LVM PV signatures from a given block device . |
245,585 | def list_lvm_volume_group ( block_device ) : vg = None pvd = check_output ( [ 'pvdisplay' , block_device ] ) . splitlines ( ) for lvm in pvd : lvm = lvm . decode ( 'UTF-8' ) if lvm . strip ( ) . startswith ( 'VG Name' ) : vg = ' ' . join ( lvm . strip ( ) . split ( ) [ 2 : ] ) return vg | List LVM volume group associated with a given block device . |
245,586 | def list_logical_volumes ( select_criteria = None , path_mode = False ) : lv_diplay_attr = 'lv_name' if path_mode : lv_diplay_attr = 'vg_name,' + lv_diplay_attr cmd = [ 'lvs' , '--options' , lv_diplay_attr , '--noheadings' ] if select_criteria : cmd . extend ( [ '--select' , select_criteria ] ) lvs = [ ] for lv in chec... | List logical volumes |
245,587 | def create_logical_volume ( lv_name , volume_group , size = None ) : if size : check_call ( [ 'lvcreate' , '--yes' , '-L' , '{}' . format ( size ) , '-n' , lv_name , volume_group ] ) else : check_call ( [ 'lvcreate' , '--yes' , '-l' , '100%FREE' , '-n' , lv_name , volume_group ] ) | Create a new logical volume in an existing volume group |
245,588 | def render ( source , target , context , owner = 'root' , group = 'root' , perms = 0o444 , templates_dir = None , encoding = 'UTF-8' , template_loader = None , config_template = None ) : try : from jinja2 import FileSystemLoader , Environment , exceptions except ImportError : try : from charmhelpers . fetch import apt_... | Render a template . |
245,589 | def cached ( func ) : @ wraps ( func ) def wrapper ( * args , ** kwargs ) : global cache key = json . dumps ( ( func , args , kwargs ) , sort_keys = True , default = str ) try : return cache [ key ] except KeyError : pass res = func ( * args , ** kwargs ) cache [ key ] = res return res wrapper . _wrapped = func return ... | Cache return values for multiple executions of func + args |
245,590 | def flush ( key ) : flush_list = [ ] for item in cache : if key in item : flush_list . append ( item ) for item in flush_list : del cache [ item ] | Flushes any entries from function cache where the key is found in the function + args |
245,591 | def log ( message , level = None ) : command = [ 'juju-log' ] if level : command += [ '-l' , level ] if not isinstance ( message , six . string_types ) : message = repr ( message ) command += [ message [ : SH_MAX_ARG ] ] try : subprocess . call ( command ) except OSError as e : if e . errno == errno . ENOENT : if level... | Write a message to the juju log |
245,592 | def execution_environment ( ) : context = { } context [ 'conf' ] = config ( ) if relation_id ( ) : context [ 'reltype' ] = relation_type ( ) context [ 'relid' ] = relation_id ( ) context [ 'rel' ] = relation_get ( ) context [ 'unit' ] = local_unit ( ) context [ 'rels' ] = relations ( ) context [ 'env' ] = os . environ ... | A convenient bundling of the current execution context |
245,593 | def relation_id ( relation_name = None , service_or_unit = None ) : if not relation_name and not service_or_unit : return os . environ . get ( 'JUJU_RELATION_ID' , None ) elif relation_name and service_or_unit : service_name = service_or_unit . split ( '/' ) [ 0 ] for relid in relation_ids ( relation_name ) : remote_se... | The relation ID for the current or a specified relation |
245,594 | def principal_unit ( ) : principal_unit = os . environ . get ( 'JUJU_PRINCIPAL_UNIT' , None ) if principal_unit == '' : return os . environ [ 'JUJU_UNIT_NAME' ] elif principal_unit is not None : return principal_unit for reltype in relation_types ( ) : for rid in relation_ids ( reltype ) : for unit in related_units ( r... | Returns the principal unit of this unit otherwise None |
245,595 | def relation_get ( attribute = None , unit = None , rid = None ) : _args = [ 'relation-get' , '--format=json' ] if rid : _args . append ( '-r' ) _args . append ( rid ) _args . append ( attribute or '-' ) if unit : _args . append ( unit ) try : return json . loads ( subprocess . check_output ( _args ) . decode ( 'UTF-8'... | Get relation information |
245,596 | def relation_set ( relation_id = None , relation_settings = None , ** kwargs ) : relation_settings = relation_settings if relation_settings else { } relation_cmd_line = [ 'relation-set' ] accepts_file = "--file" in subprocess . check_output ( relation_cmd_line + [ "--help" ] , universal_newlines = True ) if relation_id... | Set relation information for the current unit |
245,597 | def relation_clear ( r_id = None ) : settings = relation_get ( rid = r_id , unit = local_unit ( ) ) for setting in settings : if setting not in [ 'public-address' , 'private-address' ] : settings [ setting ] = None relation_set ( relation_id = r_id , ** settings ) | Clears any relation data already set on relation r_id |
245,598 | def relation_ids ( reltype = None ) : reltype = reltype or relation_type ( ) relid_cmd_line = [ 'relation-ids' , '--format=json' ] if reltype is not None : relid_cmd_line . append ( reltype ) return json . loads ( subprocess . check_output ( relid_cmd_line ) . decode ( 'UTF-8' ) ) or [ ] return [ ] | A list of relation_ids |
245,599 | def related_units ( relid = None ) : relid = relid or relation_id ( ) units_cmd_line = [ 'relation-list' , '--format=json' ] if relid is not None : units_cmd_line . extend ( ( '-r' , relid ) ) return json . loads ( subprocess . check_output ( units_cmd_line ) . decode ( 'UTF-8' ) ) or [ ] | A list of related units |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.