idx int64 0 63k | question stringlengths 53 5.28k | target stringlengths 5 805 |
|---|---|---|
41,700 | def _update_nic_data_from_nic_info_based_on_model ( self , nic_dict , item , port , mac ) : if 'G7' in self . model : nic_dict [ port ] = mac else : location = item [ 'LOCATION' ] [ 'VALUE' ] if location == 'Embedded' : nic_dict [ port ] = mac | This method updates with port number and corresponding mac |
41,701 | def _get_response_body_from_gzipped_content ( self , url , response ) : try : gzipper = gzip . GzipFile ( fileobj = six . BytesIO ( response . text ) ) LOG . debug ( self . _ ( "Received compressed response for " "url %(url)s." ) , { 'url' : url } ) uncompressed_string = ( gzipper . read ( ) . decode ( 'UTF-8' ) ) response_body = json . loads ( uncompressed_string ) except Exception as e : LOG . debug ( self . _ ( "Error occurred while decompressing body. " "Got invalid response '%(response)s' for " "url %(url)s: %(error)s" ) , { 'url' : url , 'response' : response . text , 'error' : e } ) raise exception . IloError ( e ) return response_body | Get the response body from gzipped content |
41,702 | def _rest_patch ( self , suburi , request_headers , request_body ) : return self . _rest_op ( 'PATCH' , suburi , request_headers , request_body ) | REST PATCH operation . |
41,703 | def _rest_put ( self , suburi , request_headers , request_body ) : return self . _rest_op ( 'PUT' , suburi , request_headers , request_body ) | REST PUT operation . |
41,704 | def _rest_post ( self , suburi , request_headers , request_body ) : return self . _rest_op ( 'POST' , suburi , request_headers , request_body ) | REST POST operation . |
41,705 | def red_dim ( self , array ) : while isinstance ( array , list ) == True or isinstance ( array , np . ndarray ) == True : try : if len ( array ) == 1 : array = array [ 0 ] else : break except : break return array | This function reduces the dimensions of an array until it is no longer of length 1 . |
41,706 | def _padding_model_number ( number , max_num ) : cnum = str ( number ) clen = len ( cnum ) cmax = int ( log10 ( max_num ) ) + 1 return ( cmax - clen ) * '0' + cnum | This method returns a zero - front padded string |
41,707 | def _sparse ( self , x , y , sparse ) : tmpX = [ ] tmpY = [ ] for i in range ( len ( x ) ) : if sparse == 1 : return x , y if ( i % sparse ) == 0 : tmpX . append ( x [ i ] ) tmpY . append ( y [ i ] ) return tmpX , tmpY | Method that removes every non sparse th element . |
41,708 | def plotMulti ( self , atrix , atriy , cyclist , title , path = '/' , legend = None , labelx = None , labely = None , logx = False , logy = False , base = 10 , sparse = 1 , pdf = False , limits = None ) : if str ( legend . __class__ ) != "<type 'list'>" : legendList = False else : legendList = True if legendList and len ( cyclist ) != len ( legend ) : print ( 'Please input a proper legend, with correct length, aborting plot' ) return None for i in range ( len ( cyclist ) ) : if legendList : self . plot ( atrix , atriy , cyclist [ i ] , 'ndump' , legend [ i ] , labelx , labely , base = base , sparse = sparse , logx = logx , logy = logy , show = False , limits = limits ) else : self . plot ( atrix , atriy , cyclist [ i ] , 'ndump' , legend , labelx , labely , base = base , sparse = sparse , logx = logx , logy = logy , show = False , limits = limits ) pl . title ( title ) if not pdf : currentDir = os . getcwd ( ) os . chdir ( path ) pl . savefig ( title + str ( cyclist [ i ] ) + '.png' , dpi = 400 ) os . chdir ( currentDir ) else : currentDir = os . getcwd ( ) os . chdir ( path ) pl . savefig ( title + str ( cyclist [ i ] ) + '.pdf' , dpi = 400 ) os . chdir ( currentDir ) pl . clf ( ) return None | Method for plotting multiple plots and saving it to multiple pngs or PDFs . |
41,709 | def iso_abundMulti ( self , cyclist , stable = False , amass_range = None , mass_range = None , ylim = [ 0 , 0 ] , ref = - 1 , decayed = False , include_title = False , title = None , pdf = False , color_plot = True , grid = False , point_set = 1 ) : max_num = max ( cyclist ) for i in range ( len ( cyclist ) ) : self . iso_abund ( cyclist [ i ] , stable , amass_range , mass_range , ylim , ref , decayed = decayed , show = False , color_plot = color_plot , grid = False , point_set = 1 , include_title = include_title ) if title != None : pl . title ( title ) else : name = 'IsoAbund' number_str = _padding_model_number ( cyclist [ i ] , max_num ) if not pdf : pl . savefig ( name + number_str + '.png' , dpi = 200 ) else : pl . savefig ( name + number_str + '.pdf' , dpi = 200 ) pl . clf ( ) return None | Method that plots figures and saves those figures to a . png file . Plots a figure for each cycle in the argument cycle . Can be called via iso_abund method by passing a list to cycle . |
41,710 | def _do_title_string ( self , title_items , cycle ) : title_string = [ ] form_str = '%4.1F' for item in title_items : num = self . get ( item , fname = cycle ) if num > 999 or num < 0.1 : num = log10 ( num ) prefix = 'log ' else : prefix = '' title_string . append ( prefix + item + '=' + form_str % num ) tt = '' for thing in title_string : tt = tt + thing + ", " return tt . rstrip ( ', ' ) | Create title string |
41,711 | def plotprofMulti ( self , ini , end , delta , what_specie , xlim1 , xlim2 , ylim1 , ylim2 , symbol = None ) : plotType = self . _classTest ( ) if plotType == 'se' : for i in range ( ini , end + 1 , delta ) : step = int ( i ) if symbol == None : symbol_dummy = '-' for j in range ( len ( what_specie ) ) : self . plot_prof_1 ( step , what_specie [ j ] , xlim1 , xlim2 , ylim1 , ylim2 , symbol_dummy ) else : for j in range ( len ( what_specie ) ) : symbol_dummy = symbol [ j ] self . plot_prof_1 ( step , what_specie [ j ] , xlim1 , xlim2 , ylim1 , ylim2 , symbol_dummy ) filename = str ( '%03d' % step ) + '_test.png' pl . savefig ( filename , dpi = 400 ) print ( 'wrote file ' , filename ) pl . clf ( ) else : print ( 'This method is not supported for ' + str ( self . __class__ ) ) return | create a movie with mass fractions vs mass coordinate between xlim1 and xlim2 ylim1 and ylim2 . Only works with instances of se . |
41,712 | def plot_prof_1 ( self , species , keystring , xlim1 , xlim2 , ylim1 , ylim2 , symbol = None , show = False ) : plotType = self . _classTest ( ) if plotType == 'se' : tot_mass = self . se . get ( 'mini' ) age = self . se . get ( keystring , 'age' ) mass = self . se . get ( keystring , 'mass' ) Xspecies = self . se . get ( keystring , 'iso_massf' , species ) mod = keystring elif plotType == 'mesa_profile' : tot_mass = self . header_attr [ 'star_mass' ] age = self . header_attr [ 'star_age' ] mass = self . get ( 'mass' ) mod = self . header_attr [ 'model_number' ] Xspecies = self . get ( species ) else : print ( 'This method is not supported for ' + str ( self . __class__ ) ) return if symbol == None : symbol = '-' x , y = self . _logarithm ( Xspecies , mass , True , False , 10 ) pl . plot ( y , x , symbol , label = str ( species ) ) pl . xlim ( xlim1 , xlim2 ) pl . ylim ( ylim1 , ylim2 ) pl . legend ( ) pl . xlabel ( '$Mass$ $coordinate$' , fontsize = 20 ) pl . ylabel ( '$X_{i}$' , fontsize = 20 ) pl . title ( 'Mass=' + str ( tot_mass ) + ', cycle=' + str ( mod ) ) if show : pl . show ( ) | Plot one species for cycle between xlim1 and xlim2 Only works with instances of se and mesa _profile . |
41,713 | def density_profile ( self , ixaxis = 'mass' , ifig = None , colour = None , label = None , fname = None ) : pT = self . _classTest ( ) if pT is 'mesa_profile' : x = self . get ( ixaxis ) if ixaxis is 'radius' : x = x * ast . rsun_cm y = self . get ( 'logRho' ) elif pT is 'se' : if fname is None : raise IOError ( "Please provide the cycle number fname" ) x = self . se . get ( fname , ixaxis ) y = np . log10 ( self . se . get ( fname , 'rho' ) ) else : raise IOError ( "Sorry. the density_profile method is not available \ for this class" ) if ixaxis is 'radius' : x = np . log10 ( x ) xlab = '$\log_{10}(r\,/\,{\\rm cm})$' else : xlab = '${\\rm Mass}\,/\,M_\odot$' if ifig is not None : pl . figure ( ifig ) if label is not None : if colour is not None : pl . plot ( x , y , color = colour , label = label ) else : pl . plot ( x , y , label = label ) pl . legend ( loc = 'best' ) . draw_frame ( False ) else : if colour is not None : pl . plot ( x , y , color = colour ) else : pl . plot ( x , y ) pl . xlabel ( xlab ) pl . ylabel ( '$\log_{10}(\\rho\,/\,{\\rm g\,cm}^{-3})$' ) | Plot density as a function of either mass coordiate or radius . |
41,714 | def get_system ( self , identity ) : return system . HPESystem ( self . _conn , identity , redfish_version = self . redfish_version ) | Given the identity return a HPESystem object |
41,715 | def get_manager ( self , identity ) : return manager . HPEManager ( self . _conn , identity , redfish_version = self . redfish_version ) | Given the identity return a HPEManager object |
41,716 | def get_update_service ( self ) : update_service_url = utils . get_subresource_path_by ( self , 'UpdateService' ) return ( update_service . HPEUpdateService ( self . _conn , update_service_url , redfish_version = self . redfish_version ) ) | Return a HPEUpdateService object |
41,717 | def get_account_service ( self ) : account_service_url = utils . get_subresource_path_by ( self , 'AccountService' ) return ( account_service . HPEAccountService ( self . _conn , account_service_url , redfish_version = self . redfish_version ) ) | Return a HPEAccountService object |
41,718 | def _execute_sum ( sum_file_path , mount_point , components = None ) : cmd = ' --c ' + ' --c ' . join ( components ) if components else '' try : if SUM_LOCATION in sum_file_path : location = os . path . join ( mount_point , 'packages' ) processutils . execute ( './launch_sum.sh' , '--s' , '--romonly' , '--use_location' , location , cmd , cwd = mount_point ) else : processutils . execute ( sum_file_path , '--s' , '--romonly' , cmd ) except processutils . ProcessExecutionError as e : result = _parse_sum_ouput ( e . exit_code ) if result : return result else : raise exception . SUMOperationError ( reason = str ( e ) ) | Executes the SUM based firmware update command . |
41,719 | def _get_log_file_data_as_encoded_content ( ) : with io . BytesIO ( ) as fp : with tarfile . open ( fileobj = fp , mode = 'w:gz' ) as tar : for f in OUTPUT_FILES : if os . path . isfile ( f ) : tar . add ( f ) fp . seek ( 0 ) return base64 . encode_as_bytes ( fp . getvalue ( ) ) | Gzip and base64 encode files and BytesIO buffers . |
41,720 | def _parse_sum_ouput ( exit_code ) : if exit_code == 3 : return "Summary: %s" % EXIT_CODE_TO_STRING . get ( exit_code ) if exit_code in ( 0 , 1 , 253 ) : if os . path . exists ( OUTPUT_FILES [ 0 ] ) : with open ( OUTPUT_FILES [ 0 ] , 'r' ) as f : output_data = f . read ( ) ret_data = output_data [ ( output_data . find ( 'Deployed Components:' ) + len ( 'Deployed Components:' ) ) : output_data . find ( 'Exit status:' ) ] failed = 0 success = 0 for line in re . split ( '\n\n' , ret_data ) : if line : if 'Success' not in line : failed += 1 else : success += 1 return { 'Summary' : ( "%(return_string)s Status of updated components: Total: " "%(total)s Success: %(success)s Failed: %(failed)s." % { 'return_string' : EXIT_CODE_TO_STRING . get ( exit_code ) , 'total' : ( success + failed ) , 'success' : success , 'failed' : failed } ) , 'Log Data' : _get_log_file_data_as_encoded_content ( ) } return "UPDATE STATUS: UNKNOWN" | Parse the SUM output log file . |
41,721 | def update_firmware ( node ) : sum_update_iso = node [ 'clean_step' ] [ 'args' ] . get ( 'url' ) try : utils . validate_href ( sum_update_iso ) except exception . ImageRefValidationFailed as e : raise exception . SUMOperationError ( reason = e ) info = node . get ( 'driver_info' ) ilo_object = client . IloClient ( info . get ( 'ilo_address' ) , info . get ( 'ilo_username' ) , info . get ( 'ilo_password' ) ) ilo_object . eject_virtual_media ( 'CDROM' ) ilo_object . insert_virtual_media ( sum_update_iso , 'CDROM' ) time . sleep ( WAIT_TIME_DISK_LABEL_TO_BE_VISIBLE ) vmedia_device_dir = "/dev/disk/by-label/" for file in os . listdir ( vmedia_device_dir ) : if fnmatch . fnmatch ( file , 'SPP*' ) : vmedia_device_file = os . path . join ( vmedia_device_dir , file ) if not os . path . exists ( vmedia_device_file ) : msg = "Unable to find the virtual media device for SUM" raise exception . SUMOperationError ( reason = msg ) expected_checksum = node [ 'clean_step' ] [ 'args' ] . get ( 'checksum' ) try : utils . verify_image_checksum ( vmedia_device_file , expected_checksum ) except exception . ImageRefValidationFailed as e : raise exception . SUMOperationError ( reason = e ) vmedia_mount_point = tempfile . mkdtemp ( ) try : try : processutils . execute ( "mount" , vmedia_device_file , vmedia_mount_point ) except processutils . ProcessExecutionError as e : msg = ( "Unable to mount virtual media device %(device)s: " "%(error)s" % { 'device' : vmedia_device_file , 'error' : e } ) raise exception . SUMOperationError ( reason = msg ) sum_file_path = os . path . join ( vmedia_mount_point , SUM_LOCATION ) if not os . path . exists ( sum_file_path ) : sum_file_path = os . path . join ( vmedia_mount_point , HPSUM_LOCATION ) components = node [ 'clean_step' ] [ 'args' ] . get ( 'components' ) result = _execute_sum ( sum_file_path , vmedia_mount_point , components = components ) processutils . trycmd ( "umount" , vmedia_mount_point ) finally : shutil . 
rmtree ( vmedia_mount_point , ignore_errors = True ) return result | Performs SUM based firmware update on the node . |
41,722 | def parse ( self , text ) : tags , results = [ ] , [ ] text = self . re_tag . sub ( lambda m : self . sub_tag ( m , tags , results ) , text ) if self . strict and tags : markup = "%s%s%s" % ( self . tag_sep [ 0 ] , tags . pop ( 0 ) , self . tag_sep [ 1 ] ) raise MismatchedTag ( 'opening tag "%s" has no corresponding closing tag' % markup ) if self . always_reset : if not text . endswith ( Style . RESET_ALL ) : text += Style . RESET_ALL return text | Return a string with markup tags converted to ansi - escape sequences . |
41,723 | def strip ( self , text ) : tags , results = [ ] , [ ] return self . re_tag . sub ( lambda m : self . clear_tag ( m , tags , results ) , text ) | Return string with markup tags removed . |
41,724 | def process_firmware_image ( compact_firmware_file , ilo_object ) : fw_img_extractor = firmware_controller . get_fw_extractor ( compact_firmware_file ) LOG . debug ( 'Extracting firmware file: %s ...' , compact_firmware_file ) raw_fw_file_path , is_extracted = fw_img_extractor . extract ( ) to_upload = False m = re . search ( 'Gen(\d+)' , ilo_object . model ) if int ( m . group ( 1 ) ) > 8 : to_upload = True LOG . debug ( 'Extracting firmware file: %s ... done' , compact_firmware_file ) msg = ( 'Firmware file %(fw_file)s is %(msg)s. Need hosting (on an http ' 'store): %(yes_or_no)s' % { 'fw_file' : compact_firmware_file , 'msg' : ( 'extracted. Extracted file: %s' % raw_fw_file_path if is_extracted else 'already in raw format' ) , 'yes_or_no' : 'Yes' if to_upload else 'No' } ) LOG . info ( msg ) return raw_fw_file_path , to_upload , is_extracted | Processes the firmware file . |
41,725 | def _get_hash_object ( hash_algo_name ) : algorithms = ( hashlib . algorithms_guaranteed if six . PY3 else hashlib . algorithms ) if hash_algo_name not in algorithms : msg = ( "Unsupported/Invalid hash name '%s' provided." % hash_algo_name ) raise exception . InvalidInputError ( msg ) return getattr ( hashlib , hash_algo_name ) ( ) | Create a hash object based on given algorithm . |
41,726 | def hash_file ( file_like_object , hash_algo = 'md5' ) : checksum = _get_hash_object ( hash_algo ) for chunk in iter ( lambda : file_like_object . read ( 32768 ) , b'' ) : checksum . update ( chunk ) return checksum . hexdigest ( ) | Generate a hash for the contents of a file . |
41,727 | def validate_href ( image_href ) : try : response = requests . head ( image_href ) if response . status_code != http_client . OK : raise exception . ImageRefValidationFailed ( image_href = image_href , reason = ( "Got HTTP code %s instead of 200 in response to " "HEAD request." % response . status_code ) ) except requests . RequestException as e : raise exception . ImageRefValidationFailed ( image_href = image_href , reason = e ) return response | Validate HTTP image reference . |
41,728 | def apply_bios_properties_filter ( settings , filter_to_be_applied ) : if not settings or not filter_to_be_applied : return settings return { k : settings [ k ] for k in filter_to_be_applied if k in settings } | Applies the filter to return the dict of filtered BIOS properties . |
41,729 | def accounts ( self ) : return account . HPEAccountCollection ( self . _conn , utils . get_subresource_path_by ( self , 'Accounts' ) , redfish_version = self . redfish_version ) | Property to provide instance of HPEAccountCollection |
41,730 | def update_credentials ( self , password ) : data = { 'Password' : password , } self . _conn . patch ( self . path , data = data ) | Update credentials of a redfish system |
41,731 | def get_member_details ( self , username ) : members = self . get_members ( ) for member in members : if member . username == username : return member | Returns the HPEAccount object |
41,732 | def _get_media ( media_types ) : get_mapped_media = ( lambda x : maps . VIRTUAL_MEDIA_TYPES_MAP [ x ] if x in maps . VIRTUAL_MEDIA_TYPES_MAP else None ) return list ( map ( get_mapped_media , media_types ) ) | Helper method to map the media types . |
41,733 | def _get_action_element ( self , action_type ) : action = eval ( "self._hpe_actions." + action_type + "_vmedia" ) if not action : if action_type == "insert" : action_path = '#HpeiLOVirtualMedia.InsertVirtualMedia' else : action_path = '#HpeiLOVirtualMedia.EjectVirtualMedia' raise exception . MissingAttributeError ( attribute = action_path , resource = self . _path ) return action | Helper method to return the action object . |
41,734 | def insert_media ( self , url ) : try : super ( VirtualMedia , self ) . insert_media ( url , write_protected = True ) except sushy_exceptions . SushyError : target_uri = self . _get_action_element ( 'insert' ) . target_uri data = { 'Image' : url } self . _conn . post ( target_uri , data = data ) | Inserts Virtual Media to the device |
41,735 | def eject_media ( self ) : try : super ( VirtualMedia , self ) . eject_media ( ) except sushy_exceptions . SushyError : target_uri = self . _get_action_element ( 'eject' ) . target_uri self . _conn . post ( target_uri , data = { } ) | Ejects Virtual Media . |
41,736 | def set_vm_status ( self , boot_on_next_reset ) : data = { "Oem" : { "Hpe" : { "BootOnNextServerReset" : boot_on_next_reset } } } self . _conn . patch ( self . path , data = data ) | Set the Virtual Media drive status . |
41,737 | def get_member_device ( self , device ) : for vmedia_device in self . get_members ( ) : if device in vmedia_device . media_types : return vmedia_device | Returns the given virtual media device object . |
41,738 | def _get_entity ( partition_key , row_key , select , accept ) : _validate_not_none ( 'partition_key' , partition_key ) _validate_not_none ( 'row_key' , row_key ) _validate_not_none ( 'accept' , accept ) request = HTTPRequest ( ) request . method = 'GET' request . headers = [ ( 'Accept' , _to_str ( accept ) ) ] request . query = [ ( '$select' , _to_str ( select ) ) ] return request | Constructs a get entity request . |
41,739 | def _insert_entity ( entity ) : _validate_entity ( entity ) request = HTTPRequest ( ) request . method = 'POST' request . headers = [ _DEFAULT_CONTENT_TYPE_HEADER , _DEFAULT_PREFER_HEADER , _DEFAULT_ACCEPT_HEADER ] request . body = _get_request_body ( _convert_entity_to_json ( entity ) ) return request | Constructs an insert entity request . |
41,740 | def _delete_entity ( partition_key , row_key , if_match ) : _validate_not_none ( 'if_match' , if_match ) _validate_not_none ( 'partition_key' , partition_key ) _validate_not_none ( 'row_key' , row_key ) request = HTTPRequest ( ) request . method = 'DELETE' request . headers = [ _DEFAULT_ACCEPT_HEADER , ( 'If-Match' , _to_str ( if_match ) ) ] return request | Constructs a delete entity request . |
41,741 | def find_executable ( executable_name ) : if six . PY3 : executable_abs = shutil . which ( executable_name ) else : import distutils . spawn executable_abs = distutils . spawn . find_executable ( executable_name ) return executable_abs | Tries to find executable in PATH environment |
41,742 | def check_firmware_update_component ( func ) : @ six . wraps ( func ) def wrapper ( self , filename , component_type ) : component_type = component_type and component_type . lower ( ) if ( component_type not in SUPPORTED_FIRMWARE_UPDATE_COMPONENTS ) : msg = ( "Got invalid component type for firmware update: " "``update_firmware`` is not supported on %(component)s" % { 'component' : component_type } ) LOG . error ( self . _ ( msg ) ) raise exception . InvalidInputError ( msg ) return func ( self , filename , component_type ) return wrapper | Checks the firmware update component . |
41,743 | def get_fw_extractor ( fw_file ) : fw_img_extractor = FirmwareImageExtractor ( fw_file ) extension = fw_img_extractor . fw_file_ext . lower ( ) if extension == '.scexe' : fw_img_extractor . _do_extract = types . MethodType ( _extract_scexe_file , fw_img_extractor ) elif extension == '.rpm' : fw_img_extractor . _do_extract = types . MethodType ( _extract_rpm_file , fw_img_extractor ) elif extension in RAW_FIRMWARE_EXTNS : def dummy_extract ( self ) : return fw_img_extractor . fw_file , False fw_img_extractor . extract = types . MethodType ( dummy_extract , fw_img_extractor ) else : raise exception . InvalidInputError ( 'Unexpected compact firmware file type: %s' % fw_file ) return fw_img_extractor | Gets the firmware extractor object fine - tuned for specified type |
41,744 | def _extract_scexe_file ( self , target_file , extract_path ) : unpack_cmd = '--unpack=' + extract_path cmd = [ target_file , unpack_cmd ] out , err = utils . trycmd ( * cmd ) | Extracts the scexe file . |
41,745 | def _extract_rpm_file ( self , target_file , extract_path ) : if not os . path . exists ( extract_path ) : os . makedirs ( extract_path ) os . chdir ( extract_path ) if find_executable ( 'rpm2cpio' ) is None : raise exception . ImageExtractionFailed ( image_ref = target_file , reason = 'Command `rpm2cpio` not found.' ) if find_executable ( 'cpio' ) is None : raise exception . ImageExtractionFailed ( image_ref = target_file , reason = 'Command `cpio` not found.' ) try : rpm2cpio = subprocess . Popen ( 'rpm2cpio ' + target_file , shell = True , stdout = subprocess . PIPE ) cpio = subprocess . Popen ( 'cpio -idm' , shell = True , stdin = rpm2cpio . stdout ) out , err = cpio . communicate ( ) except ( OSError , ValueError ) as e : raise exception . ImageExtractionFailed ( image_ref = target_file , reason = 'Unexpected error in extracting file. ' + str ( e ) ) | Extracts the rpm file . |
41,746 | def _get_firmware_file ( path ) : for dirpath , dirnames , filenames in os . walk ( path ) : for filename in filenames : file_name , file_ext = os . path . splitext ( os . path . basename ( filename ) ) if file_ext in RAW_FIRMWARE_EXTNS : return os . path . join ( dirpath , filename ) | Gets the raw firmware file |
41,747 | def _get_firmware_file_in_new_path ( searching_path ) : firmware_file_path = _get_firmware_file ( searching_path ) if not firmware_file_path : return None file_name , file_ext_with_dot = common . get_filename_and_extension_of ( firmware_file_path ) new_firmware_file_path = os . path . join ( tempfile . gettempdir ( ) , str ( uuid . uuid4 ( ) ) + '_' + file_name + file_ext_with_dot ) os . link ( firmware_file_path , new_firmware_file_path ) return new_firmware_file_path | Gets the raw firmware file in a new path |
41,748 | def upload_file_to ( self , addressinfo , timeout ) : self . hostname , self . port = addressinfo self . timeout = timeout filename = self . fw_file firmware = open ( filename , 'rb' ) . read ( ) boundary = b ( '------hpiLO3t' + str ( random . randint ( 100000 , 1000000 ) ) + 'z' ) while boundary in firmware : boundary = b ( '------hpiLO3t' + str ( random . randint ( 100000 , 1000000 ) ) + 'z' ) parts = [ b ( "--" ) + boundary + b ( ) , b ( "\r\n--" ) + boundary + b ( ) + b ( filename ) + b ( ) , firmware , b ( "\r\n--" ) + boundary + b ( "--\r\n" ) , ] total_bytes = sum ( [ len ( x ) for x in parts ] ) sock = self . _get_socket ( ) sock . write ( b ( self . HTTP_UPLOAD_HEADER % ( total_bytes , boundary . decode ( 'ascii' ) ) ) ) for part in parts : sock . write ( part ) data = '' try : while True : d = sock . read ( ) data += d . decode ( 'latin-1' ) if not d : break except socket . sslerror : e = sys . exc_info ( ) [ 1 ] if not data : raise exception . IloConnectionError ( "Communication with %(hostname)s:%(port)d failed: " "%(error)s" % { 'hostname' : self . hostname , 'port' : self . port , 'error' : str ( e ) } ) cookie_match = re . search ( 'Set-Cookie: *(.*)' , data ) if not cookie_match : raise exception . IloError ( "Uploading of file: %s failed due " "to unknown reason." % filename ) return cookie_match . group ( 1 ) | Uploads the raw firmware file to iLO |
41,749 | def extract ( self ) : target_file = self . fw_file common . add_exec_permission_to ( target_file ) temp_dir = tempfile . mkdtemp ( ) extract_path = os . path . join ( temp_dir , self . fw_filename ) try : self . _do_extract ( target_file , extract_path ) except exception . ImageExtractionFailed : shutil . rmtree ( temp_dir , ignore_errors = True ) raise firmware_file_path = _get_firmware_file_in_new_path ( extract_path ) shutil . rmtree ( temp_dir , ignore_errors = True ) if not firmware_file_path : raise exception . InvalidInputError ( "Raw firmware file not found in: '%s'" % target_file ) return firmware_file_path , True | Extracts the raw firmware file from its compact format |
41,750 | def _generic_format ( self , raid_config , controller = None ) : logical_drives = raid_config [ "LogicalDrives" ] logical_disks = [ ] controller = controller for ld in logical_drives : prop = { 'size_gb' : ld [ 'CapacityGiB' ] , 'raid_level' : ld [ 'Raid' ] . strip ( 'Raid' ) , 'root_device_hint' : { 'wwn' : '0x' + ld [ 'VolumeUniqueIdentifier' ] } , 'controller' : controller , 'physical_disks' : ld [ 'DataDrives' ] , 'volume_name' : ld [ 'LogicalDriveName' ] } logical_disks . append ( prop ) return logical_disks | Convert redfish data of current raid config to generic format . |
41,751 | def _check_smart_storage_message ( self ) : ssc_mesg = self . smart_storage_config_message result = True raid_message = "" for element in ssc_mesg : if "Success" not in element [ 'MessageId' ] : result = False raid_message = element [ 'MessageId' ] return result , raid_message | Check for smart storage message . |
41,752 | def read_raid ( self , controller = None ) : if controller : if not self . logical_drives : msg = ( 'No logical drives found on the controller' ) LOG . debug ( msg ) raise exception . IloLogicalDriveNotFoundError ( msg ) raid_op = 'create_raid' else : raid_op = 'delete_raid' result , raid_message = self . _check_smart_storage_message ( ) if result : configured_raid_settings = self . _conn . get ( self . settings_uri ) raid_data = { 'logical_disks' : self . _generic_format ( configured_raid_settings . json ( ) , controller = controller ) } return raid_data else : if self . physical_drives is None or not raid_message : return else : msg = ( 'Failed to perform the %(opr)s operation ' 'successfully. Error - %(error)s' % { 'opr' : raid_op , 'error' : str ( raid_message ) } ) raise exception . IloError ( msg ) | Get the current RAID configuration from the system . |
41,753 | def delete_raid ( self ) : if not self . logical_drives : msg = ( 'No logical drives found on the controller ' '%(controller)s' % { 'controller' : str ( self . controller_id ) } ) LOG . debug ( msg ) raise exception . IloLogicalDriveNotFoundError ( msg ) lds = [ { 'Actions' : [ { "Action" : "LogicalDriveDelete" } ] , 'VolumeUniqueIdentifier' : logical_drive . volume_unique_identifier } for logical_drive in self . logical_drives ] data = { 'LogicalDrives' : lds , 'DataGuard' : 'Permissive' } self . _conn . put ( self . settings_uri , data = data ) | Clears the RAID configuration from the system . |
41,754 | def _readFile ( self , fname , sldir ) : if sldir . endswith ( '/' ) : fname = str ( sldir ) + str ( fname ) else : fname = str ( sldir ) + '/' + str ( fname ) f = open ( fname , 'r' ) line = f . readline ( ) cols = [ ] ispec = 0 for i in range ( 1 , len ( line . split ( '|' ) ) ) : col = line . split ( '|' ) [ i ] . strip ( ) if '-' in col : ispec += 1 col = col . split ( '-' ) [ 1 ] cols . append ( col ) col_num = { } col_tot = len ( cols ) print ( 'number of species: ' , str ( ispec ) ) print ( 'number of cols: ' , str ( col_tot ) ) col_num = { } for a , b in zip ( cols , list ( range ( col_tot ) ) ) : col_num [ a ] = b lines = f . readlines ( ) data = [ ] for i in range ( len ( lines ) ) : v = lines [ i ] . split ( ) vv = array ( v , dtype = 'float' ) data . append ( vv ) ilines = i print ( "There are " + str ( ilines ) + " time steps found." ) return data , col_num , cols , col_tot , ilines | Private method that reads in the data file and organizes it within this object . |
41,755 | def get ( self , col_str ) : data_column = zeros ( self . ilines ) for i in range ( self . ilines ) : data_column [ i ] = self . data [ i ] [ self . col_num [ col_str ] ] return data_column | get one data column with the data |
41,756 | def plot_xtime ( self , y , x = 'time' , label = 'default' , labelx = None , labely = None , title = None , shape = '.' , logx = False , logy = True , base = 10 ) : if label is 'default' : lab_str = y else : lab_str = label try : self . get ( x ) except KeyError : x = 'age' DataPlot . plot ( self , x , y , legend = lab_str , labelx = labelx , labely = labely , title = title , shape = shape , logx = logx , logy = logy , base = base ) | make a simple plot of two columns against each other . |
41,757 | def getCycleData ( self , attri , fname , numtype = 'cycNum' ) : fname = self . findFile ( fname , numtype ) if self . inputdir == '' : self . inputdir = self . sldir os . chdir ( self . inputdir ) self . sldir = os . getcwd ( ) + '/' f = open ( fname , 'r' ) lines = f . readlines ( ) if self . inputdir != './' : os . chdir ( self . startdir ) self . sldir = self . inputdir for i in range ( len ( lines ) ) : lines [ i ] = lines [ i ] . strip ( ) for i in range ( len ( lines ) ) : if lines [ i ] . startswith ( '#' ) : lines [ i ] = lines [ i ] . strip ( '#' ) tmp = lines [ i ] . split ( ) tmp1 = [ ] for j in range ( len ( tmp ) ) : if tmp [ j ] != '=' or '' : tmp1 . append ( tmp [ j ] ) tmp = tmp1 for j in range ( len ( tmp ) ) : if tmp [ j ] == attri : try : if '.' in tmp [ j + 1 ] : return float ( tmp [ j + 1 ] ) else : return int ( tmp [ j + 1 ] ) except ValueError : return str ( tmp [ j + 1 ] ) elif lines [ i ] . startswith ( 'H' ) : continue else : print ( 'This cycle attribute does not exist' ) print ( 'Returning None' ) return None | In this method a column of data for the associated cycle attribute is returned . |
41,758 | def getColData ( self , attri , fname , numtype = 'cycNum' ) : fname = self . findFile ( fname , numtype ) f = open ( fname , 'r' ) for i in range ( self . index + 1 ) : f . readline ( ) lines = f . readlines ( ) for i in range ( len ( lines ) ) : lines [ i ] = lines [ i ] . strip ( ) lines [ i ] = lines [ i ] . split ( ) index = 0 data = [ ] while index < len ( self . dcols ) : if attri == self . dcols [ index ] : break index += 1 for i in range ( len ( lines ) ) : if index == 5 and len ( lines [ i ] ) == 7 : data . append ( str ( lines [ i ] [ index ] . capitalize ( ) ) + '-' + str ( lines [ i ] [ index + 1 ] ) ) elif index == 5 and len ( lines [ i ] ) != 7 : tmp = str ( lines [ i ] [ index ] ) if tmp [ len ( tmp ) - 1 ] . isdigit ( ) : tmp1 = tmp [ 0 ] + tmp [ 1 ] tmp1 = tmp1 . capitalize ( ) tmp2 = '' for j in range ( len ( tmp ) ) : if j == 0 or j == 1 : continue tmp2 += tmp [ j ] data . append ( tmp1 + '-' + tmp2 ) elif tmp == 'PROT' : data . append ( 'H-1' ) elif tmp == ( 'NEUT' or 'NEUTR' or 'nn' or 'N 1' or 'N-1' ) : data . append ( 'N-1' ) else : data . append ( tmp ) elif index == 0 : data . append ( int ( lines [ i ] [ index ] ) ) else : data . append ( float ( lines [ i ] [ index ] ) ) return array ( data ) | In this method a column of data for the associated column attribute is returned . |
41,759 | def getElement ( self , attri , fname , numtype = 'cycNum' ) : element = [ ] number = [ ] z = [ ] a = [ ] abd = [ ] data = [ ] fname = self . findFile ( fname , numtype ) f = open ( fname , 'r' ) for i in range ( self . index + 1 ) : f . readline ( ) lines = f . readlines ( ) for i in range ( len ( lines ) ) : lines [ i ] = lines [ i ] . strip ( ) lines [ i ] = lines [ i ] . split ( ) index = 0 data = [ ] while index < len ( self . dcols ) : if attri == self . dcols [ index ] : break index += 1 element = self . get ( self . dcols [ 5 ] , fname , numtype ) number = [ ] z = [ ] a = [ ] isom = [ ] abd = [ ] for i in range ( len ( lines ) ) : number . append ( int ( lines [ i ] [ 0 ] ) ) z . append ( float ( lines [ i ] [ 1 ] ) ) isom . append ( float ( lines [ i ] [ 2 ] ) ) abd . append ( float ( lines [ i ] [ 1 ] ) ) index = 0 while index < len ( element ) : if attri == element [ index ] : break index += 1 data . append ( number [ index ] ) data . append ( z [ index ] ) data . append ( a [ index ] ) data . append ( isom [ index ] ) data . append ( abd [ index ] ) return array ( data ) | In this method instead of getting a particular column of data the program gets a particular row of data for a particular element name . |
41,760 | def _getcycle ( self , cycle , decayed = False ) : yps = self . get ( 'ABUNDANCE_MF' , cycle ) z = self . get ( 'Z' , cycle ) a = self . get ( 'A' , cycle ) isomers = self . get ( 'ISOM' , cycle ) a_iso_to_plot , z_iso_to_plot , abunds , isotope_to_plot , el_iso_to_plot , isom = self . _process_abundance_vector ( a , z , isomers , yps ) self . a_iso_to_plot = a_iso_to_plot self . isotope_to_plot = isotope_to_plot self . z_iso_to_plot = z_iso_to_plot self . el_iso_to_plot = el_iso_to_plot self . abunds = array ( abunds ) self . isom = isom if decayed : try : self . decay_idp except AttributeError : print ( "WARNING: decayed in _getcycle ignores isomers " "and will decay alpha-unstable p-rich nuclei as if they were beta+ stable." ) print ( "Initialising decay index pointers ...." ) self . decay_indexpointer ( ) ind_tmp = self . idp_to_stables_in_isostoplot isotope_decay = array ( isotope_to_plot ) [ ind_tmp ] z_iso_decay = array ( z_iso_to_plot ) [ ind_tmp ] a_iso_decay = array ( a_iso_to_plot ) [ ind_tmp ] el_iso_decay = array ( el_iso_to_plot ) [ ind_tmp ] abunds_decay = zeros ( len ( ind_tmp ) , dtype = 'float64' ) for i in range ( len ( isotope_to_plot ) ) : idp = where ( isotope_decay == isotope_to_plot [ self . decay_idp [ i ] ] ) [ 0 ] abunds_decay [ idp ] += abunds [ i ] if self . debug : print ( "Decayed array:" ) for i in range ( len ( ind_tmp ) ) : print ( isotope_decay [ i ] , z_iso_decay [ i ] , a_iso_decay [ i ] , el_iso_decay [ i ] , abunds_decay [ i ] ) self . a_iso_to_plot = a_iso_decay self . isotope_to_plot = isotope_decay self . z_iso_to_plot = z_iso_decay self . el_iso_to_plot = el_iso_decay self . abunds = abunds_decay | Private method for getting a cycle called from get . |
41,761 | def _getattr ( self , attri , fname = None , numtype = 'cycNum' ) : if str ( fname . __class__ ) == "<type 'list'>" : isList = True else : isList = False data = [ ] if fname == None : fname = self . files numtype = 'file' isList = True if isList : for i in range ( len ( fname ) ) : if attri in self . cattrs : data . append ( self . getCycleData ( attri , fname [ i ] , numtype ) ) elif attri in self . dcols : data . append ( self . getColData ( attri , fname [ i ] , numtype ) ) elif attri in self . get ( 'ISOTP' , fname , numtype ) : data . append ( self . getElement ( attri , fname [ i ] , numtype ) ) else : print ( 'Attribute ' + attri + ' does not exist' ) print ( 'Returning none' ) return None else : if attri in self . cattrs : return self . getCycleData ( attri , fname , numtype ) elif attri in self . dcols : return self . getColData ( attri , fname , numtype ) elif attri in self . get ( 'ISOTP' , fname , numtype ) : return self . getElement ( attri , fname , numtype ) else : print ( 'Attribute ' + attri + ' does not exist' ) print ( 'Returning none' ) return None return data | Private method for getting an attribute called from get . |
41,762 | def _readPPN ( self , fname , sldir ) : if sldir . endswith ( os . sep ) : fname = str ( sldir ) + str ( fname ) else : fname = str ( sldir ) + os . sep + str ( fname ) self . sldir += os . sep f = open ( fname , 'r' ) lines = f . readlines ( ) for i in range ( len ( lines ) ) : lines [ i ] = lines [ i ] . strip ( ) cols = [ 'ISOTP' , 'ABUNDANCE_MF' ] for i in range ( len ( lines ) ) : if not lines [ i ] . startswith ( 'H' ) : index = i - 1 break return cols , index | Private method that reads in and organizes the . ppn file Loads the data of the . ppn file into the variable cols . |
41,763 | def _readFile ( self , fname , sldir ) : cattrs = [ ] if sldir . endswith ( os . sep ) : fname = str ( sldir ) + str ( fname ) else : fname = str ( sldir ) + os . sep + str ( fname ) self . sldir += os . sep f = open ( fname , 'r' ) lines = f . readlines ( ) for i in range ( len ( lines ) ) : lines [ i ] = lines [ i ] . strip ( ) cols = lines [ 0 ] . strip ( 'H' ) cols = cols . strip ( ) cols = cols . split ( ) for i in range ( len ( lines ) ) : if lines [ i ] . startswith ( '#' ) : lines [ i ] = lines [ i ] . strip ( '#' ) tmp = lines [ i ] . split ( ) tmp1 = [ ] for j in range ( len ( tmp ) ) : if tmp [ j ] != '=' or '' : tmp1 . append ( tmp [ j ] ) tmp = tmp1 j = 0 while j < len ( tmp ) : cattrs . append ( tmp [ j ] ) j += 2 elif not lines [ i ] . startswith ( 'H' ) : index = i - 1 break return cattrs , cols , index | private method that reads in and organizes the . DAT file Loads the data of the . DAT File into the variables cattrs and cols . In both these cases they are dictionaries but in the case of cols it is a dictionary of numpy array exect for the element element_name where it is just a list |
41,764 | def findFile ( self , fname , numtype ) : numType = numtype . upper ( ) if numType == 'FILE' : return fname elif numType == 'CYCNUM' : try : fname = int ( fname ) except ValueError : print ( 'Improper choice:' + str ( fname ) ) print ( 'Reselecting as 0' ) fname = 0 print ( 'Using ' + self . files [ fname ] ) try : return self . files [ self . indexp_cyc2filels [ fname ] ] except IndexError : mods = array ( self . get ( 'mod' ) , dtype = int ) if fname not in mods : print ( 'You seem to try to plot a cycle that is not present: ' + str ( fname ) ) fname = mods [ - 1 ] print ( 'I will assume you want to plot the last cycle in the run: ' + str ( fname ) ) print ( '[I am not 100% sure this escape is debugged. You better do this again with' ) print ( 'the correct input.]' ) return self . files [ fname ] | Function that finds the associated file for fname when Fname is time or NDump . |
41,765 | def _get_firmware_update_element ( self ) : fw_update_action = self . _actions . update_firmware if not fw_update_action : raise ( sushy . exceptions . MissingActionError ( action = '#UpdateService.SimpleUpdate' , resource = self . _path ) ) return fw_update_action | Get the url for firmware update |
41,766 | def flash_firmware ( self , redfish_inst , file_url ) : action_data = { 'ImageURI' : file_url , } target_uri = self . _get_firmware_update_element ( ) . target_uri try : self . _conn . post ( target_uri , data = action_data ) except sushy . exceptions . SushyError as e : msg = ( ( 'The Redfish controller failed to update firmware ' 'with file %(file)s Error %(error)s' ) % { 'file' : file_url , 'error' : str ( e ) } ) LOG . debug ( msg ) raise exception . IloError ( msg ) self . wait_for_redfish_firmware_update_to_complete ( redfish_inst ) try : state , percent = self . get_firmware_update_progress ( ) except sushy . exceptions . SushyError as e : msg = ( 'Failed to get firmware progress update ' 'Error %(error)s' % { 'error' : str ( e ) } ) LOG . debug ( msg ) raise exception . IloError ( msg ) if state == "Error" : msg = 'Unable to update firmware' LOG . debug ( msg ) raise exception . IloError ( msg ) elif state == "Unknown" : msg = 'Status of firmware update not known' LOG . debug ( msg ) else : LOG . info ( 'Flashing firmware file: %s ... done' , file_url ) | Perform firmware flashing on a redfish system |
41,767 | def pending_settings ( self ) : return BIOSPendingSettings ( self . _conn , utils . get_subresource_path_by ( self , [ "@Redfish.Settings" , "SettingsObject" ] ) , redfish_version = self . redfish_version ) | Property to provide reference to bios_pending_settings instance |
41,768 | def boot_settings ( self ) : return BIOSBootSettings ( self . _conn , utils . get_subresource_path_by ( self , [ "Oem" , "Hpe" , "Links" , "Boot" ] ) , redfish_version = self . redfish_version ) | Property to provide reference to bios boot instance |
41,769 | def iscsi_resource ( self ) : return iscsi . ISCSIResource ( self . _conn , utils . get_subresource_path_by ( self , [ "Oem" , "Hpe" , "Links" , "iScsi" ] ) , redfish_version = self . redfish_version ) | Property to provide reference to bios iscsi resource instance |
41,770 | def bios_mappings ( self ) : return BIOSMappings ( self . _conn , utils . get_subresource_path_by ( self , [ "Oem" , "Hpe" , "Links" , "Mappings" ] ) , redfish_version = self . redfish_version ) | Property to provide reference to bios mappings instance |
41,771 | def _get_base_configs ( self ) : return BIOSBaseConfigs ( self . _conn , utils . get_subresource_path_by ( self , [ "Oem" , "Hpe" , "Links" , "BaseConfigs" ] ) , redfish_version = self . redfish_version ) | Method that returns object of bios base configs . |
41,772 | def update_bios_data_by_post ( self , data ) : bios_settings_data = { 'Attributes' : data } self . _conn . post ( self . path , data = bios_settings_data ) | Update bios data by post |
41,773 | def update_bios_data_by_patch ( self , data ) : bios_settings_data = { 'Attributes' : data } self . _conn . patch ( self . path , data = bios_settings_data ) | Update bios data by patch |
41,774 | def get_uefi_boot_string ( self , mac ) : boot_sources = self . boot_sources if not boot_sources : msg = ( 'Boot sources are not found' ) LOG . debug ( msg ) raise exception . IloError ( msg ) for boot_source in boot_sources : if ( mac . upper ( ) in boot_source [ 'UEFIDevicePath' ] and 'iSCSI' in boot_source [ 'UEFIDevicePath' ] ) : return boot_source [ 'StructuredBootString' ] else : msg = ( 'MAC provided "%s" is Invalid' % mac ) raise exception . IloInvalidInputError ( msg ) | Get uefi iscsi boot string for the host |
41,775 | def Gamma1_gasrad ( beta ) : Gamma3minus1 = ( old_div ( 2. , 3. ) ) * ( 4. - 3. * beta ) / ( 8. - 7. * beta ) Gamma1 = beta + ( 4. - 3. * beta ) * Gamma3minus1 return Gamma1 | Gamma1 for a mix of ideal gas and radiation |
41,776 | def mimf_ferrario ( mi ) : mf = - 0.00012336 * mi ** 6 + 0.003160 * mi ** 5 - 0.02960 * mi ** 4 + 0.12350 * mi ** 3 - 0.21550 * mi ** 2 + 0.19022 * mi + 0.46575 return mf | Curvature MiMf from Ferrario etal . 2005MNRAS . 361 . 1131 . |
41,777 | def int_imf_dm ( m1 , m2 , m , imf , bywhat = 'bymass' , integral = 'normal' ) : ind_m = ( m >= min ( m1 , m2 ) ) & ( m <= max ( m1 , m2 ) ) if integral is 'normal' : int_func = sc . integrate . trapz elif integral is 'cum' : int_func = sc . integrate . cumtrapz else : print ( "Error in int_imf_dm: don't know how to integrate" ) return 0 if bywhat is 'bymass' : return int_func ( m [ ind_m ] * imf [ ind_m ] , m [ ind_m ] ) elif bywhat is 'bynumber' : return int_func ( imf [ ind_m ] , m [ ind_m ] ) else : print ( "Error in int_imf_dm: don't know by what to integrate" ) return 0 | Integrate IMF between m1 and m2 . |
41,778 | def am_orb ( m1 , m2 , a , e ) : a_cm = a * rsun_cm m1_g = m1 * msun_g m2_g = m2 * msun_g J_orb = np . sqrt ( grav_const * a_cm * ( old_div ( ( m1_g ** 2 * m2_g ** 2 ) , ( m1_g + m2_g ) ) ) ) * ( 1 - e ** 2 ) return J_orb | orbital angular momentum . |
41,779 | def period ( A , M1 , M2 ) : A *= rsun_cm print ( A ) velocity = np . sqrt ( grav_const * msun_g * ( M1 + M2 ) / A ) print ( old_div ( velocity , 1.e5 ) ) p = 2. * np . pi * A / velocity p /= ( 60 * 60 * 24. ) return p | calculate binary period from separation . |
41,780 | def mu ( X , Z , A ) : if not isinstance ( Z , np . ndarray ) : Z = np . array ( Z ) if not isinstance ( A , np . ndarray ) : A = np . array ( A ) if not isinstance ( X , np . ndarray ) : X = np . array ( X ) try : mu = old_div ( 1. , sum ( X * ( 1. + Z ) / A ) ) except TypeError : X = np . array ( [ X ] ) A = np . array ( [ A ] ) Z = np . array ( [ Z ] ) mu = old_div ( 1. , sum ( X * ( 1. + Z ) / A ) ) return mu | mean molecular weight assuming full ionisation . |
41,781 | def _get_criteria_matching_disks ( logical_disk , physical_drives ) : matching_physical_drives = [ ] criteria_to_consider = [ x for x in FILTER_CRITERIA if x in logical_disk ] for physical_drive_object in physical_drives : for criteria in criteria_to_consider : logical_drive_value = logical_disk . get ( criteria ) physical_drive_value = getattr ( physical_drive_object , criteria ) if logical_drive_value != physical_drive_value : break else : matching_physical_drives . append ( physical_drive_object ) return matching_physical_drives | Finds the physical drives matching the criteria of logical disk . |
41,782 | def allocate_disks ( logical_disk , server , raid_config ) : size_gb = logical_disk [ 'size_gb' ] raid_level = logical_disk [ 'raid_level' ] number_of_physical_disks = logical_disk . get ( 'number_of_physical_disks' , constants . RAID_LEVEL_MIN_DISKS [ raid_level ] ) share_physical_disks = logical_disk . get ( 'share_physical_disks' , False ) for controller in server . controllers : physical_drives = controller . unassigned_physical_drives physical_drives = _get_criteria_matching_disks ( logical_disk , physical_drives ) if size_gb != "MAX" : reverse_sort = False physical_drives = [ x for x in physical_drives if x . size_gb >= size_gb ] else : reverse_sort = True if len ( physical_drives ) >= number_of_physical_disks : selected_drives = sorted ( physical_drives , key = lambda x : x . size_gb , reverse = reverse_sort ) selected_drive_ids = [ x . id for x in selected_drives ] logical_disk [ 'controller' ] = controller . id physical_disks = selected_drive_ids [ : number_of_physical_disks ] logical_disk [ 'physical_disks' ] = physical_disks return if share_physical_disks : sharable_disk_wwns = [ ] for sharable_logical_disk in raid_config [ 'logical_disks' ] : if ( sharable_logical_disk . get ( 'share_physical_disks' , False ) and 'root_device_hint' in sharable_logical_disk ) : wwn = sharable_logical_disk [ 'root_device_hint' ] [ 'wwn' ] sharable_disk_wwns . append ( wwn ) for controller in server . controllers : sharable_arrays = [ x for x in controller . raid_arrays if x . logical_drives [ 0 ] . wwn in sharable_disk_wwns ] for array in sharable_arrays : criteria_matched_disks = _get_criteria_matching_disks ( logical_disk , array . physical_drives ) if len ( criteria_matched_disks ) != len ( array . physical_drives ) : continue if array . can_accomodate ( logical_disk ) : logical_disk [ 'controller' ] = controller . id logical_disk [ 'array' ] = array . id return raise exception . PhysicalDisksNotFoundError ( size_gb = size_gb , raid_level = raid_level ) | Allocate physical disks to a logical disk . |
41,783 | def trajectory_SgConst ( Sg = 0.1 , delta_logt_dex = - 0.01 ) : logtimerev = np . arange ( 5. , - 6. , delta_logt_dex ) logrho = np . linspace ( 0 , 8.5 , len ( logtimerev ) ) logT = ( old_div ( 1. , 3. ) ) * ( logrho + 21.9161 + np . log10 ( Sg ) ) pl . close ( 3 ) pl . figure ( 3 ) pl . plot ( logrho , logT , label = '$S/\mathrm{N_Ak}=' + str ( Sg ) + '$' ) pl . legend ( loc = 2 ) pl . xlabel ( '$\log \\rho$' ) pl . ylabel ( '$\log T$' ) pl . close ( 5 ) pl . figure ( 5 ) pl . plot ( logtimerev , logrho ) pl . xlabel ( '$\log (t_\mathrm{final}-t)$' ) pl . ylabel ( '$\log \\rho$' ) pl . xlim ( 8 , - 6 ) pl . close ( 6 ) pl . figure ( 6 ) pl . plot ( logtimerev ) pl . ylabel ( '$\log (t_\mathrm{final}-t)$' ) pl . xlabel ( 'cycle' ) T9 = old_div ( 10 ** logT , 1.e9 ) data = [ logtimerev , T9 , logrho ] att . writeTraj ( filename = 'trajectory.input' , data = data , ageunit = 2 , tunit = 1 , rhounit = 1 , idNum = 1 ) | setup trajectories for constant radiation entropy . |
41,784 | def species_list ( what_list ) : if what_list is "CNONe" : list_to_print = [ 'H-1' , 'He-4' , 'C-12' , 'N-14' , 'O-16' , 'Ne-20' ] elif what_list is "sprocess" : list_to_print = [ 'Fe-56' , 'Ge-70' , 'Zn-70' , 'Se-76' , 'Kr-80' , 'Kr-82' , 'Kr-86' , 'Sr-88' , 'Ba-138' , 'Pb-208' ] elif what_list is "burn_stages" : list_to_print = [ 'H-1' , 'He-4' , 'C-12' , 'O-16' , 'Ne-20' , 'Si-28' ] elif what_list is "list_marco_1" : list_to_print = [ 'C-12' , 'O-16' , 'Ne-20' , 'Ne-22' , 'Na-23' , 'Fe-54' , 'Fe-56' , 'Zn-70' , 'Ge-70' , 'Se-76' , 'Kr-80' , 'Kr-82' , 'Sr-88' , 'Y-89' , 'Zr-96' , 'Te-124' , 'Xe-130' , 'Xe-134' , 'Ba-138' ] return list_to_print | provide default lists of elements to plot . |
41,785 | def linestyle ( i , a = 5 , b = 3 ) : lines = [ '-' , '--' , '-.' , ':' ] points = [ 'v' , '^' , '<' , '>' , '1' , '2' , '3' , '4' , 's' , 'p' , '*' , 'h' , 'H' , '+' , 'x' , 'D' , 'd' , 'o' ] colors = [ 'b' , 'g' , 'r' , 'c' , 'm' , 'k' ] ls_string = colors [ sc . mod ( i , 6 ) ] + lines [ sc . mod ( i , 4 ) ] + points [ sc . mod ( i , 18 ) ] mark_i = a + sc . mod ( i , b ) return ls_string , int ( mark_i ) | provide one out of 25 unique combinations of style color and mark |
41,786 | def linestylecb ( i , a = 5 , b = 3 ) : lines = [ '-' , '--' , '-.' , ':' ] points = [ 'v' , '^' , '<' , '>' , '1' , '2' , '3' , '4' , 's' , 'p' , '*' , 'h' , 'H' , '+' , 'x' , 'D' , 'd' , 'o' ] colors = [ 'b' , 'g' , 'r' , 'c' , 'm' , 'k' ] col = colourblind ( i ) style = lines [ sc . mod ( i , 4 ) ] point = points [ sc . mod ( i , 18 ) ] mark_i = a + sc . mod ( i , b ) return style , point , col , mark_i | version of linestyle function with colourblind colour scheme |
41,787 | def symbol_list ( what_list ) : if what_list is "list1" : symbol = [ 'ro' , 'bo' , 'ko' , 'go' , 'mo' , 'r-' , 'b-' , 'k-' , 'g-' , 'm-' , 'r--' , 'b--' , 'k--' , 'g--' , 'r1' ] elif what_list is "list2" : symbol = [ 'r-' , 'b--' , 'g-.' , 'k:' , 'md' , '.' , 'o' , 'v' , '^' , '<' , '>' , '1' , '2' , '3' , '4' , 's' , 'p' , '*' , 'h' , 'H' , '+' ] elif what_list is "lines1" : symbol = [ 'b--' , 'k--' , 'r--' , 'c--' , 'm--' , 'g--' , 'b-' , 'k-' , 'r-' , 'c-' , 'm-' , 'g-' , 'b.' , 'b-.' , 'k-.' , 'r-.' , 'c-.' , 'm-.' , 'g-.' , 'b:' , 'k:' , 'r:' , 'c:' , 'm:' , 'g:' ] elif what_list is "lines2" : symbol = [ 'g:' , 'r-.' , 'k-' , 'b--' , 'k-.' , 'b+' , 'r:' , 'b-' , 'c--' , 'm--' , 'g--' , 'r-' , 'c-' , 'm-' , 'g-' , 'k-.' , 'c-.' , 'm-.' , 'g-.' , 'k:' , 'r:' , 'c:' , 'm:' , 'b-.' , 'b:' ] return symbol | provide default symbol lists |
41,788 | def strictly_monotonic ( bb ) : cc = bb [ np . where ( bb >= 0 ) ] cc . sort ( ) dc = cc [ 1 : ] - cc [ : - 1 ] dc = np . insert ( dc , 0 , 1 ) dc_mask = np . ma . masked_equal ( dc , 0 ) return np . ma . array ( cc , mask = dc_mask . mask ) . compressed ( ) | bb is an index array which may have numerous double or triple occurrences of indices such as for example the decay_index_pointer . This method removes all entries < = - then all dublicates and finally returns a sorted list of indices . |
41,789 | def solar ( filename_solar , solar_factor ) : f0 = open ( filename_solar ) sol = f0 . readlines ( ) f0 . close sol [ 0 ] . split ( " " ) global names_sol names_sol = [ ] global z_sol z_sol = [ ] yps = np . zeros ( len ( sol ) ) mass_number = np . zeros ( len ( sol ) ) for i in range ( len ( sol ) ) : z_sol . append ( int ( sol [ i ] [ 1 : 3 ] ) ) names_sol . extend ( [ sol [ i ] . split ( " " ) [ 0 ] [ 4 : ] ] ) yps [ i ] = float ( sol [ i ] . split ( " " ) [ 1 ] ) * solar_factor try : mass_number [ i ] = int ( names_sol [ i ] [ 2 : 5 ] ) except ValueError : print ( "WARNING:" ) print ( "This initial abundance file uses an element name that does" ) print ( "not contain the mass number in the 3rd to 5th position." ) print ( "It is assumed that this is the proton and we will change" ) print ( "the name to 'h 1' to be consistent with the notation used in" ) print ( "iniab.dat files" ) names_sol [ i ] = 'h 1' mass_number [ i ] = int ( names_sol [ i ] [ 2 : 5 ] ) if mass_number [ i ] == 1 or mass_number [ i ] == 4 : yps [ i ] = old_div ( yps [ i ] , solar_factor ) global solar_abundance solar_abundance = { } for a , b in zip ( names_sol , yps ) : solar_abundance [ a ] = b z_bismuth = 83 global solar_elem_abund solar_elem_abund = np . zeros ( z_bismuth ) for i in range ( z_bismuth ) : dummy = 0. for j in range ( len ( solar_abundance ) ) : if z_sol [ j ] == i + 1 : dummy = dummy + float ( solar_abundance [ names_sol [ j ] ] ) solar_elem_abund [ i ] = dummy | read solar abundances from filename_solar . |
41,790 | def convert_specie_naming_from_h5_to_ppn ( isotope_names ) : spe_rude1 = [ ] spe_rude2 = [ ] spe_rude3 = [ ] for i in range ( len ( isotope_names ) ) : spe_rude1 . append ( isotope_names [ i ] . split ( '-' ) [ 0 ] ) spe_rude2 . append ( isotope_names [ i ] . split ( '-' ) [ 1 ] ) k = 0 for i in range ( len ( spe_rude1 ) ) : try : if int ( spe_rude2 [ i ] ) < 10 : spe_rude3 . append ( str ( spe_rude1 [ i ] [ 0 : 2 ] ) + str ( ' ' ) + str ( spe_rude2 [ i ] [ 0 : 3 ] ) ) elif int ( spe_rude2 [ i ] ) >= 10 and int ( spe_rude2 [ i ] ) < 100 : spe_rude3 . append ( str ( spe_rude1 [ i ] [ 0 : 2 ] ) + str ( ' ' ) + str ( spe_rude2 [ i ] [ 0 : 3 ] ) ) elif int ( spe_rude2 [ i ] ) >= 100 : spe_rude3 . append ( str ( spe_rude1 [ i ] [ 0 : 2 ] ) + str ( spe_rude2 [ i ] [ 0 : 3 ] ) ) except ValueError : k = k + 1 None global spe spe = [ ] global n_array n_array = [ ] for i in range ( len ( spe_rude3 ) ) : if len ( str ( spe_rude1 [ i ] ) ) == 1 : spe . append ( str ( spe_rude3 [ i ] [ 0 : 1 ] ) + str ( ' ' ) + str ( spe_rude3 [ i ] [ 1 : 4 ] ) ) else : spe . append ( spe_rude3 [ i ] ) n_array . append ( i ) if spe [ 0 ] == 'Ne 1' : spe [ 0 ] = 'N 1' global amass_int amass_int = np . zeros ( len ( spe_rude2 ) ) for i in range ( len ( spe_rude2 ) - k ) : amass_int [ i ] = int ( spe_rude2 [ i ] ) global znum_int znum_int = np . zeros ( len ( spe ) ) for i in range ( len ( spe ) ) : znum_int [ i ] = Utils . elements_names . index ( str ( spe [ i ] [ 0 : 2 ] ) . strip ( ) ) if spe [ 0 ] == 'N 1' : znum_int [ 0 ] = 0 global index_atomic_number index_atomic_number = { } for a , b in zip ( spe , znum_int ) : index_atomic_number [ a ] = b | read isotopes names from h5 files and convert them according to standard scheme used inside ppn and mppnp . Also Z and A are recalculated for these species . Isomers are excluded for now since there were recent changes in isomers name . As soon as the isomers names are settled than Z and A provided here will be obsolete and can be changed by usual Z and A . |
41,791 | def define_zip_index_for_species ( names_ppn_world , number_names_ppn_world ) : global cl cl = { } for a , b in zip ( names_ppn_world , number_names_ppn_world ) : cl [ a ] = b | This just give back cl that is the original index as it is read from files from a data file . |
41,792 | def element_abund_marco ( i_decay , stable_isotope_list , stable_isotope_identifier , mass_fractions_array_not_decayed , mass_fractions_array_decayed ) : global elem_abund elem_abund = np . zeros ( z_bismuth ) global elem_abund_decayed elem_abund_decayed = np . zeros ( z_bismuth ) global elem_prod_fac elem_prod_fac = np . zeros ( z_bismuth ) global elem_prod_fac_decayed elem_prod_fac_decayed = np . zeros ( z_bismuth ) for i in range ( z_bismuth ) : dummy = 0. for j in range ( len ( spe ) ) : if znum_int [ j ] == i + 1 and stable_isotope_identifier [ j ] > 0.5 : dummy = dummy + float ( mass_fractions_array_not_decayed [ j ] ) elem_abund [ i ] = dummy for i in range ( z_bismuth ) : if index_stable [ i ] == 1 : elem_prod_fac [ i ] = float ( old_div ( elem_abund [ i ] , solar_elem_abund [ i ] ) ) elif index_stable [ i ] == 0 : elem_prod_fac [ i ] = 0. if i_decay == 2 : for i in range ( z_bismuth ) : dummy = 0. for j in range ( len ( mass_fractions_array_decayed ) ) : if znum_int [ cl [ stable_isotope_list [ j ] . capitalize ( ) ] ] == i + 1 : dummy = dummy + float ( mass_fractions_array_decayed [ j ] ) elem_abund_decayed [ i ] = dummy for i in range ( z_bismuth ) : if index_stable [ i ] == 1 : elem_prod_fac_decayed [ i ] = float ( old_div ( elem_abund_decayed [ i ] , solar_elem_abund [ i ] ) ) elif index_stable [ i ] == 0 : elem_prod_fac_decayed [ i ] = 0. | Given an array of isotopic abundances not decayed and a similar array of isotopic abundances not decayed here elements abundances and production factors for elements are calculated |
41,793 | def fit ( self , x , y , dcoef = 'none' ) : self . x = x self . y = y if dcoef is not 'none' : coef = dcoef else : coef = self . coef fcoef = optimize . leastsq ( self . residual , coef , args = ( y , self . func , x ) ) self . fcoef = fcoef [ 0 ] . tolist ( ) return fcoef [ 1 ] | performs the fit |
41,794 | def plot ( self , ifig = 1 , data_label = 'data' , fit_label = 'fit' , data_shape = 'o' , fit_shape = '-' ) : if len ( self . coef ) is not len ( self . fcoef ) : print ( "Warning: the fitted coefficient list is not same" ) print ( " length as guessed list - still I will try ..." ) pl . figure ( ifig ) pl . plot ( self . x , self . y , data_shape , label = data_label ) if fit_label is 'fit' : fit_label = self . __name__ pl . plot ( self . x , self . func ( self . fcoef , self . x ) , fit_shape , label = fit_label ) pl . legend ( ) | plot the data and the fitted function . |
41,795 | def _read_isotopedatabase ( self , ffname = 'isotopedatabase.txt' ) : name = self . sldir + ffname z_db , a_db , el_db , stable_a_db , logic_db = np . loadtxt ( name , unpack = True , dtype = 'str' ) z_db = np . array ( z_db , dtype = 'int' ) a_db = np . array ( a_db , dtype = 'int' ) stable_a_db = np . array ( stable_a_db , dtype = 'int' ) charge_from_element_name = { } for name in self . stable_names : if name == 'Neutron' or name == 'Neut' or name == 'NEUT' or name == 'N-1' : name = 'nn' try : zz = z_db [ np . where ( el_db == name ) ] [ 0 ] charge_from_element_name [ name ] = zz except IndexError : print ( name + " does not exist in this run" ) return z_db , a_db , el_db , stable_a_db , logic_db , charge_from_element_name | This private method reads the isotopedatabase . txt file in sldir run dictory and returns z a elements the cutoff mass for each species that delineate beta + and beta - decay and the logical in the last column . Also provides charge_from_element dictionary according to isotopedatabase . txt . |
41,796 | def is_stable ( self , species ) : element_name_of_iso = species . split ( '-' ) [ 0 ] try : a_of_iso = int ( species . split ( '-' ) [ 1 ] ) except ValueError : a_of_iso = 999 idp_of_element_in_stable_names = self . stable_names . index ( element_name_of_iso ) if a_of_iso in self . stable_el [ idp_of_element_in_stable_names ] [ 1 : ] : return True else : return False | This routine accepts input formatted like He - 3 and checks with stable_el list if occurs in there . If it does the routine returns True otherwise False . |
41,797 | def write_mesa ( self , mesa_isos_file = 'isos.txt' , add_excess_iso = 'fe56' , outfile = 'xa_iniabu.dat' , header_string = 'initial abundances for a MESA run' , header_char = '!' ) : f = open ( 'isos.txt' ) a = f . readlines ( ) isos = [ ] for i in range ( len ( a ) ) : isos . append ( a [ i ] . strip ( ) . rstrip ( ',' ) ) mesa_names = [ ] abus = [ ] for i in range ( len ( self . z ) ) : b = self . names [ i ] . split ( ) a = '' a = a . join ( b ) if a in isos : mesa_names . append ( a ) abus . append ( self . abu [ i ] ) for i in range ( len ( isos ) ) : if isos [ i ] not in mesa_names : mesa_names . append ( isos [ i ] ) abus . append ( 0.0 ) excess = 1. - np . sum ( np . array ( abus ) ) abus = np . array ( abus ) abus [ mesa_names . index ( add_excess_iso ) ] += excess dcols = [ '' , '' ] data = [ mesa_names , abus ] hd = [ header_string ] att . write ( outfile , hd , dcols , data , header_char = header_char ) return mesa_names , abus | Write initial abundance file returns written abundances and mesa names . |
41,798 | def set_and_normalize ( self , species_hash ) : sum_before = sum ( self . abu ) for i in range ( len ( species_hash ) ) : sum_before -= self . abu [ self . hindex [ list ( species_hash . keys ( ) ) [ i ] ] ] print ( "sum_before = " + str ( sum_before ) ) normalization_factor = old_div ( 1.0 - sum ( species_hash . values ( ) ) , sum_before ) print ( "normalizing the rest witih factor " + str ( normalization_factor ) ) self . abu *= normalization_factor for i in range ( len ( species_hash ) ) : self . abu [ self . hindex [ list ( species_hash . keys ( ) ) [ i ] ] ] = list ( species_hash . values ( ) ) [ i ] for l in range ( len ( self . abu ) ) : if self . abu [ l ] <= 1e-99 : self . abu [ l ] = 1.0e-99 for name in self . habu : self . habu [ name ] = self . abu [ self . hindex [ name ] ] | species_hash is a hash array in which you provide abundances referenced by species names that you want to set to some particular value ; all other species are then normalised so that the total sum is 1 . |
41,799 | def drive_rotational_speed_rpm ( self ) : drv_rot_speed_rpm = set ( ) for member in self . get_members ( ) : if member . rotational_speed_rpm is not None : drv_rot_speed_rpm . add ( member . rotational_speed_rpm ) return drv_rot_speed_rpm | Gets the set of rotational speed of the HDD drives |