idx
int64
0
251k
question
stringlengths
53
3.53k
target
stringlengths
5
1.23k
len_question
int64
20
893
len_target
int64
3
238
21,000
def clean_unused_venvs(self, max_days_to_keep):
    """Compact usage stats and remove venvs not used for more than max_days_to_keep.

    Runs under the stats file lock so concurrent fades processes don't race on
    the usage file.
    """
    with filelock(self.stat_file_lock):
        now = datetime.utcnow()
        venvs_dict = self._get_compacted_dict_usage_from_file()
        # iterate over a copy because entries are deleted while iterating
        for venv_uuid, usage_date in venvs_dict.copy().items():
            usage_date = self._str_to_datetime(usage_date)
            if (now - usage_date).days > max_days_to_keep:
                # remove venv from usage dict
                del venvs_dict[venv_uuid]
                venv_meta = self.venvscache.get_venv(uuid=venv_uuid)
                if venv_meta is None:
                    # if meta isn't found means that something had failed previously and
                    # usage_file wasn't updated.
                    continue
                env_path = venv_meta['env_path']
                logger.info("Destroying virtualenv at: %s", env_path)  # #256
                destroy_venv(env_path, self.venvscache)
        self._write_compacted_dict_usage_to_file(venvs_dict)
Compact usage stats and remove venvs .
275
9
21,001
def logged_exec(cmd):
    """Execute a command, redirecting the output to the log.

    Return the collected stdout lines (without trailing newlines); raise
    ExecutionError carrying the retcode, the command and its output when the
    command exits non-zero.
    """
    logger = logging.getLogger('fades.exec')
    logger.debug("Executing external command: %r", cmd)
    p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
                         universal_newlines=True)
    stdout = []
    for line in p.stdout:
        # strip only a trailing newline: the last line of the stream may not end
        # with one, and blindly slicing line[:-1] would eat a real character
        line = line.rstrip("\n")
        stdout.append(line)
        logger.debug(STDOUT_LOG_PREFIX + line)
    retcode = p.wait()
    if retcode:
        raise ExecutionError(retcode, cmd, stdout)
    return stdout
Execute a command redirecting the output to the log .
146
12
21,002
def _get_specific_dir(dir_type):
    """Get a specific directory, using some XDG base, with sensible default.

    Creates the directory when missing and returns its path.
    """
    if SNAP_BASEDIR_NAME in os.environ:
        logger.debug("Getting base dir information from SNAP_BASEDIR_NAME env var.")
        direct = os.path.join(os.environ[SNAP_BASEDIR_NAME], dir_type)
    else:
        try:
            basedirectory = _get_basedirectory()
        except ImportError:
            logger.debug("Using last resort base dir: ~/.fades")
            from os.path import expanduser
            direct = os.path.join(expanduser("~"), ".fades")
        else:
            xdg_attrib = 'xdg_{}_home'.format(dir_type)
            base = getattr(basedirectory, xdg_attrib)
            direct = os.path.join(base, 'fades')
    # exist_ok avoids the check-then-create race when two fades run concurrently
    os.makedirs(direct, exist_ok=True)
    return direct
Get a specific directory using some XDG base with sensible default .
214
13
21,003
def _get_interpreter_info(interpreter=None):
    """Return the interpreter's full path using pythonX.Y format."""
    if interpreter is None:
        # If interpreter is None by default returns the current interpreter data.
        major, minor = sys.version_info[:2]
        executable = sys.executable
    else:
        # ask the requested interpreter itself for its version/path info (JSON)
        args = [interpreter, '-c', SHOW_VERSION_CMD]
        try:
            requested_interpreter_info = logged_exec(args)
        except Exception as error:
            logger.error("Error getting requested interpreter version: %s", error)
            raise FadesError("Could not get interpreter version")
        requested_interpreter_info = json.loads(requested_interpreter_info[0])
        executable = requested_interpreter_info['path']
        major = requested_interpreter_info['major']
        minor = requested_interpreter_info['minor']
    if executable[-1].isdigit():
        # strip a trailing version from the binary name (e.g. "python3.5" -> "python")
        # so the version can be re-appended uniformly below
        executable = executable.split(".")[0][:-1]
    interpreter = "{}{}.{}".format(executable, major, minor)
    return interpreter
Return the interpreter's full path using pythonX.Y format.
234
13
21,004
def get_interpreter_version(requested_interpreter):
    """Return a sanitized interpreter and whether it is the current one."""
    logger.debug('Getting interpreter version for: %s', requested_interpreter)
    current_interpreter = _get_interpreter_info()
    logger.debug('Current interpreter is %s', current_interpreter)
    # nothing requested: the current interpreter is, by definition, the one
    if requested_interpreter is None:
        return (current_interpreter, True)
    sanitized = _get_interpreter_info(requested_interpreter)
    is_current = sanitized == current_interpreter
    logger.debug('Interpreter=%s. It is the same as fades?=%s',
                 sanitized, is_current)
    return (sanitized, is_current)
Return a sanitized interpreter and indicates if it is the current one .
173
14
21,005
def check_pypi_updates(dependencies):
    """Return the dependencies dict with its 'pypi' list checked against PyPI."""
    dependencies_up_to_date = []
    for dependency in dependencies.get('pypi', []):
        # get latest version from PyPI api
        try:
            latest_version = get_latest_version_number(dependency.project_name)
        except Exception as error:
            logger.warning("--check-updates command will be aborted. Error: %s", error)
            return dependencies
        # get required version
        required_version = None
        if dependency.specs:
            _, required_version = dependency.specs[0]
        if required_version:
            dependencies_up_to_date.append(dependency)
            # NOTE(review): versions compared with plain > / < — if these are
            # strings this is lexicographic, not semantic ("9" > "10"); confirm
            # what get_latest_version_number returns.
            if latest_version > required_version:
                logger.info("There is a new version of %s: %s",
                            dependency.project_name, latest_version)
            elif latest_version < required_version:
                logger.warning("The requested version for %s is greater "
                               "than latest found in PyPI: %s",
                               dependency.project_name, latest_version)
            else:
                logger.info("The requested version for %s is the latest one in PyPI: %s",
                            dependency.project_name, latest_version)
        else:
            # no version requested: pin the dependency to the latest one found
            project_name_plus = "{}=={}".format(dependency.project_name, latest_version)
            dependencies_up_to_date.append(pkg_resources.Requirement.parse(project_name_plus))
            logger.info("The latest version of %r is %s and will use it.",
                        dependency.project_name, latest_version)
    dependencies["pypi"] = dependencies_up_to_date
    return dependencies
Return a list of dependencies to upgrade .
360
8
21,006
def _pypi_head_package(dependency):
    """Hit PyPI with an HTTP HEAD to check if the dependency's package exists."""
    if dependency.specs:
        # a specific version was requested: check that exact release page
        _, version = dependency.specs[0]
        url = BASE_PYPI_URL_WITH_VERSION.format(name=dependency.project_name, version=version)
    else:
        url = BASE_PYPI_URL.format(name=dependency.project_name)
    logger.debug("Doing HEAD requests against %s", url)
    req = request.Request(url, method='HEAD')
    try:
        response = request.urlopen(req)
    except HTTPError as http_error:
        if http_error.code == HTTP_STATUS_NOT_FOUND:
            # 404 is the definitive "does not exist" answer
            return False
        else:
            raise
    if response.status == HTTP_STATUS_OK:
        logger.debug("%r exists in PyPI.", dependency)
        return True
    else:
        # Maybe we are getting something like a redirect. In this case we only
        # warn the user and keep trying to install the dependency.
        # In the worst scenario fades will fail to install it.
        logger.warning("Got a (unexpected) HTTP_STATUS=%r and reason=%r checking if %r exists",
                       response.status, response.reason, dependency)
        return True
Hit pypi with a http HEAD to check if pkg_name exists .
264
17
21,007
def check_pypi_exists(dependencies):
    """Check that every indicated 'pypi' dependency actually exists in PyPI."""
    pypi_dependencies = dependencies.get('pypi', [])
    for dependency in pypi_dependencies:
        logger.debug("Checking if %r exists in PyPI", dependency)
        try:
            exists = _pypi_head_package(dependency)
        except Exception as error:
            logger.error("Error checking %s in PyPI: %r", dependency, error)
            raise FadesError("Could not check if dependency exists in PyPI")
        if not exists:
            logger.error("%s doesn't exists in PyPI.", dependency)
            return False
    return True
Check if the indicated dependencies actually exist in pypi.
132
12
21,008
def download_remote_script(url):
    """Download the content of a remote script to a local temp file.

    Return the temp file path; the file is kept on disk (delete=False).
    """
    temp_fh = tempfile.NamedTemporaryFile('wt', encoding='utf8', suffix=".py", delete=False)
    downloader = _ScriptDownloader(url)
    logger.info(
        "Downloading remote script from %r using (%r downloader) to %r",
        url, downloader.name, temp_fh.name)
    temp_fh.write(downloader.get())
    temp_fh.close()
    return temp_fh.name
Download the content of a remote script to a local temp file .
123
13
21,009
def dump_to_log(self, logger):
    """Send the cmd info and collected stdout to the given logger."""
    logger.error("Execution ended in %s for cmd %s", self._retcode, self._cmd)
    for collected_line in self._collected_stdout:
        logger.error(STDOUT_LOG_PREFIX + collected_line)
Send the cmd info and collected stdout to logger .
67
11
21,010
def _decide ( self ) : netloc = parse . urlparse ( self . url ) . netloc name = self . NETLOCS . get ( netloc , 'raw' ) return name
Find out which method should be applied to download that URL .
42
12
21,011
def get(self):
    """Get the script content from the URL using the decided downloader."""
    downloader = getattr(self, "_download_" + self.name)
    return downloader()
Get the script content from the URL using the decided downloader .
33
13
21,012
def _download_raw(self, url=None):
    """Download content from the given URL (or self.url) directly."""
    target = self.url if url is None else url
    req = request.Request(target, headers=self.HEADERS_PLAIN)
    raw = request.urlopen(req).read()
    return raw.decode("utf8")
Download content from URL directly .
61
6
21,013
def _download_linkode(self):
    """Download content from Linkode pastebin through its JSON API."""
    # build the API url from the last path component, dropping a leading '#'
    linkode_id = self.url.split("/")[-1]
    if linkode_id.startswith("#"):
        linkode_id = linkode_id[1:]
    api_url = "https://linkode.org/api/1/linkodes/" + linkode_id
    req = request.Request(api_url, headers=self.HEADERS_JSON)
    raw = request.urlopen(req).read()
    payload = json.loads(raw.decode("utf8"))
    return payload['content']
Download content from Linkode pastebin .
141
8
21,014
def _download_pastebin ( self ) : paste_id = self . url . split ( "/" ) [ - 1 ] url = "https://pastebin.com/raw/" + paste_id return self . _download_raw ( url )
Download content from Pastebin itself .
53
6
21,015
def _download_gist ( self ) : parts = parse . urlparse ( self . url ) url = "https://gist.github.com" + parts . path + "/raw" return self . _download_raw ( url )
Download content from github's pastebin.
51
8
21,016
def get_version():
    """Retrieve the package version from the _version.py file."""
    with open('fades/_version.py') as fh:
        match = re.search("\(([^']*)\)", fh.read())
    if match is None:
        raise ValueError("Unrecognized version in 'fades/_version.py'")
    # the file holds a tuple like (x, y, z): join the parts with dots
    return match.groups()[0].replace(', ', '.')
Retrieves package version from the file .
87
9
21,017
def initialize_options ( self ) : install . initialize_options ( self ) # leave the proper script according to the platform script = SCRIPT_WIN if sys . platform == "win32" else SCRIPT_REST self . distribution . scripts = [ script ]
Run parent initialization and then fix the scripts var .
55
10
21,018
def run ( self ) : install . run ( self ) # man directory if self . _custom_man_dir is not None : if not os . path . exists ( self . _custom_man_dir ) : os . makedirs ( self . _custom_man_dir ) shutil . copy ( "man/fades.1" , self . _custom_man_dir )
Run parent install and then save the man file .
84
10
21,019
def finalize_options(self):
    """Alter the installation path, computing where the man page goes."""
    install.finalize_options(self)
    if self.prefix is None:
        # no place for man page (like in a 'snap')
        man_dir = None
    else:
        man_dir = os.path.join(self.prefix, "share", "man", "man1")
        # if we have 'root', put the building path also under it (used normally
        # by pbuilder); man_dir[1:] strips the leading separator so join works
        if self.root is not None:
            man_dir = os.path.join(self.root, man_dir[1:])
    self._custom_man_dir = man_dir
Alter the installation path .
136
6
21,020
def options_from_file(args):
    """Get an argparse.Namespace and return it updated with options from config files."""
    logger.debug("updating options from config files")
    updated_from_file = []
    for config_file in CONFIG_FILES:
        logger.debug("updating from: %s", config_file)
        parser = ConfigParser()
        parser.read(config_file)
        try:
            items = parser.items('fades')
        except NoSectionError:
            # config file missing or without a [fades] section: nothing to apply
            continue
        for config_key, config_value in items:
            # config values are raw strings; normalize booleans
            if config_value in ['true', 'false']:
                config_value = config_value == 'true'
            if config_key in MERGEABLE_CONFIGS:
                # mergeable options accumulate across files instead of overriding
                current_value = getattr(args, config_key, [])
                if current_value is None:
                    current_value = []
                current_value.append(config_value)
                setattr(args, config_key, current_value)
            if not getattr(args, config_key, False) or config_key in updated_from_file:
                # By default all 'store-true' arguments are False. So we only
                # override them if they are False. If they are True means that the
                # user is setting those on the CLI.
                setattr(args, config_key, config_value)
                updated_from_file.append(config_key)
                logger.debug("updating %s to %s from file settings", config_key, config_value)
    return args
Get an argparse.Namespace and return it updated with options from config files.
309
17
21,021
def _venv_match(self, installed, requirements):
    """Return the installed deps satisfying the requirements, or None if no match.

    NOTE(review): despite the historical one-line summary ("Return True..."),
    the value returned on success is a list of satisfying dependencies.
    """
    if not requirements:
        # special case for no requirements, where we can't actually
        # check anything: the venv is useful if nothing installed too
        return None if installed else []
    satisfying_deps = []
    for repo, req_deps in requirements.items():
        useful_inst = set()
        if repo not in installed:
            # the venv doesn't even have the repo
            return None
        if repo == REPO_VCS:
            inst_deps = {VCSDependency(url) for url in installed[repo].keys()}
        else:
            inst_deps = {Distribution(project_name=dep, version=ver)
                         for (dep, ver) in installed[repo].items()}
        for req in req_deps:
            for inst in inst_deps:
                if inst in req:
                    useful_inst.add(inst)
                    break
            else:
                # nothing installed satisfied that requirement
                return None
        # assure *all* that is installed is useful for the requirements
        if useful_inst == inst_deps:
            satisfying_deps.extend(inst_deps)
        else:
            return None
    # it did it through!
    return satisfying_deps
Return True if what is installed satisfies the requirements .
257
10
21,022
def _match_by_uuid ( self , current_venvs , uuid ) : for venv_str in current_venvs : venv = json . loads ( venv_str ) env_path = venv . get ( 'metadata' , { } ) . get ( 'env_path' ) _ , env_uuid = os . path . split ( env_path ) if env_uuid == uuid : return venv
Select a venv matching exactly by uuid .
97
10
21,023
def _select_better_fit(self, matching_venvs):
    """Receive a list of (matching, venv) pairs and decide which venv is the best fit."""
    # keep the venvs in a separate array, to pick up the winner, and the (sorted, to compare
    # each dependency with its equivalent) in other structure to later compare
    venvs = []
    to_compare = []
    for matching, venv in matching_venvs:
        to_compare.append(sorted(matching, key=lambda req: getattr(req, 'key', '')))
        venvs.append(venv)
    # compare each n-tuple of dependencies to see which one is bigger, and add score to the
    # position of the winner
    scores = [0] * len(venvs)
    for dependencies in zip(*to_compare):
        if not isinstance(dependencies[0], Distribution):
            # only distribution URLs can be compared
            continue
        winner = dependencies.index(max(dependencies))
        scores[winner] = scores[winner] + 1
    # get the rightmost winner (in case of ties, to select the latest venv)
    winner_pos = None
    winner_score = -1
    for i, score in enumerate(scores):
        if score >= winner_score:
            winner_score = score
            winner_pos = i
    return venvs[winner_pos]
Receive a list of matching venvs and decide which one is the best fit .
271
17
21,024
def _match_by_requirements(self, current_venvs, requirements, interpreter, options):
    """Select a venv matching interpreter and options, complying with requirements.

    Return None when nothing matches; delegate tie-breaking to _select_better_fit.
    """
    matching_venvs = []
    for venv_str in current_venvs:
        venv = json.loads(venv_str)
        # simple filter, need to have exactly same options and interpreter
        if venv.get('options') != options or venv.get('interpreter') != interpreter:
            continue
        # requirements complying: result can be None (no comply) or a score to later sort
        matching = self._venv_match(venv['installed'], requirements)
        if matching is not None:
            matching_venvs.append((matching, venv))
    if not matching_venvs:
        return
    return self._select_better_fit(matching_venvs)
Select a venv matching interpreter and options complying with requirements .
174
12
21,025
def _select(self, current_venvs, requirements=None, interpreter='', uuid='', options=None):
    """Select which venv satisfies the received requirements (or the given uuid)."""
    if uuid:
        logger.debug("Searching a venv by uuid: %s", uuid)
        found = self._match_by_uuid(current_venvs, uuid)
    else:
        logger.debug("Searching a venv for: reqs=%s interpreter=%s options=%s",
                     requirements, interpreter, options)
        found = self._match_by_requirements(
            current_venvs, requirements, interpreter, options)
    if found is None:
        logger.debug("No matching venv found :(")
        return None
    logger.debug("Found a matching venv! %s", found)
    return found['metadata']
Select which venv satisfy the received requirements .
180
9
21,026
def get_venv(self, requirements=None, interpreter='', uuid='', options=None):
    """Find a venv that serves these requirements, if any."""
    cached_lines = self._read_cache()
    return self._select(
        cached_lines, requirements, interpreter, uuid=uuid, options=options)
Find a venv that serves these requirements if any .
58
11
21,027
def store(self, installed_stuff, metadata, interpreter, options):
    """Store the virtualenv metadata for the indicated installed_stuff."""
    new_content = {'timestamp': int(time.mktime(time.localtime())),
                   'installed': installed_stuff,
                   'metadata': metadata,
                   'interpreter': interpreter,
                   'options': options}
    logger.debug("Storing installed=%s metadata=%s interpreter=%s options=%s",
                 installed_stuff, metadata, interpreter, options)
    # append under the file lock so concurrent fades runs don't interleave writes
    with filelock(self.lockpath):
        self._write_cache([json.dumps(new_content)], append=True)
Store the virtualenv metadata for the indicated installed_stuff .
137
12
21,028
def remove(self, env_path):
    """Remove metadata for a given virtualenv from the cache."""
    with filelock(self.lockpath):
        cache = self._read_cache()
        # lazy %-style logging args (consistent with the rest of the project)
        # instead of eagerly building the message with the % operator
        logger.debug("Removing virtualenv from cache: %s", env_path)
        lines = [
            line for line in cache
            if json.loads(line).get('metadata', {}).get('env_path') != env_path
        ]
        self._write_cache(lines)
Remove metadata for a given virtualenv from cache .
98
10
21,029
def _read_cache ( self ) : if os . path . exists ( self . filepath ) : with open ( self . filepath , 'rt' , encoding = 'utf8' ) as fh : lines = [ x . strip ( ) for x in fh ] else : logger . debug ( "Index not found, starting empty" ) lines = [ ] return lines
Read virtualenv metadata from cache .
80
7
21,030
def _write_cache ( self , lines , append = False ) : mode = 'at' if append else 'wt' with open ( self . filepath , mode , encoding = 'utf8' ) as fh : fh . writelines ( line + '\n' for line in lines )
Write virtualenv metadata to cache .
64
7
21,031
def install(self, dependency):
    """Install a new dependency with pip, bootstrapping pip itself if needed."""
    if not self.pip_installed:
        logger.info("Need to install a dependency with pip, but no builtin, "
                    "doing it manually (just wait a little, all should go well)")
        self._brute_force_install_pip()
    # split to pass several tokens on multiword dependency (this is very specific for '-e' on
    # external requirements, but implemented generically; note that this does not apply for
    # normal reqs, because even if it originally is 'foo > 1.2', after parsing it loses the
    # internal spaces)
    str_dep = str(dependency)
    args = [self.pip_exe, "install"] + str_dep.split()
    if self.options:
        for option in self.options:
            args.extend(option.split())
    logger.info("Installing dependency: %r", str_dep)
    try:
        helpers.logged_exec(args)
    except helpers.ExecutionError as error:
        # pip itself failed: its collected output is already in the error, dump it
        error.dump_to_log(logger)
        raise error
    except Exception as error:
        logger.exception("Error installing %s: %s", str_dep, error)
        raise error
Install a new dependency .
254
5
21,032
def get_version(self, dependency):
    """Return the installed version parsing the output of 'pip show'."""
    logger.debug("getting installed version for %s", dependency)
    stdout = helpers.logged_exec([self.pip_exe, "show", str(dependency)])
    # exactly one 'Version: x.y.z' line is expected in the pip show output
    version = [line for line in stdout if line.startswith('Version:')]
    if len(version) == 1:
        version = version[0].strip().split()[1]
        logger.debug("Installed version of %s is: %s", dependency, version)
        return version
    else:
        logger.error('Fades is having problems getting the installed version. '
                     'Run with -v or check the logs for details')
        return ''
Return the installed version parsing the output of pip show .
153
11
21,033
def _brute_force_install_pip(self):
    """A brute force install of pip itself, using the downloaded installer script."""
    if os.path.exists(self.pip_installer_fname):
        logger.debug("Using pip installer from %r", self.pip_installer_fname)
    else:
        logger.debug(
            "Installer for pip not found in %r, downloading it", self.pip_installer_fname)
        self._download_pip_installer()
    logger.debug("Installing PIP manually in the virtualenv")
    python_exe = os.path.join(self.env_bin_path, "python")
    # '-I' makes the installer ignore already-installed packages
    helpers.logged_exec([python_exe, self.pip_installer_fname, '-I'])
    self.pip_installed = True
A brute force install of pip itself .
171
8
21,034
def _generate_configs_from_default(self, overrides=None):
    # type: (Dict[str, int]) -> Dict[str, int]
    """Generate configs by inheriting from defaults; overrides win over defaults."""
    config = DEFAULT_CONFIG.copy()
    # dict.update replaces the manual key-by-key copy loop
    config.update(overrides or {})
    return config
Generate configs by inheriting from defaults
80
9
21,035
def read_ical(self, ical_file_location):
    # type: (str) -> Calendar
    """Read the ical file into self.cal and return the parsed Calendar."""
    with open(ical_file_location, 'r') as ical_file:
        data = ical_file.read()
    self.cal = Calendar.from_ical(data)
    return self.cal
Read the ical file
69
5
21,036
def read_csv(self, csv_location, csv_configs=None):
    # type: (str, Dict[str, int]) -> List[List[str]]
    """Read the csv file into self.csv_data, dropping the configured header rows."""
    configs = self._generate_configs_from_default(csv_configs)
    with open(csv_location, 'r') as csv_file:
        all_rows = list(csv.reader(csv_file))
    self.csv_data = all_rows[configs['HEADER_COLUMNS_TO_SKIP']:]
    return self.csv_data
Read the csv file
156
5
21,037
def make_ical(self, csv_configs=None):
    # type: (Dict[str, int]) -> Calendar
    """Make iCal entries: one Event per csv row, into a fresh Calendar."""
    csv_configs = self._generate_configs_from_default(csv_configs)
    self.cal = Calendar()
    for row in self.csv_data:
        event = Event()
        # column positions come from the (possibly overridden) configs
        event.add('summary', row[csv_configs['CSV_NAME']])
        event.add('dtstart', row[csv_configs['CSV_START_DATE']])
        event.add('dtend', row[csv_configs['CSV_END_DATE']])
        event.add('description', row[csv_configs['CSV_DESCRIPTION']])
        event.add('location', row[csv_configs['CSV_LOCATION']])
        self.cal.add_component(event)
    return self.cal
Make iCal entries
226
4
21,038
def save_ical(self, ical_location):
    # type: (str) -> None
    """Save the calendar instance to a file."""
    serialized = self.cal.to_ical().decode('utf-8')
    with open(ical_location, 'w') as ical_file:
        ical_file.write(serialized)
Save the calendar instance to a file
68
7
21,039
def save_csv(self, csv_location):
    # type: (str) -> None
    """Save the csv data to a file."""
    with open(csv_location, 'w') as csv_handle:
        csv.writer(csv_handle).writerows(self.csv_data)
Save the csv to a file
68
7
21,040
def open(cls, filename):
    """Read an image file from disk, transparently handling .gz and .bz2.

    NOTE(review): for the compressed branches fp is closed right after cls(...)
    returns — this assumes the constructor consumes the stream eagerly; confirm.
    """
    if filename.endswith('.gz'):
        fp = gzip.open(filename, 'rb')
        try:
            return cls(fp, filename, compression='gz')
        finally:
            fp.close()
    elif filename.endswith('.bz2'):
        fp = bz2.BZ2File(filename, 'rb')
        try:
            return cls(fp, filename, compression='bz2')
        finally:
            fp.close()
    else:
        # builtin open: this method shadows it only inside the class namespace
        with open(filename, 'rb') as fp:
            return cls(fp, filename)
Read an image file from disk
144
6
21,041
def image(self):
    """An Image-like array of self.data convenient for image processing tasks."""
    if self.bands == 1:
        return self.data.squeeze()
    if self.bands == 3:
        # band-sequential (3, h, w) -> interleaved (h, w, 3)
        return numpy.dstack(self.data)
    # NOTE: other band counts fall through and return None, as before
An Image like array of self . data convenient for image processing tasks
40
13
21,042
def apply_numpy_specials(self, copy=True):
    """Convert isis special pixel values to numpy special pixel values.

    Null values become nan, values below Min become -inf and values above Max
    become inf. With copy=True the converted array is a copy; otherwise
    self.data is promoted to float64 (if needed) and modified in place.
    """
    if copy:
        data = self.data.astype(numpy.float64)
    elif self.data.dtype != numpy.float64:
        data = self.data = self.data.astype(numpy.float64)
    else:
        data = self.data
    data[data == self.specials['Null']] = numpy.nan
    # -numpy.inf instead of numpy.NINF: the NINF alias was removed in NumPy 2.0
    data[data < self.specials['Min']] = -numpy.inf
    data[data > self.specials['Max']] = numpy.inf
    return data
Convert isis special pixel values to numpy special pixel values .
133
14
21,043
def parse(cls, value, record_bytes):
    """Parse the pointer label.

    value may be a plain string (name, offset 0), a one-element list ([name]),
    a two-element list ([name, bytes]), or a bare byte/record count. Raises
    ValueError for lists of any other length.
    """
    # six.string_types replaced by str: Python 2 support is long gone and six
    # was the only non-stdlib dependency of this method
    if isinstance(value, str):
        return cls(value, 0)
    if isinstance(value, list):
        if len(value) == 1:
            return cls(value[0], 0)
        if len(value) == 2:
            return cls(value[0], cls._parse_bytes(value[1], record_bytes))
        raise ValueError('Unsupported pointer type')
    return cls(None, cls._parse_bytes(value, record_bytes))
Parses the pointer label .
129
7
21,044
def _save(self, file_to_write, overwrite):
    """Save PDS3Image object as PDS3 file.

    Serializes the label, pads up to the image record boundary with spaces,
    then dumps the raw array data.
    """
    if overwrite:
        file_to_write = self.filename
    elif os.path.isfile(file_to_write):
        msg = 'File ' + file_to_write + ' already exists !\n' + \
              'Call save() with "overwrite = True" to overwrite the file.'
        raise IOError(msg)
    encoder = pvl.encoder.PDSLabelEncoder
    serial_label = pvl.dumps(self.label, cls=encoder)
    label_sz = len(serial_label)
    # the image starts at the first record after the label
    image_pointer = int(label_sz / self.label['RECORD_BYTES']) + 1
    self.label['^IMAGE'] = image_pointer + 1
    # NOTE(review): _sample_bytes compared against SAMPLE_BITS * 8 — the units
    # (bytes vs bits*8) look inconsistent; confirm against the class fields.
    if self._sample_bytes != self.label['IMAGE']['SAMPLE_BITS'] * 8:
        self.label['IMAGE']['SAMPLE_BITS'] = self.data.itemsize * 8
    sample_type_to_save = self.DTYPES[self._sample_type[0] + self.dtype.kind]
    self.label['IMAGE']['SAMPLE_TYPE'] = sample_type_to_save
    if len(self.data.shape) == 3:
        self.label['IMAGE']['BANDS'] = self.data.shape[0]
        self.label['IMAGE']['LINES'] = self.data.shape[1]
        self.label['IMAGE']['LINE_SAMPLES'] = self.data.shape[2]
    else:
        self.label['IMAGE']['BANDS'] = 1
        self.label['IMAGE']['LINES'] = self.data.shape[0]
        self.label['IMAGE']['LINE_SAMPLES'] = self.data.shape[1]
    # updating the label may have changed its serialized size; track the delta
    diff = 0
    if len(pvl.dumps(self.label, cls=encoder)) != label_sz:
        diff = abs(label_sz - len(pvl.dumps(self.label, cls=encoder)))
    pvl.dump(self.label, file_to_write, cls=encoder)
    # pad with spaces up to the record where the image data must start
    offset = image_pointer * self.label['RECORD_BYTES'] - label_sz
    stream = open(file_to_write, 'a')
    for i in range(0, offset + diff):
        stream.write(" ")
    if (self._bands > 1 and self._format != 'BAND_SEQUENTIAL'):
        raise NotImplementedError
    else:
        self.data.tofile(stream, format='%' + self.dtype.kind)
    stream.close()
Save PDS3Image object as PDS3 file .
638
12
21,045
def _create_label(self, array):
    """Create a sample PDS3 label for a NumPy array.

    Used when building a PDS3Image object from a plain NumPy array; the
    statistics fields are zeroed here and filled in by _update_label.
    """
    if len(array.shape) == 3:
        bands = array.shape[0]
        lines = array.shape[1]
        line_samples = array.shape[2]
    else:
        bands = 1
        lines = array.shape[0]
        line_samples = array.shape[1]
    record_bytes = line_samples * array.itemsize
    label_module = pvl.PVLModule([
        ('PDS_VERSION_ID', 'PDS3'),
        ('RECORD_TYPE', 'FIXED_LENGTH'),
        ('RECORD_BYTES', record_bytes),
        ('LABEL_RECORDS', 1),
        ('^IMAGE', 1),
        ('IMAGE', {'BANDS': bands,
                   'LINES': lines,
                   'LINE_SAMPLES': line_samples,
                   'MAXIMUM': 0,
                   'MEAN': 0,
                   'MEDIAN': 0,
                   'MINIMUM': 0,
                   'SAMPLE_BITS': array.itemsize * 8,
                   'SAMPLE_TYPE': 'MSB_INTEGER',
                   'STANDARD_DEVIATION': 0})])
    return self._update_label(label_module, array)
Create sample PDS3 label for NumPy Array . It is called by image . py to create PDS3Image object from Numpy Array .
294
30
21,046
def _update_label(self, label, array):
    """Update the PDS3 label for a NumPy array.

    Called by _create_label to fill ^IMAGE / LABEL_RECORDS (derived from the
    serialized label size) and the MEAN / MAXIMUM / MEDIAN / MINIMUM /
    STANDARD_DEVIATION statistics.
    """
    maximum = float(numpy.max(array))
    mean = float(numpy.mean(array))
    median = float(numpy.median(array))
    minimum = float(numpy.min(array))
    # ddof=1: sample standard deviation
    stdev = float(numpy.std(array, ddof=1))
    encoder = pvl.encoder.PDSLabelEncoder
    serial_label = pvl.dumps(label, cls=encoder)
    label_sz = len(serial_label)
    image_pointer = int(label_sz / label['RECORD_BYTES']) + 1
    label['^IMAGE'] = image_pointer + 1
    label['LABEL_RECORDS'] = image_pointer
    label['IMAGE']['MEAN'] = mean
    label['IMAGE']['MAXIMUM'] = maximum
    label['IMAGE']['MEDIAN'] = median
    label['IMAGE']['MINIMUM'] = minimum
    label['IMAGE']['STANDARD_DEVIATION'] = stdev
    return label
Update PDS3 label for NumPy Array . It is called by _create_label to update label values such as - ^IMAGE RECORD_BYTES - STANDARD_DEVIATION - MAXIMUM MINIMUM - MEDIAN MEAN
260
53
21,047
def iv(base, counter):
    """Generate an initialization vector: 4-byte prefix + (counter XOR mask)."""
    if (counter >> 64) != 0:
        raise ECEException(u"Counter too big")
    # the last 8 bytes of the base nonce act as a mask for the record counter
    (mask,) = struct.unpack("!Q", base[4:])
    xored = counter ^ mask
    return base[:4] + struct.pack("!Q", xored)
Generate an initialization vector .
70
6
21,048
def encrypt(content, salt=None, key=None, private_key=None, dh=None,
            auth_secret=None, keyid=None, keylabel="P-256", rs=4096,
            version="aes128gcm"):
    """Encrypt a data block.

    The content is split into records of at most rs bytes (minus overhead);
    each record is AES-GCM encrypted with a per-record IV derived from the
    record counter. For the aes128gcm scheme the binary header (salt, rs,
    keyid) is prepended to the result.
    """
    def encrypt_record(key, nonce, counter, buf, last):
        # one AES-GCM record; the padding delimiter depends on the scheme version
        encryptor = Cipher(
            algorithms.AES(key),
            modes.GCM(iv(nonce, counter)),
            backend=default_backend()).encryptor()
        if version == 'aes128gcm':
            data = encryptor.update(buf + (b'\x02' if last else b'\x01'))
        else:
            data = encryptor.update((b"\x00" * versions[version]['pad']) + buf)
        data += encryptor.finalize()
        data += encryptor.tag
        return data

    def compose_aes128gcm(salt, content, rs, keyid):
        """Compose the header and content of an aes128gcm encrypted message body

        :param salt: The sender's salt value
        :type salt: str
        :param content: The encrypted body of the message
        :type content: str
        :param rs: Override for the content length
        :type rs: int
        :param keyid: The keyid to use for this message
        :type keyid: str
        """
        if len(keyid) > 255:
            raise ECEException("keyid is too long")
        header = salt
        if rs > MAX_RECORD_SIZE:
            raise ECEException("Too much content")
        header += struct.pack("!L", rs)
        header += struct.pack("!B", len(keyid))
        header += keyid
        return header + content

    if version not in versions:
        raise ECEException(u"Invalid version")
    if salt is None:
        salt = os.urandom(16)
    (key_, nonce_) = derive_key("encrypt", version=version, salt=salt, key=key,
                                private_key=private_key, dh=dh,
                                auth_secret=auth_secret, keyid=keyid,
                                keylabel=keylabel)
    # usable payload per record = rs minus padding (and GCM tag for aes128gcm)
    overhead = versions[version]['pad']
    if version == 'aes128gcm':
        overhead += 16
        end = len(content)
    else:
        end = len(content) + 1
    if rs <= overhead:
        raise ECEException(u"Record size too small")
    chunk_size = rs - overhead
    result = b""
    counter = 0
    # the extra one on the loop ensures that we produce a padding only
    # record if the data length is an exact multiple of the chunk size
    for i in list(range(0, end, chunk_size)):
        result += encrypt_record(key_, nonce_, counter,
                                 content[i:i + chunk_size],
                                 (i + chunk_size) >= end)
        counter += 1
    if version == "aes128gcm":
        if keyid is None and private_key is not None:
            # no explicit keyid: embed the sender's raw public key point
            kid = private_key.public_key().public_bytes(
                Encoding.X962, PublicFormat.UncompressedPoint)
        else:
            kid = (keyid or '').encode('utf-8')
        return compose_aes128gcm(salt, result, rs, keyid=kid)
    return result
Encrypt a data block
730
5
21,049
def parameters(self, namespaced=False):
    """Return the exception's varlink error parameters."""
    error_payload = self.args[0]
    if not namespaced:
        return error_payload.get('parameters')
    # round-trip through JSON so nested dicts become SimpleNamespace objects
    return json.loads(
        json.dumps(error_payload['parameters']),
        object_hook=lambda d: SimpleNamespace(**d))
returns the exception varlink error parameters
71
8
21,050
def handle(self, message, _server=None, _request=None):
    """Handle any incoming message (generator yielding encoded replies)."""
    if not message:
        return
    if message[-1] == 0:
        # strip the NUL terminator varlink messages end with
        message = message[:-1]
    string = message.decode('utf-8')
    handle = self._handle(json.loads(string), message, _server, _request)
    for out in handle:
        # identity comparison with None ('== None' is unidiomatic and can be
        # hijacked by a custom __eq__)
        if out is None:
            return
        try:
            yield json.dumps(out, cls=VarlinkEncoder).encode('utf-8')
        except ConnectionError as e:
            # propagate the connection error into the handler generator so it
            # can clean up; swallow its resulting StopIteration
            try:
                handle.throw(e)
            except StopIteration:
                pass
This generator function handles any incoming message .
132
8
21,051
def server_close(self):
    """Clean up the server: remove the socket file (best effort) and close the socket."""
    if self.remove_file:
        try:
            os.remove(self.remove_file)
        except OSError:
            # narrowed from the historical bare 'except:' — only file removal
            # errors are expected and safe to ignore here
            pass
    self.socket.close()
Called to clean - up the server .
37
9
21,052
def open(self, interface_name, namespaced=False, connection=None):
    """Open a new connection and get a client interface handle with the varlink methods installed.

    Raises InterfaceNotFound when the service does not expose the interface.
    """
    if not connection:
        connection = self.open_connection()
    if interface_name not in self._interfaces:
        # lazily fetch the interface description from the service
        self.get_interface(interface_name, socket_connection=connection)
    if interface_name not in self._interfaces:
        raise InterfaceNotFound(interface_name)
    return self.handler(self._interfaces[interface_name], connection, namespaced=namespaced)
Open a new connection and get a client interface handle with the varlink methods installed .
105
17
21,053
def get_interfaces ( self , socket_connection = None ) : if not socket_connection : socket_connection = self . open_connection ( ) close_socket = True else : close_socket = False # noinspection PyUnresolvedReferences _service = self . handler ( self . _interfaces [ "org.varlink.service" ] , socket_connection ) self . info = _service . GetInfo ( ) if close_socket : socket_connection . close ( ) return self . info [ 'interfaces' ]
Returns the a list of Interface objects the service implements .
114
11
21,054
def add_interface ( self , interface ) : if not isinstance ( interface , Interface ) : raise TypeError self . _interfaces [ interface . name ] = interface
Manually add or overwrite an interface definition from an Interface object .
35
13
21,055
def create ( atoms_list , N , L , cutoff = 0 , all_atomtypes = [ ] ) : myAlphas , myBetas = genBasis . getBasisFunc ( cutoff , N ) # get information about feature length n_datapoints = len ( atoms_list ) atoms = atoms_list [ 0 ] x = get_lastatom_soap ( atoms , cutoff , myAlphas , myBetas , N , L , all_atomtypes = all_atomtypes ) n_features = x . shape [ 1 ] print ( "soap first" , x . shape ) print ( n_datapoints , n_features ) soapmatrix = np . zeros ( ( n_datapoints , n_features ) ) i = - 1 for atoms in atoms_list : i += 1 #atoms print ( "Processing " + str ( atoms . info ) , " Run time: " + str ( time . time ( ) - t0_total ) , end = "\r" ) soapmatrix [ i , : ] = get_lastatom_soap ( atoms , cutoff , myAlphas , myBetas , N , L , all_atomtypes = all_atomtypes ) print ( "" ) # infos print ( "shape" , soapmatrix . shape ) return soapmatrix
Takes a trajectory xyz file and writes soap features
289
11
21,056
def getPoly ( rCut , nMax ) : rCutVeryHard = rCut + 5.0 rx = 0.5 * rCutVeryHard * ( x + 1 ) basisFunctions = [ ] for i in range ( 1 , nMax + 1 ) : basisFunctions . append ( lambda rr , i = i , rCut = rCut : ( rCut - np . clip ( rr , 0 , rCut ) ) ** ( i + 2 ) ) # Calculate the overlap of the different polynomial functions in a # matrix S. These overlaps defined through the dot product over the # radial coordinate are analytically calculable: Integrate[(rc - r)^(a # + 2) (rc - r)^(b + 2) r^2, {r, 0, rc}]. Then the weights B that make # the basis orthonormal are given by B=S^{-1/2} S = np . zeros ( ( nMax , nMax ) ) for i in range ( 1 , nMax + 1 ) : for j in range ( 1 , nMax + 1 ) : S [ i - 1 , j - 1 ] = ( 2 * ( rCut ) ** ( 7 + i + j ) ) / ( ( 5 + i + j ) * ( 6 + i + j ) * ( 7 + i + j ) ) betas = sqrtm ( np . linalg . inv ( S ) ) # If the result is complex, the calculation is currently halted. if ( betas . dtype == np . complex128 ) : raise ValueError ( "Could not calculate normalization factors for the polynomial basis" " in the domain of real numbers. Lowering the number of radial " "basis functions is advised." ) fs = np . zeros ( [ nMax , len ( x ) ] ) for n in range ( 1 , nMax + 1 ) : fs [ n - 1 , : ] = ( rCut - np . clip ( rx , 0 , rCut ) ) ** ( n + 2 ) gss = np . dot ( betas , fs ) return nMax , rx , gss
Used to calculate discrete vectors for the polynomial basis functions .
465
13
21,057
def _format_ase2clusgeo ( obj , all_atomtypes = None ) : #atoms metadata totalAN = len ( obj ) if all_atomtypes is not None : atomtype_set = set ( all_atomtypes ) else : atomtype_set = set ( obj . get_atomic_numbers ( ) ) atomtype_lst = np . sort ( list ( atomtype_set ) ) n_atoms_per_type_lst = [ ] pos_lst = [ ] for atomtype in atomtype_lst : condition = obj . get_atomic_numbers ( ) == atomtype pos_onetype = obj . get_positions ( ) [ condition ] n_onetype = pos_onetype . shape [ 0 ] # store data in lists pos_lst . append ( pos_onetype ) n_atoms_per_type_lst . append ( n_onetype ) typeNs = n_atoms_per_type_lst Ntypes = len ( n_atoms_per_type_lst ) atomtype_lst Apos = np . concatenate ( pos_lst ) . ravel ( ) return Apos , typeNs , Ntypes , atomtype_lst , totalAN
Takes an ase Atoms object and returns numpy arrays and integers which are read by the internal clusgeo . Apos is currently a flattened out numpy array
280
36
21,058
def get_soap_structure ( obj , alp , bet , rCut = 5.0 , nMax = 5 , Lmax = 5 , crossOver = True , all_atomtypes = None , eta = 1.0 ) : Hpos = obj . get_positions ( ) arrsoap = get_soap_locals ( obj , Hpos , alp , bet , rCut , nMax , Lmax , crossOver , all_atomtypes = all_atomtypes , eta = eta ) return arrsoap
Get the RBF basis SOAP output for atoms in a finite structure .
118
15
21,059
def get_periodic_soap_locals ( obj , Hpos , alp , bet , rCut = 5.0 , nMax = 5 , Lmax = 5 , crossOver = True , all_atomtypes = None , eta = 1.0 ) : suce = _get_supercell ( obj , rCut ) arrsoap = get_soap_locals ( suce , Hpos , alp , bet , rCut , nMax = nMax , Lmax = Lmax , crossOver = crossOver , all_atomtypes = all_atomtypes , eta = eta ) return arrsoap
Get the RBF basis SOAP output for the given position in a periodic system .
137
17
21,060
def orbit_gen ( self ) : if self . norbits == 1 : yield self else : for i in range ( self . norbits ) : yield self [ : , i ]
Generator for iterating over each orbit .
38
9
21,061
def zmax ( self , return_times = False , func = np . mean , interp_kwargs = None , minimize_kwargs = None , approximate = False ) : if return_times and func is not None : raise ValueError ( "Cannot return times if reducing " "using an input function. Pass `func=None` if " "you want to return all individual values " "and times." ) if func is None : reduce = False func = lambda x : x else : reduce = True # time must increase if self . t [ - 1 ] < self . t [ 0 ] : self = self [ : : - 1 ] vals = [ ] times = [ ] for orbit in self . orbit_gen ( ) : v , t = orbit . _max_helper ( np . abs ( orbit . cylindrical . z ) , interp_kwargs = interp_kwargs , minimize_kwargs = minimize_kwargs , approximate = approximate ) vals . append ( func ( v ) ) times . append ( t ) return self . _max_return_helper ( vals , times , return_times , reduce )
Estimate the maximum z height of the orbit by identifying local maxima in the absolute value of the z position and interpolating between timesteps near the maxima .
245
34
21,062
def eccentricity ( self , * * kw ) : ra = self . apocenter ( * * kw ) rp = self . pericenter ( * * kw ) return ( ra - rp ) / ( ra + rp )
r Returns the eccentricity computed from the mean apocenter and mean pericenter .
53
18
21,063
def estimate_period ( self , radial = True ) : if self . t is None : raise ValueError ( "To compute the period, a time array is needed." " Specify a time array when creating this object." ) if radial : r = self . physicsspherical . r . value if self . norbits == 1 : T = peak_to_peak_period ( self . t . value , r ) T = T * self . t . unit else : T = [ peak_to_peak_period ( self . t . value , r [ : , n ] ) for n in range ( r . shape [ 1 ] ) ] T = T * self . t . unit else : raise NotImplementedError ( "sorry 'bout that..." ) return T
Estimate the period of the orbit . By default computes the radial period . If radial == False this returns period estimates for each dimension of the orbit .
164
31
21,064
def circulation ( self ) : L = self . angular_momentum ( ) # if only 2D, add another empty axis if L . ndim == 2 : single_orbit = True L = L [ ... , None ] else : single_orbit = False ndim , ntimes , norbits = L . shape # initial angular momentum L0 = L [ : , 0 ] # see if at any timestep the sign has changed circ = np . ones ( ( ndim , norbits ) ) for ii in range ( ndim ) : cnd = ( np . sign ( L0 [ ii ] ) != np . sign ( L [ ii , 1 : ] ) ) | ( np . abs ( L [ ii , 1 : ] ) . value < 1E-13 ) ix = np . atleast_1d ( np . any ( cnd , axis = 0 ) ) circ [ ii , ix ] = 0 circ = circ . astype ( int ) if single_orbit : return circ . reshape ( ( ndim , ) ) else : return circ
Determine which axes the Orbit circulates around by checking whether there is a change of sign of the angular momentum about an axis . Returns a 2D array with ndim integers per orbit point . If a box orbit all integers will be 0 . A 1 indicates circulation about the corresponding axis .
228
59
21,065
def align_circulation_with_z ( self , circulation = None ) : if circulation is None : circulation = self . circulation ( ) circulation = atleast_2d ( circulation , insert_axis = 1 ) cart = self . cartesian pos = cart . xyz vel = np . vstack ( ( cart . v_x . value [ None ] , cart . v_y . value [ None ] , cart . v_z . value [ None ] ) ) * cart . v_x . unit if pos . ndim < 3 : pos = pos [ ... , np . newaxis ] vel = vel [ ... , np . newaxis ] if ( circulation . shape [ 0 ] != self . ndim or circulation . shape [ 1 ] != pos . shape [ 2 ] ) : raise ValueError ( "Shape of 'circulation' array should match the " "shape of the position/velocity (minus the time " "axis)." ) new_pos = pos . copy ( ) new_vel = vel . copy ( ) for n in range ( pos . shape [ 2 ] ) : if circulation [ 2 , n ] == 1 or np . all ( circulation [ : , n ] == 0 ) : # already circulating about z or box orbit continue if sum ( circulation [ : , n ] ) > 1 : logger . warning ( "Circulation about multiple axes - are you sure " "the orbit has been integrated for long enough?" ) if circulation [ 0 , n ] == 1 : circ = 0 elif circulation [ 1 , n ] == 1 : circ = 1 else : raise RuntimeError ( "Should never get here..." ) new_pos [ circ , : , n ] = pos [ 2 , : , n ] new_pos [ 2 , : , n ] = pos [ circ , : , n ] new_vel [ circ , : , n ] = vel [ 2 , : , n ] new_vel [ 2 , : , n ] = vel [ circ , : , n ] return self . __class__ ( pos = new_pos . reshape ( cart . xyz . shape ) , vel = new_vel . reshape ( cart . xyz . shape ) , t = self . t , hamiltonian = self . hamiltonian )
If the input orbit is a tube orbit this function aligns the circulation axis with the z axis and returns a copy .
476
24
21,066
def greatcircle_to_greatcircle ( from_greatcircle_coord , to_greatcircle_frame ) : # This transform goes through the parent frames on each side. # from_frame -> from_frame.origin -> to_frame.origin -> to_frame intermediate_from = from_greatcircle_coord . transform_to ( from_greatcircle_coord . pole ) intermediate_to = intermediate_from . transform_to ( to_greatcircle_frame . pole ) return intermediate_to . transform_to ( to_greatcircle_frame )
Transform between two greatcircle frames .
117
7
21,067
def reference_to_greatcircle ( reference_frame , greatcircle_frame ) : # Define rotation matrices along the position angle vector, and # relative to the origin. pole = greatcircle_frame . pole . transform_to ( coord . ICRS ) ra0 = greatcircle_frame . ra0 center = greatcircle_frame . center R_rot = rotation_matrix ( greatcircle_frame . rotation , 'z' ) if not np . isnan ( ra0 ) : xaxis = np . array ( [ np . cos ( ra0 ) , np . sin ( ra0 ) , 0. ] ) zaxis = pole . cartesian . xyz . value if np . abs ( zaxis [ 2 ] ) >= 1e-15 : xaxis [ 2 ] = - ( zaxis [ 0 ] * xaxis [ 0 ] + zaxis [ 1 ] * xaxis [ 1 ] ) / zaxis [ 2 ] # what? else : xaxis [ 2 ] = 0. xaxis = xaxis / np . sqrt ( np . sum ( xaxis ** 2 ) ) yaxis = np . cross ( zaxis , xaxis ) R = np . stack ( ( xaxis , yaxis , zaxis ) ) elif center is not None : R1 = rotation_matrix ( pole . ra , 'z' ) R2 = rotation_matrix ( 90 * u . deg - pole . dec , 'y' ) Rtmp = matrix_product ( R2 , R1 ) rot = center . cartesian . transform ( Rtmp ) rot_lon = rot . represent_as ( coord . UnitSphericalRepresentation ) . lon R3 = rotation_matrix ( rot_lon , 'z' ) R = matrix_product ( R3 , R2 , R1 ) else : R1 = rotation_matrix ( pole . ra , 'z' ) R2 = rotation_matrix ( pole . dec , 'y' ) R = matrix_product ( R2 , R1 ) return matrix_product ( R_rot , R )
Convert a reference coordinate to a great circle frame .
442
11
21,068
def pole_from_endpoints ( coord1 , coord2 ) : c1 = coord1 . cartesian / coord1 . cartesian . norm ( ) coord2 = coord2 . transform_to ( coord1 . frame ) c2 = coord2 . cartesian / coord2 . cartesian . norm ( ) pole = c1 . cross ( c2 ) pole = pole / pole . norm ( ) return coord1 . frame . realize_frame ( pole )
Compute the pole from a great circle that connects the two specified coordinates .
97
15
21,069
def sph_midpoint ( coord1 , coord2 ) : c1 = coord1 . cartesian / coord1 . cartesian . norm ( ) coord2 = coord2 . transform_to ( coord1 . frame ) c2 = coord2 . cartesian / coord2 . cartesian . norm ( ) midpt = 0.5 * ( c1 + c2 ) usph = midpt . represent_as ( coord . UnitSphericalRepresentation ) return coord1 . frame . realize_frame ( usph )
Compute the midpoint between two points on the sphere .
110
12
21,070
def get_uv_tan ( c ) : l = c . spherical . lon b = c . spherical . lat p = np . array ( [ - np . sin ( l ) , np . cos ( l ) , np . zeros_like ( l . value ) ] ) . T q = np . array ( [ - np . cos ( l ) * np . sin ( b ) , - np . sin ( l ) * np . sin ( b ) , np . cos ( b ) ] ) . T return np . stack ( ( p , q ) , axis = - 1 )
Get tangent plane basis vectors on the unit sphere at the given spherical coordinates .
125
16
21,071
def transform_pm_cov ( c , cov , to_frame ) : if c . isscalar and cov . shape != ( 2 , 2 ) : raise ValueError ( 'If input coordinate object is a scalar coordinate, ' 'the proper motion covariance matrix must have shape ' '(2, 2), not {}' . format ( cov . shape ) ) elif not c . isscalar and len ( c ) != cov . shape [ 0 ] : raise ValueError ( 'Input coordinates and covariance matrix must have ' 'the same number of entries ({} vs {}).' . format ( len ( c ) , cov . shape [ 0 ] ) ) # 3D rotation matrix, to be projected onto the tangent plane if hasattr ( c , 'frame' ) : frame = c . frame else : frame = c R = get_transform_matrix ( frame . __class__ , to_frame ) # Get input coordinates in the desired frame: c_to = c . transform_to ( to_frame ) # Get tangent plane coordinates: uv_in = get_uv_tan ( c ) uv_to = get_uv_tan ( c_to ) if not c . isscalar : G = np . einsum ( 'nab,nac->nbc' , uv_to , np . einsum ( 'ji,nik->njk' , R , uv_in ) ) # transform cov_to = np . einsum ( 'nba,nac->nbc' , G , np . einsum ( 'nij,nki->njk' , cov , G ) ) else : G = np . einsum ( 'ab,ac->bc' , uv_to , np . einsum ( 'ji,ik->jk' , R , uv_in ) ) # transform cov_to = np . einsum ( 'ba,ac->bc' , G , np . einsum ( 'ij,ki->jk' , cov , G ) ) return cov_to
Transform a proper motion covariance matrix to a new frame .
448
12
21,072
def rodrigues_axis_angle_rotate ( x , vec , theta ) : x = np . array ( x ) . T vec = np . array ( vec ) . T theta = np . array ( theta ) . T [ ... , None ] out = np . cos ( theta ) * x + np . sin ( theta ) * np . cross ( vec , x ) + ( 1 - np . cos ( theta ) ) * ( vec * x ) . sum ( axis = - 1 ) [ ... , None ] * vec return out . T
Rotated the input vector or set of vectors x around the axis vec by the angle theta .
122
20
21,073
def z_angle_rotate ( xy , theta ) : xy = np . array ( xy ) . T theta = np . array ( theta ) . T out = np . zeros_like ( xy ) out [ ... , 0 ] = np . cos ( theta ) * xy [ ... , 0 ] - np . sin ( theta ) * xy [ ... , 1 ] out [ ... , 1 ] = np . sin ( theta ) * xy [ ... , 0 ] + np . cos ( theta ) * xy [ ... , 1 ] return out . T
Rotated the input vector or set of vectors xy by the angle theta .
131
17
21,074
def static_to_constantrotating ( frame_i , frame_r , w , t = None ) : return _constantrotating_static_helper ( frame_r = frame_r , frame_i = frame_i , w = w , t = t , sign = 1. )
Transform from an inertial static frame to a rotating frame .
66
12
21,075
def constantrotating_to_static ( frame_r , frame_i , w , t = None ) : return _constantrotating_static_helper ( frame_r = frame_r , frame_i = frame_i , w = w , t = t , sign = - 1. )
Transform from a constantly rotating frame to a static inertial frame .
66
13
21,076
def to_dict ( potential ) : from . . import potential as gp if isinstance ( potential , gp . CompositePotential ) : d = dict ( ) d [ 'class' ] = potential . __class__ . __name__ d [ 'components' ] = [ ] for k , p in potential . items ( ) : comp_dict = _to_dict_help ( p ) comp_dict [ 'name' ] = k d [ 'components' ] . append ( comp_dict ) if potential . __class__ . __name__ == 'CompositePotential' or potential . __class__ . __name__ == 'CCompositePotential' : d [ 'type' ] = 'composite' else : d [ 'type' ] = 'custom' else : d = _to_dict_help ( potential ) return d
Turn a potential object into a dictionary that fully specifies the state of the object .
184
16
21,077
def _prepare_ws ( self , w0 , mmap , n_steps ) : from . . dynamics import PhaseSpacePosition if not isinstance ( w0 , PhaseSpacePosition ) : w0 = PhaseSpacePosition . from_w ( w0 ) arr_w0 = w0 . w ( self . _func_units ) self . ndim , self . norbits = arr_w0 . shape self . ndim = self . ndim // 2 return_shape = ( 2 * self . ndim , n_steps + 1 , self . norbits ) if mmap is None : # create the return arrays ws = np . zeros ( return_shape , dtype = float ) else : if mmap . shape != return_shape : raise ValueError ( "Shape of memory-mapped array doesn't match " "expected shape of return array ({} vs {})" . format ( mmap . shape , return_shape ) ) if not mmap . flags . writeable : raise TypeError ( "Memory-mapped array must be a writable mode, " " not '{}'" . format ( mmap . mode ) ) ws = mmap return w0 , arr_w0 , ws
Decide how to make the return array . If mmap is False this returns a full array of zeros but with the correct shape as the output . If mmap is True return a pointer to a memory - mapped array . The latter is particularly useful for integrating a large number of orbits or integrating a large number of time steps .
261
67
21,078
def fast_lyapunov_max ( w0 , hamiltonian , dt , n_steps , d0 = 1e-5 , n_steps_per_pullback = 10 , noffset_orbits = 2 , t1 = 0. , atol = 1E-10 , rtol = 1E-10 , nmax = 0 , return_orbit = True ) : from . lyapunov import dop853_lyapunov_max , dop853_lyapunov_max_dont_save # TODO: remove in v1.0 if isinstance ( hamiltonian , PotentialBase ) : from . . potential import Hamiltonian hamiltonian = Hamiltonian ( hamiltonian ) if not hamiltonian . c_enabled : raise TypeError ( "Input Hamiltonian must contain a C-implemented " "potential and frame." ) if not isinstance ( w0 , PhaseSpacePosition ) : w0 = np . asarray ( w0 ) ndim = w0 . shape [ 0 ] // 2 w0 = PhaseSpacePosition ( pos = w0 [ : ndim ] , vel = w0 [ ndim : ] ) _w0 = np . squeeze ( w0 . w ( hamiltonian . units ) ) if _w0 . ndim > 1 : raise ValueError ( "Can only compute fast Lyapunov exponent for a single orbit." ) if return_orbit : t , w , l = dop853_lyapunov_max ( hamiltonian , _w0 , dt , n_steps + 1 , t1 , d0 , n_steps_per_pullback , noffset_orbits , atol , rtol , nmax ) w = np . rollaxis ( w , - 1 ) try : tunit = hamiltonian . units [ 'time' ] except ( TypeError , AttributeError ) : tunit = u . dimensionless_unscaled orbit = Orbit . from_w ( w = w , units = hamiltonian . units , t = t * tunit , hamiltonian = hamiltonian ) return l / tunit , orbit else : l = dop853_lyapunov_max_dont_save ( hamiltonian , _w0 , dt , n_steps + 1 , t1 , d0 , n_steps_per_pullback , noffset_orbits , atol , rtol , nmax ) try : tunit = hamiltonian . units [ 'time' ] except ( TypeError , AttributeError ) : tunit = u . dimensionless_unscaled return l / tunit
Compute the maximum Lyapunov exponent using a C - implemented estimator that uses the DOPRI853 integrator .
578
27
21,079
def surface_of_section ( orbit , plane_ix , interpolate = False ) : w = orbit . w ( ) if w . ndim == 2 : w = w [ ... , None ] ndim , ntimes , norbits = w . shape H_dim = ndim // 2 p_ix = plane_ix + H_dim if interpolate : raise NotImplementedError ( "Not yet implemented, sorry!" ) # record position on specified plane when orbit crosses all_sos = np . zeros ( ( ndim , norbits ) , dtype = object ) for n in range ( norbits ) : cross_ix = argrelmin ( w [ plane_ix , : , n ] ** 2 ) [ 0 ] cross_ix = cross_ix [ w [ p_ix , cross_ix , n ] > 0. ] sos = w [ : , cross_ix , n ] for j in range ( ndim ) : all_sos [ j , n ] = sos [ j , : ] return all_sos
Generate and return a surface of section from the given orbit .
225
13
21,080
def _remove_units ( self , x ) : if hasattr ( x , 'unit' ) : x = x . decompose ( self . units ) . value else : x = np . array ( x ) return x
Always returns an array . If a Quantity is passed in it converts to the units associated with this object and returns the value .
47
25
21,081
def mass_enclosed ( self , q , t = 0. ) : q = self . _remove_units_prepare_shape ( q ) orig_shape , q = self . _get_c_valid_arr ( q ) t = self . _validate_prepare_time ( t , q ) # small step-size in direction of q h = 1E-3 # MAGIC NUMBER # Radius r = np . sqrt ( np . sum ( q ** 2 , axis = 1 ) ) epsilon = h * q / r [ : , np . newaxis ] dPhi_dr_plus = self . _energy ( q + epsilon , t = t ) dPhi_dr_minus = self . _energy ( q - epsilon , t = t ) diff = ( dPhi_dr_plus - dPhi_dr_minus ) if isinstance ( self . units , DimensionlessUnitSystem ) : Gee = 1. else : Gee = G . decompose ( self . units ) . value Menc = np . abs ( r * r * diff / Gee / ( 2. * h ) ) Menc = Menc . reshape ( orig_shape [ 1 : ] ) sgn = 1. if 'm' in self . parameters and self . parameters [ 'm' ] < 0 : sgn = - 1. return sgn * Menc * self . units [ 'mass' ]
Estimate the mass enclosed within the given position by assuming the potential is spherical .
313
16
21,082
def circular_velocity ( self , q , t = 0. ) : q = self . _remove_units_prepare_shape ( q ) # Radius r = np . sqrt ( np . sum ( q ** 2 , axis = 0 ) ) * self . units [ 'length' ] dPhi_dxyz = self . gradient ( q , t = t ) dPhi_dr = np . sum ( dPhi_dxyz * q / r . value , axis = 0 ) return self . units . decompose ( np . sqrt ( r * np . abs ( dPhi_dr ) ) )
Estimate the circular velocity at the given position assuming the potential is spherical .
136
15
21,083
def format_doc ( * args , * * kwargs ) : def set_docstring ( obj ) : # None means: use the objects __doc__ doc = obj . __doc__ # Delete documentation in this case so we don't end up with # awkwardly self-inserted docs. obj . __doc__ = None # If the original has a not-empty docstring append it to the format # kwargs. kwargs [ '__doc__' ] = obj . __doc__ or '' obj . __doc__ = doc . format ( * args , * * kwargs ) return obj return set_docstring
Replaces the docstring of the decorated object and then formats it .
134
14
21,084
def quantity_to_hdf5 ( f , key , q ) : if hasattr ( q , 'unit' ) : f [ key ] = q . value f [ key ] . attrs [ 'unit' ] = str ( q . unit ) else : f [ key ] = q f [ key ] . attrs [ 'unit' ] = ""
Turn an Astropy Quantity object into something we can write out to an HDF5 file .
76
19
21,085
def get_constant ( self , name ) : try : c = getattr ( const , name ) except AttributeError : raise ValueError ( "Constant name '{}' doesn't exist in astropy.constants" . format ( name ) ) return c . decompose ( self . _core_units ) . value
Retrieve a constant with specified name in this unit system .
70
12
21,086
def atleast_2d ( * arys , * * kwargs ) : insert_axis = kwargs . pop ( 'insert_axis' , 0 ) slc = [ slice ( None ) ] * 2 slc [ insert_axis ] = None slc = tuple ( slc ) res = [ ] for ary in arys : ary = np . asanyarray ( ary ) if len ( ary . shape ) == 0 : result = ary . reshape ( 1 , 1 ) elif len ( ary . shape ) == 1 : result = ary [ slc ] else : result = ary res . append ( result ) if len ( res ) == 1 : return res [ 0 ] else : return res
View inputs as arrays with at least two dimensions .
160
10
21,087
def from_v_theta ( cls , v , theta ) : theta = np . asarray ( theta ) v = np . asarray ( v ) s = np . sin ( 0.5 * theta ) c = np . cos ( 0.5 * theta ) vnrm = np . sqrt ( np . sum ( v * v ) ) q = np . concatenate ( [ [ c ] , s * v / vnrm ] ) return cls ( q )
Create a quaternion from unit vector v and rotation angle theta .
109
15
21,088
def random ( cls ) : s = np . random . uniform ( ) s1 = np . sqrt ( 1 - s ) s2 = np . sqrt ( s ) t1 = np . random . uniform ( 0 , 2 * np . pi ) t2 = np . random . uniform ( 0 , 2 * np . pi ) w = np . cos ( t2 ) * s2 x = np . sin ( t1 ) * s1 y = np . cos ( t1 ) * s1 z = np . sin ( t2 ) * s2 return cls ( [ w , x , y , z ] )
Randomly sample a Quaternion from a distribution uniform in 3D rotation angles .
134
17
21,089
def step ( self , t , x_im1 , v_im1_2 , dt ) : x_i = x_im1 + v_im1_2 * dt F_i = self . F ( t , np . vstack ( ( x_i , v_im1_2 ) ) , * self . _func_args ) a_i = F_i [ self . ndim : ] v_i = v_im1_2 + a_i * dt / 2 v_ip1_2 = v_i + a_i * dt / 2 return x_i , v_i , v_ip1_2
Step forward the positions and velocities by the given timestep .
145
15
21,090
def fit_isochrone ( orbit , m0 = 2E11 , b0 = 1. , minimize_kwargs = None ) : pot = orbit . hamiltonian . potential if pot is None : raise ValueError ( "The orbit object must have an associated potential" ) w = np . squeeze ( orbit . w ( pot . units ) ) if w . ndim > 2 : raise ValueError ( "Input orbit object must be a single orbit." ) def f ( p , w ) : logm , logb = p potential = IsochronePotential ( m = np . exp ( logm ) , b = np . exp ( logb ) , units = pot . units ) H = ( potential . value ( w [ : 3 ] ) . decompose ( pot . units ) . value + 0.5 * np . sum ( w [ 3 : ] ** 2 , axis = 0 ) ) return np . sum ( np . squeeze ( H - np . mean ( H ) ) ** 2 ) logm0 = np . log ( m0 ) logb0 = np . log ( b0 ) if minimize_kwargs is None : minimize_kwargs = dict ( ) minimize_kwargs [ 'x0' ] = np . array ( [ logm0 , logb0 ] ) minimize_kwargs [ 'method' ] = minimize_kwargs . get ( 'method' , 'Nelder-Mead' ) res = minimize ( f , args = ( w , ) , * * minimize_kwargs ) if not res . success : raise ValueError ( "Failed to fit toy potential to orbit." ) logm , logb = np . abs ( res . x ) m = np . exp ( logm ) b = np . exp ( logb ) return IsochronePotential ( m = m , b = b , units = pot . units )
r Fit the toy Isochrone potential to the sum of the energy residuals relative to the mean energy by minimizing the function
400
26
21,091
def fit_harmonic_oscillator ( orbit , omega0 = [ 1. , 1 , 1 ] , minimize_kwargs = None ) : omega0 = np . atleast_1d ( omega0 ) pot = orbit . hamiltonian . potential if pot is None : raise ValueError ( "The orbit object must have an associated potential" ) w = np . squeeze ( orbit . w ( pot . units ) ) if w . ndim > 2 : raise ValueError ( "Input orbit object must be a single orbit." ) def f ( omega , w ) : potential = HarmonicOscillatorPotential ( omega = omega , units = pot . units ) H = ( potential . value ( w [ : 3 ] ) . decompose ( pot . units ) . value + 0.5 * np . sum ( w [ 3 : ] ** 2 , axis = 0 ) ) return np . sum ( np . squeeze ( H - np . mean ( H ) ) ** 2 ) if minimize_kwargs is None : minimize_kwargs = dict ( ) minimize_kwargs [ 'x0' ] = omega0 minimize_kwargs [ 'method' ] = minimize_kwargs . get ( 'method' , 'Nelder-Mead' ) res = minimize ( f , args = ( w , ) , * * minimize_kwargs ) if not res . success : raise ValueError ( "Failed to fit toy potential to orbit." ) best_omega = np . abs ( res . x ) return HarmonicOscillatorPotential ( omega = best_omega , units = pot . units )
r Fit the toy harmonic oscillator potential to the sum of the energy residuals relative to the mean energy by minimizing the function
342
25
21,092
def check_angle_sampling ( nvecs , angles ) : failed_nvecs = [ ] failures = [ ] for i , vec in enumerate ( nvecs ) : # N = np.linalg.norm(vec) # X = np.dot(angles,vec) X = ( angles * vec [ : , None ] ) . sum ( axis = 0 ) diff = float ( np . abs ( X . max ( ) - X . min ( ) ) ) if diff < ( 2. * np . pi ) : warnings . warn ( "Need a longer integration window for mode {0}" . format ( vec ) ) failed_nvecs . append ( vec . tolist ( ) ) # P.append(2.*np.pi - diff) failures . append ( 0 ) elif ( diff / len ( X ) ) > np . pi : warnings . warn ( "Need a finer sampling for mode {0}" . format ( str ( vec ) ) ) failed_nvecs . append ( vec . tolist ( ) ) # P.append(np.pi - diff/len(X)) failures . append ( 1 ) return np . array ( failed_nvecs ) , np . array ( failures )
Returns a list of the index of elements of n which do not have adequate toy angle coverage . The criterion is that we must have at least one sample in each Nyquist box when we project the toy angles along the vector n .
263
46
21,093
def find_actions ( orbit , N_max , force_harmonic_oscillator = False , toy_potential = None ) : if orbit . norbits == 1 : return _single_orbit_find_actions ( orbit , N_max , force_harmonic_oscillator = force_harmonic_oscillator , toy_potential = toy_potential ) else : norbits = orbit . norbits actions = np . zeros ( ( 3 , norbits ) ) angles = np . zeros ( ( 3 , norbits ) ) freqs = np . zeros ( ( 3 , norbits ) ) for n in range ( norbits ) : aaf = _single_orbit_find_actions ( orbit [ : , n ] , N_max , force_harmonic_oscillator = force_harmonic_oscillator , toy_potential = toy_potential ) actions [ n ] = aaf [ 'actions' ] . value angles [ n ] = aaf [ 'angles' ] . value freqs [ n ] = aaf [ 'freqs' ] . value return dict ( actions = actions * aaf [ 'actions' ] . unit , angles = angles * aaf [ 'angles' ] . unit , freqs = freqs * aaf [ 'freqs' ] . unit , Sn = actions [ 3 : ] , dSn = angles [ 6 : ] , nvecs = aaf [ 'nvecs' ] )
r Find approximate actions and angles for samples of a phase - space orbit . Uses toy potentials with known analytic action - angle transformations to approximate the true coordinates as a Fourier sum .
317
37
21,094
def angact_ho ( x , omega ) : action = ( x [ 3 : ] ** 2 + ( omega * x [ : 3 ] ) ** 2 ) / ( 2. * omega ) angle = np . array ( [ np . arctan ( - x [ 3 + i ] / omega [ i ] / x [ i ] ) if x [ i ] != 0. else - np . sign ( x [ 3 + i ] ) * np . pi / 2. for i in range ( 3 ) ] ) for i in range ( 3 ) : if ( x [ i ] < 0 ) : angle [ i ] += np . pi return np . concatenate ( ( action , angle % ( 2. * np . pi ) ) )
Calculate angle and action variable in sho potential with parameter omega
157
13
21,095
def peak_to_peak_period ( t , f , amplitude_threshold = 1E-2 ) : if hasattr ( t , 'unit' ) : t_unit = t . unit t = t . value else : t_unit = u . dimensionless_unscaled # find peaks max_ix = argrelmax ( f , mode = 'wrap' ) [ 0 ] max_ix = max_ix [ ( max_ix != 0 ) & ( max_ix != ( len ( f ) - 1 ) ) ] # find troughs min_ix = argrelmin ( f , mode = 'wrap' ) [ 0 ] min_ix = min_ix [ ( min_ix != 0 ) & ( min_ix != ( len ( f ) - 1 ) ) ] # neglect minor oscillations if abs ( np . mean ( f [ max_ix ] ) - np . mean ( f [ min_ix ] ) ) < amplitude_threshold : return np . nan # compute mean peak-to-peak if len ( max_ix ) > 0 : T_max = np . mean ( t [ max_ix [ 1 : ] ] - t [ max_ix [ : - 1 ] ] ) else : T_max = np . nan # now compute mean trough-to-trough if len ( min_ix ) > 0 : T_min = np . mean ( t [ min_ix [ 1 : ] ] - t [ min_ix [ : - 1 ] ] ) else : T_min = np . nan # then take the mean of these two return np . mean ( [ T_max , T_min ] ) * t_unit
Estimate the period of the input time series by measuring the average peak - to - peak time .
355
20
21,096
def estimate_dt_n_steps(w0, hamiltonian, n_periods, n_steps_per_period,
                        dE_threshold=1E-9, func=np.nanmax,
                        **integrate_kwargs):
    """Estimate the timestep and number of steps needed to integrate an
    orbit for ``n_periods`` dominant periods at ``n_steps_per_period``
    steps per period.

    A probe orbit is first integrated to measure per-coordinate
    peak-to-peak periods; ``func`` reduces those three periods to one.

    Parameters
    ----------
    w0 : PhaseSpacePosition or array_like
        Initial conditions; plain arrays are wrapped using the
        Hamiltonian's unit system.
    hamiltonian : Hamiltonian
        Supplies the unit system and the ``integrate_orbit`` method.
    n_periods : int
        Number of periods the final integration should span.
    n_steps_per_period : int
        Time resolution within one period.
    dE_threshold : float, optional
        Energy-conservation tolerance passed to
        ``_autodetermine_initial_dt``.
    func : callable, optional
        Reduces the array of per-coordinate periods to a single value
        (default ``np.nanmax``, i.e. the longest period).
    **integrate_kwargs
        Extra keyword arguments forwarded to the integrator.

    Returns
    -------
    dt : float
        Timestep in the Hamiltonian's units.
    n_steps : int
        Number of steps covering ``n_periods`` periods.

    Raises
    ------
    RuntimeError
        If no finite period could be measured.
    ValueError
        If the resulting timestep is zero or below 1e-13.
    """
    if not isinstance(w0, PhaseSpacePosition):
        w0 = np.asarray(w0)
        w0 = PhaseSpacePosition.from_w(w0, units=hamiltonian.units)

    # integrate orbit
    dt = _autodetermine_initial_dt(w0, hamiltonian,
                                   dE_threshold=dE_threshold,
                                   **integrate_kwargs)
    # probe integration spans ~10000 time units regardless of dt
    n_steps = int(round(10000 / dt))
    orbit = hamiltonian.integrate_orbit(w0, dt=dt, n_steps=n_steps,
                                        **integrate_kwargs)

    # if loop, align circulation with Z and take R period
    circ = orbit.circulation()
    if np.any(circ):
        # Tube orbit: measure the period of each cylindrical coordinate.
        orbit = orbit.align_circulation_with_z(circulation=circ)
        cyl = orbit.represent_as(coord.CylindricalRepresentation)

        # convert to cylindrical coordinates
        R = cyl.rho.value
        phi = cyl.phi.value
        z = cyl.z.value

        T = np.array([peak_to_peak_period(orbit.t, f).value
                      for f in [R, phi, z]]) * orbit.t.unit
    else:
        # Box orbit: measure the period of each Cartesian coordinate.
        T = np.array([peak_to_peak_period(orbit.t, f).value
                      for f in orbit.pos]) * orbit.t.unit

    # timestep from number of steps per period
    T = func(T)
    if np.isnan(T):
        raise RuntimeError("Failed to find period.")

    T = T.decompose(hamiltonian.units).value
    dt = T / float(n_steps_per_period)
    n_steps = int(round(n_periods * T / dt))

    # NOTE(review): the dt == 0. test is subsumed by dt < 1E-13; kept as-is.
    if dt == 0. or dt < 1E-13:
        raise ValueError("Timestep is zero or very small!")

    return dt, n_steps
Estimate the timestep and number of steps to integrate an orbit for given its initial conditions and a potential object .
487
24
21,097
def reflex_correct(coords, galactocentric_frame=None):
    """Remove the solar reflex motion from the velocities of *coords*.

    Parameters
    ----------
    coords : coordinate-like
        Anything `~astropy.coordinates.SkyCoord` accepts.
    galactocentric_frame : `~astropy.coordinates.Galactocentric`, optional
        Frame supplying the solar velocity (``galcen_v_sun``); if not
        given, Astropy's default Galactocentric frame is used.

    Returns
    -------
    `~astropy.coordinates.SkyCoord`
        The input coordinates with the solar motion added back in.
    """
    sky = coord.SkyCoord(coords)

    # If not specified, use the Astropy default Galactocentric frame
    if galactocentric_frame is None:
        galactocentric_frame = coord.Galactocentric()
    solar_velocity = galactocentric_frame.galcen_v_sun

    observed = sky.transform_to(galactocentric_frame)
    cart = observed.cartesian.without_differentials()
    # Add the solar velocity onto the observed (reflex-contaminated) velocities.
    cart = cart.with_differentials(
        observed.cartesian.differentials['s'] + solar_velocity)

    corrected = galactocentric_frame.realize_frame(cart).transform_to(sky.frame)
    return coord.SkyCoord(corrected)
Correct the input Astropy coordinate object for solar reflex motion .
174
12
21,098
def plot_projections(x, relative_to=None, autolim=True, axes=None,
                     subplots_kwargs=None, labels=None, plot_function=None,
                     **kwargs):
    """Given an N-dimensional quantity ``x``, plot all 2D projections of
    pairs of its axes onto the lower-triangle panels.

    Parameters
    ----------
    x : array_like
        Data with shape ``(ndim, nsamples)``.
    relative_to : array_like, optional
        Value(s) subtracted from ``x`` before plotting.
    autolim : bool, optional
        Automatically set each panel's limits with 2% padding.
    axes : sequence of matplotlib axes, optional
        Created via ``_get_axes`` if not supplied.
    subplots_kwargs : dict, optional
        Passed to ``_get_axes`` when creating axes.
    labels : sequence of str, optional
        Axis labels, one per dimension.
    plot_function : callable
        The matplotlib plotting function to use, e.g. ``axes.plot`` or
        ``axes.scatter``. Required.
    **kwargs
        Forwarded to ``plot_function``.

    Returns
    -------
    matplotlib.figure.Figure

    Raises
    ------
    ValueError
        If ``plot_function`` is not provided.
    """
    if plot_function is None:
        # BUG FIX: the default of None previously crashed with an opaque
        # AttributeError at plot_function.__name__ -- fail fast instead.
        raise ValueError("A plot_function must be specified, e.g. "
                         "axes[0].plot or axes[0].scatter.")

    # FIX: avoid a mutable default argument for subplots_kwargs
    if subplots_kwargs is None:
        subplots_kwargs = dict()

    # don't propagate changes back to the caller's array
    x = np.array(x, copy=True)
    ndim = x.shape[0]

    # get axes object from arguments
    if axes is None:
        axes = _get_axes(dim=ndim, subplots_kwargs=subplots_kwargs)

    # if the quantities are relative
    if relative_to is not None:
        x -= relative_to

    # name of the plotting function, looked up on each axes object
    plot_fn_name = plot_function.__name__

    # automatically determine limits, padded by 2% of the data span
    if autolim:
        lims = []
        for i in range(ndim):
            max_, min_ = np.max(x[i]), np.min(x[i])
            delta = max_ - min_
            if delta == 0.:
                delta = 1.  # avoid a zero-width axis for constant data
            lims.append([min_ - delta * 0.02, max_ + delta * 0.02])

    k = 0
    for i in range(ndim):
        for j in range(ndim):
            if i >= j:
                continue  # skip diagonal, upper triangle

            plot_func = getattr(axes[k], plot_fn_name)
            plot_func(x[i], x[j], **kwargs)

            if labels is not None:
                axes[k].set_xlabel(labels[i])
                axes[k].set_ylabel(labels[j])

            if autolim:
                axes[k].set_xlim(lims[i])
                axes[k].set_ylim(lims[j])

            k += 1

    axes[0].figure.tight_layout()
    return axes[0].figure
Given N - dimensional quantity x make a figure containing 2D projections of all combinations of the axes .
412
20
21,099
def angmom(x):
    """Return the angular momentum vector ``L = q x p`` of the phase-space
    point ``x = (q1, q2, q3, p1, p2, p3)``.

    The cross product is written out by component so inputs with trailing
    axes (e.g. shape ``(6, N)``) work elementwise, exactly as before.
    """
    q = x[:3]
    p = x[3:]
    return np.array([q[1] * p[2] - q[2] * p[1],
                     q[2] * p[0] - q[0] * p[2],
                     q[0] * p[1] - q[1] * p[0]])
Returns the angular momentum vector of the phase-space point x.
74
11