idx
int64
0
251k
question
stringlengths
53
3.53k
target
stringlengths
5
1.23k
len_question
int64
20
893
len_target
int64
3
238
27,700
def parse_plist(self, preferences_file):
    """Load preferences from a plist file and populate attributes.

    Args:
        preferences_file: Path (may contain ~) to a plist providing at
            least the jss_user, jss_pass, and jss_url keys.

    Raises:
        JSSPrefsMissingKeyError: If any required key is absent.
    """
    preferences_file = os.path.expanduser(preferences_file)
    # Try to open using FoundationPlist. If it's not available,
    # fall back to plistlib and hope it's not binary encoded.
    try:
        prefs = FoundationPlist.readPlist(preferences_file)
    except NameError:
        try:
            prefs = plistlib.readPlist(preferences_file)
        except ExpatError:
            # If we're on OSX, try to convert using another tool.
            if is_osx():
                subprocess.call(
                    ["plutil", "-convert", "xml1", preferences_file])
                prefs = plistlib.readPlist(preferences_file)
            else:
                # Fix: previously `prefs` was left unbound on this path,
                # producing a confusing NameError below. Re-raise the
                # original parse error instead.
                raise
    self.preferences_file = preferences_file
    self.user = prefs.get("jss_user")
    self.password = prefs.get("jss_pass")
    self.url = prefs.get("jss_url")
    if not all([self.user, self.password, self.url]):
        raise JSSPrefsMissingKeyError("Please provide all required "
                                      "preferences!")
    # Optional file repository array. Defaults to empty list.
    self.repos = []
    for repo in prefs.get("repos", []):
        self.repos.append(dict(repo))
    self.verify = prefs.get("verify", True)
    self.suppress_warnings = prefs.get("suppress_warnings", True)
Try to reset preferences from preference_file .
349
9
27,701
def configure(self):
    """Interactively prompt for settings and write them to a plist.

    Populates url, user, password, and verify, then delegates repo
    setup and plist serialization to helper methods.
    """
    root = ElementTree.Element("dict")
    print("It seems like you do not have a preferences file configured. "
          "Please answer the following questions to generate a plist at "
          "%s for use with python-jss." % self.preferences_file)
    self.url = _get_user_input(
        "The complete URL to your JSS, with port (e.g. "
        "'https://mycasperserver.org:8443')\nURL: ", "jss_url", root)
    self.user = _get_user_input("API Username: ", "jss_user", root)
    self.password = _get_user_input(
        "API User's Password: ", "jss_pass", root, getpass.getpass)
    verify_prompt = ("Do you want to verify that traffic is encrypted by "
                     "a certificate that you trust?: (Y|N) ")
    self.verify = _get_user_input(
        verify_prompt, "verify", root, loop_until_valid_response)
    self._handle_repos(root)
    self._write_plist(root)
    # Consistency fix: parenthesized print form (valid on Python 2 and
    # 3), matching the other prints in this method.
    print("Preferences created.\n")
Prompt user for config and write to plist
271
10
27,702
def _handle_repos(self, root):
    """Prompt for and record repository configuration in the plist tree.

    Queries the JSS for file share distribution points, asking only
    for each R/W user's password, then offers JDS and CDP setup.
    """
    ElementTree.SubElement(root, "key").text = "repos"
    repos_array = ElementTree.SubElement(root, "array")
    # Make a temporary jss object to try to pull repo information.
    jss_server = JSS(url=self.url, user=self.user, password=self.password,
                     ssl_verify=self.verify, suppress_warnings=True)
    # Consistency fix: parenthesized print (valid on Python 2 and 3),
    # matching the other prints in this method.
    print("Fetching distribution point info...")
    try:
        dpts = jss_server.DistributionPoint()
    except JSSGetError:
        print("Fetching distribution point info failed. If you want to "
              "configure distribution points, ensure that your API user "
              "has read permissions for distribution points, and that "
              "the URL, username, and password are correct.")
        dpts = None
    if dpts:
        print("There are file share distribution points configured on "
              "your JSS. Most of the configuration can be automated "
              "from the information on the JSS, with the exception of "
              "the password for the R/W user.\n")
        for dpt in dpts:
            repo_dict = ElementTree.SubElement(repos_array, "dict")
            repo_name_key = ElementTree.SubElement(repo_dict, "key")
            repo_name_key.text = "name"
            repo_name_string = ElementTree.SubElement(repo_dict, "string")
            repo_name_string.text = dpt.get("name")
            repo_pass_key = ElementTree.SubElement(repo_dict, "key")
            repo_pass_key.text = "password"
            repo_pass_string = ElementTree.SubElement(repo_dict, "string")
            repo_pass_string.text = getpass.getpass(
                "Please enter the R/W user's password for distribution "
                "point: %s: " % dpt.get("name", "<NO NAME CONFIGURED>"))
    _handle_dist_server("JDS", repos_array)
    _handle_dist_server("CDP", repos_array)
Handle repo configuration .
482
4
27,703
def _write_plist(self, root):
    """Serialize the generated element tree to the preferences file.

    Writes a plist XML prolog, the pretty-printed tree, and the
    closing plist tag.
    """
    # Prettify the XML before serializing.
    indent_xml(root)
    tree = ElementTree.ElementTree(root)
    with open(self.preferences_file, "w") as prefs_file:
        prefs_file.write(
            "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n"
            "<!DOCTYPE plist PUBLIC \"-//Apple//DTD PLIST 1.0//EN\" "
            "\"http://www.apple.com/DTDs/PropertyList-1.0.dtd\">\n"
            "<plist version=\"1.0\">\n")
        tree.write(prefs_file, xml_declaration=False, encoding="utf-8")
        prefs_file.write("</plist>")
Write plist file based on our generated tree .
181
10
27,704
def update(self):
    """Request an updated set of data from casper.jxml.

    POSTs the stored auth to the endpoint, clears any previously
    stored children, and appends the children of the response XML.
    """
    response = self.jss.session.post(self.url, data=self.auth)
    response_xml = ElementTree.fromstring(response.text.encode("utf_8"))
    # Remove previous data, if any, and then add in response's XML.
    self.clear()
    # Fix: iterate over a list of the element's children directly;
    # getchildren() is deprecated and was removed in Python 3.9.
    for child in list(response_xml):
        self.append(child)
Request an updated set of data from casper.jxml.
89
13
27,705
def mount_share_at_path(share_path, mount_path):
    """Mount a share at the specified path and return the mount point."""
    share_url = CFURLCreateWithString(None, share_path, None)
    mountpoint_url = CFURLCreateWithString(None, mount_path, None)
    # Reduced-interaction UI.
    open_options = {NetFS.kNAUIOptionKey: NetFS.kNAUIOptionNoUI}
    # Allow mounting sub-directories of root shares, and mount the
    # share directly at (not under) mount_path.
    mount_options = {NetFS.kNetFSAllowSubMountsKey: True,
                     NetFS.kNetFSMountAtMountDirKey: True}
    # Mount!
    result, output = NetFS.NetFSMountURLSync(
        share_url, mountpoint_url, None, None, open_options,
        mount_options, None)
    # Check if it worked.
    if result != 0:
        raise Exception('Error mounting url "%s" at path "%s": %s'
                        % (share_path, mount_path, output))
    # Return the mountpath.
    return str(output[0])
Mounts a share at the specified path
244
8
27,706
def auto_mounter(original):
    """Decorator that mounts the repository before calling *original*.

    Assumes the wrapped callable is a method whose first positional
    argument exposes is_mounted() and mount().
    """
    from functools import wraps

    # Fix: preserve the wrapped function's name/docstring with wraps().
    @wraps(original)
    def mounter(*args):
        """If not mounted, mount."""
        self = args[0]
        if not self.is_mounted():
            self.mount()
        return original(*args)
    return mounter
Decorator for automatically mounting if needed .
55
9
27,707
def copy_pkg(self, filename, _):
    """Copy a package into the repo's Packages subdirectory.

    The second argument is ignored; it exists for interface parity
    with other repo types.
    """
    destination = os.path.join(self.connection["mount_point"],
                               "Packages", os.path.basename(filename))
    self._copy(filename, destination)
Copy a package to the repo's Packages subdirectory.
57
11
27,708
def copy_script(self, filename, id_=-1):
    """Copy a script into the repo's Scripts subdirectory.

    If the JSS has been migrated, scripts live in the database and are
    uploaded through the API instead of the file share.
    """
    # Idiom fix: test membership on the dict directly instead of
    # building .keys().
    if "jss" in self.connection and self.connection["jss"].jss_migrated:
        self._copy_script_migrated(filename, id_, SCRIPT_FILE_TYPE)
    else:
        basename = os.path.basename(filename)
        self._copy(filename, os.path.join(
            self.connection["mount_point"], "Scripts", basename))
Copy a script to the repo's Scripts subdirectory.
117
11
27,709
def _copy_script_migrated(self, filename, id_=-1,
                          file_type=SCRIPT_FILE_TYPE):
    """Upload a script to a migrated JSS's database via dbfileupload.

    Returns:
        The requests Response from the POST.
    """
    basefname = os.path.basename(filename)
    headers = {"DESTINATION": "1",
               "OBJECT_ID": str(id_),
               "FILE_TYPE": file_type,
               "FILE_NAME": basefname}
    # Fix: close the file handle when the upload finishes instead of
    # leaking it.
    with open(filename, "rb") as resource:
        response = self.connection["jss"].session.post(
            url="%s/%s" % (self.connection["jss"].base_url,
                           "dbfileupload"),
            data=resource,
            headers=headers)
    return response
Upload a script to a migrated JSS's database.
158
11
27,710
def delete(self, filename):
    """Delete a package or script from the repository by file name."""
    subdir = "Packages" if is_package(filename) else "Scripts"
    target = os.path.join(self.connection["mount_point"], subdir, filename)
    if os.path.isdir(target):
        shutil.rmtree(target)
    elif os.path.isfile(target):
        os.remove(target)
Delete a file from the repository .
87
7
27,711
def exists(self, filename):
    """Report whether *filename* exists on the distribution point."""
    subdir = "Packages" if is_package(filename) else "Scripts"
    return os.path.exists(
        os.path.join(self.connection["mount_point"], subdir, filename))
Report whether a file exists on the distribution point .
82
10
27,712
def mount(self):
    """Mount the repository if it is not already mounted."""
    if self.is_mounted():
        return
    # OS X mounting is handled automagically in /Volumes:
    # DO NOT mkdir there!
    # For Linux, ensure the mountpoint exists.
    if not is_osx() and not os.path.exists(self.connection["mount_point"]):
        os.mkdir(self.connection["mount_point"])
    self._mount()
Mount the repository .
97
4
27,713
def umount(self, forced=True):
    """Try to unmount our mount point, optionally forcing it."""
    if not self.is_mounted():
        return
    mount_point = self.connection["mount_point"]
    if is_osx():
        cmd = ["/usr/sbin/diskutil", "unmount", mount_point]
        if forced:
            cmd.insert(2, "force")
    else:
        cmd = ["umount", mount_point]
        if forced:
            cmd.insert(1, "-f")
    subprocess.check_call(cmd)
Try to unmount our mount point .
122
8
27,714
def is_mounted(self):
    """Test whether this repo's share is currently mounted.

    Scans the output of ``mount`` for one of this share's candidate
    mount strings and, when found, updates connection["mount_point"]
    with the discovered path.
    """
    mount_check = subprocess.check_output("mount").splitlines()
    # The mount command returns lines like this on OS X...
    # //username@pretendco.com/JSS%20REPO on /Volumes/JSS REPO
    # (afpfs, nodev, nosuid, mounted by local_me)
    # and like this on Linux...
    # //pretendco.com/jamf on /mnt/jamf type cifs (rw,relatime,
    # <options>...)
    valid_mount_strings = self._get_valid_mount_strings()
    was_mounted = False
    if is_osx():
        mount_string_regex = re.compile(r"\(([\w]*),*.*\)$")
        mount_point_regex = re.compile(r"on ([\w/ -]*) \(.*$")
    elif is_linux():
        mount_string_regex = re.compile(r"type ([\w]*) \(.*\)$")
        mount_point_regex = re.compile(r"on ([\w/ -]*) type .*$")
    else:
        raise JSSError("Unsupported OS.")
    for mount in mount_check:
        fs_match = re.search(mount_string_regex, mount)
        fs_type = fs_match.group(1) if fs_match else None
        # Automounts, non-network shares, and network shares all have
        # a slightly different format, so it's easiest to just split.
        mount_string = mount.split(" on ")[0]
        # Does the mount_string match one of our valid_mount_strings?
        if [mstring for mstring in valid_mount_strings
                if mstring in mount_string] and self.fs_type == fs_type:
            # Get the mount point string between from the end back to
            # the last "on", but before the options (wrapped in
            # parenthesis). Considers alphanumerics, / , _ , - and a
            # blank space as valid, but no crazy chars.
            match = re.search(mount_point_regex, mount)
            mount_point = match.group(1) if match else None
            was_mounted = True
            # Reset the connection's mount point to the discovered
            # value.
            if mount_point:
                self.connection["mount_point"] = mount_point
                if self.connection["jss"].verbose:
                    print("%s is already mounted at %s.\n"
                          % (self.connection["url"], mount_point))
            # We found the share, no need to continue.
            break
    if not was_mounted:
        # If the share is not mounted, check for another share mounted
        # to the same path and if found, incremement the name to avoid
        # conflicts.
        count = 1
        while os.path.ismount(self.connection["mount_point"]):
            self.connection["mount_point"] = "%s-%s" % (
                self.connection["mount_point"], count)
            count += 1
    # Do an inexpensive double check...
    return os.path.ismount(self.connection["mount_point"])
Test for whether a mount point is mounted .
717
9
27,715
def _get_valid_mount_strings(self):
    """Return a tuple of strings under which this share may appear in
    the output of ``mount``.

    Covers the URL, IP address, bare domain name, and FQDN forms,
    each with and without the port.
    """
    results = set()
    join = os.path.join
    url = self.connection["url"]
    share_name = urllib.quote(self.connection["share_name"],
                              safe="~()*!.'")
    port = self.connection["port"]
    # URL from python-jss form:
    results.add(join(url, share_name))
    results.add(join("%s:%s" % (url, port), share_name))
    # IP Address form:
    # socket.gethostbyname() will return an IP address whether
    # an IP address, FQDN, or .local name is provided.
    ip_address = socket.gethostbyname(url)
    results.add(join(ip_address, share_name))
    results.add(join("%s:%s" % (ip_address, port), share_name))
    # Domain name only form:
    domain_name = url.split(".")[0]
    results.add(join(domain_name, share_name))
    results.add(join("%s:%s" % (domain_name, port), share_name))
    # FQDN form using getfqdn:
    # socket.getfqdn() could just resolve back to the ip
    # or be the same as the initial URL so only add it if it's
    # different than both.
    fqdn = socket.getfqdn(ip_address)
    results.add(join(fqdn, share_name))
    results.add(join("%s:%s" % (fqdn, port), share_name))
    return tuple(results)
Return a tuple of potential mount strings .
379
8
27,716
def _mount(self):
    """Mount the share using the appropriate tool for the running OS.

    Consistency fix: py2-only ``print x`` statements replaced with the
    parenthesized form (valid on Python 2 and 3).
    """
    # mount_afp "afp://scraig:<password>@address/share" <mnt_point>
    if is_osx():
        if self.connection["jss"].verbose:
            print(self.connection["mount_url"])
        if mount_share:
            self.connection["mount_point"] = mount_share(
                self.connection["mount_url"])
        else:
            # Non-Apple OS X python:
            args = ["mount", "-t", self.protocol,
                    self.connection["mount_url"],
                    self.connection["mount_point"]]
            if self.connection["jss"].verbose:
                print(" ".join(args))
            subprocess.check_call(args)
    elif is_linux():
        args = ["mount_afp", "-t", self.protocol,
                self.connection["mount_url"],
                self.connection["mount_point"]]
        if self.connection["jss"].verbose:
            print(" ".join(args))
        subprocess.check_call(args)
    else:
        raise JSSError("Unsupported OS.")
Mount based on which OS is running .
269
8
27,717
def _build_url(self):
    """Build the dbfileupload (POST) and casperAdminSave (delete) URLs."""
    base = self.connection["jss"].base_url
    self.connection["upload_url"] = "%s/%s" % (base, "dbfileupload")
    self.connection["delete_url"] = "%s/%s" % (base, "casperAdminSave.jxml")
Build the URL for POSTing files .
95
8
27,718
def copy_pkg(self, filename, id_=-1):
    """Copy a package to the distribution server via file upload."""
    self._copy(filename, id_=id_, file_type=PKG_FILE_TYPE)
Copy a package to the distribution server .
40
8
27,719
def copy_script(self, filename, id_=-1):
    """Copy a script to the distribution server via file upload."""
    self._copy(filename, id_=id_, file_type=SCRIPT_FILE_TYPE)
Copy a script to the distribution server .
40
8
27,720
def _copy(self, filename, id_=-1, file_type=0):
    """Upload a file to the distribution server.

    Directories (non-flat packages) are rejected, since the upload
    endpoint only accepts single files.

    Raises:
        JSSUnsupportedFileType: If filename is a directory.
    """
    if os.path.isdir(filename):
        raise JSSUnsupportedFileType(
            "Distribution Server type repos do not permit directory "
            "uploads. You are probably trying to upload a non-flat "
            "package. Please zip or create a flat package.")
    basefname = os.path.basename(filename)
    headers = {"DESTINATION": self.destination,
               "OBJECT_ID": str(id_),
               "FILE_TYPE": file_type,
               "FILE_NAME": basefname}
    # Fix: close the file handle when the upload finishes instead of
    # leaking it; also use parenthesized print for py2/py3 consistency.
    with open(filename, "rb") as resource:
        response = self.connection["jss"].session.post(
            url=self.connection["upload_url"], data=resource,
            headers=headers)
    if self.connection["jss"].verbose:
        print(response)
Upload a file to the distribution server .
201
8
27,721
def delete_with_casper_admin_save(self, pkg):
    """Delete a pkg from the distribution server via casperAdminSave.

    Accepts a Package object, a package ID (int), or a package name.

    Raises:
        TypeError: For any other argument type.
    """
    # The POST needs the package ID.
    if pkg.__class__.__name__ == "Package":
        package_to_delete = pkg.id
    elif isinstance(pkg, int):
        package_to_delete = pkg
    elif isinstance(pkg, str):
        package_to_delete = self.connection["jss"].Package(pkg).id
    else:
        raise TypeError
    payload = {"username": self.connection["jss"].user,
               "password": self.connection["jss"].password,
               "deletedPackageID": package_to_delete}
    self.connection["jss"].session.post(
        url=self.connection["delete_url"], data=payload)
Delete a pkg from the distribution server .
193
9
27,722
def delete(self, filename):
    """Delete a package or script object from the JSS by file name."""
    jss_server = self.connection["jss"]
    if is_package(filename):
        jss_server.Package(filename).delete()
    else:
        jss_server.Script(filename).delete()
Delete a package or script from the distribution server.
54
10
27,723
def exists(self, filename):
    """Check whether a package or script with *filename* exists.

    Technically, the results of the casper.jxml page list the package
    files on the server, but that is an undocumented interface; query
    the API objects instead.
    """
    if is_package(filename):
        objects = self.connection["jss"].Package().retrieve_all()
    else:
        objects = self.connection["jss"].Script().retrieve_all()
    return any(obj.findtext("filename") == filename for obj in objects)
Check for the existence of a package or script .
141
10
27,724
def exists_using_casper(self, filename):
    """Check whether *filename* is present on every distribution server.

    Uses the undocumented casper.jxml data: builds the set of package
    basenames for each distribution server, intersects them, and
    tests membership.
    """
    casper_results = casper.Casper(self.connection["jss"])
    distribution_servers = casper_results.find("distributionservers")
    # Step one: Build a list of sets of all package names.
    all_packages = []
    for distribution_server in distribution_servers:
        packages = set()
        for package in distribution_server.findall("packages/package"):
            packages.add(os.path.basename(package.find("fileURL").text))
        all_packages.append(packages)
    # Fix: guard against there being no distribution servers; pop()
    # on an empty list raised IndexError.
    if not all_packages:
        return False
    # Step two: Intersect the sets.
    base_set = all_packages.pop()
    for packages in all_packages:
        base_set = base_set.intersection(packages)
    # Step three: Check for membership.
    return filename in base_set
Check for the existence of a package file .
191
9
27,725
def command_flush_for(self, id_type, command_id, status):
    """Flush commands for an individual device or a group.

    Args:
        id_type: One of the supported device/group endpoint names.
        command_id: A single id or a list of ids.
        status: 'Pending', 'Failed', or 'Pending+Failed'.

    Raises:
        ValueError: For an unknown id_type or status.
    """
    valid_id_types = ('computers', 'computergroups', 'mobiledevices',
                      'mobiledevicegroups')
    valid_statuses = ('Pending', 'Failed', 'Pending+Failed')
    if id_type not in valid_id_types or status not in valid_statuses:
        raise ValueError("Invalid arguments.")
    if isinstance(command_id, list):
        command_id = ",".join(str(item) for item in command_id)
    flush_url = "{}/{}/id/{}/status/{}".format(
        self.url, id_type, command_id, status)
    self.jss.delete(flush_url)
Flush commands for an individual device .
179
8
27,726
def mac_addresses(self):
    """Return a list of MAC addresses for this device.

    The primary address is always included; the alternate address is
    appended only when present.
    """
    addresses = [self.findtext("general/mac_address")]
    alternate = self.findtext("general/alt_mac_address")
    if alternate:
        addresses.append(alternate)
    return addresses
Return a list of mac addresses for this device .
74
10
27,727
def _set_upload_url(self):
    """Generate and store the full URL for a POST to this resource."""
    # pylint: disable=protected-access
    parts = [self.jss._url, self._url, self.resource_type, self.id_type,
             str(self._id)]
    self._upload_url = "/".join(parts)
Generate the full URL for a POST .
68
9
27,728
def save(self):
    """POST the file resource to the JSS.

    Raises:
        JSSPostError: On POST failure (409 conflicts are re-raised).
        JSSMethodNotAllowedError: When uploads are not allowed for
            this object type.

    Consistency fix: py2-only print statements replaced with the
    parenthesized form.
    """
    try:
        response = requests.post(self._upload_url,
                                 auth=self.jss.session.auth,
                                 verify=self.jss.session.verify,
                                 files=self.resource)
    except JSSPostError as error:
        if error.status_code == 409:
            raise JSSPostError(error)
        else:
            raise JSSMethodNotAllowedError(self.__class__.__name__)
    if response.status_code == 201:
        if self.jss.verbose:
            print("POST: Success")
            print(response.text.encode("utf-8"))
    elif response.status_code >= 400:
        error_handler(JSSPostError, response)
POST the object to the JSS .
153
8
27,729
def search_users(self, user):
    """Search the LDAP server for *user* and wrap the response."""
    endpoint = "%s/%s/%s" % (self.url, "user", user)
    response = self.jss.get(endpoint)
    return LDAPUsersResults(self.jss, response)
Search for LDAP users .
61
6
27,730
def search_groups(self, group):
    """Search the LDAP server for *group* and wrap the response."""
    endpoint = "%s/%s/%s" % (self.url, "group", group)
    response = self.jss.get(endpoint)
    return LDAPGroupsResults(self.jss, response)
Search for LDAP groups .
62
6
27,731
def is_user_in_group(self, user, group):
    """Test whether *user* is a member of *group* on the LDAP server.

    Raises:
        JSSGetError: If the response has an unexpected shape.
    """
    search_url = "%s/%s/%s/%s/%s" % (self.url, "group", group,
                                     "user", user)
    response = self.jss.get(search_url)
    # Sanity check on the number of child elements.
    length = len(response)
    result = False
    if length == 1:
        # User doesn't exist. Use default False value.
        pass
    elif length == 2:
        if (response.findtext("ldap_user/username") == user and
                response.findtext("ldap_user/is_member") == "Yes"):
            result = True
    elif len(response) >= 2:
        raise JSSGetError("Unexpected response.")
    return result
Test for whether a user is in a group .
168
10
27,732
def log_flush_with_xml(self, data):
    """Flush logs for devices described by a logflush XML document.

    Accepts either an XML string or an Element, which is serialized
    first.
    """
    if not isinstance(data, basestring):
        data = ElementTree.tostring(data)
    # Fix: dropped the unused `response =` binding; delete() is called
    # only for its side effect.
    self.delete(data)
Flush logs for devices with a supplied xml string .
44
11
27,733
def log_flush_for_interval(self, log_type, interval):
    """Flush logs of *log_type* older than *interval*.

    Falls back to "policies" when no log_type is given.
    """
    log_type = log_type or "policies"
    # The XML for the /logflush basic endpoint allows spaces instead
    # of "+", so do a replace here just in case.
    flush_url = "{}/{}/interval/{}".format(
        self.url, log_type, interval.replace(" ", "+"))
    self.jss.delete(flush_url)
Flush logs for an interval of time .
111
9
27,734
def log_flush_for_obj_for_interval(self, log_type, obj_id, interval):
    """Flush logs of *log_type* for one object, older than *interval*.

    Falls back to "policies" when no log_type is given.
    """
    log_type = log_type or "policies"
    # The XML for the /logflush basic endpoint allows spaces instead
    # of "+", so do a replace here just in case.
    flush_url = "{}/{}/id/{}/interval/{}".format(
        self.url, log_type, obj_id, interval.replace(" ", "+"))
    self.jss.delete(flush_url)
Flush logs for an interval of time for a specific object .
128
13
27,735
def _new(self, name, **kwargs):
    """Create a new Package from scratch.

    The filename element must match the name, so it is set explicitly
    here rather than letting the generic population loop handle it.
    """
    # We want these to match, so circumvent the for loop.
    # ElementTree.SubElement(self, "name").text = name
    super(Package, self)._new(name, **kwargs)
    ElementTree.SubElement(self, "filename").text = name
Create a new Package from scratch .
77
7
27,736
def set_category(self, category):
    """Set the package's category by name.

    For some reason, packages only have the category name, not the ID.
    """
    name = category.name if isinstance(category, Category) else category
    self.find("category").text = name
Set package category
58
3
27,737
def add_object_to_scope(self, obj):
    """Add a JSS object to the appropriate scope block for its type.

    Raises:
        TypeError: For unsupported object types.
    """
    scope_paths = ((Computer, "scope/computers"),
                   (ComputerGroup, "scope/computer_groups"),
                   (Building, "scope/buildings"),
                   (Department, "scope/departments"))
    for klass, path in scope_paths:
        if isinstance(obj, klass):
            self.add_object_to_path(obj, path)
            return
    raise TypeError
Add an object to the appropriate scope block .
136
9
27,738
def add_package(self, pkg, action_type="Install"):
    """Add a Package object to the policy.

    Args:
        pkg: A Package object.
        action_type: One of "Install", "Cache", or "Install Cached".

    Raises:
        ValueError: If pkg is not a Package or action_type is invalid.
    """
    if isinstance(pkg, Package):
        if action_type not in ("Install", "Cache", "Install Cached"):
            raise ValueError
        package = self.add_object_to_path(
            pkg, "package_configuration/packages")
        # If there's already an action specified, get it, then
        # overwrite. Otherwise, make a new subelement.
        # Fix: compare against None. An Element with no children is
        # falsy, so `if not action` wrongly created a duplicate
        # <action> element when one already existed.
        action = package.find("action")
        if action is None:
            action = ElementTree.SubElement(package, "action")
        action.text = action_type
    else:
        raise ValueError("Please pass a Package object to parameter: "
                         "pkg.")
Add a Package object to the policy with action = install .
157
12
27,739
def set_category(self, category):
    """Set the policy's category from a Category object or a name."""
    pcategory = self.find("general/category")
    pcategory.clear()
    name = ElementTree.SubElement(pcategory, "name")
    if isinstance(category, Category):
        # A Category object carries both id and name.
        id_ = ElementTree.SubElement(pcategory, "id")
        id_.text = category.id
        name.text = category.name
    elif isinstance(category, basestring):
        name.text = category
Set the policy's category.
101
6
27,740
def cyvcf2(context, vcf, include, exclude, chrom, start, end, loglevel,
           silent, individual, no_inds):
    """Fast vcf parsing with cython + htslib."""
    coloredlogs.install(log_level=loglevel)
    start_parsing = datetime.now()
    log.info("Running cyvcf2 version %s", __version__)
    if include and exclude:
        log.warning("Can not use include and exclude at the same time")
        context.abort()
    region = ''
    if chrom or start or end:
        if not (chrom and start and end):
            log.warning("Please specify chromosome, start and end for region")
            context.abort()
        else:
            region = "{0}:{1}-{2}".format(chrom, start, end)
    vcf_obj = VCF(vcf)
    for inclusion in include:
        if vcf_obj.contains(inclusion):
            log.info("Including %s in output", inclusion)
        else:
            log.warning("%s does not exist in header", inclusion)
            context.abort()
    for exclusion in exclude:
        if vcf_obj.contains(exclusion):
            log.info("Excluding %s in output", exclusion)
        else:
            log.warning("%s does not exist in header", exclusion)
            context.abort()
    if individual:
        # Check if the choosen individuals exists in vcf
        test = True
        for ind_id in individual:
            if ind_id not in vcf_obj.samples:
                log.warning("Individual '%s' does not exist in vcf", ind_id)
                test = False
        if not test:
            context.abort()
        # Convert individuals to list for VCF.set_individuals
        individual = list(individual)
    else:
        individual = None
    # Set individual to be empty list to skip all genotypes
    if no_inds:
        individual = []
    if not silent:
        print_header(vcf_obj, include, exclude, individual)
    nr_variants = None
    try:
        for nr_variants, variant in enumerate(vcf_obj(region)):
            if not silent:
                print_variant(variant, include, exclude)
    except Exception as err:
        log.warning(err)
        context.abort()
    if nr_variants is None:
        log.info("No variants in vcf")
        return
    log.info("{0} variants parsed".format(nr_variants + 1))
    log.info("Time to parse variants: {0}".format(
        datetime.now() - start_parsing))
fast vcf parsing with cython + htslib
561
12
27,741
def get_version():
    """Extract __version__ from cyvcf2/__init__.py without importing it.

    Raises:
        ValueError: If no __version__ assignment is found.
    """
    import ast
    with open(os.path.join("cyvcf2", "__init__.py"), "r") as init_file:
        module = ast.parse(init_file.read())
    candidates = (ast.literal_eval(node.value) for node in ast.walk(module)
                  if isinstance(node, ast.Assign)
                  and node.targets[0].id == "__version__")
    try:
        return next(candidates)
    except StopIteration:
        raise ValueError("version could not be located")
Get the version info from the mpld3 package without importing it
130
13
27,742
def verify(self, verify_locations: str) -> None:
    """Verify that the OCSP response is trusted by the given CA file."""
    # Ensure the file exists before handing it to OpenSSL.
    with open(verify_locations):
        pass
    try:
        self._ocsp_response.basic_verify(verify_locations)
    except _nassl.OpenSSLError as e:
        if 'certificate verify error' in str(e):
            raise OcspResponseNotTrustedError(verify_locations)
        raise
Verify that the OCSP response is trusted .
93
10
27,743
def _parse_ocsp_response_from_openssl_text(
        cls, response_text: str,
        response_status: OcspResponseStatusEnum) -> Dict[str, Any]:
    """Parse OpenSSL's text output for an OCSP response into a dict.

    Makes a lot of assumptions about the output format; in particular
    it only handles a single response entry.
    """
    response_dict = {
        'responseStatus': cls._get_value_from_text_output_no_p(
            'OCSP Response Status:', response_text),
        'version': cls._get_value_from_text_output_no_p(
            'Version:', response_text),
        'responseType': cls._get_value_from_text_output(
            'Response Type:', response_text),
        'responderID': cls._get_value_from_text_output(
            'Responder Id:', response_text),
        'producedAt': cls._get_value_from_text_output(
            'Produced At:', response_text),
    }  # type: Dict[str, Any]
    if response_status != OcspResponseStatusEnum.SUCCESSFUL:
        return response_dict
    # A successful OCSP response will contain more data - let's parse it
    # TODO(ad): This will not work correctly if there are multiple
    # responses as it assumes just one
    response_dict['responses'] = [{
        'certID': {
            'hashAlgorithm': cls._get_value_from_text_output(
                'Hash Algorithm:', response_text),
            'issuerNameHash': cls._get_value_from_text_output(
                'Issuer Name Hash:', response_text),
            'issuerKeyHash': cls._get_value_from_text_output(
                'Issuer Key Hash:', response_text),
            'serialNumber': cls._get_value_from_text_output(
                'Serial Number:', response_text)
        },
        'certStatus': cls._get_value_from_text_output(
            'Cert Status:', response_text),
        'thisUpdate': cls._get_value_from_text_output(
            'This Update:', response_text),
        'nextUpdate': cls._get_value_from_text_output(
            'Next Update:', response_text),
    }]
    # Perf/clarity fix: parse the SCT extension once instead of twice.
    scts = cls._get_scts_from_text_output(response_text)
    if scts:
        # SCT extension present
        response_dict['responses'][0]['singleExtensions'] = {
            'ctCertificateScts': scts}
    return response_dict
Parse OpenSSL s text output and make a lot of assumptions .
592
14
27,744
def _init_base_objects(self, ssl_version: OpenSslVersionEnum,
                       underlying_socket: Optional[socket.socket]) -> None:
    """Setup the SSL_CTX object and store the underlying socket.

    A Python socket handles transmission of the data; it may be None.
    """
    self._is_handshake_completed = False
    self._ssl_version = ssl_version
    self._ssl_ctx = self._NASSL_MODULE.SSL_CTX(ssl_version.value)
    self._sock = underlying_socket
Setup the socket and SSL_CTX objects .
105
10
27,745
def _init_server_authentication(
        self, ssl_verify: OpenSslVerifyEnum,
        ssl_verify_locations: Optional[str]) -> None:
    """Setup certificate validation logic for authenticating the server."""
    self._ssl_ctx.set_verify(ssl_verify.value)
    if ssl_verify_locations:
        # Ensure the file exists before handing it to OpenSSL.
        with open(ssl_verify_locations):
            pass
        self._ssl_ctx.load_verify_locations(ssl_verify_locations)
Setup the certificate validation logic for authenticating the server .
115
11
27,746
def _init_client_authentication(
        self, client_certchain_file: Optional[str],
        client_key_file: Optional[str],
        client_key_type: OpenSslFileTypeEnum,
        client_key_password: str,
        ignore_client_authentication_requests: bool) -> None:
    """Setup client authentication using the supplied certificate and key.

    Raises:
        ValueError: If a client certificate chain is supplied together
            with ignore_client_authentication_requests.
    """
    # Fix: validate the conflicting-options case before loading the
    # private key, so the SSL_CTX is not left half-configured when we
    # raise.
    if ignore_client_authentication_requests:
        if client_certchain_file:
            raise ValueError('Cannot enable both client_certchain_file and '
                             'ignore_client_authentication_requests')
        self._ssl_ctx.set_client_cert_cb_NULL()
        return
    if client_certchain_file is not None and client_key_file is not None:
        self._use_private_key(client_certchain_file, client_key_file,
                              client_key_type, client_key_password)
Setup client authentication using the supplied certificate and key .
189
10
27,747
def shutdown(self) -> None:
    """Close the TLS connection and the underlying network socket."""
    self._is_handshake_completed = False
    # Ensure shutting down the connection never raises an exception.
    try:
        self._flush_ssl_engine()
    except IOError:
        pass
    try:
        self._ssl.shutdown()
    except OpenSSLError as e:
        # Ignore "uninitialized" errors; anything else is unexpected.
        if ('SSL_shutdown:uninitialized' not in str(e)
                and 'shutdown while in init' not in str(e)):
            raise
    if self._sock:
        self._sock.close()
Close the TLS connection and the underlying network socket .
121
10
27,748
def _use_private_key(self, client_certchain_file: str,
                     client_key_file: str,
                     client_key_type: OpenSslFileTypeEnum,
                     client_key_password: str) -> None:
    """Load the client certificate chain (PEM format) and private key.

    Private method because it should be set via the constructor.

    Raises:
        ValueError: If the key password is wrong or the key is invalid.
    """
    # Ensure the files exist before handing them to OpenSSL.
    with open(client_certchain_file):
        pass
    with open(client_key_file):
        pass
    self._ssl_ctx.use_certificate_chain_file(client_certchain_file)
    self._ssl_ctx.set_private_key_password(client_key_password)
    try:
        self._ssl_ctx.use_PrivateKey_file(client_key_file,
                                          client_key_type.value)
    except OpenSSLError as e:
        if 'bad password read' in str(e) or 'bad decrypt' in str(e):
            raise ValueError('Invalid Private Key')
        raise
    self._ssl_ctx.check_private_key()
The certificate chain file must be in PEM format . Private method because it should be set via the constructor .
212
22
27,749
def get_tlsext_status_ocsp_resp(self) -> Optional[OcspResponse]:
    """Retrieve the server's stapled OCSP response, if any."""
    raw_response = self._ssl.get_tlsext_status_ocsp_resp()
    return OcspResponse(raw_response) if raw_response else None
Retrieve the server s OCSP Stapling status .
73
11
27,750
def fetch_source(self) -> None:
    """Download and extract the library's source tar archive.

    Extracts into _DEPS_PATH.
    """
    # Do not import at the top so that this file can be imported by
    # setup.py.
    import requests
    with TemporaryFile() as temp_file:
        # Download the source archive
        request = requests.get(self.src_tar_gz_url)
        temp_file.write(request.content)
        # Rewind the file
        temp_file.seek(0)
        # Extract the content of the archive.
        # Fix: close the tarfile when done instead of leaking it.
        # NOTE(review): extractall() on an untrusted archive can write
        # outside _DEPS_PATH (path traversal); the URL is assumed
        # trusted here — confirm.
        with tarfile.open(fileobj=temp_file) as tar_file:
            tar_file.extractall(path=_DEPS_PATH)
Download the tar archive that contains the source code for the library .
121
13
27,751
def do_renegotiate(self) -> None:
    """Initiate an SSL renegotiation; requires a completed handshake."""
    if not self._is_handshake_completed:
        raise IOError('SSL Handshake was not completed; cannot renegotiate.')
    self._ssl.renegotiate()
    self.do_handshake()
Initiate an SSL renegotiation .
60
8
27,752
def _download_file_vizier ( cat , filePath , catalogname = 'catalog.dat' ) : sys . stdout . write ( '\r' + "Downloading file %s ...\r" % ( os . path . basename ( filePath ) ) ) sys . stdout . flush ( ) try : # make all intermediate directories os . makedirs ( os . path . dirname ( filePath ) ) except OSError : pass # Safe way of downloading downloading = True interrupted = False file , tmp_savefilename = tempfile . mkstemp ( ) os . close ( file ) #Easier this way ntries = 1 while downloading : try : ftp = FTP ( 'cdsarc.u-strasbg.fr' ) ftp . login ( 'anonymous' , 'test' ) ftp . cwd ( os . path . join ( 'pub' , 'cats' , cat ) ) with open ( tmp_savefilename , 'wb' ) as savefile : ftp . retrbinary ( 'RETR %s' % catalogname , savefile . write ) shutil . move ( tmp_savefilename , filePath ) downloading = False if interrupted : raise KeyboardInterrupt except : raise if not downloading : #Assume KeyboardInterrupt raise elif ntries > _MAX_NTRIES : raise IOError ( 'File %s does not appear to exist on the server ...' % ( os . path . basename ( filePath ) ) ) finally : if os . path . exists ( tmp_savefilename ) : os . remove ( tmp_savefilename ) ntries += 1 sys . stdout . write ( '\r' + _ERASESTR + '\r' ) sys . stdout . flush ( ) return None
Stolen from Jo Bovy s gaia_tools package!
391
13
27,753
def ensure_dir ( f ) : d = os . path . dirname ( f ) if not os . path . exists ( d ) : os . makedirs ( d )
Ensure a a file exists and if not make the relevant path
38
13
27,754
def dePeriod ( arr ) : diff = arr - nu . roll ( arr , 1 , axis = 1 ) w = diff < - 6. addto = nu . cumsum ( w . astype ( int ) , axis = 1 ) return arr + _TWOPI * addto
make an array of periodic angles increase linearly
62
9
27,755
def hinit ( func , x , t , pos_neg , f0 , iord , hmax , rtol , atol , args ) : sk = atol + rtol * np . fabs ( x ) dnf = np . sum ( np . square ( f0 / sk ) , axis = 0 ) dny = np . sum ( np . square ( x / sk ) , axis = 0 ) h = np . sqrt ( dny / dnf ) * 0.01 h = np . min ( [ h , np . fabs ( hmax ) ] ) h = custom_sign ( h , pos_neg ) # perform an explicit Euler step xx1 = x + h * f0 f1 = np . array ( func ( xx1 , t [ 0 ] + h , * args ) ) # estimate the second derivative of the solution der2 = np . sum ( np . square ( ( f1 - f0 ) / sk ) , axis = 0 ) der2 = np . sqrt ( der2 ) / h # step size is computed such that h ** iord * max_d(norm(f0), norm(der2)) = 0.01 der12 = np . max ( [ np . fabs ( der2 ) , np . sqrt ( dnf ) ] ) h1 = np . power ( 0.01 / der12 , 1.0 / iord ) h = np . min ( [ 100.0 * np . fabs ( h ) , np . min ( [ np . fabs ( h1 ) , np . fabs ( hmax ) ] ) ] ) return custom_sign ( h , pos_neg ) , f0 , f1 , xx1
Estimate initial step size
370
5
27,756
def dense_output ( t_current , t_old , h_current , rcont ) : # initialization s = ( t_current - t_old ) / h_current s1 = 1.0 - s return rcont [ 0 ] + s * ( rcont [ 1 ] + s1 * ( rcont [ 2 ] + s * ( rcont [ 3 ] + s1 * ( rcont [ 4 ] + s * ( rcont [ 5 ] + s1 * ( rcont [ 6 ] + s * rcont [ 7 ] ) ) ) ) ) )
Dense output function basically extrapolatin
123
8
27,757
def phiME_dens ( R , z , phi , dens , Sigma , dSigmadR , d2SigmadR2 , hz , Hz , dHzdz , Sigma_amp ) : r = numpy . sqrt ( R ** 2. + z ** 2. ) out = dens ( R , z , phi ) for a , s , ds , d2s , h , H , dH in zip ( Sigma_amp , Sigma , dSigmadR , d2SigmadR2 , hz , Hz , dHzdz ) : out -= a * ( s ( r ) * h ( z ) + d2s ( r ) * H ( z ) + 2. / r * ds ( r ) * ( H ( z ) + z * dH ( z ) ) ) return out
The density corresponding to phi_ME
184
8
27,758
def _parse_integrator ( int_method ) : #Pick integrator if int_method . lower ( ) == 'rk4_c' : int_method_c = 1 elif int_method . lower ( ) == 'rk6_c' : int_method_c = 2 elif int_method . lower ( ) == 'symplec4_c' : int_method_c = 3 elif int_method . lower ( ) == 'symplec6_c' : int_method_c = 4 elif int_method . lower ( ) == 'dopr54_c' : int_method_c = 5 elif int_method . lower ( ) == 'dop853_c' : int_method_c = 6 else : int_method_c = 0 return int_method_c
parse the integrator method to pass to C
184
9
27,759
def _parse_tol ( rtol , atol ) : #Process atol and rtol if rtol is None : rtol = - 12. * nu . log ( 10. ) else : #pragma: no cover rtol = nu . log ( rtol ) if atol is None : atol = - 12. * nu . log ( 10. ) else : #pragma: no cover atol = nu . log ( atol ) return ( rtol , atol )
Parse the tolerance keywords
114
5
27,760
def _check_integrate_dt ( t , dt ) : if dt is None : return True mult = round ( ( t [ 1 ] - t [ 0 ] ) / dt ) if nu . fabs ( mult * dt - t [ 1 ] + t [ 0 ] ) < 10. ** - 10. : return True else : return False
Check that the stepszie in t is an integer x dt
77
13
27,761
def _forceInt ( x , y , z , dens , b2 , c2 , i , glx = None , glw = None ) : def integrand ( s ) : t = 1 / s ** 2. - 1. return dens ( numpy . sqrt ( x ** 2. / ( 1. + t ) + y ** 2. / ( b2 + t ) + z ** 2. / ( c2 + t ) ) ) * ( x / ( 1. + t ) * ( i == 0 ) + y / ( b2 + t ) * ( i == 1 ) + z / ( c2 + t ) * ( i == 2 ) ) / numpy . sqrt ( ( 1. + ( b2 - 1. ) * s ** 2. ) * ( 1. + ( c2 - 1. ) * s ** 2. ) ) if glx is None : return integrate . quad ( integrand , 0. , 1. ) [ 0 ] else : return numpy . sum ( glw * integrand ( glx ) )
Integral that gives the force in x y z
227
10
27,762
def _2ndDerivInt ( x , y , z , dens , densDeriv , b2 , c2 , i , j , glx = None , glw = None ) : def integrand ( s ) : t = 1 / s ** 2. - 1. m = numpy . sqrt ( x ** 2. / ( 1. + t ) + y ** 2. / ( b2 + t ) + z ** 2. / ( c2 + t ) ) return ( densDeriv ( m ) * ( x / ( 1. + t ) * ( i == 0 ) + y / ( b2 + t ) * ( i == 1 ) + z / ( c2 + t ) * ( i == 2 ) ) * ( x / ( 1. + t ) * ( j == 0 ) + y / ( b2 + t ) * ( j == 1 ) + z / ( c2 + t ) * ( j == 2 ) ) / m + dens ( m ) * ( i == j ) * ( ( 1. / ( 1. + t ) * ( i == 0 ) + 1. / ( b2 + t ) * ( i == 1 ) + 1. / ( c2 + t ) * ( i == 2 ) ) ) ) / numpy . sqrt ( ( 1. + ( b2 - 1. ) * s ** 2. ) * ( 1. + ( c2 - 1. ) * s ** 2. ) ) if glx is None : return integrate . quad ( integrand , 0. , 1. ) [ 0 ] else : return numpy . sum ( glw * integrand ( glx ) )
Integral that gives the 2nd derivative of the potential in x y z
355
15
27,763
def _mdens ( self , m ) : return ( self . a / m ) ** self . alpha / ( 1. + m / self . a ) ** ( self . betaminusalpha )
Density as a function of m
42
7
27,764
def _fit_orbit ( orb , vxvv , vxvv_err , pot , radec = False , lb = False , customsky = False , lb_to_customsky = None , pmllpmbb_to_customsky = None , tintJ = 100 , ntintJ = 1000 , integrate_method = 'dopr54_c' , ro = None , vo = None , obs = None , disp = False ) : # Need to turn this off for speed coords . _APY_COORDS_ORIG = coords . _APY_COORDS coords . _APY_COORDS = False #Import here, because otherwise there is an infinite loop of imports from galpy . actionAngle import actionAngleIsochroneApprox , actionAngle #Mock this up, bc we want to use its orbit-integration routines class mockActionAngleIsochroneApprox ( actionAngleIsochroneApprox ) : def __init__ ( self , tintJ , ntintJ , pot , integrate_method = 'dopr54_c' ) : actionAngle . __init__ ( self ) self . _tintJ = tintJ self . _ntintJ = ntintJ self . _tsJ = nu . linspace ( 0. , self . _tintJ , self . _ntintJ ) self . _integrate_dt = None self . _pot = pot self . _integrate_method = integrate_method return None tmockAA = mockActionAngleIsochroneApprox ( tintJ , ntintJ , pot , integrate_method = integrate_method ) opt_vxvv = optimize . fmin_powell ( _fit_orbit_mlogl , orb . vxvv , args = ( vxvv , vxvv_err , pot , radec , lb , customsky , lb_to_customsky , pmllpmbb_to_customsky , tmockAA , ro , vo , obs ) , disp = disp ) maxLogL = - _fit_orbit_mlogl ( opt_vxvv , vxvv , vxvv_err , pot , radec , lb , customsky , lb_to_customsky , pmllpmbb_to_customsky , tmockAA , ro , vo , obs ) coords . _APY_COORDS = coords . _APY_COORDS_ORIG return ( opt_vxvv , maxLogL )
Fit an orbit to data in a given potential
560
9
27,765
def actionAngle_physical_input ( method ) : @ wraps ( method ) def wrapper ( * args , * * kwargs ) : if len ( args ) < 3 : # orbit input return method ( * args , * * kwargs ) ro = kwargs . get ( 'ro' , None ) if ro is None and hasattr ( args [ 0 ] , '_ro' ) : ro = args [ 0 ] . _ro if _APY_LOADED and isinstance ( ro , units . Quantity ) : ro = ro . to ( units . kpc ) . value vo = kwargs . get ( 'vo' , None ) if vo is None and hasattr ( args [ 0 ] , '_vo' ) : vo = args [ 0 ] . _vo if _APY_LOADED and isinstance ( vo , units . Quantity ) : vo = vo . to ( units . km / units . s ) . value # Loop through args newargs = ( ) for ii in range ( len ( args ) ) : if _APY_LOADED and isinstance ( args [ ii ] , units . Quantity ) : try : targ = args [ ii ] . to ( units . kpc ) . value / ro except units . UnitConversionError : try : targ = args [ ii ] . to ( units . km / units . s ) . value / vo except units . UnitConversionError : try : targ = args [ ii ] . to ( units . rad ) . value except units . UnitConversionError : raise units . UnitConversionError ( "Input units not understood" ) newargs = newargs + ( targ , ) else : newargs = newargs + ( args [ ii ] , ) args = newargs return method ( * args , * * kwargs ) return wrapper
Decorator to convert inputs to actionAngle functions from physical to internal coordinates
385
16
27,766
def _direct_nbody_force ( q , m , t , pot , softening , softening_args ) : #First do the particles #Calculate all the distances nq = len ( q ) dim = len ( q [ 0 ] ) dist_vec = nu . zeros ( ( nq , nq , dim ) ) dist = nu . zeros ( ( nq , nq ) ) for ii in range ( nq ) : for jj in range ( ii + 1 , nq ) : dist_vec [ ii , jj , : ] = q [ jj ] - q [ ii ] dist_vec [ jj , ii , : ] = - dist_vec [ ii , jj , : ] dist [ ii , jj ] = linalg . norm ( dist_vec [ ii , jj , : ] ) dist [ jj , ii ] = dist [ ii , jj ] #Calculate all the forces force = [ ] for ii in range ( nq ) : thisforce = nu . zeros ( dim ) for jj in range ( nq ) : if ii == jj : continue thisforce += m [ jj ] * softening ( dist [ ii , jj ] , * softening_args ) / dist [ ii , jj ] * dist_vec [ ii , jj , : ] force . append ( thisforce ) #Then add the external force if pot is None : return force for ii in range ( nq ) : force [ ii ] += _external_force ( q [ ii ] , t , pot ) return force
Calculate the force
341
5
27,767
def _vmomentsurfaceIntegrand ( vR , vT , R , az , df , n , m , sigmaR1 , sigmaT1 , t , initvmoment ) : o = Orbit ( [ R , vR * sigmaR1 , vT * sigmaT1 , az ] ) return vR ** n * vT ** m * df ( o , t ) / initvmoment
Internal function that is the integrand for the velocity moment times surface mass integration
92
15
27,768
def _vmomentsurfacemassGrid ( self , n , m , grid ) : if len ( grid . df . shape ) == 3 : tlist = True else : tlist = False if tlist : nt = grid . df . shape [ 2 ] out = [ ] for ii in range ( nt ) : out . append ( nu . dot ( grid . vRgrid ** n , nu . dot ( grid . df [ : , : , ii ] , grid . vTgrid ** m ) ) * ( grid . vRgrid [ 1 ] - grid . vRgrid [ 0 ] ) * ( grid . vTgrid [ 1 ] - grid . vTgrid [ 0 ] ) ) return nu . array ( out ) else : return nu . dot ( grid . vRgrid ** n , nu . dot ( grid . df , grid . vTgrid ** m ) ) * ( grid . vRgrid [ 1 ] - grid . vRgrid [ 0 ] ) * ( grid . vTgrid [ 1 ] - grid . vTgrid [ 0 ] )
Internal function to evaluate vmomentsurfacemass using a grid rather than direct integration
230
18
27,769
def _buildvgrid ( self , R , phi , nsigma , t , sigmaR1 , sigmaT1 , meanvR , meanvT , gridpoints , print_progress , integrate_method , deriv ) : out = evolveddiskdfGrid ( ) out . sigmaR1 = sigmaR1 out . sigmaT1 = sigmaT1 out . meanvR = meanvR out . meanvT = meanvT out . vRgrid = nu . linspace ( meanvR - nsigma * sigmaR1 , meanvR + nsigma * sigmaR1 , gridpoints ) out . vTgrid = nu . linspace ( meanvT - nsigma * sigmaT1 , meanvT + nsigma * sigmaT1 , gridpoints ) if isinstance ( t , ( list , nu . ndarray ) ) : nt = len ( t ) out . df = nu . zeros ( ( gridpoints , gridpoints , nt ) ) for ii in range ( gridpoints ) : for jj in range ( gridpoints - 1 , - 1 , - 1 ) : #Reverse, so we get the peak before we get to the extreme lags NOT NECESSARY if print_progress : #pragma: no cover sys . stdout . write ( '\r' + "Velocity gridpoint %i out of %i" % ( jj + ii * gridpoints + 1 , gridpoints * gridpoints ) ) sys . stdout . flush ( ) thiso = Orbit ( [ R , out . vRgrid [ ii ] , out . vTgrid [ jj ] , phi ] ) out . df [ ii , jj , : ] = self ( thiso , nu . array ( t ) . flatten ( ) , integrate_method = integrate_method , deriv = deriv , use_physical = False ) out . df [ ii , jj , nu . isnan ( out . df [ ii , jj , : ] ) ] = 0. #BOVY: for now if print_progress : sys . stdout . write ( '\n' ) #pragma: no cover else : out . df = nu . zeros ( ( gridpoints , gridpoints ) ) for ii in range ( gridpoints ) : for jj in range ( gridpoints ) : if print_progress : #pragma: no cover sys . stdout . write ( '\r' + "Velocity gridpoint %i out of %i" % ( jj + ii * gridpoints + 1 , gridpoints * gridpoints ) ) sys . stdout . flush ( ) thiso = Orbit ( [ R , out . vRgrid [ ii ] , out . vTgrid [ jj ] , phi ] ) out . df [ ii , jj ] = self ( thiso , t , integrate_method = integrate_method , deriv = deriv , use_physical = False ) if nu . isnan ( out . df [ ii , jj ] ) : out . df [ ii , jj ] = 0. #BOVY: for now if print_progress : sys . stdout . write ( '\n' ) #pragma: no cover return out
Internal function to grid the vDF at a given location
700
11
27,770
def _determine_stream_spread_single ( sigomatrixEig , thetasTrack , sigOmega , sigAngle , allinvjacsTrack ) : #Estimate the spread in all frequencies and angles sigObig2 = sigOmega ( thetasTrack ) ** 2. tsigOdiag = copy . copy ( sigomatrixEig [ 0 ] ) tsigOdiag [ numpy . argmax ( tsigOdiag ) ] = sigObig2 tsigO = numpy . dot ( sigomatrixEig [ 1 ] , numpy . dot ( numpy . diag ( tsigOdiag ) , numpy . linalg . inv ( sigomatrixEig [ 1 ] ) ) ) #angles if hasattr ( sigAngle , '__call__' ) : sigangle2 = sigAngle ( thetasTrack ) ** 2. else : sigangle2 = sigAngle ** 2. tsigadiag = numpy . ones ( 3 ) * sigangle2 tsigadiag [ numpy . argmax ( tsigOdiag ) ] = 1. tsiga = numpy . dot ( sigomatrixEig [ 1 ] , numpy . dot ( numpy . diag ( tsigadiag ) , numpy . linalg . inv ( sigomatrixEig [ 1 ] ) ) ) #correlations, assume half correlated for now (can be calculated) correlations = numpy . diag ( 0.5 * numpy . ones ( 3 ) ) * numpy . sqrt ( tsigOdiag * tsigadiag ) correlations [ numpy . argmax ( tsigOdiag ) , numpy . argmax ( tsigOdiag ) ] = 0. correlations = numpy . dot ( sigomatrixEig [ 1 ] , numpy . dot ( correlations , numpy . linalg . inv ( sigomatrixEig [ 1 ] ) ) ) #Now convert fullMatrix = numpy . empty ( ( 6 , 6 ) ) fullMatrix [ : 3 , : 3 ] = tsigO fullMatrix [ 3 : , 3 : ] = tsiga fullMatrix [ 3 : , : 3 ] = correlations fullMatrix [ : 3 , 3 : ] = correlations . T return numpy . dot ( allinvjacsTrack , numpy . dot ( fullMatrix , allinvjacsTrack . T ) )
sigAngle input may either be a function that returns the dispersion in perpendicular angle as a function of parallel angle or a value
529
27
27,771
def _progenitor_setup ( self , progenitor , leading , useTMHessian ) : #Progenitor orbit: Calculate actions, frequencies, and angles for the progenitor self . _progenitor = progenitor ( ) #call to get new Orbit # Make sure we do not use physical coordinates self . _progenitor . turn_physical_off ( ) acfs = self . _aA . actionsFreqsAngles ( self . _progenitor , _firstFlip = ( not leading ) , use_physical = False ) self . _progenitor_jr = acfs [ 0 ] [ 0 ] self . _progenitor_lz = acfs [ 1 ] [ 0 ] self . _progenitor_jz = acfs [ 2 ] [ 0 ] self . _progenitor_Omegar = acfs [ 3 ] self . _progenitor_Omegaphi = acfs [ 4 ] self . _progenitor_Omegaz = acfs [ 5 ] self . _progenitor_Omega = numpy . array ( [ acfs [ 3 ] , acfs [ 4 ] , acfs [ 5 ] ] ) . reshape ( 3 ) self . _progenitor_angler = acfs [ 6 ] self . _progenitor_anglephi = acfs [ 7 ] self . _progenitor_anglez = acfs [ 8 ] self . _progenitor_angle = numpy . array ( [ acfs [ 6 ] , acfs [ 7 ] , acfs [ 8 ] ] ) . reshape ( 3 ) #Calculate dO/dJ Jacobian at the progenitor if useTMHessian : h , fr , fp , fz , e = self . _aAT . hessianFreqs ( self . _progenitor_jr , self . _progenitor_lz , self . _progenitor_jz ) self . _dOdJp = h # Replace frequencies with TM frequencies self . _progenitor_Omegar = fr self . _progenitor_Omegaphi = fp self . _progenitor_Omegaz = fz self . _progenitor_Omega = numpy . array ( [ self . _progenitor_Omegar , self . _progenitor_Omegaphi , self . _progenitor_Omegaz ] ) . reshape ( 3 ) else : self . _dOdJp = calcaAJac ( self . _progenitor . _orb . vxvv , self . _aA , dxv = None , dOdJ = True , _initacfs = acfs ) self . _dOdJpInv = numpy . linalg . inv ( self . _dOdJp ) self . _dOdJpEig = numpy . linalg . eig ( self . _dOdJp ) return None
The part of the setup relating to the progenitor s orbit
654
13
27,772
def _setup_progIsTrack ( self ) : # We need to flip the sign of the offset, to go to the progenitor self . _sigMeanSign *= - 1. # Use _determine_stream_track_single to calculate the track-progenitor # offset at zero angle separation prog_stream_offset = _determine_stream_track_single ( self . _aA , self . _progenitor , 0. , #time = 0 self . _progenitor_angle , self . _sigMeanSign , self . _dsigomeanProgDirection , lambda x : self . meanOmega ( x , use_physical = False ) , 0. ) #angle = 0 # Setup the new progenitor orbit progenitor = Orbit ( prog_stream_offset [ 3 ] ) # Flip the offset sign again self . _sigMeanSign *= - 1. # Now re-do the previous setup self . _progenitor_setup ( progenitor , self . _leading , False ) self . _offset_setup ( self . _sigangle , self . _leading , self . _deltaAngleTrack ) return None
If progIsTrack the progenitor orbit that was passed to the streamdf initialization is the track at zero angle separation ; this routine computes an actual progenitor position that gives the desired track given the parameters of the streamdf
261
47
27,773
def _determine_nTrackIterations ( self , nTrackIterations ) : if not nTrackIterations is None : self . nTrackIterations = nTrackIterations return None if numpy . fabs ( self . misalignment ( quantity = False ) ) < 1. / 180. * numpy . pi : self . nTrackIterations = 0 elif numpy . fabs ( self . misalignment ( quantity = False ) ) >= 1. / 180. * numpy . pi and numpy . fabs ( self . misalignment ( quantity = False ) ) < 3. / 180. * numpy . pi : self . nTrackIterations = 1 elif numpy . fabs ( self . misalignment ( quantity = False ) ) >= 3. / 180. * numpy . pi : self . nTrackIterations = 2 return None
Determine a good value for nTrackIterations based on the misalignment between stream and orbit ; just based on some rough experience for now
188
30
27,774
def _interpolate_stream_track_aA ( self ) : if hasattr ( self , '_interpolatedObsTrackAA' ) : return None #Already did this #Calculate 1D meanOmega on a fine grid in angle and interpolate if not hasattr ( self , '_interpolatedThetasTrack' ) : self . _interpolate_stream_track ( ) dmOs = numpy . array ( [ self . meanOmega ( da , oned = True , use_physical = False ) for da in self . _interpolatedThetasTrack ] ) self . _interpTrackAAdmeanOmegaOneD = interpolate . InterpolatedUnivariateSpline ( self . _interpolatedThetasTrack , dmOs , k = 3 ) #Build the interpolated AA self . _interpolatedObsTrackAA = numpy . empty ( ( len ( self . _interpolatedThetasTrack ) , 6 ) ) for ii in range ( len ( self . _interpolatedThetasTrack ) ) : self . _interpolatedObsTrackAA [ ii , : 3 ] = self . _progenitor_Omega + dmOs [ ii ] * self . _dsigomeanProgDirection * self . _sigMeanSign self . _interpolatedObsTrackAA [ ii , 3 : ] = self . _progenitor_angle + self . _interpolatedThetasTrack [ ii ] * self . _dsigomeanProgDirection * self . _sigMeanSign self . _interpolatedObsTrackAA [ ii , 3 : ] = numpy . mod ( self . _interpolatedObsTrackAA [ ii , 3 : ] , 2. * numpy . pi ) return None
Build interpolations of the stream track in action - angle coordinates
394
12
27,775
def _find_closest_trackpoint ( self , R , vR , vT , z , vz , phi , interp = True , xy = False , usev = False ) : return self . find_closest_trackpoint ( R , vR , vT , z , vz , phi , interp = interp , xy = xy , usev = usev )
For backward compatibility
91
3
27,776
def _density_par ( self , dangle , tdisrupt = None ) : if tdisrupt is None : tdisrupt = self . _tdisrupt dOmin = dangle / tdisrupt # Normalize to 1 close to progenitor return 0.5 * ( 1. + special . erf ( ( self . _meandO - dOmin ) / numpy . sqrt ( 2. * self . _sortedSigOEig [ 2 ] ) ) )
The raw density as a function of parallel angle
106
9
27,777
def _sample_aAt ( self , n ) : #Sample frequency along largest eigenvalue using ARS dO1s = bovy_ars . bovy_ars ( [ 0. , 0. ] , [ True , False ] , [ self . _meandO - numpy . sqrt ( self . _sortedSigOEig [ 2 ] ) , self . _meandO + numpy . sqrt ( self . _sortedSigOEig [ 2 ] ) ] , _h_ars , _hp_ars , nsamples = n , hxparams = ( self . _meandO , self . _sortedSigOEig [ 2 ] ) , maxn = 100 ) dO1s = numpy . array ( dO1s ) * self . _sigMeanSign dO2s = numpy . random . normal ( size = n ) * numpy . sqrt ( self . _sortedSigOEig [ 1 ] ) dO3s = numpy . random . normal ( size = n ) * numpy . sqrt ( self . _sortedSigOEig [ 0 ] ) #Rotate into dOs in R,phi,z coordinates dO = numpy . vstack ( ( dO3s , dO2s , dO1s ) ) dO = numpy . dot ( self . _sigomatrixEig [ 1 ] [ : , self . _sigomatrixEigsortIndx ] , dO ) Om = dO + numpy . tile ( self . _progenitor_Omega . T , ( n , 1 ) ) . T #Also generate angles da = numpy . random . normal ( size = ( 3 , n ) ) * self . _sigangle #And a random time dt = self . sample_t ( n ) #Integrate the orbits relative to the progenitor da += dO * numpy . tile ( dt , ( 3 , 1 ) ) angle = da + numpy . tile ( self . _progenitor_angle . T , ( n , 1 ) ) . T return ( Om , angle , dt )
Sampling frequencies angles and times part of sampling
472
9
27,778
def _check_consistent_units ( self ) : if isinstance ( self . _pot , list ) : if self . _roSet and self . _pot [ 0 ] . _roSet : assert m . fabs ( self . _ro - self . _pot [ 0 ] . _ro ) < 10. ** - 10. , 'Physical conversion for the actionAngle object is not consistent with that of the Potential given to it' if self . _voSet and self . _pot [ 0 ] . _voSet : assert m . fabs ( self . _vo - self . _pot [ 0 ] . _vo ) < 10. ** - 10. , 'Physical conversion for the actionAngle object is not consistent with that of the Potential given to it' else : if self . _roSet and self . _pot . _roSet : assert m . fabs ( self . _ro - self . _pot . _ro ) < 10. ** - 10. , 'Physical conversion for the actionAngle object is not consistent with that of the Potential given to it' if self . _voSet and self . _pot . _voSet : assert m . fabs ( self . _vo - self . _pot . _vo ) < 10. ** - 10. , 'Physical conversion for the actionAngle object is not consistent with that of the Potential given to it' return None
Internal function to check that the set of units for this object is consistent with that for the potential
296
19
27,779
def _check_consistent_units_orbitInput ( self , orb ) : if self . _roSet and orb . _roSet : assert m . fabs ( self . _ro - orb . _ro ) < 10. ** - 10. , 'Physical conversion for the actionAngle object is not consistent with that of the Orbit given to it' if self . _voSet and orb . _voSet : assert m . fabs ( self . _vo - orb . _vo ) < 10. ** - 10. , 'Physical conversion for the actionAngle object is not consistent with that of the Orbit given to it' return None
Internal function to check that the set of units for this object is consistent with that for an input orbit
135
20
27,780
def check_inputs_not_arrays ( func ) : @ wraps ( func ) def func_wrapper ( self , R , z , phi , t ) : if ( hasattr ( R , '__len__' ) and len ( R ) > 1 ) or ( hasattr ( z , '__len__' ) and len ( z ) > 1 ) or ( hasattr ( phi , '__len__' ) and len ( phi ) > 1 ) or ( hasattr ( t , '__len__' ) and len ( t ) > 1 ) : raise TypeError ( 'Methods in SpiralArmsPotential do not accept array inputs. Please input scalars.' ) return func ( self , R , z , phi , t ) return func_wrapper
Decorator to check inputs and throw TypeError if any of the inputs are arrays . Methods potentially return with silent errors if inputs are not checked .
165
30
27,781
def _JRAxiIntegrand ( r , E , L , pot ) : return nu . sqrt ( 2. * ( E - potentialAxi ( r , pot ) ) - L ** 2. / r ** 2. )
The J_R integrand
49
6
27,782
def _rapRperiAxiEq ( R , E , L , pot ) : return E - potentialAxi ( R , pot ) - L ** 2. / 2. / R ** 2.
The vr = 0 equation that needs to be solved to find apo - and pericenter
44
20
27,783
def _rlfunc ( rl , lz , pot ) : thisvcirc = vcirc ( pot , rl , use_physical = False ) return rl * thisvcirc - lz
Function that gives rvc - lz
43
8
27,784
def _rlFindStart ( rl , lz , pot , lower = False ) : rtry = 2. * rl while ( 2. * lower - 1. ) * _rlfunc ( rtry , lz , pot ) > 0. : if lower : rtry /= 2. else : rtry *= 2. return rtry
find a starting interval for rl
75
7
27,785
def _check_roSet ( orb , kwargs , funcName ) : if not orb . _roSet and kwargs . get ( 'ro' , None ) is None : warnings . warn ( "Method %s(.) requires ro to be given at Orbit initialization or at method evaluation; using default ro which is %f kpc" % ( funcName , orb . _ro ) , galpyWarning )
Function to check whether ro is set because it s required for funcName
88
14
27,786
def _check_voSet ( orb , kwargs , funcName ) : if not orb . _voSet and kwargs . get ( 'vo' , None ) is None : warnings . warn ( "Method %s(.) requires vo to be given at Orbit initialization or at method evaluation; using default vo which is %f km/s" % ( funcName , orb . _vo ) , galpyWarning )
Function to check whether vo is set because it s required for funcName
89
14
27,787
def _radec ( self , * args , * * kwargs ) : lbd = self . _lbd ( * args , * * kwargs ) return coords . lb_to_radec ( lbd [ : , 0 ] , lbd [ : , 1 ] , degree = True , epoch = None )
Calculate ra and dec
70
6
27,788
def _pmrapmdec ( self , * args , * * kwargs ) : lbdvrpmllpmbb = self . _lbdvrpmllpmbb ( * args , * * kwargs ) return coords . pmllpmbb_to_pmrapmdec ( lbdvrpmllpmbb [ : , 4 ] , lbdvrpmllpmbb [ : , 5 ] , lbdvrpmllpmbb [ : , 0 ] , lbdvrpmllpmbb [ : , 1 ] , degree = True , epoch = None )
Calculate pmra and pmdec
130
8
27,789
def _lbd ( self , * args , * * kwargs ) : obs , ro , vo = self . _parse_radec_kwargs ( kwargs , dontpop = True ) X , Y , Z = self . _helioXYZ ( * args , * * kwargs ) bad_indx = ( X == 0. ) * ( Y == 0. ) * ( Z == 0. ) if True in bad_indx : X [ bad_indx ] += ro / 10000. return coords . XYZ_to_lbd ( X , Y , Z , degree = True )
Calculate l b and d
133
7
27,790
def _lbdvrpmllpmbb ( self , * args , * * kwargs ) : obs , ro , vo = self . _parse_radec_kwargs ( kwargs , dontpop = True ) X , Y , Z , vX , vY , vZ = self . _XYZvxvyvz ( * args , * * kwargs ) bad_indx = ( X == 0. ) * ( Y == 0. ) * ( Z == 0. ) if True in bad_indx : X [ bad_indx ] += ro / 10000. return coords . rectgal_to_sphergal ( X , Y , Z , vX , vY , vZ , degree = True )
Calculate l b d vr pmll pmbb
161
13
27,791
def _parse_plot_quantity ( self , quant , * * kwargs ) : # Cannot be using Quantity output kwargs [ 'quantity' ] = False if callable ( quant ) : return quant ( self . t ) def _eval ( q ) : # Check those that don't have the exact name of the function if q == 't' : return self . time ( self . t , * * kwargs ) elif q == 'Enorm' : return self . E ( self . t , * * kwargs ) / self . E ( 0. , * * kwargs ) elif q == 'Eznorm' : return self . Ez ( self . t , * * kwargs ) / self . Ez ( 0. , * * kwargs ) elif q == 'ERnorm' : return self . ER ( self . t , * * kwargs ) / self . ER ( 0. , * * kwargs ) elif q == 'Jacobinorm' : return self . Jacobi ( self . t , * * kwargs ) / self . Jacobi ( 0. , * * kwargs ) else : # these are exact, e.g., 'x' for self.x return self . __getattribute__ ( q ) ( self . t , * * kwargs ) try : return _eval ( quant ) except AttributeError : pass try : import numexpr except ImportError : #pragma: no cover raise ImportError ( 'Parsing the quantity to be plotted failed; if you are trying to plot an expression, please make sure to install numexpr first' ) # Figure out the variables in the expression to be computed to plot try : vars = numexpr . NumExpr ( quant ) . input_names except TypeError as err : raise TypeError ( 'Parsing the expression {} failed, with error message:\n"{}"' . format ( quant , err ) ) # Construct dictionary of necessary parameters vars_dict = { } for var in vars : vars_dict [ var ] = _eval ( var ) return numexpr . evaluate ( quant , local_dict = vars_dict )
Internal function to parse a quantity to be plotted based on input data
468
13
27,792
def _jmomentdensity ( self , R , z , n , m , o , nsigma = None , mc = True , nmc = 10000 , _returnmc = False , _vrs = None , _vts = None , _vzs = None , * * kwargs ) : if nsigma == None : nsigma = _NSIGMA sigmaR1 = self . _sr * numpy . exp ( ( self . _refr - R ) / self . _hsr ) sigmaz1 = self . _sz * numpy . exp ( ( self . _refr - R ) / self . _hsz ) thisvc = potential . vcirc ( self . _pot , R , use_physical = False ) #Use the asymmetric drift equation to estimate va gamma = numpy . sqrt ( 0.5 ) va = sigmaR1 ** 2. / 2. / thisvc * ( gamma ** 2. - 1. #Assume close to flat rotation curve, sigphi2/sigR2 =~ 0.5 + R * ( 1. / self . _hr + 2. / self . _hsr ) ) if math . fabs ( va ) > sigmaR1 : va = 0. #To avoid craziness near the center if mc : mvT = ( thisvc - va ) / gamma / sigmaR1 if _vrs is None : vrs = numpy . random . normal ( size = nmc ) else : vrs = _vrs if _vts is None : vts = numpy . random . normal ( size = nmc ) + mvT else : vts = _vts if _vzs is None : vzs = numpy . random . normal ( size = nmc ) else : vzs = _vzs Is = _jmomentsurfaceMCIntegrand ( vzs , vrs , vts , numpy . ones ( nmc ) * R , numpy . ones ( nmc ) * z , self , sigmaR1 , gamma , sigmaz1 , mvT , n , m , o ) if _returnmc : return ( numpy . mean ( Is ) * sigmaR1 ** 2. * gamma * sigmaz1 , vrs , vts , vzs ) else : return numpy . mean ( Is ) * sigmaR1 ** 2. * gamma * sigmaz1 else : #pragma: no cover because this is too slow; a warning is shown warnings . warn ( "Calculations using direct numerical integration using tplquad is not recommended and extremely slow; it has also not been carefully tested" , galpyWarning ) return integrate . tplquad ( _jmomentsurfaceIntegrand , 1. / gamma * ( thisvc - va ) / sigmaR1 - nsigma , 1. / gamma * ( thisvc - va ) / sigmaR1 + nsigma , lambda x : 0. , lambda x : nsigma , lambda x , y : 0. , lambda x , y : nsigma , ( R , z , self , sigmaR1 , gamma , sigmaz1 , n , m , o ) , * * kwargs ) [ 0 ] * sigmaR1 ** 2. * gamma * sigmaz1
Non - physical version of jmomentdensity otherwise the same
718
12
27,793
def impact_check_range ( func ) : @ wraps ( func ) def impact_wrapper ( * args , * * kwargs ) : if isinstance ( args [ 1 ] , numpy . ndarray ) : out = numpy . zeros ( len ( args [ 1 ] ) ) goodIndx = ( args [ 1 ] < args [ 0 ] . _deltaAngleTrackImpact ) * ( args [ 1 ] > 0. ) out [ goodIndx ] = func ( args [ 0 ] , args [ 1 ] [ goodIndx ] ) return out elif args [ 1 ] >= args [ 0 ] . _deltaAngleTrackImpact or args [ 1 ] <= 0. : return 0. else : return func ( * args , * * kwargs ) return impact_wrapper
Decorator to check the range of interpolated kicks
173
11
27,794
def _density_par ( self , dangle , tdisrupt = None , approx = True , higherorder = None ) : if higherorder is None : higherorder = self . _higherorderTrack if tdisrupt is None : tdisrupt = self . _tdisrupt if approx : return self . _density_par_approx ( dangle , tdisrupt , higherorder = higherorder ) else : return integrate . quad ( lambda T : numpy . sqrt ( self . _sortedSigOEig [ 2 ] ) * ( 1 + T * T ) / ( 1 - T * T ) ** 2. * self . pOparapar ( T / ( 1 - T * T ) * numpy . sqrt ( self . _sortedSigOEig [ 2 ] ) + self . _meandO , dangle ) , - 1. , 1. ) [ 0 ]
The raw density as a function of parallel angle approx = use faster method that directly integrates the spline representation
194
21
27,795
def _density_par_approx(self, dangle, tdisrupt, _return_array=False,
                        higherorder=False):
    """Compute the density as a function of parallel angle using the spline
    representation of the kick plus piecewise-linear approximations.

    Parameters
    ----------
    dangle : parallel angle at which to evaluate the density
    tdisrupt : disruption time, passed to self.minOpar
    _return_array : if True, return the per-breakpoint-interval contributions
        as an array instead of the summed density
    higherorder : if True, add the contribution of the non-linear spline
        terms via self._density_par_approx_higherorder

    Returns
    -------
    float density, or array of per-interval contributions when
    _return_array is True

    NOTE(review): self._kick_interpdOpar_poly appears to be a scipy
    PPoly-style object (knots in .x, coefficients in .c with c[-1] the
    constant and c[-2] the linear coefficient per interval) -- confirm
    against where it is built.
    """
    # First construct the breakpoints for this dangle
    Oparb = (dangle - self._kick_interpdOpar_poly.x) / self._timpact
    # Find the lower limit of the integration in the pw-linear-kick approx.
    lowbindx, lowx = self.minOpar(dangle, tdisrupt, _return_raw=True)
    # Convert the boolean/raw index into an integer interval index
    lowbindx = numpy.arange(len(Oparb) - 1)[lowbindx]
    # Replace the breakpoint below the lower limit by the actual lower limit
    Oparb[lowbindx + 1] = Oparb[lowbindx] - lowx
    # Now integrate between breakpoints: each interval contributes a
    # difference of Gaussian error functions, rescaled by the linear kick
    # coefficient (1 + c[-2]*timpact)
    out = (0.5 / (1. + self._kick_interpdOpar_poly.c[-2] * self._timpact)
           * (special.erf(1. / numpy.sqrt(2. * self._sortedSigOEig[2])
                          * (Oparb[:-1]
                             - self._kick_interpdOpar_poly.c[-1]
                             - self._meandO))
              - special.erf(1. / numpy.sqrt(2. * self._sortedSigOEig[2])
                            * (numpy.roll(Oparb, -1)[:-1]
                               - self._kick_interpdOpar_poly.c[-1]
                               - self._meandO
                               - self._kick_interpdOpar_poly.c[-2]
                               * self._timpact
                               * (Oparb - numpy.roll(Oparb, -1))[:-1]))))
    if _return_array:
        return out
    # Only the intervals down to the lower integration limit contribute
    out = numpy.sum(out[:lowbindx + 1])
    if higherorder:
        # Add higher-order contribution
        out += self._density_par_approx_higherorder(Oparb, lowbindx)
    # Add integration to infinity (region above the highest breakpoint,
    # where the kick vanishes and the integral is a plain Gaussian tail)
    out += 0.5 * (1. + special.erf((self._meandO - Oparb[0])
                                   / numpy.sqrt(2. * self._sortedSigOEig[2])))
    return out
Compute the density as a function of parallel angle using the spline representation + approximations
517
19
27,796
def _density_par_approx_higherorder(self, Oparb, lowbindx, _return_array=False,
                                    gaussxpolyInt=None):
    """Contribution from the non-linear spline terms of the kick to the
    approximate parallel-angle density.

    Parameters
    ----------
    Oparb : array of frequency breakpoints (as built in _density_par_approx)
    lowbindx : integer index of the interval holding the lower integration
        limit; only intervals [0, lowbindx] contribute to the summed result
    _return_array : if True, return the per-interval contributions
        (summed over polynomial orders) instead of the total
    gaussxpolyInt : pre-computed Gaussian x polynomial integrals to re-use;
        computed via _densMoments_approx_higherorder_gaussxpolyInts when None

    Returns
    -------
    float contribution, or per-interval array when _return_array is True
    """
    # The spline order k of the raw kick interpolation; for a linear spline
    # there are no higher-order terms
    spline_order = self._kick_interpdOpar_raw._eval_args[2]
    if spline_order == 1:
        return 0.
    # Form all Gaussian-like integrals necessary; ll/ul are the lower/upper
    # integration limits per interval in units of sqrt(2)*sigma
    ll = (numpy.roll(Oparb, -1)[:-1]
          - self._kick_interpdOpar_poly.c[-1]
          - self._meandO
          - self._kick_interpdOpar_poly.c[-2] * self._timpact
          * (Oparb - numpy.roll(Oparb, -1))[:-1]) \
        / numpy.sqrt(2. * self._sortedSigOEig[2])
    ul = (Oparb[:-1]
          - self._kick_interpdOpar_poly.c[-1]
          - self._meandO) / numpy.sqrt(2. * self._sortedSigOEig[2])
    if gaussxpolyInt is None:
        gaussxpolyInt = self._densMoments_approx_higherorder_gaussxpolyInts(
            ll, ul, spline_order + 1)
    # Now multiply in the coefficients for each order; 'powers' holds the
    # polynomial exponent for each row (descending, matching PPoly's c order)
    powers = numpy.tile(numpy.arange(spline_order + 1)[::-1], (len(ul), 1)).T
    gaussxpolyInt *= -0.5 * (-numpy.sqrt(2.))**(powers + 1) \
        * self._sortedSigOEig[2]**(0.5 * (powers - 1))
    # Re-tile without the linear and constant rows (handled elsewhere)
    powers = numpy.tile(numpy.arange(spline_order + 1)[::-1][:-2],
                        (len(ul), 1)).T
    for jj in range(spline_order + 1):
        # Binomial expansion of the shifted polynomial coefficients,
        # multiplied into the jj-th Gaussian-integral row in place
        gaussxpolyInt[-jj - 1] *= numpy.sum(
            self._kick_interpdOpar_poly.c[:-2] * self._timpact**powers
            / (1. + self._kick_interpdOpar_poly.c[-2]
               * self._timpact)**(powers + 1)
            * special.binom(powers, jj)
            * (Oparb[:-1] - self._kick_interpdOpar_poly.c[-1]
               - self._meandO)**(powers - jj),
            axis=0)
    if _return_array:
        return numpy.sum(gaussxpolyInt, axis=0)
    else:
        return numpy.sum(gaussxpolyInt[:, :lowbindx + 1])
Contribution from non - linear spline terms
651
9
27,797
def _densMoments_approx_higherorder_gaussxpolyInts ( self , ll , ul , maxj ) : gaussxpolyInt = numpy . zeros ( ( maxj , len ( ul ) ) ) gaussxpolyInt [ - 1 ] = 1. / numpy . sqrt ( numpy . pi ) * ( numpy . exp ( - ll ** 2. ) - numpy . exp ( - ul ** 2. ) ) gaussxpolyInt [ - 2 ] = 1. / numpy . sqrt ( numpy . pi ) * ( numpy . exp ( - ll ** 2. ) * ll - numpy . exp ( - ul ** 2. ) * ul ) + 0.5 * ( special . erf ( ul ) - special . erf ( ll ) ) for jj in range ( maxj - 2 ) : gaussxpolyInt [ - jj - 3 ] = 1. / numpy . sqrt ( numpy . pi ) * ( numpy . exp ( - ll ** 2. ) * ll ** ( jj + 2 ) - numpy . exp ( - ul ** 2. ) * ul ** ( jj + 2 ) ) + 0.5 * ( jj + 2 ) * gaussxpolyInt [ - jj - 1 ] return gaussxpolyInt
Calculate all of the polynomial x Gaussian integrals occurring in the higher-order terms recursively
291
25
27,798
def _meanOmega_num_approx(self, dangle, tdisrupt, higherorder=False):
    """Compute the numerator going into meanOmega using the direct
    integration of the spline representation of the kick.

    Parameters
    ----------
    dangle : parallel angle at which to evaluate
    tdisrupt : disruption time, passed to self.minOpar
    higherorder : if True, add the non-linear spline contribution via
        self._meanOmega_num_approx_higherorder

    Returns
    -------
    float: the frequency-weighted integral (not yet divided by the density)

    NOTE(review): the breakpoint construction mirrors _density_par_approx;
    self._kick_interpdOpar_poly is presumably a PPoly-style object with
    c[-1] the constant and c[-2] the linear coefficient -- confirm.
    """
    # First construct the breakpoints for this dangle
    Oparb = (dangle - self._kick_interpdOpar_poly.x) / self._timpact
    # Find the lower limit of the integration in the pw-linear-kick approx.
    lowbindx, lowx = self.minOpar(dangle, tdisrupt, _return_raw=True)
    lowbindx = numpy.arange(len(Oparb) - 1)[lowbindx]
    Oparb[lowbindx + 1] = Oparb[lowbindx] - lowx
    # Now integrate between breakpoints: per interval, an Opar-weighted
    # erf term (re-using the per-interval density array) plus the
    # corresponding difference of Gaussian boundary terms
    out = numpy.sum((
        (Oparb[:-1]
         + (self._meandO + self._kick_interpdOpar_poly.c[-1] - Oparb[:-1])
         / (1. + self._kick_interpdOpar_poly.c[-2] * self._timpact))
        * self._density_par_approx(dangle, tdisrupt, _return_array=True)
        + numpy.sqrt(self._sortedSigOEig[2] / 2. / numpy.pi)
        / (1. + self._kick_interpdOpar_poly.c[-2] * self._timpact)**2.
        * (numpy.exp(-0.5 * (Oparb[:-1]
                             - self._kick_interpdOpar_poly.c[-1]
                             - (1. + self._kick_interpdOpar_poly.c[-2]
                                * self._timpact)
                             * (Oparb - numpy.roll(Oparb, -1))[:-1]
                             - self._meandO)**2.
                     / self._sortedSigOEig[2])
           - numpy.exp(-0.5 * (Oparb[:-1]
                               - self._kick_interpdOpar_poly.c[-1]
                               - self._meandO)**2.
                       / self._sortedSigOEig[2])))[:lowbindx + 1])
    if higherorder:
        # Add higher-order contribution
        out += self._meanOmega_num_approx_higherorder(Oparb, lowbindx)
    # Add integration to infinity (above the top breakpoint the kick
    # vanishes: Gaussian boundary term plus meandO times the Gaussian tail)
    out += 0.5 * (numpy.sqrt(2. / numpy.pi)
                  * numpy.sqrt(self._sortedSigOEig[2])
                  * numpy.exp(-0.5 * (self._meandO - Oparb[0])**2.
                              / self._sortedSigOEig[2])
                  + self._meandO
                  * (1. + special.erf((self._meandO - Oparb[0])
                                      / numpy.sqrt(2.
                                                   * self._sortedSigOEig[2]))))
    return out
Compute the numerator going into meanOmega using the direct integration of the spline representation
704
19
27,799
def _interpolate_stream_track_kick_aA(self):
    """Build interpolations of the stream track near the impact in
    action-angle coordinates.

    Populates self._kick_interpTrackAAdmeanOmegaOneD (a cubic spline of the
    1D mean frequency offset vs. parallel angle) and
    self._kick_interpolatedObsTrackAA (an N x 6 array of frequencies in
    columns 0-2 and angles, wrapped to [0, 2pi), in columns 3-5).
    Idempotent: returns immediately when the track was already built.
    """
    if hasattr(self, '_kick_interpolatedObsTrackAA'):  # pragma: no cover
        return None  # Already did this
    # Calculate 1D meanOmega on a fine grid in angle and interpolate;
    # note the parent-class meanOmega is evaluated at the pre-impact
    # disruption time tdisrupt - timpact
    dmOs = numpy.array([
        super(streamgapdf, self).meanOmega(
            da, oned=True,
            tdisrupt=self._tdisrupt - self._timpact,
            use_physical=False)
        for da in self._kick_interpolatedThetasTrack])
    self._kick_interpTrackAAdmeanOmegaOneD = \
        interpolate.InterpolatedUnivariateSpline(
            self._kick_interpolatedThetasTrack, dmOs, k=3)
    # Build the interpolated AA
    self._kick_interpolatedObsTrackAA = numpy.empty(
        (len(self._kick_interpolatedThetasTrack), 6))
    for ii in range(len(self._kick_interpolatedThetasTrack)):
        # Frequencies: progenitor frequency plus the 1D offset projected
        # onto the mean-frequency direction (with the stream-side sign)
        self._kick_interpolatedObsTrackAA[ii, :3] = \
            self._progenitor_Omega \
            + dmOs[ii] * self._dsigomeanProgDirection \
            * self._gap_sigMeanSign
        # Angles: same projection of the parallel angle, shifted back by
        # timpact worth of progenitor motion
        self._kick_interpolatedObsTrackAA[ii, 3:] = \
            self._progenitor_angle \
            + self._kick_interpolatedThetasTrack[ii] \
            * self._dsigomeanProgDirection \
            * self._gap_sigMeanSign \
            - self._timpact * self._progenitor_Omega
        # Wrap angles into [0, 2pi)
        self._kick_interpolatedObsTrackAA[ii, 3:] = numpy.mod(
            self._kick_interpolatedObsTrackAA[ii, 3:], 2. * numpy.pi)
    return None
Build interpolations of the stream track near the impact in action - angle coordinates
440
15