idx int64 0 63k | question stringlengths 53 5.28k | target stringlengths 5 805 |
|---|---|---|
39,900 | def update ( self ) : self . _controller . update ( self . _id , wake_if_asleep = False ) data = self . _controller . get_charging_params ( self . _id ) if data : self . __battery_level = data [ 'battery_level' ] self . __charging_state = data [ 'charging_state' ] | Update the battery state . |
39,901 | def update ( self ) : self . _controller . update ( self . _id , wake_if_asleep = False ) data = self . _controller . get_charging_params ( self . _id ) if data : self . __battery_range = data [ 'battery_range' ] self . __est_battery_range = data [ 'est_battery_range' ] self . __ideal_battery_range = data [ 'ideal_battery_range' ] data = self . _controller . get_gui_params ( self . _id ) if data : if data [ 'gui_distance_units' ] == "mi/hr" : self . measurement = 'LENGTH_MILES' else : self . measurement = 'LENGTH_KILOMETERS' self . __rated = ( data [ 'gui_range_display' ] == "Rated" ) | Update the battery range state . |
39,902 | def assumed_state ( self ) : return ( not self . _controller . car_online [ self . id ( ) ] and ( self . _controller . _last_update_time [ self . id ( ) ] - self . _controller . _last_wake_up_time [ self . id ( ) ] > self . _controller . update_interval ) ) | Return whether the data is from an online vehicle . |
39,903 | def update ( self ) : self . _controller . update ( self . _id , wake_if_asleep = False ) data = self . _controller . get_drive_params ( self . _id ) if data : self . __longitude = data [ 'longitude' ] self . __latitude = data [ 'latitude' ] self . __heading = data [ 'heading' ] if self . __longitude and self . __latitude and self . __heading : self . __location = { 'longitude' : self . __longitude , 'latitude' : self . __latitude , 'heading' : self . __heading } | Update the current GPS location . |
39,904 | def update ( self ) : self . _controller . update ( self . _id , wake_if_asleep = False ) data = self . _controller . get_state_params ( self . _id ) if data : self . __odometer = data [ 'odometer' ] data = self . _controller . get_gui_params ( self . _id ) if data : if data [ 'gui_distance_units' ] == "mi/hr" : self . measurement = 'LENGTH_MILES' else : self . measurement = 'LENGTH_KILOMETERS' self . __rated = ( data [ 'gui_range_display' ] == "Rated" ) | Update the odometer and the unit of measurement based on GUI . |
39,905 | def lc ( ** kwargs ) : obs_params = [ ] syn_params , constraints = lc_syn ( syn = False , ** kwargs ) obs_params += syn_params . to_list ( ) return ParameterSet ( obs_params ) , constraints | Create parameters for a new light curve dataset . |
39,906 | def rv ( ** kwargs ) : obs_params = [ ] syn_params , constraints = rv_syn ( syn = False , ** kwargs ) obs_params += syn_params . to_list ( ) return ParameterSet ( obs_params ) , constraints | Create parameters for a new radial velocity dataset . |
39,907 | def lp ( ** kwargs ) : obs_params = [ ] syn_params , constraints = lp_syn ( syn = False , ** kwargs ) obs_params += syn_params . to_list ( ) return ParameterSet ( obs_params ) , constraints | Create parameters for a new line profile dataset . |
39,908 | def etv ( ** kwargs ) : if not conf . devel : raise NotImplementedError ( "'etv' dataset not officially supported for this release. Enable developer mode to test." ) obs_params = [ ] syn_params , constraints = etv_syn ( syn = False , ** kwargs ) obs_params += syn_params . to_list ( ) return ParameterSet ( obs_params ) , constraints | Create parameters for a new eclipse timing variations dataset . |
39,909 | def orb ( ** kwargs ) : obs_params = [ ] syn_params , constraints = orb_syn ( syn = False , ** kwargs ) obs_params += syn_params . to_list ( ) return ParameterSet ( obs_params ) , [ ] | Create parameters for a new orbit dataset . |
39,910 | def mesh ( ** kwargs ) : obs_params = [ ] syn_params , constraints = mesh_syn ( syn = False , ** kwargs ) obs_params += syn_params . to_list ( ) obs_params += [ SelectParameter ( qualifier = 'include_times' , value = kwargs . get ( 'include_times' , [ ] ) , description = 'append to times from the following datasets/time standards' , choices = [ 't0@system' ] ) ] obs_params += [ SelectParameter ( qualifier = 'columns' , value = kwargs . get ( 'columns' , [ ] ) , description = 'columns to expose within the mesh' , choices = _mesh_columns ) ] return ParameterSet ( obs_params ) , constraints | Create parameters for a new mesh dataset . |
39,911 | def team_required ( func = None ) : def decorator ( view_func ) : @ functools . wraps ( view_func , assigned = available_attrs ( view_func ) ) def _wrapped_view ( request , * args , ** kwargs ) : slug = kwargs . pop ( "slug" , None ) if not getattr ( request , "team" , None ) : request . team = get_object_or_404 ( Team , slug = slug ) return view_func ( request , * args , ** kwargs ) return _wrapped_view if func : return decorator ( func ) return decorator | Decorator for views that require a team be supplied wither via a slug in the url pattern or already set on the request object from the TeamMiddleware |
39,912 | def manager_required ( func = None ) : def decorator ( view_func ) : @ team_required @ login_required @ functools . wraps ( view_func , assigned = available_attrs ( view_func ) ) def _wrapped_view ( request , * args , ** kwargs ) : role = request . team . role_for ( request . user ) if role not in [ Membership . ROLE_MANAGER , Membership . ROLE_OWNER ] : raise Http404 ( ) return view_func ( request , * args , ** kwargs ) return _wrapped_view if func : return decorator ( func ) return decorator | Decorator for views that require not only a team but also that a user be logged in and be the manager or owner of that team . |
39,913 | def _create_syns ( b , needed_syns ) : params = [ ] for needed_syn in needed_syns : syn_kind = '{}' . format ( needed_syn [ 'kind' ] ) if 'times' in needed_syn . keys ( ) : needed_syn [ 'times' ] . sort ( ) needed_syn [ 'empty_arrays_len' ] = len ( needed_syn [ 'times' ] ) these_params , these_constraints = getattr ( _dataset , "{}_syn" . format ( syn_kind . lower ( ) ) ) ( ** needed_syn ) these_params = these_params . to_list ( ) for param in these_params : if param . _dataset is None : param . _dataset = needed_syn [ 'dataset' ] param . _kind = syn_kind param . _component = needed_syn [ 'component' ] param . _copy_for = { } params += these_params return ParameterSet ( params ) | Create empty synthetics |
39,914 | def _make_packet ( qualifier , value , time , info , ** kwargs ) : packet = { 'dataset' : kwargs . get ( 'dataset' , info [ 'dataset' ] ) , 'component' : kwargs . get ( 'component' , info [ 'component' ] ) , 'kind' : kwargs . get ( 'kind' , info [ 'kind' ] ) , 'qualifier' : qualifier , 'value' : value , 'time' : time } return packet | where kwargs overrides info |
39,915 | def run_checks ( self , b , compute , times = [ ] , ** kwargs ) : raise NotImplementedError ( "run_checks is not implemented by the {} backend" . format ( self . __class__ . __name__ ) ) | run any sanity checks to make sure the parameters and options are legal for this backend . If they are not raise an error here to avoid errors within the workers . |
39,916 | def _fill_syns ( self , new_syns , rpacketlists_per_worker ) : logger . debug ( "rank:{}/{} {}._fill_syns" . format ( mpi . myrank , mpi . nprocs , self . __class__ . __name__ ) ) for packetlists in rpacketlists_per_worker : for packetlist in packetlists : for packet in packetlist : new_syns . set_value ( ** packet ) return new_syns | rpacket_per_worker is a list of packetlists as returned by _run_chunk |
39,917 | def run ( self , b , compute , times = [ ] , ** kwargs ) : self . run_checks ( b , compute , times , ** kwargs ) logger . debug ( "rank:{}/{} calling get_packet_and_syns" . format ( mpi . myrank , mpi . nprocs ) ) packet , new_syns = self . get_packet_and_syns ( b , compute , times , ** kwargs ) if mpi . enabled : mpi . comm . bcast ( packet , root = 0 ) packet [ 'b' ] = b rpacketlists = self . _run_chunk ( ** packet ) rpacketlists_per_worker = mpi . comm . gather ( rpacketlists , root = 0 ) else : rpacketlists_per_worker = [ self . _run_chunk ( ** packet ) ] return self . _fill_syns ( new_syns , rpacketlists_per_worker ) | if within mpirun workers should call _run_worker instead of run |
39,918 | def compute_volume ( sizes , centers , normals ) : normal_mags = np . linalg . norm ( normals , axis = 1 ) return np . sum ( sizes * ( ( centers * normals ) . sum ( axis = 1 ) / normal_mags ) / 3 ) | Compute the numerical volume of a convex mesh |
39,919 | def spin_in_system ( incl , long_an ) : return np . dot ( Rz ( long_an ) , np . dot ( Rx ( - incl ) , np . array ( [ 0. , 0. , 1. ] ) ) ) | Spin in the plane of sky of a star given its inclination and long_an |
39,920 | def spin_in_roche ( s , etheta , elongan , eincl ) : m = euler_trans_matrix ( etheta , elongan , eincl ) return np . dot ( m . T , s ) | Transform the spin s of a star on Kerpler orbit with |
39,921 | def transform_position_array ( array , pos , euler , is_normal , reverse = False ) : trans_matrix = euler_trans_matrix ( * euler ) if not reverse : trans_matrix = trans_matrix . T if isinstance ( array , ComputedColumn ) : array = array . for_computations if is_normal : return np . dot ( np . asarray ( array ) , trans_matrix ) else : return np . dot ( np . asarray ( array ) , trans_matrix ) + np . asarray ( pos ) | Transform any Nx3 position array by translating to a center - of - mass pos and applying an euler transformation |
39,922 | def transform_velocity_array ( array , pos_array , vel , euler , rotation_vel = ( 0 , 0 , 0 ) ) : trans_matrix = euler_trans_matrix ( * euler ) rotation_component = np . cross ( rotation_vel , pos_array , axisb = 1 ) orbital_component = np . asarray ( vel ) if isinstance ( array , ComputedColumn ) : array = array . for_computations new_vel = np . dot ( np . asarray ( array ) + rotation_component , trans_matrix . T ) + orbital_component return new_vel | Transform any Nx3 velocity vector array by adding the center - of - mass vel accounting for solid - body rotation and applying an euler transformation . |
39,923 | def wd_grid_to_mesh_dict ( the_grid , q , F , d ) : triangles_9N = the_grid [ : , 4 : 13 ] new_mesh = { } new_mesh [ 'compute_at_vertices' ] = False new_mesh [ 'vertices' ] = triangles_9N . reshape ( - 1 , 3 ) new_mesh [ 'triangles' ] = np . arange ( len ( triangles_9N ) * 3 ) . reshape ( - 1 , 3 ) new_mesh [ 'centers' ] = the_grid [ : , 0 : 3 ] new_mesh [ 'tnormals' ] = the_grid [ : , 13 : 16 ] norms = np . linalg . norm ( new_mesh [ 'tnormals' ] , axis = 1 ) new_mesh [ 'normgrads' ] = norms new_mesh [ 'tnormals' ] = np . array ( [ tn / n for tn , n in zip ( new_mesh [ 'tnormals' ] , norms ) ] ) new_mesh [ 'areas' ] = the_grid [ : , 3 ] new_mesh [ 'tareas' ] = the_grid [ : , 18 ] new_mesh [ 'thetas' ] = the_grid [ : , 16 ] new_mesh [ 'phis' ] = the_grid [ : , 17 ] new_mesh [ 'volume' ] = compute_volume ( new_mesh [ 'areas' ] , new_mesh [ 'centers' ] , new_mesh [ 'tnormals' ] ) new_mesh [ 'velocities' ] = np . zeros ( new_mesh [ 'centers' ] . shape ) return new_mesh | Transform a wd - style mesh to the format used by PHOEBE . Namely this handles translating vertices from Nx9 to Nx3x3 and creating the array of indices for each triangle . |
39,924 | def weighted_averages ( self ) : if not self . mesh . _compute_at_vertices : return None vertices_per_triangle = self . vertices_per_triangle if vertices_per_triangle . ndim == 2 : return np . sum ( vertices_per_triangle * self . mesh . weights , axis = 1 ) elif vertices_per_triangle . ndim == 3 : return np . sum ( vertices_per_triangle * self . mesh . weights [ : , np . newaxis ] , axis = 1 ) else : raise NotImplementedError | Access to the weighted averages of the values at the vertices for each triangle based on the weights provided by mesh . weights . This is most useful for partially visible triangles when using libphoebe s eclipse detection that returns weights for each vertex . |
39,925 | def from_proto ( cls , proto_mesh , scale , pos , vel , euler , euler_vel , rotation_vel = ( 0 , 0 , 0 ) , component_com_x = None ) : mesh = cls ( ** proto_mesh . items ( ) ) mesh . _copy_roche_values ( ) mesh . _scale_mesh ( scale = scale ) mesh . _place_in_orbit ( pos , vel , euler , euler_vel , rotation_vel , component_com_x ) return mesh | Turn a ProtoMesh into a Mesh scaled and placed in orbit . |
39,926 | def update_columns ( self , field , value_dict , inds = None , computed_type = None ) : if not isinstance ( value_dict , dict ) : value_dict = { comp_no : value_dict for comp_no in self . _dict . keys ( ) } for comp , value in value_dict . items ( ) : if computed_type is not None : self . _dict [ comp ] . _observables [ field ] = ComputedColumn ( self . _dict [ comp ] , compute_at_vertices = computed_type == 'vertices' ) if inds : raise NotImplementedError ( 'setting column with indices not yet ported to new meshing' ) else : if comp in self . _dict . keys ( ) : self . _dict [ comp ] [ field ] = value else : meshes = self . _dict [ self . _parent_envelope_of [ comp ] ] meshes [ comp ] [ field ] = value | update the columns of all meshes |
39,927 | def _true_anomaly ( M , ecc , itermax = 8 ) : r Fn = M + ecc * sin ( M ) + ecc ** 2 / 2. * sin ( 2 * M ) for i in range ( itermax ) : F = Fn Mn = F - ecc * sin ( F ) Fn = F + ( M - Mn ) / ( 1. - ecc * cos ( F ) ) keep = F != 0 if hasattr ( F , '__iter__' ) : if np . all ( abs ( ( Fn - F ) [ keep ] / F [ keep ] ) < 0.00001 ) : break elif ( abs ( ( Fn - F ) / F ) < 0.00001 ) : break true_an = 2. * arctan ( sqrt ( ( 1. + ecc ) / ( 1. - ecc ) ) * tan ( Fn / 2. ) ) return Fn , true_an | r Calculation of true and eccentric anomaly in Kepler orbits . |
39,928 | def spot ( feature , ** kwargs ) : params = [ ] params += [ FloatParameter ( qualifier = "colat" , value = kwargs . get ( 'colat' , 0.0 ) , default_unit = u . deg , description = 'Colatitude of the center of the spot wrt spin axes' ) ] params += [ FloatParameter ( qualifier = "long" , value = kwargs . get ( 'long' , 0.0 ) , default_unit = u . deg , description = 'Longitude of the center of the spot wrt spin axis' ) ] params += [ FloatParameter ( qualifier = 'radius' , value = kwargs . get ( 'radius' , 1.0 ) , default_unit = u . deg , description = 'Angular radius of the spot' ) ] params += [ FloatParameter ( qualifier = 'relteff' , value = kwargs . get ( 'relteff' , 1.0 ) , limits = ( 0. , None ) , default_unit = u . dimensionless_unscaled , description = 'Temperature of the spot relative to the intrinsic temperature' ) ] constraints = [ ] return ParameterSet ( params ) , constraints | Create parameters for a spot |
39,929 | def pulsation ( feature , ** kwargs ) : if not conf . devel : raise NotImplementedError ( "'pulsation' feature not officially supported for this release. Enable developer mode to test." ) params = [ ] params += [ FloatParameter ( qualifier = 'radamp' , value = kwargs . get ( 'radamp' , 0.1 ) , default_unit = u . dimensionless_unscaled , description = 'Relative radial amplitude of the pulsations' ) ] params += [ FloatParameter ( qualifier = 'freq' , value = kwargs . get ( 'freq' , 1.0 ) , default_unit = u . d ** - 1 , description = 'Frequency of the pulsations' ) ] params += [ IntParameter ( qualifier = 'l' , value = kwargs . get ( 'l' , 0 ) , default_unit = u . dimensionless_unscaled , description = 'Non-radial degree l' ) ] params += [ IntParameter ( qualifier = 'm' , value = kwargs . get ( 'm' , 0 ) , default_unit = u . dimensionless_unscaled , description = 'Azimuthal order m' ) ] params += [ BoolParameter ( qualifier = 'teffext' , value = kwargs . get ( 'teffext' , False ) , description = 'Switch to denote whether Teffs are provided by the external code' ) ] constraints = [ ] return ParameterSet ( params ) , constraints | Create parameters for a pulsation feature |
39,930 | def load_lc_data ( filename , indep , dep , indweight = None , mzero = None , dir = './' ) : if '/' in filename : path , filename = os . path . split ( filename ) else : path = dir load_file = os . path . join ( path , filename ) lcdata = np . loadtxt ( load_file ) ncol = len ( lcdata [ 0 ] ) if dep == 'Magnitude' : mag = lcdata [ : , 1 ] flux = 10 ** ( - 0.4 * ( mag - mzero ) ) lcdata [ : , 1 ] = flux d = { } d [ 'phoebe_lc_time' ] = lcdata [ : , 0 ] d [ 'phoebe_lc_flux' ] = lcdata [ : , 1 ] if indweight == "Standard deviation" : if ncol >= 3 : d [ 'phoebe_lc_sigmalc' ] = lcdata [ : , 2 ] else : logger . warning ( 'A sigma column was mentioned in the .phoebe file but is not present in the lc data file' ) elif indweight == "Standard weight" : if ncol >= 3 : sigma = np . sqrt ( 1 / lcdata [ : , 2 ] ) d [ 'phoebe_lc_sigmalc' ] = sigma logger . warning ( 'Standard weight has been converted to Standard deviation.' ) else : logger . warning ( 'A sigma column was mentioned in the .phoebe file but is not present in the lc data file' ) else : logger . warning ( 'Phoebe 2 currently only supports standard deviaton' ) return d | load dictionary with lc data |
39,931 | def load_rv_data ( filename , indep , dep , indweight = None , dir = './' ) : if '/' in filename : path , filename = os . path . split ( filename ) else : path = dir load_file = os . path . join ( path , filename ) rvdata = np . loadtxt ( load_file ) d = { } d [ 'phoebe_rv_time' ] = rvdata [ : , 0 ] d [ 'phoebe_rv_vel' ] = rvdata [ : , 1 ] ncol = len ( rvdata [ 0 ] ) if indweight == "Standard deviation" : if ncol >= 3 : d [ 'phoebe_rv_sigmarv' ] = rvdata [ : , 2 ] else : logger . warning ( 'A sigma column is mentioned in the .phoebe file but is not present in the rv data file' ) elif indweight == "Standard weight" : if ncol >= 3 : sigma = np . sqrt ( 1 / rvdata [ : , 2 ] ) d [ 'phoebe_rv_sigmarv' ] = sigma logger . warning ( 'Standard weight has been converted to Standard deviation.' ) else : logger . warning ( 'Phoebe 2 currently only supports standard deviaton' ) return d | load dictionary with rv data . |
39,932 | def det_dataset ( eb , passband , dataid , comp , time ) : rvs = eb . get_dataset ( kind = 'rv' ) . datasets if dataid == 'Undefined' : dataid = None try : eb . _check_label ( dataid ) rv_dataset = eb . add_dataset ( 'rv' , dataset = dataid , times = [ ] ) except ValueError : logger . warning ( "The name picked for the radial velocity curve is forbidden. Applying default name instead" ) rv_dataset = eb . add_dataset ( 'rv' , times = [ ] ) return rv_dataset | Since RV datasets can have values related to each component in phoebe2 but are component specific in phoebe1 it is important to determine which dataset to add parameters to . This function will do that . eb - bundle rvpt - relevant phoebe 1 parameters |
39,933 | def rpole_to_pot_aligned ( rpole , sma , q , F , d , component = 1 ) : q = q_for_component ( q , component = component ) rpole_ = np . array ( [ 0 , 0 , rpole / sma ] ) logger . debug ( "libphoebe.roche_Omega(q={}, F={}, d={}, rpole={})" . format ( q , F , d , rpole_ ) ) pot = libphoebe . roche_Omega ( q , F , d , rpole_ ) return pot_for_component ( pot , component , reverse = True ) | Transforms polar radius to surface potential |
39,934 | def pot_to_rpole_aligned ( pot , sma , q , F , d , component = 1 ) : q = q_for_component ( q , component = component ) Phi = pot_for_component ( pot , q , component = component ) logger . debug ( "libphobe.roche_pole(q={}, F={}, d={}, Omega={})" . format ( q , F , d , pot ) ) return libphoebe . roche_pole ( q , F , d , pot ) * sma | Transforms surface potential to polar radius |
39,935 | def BinaryRoche ( r , D , q , F , Omega = 0.0 ) : r return 1.0 / sqrt ( r [ 0 ] * r [ 0 ] + r [ 1 ] * r [ 1 ] + r [ 2 ] * r [ 2 ] ) + q * ( 1.0 / sqrt ( ( r [ 0 ] - D ) * ( r [ 0 ] - D ) + r [ 1 ] * r [ 1 ] + r [ 2 ] * r [ 2 ] ) - r [ 0 ] / D / D ) + 0.5 * F * F * ( 1 + q ) * ( r [ 0 ] * r [ 0 ] + r [ 1 ] * r [ 1 ] ) - Omega | r Computes a value of the asynchronous eccentric Roche potential . |
39,936 | def dBinaryRochedx ( r , D , q , F ) : return - r [ 0 ] * ( r [ 0 ] * r [ 0 ] + r [ 1 ] * r [ 1 ] + r [ 2 ] * r [ 2 ] ) ** - 1.5 - q * ( r [ 0 ] - D ) * ( ( r [ 0 ] - D ) * ( r [ 0 ] - D ) + r [ 1 ] * r [ 1 ] + r [ 2 ] * r [ 2 ] ) ** - 1.5 - q / D / D + F * F * ( 1 + q ) * r [ 0 ] | Computes a derivative of the potential with respect to x . |
39,937 | def d2BinaryRochedx2 ( r , D , q , F ) : return ( 2 * r [ 0 ] * r [ 0 ] - r [ 1 ] * r [ 1 ] - r [ 2 ] * r [ 2 ] ) / ( r [ 0 ] * r [ 0 ] + r [ 1 ] * r [ 1 ] + r [ 2 ] * r [ 2 ] ) ** 2.5 + q * ( 2 * ( r [ 0 ] - D ) * ( r [ 0 ] - D ) - r [ 1 ] * r [ 1 ] - r [ 2 ] * r [ 2 ] ) / ( ( r [ 0 ] - D ) * ( r [ 0 ] - D ) + r [ 1 ] * r [ 1 ] + r [ 2 ] * r [ 2 ] ) ** 2.5 + F * F * ( 1 + q ) | Computes second derivative of the potential with respect to x . |
39,938 | def dBinaryRochedy ( r , D , q , F ) : return - r [ 1 ] * ( r [ 0 ] * r [ 0 ] + r [ 1 ] * r [ 1 ] + r [ 2 ] * r [ 2 ] ) ** - 1.5 - q * r [ 1 ] * ( ( r [ 0 ] - D ) * ( r [ 0 ] - D ) + r [ 1 ] * r [ 1 ] + r [ 2 ] * r [ 2 ] ) ** - 1.5 + F * F * ( 1 + q ) * r [ 1 ] | Computes a derivative of the potential with respect to y . |
39,939 | def dBinaryRochedz ( r , D , q , F ) : return - r [ 2 ] * ( r [ 0 ] * r [ 0 ] + r [ 1 ] * r [ 1 ] + r [ 2 ] * r [ 2 ] ) ** - 1.5 - q * r [ 2 ] * ( ( r [ 0 ] - D ) * ( r [ 0 ] - D ) + r [ 1 ] * r [ 1 ] + r [ 2 ] * r [ 2 ] ) ** - 1.5 | Computes a derivative of the potential with respect to z . |
39,940 | def dBinaryRochedr ( r , D , q , F ) : r2 = ( r * r ) . sum ( ) r1 = np . sqrt ( r2 ) return - 1. / r2 - q * ( r1 - r [ 0 ] / r1 * D ) / ( ( r [ 0 ] - D ) * ( r [ 0 ] - D ) + r [ 1 ] * r [ 1 ] + r [ 2 ] * r [ 2 ] ) ** 1.5 - q * r [ 0 ] / r1 / D / D + F * F * ( 1 + q ) * ( 1 - r [ 2 ] * r [ 2 ] / r2 ) * r1 | Computes a derivative of the potential with respect to r . |
39,941 | def send_if_client ( fctn ) : @ functools . wraps ( fctn ) def _send_if_client ( self , * args , ** kwargs ) : fctn_map = { 'set_quantity' : 'set_value' } b = self . _bundle if b is not None and b . is_client : method = fctn_map . get ( fctn . __name__ , fctn . __name__ ) d = self . _filter if hasattr ( self , '_filter' ) else { 'twig' : self . twig } d [ 'bundleid' ] = b . _bundleid for k , v in kwargs . items ( ) : d [ k ] = v logger . info ( 'emitting to {}({}) to server' . format ( method , d ) ) b . _socketio . emit ( method , d ) if fctn . __name__ in [ 'run_compute' , 'run_fitting' ] : self . _bundle . client_update ( ) else : return fctn ( self , * args , ** kwargs ) return _send_if_client | Intercept and send to the server if bundle is in client mode . |
39,942 | def update_if_client ( fctn ) : @ functools . wraps ( fctn ) def _update_if_client ( self , * args , ** kwargs ) : b = self . _bundle if b is None or not hasattr ( b , 'is_client' ) : return fctn ( self , * args , ** kwargs ) elif b . is_client and ( b . _last_client_update is None or ( datetime . now ( ) - b . _last_client_update ) . seconds > 1 ) : b . client_update ( ) return fctn ( self , * args , ** kwargs ) return _update_if_client | Intercept and check updates from server if bundle is in client mode . |
39,943 | def _uniqueid ( n = 30 ) : return '' . join ( random . SystemRandom ( ) . choice ( string . ascii_uppercase + string . ascii_lowercase ) for _ in range ( n ) ) | Return a unique string with length n . |
39,944 | def parameter_from_json ( dictionary , bundle = None ) : if isinstance ( dictionary , str ) : dictionary = json . loads ( dictionary , object_pairs_hook = parse_json ) classname = dictionary . pop ( 'Class' ) if classname not in _parameter_class_that_require_bundle : bundle = None cls = getattr ( sys . modules [ __name__ ] , classname ) return cls . _from_json ( bundle , ** dictionary ) | Load a single parameter from a JSON dictionary . |
39,945 | def get_meta ( self , ignore = [ 'uniqueid' ] ) : return OrderedDict ( [ ( k , getattr ( self , k ) ) for k in _meta_fields_twig if k not in ignore ] ) | Dictionary of all meta - tags with option to ignore certain tags . |
39,946 | def set_meta ( self , ** kwargs ) : for param in self . to_list ( ) : for k , v in kwargs . items ( ) : if getattr ( param , '_{}' . format ( k ) ) is None : setattr ( param , '_{}' . format ( k ) , v ) | Set the value of tags for all Parameters in this ParameterSet . |
39,947 | def tags ( self ) : ret = { } for typ in _meta_fields_twig : if typ in [ 'uniqueid' , 'plugin' , 'feedback' , 'fitting' , 'history' , 'twig' , 'uniquetwig' ] : continue k = '{}s' . format ( typ ) ret [ k ] = getattr ( self , k ) return ret | Returns a dictionary that lists all available tags that can be used for further filtering |
39,948 | def _set_meta ( self ) : for field in _meta_fields_twig : keys_for_this_field = set ( [ getattr ( p , field ) for p in self . to_list ( ) if getattr ( p , field ) is not None ] ) if len ( keys_for_this_field ) == 1 : setattr ( self , '_' + field , list ( keys_for_this_field ) [ 0 ] ) else : setattr ( self , '_' + field , None ) | set the meta fields of the ParameterSet as those that are shared by ALL parameters in the ParameterSet . For any fields that are not |
39,949 | def _uniquetwig ( self , twig , force_levels = [ 'qualifier' ] ) : for_this_param = self . filter ( twig , check_visible = False ) metawargs = { } if 'context' not in force_levels : force_levels . append ( 'context' ) for k in force_levels : metawargs [ k ] = getattr ( for_this_param , k ) prev_count = len ( self ) ps_for_this_search = [ ] for k in _meta_fields_twig : metawargs [ k ] = getattr ( for_this_param , k ) if getattr ( for_this_param , k ) is None : continue ps_for_this_search = self . filter ( check_visible = False , ** metawargs ) if len ( ps_for_this_search ) < prev_count and k not in force_levels : prev_count = len ( ps_for_this_search ) elif k not in force_levels : metawargs [ k ] = None if len ( ps_for_this_search ) != 1 : return twig for k in _meta_fields_twig : if metawargs [ k ] is None or k in force_levels : continue ps_for_this_search = self . filter ( check_visible = False , ** { ki : metawargs [ k ] for ki in _meta_fields_twig if ki != k } ) if len ( ps_for_this_search ) == 1 : metawargs [ k ] = None context = for_this_param . context if hasattr ( for_this_param , context ) : metawargs [ context ] = getattr ( for_this_param , context ) return "@" . join ( [ metawargs [ k ] for k in _meta_fields_twig if metawargs [ k ] is not None ] ) | get the least unique twig for the parameter given by twig that will return this single result for THIS PS |
39,950 | def _check_copy_for ( self ) : if not self . _bundle : return for param in self . to_list ( ) : if param . copy_for : attrs = [ k for k , v in param . copy_for . items ( ) if '*' in v ] ps = self . _bundle . filter ( check_visible = False , check_default = False , force_ps = True , ** param . copy_for ) metawargs = { k : v for k , v in ps . meta . items ( ) if v is not None and k in attrs } for k , v in param . meta . items ( ) : if k not in [ 'twig' , 'uniquetwig' ] and k not in attrs : metawargs [ k ] = v for attrvalues in itertools . product ( * ( getattr ( ps , '{}s' . format ( attr ) ) for attr in attrs ) ) : for attr , attrvalue in zip ( attrs , attrvalues ) : metawargs [ attr ] = attrvalue if not len ( self . _bundle . filter ( check_visible = False , ** metawargs ) ) : logger . debug ( "copying '{}' parameter for {}" . format ( param . qualifier , { attr : attrvalue for attr , attrvalue in zip ( attrs , attrvalues ) } ) ) newparam = param . copy ( ) for attr , attrvalue in zip ( attrs , attrvalues ) : setattr ( newparam , '_{}' . format ( attr ) , attrvalue ) newparam . _copy_for = False if newparam . _visible_if and newparam . _visible_if . lower ( ) == 'false' : newparam . _visible_if = None newparam . _bundle = self . _bundle self . _params . append ( newparam ) if hasattr ( param , 'is_constraint' ) and param . is_constraint : param_constraint = param . is_constraint copied_param = self . _bundle . get_parameter ( check_visible = False , check_default = False , ** metawargs ) if not copied_param . is_constraint : constraint_kwargs = param_constraint . constraint_kwargs . copy ( ) for attr , attrvalue in zip ( attrs , attrvalues ) : if attr in constraint_kwargs . keys ( ) : constraint_kwargs [ attr ] = attrvalue logger . debug ( "copying constraint '{}' parameter for {}" . format ( param_constraint . constraint_func , { attr : attrvalue for attr , attrvalue in zip ( attrs , attrvalues ) } ) ) self . add_constraint ( func = param_constraint . 
constraint_func , ** constraint_kwargs ) return | Check the value of copy_for and make appropriate copies . |
39,951 | def _check_label ( self , label ) : if not isinstance ( label , str ) : label = str ( label ) if label . lower ( ) in _forbidden_labels : raise ValueError ( "'{}' is forbidden to be used as a label" . format ( label ) ) if not re . match ( "^[a-z,A-Z,0-9,_]*$" , label ) : raise ValueError ( "label '{}' is forbidden - only alphabetic, numeric, and '_' characters are allowed in labels" . format ( label ) ) if len ( self . filter ( twig = label , check_visible = False ) ) : raise ValueError ( "label '{}' is already in use" . format ( label ) ) if label [ 0 ] in [ '_' ] : raise ValueError ( "first character of label is a forbidden character" ) | Check to see if the label is allowed . |
39,952 | def save ( self , filename , incl_uniqueid = False , compact = False ) : filename = os . path . expanduser ( filename ) f = open ( filename , 'w' ) if compact : if _can_ujson : ujson . dump ( self . to_json ( incl_uniqueid = incl_uniqueid ) , f , sort_keys = False , indent = 0 ) else : logger . warning ( "for faster compact saving, install ujson" ) json . dump ( self . to_json ( incl_uniqueid = incl_uniqueid ) , f , sort_keys = False , indent = 0 ) else : json . dump ( self . to_json ( incl_uniqueid = incl_uniqueid ) , f , sort_keys = True , indent = 0 , separators = ( ',' , ': ' ) ) f . close ( ) return filename | Save the ParameterSet to a JSON - formatted ASCII file |
39,953 | def set ( self , key , value , ** kwargs ) : twig = key method = None twigsplit = re . findall ( r"[\w']+" , twig ) if twigsplit [ 0 ] == 'value' : twig = '@' . join ( twigsplit [ 1 : ] ) method = 'set_value' elif twigsplit [ 0 ] == 'quantity' : twig = '@' . join ( twigsplit [ 1 : ] ) method = 'set_quantity' elif twigsplit [ 0 ] in [ 'unit' , 'default_unit' ] : twig = '@' . join ( twigsplit [ 1 : ] ) method = 'set_default_unit' elif twigsplit [ 0 ] in [ 'timederiv' ] : twig = '@' . join ( twigsplit [ 1 : ] ) method = 'set_timederiv' elif twigsplit [ 0 ] in [ 'description' ] : raise KeyError ( "cannot set {} of {}" . format ( twigsplit [ 0 ] , '@' . join ( twigsplit [ 1 : ] ) ) ) if self . _bundle is not None and self . _bundle . get_setting ( 'dict_set_all' ) . get_value ( ) and len ( self . filter ( twig = twig , ** kwargs ) ) > 1 : for param in self . filter ( twig = twig , ** kwargs ) . to_list ( ) : self . set ( '{}@{}' . format ( method , param . twig ) if method is not None else param . twig , value ) else : if method is None : return self . set_value ( twig = twig , value = value , ** kwargs ) else : param = self . get_parameter ( twig = twig , ** kwargs ) return getattr ( param , method ) ( value ) | Set the value of a Parameter in the ParameterSet . |
39,954 | def to_json ( self , incl_uniqueid = False ) : lst = [ ] for context in _contexts : lst += [ v . to_json ( incl_uniqueid = incl_uniqueid ) for v in self . filter ( context = context , check_visible = False , check_default = False ) . to_list ( ) ] return lst | Convert the ParameterSet to a json - compatible dictionary |
39,955 | def filter ( self , twig = None , check_visible = True , check_default = True , ** kwargs ) : kwargs [ 'check_visible' ] = check_visible kwargs [ 'check_default' ] = check_default kwargs [ 'force_ps' ] = True return self . filter_or_get ( twig = twig , ** kwargs ) | Filter the ParameterSet based on the meta - tags of the Parameters and return another ParameterSet . |
39,956 | def get ( self , twig = None , check_visible = True , check_default = True , ** kwargs ) : kwargs [ 'check_visible' ] = check_visible kwargs [ 'check_default' ] = check_default ps = self . filter ( twig = twig , ** kwargs ) if not len ( ps ) : raise ValueError ( "0 results found" ) elif len ( ps ) != 1 : raise ValueError ( "{} results found: {}" . format ( len ( ps ) , ps . twigs ) ) else : return ps . _params [ 0 ] | Get a single parameter from this ParameterSet . This works exactly the same as filter except there must be only a single result and the Parameter itself is returned instead of a ParameterSet . |
39,957 | def exclude ( self , twig = None , check_visible = True , ** kwargs ) : return self - self . filter ( twig = twig , check_visible = check_visible , ** kwargs ) | Exclude the results from this filter from the current ParameterSet . |
39,958 | def _remove_parameter ( self , param ) : self . _params = [ p for p in self . _params if p != param ] | Remove a Parameter from the ParameterSet |
39,959 | def show ( self , ** kwargs ) : kwargs . setdefault ( 'show' , True ) kwargs . setdefault ( 'save' , False ) kwargs . setdefault ( 'animate' , False ) return self . _show_or_save ( ** kwargs ) | Draw and show the plot . |
39,960 | def savefig ( self , filename , ** kwargs ) : filename = os . path . expanduser ( filename ) kwargs . setdefault ( 'show' , False ) kwargs . setdefault ( 'save' , filename ) kwargs . setdefault ( 'animate' , False ) return self . _show_or_save ( ** kwargs ) | Draw and save the plot . |
39,961 | def save ( self , filename , incl_uniqueid = False ) : filename = os . path . expanduser ( filename ) f = open ( filename , 'w' ) json . dump ( self . to_json ( incl_uniqueid = incl_uniqueid ) , f , sort_keys = True , indent = 0 , separators = ( ',' , ': ' ) ) f . close ( ) return filename | Save the Parameter to a JSON - formatted ASCII file |
39,962 | def get_meta ( self , ignore = [ 'uniqueid' ] ) : return OrderedDict ( [ ( k , getattr ( self , k ) ) for k in _meta_fields_all if k not in ignore ] ) | See all the meta - tag properties for this Parameter |
39,963 | def expand_value ( self , ** kwargs ) : selection = [ ] for v in self . get_value ( ** kwargs ) : for choice in self . choices : if v == choice and choice not in selection : selection . append ( choice ) elif fnmatch ( choice , v ) and choice not in selection : selection . append ( choice ) return selection | expand the selection to account for wildcards |
39,964 | def is_constraint ( self ) : if self . _is_constraint is None : return None return self . _bundle . get_parameter ( context = 'constraint' , uniqueid = self . _is_constraint ) | returns the expression of the constraint that constrains this parameter |
39,965 | def constrained_by ( self ) : if self . _is_constraint is None : return [ ] params = [ ] for var in self . is_constraint . _vars : param = var . get_parameter ( ) if param . uniqueid != self . uniqueid : params . append ( param ) return params | returns a list of parameters that constrain this parameter |
39,966 | def in_constraints ( self ) : expressions = [ ] for uniqueid in self . _in_constraints : expressions . append ( self . _bundle . get_parameter ( context = 'constraint' , uniqueid = uniqueid ) ) return expressions | returns a list of the expressions in which this parameter constrains another |
39,967 | def constrains ( self ) : params = [ ] for constraint in self . in_constraints : for var in constraint . _vars : param = var . get_parameter ( ) if param . component == constraint . component and param . qualifier == constraint . qualifier : if param not in params and param . uniqueid != self . uniqueid : params . append ( param ) return params | returns a list of parameters that are constrained by this parameter |
39,968 | def related_to ( self ) : params = [ ] constraints = self . in_constraints if self . is_constraint is not None : constraints . append ( self . is_constraint ) for constraint in constraints : for var in constraint . _vars : param = var . get_parameter ( ) if param not in params and param . uniqueid != self . uniqueid : params . append ( param ) return params | returns a list of all parameters that are either constrained by or constrain this parameter |
39,969 | def set_property ( self , ** kwargs ) : if not isinstance ( self . _value , nparray . ndarray ) : raise ValueError ( "value is not a nparray object" ) for property , value in kwargs . items ( ) : setattr ( self . _value , property , value ) | set any property of the underlying nparray object |
39,970 | def _recurse_find_trace ( self , structure , item , trace = [ ] ) : try : i = structure . index ( item ) except ValueError : for j , substructure in enumerate ( structure ) : if isinstance ( substructure , list ) : return self . _recurse_find_trace ( substructure , item , trace + [ j ] ) else : return trace + [ i ] | given a nested structure from _parse_repr and find the trace route to get to item |
39,971 | def get_stars ( self ) : l = re . findall ( r"[\w']+" , self . get_value ( ) ) return [ l [ i + 1 ] for i , s in enumerate ( l ) if s == 'star' ] | get component of all stars in order primary - > secondary |
39,972 | def get_orbits ( self ) : orbits = [ ] for star in self . get_stars ( ) : parent = self . get_parent_of ( star ) if parent not in orbits and parent != 'component' and parent is not None : orbits . append ( parent ) return orbits | get component of all orbits in order primary - > secondary |
39,973 | def get_stars_of_sibling_of ( self , component ) : sibling = self . get_sibling_of ( component ) if sibling in self . get_stars ( ) : return sibling stars = [ child for child in self . get_stars_of_children_of ( sibling ) ] return stars | same as get_sibling_of except if the sibling is an orbit this will recursively follow the tree to return a list of all stars under that orbit |
39,974 | def get_children_of ( self , component , kind = None ) : structure , trace , item = self . _get_structure_and_trace ( component ) item_kind , item_label = item . split ( ':' ) if isinstance ( kind , str ) : kind = [ kind ] if item_kind not in [ 'orbit' ] : return [ ] else : items = self . _get_by_trace ( structure , trace [ : - 1 ] + [ trace [ - 1 ] + 1 ] ) return [ str ( ch . split ( ':' ) [ - 1 ] ) for ch in items if isinstance ( ch , unicode ) and ( kind is None or ch . split ( ':' ) [ 0 ] in kind ) ] | get to component labels of the children of a given component |
39,975 | def get_primary_or_secondary ( self , component , return_ind = False ) : parent = self . get_parent_of ( component ) if parent is None : return 'primary' children_of_parent = self . get_children_of ( parent ) ind = children_of_parent . index ( component ) if ind > 1 : return None if return_ind : return ind + 1 return [ 'primary' , 'secondary' ] [ ind ] | return whether a given component is the primary or secondary component in its parent orbit |
39,976 | def vars ( self ) : if self . _var_params is None : self . _var_params = ParameterSet ( [ var . get_parameter ( ) for var in self . _vars ] ) return self . _var_params | return all the variables in a PS |
39,977 | def get_parameter ( self , twig = None , ** kwargs ) : kwargs [ 'twig' ] = twig kwargs [ 'check_default' ] = False kwargs [ 'check_visible' ] = False ps = self . vars . filter ( ** kwargs ) if len ( ps ) == 1 : return ps . get ( check_visible = False , check_default = False ) elif len ( ps ) > 1 : return ps . to_list ( ) [ 0 ] else : raise KeyError ( "no result found" ) | get a parameter from those that are variables |
39,978 | def flip_for ( self , twig = None , expression = None , ** kwargs ) : _orig_expression = self . get_value ( ) kwargs [ 'twig' ] = twig newly_constrained_var = self . _get_var ( ** kwargs ) newly_constrained_param = self . get_parameter ( ** kwargs ) check_kwargs = { k : v for k , v in newly_constrained_param . meta . items ( ) if k not in [ 'context' , 'twig' , 'uniquetwig' ] } check_kwargs [ 'context' ] = 'constraint' if len ( self . _bundle . filter ( ** check_kwargs ) ) : raise ValueError ( "'{}' is already constrained" . format ( newly_constrained_param . twig ) ) currently_constrained_var = self . _get_var ( qualifier = self . qualifier , component = self . component ) currently_constrained_param = currently_constrained_var . get_parameter ( ) import constraint if self . constraint_func is not None and hasattr ( constraint , self . constraint_func ) : if True : lhs , rhs , constraint_kwargs = getattr ( constraint , self . constraint_func ) ( self . _bundle , solve_for = newly_constrained_param , ** self . constraint_kwargs ) expression = rhs . _value if expression is not None : expression = expression elif _use_sympy : eq_safe = "({}) - {}" . format ( self . _value , currently_constrained_var . safe_label ) expression = sympy . solve ( eq_safe , newly_constrained_var . safe_label ) [ 0 ] else : raise ValueError ( "must either have sympy installed or provide a new expression" ) self . _qualifier = newly_constrained_param . qualifier self . _component = newly_constrained_param . component self . _kind = newly_constrained_param . kind self . _value = str ( expression ) self . _default_unit = None self . set_default_unit ( newly_constrained_param . default_unit ) self . _update_bookkeeping ( ) self . _add_history ( redo_func = 'flip_constraint' , redo_kwargs = { 'expression' : expression , 'uniqueid' : newly_constrained_param . 
uniqueid } , undo_func = 'flip_constraint' , undo_kwargs = { 'expression' : _orig_expression , 'uniqueid' : currently_constrained_param . uniqueid } ) | flip the constraint to solve for for any of the parameters in the expression |
39,979 | def get_unbound_form ( self ) : form_kwargs = self . get_form_kwargs ( ) bound_fields = [ "data" , "files" ] for field in bound_fields : form_kwargs . pop ( field , None ) return self . get_form_class ( ) ( ** form_kwargs ) | Overrides behavior of FormView . get_form_kwargs when method is POST or PUT |
39,980 | def get_form_success_data ( self , form ) : data = { "html" : render_to_string ( "pinax/teams/_invite_form.html" , { "invite_form" : self . get_unbound_form ( ) , "team" : self . team } , request = self . request ) } membership = self . membership if membership is not None : if membership . state == Membership . STATE_APPLIED : fragment_class = ".applicants" elif membership . state == Membership . STATE_INVITED : fragment_class = ".invitees" elif membership . state in ( Membership . STATE_AUTO_JOINED , Membership . STATE_ACCEPTED ) : fragment_class = { Membership . ROLE_OWNER : ".owners" , Membership . ROLE_MANAGER : ".managers" , Membership . ROLE_MEMBER : ".members" } [ membership . role ] data . update ( { "append-fragments" : { fragment_class : render_to_string ( "pinax/teams/_membership.html" , { "membership" : membership , "team" : self . team } , request = self . request ) } } ) return data | Allows customization of the JSON data returned when a valid form submission occurs . |
39,981 | def _value ( obj ) : if hasattr ( obj , 'value' ) : return obj . value elif isinstance ( obj , np . ndarray ) : return np . array ( [ o . value for o in obj ] ) elif hasattr ( obj , '__iter__' ) : return [ _value ( o ) for o in obj ] return obj | make sure to get a float |
39,982 | def _estimate_delta ( ntriangles , area ) : return np . sqrt ( 4. / np . sqrt ( 3 ) * float ( area ) / float ( ntriangles ) ) | estimate the value for delta to send to marching based on the number of requested triangles and the expected surface area of mesh |
39,983 | def handle_eclipses ( self , expose_horizon = False , ** kwargs ) : eclipse_method = kwargs . get ( 'eclipse_method' , self . eclipse_method ) horizon_method = kwargs . get ( 'horizon_method' , self . horizon_method ) possible_eclipse = False if len ( self . bodies ) == 1 : if self . bodies [ 0 ] . __class__ . __name__ == 'Envelope' : possible_eclipse = True else : possible_eclipse = False else : logger . debug ( "system.handle_eclipses: determining if eclipses are possible from instantaneous_maxr" ) max_rs = [ body . instantaneous_maxr for body in self . bodies ] for i in range ( 0 , len ( max_rs ) - 1 ) : for j in range ( i + 1 , len ( max_rs ) ) : proj_sep_sq = sum ( [ ( c [ i ] - c [ j ] ) ** 2 for c in ( self . xs , self . ys ) ] ) max_sep_ecl = max_rs [ i ] + max_rs [ j ] if proj_sep_sq < ( 1.05 * max_sep_ecl ) ** 2 : possible_eclipse = True break if not possible_eclipse and not expose_horizon and horizon_method == 'boolean' : eclipse_method = 'only_horizon' meshes = self . meshes meshes . update_columns ( 'visiblities' , 1.0 ) ecl_func = getattr ( eclipse , eclipse_method ) if eclipse_method == 'native' : ecl_kwargs = { 'horizon_method' : horizon_method } else : ecl_kwargs = { } logger . debug ( "system.handle_eclipses: possible_eclipse={}, expose_horizon={}, calling {} with kwargs {}" . format ( possible_eclipse , expose_horizon , eclipse_method , ecl_kwargs ) ) visibilities , weights , horizon = ecl_func ( meshes , self . xs , self . ys , self . zs , expose_horizon = expose_horizon , ** ecl_kwargs ) meshes . update_columns ( 'visibilities' , visibilities ) if weights is not None : meshes . update_columns ( 'weights' , weights ) return horizon | Detect the triangles at the horizon and the eclipsed triangles handling any necessary subdivision . |
39,984 | def compute_pblum_scale ( self , dataset , pblum , ** kwargs ) : logger . debug ( "{}.compute_pblum_scale(dataset={}, pblum={})" . format ( self . component , dataset , pblum ) ) abs_luminosity = self . compute_luminosity ( dataset , ** kwargs ) pblum_scale = pblum / abs_luminosity self . set_pblum_scale ( dataset , pblum_scale ) | intensities should already be computed for this dataset at the time for which pblum is being provided |
39,985 | def _populate_lp ( self , dataset , ** kwargs ) : logger . debug ( "{}._populate_lp(dataset={})" . format ( self . component , dataset ) ) profile_rest = kwargs . get ( 'profile_rest' , self . lp_profile_rest . get ( dataset ) ) rv_cols = self . _populate_rv ( dataset , ** kwargs ) cols = rv_cols return cols | Populate columns necessary for an LP dataset |
39,986 | def _populate_rv ( self , dataset , ** kwargs ) : logger . debug ( "{}._populate_rv(dataset={})" . format ( self . component , dataset ) ) lc_cols = self . _populate_lc ( dataset , ** kwargs ) rvs = - 1 * self . mesh . velocities . for_computations [ : , 2 ] if self . do_rv_grav : rv_grav = c . G * ( self . mass * u . solMass ) / ( self . instantaneous_rpole * u . solRad ) / c . c rv_grav = rv_grav . to ( 'solRad/d' ) . value rvs += rv_grav cols = lc_cols cols [ 'rvs' ] = rvs return cols | Populate columns necessary for an RV dataset |
39,987 | def from_bundle ( cls , b , feature ) : feature_ps = b . get_feature ( feature ) colat = feature_ps . get_value ( 'colat' , unit = u . rad ) longitude = feature_ps . get_value ( 'long' , unit = u . rad ) if len ( b . hierarchy . get_stars ( ) ) >= 2 : star_ps = b . get_component ( feature_ps . component ) orbit_ps = b . get_component ( b . hierarchy . get_parent_of ( feature_ps . component ) ) syncpar = star_ps . get_value ( 'syncpar' ) period = orbit_ps . get_value ( 'period' ) dlongdt = ( syncpar - 1 ) / period * 2 * np . pi else : star_ps = b . get_component ( feature_ps . component ) dlongdt = star_ps . get_value ( 'freq' , unit = u . rad / u . d ) longitude = np . pi / 2 radius = feature_ps . get_value ( 'radius' , unit = u . rad ) relteff = feature_ps . get_value ( 'relteff' , unit = u . dimensionless_unscaled ) t0 = b . get_value ( 't0' , context = 'system' , unit = u . d ) return cls ( colat , longitude , dlongdt , radius , relteff , t0 ) | Initialize a Spot feature from the bundle . |
39,988 | def pointing_vector ( self , s , time ) : t = time - self . _t0 longitude = self . _longitude + self . _dlongdt * t ex = np . array ( [ 1. , 0. , 0. ] ) ezp = s exp = ( ex - s * np . dot ( s , ex ) ) eyp = np . cross ( s , exp ) return np . sin ( self . _colat ) * np . cos ( longitude ) * exp + np . sin ( self . _colat ) * np . sin ( longitude ) * eyp + np . cos ( self . _colat ) * ezp | s is the spin vector in roche coordinates time is the current time |
39,989 | def process_teffs ( self , teffs , coords , s = np . array ( [ 0. , 0. , 1. ] ) , t = None ) : if t is None : t = self . _t0 pointing_vector = self . pointing_vector ( s , t ) logger . debug ( "spot.process_teffs at t={} with pointing_vector={} and radius={}" . format ( t , pointing_vector , self . _radius ) ) cos_alpha_coords = np . dot ( coords , pointing_vector ) / np . linalg . norm ( coords , axis = 1 ) cos_alpha_spot = np . cos ( self . _radius ) filter_ = cos_alpha_coords > cos_alpha_spot teffs [ filter_ ] = teffs [ filter_ ] * self . _relteff return teffs | Change the local effective temperatures for any values within the cone defined by the spot . Any teff within the spot will have its current value multiplied by the relteff factor |
39,990 | def from_bundle ( cls , b , feature ) : feature_ps = b . get_feature ( feature ) freq = feature_ps . get_value ( 'freq' , unit = u . d ** - 1 ) radamp = feature_ps . get_value ( 'radamp' , unit = u . dimensionless_unscaled ) l = feature_ps . get_value ( 'l' , unit = u . dimensionless_unscaled ) m = feature_ps . get_value ( 'm' , unit = u . dimensionless_unscaled ) teffext = feature_ps . get_value ( 'teffext' ) GM = c . G . to ( 'solRad3 / (solMass d2)' ) . value * b . get_value ( qualifier = 'mass' , component = feature_ps . component , context = 'component' , unit = u . solMass ) R = b . get_value ( qualifier = 'rpole' , component = feature_ps . component , section = 'component' , unit = u . solRad ) tanamp = GM / R ** 3 / freq ** 2 return cls ( radamp , freq , l , m , tanamp , teffext ) | Initialize a Pulsation feature from the bundle . |
39,991 | def from_server ( cls , bundleid , server = 'http://localhost:5555' , as_client = True ) : if not conf . devel : raise NotImplementedError ( "'from_server' not officially supported for this release. Enable developer mode to test." ) url = "{}/{}/json" . format ( server , bundleid ) logger . info ( "downloading bundle from {}" . format ( url ) ) r = requests . get ( url , timeout = 5 ) rjson = r . json ( ) b = cls ( rjson [ 'data' ] ) if as_client : b . as_client ( as_client , server = server , bundleid = rjson [ 'meta' ] [ 'bundleid' ] ) logger . warning ( "This bundle is in client mode, meaning all\ computations will be handled by the server at {}. To disable\ client mode, call as_client(False) or in the future pass\ as_client=False to from_server" . format ( server ) ) return b | Load a new bundle from a server . |
39,992 | def from_legacy ( cls , filename , add_compute_legacy = True , add_compute_phoebe = True ) : logger . warning ( "importing from legacy is experimental until official 1.0 release" ) filename = os . path . expanduser ( filename ) return io . load_legacy ( filename , add_compute_legacy , add_compute_phoebe ) | Load a bundle from a PHOEBE 1 . 0 Legacy file . |
39,993 | def default_triple ( cls , inner_as_primary = True , inner_as_overcontact = False , starA = 'starA' , starB = 'starB' , starC = 'starC' , inner = 'inner' , outer = 'outer' , contact_envelope = 'contact_envelope' ) : if not conf . devel : raise NotImplementedError ( "'default_triple' not officially supported for this release. Enable developer mode to test." ) b = cls ( ) b . add_star ( component = starA ) b . add_star ( component = starB ) b . add_star ( component = starC ) b . add_orbit ( component = inner , period = 1 ) b . add_orbit ( component = outer , period = 10 ) if inner_as_overcontact : b . add_envelope ( component = contact_envelope ) inner_hier = _hierarchy . binaryorbit ( b [ inner ] , b [ starA ] , b [ starB ] , b [ contact_envelope ] ) else : inner_hier = _hierarchy . binaryorbit ( b [ inner ] , b [ starA ] , b [ starB ] ) if inner_as_primary : hierstring = _hierarchy . binaryorbit ( b [ outer ] , inner_hier , b [ starC ] ) else : hierstring = _hierarchy . binaryorbit ( b [ outer ] , b [ starC ] , inner_hier ) b . set_hierarchy ( hierstring ) b . add_constraint ( constraint . keplers_third_law_hierarchical , outer , inner ) b . add_compute ( ) return b | Load a bundle with a default triple system . |
39,994 | def save ( self , filename , clear_history = True , incl_uniqueid = False , compact = False ) : if clear_history : self . remove_history ( ) return super ( Bundle , self ) . save ( filename , incl_uniqueid = incl_uniqueid , compact = compact ) | Save the bundle to a JSON - formatted ASCII file . |
39,995 | def _default_label ( self , base , context , ** kwargs ) : kwargs [ 'context' ] = context params = len ( getattr ( self . filter ( check_visible = False , ** kwargs ) , '{}s' . format ( context ) ) ) return "{}{:02d}" . format ( base , params + 1 ) | Determine a default label given a base label and the passed kwargs |
39,996 | def get_setting ( self , twig = None , ** kwargs ) : if twig is not None : kwargs [ 'twig' ] = twig kwargs [ 'context' ] = 'setting' return self . filter_or_get ( ** kwargs ) | Filter in the setting context |
39,997 | def get_history ( self , i = None ) : ps = self . filter ( context = 'history' ) if i is not None : return ps . to_list ( ) [ i ] else : return ps | Get a history item by index . |
39,998 | def remove_history ( self , i = None ) : if i is None : self . remove_parameters_all ( context = 'history' ) else : param = self . get_history ( i = i ) self . remove_parameter ( uniqueid = param . uniqueid ) | Remove a history item from the bundle by index . |
39,999 | def undo ( self , i = - 1 ) : _history_enabled = self . history_enabled param = self . get_history ( i ) self . disable_history ( ) param . undo ( ) self . remove_parameter ( uniqueid = param . uniqueid ) if _history_enabled : self . enable_history ( ) | Undo an item in the history logs |