idx
int64
0
252k
question
stringlengths
48
5.28k
target
stringlengths
5
1.23k
242,400
def get_step_output ( section , default_output ) : step_output : sos_targets = sos_targets ( ) if 'provides' in section . options and default_output : step_output = default_output output_idx = find_statement ( section , 'output' ) if output_idx is None : return step_output value = section . statements [ output_idx ] [ ...
determine step output
242,401
def analyze_section ( section : SoS_Step , default_input : Optional [ sos_targets ] = None , default_output : Optional [ sos_targets ] = None , context = { } , vars_and_output_only : bool = False ) -> Dict [ str , Any ] : new_env , old_env = env . request_new ( ) try : prepare_env ( section . global_def , section . glo...
Analyze a section for how it uses input and output what variables it uses and input output etc .
242,402
def extract_workflow ( notebook ) : if isinstance ( notebook , str ) : nb = nbformat . read ( notebook , nbformat . NO_CONVERT ) else : nb = notebook cells = nb . cells content = '#!/usr/bin/env sos-runner\n#fileformat=SOS1.0\n\n' for cell in cells : if cell . cell_type != "code" : continue if 'kernel' in cell . metada...
Extract workflow from a notebook file or notebook JSON instance
242,403
def vim_ipython_is_open ( ) : for w in vim . windows : if w . buffer . name is not None and w . buffer . name . endswith ( "vim-ipython" ) : return True return False
Helper function to let us know if the vim - ipython shell is currently visible
242,404
def with_subchannel ( f , * args ) : "conditionally monitor subchannel" def f_with_update ( * args ) : try : f ( * args ) if monitor_subchannel : update_subchannel_msgs ( force = True ) except AttributeError : echo ( "not connected to IPython" , 'Error' ) return f_with_update
conditionally monitor subchannel
242,405
def set_pid ( ) : global pid lines = '\n' . join ( [ 'import os' , '_pid = os.getpid()' ] ) try : msg_id = send ( lines , silent = True , user_variables = [ '_pid' ] ) except TypeError : msg_id = send ( lines , silent = True , user_expressions = { '_pid' : '_pid' } ) try : child = get_child_msg ( msg_id ) except Empty ...
Explicitly ask the ipython kernel for its pid
242,406
def fetchmany ( self , size = - 1 ) : self . _cursorLock . acquire ( ) if size < 0 or size > self . MAX_BLOCK_SIZE : size = self . arraysize recs = [ ] for i in range ( 0 , size ) : recs . append ( self . fetchone ( ) ) self . _cursorLock . release ( ) return recs
return a sequential set of records . This is guaranteed by locking so that no other thread can grab a few records while a set is fetched . this has the side effect that other threads may have to wait for an arbitrary long time for the completion of the current request .
242,407
def on_number ( self , ctx , value ) : value = int ( value ) if value . isdigit ( ) else float ( value ) top = self . _stack [ - 1 ] if top is JSONCompositeType . OBJECT : self . fire ( JSONStreamer . VALUE_EVENT , value ) elif top is JSONCompositeType . ARRAY : self . fire ( JSONStreamer . ELEMENT_EVENT , value ) else...
Since this is defined both integer and double callbacks are useless
242,408
def close ( self ) : self . fire ( JSONStreamer . DOC_END_EVENT ) self . _stack = None self . _parser . close ( )
Closes the streamer which causes a DOC_END_EVENT to be fired and frees up memory used by yajl
242,409
async def minizinc ( mzn , * dzn_files , args = None , data = None , include = None , stdlib_dir = None , globals_dir = None , declare_enums = True , allow_multiple_assignments = False , keep = False , output_vars = None , output_base = None , output_mode = 'dict' , solver = None , timeout = None , two_pass = None , pr...
Coroutine version of the pymzn . minizinc function .
242,410
def parse_value ( val , var_type = None , enums = None , rebase_arrays = True ) : if not var_type : p_val = _parse_array ( val , rebase_arrays = rebase_arrays , enums = enums , raise_errors = False ) if p_val is not None : return p_val return _parse_val ( val , enums = enums ) if 'dims' in var_type : return _parse_arra...
Parses the value of a dzn statement .
242,411
def dzn2dict ( dzn , * , rebase_arrays = True , types = None , return_enums = False ) : dzn_ext = os . path . splitext ( dzn ) [ 1 ] if dzn_ext == '.dzn' : with open ( dzn ) as f : dzn = f . read ( ) var_types = None if types : var_types = { } for var , var_type in types . items ( ) : if isinstance ( var_type , str ) :...
Parses a dzn string or file into a dictionary of variable assignments .
242,412
def args ( self , all_solutions = False , num_solutions = None , free_search = False , parallel = None , seed = None , ** kwargs ) : args = [ '-s' , '-v' ] if all_solutions : args . append ( '-a' ) if num_solutions is not None : args += [ '-n' , num_solutions ] if free_search : args . append ( '-f' ) if parallel is not...
Returns a list of command line arguments for the specified options .
242,413
def debug ( dbg = True ) : global _debug_handler if dbg and _debug_handler is None : _debug_handler = logging . StreamHandler ( ) logger . addHandler ( _debug_handler ) logger . setLevel ( logging . DEBUG ) elif not dbg and _debug_handler is not None : logger . removeHandler ( _debug_handler ) _debug_handler = None log...
Enables or disables debugging messages on the standard output .
242,414
def minizinc_version ( ) : vs = _run_minizinc ( '--version' ) m = re . findall ( 'version ([\d\.]+)' , vs ) if not m : raise RuntimeError ( 'MiniZinc executable not found.' ) return m [ 0 ]
Returns the version of the found minizinc executable .
242,415
def preprocess_model ( model , rewrap = True , ** kwargs ) : args = { ** kwargs , ** config . get ( 'args' , { } ) } model = _process_template ( model , ** args ) if rewrap : model = rewrap_model ( model ) return model
Preprocess a MiniZinc model .
242,416
def save_model ( model , output_file = None , output_dir = None , output_prefix = 'pymzn' ) : if output_file : mzn_file = output_file output_file = open ( output_file , 'w+' , buffering = 1 ) else : output_prefix += '_' output_file = NamedTemporaryFile ( dir = output_dir , prefix = output_prefix , suffix = '.mzn' , del...
Save a model to file .
242,417
def check_instance ( mzn , * dzn_files , data = None , include = None , stdlib_dir = None , globals_dir = None , allow_multiple_assignments = False ) : args = [ '--instance-check-only' ] args += _flattening_args ( mzn , * dzn_files , data = data , include = include , stdlib_dir = stdlib_dir , globals_dir = globals_dir ...
Perform instance checking on a model + data .
242,418
def check_model ( mzn , * , include = None , stdlib_dir = None , globals_dir = None ) : args = [ '--model-check-only' ] args += _flattening_args ( mzn , include = include , stdlib_dir = stdlib_dir , globals_dir = globals_dir ) input = mzn if args [ - 1 ] == '-' else None proc = _run_minizinc_proc ( * args , input = inp...
Perform model checking on a given model .
242,419
def minizinc ( mzn , * dzn_files , args = None , data = None , include = None , stdlib_dir = None , globals_dir = None , declare_enums = True , allow_multiple_assignments = False , keep = False , output_vars = None , output_base = None , output_mode = 'dict' , solver = None , timeout = None , two_pass = None , pre_pass...
Implements the workflow for solving a CSP problem encoded with MiniZinc .
242,420
def solve ( solver , mzn , * dzn_files , data = None , include = None , stdlib_dir = None , globals_dir = None , allow_multiple_assignments = False , output_mode = 'item' , timeout = None , two_pass = None , pre_passes = None , output_objective = False , non_unique = False , all_solutions = False , num_solutions = None...
Flatten and solve a MiniZinc program .
242,421
def mzn2fzn ( mzn , * dzn_files , args = None , data = None , include = None , stdlib_dir = None , globals_dir = None , declare_enums = True , allow_multiple_assignments = False , keep = False , output_vars = None , output_base = None , output_mode = 'item' , no_ozn = False ) : mzn_file , dzn_files , data_file , data ,...
Flatten a MiniZinc model into a FlatZinc one .
242,422
def print ( self , output_file = sys . stdout , log = False ) : for soln in iter ( self ) : print ( soln , file = output_file ) print ( SOLN_SEP , file = output_file ) if self . status == 0 : print ( SEARCH_COMPLETE , file = output_file ) if ( self . status == 1 and self . _n_solns == 0 ) or self . status >= 2 : print ...
Print the solution stream
242,423
def dump ( self ) : try : import yaml cfg_file = self . _cfg_file ( ) cfg_dir , __ = os . path . split ( cfg_file ) os . makedirs ( cfg_dir , exist_ok = True ) with open ( cfg_file , 'w' ) as f : yaml . dump ( self , f ) except ImportError as err : raise RuntimeError ( 'Cannot dump the configuration settings to file. Y...
Writes the changes to the configuration file .
242,424
def discretize ( value , factor = 100 ) : if not isinstance ( value , Iterable ) : return int ( value * factor ) int_value = list ( deepcopy ( value ) ) for i in range ( len ( int_value ) ) : int_value [ i ] = int ( int_value [ i ] * factor ) return int_value
Discretize the given value pre - multiplying by the given factor
242,425
def from_string ( source , args = None ) : if _has_jinja : logger . info ( 'Precompiling model with arguments: {}' . format ( args ) ) return _jenv . from_string ( source ) . render ( args or { } ) if args : raise RuntimeError ( _except_text ) return source
Renders a template string
242,426
def add_package ( package_name , package_path = 'templates' , encoding = 'utf-8' ) : if not _has_jinja : raise RuntimeError ( _except_text ) _jload . add_loader ( PackageLoader ( package_name , package_path , encoding ) )
Adds the given package to the template search routine
242,427
def add_path ( searchpath , encoding = 'utf-8' , followlinks = False ) : if not _has_jinja : raise RuntimeError ( _except_text ) _jload . add_loader ( FileSystemLoader ( searchpath , encoding , followlinks ) )
Adds the given path to the template search routine
242,428
def val2dzn ( val , wrap = True ) : if _is_value ( val ) : dzn_val = _dzn_val ( val ) elif _is_set ( val ) : dzn_val = _dzn_set ( val ) elif _is_array_type ( val ) : dzn_val = _dzn_array_nd ( val ) else : raise TypeError ( 'Unsupported serialization of value: {}' . format ( repr ( val ) ) ) if wrap : wrapper = _get_wra...
Serializes a value into its dzn representation .
242,429
def stmt2dzn ( name , val , declare = True , assign = True , wrap = True ) : if not ( declare or assign ) : raise ValueError ( 'The statement must be a declaration or an assignment.' ) stmt = [ ] if declare : val_type = _dzn_type ( val ) stmt . append ( '{}: ' . format ( val_type ) ) stmt . append ( name ) if assign : ...
Returns a dzn statement declaring and assigning the given value .
242,430
def stmt2enum ( enum_type , declare = True , assign = True , wrap = True ) : if not ( declare or assign ) : raise ValueError ( 'The statement must be a declaration or an assignment.' ) stmt = [ ] if declare : stmt . append ( 'enum ' ) stmt . append ( enum_type . __name__ ) if assign : val_str = [ ] for v in list ( enum...
Returns a dzn enum declaration from an enum type .
242,431
def dict2dzn ( objs , declare = False , assign = True , declare_enums = True , wrap = True , fout = None ) : log = logging . getLogger ( __name__ ) vals = [ ] enums = set ( ) for key , val in objs . items ( ) : if _is_enum ( val ) and declare_enums : enum_type = type ( val ) enum_name = enum_type . __name__ if enum_nam...
Serializes the objects in input and produces a list of strings encoding them into dzn format . Optionally the produced dzn is written on a file .
242,432
def async_or_eager ( self , ** options ) : args = options . pop ( "args" , None ) kwargs = options . pop ( "kwargs" , None ) possible_broker_errors = self . _get_possible_broker_errors_tuple ( ) try : return self . apply_async ( args , kwargs , ** options ) except possible_broker_errors : return self . apply ( args , k...
Attempt to call self . apply_async or if that fails because of a problem with the broker run the task eagerly and return an EagerResult .
242,433
def async_or_fail ( self , ** options ) : args = options . pop ( "args" , None ) kwargs = options . pop ( "kwargs" , None ) possible_broker_errors = self . _get_possible_broker_errors_tuple ( ) try : return self . apply_async ( args , kwargs , ** options ) except possible_broker_errors as e : return self . simulate_asy...
Attempt to call self . apply_async but if that fails with an exception we fake the task completion using the exception as the result . This allows us to seamlessly handle errors on task creation the same way we handle errors when a task runs simplifying the user interface .
242,434
def delay_or_eager ( self , * args , ** kwargs ) : return self . async_or_eager ( args = args , kwargs = kwargs )
Wrap async_or_eager with a convenience signiture like delay
242,435
def delay_or_run ( self , * args , ** kwargs ) : warnings . warn ( "delay_or_run is deprecated. Please use delay_or_eager" , DeprecationWarning , ) possible_broker_errors = self . _get_possible_broker_errors_tuple ( ) try : result = self . apply_async ( args = args , kwargs = kwargs ) required_fallback = False except p...
Attempt to call self . delay or if that fails call self . run .
242,436
def delay_or_fail ( self , * args , ** kwargs ) : return self . async_or_fail ( args = args , kwargs = kwargs )
Wrap async_or_fail with a convenience signiture like delay
242,437
def simulate_async_error ( self , exception ) : task_id = gen_unique_id ( ) async_result = self . AsyncResult ( task_id ) einfo = ExceptionInfo ( sys . exc_info ( ) ) async_result . backend . mark_as_failure ( task_id , exception , traceback = einfo . traceback , ) return async_result
Take this exception and store it as an error in the result backend . This unifies the handling of broker - connection errors with any other type of error that might occur when running the task . So the same error - handling that might retry a task or display a useful message to the user can also handle this error .
242,438
def calc_progress ( self , completed_count , total_count ) : self . logger . debug ( "calc_progress(%s, %s)" , completed_count , total_count , ) current_time = time . time ( ) time_spent = current_time - self . start_time self . logger . debug ( "Progress time spent: %s" , time_spent ) if total_count == 0 : return 100 ...
Calculate the percentage progress and estimated remaining time based on the current number of items completed of the total .
242,439
def update_progress ( self , completed_count , total_count , update_frequency = 1 , ) : if completed_count - self . _last_update_count < update_frequency : return progress_percent , time_remaining = self . calc_progress ( completed_count , total_count ) self . logger . debug ( "Updating progress: %s percent, %s remaini...
Update the task backend with both an estimated percentage complete and number of seconds remaining until completion .
242,440
def _validate_required_class_vars ( self ) : required_members = ( 'significant_kwargs' , 'herd_avoidance_timeout' , ) for required_member in required_members : if not hasattr ( self , required_member ) : raise Exception ( "JobtasticTask's must define a %s" % required_member )
Ensure that this subclass has defined all of the required class variables .
242,441
def on_success ( self , retval , task_id , args , kwargs ) : if self . request . is_eager : self . update_state ( task_id , SUCCESS , retval )
Store results in the backend even if we re always eager . This ensures the delay_or_run calls always at least have results .
242,442
def _get_cache ( self ) : if not self . _cache : self . _cache = get_cache ( self . app ) return self . _cache
Return the cache to use for thundering herd protection etc .
242,443
def _get_cache_key ( self , ** kwargs ) : m = md5 ( ) for significant_kwarg in self . significant_kwargs : key , to_str = significant_kwarg try : m . update ( to_str ( kwargs [ key ] ) ) except ( TypeError , UnicodeEncodeError ) : m . update ( to_str ( kwargs [ key ] ) . encode ( 'utf-8' ) ) if hasattr ( self , 'cache_...
Take this task s configured significant_kwargs and build a hash that all equivalent task calls will match .
242,444
def get_cache ( app ) : jobtastic_cache_setting = app . conf . get ( 'JOBTASTIC_CACHE' ) if isinstance ( jobtastic_cache_setting , BaseCache ) : return jobtastic_cache_setting if 'Django' in CACHES : if jobtastic_cache_setting : try : return WrappedCache ( get_django_cache ( jobtastic_cache_setting ) ) except InvalidCa...
Attempt to find a valid cache from the Celery configuration
242,445
def select ( * args ) : def select_columns ( df , args ) : columns = [ column . _name for column in args ] if df . _grouped_on : for col in df . _grouped_on [ : : - 1 ] : if col not in columns : columns . insert ( 0 , col ) return columns return lambda df : df [ select_columns ( df , args ) ]
Select specific columns from DataFrame .
242,446
def arrange ( * args ) : names = [ column . _name for column in args ] def f ( df ) : sortby_df = df >> mutate ( * args ) index = sortby_df . sort_values ( [ str ( arg ) for arg in args ] ) . index return df . loc [ index ] return f
Sort DataFrame by the input column arguments .
242,447
def rename ( ** kwargs ) : def rename_columns ( df ) : column_assignments = { old_name_later . _name : new_name for new_name , old_name_later in kwargs . items ( ) } return df . rename ( columns = column_assignments ) return rename_columns
Rename one or more columns leaving other columns unchanged
242,448
def transmute ( * args , ** kwargs ) : mutate_dateframe_fn = mutate ( * args , ** dict ( kwargs ) ) column_names_args = [ str ( arg ) for arg in args ] column_names_kwargs = [ name for name , _ in _dict_to_possibly_ordered_tuples ( kwargs ) ] column_names = column_names_args + column_names_kwargs return lambda df : mut...
Similar to select but allows mutation in column definitions .
242,449
def get_join_cols ( by_entry ) : left_cols = [ ] right_cols = [ ] for col in by_entry : if isinstance ( col , str ) : left_cols . append ( col ) right_cols . append ( col ) else : left_cols . append ( col [ 0 ] ) right_cols . append ( col [ 1 ] ) return left_cols , right_cols
helper function used for joins builds left and right join list for join function
242,450
def mutating_join ( * args , ** kwargs ) : left = args [ 0 ] right = args [ 1 ] if 'by' in kwargs : left_cols , right_cols = get_join_cols ( kwargs [ 'by' ] ) else : left_cols , right_cols = None , None if 'suffixes' in kwargs : dsuffixes = kwargs [ 'suffixes' ] else : dsuffixes = ( '_x' , '_y' ) if left . _grouped_on ...
generic function for mutating dplyr - style joins
242,451
def _chart_support ( self , name , data , caller , ** kwargs ) : "template chart support function" id = 'chart-%s' % next ( self . id ) name = self . _chart_class_name ( name ) options = dict ( self . environment . options ) options . update ( name = name , id = id ) if jinja2 . __version__ >= '2.9' : kwargs = dict ( (...
template chart support function
242,452
def load_library ( self ) : "loads configuration options" try : filename = self . environment . get_template ( 'chartkick.json' ) . filename except TemplateNotFound : return { } else : options = Options ( ) options . load ( filename ) return options
loads configuration options
242,453
def js ( ) : "returns home directory of js" return os . path . join ( os . path . dirname ( os . path . abspath ( __file__ ) ) , 'js' )
returns home directory of js
242,454
def parse_options ( source ) : options = { } tokens = [ t . strip ( ) for t in source . split ( '=' ) ] name = tokens [ 0 ] for token in tokens [ 1 : - 1 ] : value , next_name = token . rsplit ( ' ' , 1 ) options [ name . strip ( ) ] = value name = next_name options [ name . strip ( ) ] = tokens [ - 1 ] . strip ( ) ret...
parses chart tag options
242,455
def copy ( self ) : return RigidTransform ( np . copy ( self . rotation ) , np . copy ( self . translation ) , self . from_frame , self . to_frame )
Returns a copy of the RigidTransform .
242,456
def _check_valid_rotation ( self , rotation ) : if not isinstance ( rotation , np . ndarray ) or not np . issubdtype ( rotation . dtype , np . number ) : raise ValueError ( 'Rotation must be specified as numeric numpy array' ) if len ( rotation . shape ) != 2 or rotation . shape [ 0 ] != 3 or rotation . shape [ 1 ] != ...
Checks that the given rotation matrix is valid .
242,457
def _check_valid_translation ( self , translation ) : if not isinstance ( translation , np . ndarray ) or not np . issubdtype ( translation . dtype , np . number ) : raise ValueError ( 'Translation must be specified as numeric numpy array' ) t = translation . squeeze ( ) if len ( t . shape ) != 1 or t . shape [ 0 ] != ...
Checks that the translation vector is valid .
242,458
def interpolate_with ( self , other_tf , t ) : if t < 0 or t > 1 : raise ValueError ( 'Must interpolate between 0 and 1' ) interp_translation = ( 1.0 - t ) * self . translation + t * other_tf . translation interp_rotation = transformations . quaternion_slerp ( self . quaternion , other_tf . quaternion , t ) interp_tf =...
Interpolate with another rigid transformation .
242,459
def linear_trajectory_to ( self , target_tf , traj_len ) : if traj_len < 0 : raise ValueError ( 'Traj len must at least 0' ) delta_t = 1.0 / ( traj_len + 1 ) t = 0.0 traj = [ ] while t < 1.0 : traj . append ( self . interpolate_with ( target_tf , t ) ) t += delta_t traj . append ( target_tf ) return traj
Creates a trajectory of poses linearly interpolated from this tf to a target tf .
242,460
def apply ( self , points ) : if not isinstance ( points , BagOfPoints ) : raise ValueError ( 'Rigid transformations can only be applied to bags of points' ) if points . dim != 3 : raise ValueError ( 'Rigid transformations can only be applied to 3-dimensional points' ) if points . frame != self . _from_frame : raise Va...
Applies the rigid transformation to a set of 3D objects .
242,461
def dot ( self , other_tf ) : if other_tf . to_frame != self . from_frame : raise ValueError ( 'To frame of right hand side ({0}) must match from frame of left hand side ({1})' . format ( other_tf . to_frame , self . from_frame ) ) pose_tf = self . matrix . dot ( other_tf . matrix ) rotation , translation = RigidTransf...
Compose this rigid transform with another .
242,462
def inverse ( self ) : inv_rotation = self . rotation . T inv_translation = np . dot ( - self . rotation . T , self . translation ) return RigidTransform ( inv_rotation , inv_translation , from_frame = self . _to_frame , to_frame = self . _from_frame )
Take the inverse of the rigid transform .
242,463
def save ( self , filename ) : file_root , file_ext = os . path . splitext ( filename ) if file_ext . lower ( ) != TF_EXTENSION : raise ValueError ( 'Extension %s not supported for RigidTransform. Must be stored with extension %s' % ( file_ext , TF_EXTENSION ) ) f = open ( filename , 'w' ) f . write ( '%s\n' % ( self ....
Save the RigidTransform to a file .
242,464
def as_frames ( self , from_frame , to_frame = 'world' ) : return RigidTransform ( self . rotation , self . translation , from_frame , to_frame )
Return a shallow copy of this rigid transform with just the frames changed .
242,465
def rotation_from_quaternion ( q_wxyz ) : q_xyzw = np . array ( [ q_wxyz [ 1 ] , q_wxyz [ 2 ] , q_wxyz [ 3 ] , q_wxyz [ 0 ] ] ) R = transformations . quaternion_matrix ( q_xyzw ) [ : 3 , : 3 ] return R
Convert quaternion array to rotation matrix .
242,466
def quaternion_from_axis_angle ( v ) : theta = np . linalg . norm ( v ) if theta > 0 : v = v / np . linalg . norm ( v ) ax , ay , az = v qx = ax * np . sin ( 0.5 * theta ) qy = ay * np . sin ( 0.5 * theta ) qz = az * np . sin ( 0.5 * theta ) qw = np . cos ( 0.5 * theta ) q = np . array ( [ qw , qx , qy , qz ] ) return ...
Convert axis - angle representation to a quaternion vector .
242,467
def transform_from_dual_quaternion ( dq , from_frame = 'unassigned' , to_frame = 'world' ) : quaternion = dq . qr translation = 2 * dq . qd [ 1 : ] return RigidTransform ( rotation = quaternion , translation = translation , from_frame = from_frame , to_frame = to_frame )
Create a RigidTransform from a DualQuaternion .
242,468
def rotation_and_translation_from_matrix ( matrix ) : if not isinstance ( matrix , np . ndarray ) or matrix . shape [ 0 ] != 4 or matrix . shape [ 1 ] != 4 : raise ValueError ( 'Matrix must be specified as a 4x4 ndarray' ) rotation = matrix [ : 3 , : 3 ] translation = matrix [ : 3 , 3 ] return rotation , translation
Helper to convert 4x4 matrix to rotation matrix and translation vector .
242,469
def rotation_from_axis_and_origin ( axis , origin , angle , to_frame = 'world' ) : axis_hat = np . array ( [ [ 0 , - axis [ 2 ] , axis [ 1 ] ] , [ axis [ 2 ] , 0 , - axis [ 0 ] ] , [ - axis [ 1 ] , axis [ 0 ] , 0 ] ] ) R = RigidTransform ( np . eye ( 3 ) + np . sin ( angle ) * axis_hat + ( 1 - np . cos ( angle ) ) * ax...
Returns a rotation matrix around some arbitrary axis about the point origin using Rodrigues Formula
242,470
def x_axis_rotation ( theta ) : R = np . array ( [ [ 1 , 0 , 0 , ] , [ 0 , np . cos ( theta ) , - np . sin ( theta ) ] , [ 0 , np . sin ( theta ) , np . cos ( theta ) ] ] ) return R
Generates a 3x3 rotation matrix for a rotation of angle theta about the x axis .
242,471
def y_axis_rotation ( theta ) : R = np . array ( [ [ np . cos ( theta ) , 0 , np . sin ( theta ) ] , [ 0 , 1 , 0 ] , [ - np . sin ( theta ) , 0 , np . cos ( theta ) ] ] ) return R
Generates a 3x3 rotation matrix for a rotation of angle theta about the y axis .
242,472
def z_axis_rotation ( theta ) : R = np . array ( [ [ np . cos ( theta ) , - np . sin ( theta ) , 0 ] , [ np . sin ( theta ) , np . cos ( theta ) , 0 ] , [ 0 , 0 , 1 ] ] ) return R
Generates a 3x3 rotation matrix for a rotation of angle theta about the z axis .
242,473
def random_rotation ( ) : rand_seed = np . random . rand ( 3 , 3 ) U , S , V = np . linalg . svd ( rand_seed ) return U
Generates a random 3x3 rotation matrix with SVD .
242,474
def rotation_from_axes ( x_axis , y_axis , z_axis ) : return np . hstack ( ( x_axis [ : , np . newaxis ] , y_axis [ : , np . newaxis ] , z_axis [ : , np . newaxis ] ) )
Convert specification of axis in target frame to a rotation matrix from source to target frame .
242,475
def interpolate ( T0 , T1 , t ) : if T0 . to_frame != T1 . to_frame : raise ValueError ( 'Cannot interpolate between 2 transforms with different to frames! Got T1 {0} and T2 {1}' . format ( T0 . to_frame , T1 . to_frame ) ) dq0 = T0 . dual_quaternion dq1 = T1 . dual_quaternion dqt = DualQuaternion . interpolate ( dq0 ,...
Return an interpolation of two RigidTransforms .
242,476
def load ( filename ) : file_root , file_ext = os . path . splitext ( filename ) if file_ext . lower ( ) != TF_EXTENSION : raise ValueError ( 'Extension %s not supported for RigidTransform. Can only load extension %s' % ( file_ext , TF_EXTENSION ) ) f = open ( filename , 'r' ) lines = list ( f ) from_frame = lines [ 0 ...
Load a RigidTransform from a file .
242,477
def dot ( self , other_tf ) : if other_tf . to_frame != self . from_frame : raise ValueError ( 'To frame of right hand side ({0}) must match from frame of left hand side ({1})' . format ( other_tf . to_frame , self . from_frame ) ) if not isinstance ( other_tf , RigidTransform ) : raise ValueError ( 'Can only compose w...
Compose this simliarity transform with another .
242,478
def inverse ( self ) : inv_rot = np . linalg . inv ( self . rotation ) inv_scale = 1.0 / self . scale inv_trans = - inv_scale * inv_rot . dot ( self . translation ) return SimilarityTransform ( inv_rot , inv_trans , inv_scale , from_frame = self . _to_frame , to_frame = self . _from_frame )
Take the inverse of the similarity transform .
242,479
def save ( self , filename ) : file_root , file_ext = os . path . splitext ( filename ) if file_ext == '.npy' : np . save ( filename , self . _data ) elif file_ext == '.npz' : np . savez_compressed ( filename , self . _data ) else : raise ValueError ( 'Extension %s not supported for point saves.' % ( file_ext ) )
Saves the collection to a file .
242,480
def load_data ( filename ) : file_root , file_ext = os . path . splitext ( filename ) data = None if file_ext == '.npy' : data = np . load ( filename ) elif file_ext == '.npz' : data = np . load ( filename ) [ 'arr_0' ] else : raise ValueError ( 'Extension %s not supported for point reads' % ( file_ext ) ) return data
Loads data from a file .
242,481
def open ( filename , frame = 'unspecified' ) : data = BagOfPoints . load_data ( filename ) return Point ( data , frame )
Create a Point from data saved in a file .
242,482
def _check_valid_data ( self , data ) : if len ( data . shape ) == 2 and data . shape [ 1 ] != 1 : raise ValueError ( 'Can only initialize Direction from a single Nx1 array' ) if np . abs ( np . linalg . norm ( data ) - 1.0 ) > 1e-4 : raise ValueError ( 'Direction data must have norm=1.0' )
Checks that the incoming data is a Nx1 ndarray .
242,483
def orthogonal_basis ( self ) : if self . dim == 3 : x_arr = np . array ( [ - self . data [ 1 ] , self . data [ 0 ] , 0 ] ) if np . linalg . norm ( x_arr ) == 0 : x_arr = np . array ( [ self . data [ 2 ] , 0 , 0 ] ) x_arr = x_arr / np . linalg . norm ( x_arr ) y_arr = np . cross ( self . data , x_arr ) return Direction...
Return an orthogonal basis to this direction .
242,484
def open ( filename , frame = 'unspecified' ) : data = BagOfPoints . load_data ( filename ) return Direction ( data , frame )
Create a Direction from data saved in a file .
242,485
def split_points ( self , point_cloud ) : if not isinstance ( point_cloud , PointCloud ) : raise ValueError ( 'Can only split point clouds' ) above_plane = point_cloud . _data - np . tile ( self . _x0 . data , [ 1 , point_cloud . num_points ] ) . T . dot ( self . _n ) > 0 above_plane = point_cloud . z_coords > 0 & abov...
Split a point cloud into two along this plane .
242,486
def mean ( self ) : mean_point_data = np . mean ( self . _data , axis = 1 ) return Point ( mean_point_data , self . _frame )
Returns the average point in the cloud .
242,487
def subsample ( self , rate , random = False ) : if type ( rate ) != int and rate < 1 : raise ValueError ( 'Can only subsample with strictly positive integer rate' ) indices = np . arange ( self . num_points ) if random : np . random . shuffle ( indices ) subsample_inds = indices [ : : rate ] subsampled_data = self . _...
Returns a subsampled version of the PointCloud .
242,488
def box_mask ( self , box ) : if not isinstance ( box , Box ) : raise ValueError ( 'Must provide Box object' ) if box . frame != self . frame : raise ValueError ( 'Box must be in same frame as PointCloud' ) all_points = self . data . T cond1 = np . all ( box . min_pt <= all_points , axis = 1 ) cond2 = np . all ( all_po...
Return a PointCloud containing only points within the given Box .
242,489
def best_fit_plane ( self ) : X = np . c_ [ self . x_coords , self . y_coords , np . ones ( self . num_points ) ] y = self . z_coords A = X . T . dot ( X ) b = X . T . dot ( y ) w = np . linalg . inv ( A ) . dot ( b ) n = np . array ( [ w [ 0 ] , w [ 1 ] , - 1 ] ) n = n / np . linalg . norm ( n ) n = Direction ( n , se...
Fits a plane to the point cloud using least squares .
242,490
def remove_zero_points ( self ) : points_of_interest = np . where ( self . z_coords != 0.0 ) [ 0 ] self . _data = self . data [ : , points_of_interest ]
Removes points with a zero in the z - axis .
242,491
def remove_infinite_points ( self ) : points_of_interest = np . where ( np . all ( np . isfinite ( self . data ) , axis = 0 ) ) [ 0 ] self . _data = self . data [ : , points_of_interest ]
Removes infinite points .
242,492
def open ( filename , frame = 'unspecified' ) : data = BagOfPoints . load_data ( filename ) return PointCloud ( data , frame )
Create a PointCloud from data saved in a file .
242,493
def subsample ( self , rate ) : if type ( rate ) != int and rate < 1 : raise ValueError ( 'Can only subsample with strictly positive integer rate' ) subsample_inds = np . arange ( self . num_points ) [ : : rate ] subsampled_data = self . _data [ : , subsample_inds ] return NormalCloud ( subsampled_data , self . _frame ...
Returns a subsampled version of the NormalCloud .
242,494
def remove_zero_normals ( self ) : points_of_interest = np . where ( np . linalg . norm ( self . _data , axis = 0 ) != 0.0 ) [ 0 ] self . _data = self . _data [ : , points_of_interest ]
Removes normal vectors with a zero magnitude .
242,495
def remove_nan_normals ( self ) : points_of_interest = np . where ( np . isfinite ( np . linalg . norm ( self . _data , axis = 0 ) ) ) [ 0 ] self . _data = self . _data [ : , points_of_interest ]
Removes normal vectors with nan magnitude .
242,496
def open ( filename , frame = 'unspecified' ) : data = BagOfPoints . load_data ( filename ) return NormalCloud ( data , frame )
Create a NormalCloud from data saved in a file .
242,497
def open ( filename , frame = 'unspecified' ) : data = BagOfPoints . load_data ( filename ) return ImageCoords ( data , frame )
Create an ImageCoords from data saved in a file .
242,498
def open ( filename , frame = 'unspecified' ) : data = BagOfPoints . load_data ( filename ) return RgbCloud ( data , frame )
Create a RgbCloud from data saved in a file .
242,499
def remove_zero_points ( self ) : points_of_interest = np . where ( ( np . linalg . norm ( self . point_cloud . data , axis = 0 ) != 0.0 ) & ( np . linalg . norm ( self . normal_cloud . data , axis = 0 ) != 0.0 ) & ( np . isfinite ( self . normal_cloud . data [ 0 , : ] ) ) ) [ 0 ] self . point_cloud . _data = self . po...
Remove all elements where the norms and points are zero .