idx int64 0 251k | question stringlengths 53 3.53k | target stringlengths 5 1.23k | len_question int64 20 893 | len_target int64 3 238 |
|---|---|---|---|---|
224,200 | def set_line_str ( self , line_str ) : if not self . from_string : raise ValueError ( "can't set line_str for LogEvent created from " "system.profile documents." ) if line_str != self . _line_str : self . _line_str = line_str . rstrip ( ) self . _reset ( ) | Set line_str . | 79 | 5 |
224,201 | def get_line_str ( self ) : if self . from_string : return ' ' . join ( [ s for s in [ self . merge_marker_str , self . _datetime_str , self . _line_str ] if s ] ) else : return ' ' . join ( [ s for s in [ self . _datetime_str , self . _line_str ] if s ] ) | Return line_str depending on source logfile or system . profile . | 90 | 14 |
224,202 | def _match_datetime_pattern ( self , tokens ) : # first check: less than 4 tokens can't be ctime assume_iso8601_format = len ( tokens ) < 4 # check for ctime-pre-2.4 or ctime format if not assume_iso8601_format : weekday , month , day , time = tokens [ : 4 ] if ( len ( tokens ) < 4 or ( weekday not in self . weekdays ) or ( month not in self . months ) or not day . isdigit ( ) ) : assume_iso8601_format = True if assume_iso8601_format : # sanity check, because the dateutil parser could interpret # any numbers as a valid date if not re . match ( r'\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}.\d{3}' , tokens [ 0 ] ) : return None # convinced that this is a ISO-8601 format, the dateutil parser # will do the rest dt = dateutil . parser . parse ( tokens [ 0 ] ) self . _datetime_format = "iso8601-utc" if tokens [ 0 ] . endswith ( 'Z' ) else "iso8601-local" else : # assume current year unless self.year_rollover # is set (from LogFile) year = datetime . now ( ) . year dt = dateutil . parser . parse ( ' ' . join ( tokens [ : 4 ] ) , default = datetime ( year , 1 , 1 ) ) if dt . tzinfo is None : dt = dt . replace ( tzinfo = tzutc ( ) ) if self . _year_rollover and dt > self . _year_rollover : dt = dt . replace ( year = year - 1 ) self . _datetime_format = "ctime" if '.' in tokens [ 3 ] else "ctime-pre2.4" return dt | Match the datetime pattern at the beginning of the token list . | 443 | 13 |
224,203 | def _extract_operation_and_namespace ( self ) : split_tokens = self . split_tokens if not self . _datetime_nextpos : # force evaluation of thread to get access to datetime_offset and # to protect from changes due to line truncation. _ = self . thread if not self . _datetime_nextpos or ( len ( split_tokens ) <= self . _datetime_nextpos + 2 ) : return op = split_tokens [ self . _datetime_nextpos + 1 ] . lower ( ) if op == 'warning:' : # check if this log line got truncated if ( "warning: log line attempted" in self . _line_str and "over max size" in self . _line_str ) : self . _datetime_nextpos = split_tokens . index ( '...' ) op = split_tokens [ self . _datetime_nextpos + 1 ] else : # unknown warning, bail out return if op in self . log_operations : self . _operation = op self . _namespace = split_tokens [ self . _datetime_nextpos + 2 ] | Helper method to extract both operation and namespace from a logevent . | 261 | 14 |
224,204 | def _extract_counters ( self ) : # extract counters (if present) counters = [ 'nscanned' , 'nscannedObjects' , 'ntoreturn' , 'nreturned' , 'ninserted' , 'nupdated' , 'ndeleted' , 'r' , 'w' , 'numYields' , 'planSummary' , 'writeConflicts' , 'keyUpdates' ] # TODO: refactor mtools to use current counter names throughout # Transitionary hack: mapping of current names into prior equivalents counter_equiv = { 'docsExamined' : 'nscannedObjects' , 'keysExamined' : 'nscanned' , 'nDeleted' : 'ndeleted' , 'nInserted' : 'ninserted' , 'nMatched' : 'nreturned' , 'nModified' : 'nupdated' } counters . extend ( counter_equiv . keys ( ) ) split_tokens = self . split_tokens # trigger operation evaluation to get access to offset if self . operation : for t , token in enumerate ( split_tokens [ self . datetime_nextpos + 2 : ] ) : for counter in counters : if token . startswith ( '%s:' % counter ) : try : # Remap counter to standard name, if applicable counter = counter_equiv . get ( counter , counter ) vars ( self ) [ '_' + counter ] = int ( ( token . split ( ':' ) [ - 1 ] ) . replace ( ',' , '' ) ) except ValueError : # see if this is a pre-2.5.2 numYields with space # in between (e.g. "numYields: 2") # https://jira.mongodb.org/browse/SERVER-10101 if ( counter == 'numYields' and token . startswith ( 'numYields' ) ) : try : self . _numYields = int ( ( split_tokens [ t + 1 + self . datetime_nextpos + 2 ] ) . replace ( ',' , '' ) ) except ValueError : pass if ( counter == 'planSummary' and token . startswith ( 'planSummary' ) ) : try : self . _planSummary = split_tokens [ t + 1 + self . datetime_nextpos + 2 ] if self . _planSummary : if split_tokens [ t + 1 + self . datetime_nextpos + 3 ] != '{' : self . _actualPlanSummary = self . _planSummary else : self . _actualPlanSummary = '%s %s' % ( self . _planSummary , self . _find_pattern ( 'planSummary: %s' % self . 
_planSummary , actual = True ) ) except ValueError : pass # token not parsable, skip break | Extract counters like nscanned and nreturned from the logevent . | 642 | 17 |
224,205 | def parse_all ( self ) : tokens = self . split_tokens duration = self . duration datetime = self . datetime thread = self . thread operation = self . operation namespace = self . namespace pattern = self . pattern nscanned = self . nscanned nscannedObjects = self . nscannedObjects ntoreturn = self . ntoreturn nreturned = self . nreturned ninserted = self . ninserted ndeleted = self . ndeleted nupdated = self . nupdated numYields = self . numYields w = self . w r = self . r | Trigger extraction of all information . | 137 | 6 |
224,206 | def to_dict ( self , labels = None ) : output = { } if labels is None : labels = [ 'line_str' , 'split_tokens' , 'datetime' , 'operation' , 'thread' , 'namespace' , 'nscanned' , 'ntoreturn' , 'nreturned' , 'ninserted' , 'nupdated' , 'ndeleted' , 'duration' , 'r' , 'w' , 'numYields' ] for label in labels : value = getattr ( self , label , None ) if value is not None : output [ label ] = value return output | Convert LogEvent object to a dictionary . | 140 | 9 |
224,207 | def to_json ( self , labels = None ) : output = self . to_dict ( labels ) return json . dumps ( output , cls = DateTimeEncoder , ensure_ascii = False ) | Convert LogEvent object to valid JSON . | 45 | 9 |
224,208 | def addFilter ( self , filterclass ) : if filterclass not in self . filters : self . filters . append ( filterclass ) | Add a filter class to the parser . | 28 | 8 |
224,209 | def _outputLine ( self , logevent , length = None , human = False ) : # adapt timezone output if necessary if self . args [ 'timestamp_format' ] != 'none' : logevent . _reformat_timestamp ( self . args [ 'timestamp_format' ] , force = True ) if any ( self . args [ 'timezone' ] ) : if self . args [ 'timestamp_format' ] == 'none' : self . args [ 'timestamp_format' ] = logevent . datetime_format logevent . _reformat_timestamp ( self . args [ 'timestamp_format' ] , force = True ) if self . args [ 'json' ] : print ( logevent . to_json ( ) ) return line = logevent . line_str if length : if len ( line ) > length : line = ( line [ : int ( length / 2 - 2 ) ] + '...' + line [ int ( - length / 2 + 1 ) : ] ) if human : line = self . _changeMs ( line ) line = self . _formatNumbers ( line ) print ( line ) | Print the final line . | 253 | 5 |
224,210 | def _msToString ( self , ms ) : hr , ms = divmod ( ms , 3600000 ) mins , ms = divmod ( ms , 60000 ) secs , mill = divmod ( ms , 1000 ) return "%ihr %imin %isecs %ims" % ( hr , mins , secs , mill ) | Change milliseconds to hours min sec ms format . | 71 | 9 |
224,211 | def _changeMs ( self , line ) : # use the position of the last space instead try : last_space_pos = line . rindex ( ' ' ) except ValueError : return line else : end_str = line [ last_space_pos : ] new_string = line if end_str [ - 2 : ] == 'ms' and int ( end_str [ : - 2 ] ) >= 1000 : # isolate the number of milliseconds ms = int ( end_str [ : - 2 ] ) # create the new string with the beginning part of the # log with the new ms part added in new_string = ( line [ : last_space_pos ] + ' (' + self . _msToString ( ms ) + ')' + line [ last_space_pos : ] ) return new_string | Change the ms part in the string if needed . | 173 | 10 |
224,212 | def _formatNumbers ( self , line ) : # below thousands separator syntax only works for # python 2.7, skip for 2.6 if sys . version_info < ( 2 , 7 ) : return line last_index = 0 try : # find the index of the last } character last_index = ( line . rindex ( '}' ) + 1 ) end = line [ last_index : ] except ValueError : return line else : # split the string on numbers to isolate them splitted = re . split ( "(\d+)" , end ) for index , val in enumerate ( splitted ) : converted = 0 try : converted = int ( val ) # if it's not an int pass and don't change the string except ValueError : pass else : if converted > 1000 : splitted [ index ] = format ( converted , ",d" ) return line [ : last_index ] + ( "" ) . join ( splitted ) | Format the numbers so that there are commas inserted . | 200 | 11 |
224,213 | def _datetime_key_for_merge ( self , logevent ) : if not logevent : # if logfile end is reached, return max datetime to never # pick this line return datetime ( MAXYEAR , 12 , 31 , 23 , 59 , 59 , 999999 , tzutc ( ) ) # if no datetime present (line doesn't have one) return mindate # to pick this line immediately return logevent . datetime or datetime ( MINYEAR , 1 , 1 , 0 , 0 , 0 , 0 , tzutc ( ) ) | Helper method for ordering log lines correctly during merge . | 127 | 10 |
224,214 | def _merge_logfiles ( self ) : # open files, read first lines, extract first dates lines = [ next ( iter ( logfile ) , None ) for logfile in self . args [ 'logfile' ] ] # adjust lines by timezone for i in range ( len ( lines ) ) : if lines [ i ] and lines [ i ] . datetime : lines [ i ] . _datetime = ( lines [ i ] . datetime + timedelta ( hours = self . args [ 'timezone' ] [ i ] ) ) while any ( lines ) : min_line = min ( lines , key = self . _datetime_key_for_merge ) min_idx = lines . index ( min_line ) if self . args [ 'markers' ] [ min_idx ] : min_line . merge_marker_str = self . args [ 'markers' ] [ min_idx ] yield min_line # update lines array with a new line from the min_idx'th logfile lines [ min_idx ] = next ( iter ( self . args [ 'logfile' ] [ min_idx ] ) , None ) if lines [ min_idx ] and lines [ min_idx ] . datetime : lines [ min_idx ] . _datetime = ( lines [ min_idx ] . datetime + timedelta ( hours = self . args [ 'timezone' ] [ min_idx ] ) ) | Helper method to merge several files together by datetime . | 321 | 11 |
224,215 | def logfile_generator ( self ) : if not self . args [ 'exclude' ] : # ask all filters for a start_limit and fast-forward to the maximum start_limits = [ f . start_limit for f in self . filters if hasattr ( f , 'start_limit' ) ] if start_limits : for logfile in self . args [ 'logfile' ] : logfile . fast_forward ( max ( start_limits ) ) if len ( self . args [ 'logfile' ] ) > 1 : # merge log files by time for logevent in self . _merge_logfiles ( ) : yield logevent else : # only one file for logevent in self . args [ 'logfile' ] [ 0 ] : if self . args [ 'timezone' ] [ 0 ] != 0 and logevent . datetime : logevent . _datetime = ( logevent . datetime + timedelta ( hours = self . args [ 'timezone' ] [ 0 ] ) ) yield logevent | Yield each line of the file or the next line if several files . | 228 | 15 |
224,216 | def setup ( self ) : # get start and end of the mask and set a start_limit if not self . mask_source . start : raise SystemExit ( "Can't parse format of %s. Is this a log file or " "system.profile collection?" % self . mlogfilter . args [ 'mask' ] ) self . mask_half_td = timedelta ( seconds = self . mlogfilter . args [ 'mask_size' ] / 2 ) # load filter mask file logevent_list = list ( self . mask_source ) # define start and end of total mask self . mask_start = self . mask_source . start - self . mask_half_td self . mask_end = self . mask_source . end + self . mask_half_td # consider --mask-center if self . mlogfilter . args [ 'mask_center' ] in [ 'start' , 'both' ] : if logevent_list [ 0 ] . duration : self . mask_start -= timedelta ( milliseconds = logevent_list [ 0 ] . duration ) if self . mlogfilter . args [ 'mask_center' ] == 'start' : if logevent_list [ - 1 ] . duration : self . mask_end -= timedelta ( milliseconds = logevent_list [ - 1 ] . duration ) self . start_limit = self . mask_start # different center points if 'mask_center' in self . mlogfilter . args : if self . mlogfilter . args [ 'mask_center' ] in [ 'start' , 'both' ] : starts = ( [ ( le . datetime - timedelta ( milliseconds = le . duration ) ) if le . duration is not None else le . datetime for le in logevent_list if le . datetime ] ) if self . mlogfilter . args [ 'mask_center' ] in [ 'end' , 'both' ] : ends = [ le . datetime for le in logevent_list if le . datetime ] if self . mlogfilter . args [ 'mask_center' ] == 'start' : event_list = sorted ( starts ) elif self . mlogfilter . args [ 'mask_center' ] == 'end' : event_list = sorted ( ends ) elif self . mlogfilter . args [ 'mask_center' ] == 'both' : event_list = sorted ( zip ( starts , ends ) ) mask_list = [ ] if len ( event_list ) == 0 : return start_point = end_point = None for e in event_list : if start_point is None : start_point , end_point = self . 
_pad_event ( e ) continue next_start = ( e [ 0 ] if type ( e ) == tuple else e ) - self . mask_half_td if next_start <= end_point : end_point = ( ( e [ 1 ] if type ( e ) == tuple else e ) + self . mask_half_td ) else : mask_list . append ( ( start_point , end_point ) ) start_point , end_point = self . _pad_event ( e ) if start_point : mask_list . append ( ( start_point , end_point ) ) self . mask_list = mask_list | Create mask list . | 720 | 4 |
224,217 | def source_files ( mongodb_path ) : for root , dirs , files in os . walk ( mongodb_path ) : for filename in files : # skip files in dbtests folder if 'dbtests' in root : continue if filename . endswith ( ( '.cpp' , '.c' , '.h' ) ) : yield os . path . join ( root , filename ) | Find source files . | 88 | 4 |
224,218 | def index ( ) : if current_app . config [ 'ARA_PLAYBOOK_OVERRIDE' ] is not None : override = current_app . config [ 'ARA_PLAYBOOK_OVERRIDE' ] results = ( models . TaskResult . query . join ( models . Task ) . filter ( models . Task . playbook_id . in_ ( override ) ) ) else : results = models . TaskResult . query . all ( ) return render_template ( 'task_result_index.html' , results = results ) | This is not served anywhere in the web application . It is used explicitly in the context of generating static files since flask - frozen requires url_for s to crawl content . url_for s are not used with result . show_result directly and are instead dynamically generated through javascript for performance purposes . | 113 | 59 |
224,219 | def content_sha1 ( context ) : try : content = context . current_parameters [ 'content' ] except AttributeError : content = context return hashlib . sha1 ( encodeutils . to_utf8 ( content ) ) . hexdigest ( ) | Used by the FileContent model to automatically compute the sha1 hash of content before storing it to the database . | 57 | 23 |
224,220 | def main ( ) : files = models . File . query hosts = models . Host . query facts = models . HostFacts . query playbooks = models . Playbook . query records = models . Data . query tasks = models . Task . query results = models . TaskResult . query if current_app . config [ 'ARA_PLAYBOOK_OVERRIDE' ] is not None : override = current_app . config [ 'ARA_PLAYBOOK_OVERRIDE' ] files = files . filter ( models . File . playbook_id . in_ ( override ) ) facts = ( facts . join ( models . Host ) . filter ( models . Host . playbook_id . in_ ( override ) ) ) hosts = hosts . filter ( models . Host . playbook_id . in_ ( override ) ) playbooks = playbooks . filter ( models . Playbook . id . in_ ( override ) ) records = records . filter ( models . Data . playbook_id . in_ ( override ) ) tasks = tasks . filter ( models . Task . playbook_id . in_ ( override ) ) results = ( results . join ( models . Task ) . filter ( models . Task . playbook_id . in_ ( override ) ) ) return render_template ( 'about.html' , active = 'about' , files = fast_count ( files ) , hosts = fast_count ( hosts ) , facts = fast_count ( facts ) , playbooks = fast_count ( playbooks ) , records = fast_count ( records ) , tasks = fast_count ( tasks ) , results = fast_count ( results ) ) | Returns the about page | 341 | 4 |
224,221 | def index ( ) : if current_app . config [ 'ARA_PLAYBOOK_OVERRIDE' ] is not None : override = current_app . config [ 'ARA_PLAYBOOK_OVERRIDE' ] hosts = ( models . Host . query . filter ( models . Host . playbook_id . in_ ( override ) ) ) else : hosts = models . Host . query . all ( ) return render_template ( 'host_index.html' , hosts = hosts ) | This is not served anywhere in the web application . It is used explicitly in the context of generating static files since flask - frozen requires url_for s to crawl content . url_for s are not used with host . show_host directly and are instead dynamically generated through javascript for performance purposes . | 102 | 59 |
224,222 | def config ( self ) : return { key : self . __dict__ [ key ] for key in dir ( self ) if key . isupper ( ) } | Returns a dictionary for the loaded configuration | 33 | 7 |
224,223 | def index ( ) : if current_app . config [ 'ARA_PLAYBOOK_OVERRIDE' ] is not None : override = current_app . config [ 'ARA_PLAYBOOK_OVERRIDE' ] files = ( models . File . query . filter ( models . File . playbook_id . in_ ( override ) ) ) else : files = models . File . query . all ( ) return render_template ( 'file_index.html' , files = files ) | This is not served anywhere in the web application . It is used explicitly in the context of generating static files since flask - frozen requires url_for s to crawl content . url_for s are not used with file . show_file directly and are instead dynamically generated through javascript for performance purposes . | 102 | 59 |
224,224 | def show_file ( file_ ) : file_ = ( models . File . query . get ( file_ ) ) if file_ is None : abort ( 404 ) return render_template ( 'file.html' , file_ = file_ ) | Returns details of a file | 52 | 5 |
224,225 | def configure_db ( app ) : models . db . init_app ( app ) log = logging . getLogger ( 'ara.webapp.configure_db' ) log . debug ( 'Setting up database...' ) if app . config . get ( 'ARA_AUTOCREATE_DATABASE' ) : with app . app_context ( ) : migrations = app . config [ 'DB_MIGRATIONS' ] flask_migrate . Migrate ( app , models . db , directory = migrations ) config = app . extensions [ 'migrate' ] . migrate . get_config ( migrations ) # Verify if the database tables have been created at all inspector = Inspector . from_engine ( models . db . engine ) if len ( inspector . get_table_names ( ) ) == 0 : log . info ( 'Initializing new DB from scratch' ) flask_migrate . upgrade ( directory = migrations ) # Get current alembic head revision script = ScriptDirectory . from_config ( config ) head = script . get_current_head ( ) # Get current revision, if available connection = models . db . engine . connect ( ) context = MigrationContext . configure ( connection ) current = context . get_current_revision ( ) if not current : log . info ( 'Unstable DB schema, stamping original revision' ) flask_migrate . stamp ( directory = migrations , revision = 'da9459a1f71c' ) if head != current : log . info ( 'DB schema out of date, upgrading' ) flask_migrate . upgrade ( directory = migrations ) | 0 . 10 is the first version of ARA that ships with a stable database schema . We can identify a database that originates from before this by checking if there is an alembic revision available . If there is no alembic revision available assume we are running the first revision which contains the latest state of the database prior to this . | 347 | 69 |
224,226 | def configure_cache ( app ) : log = logging . getLogger ( 'ara.webapp.configure_cache' ) log . debug ( 'Configuring cache' ) if not getattr ( app , '_cache' , None ) : app . _cache = { } | Sets up an attribute to cache data in the app context | 60 | 12 |
224,227 | def bspline_to_nurbs ( obj ) : # B-Spline -> NURBS if isinstance ( obj , BSpline . Curve ) : return _convert . convert_curve ( obj , NURBS ) elif isinstance ( obj , BSpline . Surface ) : return _convert . convert_surface ( obj , NURBS ) elif isinstance ( obj , BSpline . Volume ) : return _convert . convert_volume ( obj , NURBS ) else : raise TypeError ( "Input must be an instance of B-Spline curve, surface or volume" ) | Converts non - rational parametric shapes to rational ones . | 134 | 12 |
224,228 | def nurbs_to_bspline ( obj , * * kwargs ) : if not obj . rational : raise TypeError ( "The input must be a rational shape" ) # Get keyword arguments tol = kwargs . get ( 'tol' , 10e-8 ) # Test for non-rational component extraction for w in obj . weights : if abs ( w - 1.0 ) > tol : print ( "Cannot extract non-rational components" ) return obj # NURBS -> B-Spline if isinstance ( obj , NURBS . Curve ) : return _convert . convert_curve ( obj , BSpline ) elif isinstance ( obj , NURBS . Surface ) : return _convert . convert_surface ( obj , BSpline ) elif isinstance ( obj , NURBS . Volume ) : return _convert . convert_volume ( obj , BSpline ) else : raise TypeError ( "Input must be an instance of NURBS curve, surface or volume" ) | Extracts the non - rational components from rational parametric shapes if possible . | 224 | 16 |
224,229 | def doolittle ( matrix_a ) : # Initialize L and U matrices matrix_u = [ [ 0.0 for _ in range ( len ( matrix_a ) ) ] for _ in range ( len ( matrix_a ) ) ] matrix_l = [ [ 0.0 for _ in range ( len ( matrix_a ) ) ] for _ in range ( len ( matrix_a ) ) ] # Doolittle Method for i in range ( 0 , len ( matrix_a ) ) : for k in range ( i , len ( matrix_a ) ) : # Upper triangular (U) matrix matrix_u [ i ] [ k ] = float ( matrix_a [ i ] [ k ] - sum ( [ matrix_l [ i ] [ j ] * matrix_u [ j ] [ k ] for j in range ( 0 , i ) ] ) ) # Lower triangular (L) matrix if i == k : matrix_l [ i ] [ i ] = 1.0 else : matrix_l [ k ] [ i ] = float ( matrix_a [ k ] [ i ] - sum ( [ matrix_l [ k ] [ j ] * matrix_u [ j ] [ i ] for j in range ( 0 , i ) ] ) ) # Handle zero division error try : matrix_l [ k ] [ i ] /= float ( matrix_u [ i ] [ i ] ) except ZeroDivisionError : matrix_l [ k ] [ i ] = 0.0 return matrix_l , matrix_u | Doolittle s Method for LU - factorization . | 326 | 11 |
224,230 | def read_files ( project , ext ) : project_path = os . path . join ( os . path . dirname ( __file__ ) , project ) file_list = os . listdir ( project_path ) flist = [ ] flist_path = [ ] for f in file_list : f_path = os . path . join ( project_path , f ) if os . path . isfile ( f_path ) and f . endswith ( ext ) and f != "__init__.py" : flist . append ( f . split ( '.' ) [ 0 ] ) flist_path . append ( f_path ) return flist , flist_path | Reads files inside the input project directory . | 149 | 9 |
224,231 | def copy_files ( src , ext , dst ) : src_path = os . path . join ( os . path . dirname ( __file__ ) , src ) dst_path = os . path . join ( os . path . dirname ( __file__ ) , dst ) file_list = os . listdir ( src_path ) for f in file_list : if f == '__init__.py' : continue f_path = os . path . join ( src_path , f ) if os . path . isfile ( f_path ) and f . endswith ( ext ) : shutil . copy ( f_path , dst_path ) | Copies files with extensions ext from src to dst directory . | 143 | 12 |
224,232 | def make_dir ( project ) : project_path = os . path . join ( os . path . dirname ( __file__ ) , project ) # Delete the directory and the files inside it if os . path . exists ( project_path ) : shutil . rmtree ( project_path ) # Create the directory os . mkdir ( project_path ) # We need a __init__.py file inside the directory with open ( os . path . join ( project_path , '__init__.py' ) , 'w' ) as fp : fp . write ( '__version__ = "' + str ( get_property ( '__version__' , 'geomdl' ) ) + '"\n' ) fp . write ( '__author__ = "' + str ( get_property ( '__author__' , 'geomdl' ) ) + '"\n' ) fp . write ( '__license__ = "' + str ( get_property ( '__license__' , 'geomdl' ) ) + '"\n' ) | Creates the project directory for compiled modules . | 234 | 9 |
224,233 | def in_argv ( arg_list ) : for arg in sys . argv : for parg in arg_list : if parg == arg or arg . startswith ( parg ) : return True return False | Checks if any of the elements of the input list is in sys . argv array . | 47 | 19 |
224,234 | def generate ( degree , num_ctrlpts , * * kwargs ) : if degree == 0 or num_ctrlpts == 0 : raise ValueError ( "Input values should be different than zero." ) # Get keyword arguments clamped = kwargs . get ( 'clamped' , True ) # Number of repetitions at the start and end of the array num_repeat = degree # Number of knots in the middle num_segments = num_ctrlpts - ( degree + 1 ) if not clamped : # No repetitions at the start and end num_repeat = 0 # Should conform the rule: m = n + p + 1 num_segments = degree + num_ctrlpts - 1 # First knots knot_vector = [ 0.0 for _ in range ( 0 , num_repeat ) ] # Middle knots knot_vector += linspace ( 0.0 , 1.0 , num_segments + 2 ) # Last knots knot_vector += [ 1.0 for _ in range ( 0 , num_repeat ) ] # Return auto-generated knot vector return knot_vector | Generates an equally spaced knot vector . | 235 | 8 |
224,235 | def check ( degree , knot_vector , num_ctrlpts ) : try : if knot_vector is None or len ( knot_vector ) == 0 : raise ValueError ( "Input knot vector cannot be empty" ) except TypeError as e : print ( "An error occurred: {}" . format ( e . args [ - 1 ] ) ) raise TypeError ( "Knot vector must be a list or tuple" ) except Exception : raise # Check the formula; m = p + n + 1 if len ( knot_vector ) != degree + num_ctrlpts + 1 : return False # Check ascending order prev_knot = knot_vector [ 0 ] for knot in knot_vector : if prev_knot > knot : return False prev_knot = knot return True | Checks the validity of the input knot vector . | 166 | 10 |
224,236 | def interpolate_curve ( points , degree , * * kwargs ) : # Keyword arguments use_centripetal = kwargs . get ( 'centripetal' , False ) # Number of control points num_points = len ( points ) # Get uk uk = compute_params_curve ( points , use_centripetal ) # Compute knot vector kv = compute_knot_vector ( degree , num_points , uk ) # Do global interpolation matrix_a = _build_coeff_matrix ( degree , kv , uk , points ) ctrlpts = ginterp ( matrix_a , points ) # Generate B-spline curve curve = BSpline . Curve ( ) curve . degree = degree curve . ctrlpts = ctrlpts curve . knotvector = kv return curve | Curve interpolation through the data points . | 189 | 9 |
224,237 | def interpolate_surface ( points , size_u , size_v , degree_u , degree_v , * * kwargs ) : # Keyword arguments use_centripetal = kwargs . get ( 'centripetal' , False ) # Get uk and vl uk , vl = compute_params_surface ( points , size_u , size_v , use_centripetal ) # Compute knot vectors kv_u = compute_knot_vector ( degree_u , size_u , uk ) kv_v = compute_knot_vector ( degree_v , size_v , vl ) # Do global interpolation on the u-direction ctrlpts_r = [ ] for v in range ( size_v ) : pts = [ points [ v + ( size_v * u ) ] for u in range ( size_u ) ] matrix_a = _build_coeff_matrix ( degree_u , kv_u , uk , pts ) ctrlpts_r += ginterp ( matrix_a , pts ) # Do global interpolation on the v-direction ctrlpts = [ ] for u in range ( size_u ) : pts = [ ctrlpts_r [ u + ( size_u * v ) ] for v in range ( size_v ) ] matrix_a = _build_coeff_matrix ( degree_v , kv_v , vl , pts ) ctrlpts += ginterp ( matrix_a , pts ) # Generate B-spline surface surf = BSpline . Surface ( ) surf . degree_u = degree_u surf . degree_v = degree_v surf . ctrlpts_size_u = size_u surf . ctrlpts_size_v = size_v surf . ctrlpts = ctrlpts surf . knotvector_u = kv_u surf . knotvector_v = kv_v return surf | Surface interpolation through the data points . | 439 | 9 |
224,238 | def compute_knot_vector ( degree , num_points , params ) : # Start knot vector kv = [ 0.0 for _ in range ( degree + 1 ) ] # Use averaging method (Eqn 9.8) to compute internal knots in the knot vector for i in range ( num_points - degree - 1 ) : temp_kv = ( 1.0 / degree ) * sum ( [ params [ j ] for j in range ( i + 1 , i + degree + 1 ) ] ) kv . append ( temp_kv ) # End knot vector kv += [ 1.0 for _ in range ( degree + 1 ) ] return kv | Computes a knot vector from the parameter list using averaging method . | 143 | 13 |
224,239 | def ginterp ( coeff_matrix , points ) : # Dimension dim = len ( points [ 0 ] ) # Number of data points num_points = len ( points ) # Solve system of linear equations matrix_l , matrix_u = linalg . lu_decomposition ( coeff_matrix ) ctrlpts = [ [ 0.0 for _ in range ( dim ) ] for _ in range ( num_points ) ] for i in range ( dim ) : b = [ pt [ i ] for pt in points ] y = linalg . forward_substitution ( matrix_l , b ) x = linalg . backward_substitution ( matrix_u , y ) for j in range ( num_points ) : ctrlpts [ j ] [ i ] = x [ j ] # Return control points return ctrlpts | Applies global interpolation to the set of data points to find control points . | 188 | 16 |
224,240 | def _build_coeff_matrix ( degree , knotvector , params , points ) : # Number of data points num_points = len ( points ) # Set up coefficient matrix matrix_a = [ [ 0.0 for _ in range ( num_points ) ] for _ in range ( num_points ) ] for i in range ( num_points ) : span = helpers . find_span_linear ( degree , knotvector , num_points , params [ i ] ) matrix_a [ i ] [ span - degree : span + 1 ] = helpers . basis_function ( degree , knotvector , span , params [ i ] ) # Return coefficient matrix return matrix_a | Builds the coefficient matrix for global interpolation . | 143 | 10 |
224,241 | def create_render_window ( actors , callbacks , * * kwargs ) : # Get keyword arguments figure_size = kwargs . get ( 'figure_size' , ( 800 , 600 ) ) camera_position = kwargs . get ( 'camera_position' , ( 0 , 0 , 100 ) ) # Find camera focal point center_points = [ ] for actor in actors : center_points . append ( actor . GetCenter ( ) ) camera_focal_point = linalg . vector_mean ( * center_points ) # Create camera camera = vtk . vtkCamera ( ) camera . SetPosition ( * camera_position ) camera . SetFocalPoint ( * camera_focal_point ) # Create renderer renderer = vtk . vtkRenderer ( ) renderer . SetActiveCamera ( camera ) renderer . SetBackground ( 1.0 , 1.0 , 1.0 ) # Add actors to the scene for actor in actors : renderer . AddActor ( actor ) # Render window render_window = vtk . vtkRenderWindow ( ) render_window . AddRenderer ( renderer ) render_window . SetSize ( * figure_size ) # Render window interactor window_interactor = vtk . vtkRenderWindowInteractor ( ) window_interactor . SetRenderWindow ( render_window ) # Add event observers for cb in callbacks : window_interactor . AddObserver ( cb , callbacks [ cb ] [ 0 ] , callbacks [ cb ] [ 1 ] ) # cb name, cb function ref, cb priority # Render actors render_window . Render ( ) # Set window name after render() is called render_window . SetWindowName ( "geomdl" ) # Use trackball camera interactor_style = vtk . vtkInteractorStyleTrackballCamera ( ) window_interactor . SetInteractorStyle ( interactor_style ) # Start interactor window_interactor . Start ( ) # Return window interactor instance return window_interactor | Creates VTK render window with an interactor . | 443 | 11 |
224,242 | def create_color ( color ) : if color [ 0 ] == "#" : # Convert hex string to RGB return [ int ( color [ i : i + 2 ] , 16 ) / 255 for i in range ( 1 , 7 , 2 ) ] else : # Create a named colors instance nc = vtk . vtkNamedColors ( ) return nc . GetColor3d ( color ) | Creates VTK - compatible RGB color from a color string . | 85 | 13 |
224,243 | def create_actor_pts ( pts , color , * * kwargs ) : # Keyword arguments array_name = kwargs . get ( 'name' , "" ) array_index = kwargs . get ( 'index' , 0 ) point_size = kwargs . get ( 'size' , 5 ) point_sphere = kwargs . get ( 'point_as_sphere' , True ) # Create points points = vtk . vtkPoints ( ) points . SetData ( pts ) # Create a PolyData object and add points polydata = vtk . vtkPolyData ( ) polydata . SetPoints ( points ) # Run vertex glyph filter on the points array vertex_filter = vtk . vtkVertexGlyphFilter ( ) vertex_filter . SetInputData ( polydata ) # Map ploy data to the graphics primitives mapper = vtk . vtkPolyDataMapper ( ) mapper . SetInputConnection ( vertex_filter . GetOutputPort ( ) ) mapper . SetArrayName ( array_name ) mapper . SetArrayId ( array_index ) # Create an actor and set its properties actor = vtk . vtkActor ( ) actor . SetMapper ( mapper ) actor . GetProperty ( ) . SetColor ( * color ) actor . GetProperty ( ) . SetPointSize ( point_size ) actor . GetProperty ( ) . SetRenderPointsAsSpheres ( point_sphere ) # Return the actor return actor | Creates a VTK actor for rendering scatter plots . | 321 | 11 |
224,244 | def create_actor_polygon ( pts , color , * * kwargs ) : # Keyword arguments array_name = kwargs . get ( 'name' , "" ) array_index = kwargs . get ( 'index' , 0 ) line_width = kwargs . get ( 'size' , 1.0 ) # Create points points = vtk . vtkPoints ( ) points . SetData ( pts ) # Number of points num_points = points . GetNumberOfPoints ( ) # Create lines cells = vtk . vtkCellArray ( ) for i in range ( num_points - 1 ) : line = vtk . vtkLine ( ) line . GetPointIds ( ) . SetId ( 0 , i ) line . GetPointIds ( ) . SetId ( 1 , i + 1 ) cells . InsertNextCell ( line ) # Create a PolyData object and add points & lines polydata = vtk . vtkPolyData ( ) polydata . SetPoints ( points ) polydata . SetLines ( cells ) # Map poly data to the graphics primitives mapper = vtk . vtkPolyDataMapper ( ) mapper . SetInputDataObject ( polydata ) mapper . SetArrayName ( array_name ) mapper . SetArrayId ( array_index ) # Create an actor and set its properties actor = vtk . vtkActor ( ) actor . SetMapper ( mapper ) actor . GetProperty ( ) . SetColor ( * color ) actor . GetProperty ( ) . SetLineWidth ( line_width ) # Return the actor return actor | Creates a VTK actor for rendering polygons . | 344 | 11 |
224,245 | def create_actor_mesh ( pts , lines , color , * * kwargs ) : # Keyword arguments array_name = kwargs . get ( 'name' , "" ) array_index = kwargs . get ( 'index' , 0 ) line_width = kwargs . get ( 'size' , 0.5 ) # Create points points = vtk . vtkPoints ( ) points . SetData ( pts ) # Create lines cells = vtk . vtkCellArray ( ) for line in lines : pline = vtk . vtkPolyLine ( ) pline . GetPointIds ( ) . SetNumberOfIds ( 5 ) for i in range ( len ( line ) ) : pline . GetPointIds ( ) . SetId ( i , line [ i ] ) pline . GetPointIds ( ) . SetId ( 4 , line [ 0 ] ) cells . InsertNextCell ( pline ) # Create a PolyData object and add points & lines polydata = vtk . vtkPolyData ( ) polydata . SetPoints ( points ) polydata . SetLines ( cells ) # Map poly data to the graphics primitives mapper = vtk . vtkPolyDataMapper ( ) mapper . SetInputDataObject ( polydata ) mapper . SetArrayName ( array_name ) mapper . SetArrayId ( array_index ) # Create an actor and set its properties actor = vtk . vtkActor ( ) actor . SetMapper ( mapper ) actor . GetProperty ( ) . SetColor ( * color ) actor . GetProperty ( ) . SetLineWidth ( line_width ) # Return the actor return actor | Creates a VTK actor for rendering quadrilateral plots . | 361 | 13 |
224,246 | def create_actor_tri ( pts , tris , color , * * kwargs ) : # Keyword arguments array_name = kwargs . get ( 'name' , "" ) array_index = kwargs . get ( 'index' , 0 ) # Create points points = vtk . vtkPoints ( ) points . SetData ( pts ) # Create triangles triangles = vtk . vtkCellArray ( ) for tri in tris : tmp = vtk . vtkTriangle ( ) for i , v in enumerate ( tri ) : tmp . GetPointIds ( ) . SetId ( i , v ) triangles . InsertNextCell ( tmp ) # Create a PolyData object and add points & triangles polydata = vtk . vtkPolyData ( ) polydata . SetPoints ( points ) polydata . SetPolys ( triangles ) # Map poly data to the graphics primitives mapper = vtk . vtkPolyDataMapper ( ) mapper . SetInputDataObject ( polydata ) mapper . SetArrayName ( array_name ) mapper . SetArrayId ( array_index ) # Create an actor and set its properties actor = vtk . vtkActor ( ) actor . SetMapper ( mapper ) actor . GetProperty ( ) . SetColor ( * color ) # Return the actor return actor | Creates a VTK actor for rendering triangulated surface plots . | 285 | 14 |
224,247 | def create_actor_hexahedron ( grid , color , * * kwargs ) : # Keyword arguments array_name = kwargs . get ( 'name' , "" ) array_index = kwargs . get ( 'index' , 0 ) # Create hexahedron elements points = vtk . vtkPoints ( ) hexarray = vtk . vtkCellArray ( ) for j , pt in enumerate ( grid ) : tmp = vtk . vtkHexahedron ( ) fb = pt [ 0 ] for i , v in enumerate ( fb ) : points . InsertNextPoint ( v ) tmp . GetPointIds ( ) . SetId ( i , i + ( j * 8 ) ) ft = pt [ - 1 ] for i , v in enumerate ( ft ) : points . InsertNextPoint ( v ) tmp . GetPointIds ( ) . SetId ( i + 4 , i + 4 + ( j * 8 ) ) hexarray . InsertNextCell ( tmp ) # Create an unstructured grid object and add points & hexahedron elements ugrid = vtk . vtkUnstructuredGrid ( ) ugrid . SetPoints ( points ) ugrid . SetCells ( tmp . GetCellType ( ) , hexarray ) # ugrid.InsertNextCell(tmp.GetCellType(), tmp.GetPointIds()) # Map unstructured grid to the graphics primitives mapper = vtk . vtkDataSetMapper ( ) mapper . SetInputDataObject ( ugrid ) mapper . SetArrayName ( array_name ) mapper . SetArrayId ( array_index ) # Create an actor and set its properties actor = vtk . vtkActor ( ) actor . SetMapper ( mapper ) actor . GetProperty ( ) . SetColor ( * color ) # Return the actor return actor | Creates a VTK actor for rendering voxels using hexahedron elements . | 403 | 18 |
224,248 | def create_actor_delaunay ( pts , color , * * kwargs ) : # Keyword arguments array_name = kwargs . get ( 'name' , "" ) array_index = kwargs . get ( 'index' , 0 ) use_delaunay3d = kwargs . get ( "d3d" , False ) # Create points points = vtk . vtkPoints ( ) points . SetData ( pts ) # Create a PolyData object and add points polydata = vtk . vtkPolyData ( ) polydata . SetPoints ( points ) # Apply Delaunay triangulation on the poly data object triangulation = vtk . vtkDelaunay3D ( ) if use_delaunay3d else vtk . vtkDelaunay2D ( ) triangulation . SetInputData ( polydata ) # Map triangulated surface to the graphics primitives mapper = vtk . vtkDataSetMapper ( ) mapper . SetInputConnection ( triangulation . GetOutputPort ( ) ) mapper . SetArrayName ( array_name ) mapper . SetArrayId ( array_index ) # Create an actor and set its properties actor = vtk . vtkActor ( ) actor . SetMapper ( mapper ) actor . GetProperty ( ) . SetColor ( * color ) # Return the actor return actor | Creates a VTK actor for rendering triangulated plots using Delaunay triangulation . | 298 | 20 |
224,249 | def flip_ctrlpts_u ( ctrlpts , size_u , size_v ) : new_ctrlpts = [ ] for i in range ( 0 , size_u ) : for j in range ( 0 , size_v ) : temp = [ float ( c ) for c in ctrlpts [ i + ( j * size_u ) ] ] new_ctrlpts . append ( temp ) return new_ctrlpts | Flips a list of 1 - dimensional control points from u - row order to v - row order . | 97 | 21 |
224,250 | def generate_ctrlptsw ( ctrlpts ) : # Multiply control points by weight new_ctrlpts = [ ] for cpt in ctrlpts : temp = [ float ( pt * cpt [ - 1 ] ) for pt in cpt ] temp [ - 1 ] = float ( cpt [ - 1 ] ) new_ctrlpts . append ( temp ) return new_ctrlpts | Generates weighted control points from unweighted ones in 1 - D . | 89 | 15 |
224,251 | def generate_ctrlpts_weights ( ctrlpts ) : # Divide control points by weight new_ctrlpts = [ ] for cpt in ctrlpts : temp = [ float ( pt / cpt [ - 1 ] ) for pt in cpt ] temp [ - 1 ] = float ( cpt [ - 1 ] ) new_ctrlpts . append ( temp ) return new_ctrlpts | Generates unweighted control points from weighted ones in 1 - D . | 89 | 15 |
224,252 | def combine_ctrlpts_weights ( ctrlpts , weights = None ) : if weights is None : weights = [ 1.0 for _ in range ( len ( ctrlpts ) ) ] ctrlptsw = [ ] for pt , w in zip ( ctrlpts , weights ) : temp = [ float ( c * w ) for c in pt ] temp . append ( float ( w ) ) ctrlptsw . append ( temp ) return ctrlptsw | Multiplies control points by the weights to generate weighted control points . | 103 | 14 |
224,253 | def separate_ctrlpts_weights ( ctrlptsw ) : ctrlpts = [ ] weights = [ ] for ptw in ctrlptsw : temp = [ float ( pw / ptw [ - 1 ] ) for pw in ptw [ : - 1 ] ] ctrlpts . append ( temp ) weights . append ( ptw [ - 1 ] ) return [ ctrlpts , weights ] | Divides weighted control points by weights to generate unweighted control points and weights vector . | 91 | 18 |
224,254 | def flip_ctrlpts2d_file ( file_in = '' , file_out = 'ctrlpts_flip.txt' ) : # Read control points ctrlpts2d , size_u , size_v = _read_ctrltps2d_file ( file_in ) # Flip control points array new_ctrlpts2d = flip_ctrlpts2d ( ctrlpts2d , size_u , size_v ) # Save new control points _save_ctrlpts2d_file ( new_ctrlpts2d , size_u , size_v , file_out ) | Flips u and v directions of a 2D control points file and saves flipped coordinates to a file . | 139 | 21 |
224,255 | def generate_ctrlptsw2d_file ( file_in = '' , file_out = 'ctrlptsw.txt' ) : # Read control points ctrlpts2d , size_u , size_v = _read_ctrltps2d_file ( file_in ) # Multiply control points by weight new_ctrlpts2d = generate_ctrlptsw2d ( ctrlpts2d ) # Save new control points _save_ctrlpts2d_file ( new_ctrlpts2d , size_u , size_v , file_out ) | Generates weighted control points from unweighted ones in 2 - D . | 131 | 15 |
224,256 | def keypress_callback ( self , obj , ev ) : key = obj . GetKeySym ( ) # pressed key (as str) render_window = obj . GetRenderWindow ( ) # vtkRenderWindow renderer = render_window . GetRenderers ( ) . GetFirstRenderer ( ) # vtkRenderer picker = obj . GetPicker ( ) # vtkPropPicker actor = picker . GetActor ( ) # vtkActor # Custom keypress events if key == 'Up' : camera = renderer . GetActiveCamera ( ) # vtkCamera camera . Pitch ( 2.5 ) if key == 'Down' : camera = renderer . GetActiveCamera ( ) # vtkCamera camera . Pitch ( - 2.5 ) if key == 'Left' : camera = renderer . GetActiveCamera ( ) # vtkCamera camera . Yaw ( - 2.5 ) if key == 'Right' : camera = renderer . GetActiveCamera ( ) # vtkCamera camera . Yaw ( 2.5 ) if key == 'b' : if self . _bg_id >= len ( self . _bg ) : self . _bg_id = 0 renderer . SetBackground ( * self . _bg [ self . _bg_id ] ) self . _bg_id += 1 if key == 'm' : if actor is not None : actor . GetProperty ( ) . SetColor ( random ( ) , random ( ) , random ( ) ) if key == 'd' : if actor is not None : print ( "Name:" , actor . GetMapper ( ) . GetArrayName ( ) ) print ( "Index:" , actor . GetMapper ( ) . GetArrayId ( ) ) print ( "Selected point:" , picker . GetSelectionPoint ( ) [ 0 : 2 ] ) print ( "# of visible actors:" , renderer . VisibleActorCount ( ) ) if key == 'h' : if actor is not None : actor . SetVisibility ( not actor . GetVisibility ( ) ) if key == 'n' : actors = renderer . GetActors ( ) # vtkActorCollection for actor in actors : actor . VisibilityOn ( ) # Update render window render_window . Render ( ) | VTK callback for keypress events . | 487 | 8 |
224,257 | def generate_voxel_grid ( bbox , szval , use_cubes = False ) : # Input validation if szval [ 0 ] <= 1 or szval [ 1 ] <= 1 or szval [ 2 ] <= 1 : raise GeomdlException ( "Size values must be bigger than 1" , data = dict ( sizevals = szval ) ) # Find step size for each direction steps = [ float ( bbox [ 1 ] [ idx ] - bbox [ 0 ] [ idx ] ) / float ( szval [ idx ] - 1 ) for idx in range ( 0 , 3 ) ] # It is possible to use cubes instead of cuboids if use_cubes : min_val = min ( * steps ) steps = [ min_val for _ in range ( 0 , 3 ) ] # Find range in each direction ranges = [ list ( linalg . frange ( bbox [ 0 ] [ idx ] , bbox [ 1 ] [ idx ] , steps [ idx ] ) ) for idx in range ( 0 , 3 ) ] voxel_grid = [ ] for u in ranges [ 0 ] : for v in ranges [ 1 ] : for w in ranges [ 2 ] : bbmin = [ u , v , w ] bbmax = [ k + l for k , l in zip ( bbmin , steps ) ] voxel_grid . append ( [ bbmin , bbmax ] ) return voxel_grid | Generates the voxel grid with the desired size . | 326 | 12 |
224,258 | def process_template ( file_src ) : def tmpl_sqrt ( x ) : """ Square-root of 'x' """ return math . sqrt ( x ) def tmpl_cubert ( x ) : """ Cube-root of 'x' """ return x ** ( 1.0 / 3.0 ) if x >= 0 else - ( - x ) ** ( 1.0 / 3.0 ) def tmpl_pow ( x , y ) : """ 'x' to the power 'y' """ return math . pow ( x , y ) # Check if it is possible to import 'jinja2' try : import jinja2 except ImportError : raise GeomdlException ( "Please install 'jinja2' package to use templated input: pip install jinja2" ) # Replace jinja2 template tags for compatibility fsrc = file_src . replace ( "{%" , "<%" ) . replace ( "%}" , "%>" ) . replace ( "{{" , "<{" ) . replace ( "}}" , "}>" ) # Generate Jinja2 environment env = jinja2 . Environment ( loader = jinja2 . BaseLoader ( ) , trim_blocks = True , block_start_string = '<%' , block_end_string = '%>' , variable_start_string = '<{' , variable_end_string = '}>' ) . from_string ( fsrc ) # Load custom functions into the Jinja2 environment template_funcs = dict ( knot_vector = utilities . generate_knot_vector , sqrt = tmpl_sqrt , cubert = tmpl_cubert , pow = tmpl_pow , ) for k , v in template_funcs . items ( ) : env . globals [ k ] = v # Process Jinja2 template functions & variables inside the input file return env . render ( ) | Process Jinja2 template input | 419 | 6 |
224,259 | def import_surf_mesh ( file_name ) : raw_content = read_file ( file_name ) raw_content = raw_content . split ( "\n" ) content = [ ] for rc in raw_content : temp = rc . strip ( ) . split ( ) content . append ( temp ) # 1st line defines the dimension and it must be 3 if int ( content [ 0 ] [ 0 ] ) != 3 : raise TypeError ( "Input mesh '" + str ( file_name ) + "' must be 3-dimensional" ) # Create a NURBS surface instance and fill with the data read from mesh file surf = shortcuts . generate_surface ( rational = True ) # 2nd line is the degrees surf . degree_u = int ( content [ 1 ] [ 0 ] ) surf . degree_v = int ( content [ 1 ] [ 1 ] ) # 3rd line is the number of weighted control points in u and v directions dim_u = int ( content [ 2 ] [ 0 ] ) dim_v = int ( content [ 2 ] [ 1 ] ) # Starting from 6th line, we have the weighted control points ctrlpts_end = 5 + ( dim_u * dim_v ) ctrlpts_mesh = content [ 5 : ctrlpts_end ] # mesh files have the control points in u-row order format ctrlpts = compatibility . flip_ctrlpts_u ( ctrlpts_mesh , dim_u , dim_v ) # mesh files store control points in format (x, y, z, w) ctrlptsw = compatibility . generate_ctrlptsw ( ctrlpts ) # Set control points surf . set_ctrlpts ( ctrlptsw , dim_u , dim_v ) # 4th and 5th lines are knot vectors surf . knotvector_u = [ float ( u ) for u in content [ 3 ] ] surf . knotvector_v = [ float ( v ) for v in content [ 4 ] ] # Return the surface instance return surf | Generates a NURBS surface object from a mesh file . | 442 | 13 |
224,260 | def import_vol_mesh ( file_name ) : raw_content = read_file ( file_name ) raw_content = raw_content . split ( "\n" ) content = [ ] for rc in raw_content : temp = rc . strip ( ) . split ( ) content . append ( temp ) # 1st line defines the dimension and it must be 3 if int ( content [ 0 ] [ 0 ] ) != 3 : raise TypeError ( "Input mesh '" + str ( file_name ) + "' must be 3-dimensional" ) # Create a NURBS surface instance and fill with the data read from mesh file vol = shortcuts . generate_volume ( rational = True ) # 2nd line is the degrees vol . degree_u = int ( content [ 1 ] [ 0 ] ) vol . degree_v = int ( content [ 1 ] [ 1 ] ) vol . degree_w = int ( content [ 1 ] [ 2 ] ) # 3rd line is the number of weighted control points in u, v, w directions dim_u = int ( content [ 2 ] [ 0 ] ) dim_v = int ( content [ 2 ] [ 1 ] ) dim_w = int ( content [ 2 ] [ 2 ] ) # Starting from 7th line, we have the weighted control points surf_cpts = dim_u * dim_v ctrlpts_end = 6 + ( surf_cpts * dim_w ) ctrlpts_mesh = content [ 6 : ctrlpts_end ] # mesh files have the control points in u-row order format ctrlpts = [ ] for i in range ( dim_w - 1 ) : ctrlpts += compatibility . flip_ctrlpts_u ( ctrlpts_mesh [ surf_cpts * i : surf_cpts * ( i + 1 ) ] , dim_u , dim_v ) # mesh files store control points in format (x, y, z, w) ctrlptsw = compatibility . generate_ctrlptsw ( ctrlpts ) # Set control points vol . set_ctrlpts ( ctrlptsw , dim_u , dim_v , dim_w ) # 4th, 5th and 6th lines are knot vectors vol . knotvector_u = [ float ( u ) for u in content [ 3 ] ] vol . knotvector_v = [ float ( v ) for v in content [ 4 ] ] vol . knotvector_w = [ float ( w ) for w in content [ 5 ] ] # Return the volume instance return vol | Generates a NURBS volume object from a mesh file . | 555 | 13 |
224,261 | def import_txt ( file_name , two_dimensional = False , * * kwargs ) : # Read file content = exch . read_file ( file_name ) # Are we using a Jinja2 template? j2tmpl = kwargs . get ( 'jinja2' , False ) if j2tmpl : content = exch . process_template ( content ) # File delimiters col_sep = kwargs . get ( 'col_separator' , ";" ) sep = kwargs . get ( 'separator' , "," ) return exch . import_text_data ( content , sep , col_sep , two_dimensional ) | Reads control points from a text file and generates a 1 - dimensional list of control points . | 146 | 19 |
224,262 | def export_txt ( obj , file_name , two_dimensional = False , * * kwargs ) : # Check if the user has set any control points if obj . ctrlpts is None or len ( obj . ctrlpts ) == 0 : raise exch . GeomdlException ( "There are no control points to save!" ) # Check the usage of two_dimensional flag if obj . pdimension == 1 and two_dimensional : # Silently ignore two_dimensional flag two_dimensional = False # File delimiters col_sep = kwargs . get ( 'col_separator' , ";" ) sep = kwargs . get ( 'separator' , "," ) content = exch . export_text_data ( obj , sep , col_sep , two_dimensional ) return exch . write_file ( file_name , content ) | Exports control points as a text file . | 186 | 9 |
224,263 | def import_csv ( file_name , * * kwargs ) : # File delimiters sep = kwargs . get ( 'separator' , "," ) content = exch . read_file ( file_name , skip_lines = 1 ) return exch . import_text_data ( content , sep ) | Reads control points from a CSV file and generates a 1 - dimensional list of control points . | 68 | 19 |
224,264 | def export_csv ( obj , file_name , point_type = 'evalpts' , * * kwargs ) : if not 0 < obj . pdimension < 3 : raise exch . GeomdlException ( "Input object should be a curve or a surface" ) # Pick correct points from the object if point_type == 'ctrlpts' : points = obj . ctrlptsw if obj . rational else obj . ctrlpts elif point_type == 'evalpts' : points = obj . evalpts else : raise exch . GeomdlException ( "Please choose a valid point type option. Possible types: ctrlpts, evalpts" ) # Prepare CSV header dim = len ( points [ 0 ] ) line = "dim " for i in range ( dim - 1 ) : line += str ( i + 1 ) + ", dim " line += str ( dim ) + "\n" # Prepare values for pt in points : line += "," . join ( [ str ( p ) for p in pt ] ) + "\n" # Write to file return exch . write_file ( file_name , line ) | Exports control points or evaluated points as a CSV file . | 244 | 12 |
224,265 | def import_cfg ( file_name , * * kwargs ) : def callback ( data ) : return libconf . loads ( data ) # Check if it is possible to import 'libconf' try : import libconf except ImportError : raise exch . GeomdlException ( "Please install 'libconf' package to use libconfig format: pip install libconf" ) # Get keyword arguments delta = kwargs . get ( 'delta' , - 1.0 ) use_template = kwargs . get ( 'jinja2' , False ) # Read file file_src = exch . read_file ( file_name ) # Import data return exch . import_dict_str ( file_src = file_src , delta = delta , callback = callback , tmpl = use_template ) | Imports curves and surfaces from files in libconfig format . | 172 | 12 |
224,266 | def export_cfg ( obj , file_name ) : def callback ( data ) : return libconf . dumps ( data ) # Check if it is possible to import 'libconf' try : import libconf except ImportError : raise exch . GeomdlException ( "Please install 'libconf' package to use libconfig format: pip install libconf" ) # Export data exported_data = exch . export_dict_str ( obj = obj , callback = callback ) # Write to file return exch . write_file ( file_name , exported_data ) | Exports curves and surfaces in libconfig format . | 117 | 10 |
224,267 | def import_yaml ( file_name , * * kwargs ) : def callback ( data ) : yaml = YAML ( ) return yaml . load ( data ) # Check if it is possible to import 'ruamel.yaml' try : from ruamel . yaml import YAML except ImportError : raise exch . GeomdlException ( "Please install 'ruamel.yaml' package to use YAML format: pip install ruamel.yaml" ) # Get keyword arguments delta = kwargs . get ( 'delta' , - 1.0 ) use_template = kwargs . get ( 'jinja2' , False ) # Read file file_src = exch . read_file ( file_name ) # Import data return exch . import_dict_str ( file_src = file_src , delta = delta , callback = callback , tmpl = use_template ) | Imports curves and surfaces from files in YAML format . | 198 | 13 |
224,268 | def export_yaml ( obj , file_name ) : def callback ( data ) : # Ref: https://yaml.readthedocs.io/en/latest/example.html#output-of-dump-as-a-string stream = StringIO ( ) yaml = YAML ( ) yaml . dump ( data , stream ) return stream . getvalue ( ) # Check if it is possible to import 'ruamel.yaml' try : from ruamel . yaml import YAML except ImportError : raise exch . GeomdlException ( "Please install 'ruamel.yaml' package to use YAML format: pip install ruamel.yaml" ) # Export data exported_data = exch . export_dict_str ( obj = obj , callback = callback ) # Write to file return exch . write_file ( file_name , exported_data ) | Exports curves and surfaces in YAML format . | 192 | 11 |
224,269 | def import_json ( file_name , * * kwargs ) : def callback ( data ) : return json . loads ( data ) # Get keyword arguments delta = kwargs . get ( 'delta' , - 1.0 ) use_template = kwargs . get ( 'jinja2' , False ) # Read file file_src = exch . read_file ( file_name ) # Import data return exch . import_dict_str ( file_src = file_src , delta = delta , callback = callback , tmpl = use_template ) | Imports curves and surfaces from files in JSON format . | 122 | 11 |
224,270 | def export_json ( obj , file_name ) : def callback ( data ) : return json . dumps ( data , indent = 4 ) # Export data exported_data = exch . export_dict_str ( obj = obj , callback = callback ) # Write to file return exch . write_file ( file_name , exported_data ) | Exports curves and surfaces in JSON format . | 71 | 9 |
224,271 | def import_obj ( file_name , * * kwargs ) : def default_callback ( face_list ) : return face_list # Keyword arguments callback_func = kwargs . get ( 'callback' , default_callback ) # Read and process the input file content = exch . read_file ( file_name ) content_arr = content . split ( "\n" ) # Initialize variables on_face = False vertices = [ ] triangles = [ ] faces = [ ] # Index values vert_idx = 1 tri_idx = 1 face_idx = 1 # Loop through the data for carr in content_arr : carr = carr . strip ( ) data = carr . split ( " " ) data = [ d . strip ( ) for d in data ] if data [ 0 ] == "v" : if on_face : on_face = not on_face face = elements . Face ( * triangles , id = face_idx ) faces . append ( face ) face_idx += 1 vertices [ : ] = [ ] triangles [ : ] = [ ] vert_idx = 1 tri_idx = 1 vertex = elements . Vertex ( * data [ 1 : ] , id = vert_idx ) vertices . append ( vertex ) vert_idx += 1 if data [ 0 ] == "f" : on_face = True triangle = elements . Triangle ( * [ vertices [ int ( fidx ) - 1 ] for fidx in data [ 1 : ] ] , id = tri_idx ) triangles . append ( triangle ) tri_idx += 1 # Process the final face if triangles : face = elements . Face ( * triangles , id = face_idx ) faces . append ( face ) # Return the output of the callback function return callback_func ( faces ) | Reads . obj files and generates faces . | 392 | 9
224,272 | def select_color ( cpcolor , evalcolor , idx = 0 ) : # Random colors by default color = utilities . color_generator ( ) # Constant color for control points grid if isinstance ( cpcolor , str ) : color [ 0 ] = cpcolor # User-defined color for control points grid if isinstance ( cpcolor , ( list , tuple ) ) : color [ 0 ] = cpcolor [ idx ] # Constant color for evaluated points grid if isinstance ( evalcolor , str ) : color [ 1 ] = evalcolor # User-defined color for evaluated points grid if isinstance ( evalcolor , ( list , tuple ) ) : color [ 1 ] = evalcolor [ idx ] return color | Selects item color for plotting . | 150 | 7 |
224,273 | def process_tessellate ( elem , update_delta , delta , * * kwargs ) : if update_delta : elem . delta = delta elem . evaluate ( ) elem . tessellate ( * * kwargs ) return elem | Tessellates surfaces . | 59 | 5 |
224,274 | def process_elements_surface ( elem , mconf , colorval , idx , force_tsl , update_delta , delta , reset_names ) : if idx < 0 : lock . acquire ( ) idx = counter . value counter . value += 1 lock . release ( ) if update_delta : elem . delta = delta elem . evaluate ( ) # Reset element name if reset_names : elem . name = "surface" # Fix element name if elem . name == "surface" and idx >= 0 : elem . name = elem . name + " " + str ( idx ) # Color selection color = select_color ( colorval [ 0 ] , colorval [ 1 ] , idx = idx ) # Initialize the return list rl = [ ] # Add control points if mconf [ 'ctrlpts' ] == 'points' : ret = dict ( ptsarr = elem . ctrlpts , name = ( elem . name , "(CP)" ) , color = color [ 0 ] , plot_type = 'ctrlpts' , idx = idx ) rl . append ( ret ) # Add control points as quads if mconf [ 'ctrlpts' ] == 'quads' : qtsl = tessellate . QuadTessellate ( ) qtsl . tessellate ( elem . ctrlpts , size_u = elem . ctrlpts_size_u , size_v = elem . ctrlpts_size_v ) ret = dict ( ptsarr = [ qtsl . vertices , qtsl . faces ] , name = ( elem . name , "(CP)" ) , color = color [ 0 ] , plot_type = 'ctrlpts' , idx = idx ) rl . append ( ret ) # Add surface points if mconf [ 'evalpts' ] == 'points' : ret = dict ( ptsarr = elem . evalpts , name = ( elem . name , idx ) , color = color [ 1 ] , plot_type = 'evalpts' , idx = idx ) rl . append ( ret ) # Add surface points as quads if mconf [ 'evalpts' ] == 'quads' : qtsl = tessellate . QuadTessellate ( ) qtsl . tessellate ( elem . evalpts , size_u = elem . sample_size_u , size_v = elem . sample_size_v ) ret = dict ( ptsarr = [ qtsl . vertices , qtsl . faces ] , name = elem . name , color = color [ 1 ] , plot_type = 'evalpts' , idx = idx ) rl . append ( ret ) # Add surface points as vertices and triangles if mconf [ 'evalpts' ] == 'triangles' : elem . tessellate ( force = force_tsl ) ret = dict ( ptsarr = [ elem . tessellator . vertices , elem . tessellator . faces ] , name = elem . 
name , color = color [ 1 ] , plot_type = 'evalpts' , idx = idx ) rl . append ( ret ) # Add the trim curves for itc , trim in enumerate ( elem . trims ) : ret = dict ( ptsarr = elem . evaluate_list ( trim . evalpts ) , name = ( "trim" , itc ) , color = colorval [ 2 ] , plot_type = 'trimcurve' , idx = idx ) rl . append ( ret ) # Return the list return rl | Processes visualization elements for surfaces . | 817 | 7 |
224,275 | def find_span_binsearch ( degree , knot_vector , num_ctrlpts , knot , * * kwargs ) : # Get tolerance value tol = kwargs . get ( 'tol' , 10e-6 ) # In The NURBS Book; number of knots = m + 1, number of control points = n + 1, p = degree # All knot vectors should follow the rule: m = p + n + 1 n = num_ctrlpts - 1 if abs ( knot_vector [ n + 1 ] - knot ) <= tol : return n # Set max and min positions of the array to be searched low = degree high = num_ctrlpts # The division could return a float value which makes it impossible to use as an array index mid = ( low + high ) / 2 # Direct int casting would cause numerical errors due to discarding the significand figures (digits after the dot) # The round function could return unexpected results, so we add the floating point with some small number # This addition would solve the issues caused by the division operation and how Python stores float numbers. # E.g. round(13/2) = 6 (expected to see 7) mid = int ( round ( mid + tol ) ) # Search for the span while ( knot < knot_vector [ mid ] ) or ( knot >= knot_vector [ mid + 1 ] ) : if knot < knot_vector [ mid ] : high = mid else : low = mid mid = int ( ( low + high ) / 2 ) return mid | Finds the span of the knot over the input knot vector using binary search . | 328 | 16 |
224,276 | def find_span_linear ( degree , knot_vector , num_ctrlpts , knot , * * kwargs ) : span = 0 # Knot span index starts from zero while span < num_ctrlpts and knot_vector [ span ] <= knot : span += 1 return span - 1 | Finds the span of a single knot over the knot vector using linear search . | 63 | 16 |
224,277 | def find_spans ( degree , knot_vector , num_ctrlpts , knots , func = find_span_linear ) : spans = [ ] for knot in knots : spans . append ( func ( degree , knot_vector , num_ctrlpts , knot ) ) return spans | Finds spans of a list of knots over the knot vector . | 61 | 13 |
224,278 | def find_multiplicity ( knot , knot_vector , * * kwargs ) : # Get tolerance value tol = kwargs . get ( 'tol' , 10e-8 ) mult = 0 # initial multiplicity for kv in knot_vector : if abs ( knot - kv ) <= tol : mult += 1 return mult | Finds knot multiplicity over the knot vector . | 74 | 10 |
224,279 | def basis_function ( degree , knot_vector , span , knot ) : left = [ 0.0 for _ in range ( degree + 1 ) ] right = [ 0.0 for _ in range ( degree + 1 ) ] N = [ 1.0 for _ in range ( degree + 1 ) ] # N[0] = 1.0 by definition for j in range ( 1 , degree + 1 ) : left [ j ] = knot - knot_vector [ span + 1 - j ] right [ j ] = knot_vector [ span + j ] - knot saved = 0.0 for r in range ( 0 , j ) : temp = N [ r ] / ( right [ r + 1 ] + left [ j - r ] ) N [ r ] = saved + right [ r + 1 ] * temp saved = left [ j - r ] * temp N [ j ] = saved return N | Computes the non - vanishing basis functions for a single parameter . | 189 | 13 |
224,280 | def basis_functions ( degree , knot_vector , spans , knots ) : basis = [ ] for span , knot in zip ( spans , knots ) : basis . append ( basis_function ( degree , knot_vector , span , knot ) ) return basis | Computes the non - vanishing basis functions for a list of parameters . | 54 | 14 |
224,281 | def basis_function_all ( degree , knot_vector , span , knot ) : N = [ [ None for _ in range ( degree + 1 ) ] for _ in range ( degree + 1 ) ] for i in range ( 0 , degree + 1 ) : bfuns = basis_function ( i , knot_vector , span , knot ) for j in range ( 0 , i + 1 ) : N [ j ] [ i ] = bfuns [ j ] return N | Computes all non - zero basis functions of all degrees from 0 up to the input degree for a single parameter . | 101 | 23 |
224,282 | def basis_functions_ders ( degree , knot_vector , spans , knots , order ) : basis_ders = [ ] for span , knot in zip ( spans , knots ) : basis_ders . append ( basis_function_ders ( degree , knot_vector , span , knot , order ) ) return basis_ders | Computes derivatives of the basis functions for a list of parameters . | 73 | 13 |
224,283 | def basis_function_one ( degree , knot_vector , span , knot ) : # Special case at boundaries if ( span == 0 and knot == knot_vector [ 0 ] ) or ( span == len ( knot_vector ) - degree - 2 ) and knot == knot_vector [ len ( knot_vector ) - 1 ] : return 1.0 # Knot is outside of span range if knot < knot_vector [ span ] or knot >= knot_vector [ span + degree + 1 ] : return 0.0 N = [ 0.0 for _ in range ( degree + span + 1 ) ] # Initialize the zeroth degree basis functions for j in range ( 0 , degree + 1 ) : if knot_vector [ span + j ] <= knot < knot_vector [ span + j + 1 ] : N [ j ] = 1.0 # Computing triangular table of basis functions for k in range ( 1 , degree + 1 ) : # Detecting zeros saves computations saved = 0.0 if N [ 0 ] != 0.0 : saved = ( ( knot - knot_vector [ span ] ) * N [ 0 ] ) / ( knot_vector [ span + k ] - knot_vector [ span ] ) for j in range ( 0 , degree - k + 1 ) : Uleft = knot_vector [ span + j + 1 ] Uright = knot_vector [ span + j + k + 1 ] # Zero detection if N [ j + 1 ] == 0.0 : N [ j ] = saved saved = 0.0 else : temp = N [ j + 1 ] / ( Uright - Uleft ) N [ j ] = saved + ( Uright - knot ) * temp saved = ( knot - Uleft ) * temp return N [ 0 ] | Computes the value of a basis function for a single parameter . | 374 | 13 |
224,284 | def set_axes_equal ( ax ) : bounds = [ ax . get_xlim3d ( ) , ax . get_ylim3d ( ) , ax . get_zlim3d ( ) ] ranges = [ abs ( bound [ 1 ] - bound [ 0 ] ) for bound in bounds ] centers = [ np . mean ( bound ) for bound in bounds ] radius = 0.5 * max ( ranges ) lower_limits = centers - radius upper_limits = centers + radius ax . set_xlim3d ( [ lower_limits [ 0 ] , upper_limits [ 0 ] ] ) ax . set_ylim3d ( [ lower_limits [ 1 ] , upper_limits [ 1 ] ] ) ax . set_zlim3d ( [ lower_limits [ 2 ] , upper_limits [ 2 ] ] ) | Sets equal aspect ratio across the three axes of a 3D plot . | 180 | 15 |
224,285 | def animate ( self , * * kwargs ) : # Calling parent render function super ( VisSurface , self ) . render ( * * kwargs ) # Colormaps surf_cmaps = kwargs . get ( 'colormap' , None ) # Initialize variables tri_idxs = [ ] vert_coords = [ ] trisurf_params = [ ] frames = [ ] frames_tris = [ ] num_vertices = 0 # Start plotting of the surface and the control points grid fig = plt . figure ( figsize = self . vconf . figure_size , dpi = self . vconf . figure_dpi ) ax = Axes3D ( fig ) # Start plotting surf_count = 0 for plot in self . _plots : # Plot evaluated points if plot [ 'type' ] == 'evalpts' and self . vconf . display_evalpts : # Use internal triangulation algorithm instead of Qhull (MPL default) verts = plot [ 'ptsarr' ] [ 0 ] tris = plot [ 'ptsarr' ] [ 1 ] # Extract zero-indexed vertex number list tri_idxs += [ [ ti + num_vertices for ti in tri . data ] for tri in tris ] # Extract vertex coordinates vert_coords += [ vert . data for vert in verts ] # Update number of vertices num_vertices = len ( vert_coords ) # Determine the color or the colormap of the triangulated plot params = { } if surf_cmaps : try : params [ 'cmap' ] = surf_cmaps [ surf_count ] surf_count += 1 except IndexError : params [ 'color' ] = plot [ 'color' ] else : params [ 'color' ] = plot [ 'color' ] trisurf_params += [ params for _ in range ( len ( tris ) ) ] # Pre-processing for the animation pts = np . array ( vert_coords , dtype = self . vconf . dtype ) # Create the frames (Artists) for tidx , pidx in zip ( tri_idxs , trisurf_params ) : frames_tris . append ( tidx ) # Create MPL Triangulation object triangulation = mpltri . Triangulation ( pts [ : , 0 ] , pts [ : , 1 ] , triangles = frames_tris ) # Use custom Triangulation object and the choice of color/colormap to plot the surface p3df = ax . plot_trisurf ( triangulation , pts [ : , 2 ] , alpha = self . vconf . alpha , * * pidx ) # Add to frames list frames . append ( [ p3df ] ) # Create MPL ArtistAnimation ani = animation . 
ArtistAnimation ( fig , frames , interval = 100 , blit = True , repeat_delay = 1000 ) # Remove axes if not self . vconf . display_axes : plt . axis ( 'off' ) # Set axes equal if self . vconf . axes_equal : self . vconf . set_axes_equal ( ax ) # Axis labels if self . vconf . display_labels : ax . set_xlabel ( 'x' ) ax . set_ylabel ( 'y' ) ax . set_zlabel ( 'z' ) # Process keyword arguments fig_filename = kwargs . get ( 'fig_save_as' , None ) fig_display = kwargs . get ( 'display_plot' , True ) # Display the plot if fig_display : plt . show ( ) else : fig_filename = self . vconf . figure_image_filename if fig_filename is None else fig_filename # Save the figure self . vconf . save_figure_as ( fig , fig_filename ) # Return the figure object return fig | Animates the surface . | 845 | 5 |
224,286 | def tangent_curve_single_list ( obj , param_list , normalize ) : ret_vector = [ ] for param in param_list : temp = tangent_curve_single ( obj , param , normalize ) ret_vector . append ( temp ) return tuple ( ret_vector ) | Evaluates the curve tangent vectors at the given list of parameter values . | 66 | 16 |
224,287 | def normal_curve_single ( obj , u , normalize ) : # 2nd derivative of the curve gives the normal ders = obj . derivatives ( u , 2 ) point = ders [ 0 ] vector = linalg . vector_normalize ( ders [ 2 ] ) if normalize else ders [ 2 ] return tuple ( point ) , tuple ( vector ) | Evaluates the curve normal vector at the input parameter u . | 80 | 13 |
224,288 | def normal_curve_single_list ( obj , param_list , normalize ) : ret_vector = [ ] for param in param_list : temp = normal_curve_single ( obj , param , normalize ) ret_vector . append ( temp ) return tuple ( ret_vector ) | Evaluates the curve normal vectors at the given list of parameter values . | 64 | 15 |
224,289 | def binormal_curve_single ( obj , u , normalize ) : # Cross product of tangent and normal vectors gives binormal vector tan_vector = tangent_curve_single ( obj , u , normalize ) norm_vector = normal_curve_single ( obj , u , normalize ) point = tan_vector [ 0 ] vector = linalg . vector_cross ( tan_vector [ 1 ] , norm_vector [ 1 ] ) vector = linalg . vector_normalize ( vector ) if normalize else vector return tuple ( point ) , tuple ( vector ) | Evaluates the curve binormal vector at the given u parameter . | 127 | 14 |
224,290 | def binormal_curve_single_list ( obj , param_list , normalize ) : ret_vector = [ ] for param in param_list : temp = binormal_curve_single ( obj , param , normalize ) ret_vector . append ( temp ) return tuple ( ret_vector ) | Evaluates the curve binormal vectors at the given list of parameter values . | 66 | 16 |
224,291 | def tangent_surface_single_list ( obj , param_list , normalize ) : ret_vector = [ ] for param in param_list : temp = tangent_surface_single ( obj , param , normalize ) ret_vector . append ( temp ) return tuple ( ret_vector ) | Evaluates the surface tangent vectors at the given list of parameter values . | 64 | 16 |
224,292 | def normal_surface_single_list ( obj , param_list , normalize ) : ret_vector = [ ] for param in param_list : temp = normal_surface_single ( obj , param , normalize ) ret_vector . append ( temp ) return tuple ( ret_vector ) | Evaluates the surface normal vectors at the given list of parameter values . | 62 | 15 |
224,293 | def find_ctrlpts_curve ( t , curve , * * kwargs ) : # Get keyword arguments span_func = kwargs . get ( 'find_span_func' , helpers . find_span_linear ) # Find spans and the constant index span = span_func ( curve . degree , curve . knotvector , len ( curve . ctrlpts ) , t ) idx = span - curve . degree # Find control points involved in evaluation of the curve point at the input parameter curve_ctrlpts = [ ( ) for _ in range ( curve . degree + 1 ) ] for i in range ( 0 , curve . degree + 1 ) : curve_ctrlpts [ i ] = curve . ctrlpts [ idx + i ] # Return control points array return curve_ctrlpts | Finds the control points involved in the evaluation of the curve point defined by the input parameter . | 175 | 19 |
224,294 | def find_ctrlpts_surface ( t_u , t_v , surf , * * kwargs ) : # Get keyword arguments span_func = kwargs . get ( 'find_span_func' , helpers . find_span_linear ) # Find spans span_u = span_func ( surf . degree_u , surf . knotvector_u , surf . ctrlpts_size_u , t_u ) span_v = span_func ( surf . degree_v , surf . knotvector_v , surf . ctrlpts_size_v , t_v ) # Constant indices idx_u = span_u - surf . degree_u idx_v = span_v - surf . degree_v # Find control points involved in evaluation of the surface point at the input parameter pair (u, v) surf_ctrlpts = [ [ ] for _ in range ( surf . degree_u + 1 ) ] for k in range ( surf . degree_u + 1 ) : temp = [ ( ) for _ in range ( surf . degree_v + 1 ) ] for l in range ( surf . degree_v + 1 ) : temp [ l ] = surf . ctrlpts2d [ idx_u + k ] [ idx_v + l ] surf_ctrlpts [ k ] = temp # Return 2-dimensional control points array return surf_ctrlpts | Finds the control points involved in the evaluation of the surface point defined by the input parameter pair . | 306 | 20 |
224,295 | def link_curves ( * args , * * kwargs ) : # Get keyword arguments tol = kwargs . get ( 'tol' , 10e-8 ) validate = kwargs . get ( 'validate' , False ) # Validate input if validate : for idx in range ( len ( args ) - 1 ) : if linalg . point_distance ( args [ idx ] . ctrlpts [ - 1 ] , args [ idx + 1 ] . ctrlpts [ 0 ] ) > tol : raise GeomdlException ( "Curve #" + str ( idx ) + " and Curve #" + str ( idx + 1 ) + " don't touch each other" ) kv = [ ] # new knot vector cpts = [ ] # new control points array wgts = [ ] # new weights array kv_connected = [ ] # superfluous knots to be removed pdomain_end = 0 # Loop though the curves for arg in args : # Process knot vectors if not kv : kv += list ( arg . knotvector [ : - ( arg . degree + 1 ) ] ) # get rid of the last superfluous knot to maintain split curve notation cpts += list ( arg . ctrlpts ) # Process control points if arg . rational : wgts += list ( arg . weights ) else : tmp_w = [ 1.0 for _ in range ( arg . ctrlpts_size ) ] wgts += tmp_w else : tmp_kv = [ pdomain_end + k for k in arg . knotvector [ 1 : - ( arg . degree + 1 ) ] ] kv += tmp_kv cpts += list ( arg . ctrlpts [ 1 : ] ) # Process control points if arg . rational : wgts += list ( arg . weights [ 1 : ] ) else : tmp_w = [ 1.0 for _ in range ( arg . ctrlpts_size - 1 ) ] wgts += tmp_w pdomain_end += arg . knotvector [ - 1 ] kv_connected . append ( pdomain_end ) # Fix curve by appending the last knot to the end kv += [ pdomain_end for _ in range ( arg . degree + 1 ) ] # Remove the last knot from knot insertion list kv_connected . pop ( ) return kv , cpts , wgts , kv_connected | Links the input curves together . | 532 | 6 |
224,296 | def add_dimension ( obj , * * kwargs ) : if not isinstance ( obj , abstract . SplineGeometry ) : raise GeomdlException ( "Can only operate on spline geometry objects" ) # Keyword arguments inplace = kwargs . get ( 'inplace' , False ) array_init = kwargs . get ( 'array_init' , [ [ ] for _ in range ( len ( obj . ctrlpts ) ) ] ) offset_value = kwargs . get ( 'offset' , 0.0 ) # Update control points new_ctrlpts = array_init for idx , point in enumerate ( obj . ctrlpts ) : temp = [ float ( p ) for p in point [ 0 : obj . dimension ] ] temp . append ( offset_value ) new_ctrlpts [ idx ] = temp if inplace : obj . ctrlpts = new_ctrlpts return obj else : ret = copy . deepcopy ( obj ) ret . ctrlpts = new_ctrlpts return ret | Elevates the spatial dimension of the spline geometry . | 231 | 12 |
224,297 | def split_curve ( obj , param , * * kwargs ) : # Validate input if not isinstance ( obj , abstract . Curve ) : raise GeomdlException ( "Input shape must be an instance of abstract.Curve class" ) if param == obj . knotvector [ 0 ] or param == obj . knotvector [ - 1 ] : raise GeomdlException ( "Cannot split on the corner points" ) # Keyword arguments span_func = kwargs . get ( 'find_span_func' , helpers . find_span_linear ) # FindSpan implementation insert_knot_func = kwargs . get ( 'insert_knot_func' , insert_knot ) # Knot insertion algorithm # Find multiplicity of the knot and define how many times we need to add the knot ks = span_func ( obj . degree , obj . knotvector , len ( obj . ctrlpts ) , param ) - obj . degree + 1 s = helpers . find_multiplicity ( param , obj . knotvector ) r = obj . degree - s # Create backups of the original curve temp_obj = copy . deepcopy ( obj ) # Insert knot insert_knot_func ( temp_obj , [ param ] , num = [ r ] , check_num = False ) # Knot vectors knot_span = span_func ( temp_obj . degree , temp_obj . knotvector , len ( temp_obj . ctrlpts ) , param ) + 1 curve1_kv = list ( temp_obj . knotvector [ 0 : knot_span ] ) curve1_kv . append ( param ) curve2_kv = list ( temp_obj . knotvector [ knot_span : ] ) for _ in range ( 0 , temp_obj . degree + 1 ) : curve2_kv . insert ( 0 , param ) # Control points (use Pw if rational) cpts = temp_obj . ctrlptsw if obj . rational else temp_obj . ctrlpts curve1_ctrlpts = cpts [ 0 : ks + r ] curve2_ctrlpts = cpts [ ks + r - 1 : ] # Create a new curve for the first half curve1 = temp_obj . __class__ ( ) curve1 . degree = temp_obj . degree curve1 . set_ctrlpts ( curve1_ctrlpts ) curve1 . knotvector = curve1_kv # Create another curve fot the second half curve2 = temp_obj . __class__ ( ) curve2 . degree = temp_obj . degree curve2 . set_ctrlpts ( curve2_ctrlpts ) curve2 . 
knotvector = curve2_kv # Return the split curves ret_val = [ curve1 , curve2 ] return ret_val | Splits the curve at the input parametric coordinate . | 610 | 11 |
224,298 | def decompose_curve ( obj , * * kwargs ) : if not isinstance ( obj , abstract . Curve ) : raise GeomdlException ( "Input shape must be an instance of abstract.Curve class" ) multi_curve = [ ] curve = copy . deepcopy ( obj ) knots = curve . knotvector [ curve . degree + 1 : - ( curve . degree + 1 ) ] while knots : knot = knots [ 0 ] curves = split_curve ( curve , param = knot , * * kwargs ) multi_curve . append ( curves [ 0 ] ) curve = curves [ 1 ] knots = curve . knotvector [ curve . degree + 1 : - ( curve . degree + 1 ) ] multi_curve . append ( curve ) return multi_curve | Decomposes the curve into Bezier curve segments of the same degree . | 169 | 16 |
224,299 | def length_curve ( obj ) : if not isinstance ( obj , abstract . Curve ) : raise GeomdlException ( "Input shape must be an instance of abstract.Curve class" ) length = 0.0 evalpts = obj . evalpts num_evalpts = len ( obj . evalpts ) for idx in range ( num_evalpts - 1 ) : length += linalg . point_distance ( evalpts [ idx ] , evalpts [ idx + 1 ] ) return length | Computes the approximate length of the parametric curve . | 115 | 11 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.