idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
36,200
def _construct_sharded ( self ) : current_version = self . getMongoDVersion ( ) num_mongos = self . args [ 'mongos' ] if self . args [ 'mongos' ] > 0 else 1 shard_names = self . _get_shard_names ( self . args ) nextport = self . args [ 'port' ] + num_mongos for shard in shard_names : if ( self . args [ 'single' ] and L...
Construct command line strings for a sharded cluster .
36,201
def _construct_replset ( self , basedir , portstart , name , num_nodes , arbiter , extra = '' ) : self . config_docs [ name ] = { '_id' : name , 'members' : [ ] } for i in num_nodes : datapath = self . _create_paths ( basedir , '%s/rs%i' % ( name , i + 1 ) ) self . _construct_mongod ( os . path . join ( datapath , 'db'...
Construct command line strings for a replicaset .
36,202
def _construct_config ( self , basedir , port , name = None , isreplset = False ) : if isreplset : return self . _construct_replset ( basedir = basedir , portstart = port , name = name , num_nodes = list ( range ( self . args [ 'config' ] ) ) , arbiter = False , extra = '--configsvr' ) else : datapath = self . _create_...
Construct command line strings for a config server .
36,203
def _construct_single ( self , basedir , port , name = None , extra = '' ) : datapath = self . _create_paths ( basedir , name ) self . _construct_mongod ( os . path . join ( datapath , 'db' ) , os . path . join ( datapath , 'mongod.log' ) , port , replset = None , extra = extra ) host = '%s:%i' % ( self . args [ 'hostn...
Construct command line strings for a single node .
36,204
def _construct_mongod ( self , dbpath , logpath , port , replset = None , extra = '' ) : rs_param = '' if replset : rs_param = '--replSet %s' % replset auth_param = '' if self . args [ 'auth' ] : key_path = os . path . abspath ( os . path . join ( self . dir , 'keyfile' ) ) auth_param = '--keyFile %s' % key_path if sel...
Construct command line strings for mongod process .
36,205
def _construct_mongos ( self , logpath , port , configdb ) : extra = '' auth_param = '' if self . args [ 'auth' ] : key_path = os . path . abspath ( os . path . join ( self . dir , 'keyfile' ) ) auth_param = '--keyFile %s' % key_path if self . unknown_args : extra = self . _filter_valid_arguments ( self . unknown_args ...
Construct command line strings for a mongos process .
36,206
def addMatch(self, version, filename, lineno, loglevel, trigger):
    """Record a match for *version* on this LogCodeLine.

    The version is tracked in ``self.versions`` and the match details
    (filename, lineno, loglevel, trigger) are appended to that version's
    list in ``self.matches``.
    """
    match = (filename, lineno, loglevel, trigger)
    self.versions.add(version)
    self.matches[version].append(match)
Add a match to the LogCodeLine .
36,207
def accept_line(self, logevent):
    """Return True if *logevent* looks like a replica-set state change."""
    line = logevent.line_str
    # direct state-change message
    if "is now in state" in line and logevent.split_tokens[-1] in self.states:
        return True
    # replSet manager thread reporting a state
    return ("replSet" in line
            and logevent.thread == "rsMgr"
            and logevent.split_tokens[-1] in self.states)
Return True on match .
36,208
def color_map(cls, group):
    """Map a state *group* to a (color, marker) tuple.

    Unknown groups fall back to color slot 5; the marker is always the
    first configured marker.
    """
    print("Group %s" % group)
    try:
        idx = cls.states.index(group)
    except ValueError:
        # group is not a known state: use the fallback color slot
        idx = 5
    return cls.colors[idx], cls.markers[0]
Change default color behavior .
36,209
def add_line(self, logevent):
    """Append *logevent* to this plot type under a single unnamed group."""
    self.empty = False
    # all events share one group keyed by None
    self.groups.setdefault(None, []).append(logevent)
Append log line to this plot type .
36,210
def logevents(self):
    """Yield every logevent stored in the groups dictionary, group by group."""
    for events in self.groups.values():
        for logevent in events:
            yield logevent
Iterator yielding all logevents from groups dictionary .
36,211
def clicked(self, event):
    """Print the group name and number of events in the picked bin."""
    artist = event.artist
    # the artist carries the bin metadata set when it was plotted
    when = num2date(artist._mt_bin)
    print("%4i %s events in %s sec beginning at %s"
          % (artist._mt_n, artist._mt_group, self.bucketsize,
             when.strftime("%b %d %H:%M:%S")))
Print group name and number of items in bin .
36,212
def add ( self , item , group_by = None ) : key = None if not group_by : group_by = self . group_by if group_by : if hasattr ( group_by , '__call__' ) : key = group_by ( item ) elif isinstance ( group_by , str ) and hasattr ( item , group_by ) : key = getattr ( item , group_by ) else : key = None if isinstance ( group_...
General purpose class to group items by certain criteria .
36,213
def regroup(self, group_by=None):
    """Redistribute all stored items using a (possibly new) grouping criterion."""
    group_by = group_by or self.group_by
    old_groups = self.groups
    self.groups = {}
    # re-add every item so it lands in its new group
    for items in old_groups.values():
        for item in items:
            self.add(item, group_by)
Regroup items .
36,214
def move_items(self, from_group, to_group):
    """Move every element of *from_group* into *to_group* and drop the source."""
    if from_group not in self.keys() or not self.groups[from_group]:
        # nothing to move
        return
    target = self.groups.setdefault(to_group, [])
    target.extend(self.groups.get(from_group, []))
    if from_group in self.groups:
        del self.groups[from_group]
Take all elements from the from_group and add it to the to_group .
36,215
def sort_by_size ( self , group_limit = None , discard_others = False , others_label = 'others' ) : self . groups = OrderedDict ( sorted ( six . iteritems ( self . groups ) , key = lambda x : len ( x [ 1 ] ) , reverse = True ) ) if group_limit is not None : if not discard_others : group_keys = self . groups . keys ( ) ...
Sort the groups by the number of elements they contain descending .
36,216
def import_l2c_db ( ) : data_path = os . path . join ( os . path . dirname ( mtools . __file__ ) , 'data' ) if os . path . exists ( os . path . join ( data_path , 'log2code.pickle' ) ) : av , lv , lbw , lcl = cPickle . load ( open ( os . path . join ( data_path , 'log2code.pickle' ) , 'rb' ) ) return av , lv , lbw , lc...
Static import helper function .
36,217
def _strip_counters ( self , sub_line ) : try : end = sub_line . rindex ( '}' ) except ValueError : return sub_line else : return sub_line [ : ( end + 1 ) ]
Find the codeline end by taking out the counters and durations .
36,218
def _strip_datetime ( self , sub_line ) : try : begin = sub_line . index ( ']' ) except ValueError : return sub_line else : sub = sub_line [ begin + 1 : ] return sub
Strip datetime and other parts so that there is no redundancy .
36,219
def _find_variable ( self , pattern , logline ) : var_subs = [ ] first_index = logline . index ( pattern [ 0 ] ) beg_str = logline [ : first_index ] var_subs . append ( self . _strip_datetime ( beg_str ) ) for patt , patt_next in zip ( pattern [ : - 1 ] , pattern [ 1 : ] ) : pat = re . escape ( patt ) + '(.*)' + re . e...
Return the variable parts of the code given a tuple of strings pattern .
36,220
def _variable_parts ( self , line , codeline ) : var_subs = [ ] if codeline : var_subs = self . _find_variable ( codeline . pattern , line ) else : line_str = self . _strip_datetime ( self . _strip_counters ( line ) ) var_subs = [ line_str . strip ( ) ] return var_subs
Return variable parts of the codeline given the static parts .
36,221
def combine(self, pattern, variable):
    """Interleave variable parts with static pattern parts into one line string."""
    pieces = []
    # variable part comes first in each pair; missing slots become ''
    for var, pat in izip_longest(variable, pattern, fillvalue=''):
        pieces.append(var)
        pieces.append(pat)
    return ''.join(pieces)
Combine a pattern and variable parts to be a line string again .
36,222
def run ( self , arguments = None , get_unknowns = False ) : if os . name != 'nt' : signal . signal ( signal . SIGPIPE , signal . SIG_DFL ) if get_unknowns : if arguments : self . args , self . unknown_args = ( self . argparser . parse_known_args ( args = arguments . split ( ) ) ) else : ( self . args , self . unknown_...
Init point to execute the script .
36,223
def update_progress ( self , progress , prefix = '' ) : total_length = 40 if progress == 1. : sys . stderr . write ( '\r' + ' ' * ( total_length + len ( prefix ) + 50 ) ) sys . stderr . write ( '\n' ) sys . stderr . flush ( ) else : bar_length = int ( round ( total_length * progress ) ) sys . stderr . write ( '\r%s [%s...
Print a progress bar for longer - running scripts .
36,224
def accept_line(self, logevent):
    """Return True if the log line has the nominated y-axis field."""
    if self.regex_mode:
        # field holds a regex pattern to search for in the raw line
        return re.search(self.field, logevent.line_str) is not None
    # field names an attribute on the logevent
    return getattr(logevent, self.field) is not None
Return True if the log line has the nominated yaxis field .
36,225
def clicked ( self , event ) : group = event . artist . _mt_group indices = event . ind major , minor , _ = mpl_version . split ( '.' ) if ( int ( major ) , int ( minor ) ) < ( 1 , 2 ) or not event . mouseevent . dblclick : for i in indices : print ( self . groups [ group ] [ i ] . line_str ) else : first = indices [ 0...
Call if an element of this plottype is clicked .
36,226
def run ( self , arguments = None ) : LogFileTool . run ( self , arguments ) for i , self . logfile in enumerate ( self . args [ 'logfile' ] ) : if i > 0 : print ( "\n ------------------------------------------\n" ) if self . logfile . datetime_format == 'ctime-pre2.4' : start_time = ( self . logfile . start . strftime...
Print useful information about the log file .
36,227
def filesize(self):
    """Return the logfile size in bytes; None when reading from stdin."""
    if self.from_stdin:
        return None
    if not self._filesize:
        # lazily compute start/end/size on first access
        self._calculate_bounds()
    return self._filesize
Lazy evaluation of start and end of logfile .
36,228
def num_lines(self):
    """Return the number of lines in the logfile; None when reading from stdin."""
    if self.from_stdin:
        return None
    if not self._num_lines:
        # lazily walk the file once to count lines
        self._iterate_lines()
    return self._num_lines
Lazy evaluation of the number of lines .
36,229
def versions(self):
    """Return the sequence of version changes seen across restarts.

    Consecutive restarts with the same version collapse to one entry.
    """
    result = []
    for version, _ in self.restarts:
        if not result or version != result[-1]:
            result.append(version)
    return result
Return all version changes .
36,230
def next ( self ) : line = self . filehandle . readline ( ) line = line . decode ( 'utf-8' , 'replace' ) if line == '' : raise StopIteration line = line . rstrip ( '\n' ) le = LogEvent ( line ) if self . _datetime_format and self . _datetime_nextpos is not None : ret = le . set_datetime_hint ( self . _datetime_format ,...
Get next line adjust for year rollover and hint datetime format .
36,231
def _calculate_bounds ( self ) : if self . _bounds_calculated : return if self . from_stdin : return False max_start_lines = 10 lines_checked = 0 for line in self . filehandle : logevent = LogEvent ( line ) lines_checked += 1 if logevent . datetime : self . _start = logevent . datetime self . _timezone = logevent . dat...
Calculate beginning and end of logfile .
36,232
def _find_curr_line ( self , prev = False ) : curr_pos = self . filehandle . tell ( ) jump_back = min ( self . filehandle . tell ( ) , 15000 ) self . filehandle . seek ( - jump_back , 1 ) buff = self . filehandle . read ( jump_back ) self . filehandle . seek ( curr_pos , 0 ) if prev and self . prev_pos is not None and ...
Internal helper function .
36,233
def fast_forward ( self , start_dt ) : if self . from_stdin : return else : max_mark = self . filesize step_size = max_mark self . filehandle . seek ( 0 ) le = self . next ( ) if le . datetime and le . datetime >= start_dt : self . filehandle . seek ( 0 ) return le = None self . filehandle . seek ( 0 ) while abs ( step...
Fast - forward file to given start_dt datetime obj using binary search .
36,234
def setup ( self ) : if self . mlogfilter . is_stdin : now = datetime . now ( ) self . startDateTime = datetime ( now . year , 1 , 1 , tzinfo = tzutc ( ) ) self . endDateTime = datetime ( MAXYEAR , 12 , 31 , tzinfo = tzutc ( ) ) else : logfiles = self . mlogfilter . args [ 'logfile' ] self . startDateTime = min ( [ lf ...
Get start and end date of logfile before starting to parse .
36,235
def num_events(self):
    """Lazily count and cache the number of events in the collection."""
    if not self._num_events:
        # first access: ask the collection for its document count
        self._num_events = self.coll_handle.count()
    return self._num_events
Lazy evaluation of the number of events .
36,236
def next(self):
    """Return the next profile document wrapped as a LogEvent."""
    if not self.cursor:
        # first call: open a cursor over the collection, ordered by timestamp
        self.cursor = self.coll_handle.find().sort([("ts", ASCENDING)])
    doc = self.cursor.next()
    # tag the event with this collection's name as its thread
    doc['thread'] = self.name
    return LogEvent(doc)
Make iterators .
36,237
def _calculate_bounds ( self ) : first = self . coll_handle . find_one ( None , sort = [ ( "ts" , ASCENDING ) ] ) last = self . coll_handle . find_one ( None , sort = [ ( "ts" , DESCENDING ) ] ) self . _start = first [ 'ts' ] if self . _start . tzinfo is None : self . _start = self . _start . replace ( tzinfo = tzutc (...
Calculate beginning and end of log events .
36,238
def run ( self ) : if ProfileCollection and isinstance ( self . mloginfo . logfile , ProfileCollection ) : print ( "\n not available for system.profile collections\n" ) return codelines = defaultdict ( lambda : 0 ) non_matches = 0 logfile = self . mloginfo . logfile if logfile . start and logfile . end and not self ...
Run each line through log2code and group by matched pattern .
36,239
def shell2json ( s ) : replace = { r'BinData\(.+?\)' : '1' , r'(new )?Date\(.+?\)' : '1' , r'Timestamp\(.+?\)' : '1' , r'ObjectId\(.+?\)' : '1' , r'DBRef\(.+?\)' : '1' , r'undefined' : '1' , r'MinKey' : '1' , r'MaxKey' : '1' , r'NumberLong\(.+?\)' : '1' , r'/.+?/\w*' : '1' } for key , value in replace . items ( ) : s =...
Convert shell syntax to json .
36,240
def json2pattern ( s ) : s , _ = re . subn ( r'([{,])\s*([^,{\s\'"]+)\s*:' , ' \\1 "\\2" : ' , s ) s = shell2json ( s ) s , n = re . subn ( r'([:,\[])\s*([^{}\[\]"]+?)\s*([,}\]])' , '\\1 1 \\3' , s ) try : doc = json . loads ( s , object_hook = _decode_pattern_dict ) return json . dumps ( doc , sort_keys = True , separ...
Convert JSON format to a query pattern .
36,241
def print_table ( rows , override_headers = None , uppercase_headers = True ) : if len ( rows ) == 0 : return keys = list ( rows [ 0 ] . keys ( ) ) headers = override_headers or keys if uppercase_headers : rows = [ dict ( zip ( keys , map ( lambda x : x . upper ( ) , headers ) ) ) , None ] + rows else : rows = [ dict (...
All rows need to be a list of dictionaries all with the same keys .
36,242
def set_line_str(self, line_str):
    """Set the raw line string, resetting cached derived state on change.

    Raises ValueError for events built from system.profile documents,
    which have no line string to overwrite.
    """
    if not self.from_string:
        raise ValueError("can't set line_str for LogEvent created from "
                         "system.profile documents.")
    if line_str != self._line_str:
        self._line_str = line_str.rstrip()
        # invalidate all lazily-extracted fields
        self._reset()
Set line_str .
36,243
def get_line_str(self):
    """Assemble the full line string, skipping empty components.

    Events from log files include the merge marker; events from
    system.profile documents do not.
    """
    if self.from_string:
        parts = [self.merge_marker_str, self._datetime_str, self._line_str]
    else:
        parts = [self._datetime_str, self._line_str]
    return ' '.join(p for p in parts if p)
Return line_str depending on source logfile or system . profile .
36,244
def _match_datetime_pattern ( self , tokens ) : assume_iso8601_format = len ( tokens ) < 4 if not assume_iso8601_format : weekday , month , day , time = tokens [ : 4 ] if ( len ( tokens ) < 4 or ( weekday not in self . weekdays ) or ( month not in self . months ) or not day . isdigit ( ) ) : assume_iso8601_format = Tru...
Match the datetime pattern at the beginning of the token list .
36,245
def _extract_operation_and_namespace ( self ) : split_tokens = self . split_tokens if not self . _datetime_nextpos : _ = self . thread if not self . _datetime_nextpos or ( len ( split_tokens ) <= self . _datetime_nextpos + 2 ) : return op = split_tokens [ self . _datetime_nextpos + 1 ] . lower ( ) if op == 'warning:' :...
Helper method to extract both operation and namespace from a logevent .
36,246
def _extract_counters ( self ) : counters = [ 'nscanned' , 'nscannedObjects' , 'ntoreturn' , 'nreturned' , 'ninserted' , 'nupdated' , 'ndeleted' , 'r' , 'w' , 'numYields' , 'planSummary' , 'writeConflicts' , 'keyUpdates' ] counter_equiv = { 'docsExamined' : 'nscannedObjects' , 'keysExamined' : 'nscanned' , 'nDeleted' :...
Extract counters like nscanned and nreturned from the logevent .
36,247
def parse_all ( self ) : tokens = self . split_tokens duration = self . duration datetime = self . datetime thread = self . thread operation = self . operation namespace = self . namespace pattern = self . pattern nscanned = self . nscanned nscannedObjects = self . nscannedObjects ntoreturn = self . ntoreturn nreturned...
Trigger extraction of all information .
36,248
def to_dict ( self , labels = None ) : output = { } if labels is None : labels = [ 'line_str' , 'split_tokens' , 'datetime' , 'operation' , 'thread' , 'namespace' , 'nscanned' , 'ntoreturn' , 'nreturned' , 'ninserted' , 'nupdated' , 'ndeleted' , 'duration' , 'r' , 'w' , 'numYields' ] for label in labels : value = getat...
Convert LogEvent object to a dictionary .
36,249
def to_json(self, labels=None):
    """Serialize this LogEvent to a JSON string for the given labels."""
    # DateTimeEncoder handles datetime values that json can't serialize natively
    return json.dumps(self.to_dict(labels), cls=DateTimeEncoder,
                      ensure_ascii=False)
Convert LogEvent object to valid JSON .
36,250
def addFilter(self, filterclass):
    """Register *filterclass* with the parser unless it is already present."""
    if filterclass not in self.filters:
        self.filters.append(filterclass)
Add a filter class to the parser .
36,251
def _outputLine ( self , logevent , length = None , human = False ) : if self . args [ 'timestamp_format' ] != 'none' : logevent . _reformat_timestamp ( self . args [ 'timestamp_format' ] , force = True ) if any ( self . args [ 'timezone' ] ) : if self . args [ 'timestamp_format' ] == 'none' : self . args [ 'timestamp_...
Print the final line .
36,252
def _msToString ( self , ms ) : hr , ms = divmod ( ms , 3600000 ) mins , ms = divmod ( ms , 60000 ) secs , mill = divmod ( ms , 1000 ) return "%ihr %imin %isecs %ims" % ( hr , mins , secs , mill )
Change milliseconds to hours min sec ms format .
36,253
def _changeMs ( self , line ) : try : last_space_pos = line . rindex ( ' ' ) except ValueError : return line else : end_str = line [ last_space_pos : ] new_string = line if end_str [ - 2 : ] == 'ms' and int ( end_str [ : - 2 ] ) >= 1000 : ms = int ( end_str [ : - 2 ] ) new_string = ( line [ : last_space_pos ] + ' (' + ...
Change the ms part in the string if needed .
36,254
def _formatNumbers ( self , line ) : if sys . version_info < ( 2 , 7 ) : return line last_index = 0 try : last_index = ( line . rindex ( '}' ) + 1 ) end = line [ last_index : ] except ValueError : return line else : splitted = re . split ( "(\d+)" , end ) for index , val in enumerate ( splitted ) : converted = 0 try : ...
Format the numbers so that there are commas inserted .
36,255
def _datetime_key_for_merge(self, logevent):
    """Sort key for merging log files by datetime.

    Exhausted files (no logevent) sort after everything; lines without
    a parsable datetime sort before everything.
    """
    if not logevent:
        # no more lines in this file: push it to the very end
        return datetime(MAXYEAR, 12, 31, 23, 59, 59, 999999, tzutc())
    return logevent.datetime or datetime(MINYEAR, 1, 1, 0, 0, 0, 0, tzutc())
Helper method for ordering log lines correctly during merge .
36,256
def _merge_logfiles ( self ) : lines = [ next ( iter ( logfile ) , None ) for logfile in self . args [ 'logfile' ] ] for i in range ( len ( lines ) ) : if lines [ i ] and lines [ i ] . datetime : lines [ i ] . _datetime = ( lines [ i ] . datetime + timedelta ( hours = self . args [ 'timezone' ] [ i ] ) ) while any ( li...
Helper method to merge several files together by datetime .
36,257
def logfile_generator ( self ) : if not self . args [ 'exclude' ] : start_limits = [ f . start_limit for f in self . filters if hasattr ( f , 'start_limit' ) ] if start_limits : for logfile in self . args [ 'logfile' ] : logfile . fast_forward ( max ( start_limits ) ) if len ( self . args [ 'logfile' ] ) > 1 : for loge...
Yield each line of the file or the next line if several files .
36,258
def setup ( self ) : if not self . mask_source . start : raise SystemExit ( "Can't parse format of %s. Is this a log file or " "system.profile collection?" % self . mlogfilter . args [ 'mask' ] ) self . mask_half_td = timedelta ( seconds = self . mlogfilter . args [ 'mask_size' ] / 2 ) logevent_list = list ( self . mas...
Create mask list .
36,259
def source_files(mongodb_path):
    """Yield all .cpp/.c/.h files below *mongodb_path*, skipping dbtests dirs."""
    for root, _dirs, files in os.walk(mongodb_path):
        if 'dbtests' in root:
            # test sources are not interesting here
            continue
        for filename in files:
            if filename.endswith(('.cpp', '.c', '.h')):
                yield os.path.join(root, filename)
Find source files .
36,260
def index ( ) : if current_app . config [ 'ARA_PLAYBOOK_OVERRIDE' ] is not None : override = current_app . config [ 'ARA_PLAYBOOK_OVERRIDE' ] results = ( models . TaskResult . query . join ( models . Task ) . filter ( models . Task . playbook_id . in_ ( override ) ) ) else : results = models . TaskResult . query . all ...
This is not served anywhere in the web application . It is used explicitly in the context of generating static files since flask - frozen requires url_for s to crawl content . url_for s are not used with result . show_result directly and are instead dynamically generated through javascript for performance purposes .
36,261
def content_sha1(context):
    """Return the sha1 hex digest of the content column.

    Accepts either an SQLAlchemy execution context (reads the 'content'
    parameter) or the raw content itself.
    """
    try:
        content = context.current_parameters['content']
    except AttributeError:
        # called directly with the content rather than a context object
        content = context
    return hashlib.sha1(encodeutils.to_utf8(content)).hexdigest()
Used by the FileContent model to automatically compute the sha1 hash of content before storing it to the database .
36,262
def main ( ) : files = models . File . query hosts = models . Host . query facts = models . HostFacts . query playbooks = models . Playbook . query records = models . Data . query tasks = models . Task . query results = models . TaskResult . query if current_app . config [ 'ARA_PLAYBOOK_OVERRIDE' ] is not None : overri...
Returns the about page
36,263
def index ( ) : if current_app . config [ 'ARA_PLAYBOOK_OVERRIDE' ] is not None : override = current_app . config [ 'ARA_PLAYBOOK_OVERRIDE' ] hosts = ( models . Host . query . filter ( models . Host . playbook_id . in_ ( override ) ) ) else : hosts = models . Host . query . all ( ) return render_template ( 'host_index....
This is not served anywhere in the web application . It is used explicitly in the context of generating static files since flask - frozen requires url_for s to crawl content . url_for s are not used with host . show_host directly and are instead dynamically generated through javascript for performance purposes .
36,264
def config(self):
    """Return all uppercase (configuration) attributes of this object as a dict.

    Uses getattr rather than indexing ``self.__dict__``: ``dir(self)``
    also reports uppercase attributes defined on the class, which are
    absent from the instance ``__dict__`` and previously raised KeyError.
    """
    return {name: getattr(self, name) for name in dir(self) if name.isupper()}
Returns a dictionary for the loaded configuration
36,265
def index ( ) : if current_app . config [ 'ARA_PLAYBOOK_OVERRIDE' ] is not None : override = current_app . config [ 'ARA_PLAYBOOK_OVERRIDE' ] files = ( models . File . query . filter ( models . File . playbook_id . in_ ( override ) ) ) else : files = models . File . query . all ( ) return render_template ( 'file_index....
This is not served anywhere in the web application . It is used explicitly in the context of generating static files since flask - frozen requires url_for s to crawl content . url_for s are not used with file . show_file directly and are instead dynamically generated through javascript for performance purposes .
36,266
def show_file(file_):
    """Render the detail page for a single file, or 404 if it is unknown."""
    record = models.File.query.get(file_)
    if record is None:
        abort(404)
    return render_template('file.html', file_=record)
Returns details of a file
36,267
def configure_db ( app ) : models . db . init_app ( app ) log = logging . getLogger ( 'ara.webapp.configure_db' ) log . debug ( 'Setting up database...' ) if app . config . get ( 'ARA_AUTOCREATE_DATABASE' ) : with app . app_context ( ) : migrations = app . config [ 'DB_MIGRATIONS' ] flask_migrate . Migrate ( app , mode...
0 . 10 is the first version of ARA that ships with a stable database schema . We can identify a database that originates from before this by checking if there is an alembic revision available . If there is no alembic revision available assume we are running the first revision which contains the latest state of the data...
36,268
def configure_cache(app):
    """Attach a ``_cache`` dict to the app context for caching data."""
    log = logging.getLogger('ara.webapp.configure_cache')
    log.debug('Configuring cache')
    # only create the cache when it is missing or falsy (matches original check)
    if not getattr(app, '_cache', None):
        app._cache = {}
Sets up an attribute to cache data in the app context
36,269
def bspline_to_nurbs ( obj ) : if isinstance ( obj , BSpline . Curve ) : return _convert . convert_curve ( obj , NURBS ) elif isinstance ( obj , BSpline . Surface ) : return _convert . convert_surface ( obj , NURBS ) elif isinstance ( obj , BSpline . Volume ) : return _convert . convert_volume ( obj , NURBS ) else : ra...
Converts non - rational parametric shapes to rational ones .
36,270
def nurbs_to_bspline ( obj , ** kwargs ) : if not obj . rational : raise TypeError ( "The input must be a rational shape" ) tol = kwargs . get ( 'tol' , 10e-8 ) for w in obj . weights : if abs ( w - 1.0 ) > tol : print ( "Cannot extract non-rational components" ) return obj if isinstance ( obj , NURBS . Curve ) : retur...
Extracts the non - rational components from rational parametric shapes if possible .
36,271
def doolittle ( matrix_a ) : matrix_u = [ [ 0.0 for _ in range ( len ( matrix_a ) ) ] for _ in range ( len ( matrix_a ) ) ] matrix_l = [ [ 0.0 for _ in range ( len ( matrix_a ) ) ] for _ in range ( len ( matrix_a ) ) ] for i in range ( 0 , len ( matrix_a ) ) : for k in range ( i , len ( matrix_a ) ) : matrix_u [ i ] [ ...
Doolittle s Method for LU - factorization .
36,272
def read_files ( project , ext ) : project_path = os . path . join ( os . path . dirname ( __file__ ) , project ) file_list = os . listdir ( project_path ) flist = [ ] flist_path = [ ] for f in file_list : f_path = os . path . join ( project_path , f ) if os . path . isfile ( f_path ) and f . endswith ( ext ) and f != ...
Reads files inside the input project directory .
36,273
def copy_files ( src , ext , dst ) : src_path = os . path . join ( os . path . dirname ( __file__ ) , src ) dst_path = os . path . join ( os . path . dirname ( __file__ ) , dst ) file_list = os . listdir ( src_path ) for f in file_list : if f == '__init__.py' : continue f_path = os . path . join ( src_path , f ) if os ...
Copies files with extensions ext from src to dst directory .
36,274
def make_dir ( project ) : project_path = os . path . join ( os . path . dirname ( __file__ ) , project ) if os . path . exists ( project_path ) : shutil . rmtree ( project_path ) os . mkdir ( project_path ) with open ( os . path . join ( project_path , '__init__.py' ) , 'w' ) as fp : fp . write ( '__version__ = "' + s...
Creates the project directory for compiled modules .
36,275
def in_argv(arg_list):
    """Return True if any element of *arg_list* equals or prefixes a sys.argv entry."""
    return any(parg == arg or arg.startswith(parg)
               for arg in sys.argv for parg in arg_list)
Checks if any of the elements of the input list is in sys . argv array .
36,276
def generate ( degree , num_ctrlpts , ** kwargs ) : if degree == 0 or num_ctrlpts == 0 : raise ValueError ( "Input values should be different than zero." ) clamped = kwargs . get ( 'clamped' , True ) num_repeat = degree num_segments = num_ctrlpts - ( degree + 1 ) if not clamped : num_repeat = 0 num_segments = degree + ...
Generates an equally spaced knot vector .
36,277
def check ( degree , knot_vector , num_ctrlpts ) : try : if knot_vector is None or len ( knot_vector ) == 0 : raise ValueError ( "Input knot vector cannot be empty" ) except TypeError as e : print ( "An error occurred: {}" . format ( e . args [ - 1 ] ) ) raise TypeError ( "Knot vector must be a list or tuple" ) except ...
Checks the validity of the input knot vector .
36,278
def interpolate_curve ( points , degree , ** kwargs ) : use_centripetal = kwargs . get ( 'centripetal' , False ) num_points = len ( points ) uk = compute_params_curve ( points , use_centripetal ) kv = compute_knot_vector ( degree , num_points , uk ) matrix_a = _build_coeff_matrix ( degree , kv , uk , points ) ctrlpts =...
Curve interpolation through the data points .
36,279
def interpolate_surface ( points , size_u , size_v , degree_u , degree_v , ** kwargs ) : use_centripetal = kwargs . get ( 'centripetal' , False ) uk , vl = compute_params_surface ( points , size_u , size_v , use_centripetal ) kv_u = compute_knot_vector ( degree_u , size_u , uk ) kv_v = compute_knot_vector ( degree_v , ...
Surface interpolation through the data points .
36,280
def compute_knot_vector(degree, num_points, params):
    """Compute a clamped knot vector from the parameter list via averaging."""
    # degree+1 leading zeros (clamped start)
    kv = [0.0] * (degree + 1)
    for i in range(num_points - degree - 1):
        # interior knot: average of `degree` consecutive parameters
        kv.append((1.0 / degree) * sum(params[j]
                                       for j in range(i + 1, i + degree + 1)))
    # degree+1 trailing ones (clamped end)
    kv.extend([1.0] * (degree + 1))
    return kv
Computes a knot vector from the parameter list using averaging method .
36,281
def ginterp ( coeff_matrix , points ) : dim = len ( points [ 0 ] ) num_points = len ( points ) matrix_l , matrix_u = linalg . lu_decomposition ( coeff_matrix ) ctrlpts = [ [ 0.0 for _ in range ( dim ) ] for _ in range ( num_points ) ] for i in range ( dim ) : b = [ pt [ i ] for pt in points ] y = linalg . forward_subst...
Applies global interpolation to the set of data points to find control points .
36,282
def _build_coeff_matrix ( degree , knotvector , params , points ) : num_points = len ( points ) matrix_a = [ [ 0.0 for _ in range ( num_points ) ] for _ in range ( num_points ) ] for i in range ( num_points ) : span = helpers . find_span_linear ( degree , knotvector , num_points , params [ i ] ) matrix_a [ i ] [ span -...
Builds the coefficient matrix for global interpolation .
36,283
def create_render_window ( actors , callbacks , ** kwargs ) : figure_size = kwargs . get ( 'figure_size' , ( 800 , 600 ) ) camera_position = kwargs . get ( 'camera_position' , ( 0 , 0 , 100 ) ) center_points = [ ] for actor in actors : center_points . append ( actor . GetCenter ( ) ) camera_focal_point = linalg . vecto...
Creates VTK render window with an interactor .
36,284
def create_color(color):
    """Convert a '#rrggbb' hex string or named color to an RGB triple in [0, 1]."""
    if color[0] == "#":
        # two hex digits per channel, normalized to [0, 1]
        return [int(color[i:i + 2], 16) / 255 for i in range(1, 7, 2)]
    # fall back to VTK's named-color lookup
    nc = vtk.vtkNamedColors()
    return nc.GetColor3d(color)
Creates VTK - compatible RGB color from a color string .
36,285
def create_actor_pts ( pts , color , ** kwargs ) : array_name = kwargs . get ( 'name' , "" ) array_index = kwargs . get ( 'index' , 0 ) point_size = kwargs . get ( 'size' , 5 ) point_sphere = kwargs . get ( 'point_as_sphere' , True ) points = vtk . vtkPoints ( ) points . SetData ( pts ) polydata = vtk . vtkPolyData ( )...
Creates a VTK actor for rendering scatter plots .
36,286
def create_actor_polygon ( pts , color , ** kwargs ) : array_name = kwargs . get ( 'name' , "" ) array_index = kwargs . get ( 'index' , 0 ) line_width = kwargs . get ( 'size' , 1.0 ) points = vtk . vtkPoints ( ) points . SetData ( pts ) num_points = points . GetNumberOfPoints ( ) cells = vtk . vtkCellArray ( ) for i in...
Creates a VTK actor for rendering polygons .
36,287
def create_actor_mesh ( pts , lines , color , ** kwargs ) : array_name = kwargs . get ( 'name' , "" ) array_index = kwargs . get ( 'index' , 0 ) line_width = kwargs . get ( 'size' , 0.5 ) points = vtk . vtkPoints ( ) points . SetData ( pts ) cells = vtk . vtkCellArray ( ) for line in lines : pline = vtk . vtkPolyLine (...
Creates a VTK actor for rendering quadrilateral plots .
36,288
def create_actor_tri ( pts , tris , color , ** kwargs ) : array_name = kwargs . get ( 'name' , "" ) array_index = kwargs . get ( 'index' , 0 ) points = vtk . vtkPoints ( ) points . SetData ( pts ) triangles = vtk . vtkCellArray ( ) for tri in tris : tmp = vtk . vtkTriangle ( ) for i , v in enumerate ( tri ) : tmp . Get...
Creates a VTK actor for rendering triangulated surface plots .
36,289
def create_actor_hexahedron ( grid , color , ** kwargs ) : array_name = kwargs . get ( 'name' , "" ) array_index = kwargs . get ( 'index' , 0 ) points = vtk . vtkPoints ( ) hexarray = vtk . vtkCellArray ( ) for j , pt in enumerate ( grid ) : tmp = vtk . vtkHexahedron ( ) fb = pt [ 0 ] for i , v in enumerate ( fb ) : po...
Creates a VTK actor for rendering voxels using hexahedron elements .
36,290
def create_actor_delaunay ( pts , color , ** kwargs ) : array_name = kwargs . get ( 'name' , "" ) array_index = kwargs . get ( 'index' , 0 ) use_delaunay3d = kwargs . get ( "d3d" , False ) points = vtk . vtkPoints ( ) points . SetData ( pts ) polydata = vtk . vtkPolyData ( ) polydata . SetPoints ( points ) triangulatio...
Creates a VTK actor for rendering triangulated plots using Delaunay triangulation .
36,291
def flip_ctrlpts_u(ctrlpts, size_u, size_v):
    """Re-order a flat control-point list from u-row order to v-row order.

    Coordinates are converted to float in the process.
    """
    return [[float(coord) for coord in ctrlpts[u + (v * size_u)]]
            for u in range(size_u) for v in range(size_v)]
Flips a list of 1 - dimensional control points from u - row order to v - row order .
36,292
def generate_ctrlptsw(ctrlpts):
    """Generate weighted control points: multiply coordinates by the trailing weight.

    Input points are [x, y, (z,) w]; output is [x*w, y*w, (z*w,) w].
    """
    result = []
    for cpt in ctrlpts:
        weight = cpt[-1]
        weighted = [float(coord * weight) for coord in cpt[:-1]]
        weighted.append(float(weight))
        result.append(weighted)
    return result
Generates weighted control points from unweighted ones in 1 - D .
36,293
def generate_ctrlpts_weights(ctrlpts):
    """Generate unweighted control points: divide coordinates by the trailing weight.

    Input points are [x*w, y*w, (z*w,) w]; output is [x, y, (z,) w].
    """
    result = []
    for cpt in ctrlpts:
        weight = cpt[-1]
        unweighted = [float(coord / weight) for coord in cpt[:-1]]
        unweighted.append(float(weight))
        result.append(unweighted)
    return result
Generates unweighted control points from weighted ones in 1 - D .
36,294
def combine_ctrlpts_weights(ctrlpts, weights=None):
    """Multiply control points by weights to produce weighted points.

    With no weights given, unit weights are assumed; each output point is
    the input point scaled by its weight, with the weight appended.
    """
    if weights is None:
        weights = [1.0] * len(ctrlpts)
    return [[float(coord * w) for coord in pt] + [float(w)]
            for pt, w in zip(ctrlpts, weights)]
Multiplies control points by the weights to generate weighted control points .
36,295
def separate_ctrlpts_weights(ctrlptsw):
    """Split weighted control points into unweighted points and a weights vector."""
    ctrlpts = []
    weights = []
    for ptw in ctrlptsw:
        w = ptw[-1]
        ctrlpts.append([float(value / w) for value in ptw[:-1]])
        weights.append(w)
    return [ctrlpts, weights]
Divides weighted control points by weights to generate unweighted control points and weights vector .
36,296
def flip_ctrlpts2d_file(file_in='', file_out='ctrlpts_flip.txt'):
    """Flip u/v directions of a 2D control points file and save the result."""
    ctrlpts2d, size_u, size_v = _read_ctrltps2d_file(file_in)
    flipped = flip_ctrlpts2d(ctrlpts2d, size_u, size_v)
    _save_ctrlpts2d_file(flipped, size_u, size_v, file_out)
Flips u and v directions of a 2D control points file and saves flipped coordinates to a file .
36,297
def generate_ctrlptsw2d_file(file_in='', file_out='ctrlptsw.txt'):
    """Generate weighted control points from an unweighted 2D control points file."""
    ctrlpts2d, size_u, size_v = _read_ctrltps2d_file(file_in)
    weighted = generate_ctrlptsw2d(ctrlpts2d)
    _save_ctrlpts2d_file(weighted, size_u, size_v, file_out)
Generates weighted control points from unweighted ones in 2 - D .
36,298
def keypress_callback ( self , obj , ev ) : key = obj . GetKeySym ( ) render_window = obj . GetRenderWindow ( ) renderer = render_window . GetRenderers ( ) . GetFirstRenderer ( ) picker = obj . GetPicker ( ) actor = picker . GetActor ( ) if key == 'Up' : camera = renderer . GetActiveCamera ( ) camera . Pitch ( 2.5 ) if...
VTK callback for keypress events .
36,299
def generate_voxel_grid ( bbox , szval , use_cubes = False ) : if szval [ 0 ] <= 1 or szval [ 1 ] <= 1 or szval [ 2 ] <= 1 : raise GeomdlException ( "Size values must be bigger than 1" , data = dict ( sizevals = szval ) ) steps = [ float ( bbox [ 1 ] [ idx ] - bbox [ 0 ] [ idx ] ) / float ( szval [ idx ] - 1 ) for idx ...
Generates the voxel grid with the desired size .