idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
38,800
def _fit_stage_componentwise(X, residuals, sample_weight, **fit_params):
    """Fit a component-wise weighted least squares model.

    One univariate least-squares learner is fit per feature; the learner
    with the smallest squared residual error is returned.
    """
    n_features = X.shape[1]
    errors = numpy.empty(n_features)
    learners = []
    for feature_idx in range(n_features):
        cls = ComponentwiseLeastSquares(feature_idx).fit(X, residuals, sample_weight)
        errors[feature_idx] = squared_norm(residuals - cls.predict(X))
        learners.append(cls)
    # nanargmin skips learners whose error could not be computed
    return learners[numpy.nanargmin(errors)]
Fit component - wise weighted least squares model
38,801
def coef_(self):
    """Return the aggregated coefficients.

    Sums the learning-rate-scaled coefficients of all fitted
    component-wise estimators into one vector of length n_features_ + 1.
    """
    aggregated = numpy.zeros(self.n_features_ + 1, dtype=float)
    for est in self.estimators_:
        aggregated[est.component] += self.learning_rate * est.coef_
    return aggregated
Return the aggregated coefficients .
38,802
def _fit_stage(self, i, X, y, y_pred, sample_weight, sample_mask,
               random_state, scale, X_idx_sorted, X_csc=None, X_csr=None):
    """Fit another stage of ``n_classes_`` trees to the boosting model.

    Fits one regression tree per output dimension ``k`` on the negative
    gradient of the loss, then either applies dropout rescaling of earlier
    stages or the usual terminal-region update.  Returns the updated
    ``y_pred`` array.
    """
    # Fix: ``numpy.bool`` was a deprecated alias of the builtin ``bool``
    # and was removed in NumPy 1.24; ``bool`` compares equal to the dtype
    # on every NumPy version.
    assert sample_mask.dtype == bool
    loss = self.loss_
    # Dropout only applies after the first stage and before the last slot.
    do_dropout = self.dropout_rate > 0. and 0 < i < len(scale) - 1

    for k in range(loss.K):
        residual = loss.negative_gradient(y, y_pred, k=k,
                                          sample_weight=sample_weight)

        tree = DecisionTreeRegressor(
            criterion=self.criterion,
            splitter='best',
            max_depth=self.max_depth,
            min_samples_split=self.min_samples_split,
            min_samples_leaf=self.min_samples_leaf,
            min_weight_fraction_leaf=self.min_weight_fraction_leaf,
            min_impurity_split=self.min_impurity_split,
            min_impurity_decrease=self.min_impurity_decrease,
            max_features=self.max_features,
            max_leaf_nodes=self.max_leaf_nodes,
            random_state=random_state,
            presort=self.presort)

        if self.subsample < 1.0:
            # Zero out the weight of out-of-bag samples.
            sample_weight = sample_weight * sample_mask.astype(numpy.float64)

        X = X_csr if X_csr is not None else X
        tree.fit(X, residual, sample_weight=sample_weight,
                 check_input=False, X_idx_sorted=X_idx_sorted)

        self.estimators_[i, k] = tree

        if do_dropout:
            # Pick a random subset of previous stages to drop and rescale
            # the remaining ones (DART-style dropout).
            drop_model, n_dropped = _sample_binomial_plus_one(
                self.dropout_rate, i + 1, random_state)
            scale[i + 1] = 1. / (n_dropped + 1.)

            y_pred[:, k] = 0
            for m in range(i + 1):
                if drop_model[m] == 1:
                    scale[m] *= n_dropped / (n_dropped + 1.)
                else:
                    y_pred[:, k] += (self.learning_rate * scale[m]
                                     * self.estimators_[m, k].predict(X).ravel())
        else:
            loss.update_terminal_regions(tree.tree_, X, y, residual, y_pred,
                                         sample_weight, sample_mask,
                                         self.learning_rate, k=k)

    return y_pred
Fit another stage of n_classes_ trees to the boosting model .
38,803
def _fit_stages(self, X, y, y_pred, sample_weight, random_state,
                begin_at_stage=0, monitor=None, X_idx_sorted=None):
    """Iteratively fit the boosting stages.

    Runs stages ``begin_at_stage`` .. ``n_estimators - 1``, tracking train
    score (and OOB improvement when subsampling) and honoring an optional
    ``monitor`` early-stopping callback.  Returns the number of stages fit.
    """
    n_samples = X.shape[0]
    do_oob = self.subsample < 1.0
    # Fix: ``numpy.bool`` alias was removed in NumPy 1.24; the builtin
    # ``bool`` produces the identical dtype.
    sample_mask = numpy.ones((n_samples,), dtype=bool)
    n_inbag = max(1, int(self.subsample * n_samples))
    loss_ = self.loss_

    if self.verbose:
        verbose_reporter = VerboseReporter(self.verbose)
        verbose_reporter.init(self, begin_at_stage)

    X_csc = csc_matrix(X) if issparse(X) else None
    X_csr = csr_matrix(X) if issparse(X) else None

    if self.dropout_rate > 0.:
        scale = numpy.ones(self.n_estimators, dtype=float)
    else:
        scale = None

    # ``i`` must survive the loop even when n_estimators == begin_at_stage.
    i = begin_at_stage
    for i in range(begin_at_stage, self.n_estimators):
        if do_oob:
            sample_mask = _random_sample_mask(n_samples, n_inbag, random_state)
            y_oob_sample = y[~sample_mask]
            old_oob_score = loss_(y_oob_sample, y_pred[~sample_mask],
                                  sample_weight[~sample_mask])

        y_pred = self._fit_stage(i, X, y, y_pred, sample_weight, sample_mask,
                                 random_state, scale, X_idx_sorted,
                                 X_csc, X_csr)

        if do_oob:
            self.train_score_[i] = loss_(y[sample_mask], y_pred[sample_mask],
                                         sample_weight[sample_mask])
            self.oob_improvement_[i] = (
                old_oob_score
                - loss_(y_oob_sample, y_pred[~sample_mask],
                        sample_weight[~sample_mask]))
        else:
            self.train_score_[i] = loss_(y, y_pred, sample_weight)

        if self.verbose > 0:
            verbose_reporter.update(i, self)

        if monitor is not None and monitor(i, self, locals()):
            break

    if self.dropout_rate > 0.:
        self.scale_ = scale

    return i + 1
Iteratively fits the stages .
38,804
def fit(self, X, y, sample_weight=None, monitor=None):
    """Fit the gradient boosting model.

    X is validated together with the survival outcome y (event indicator
    and observed time); for accelerated-failure-time style losses the time
    axis is log-transformed before boosting.  Returns self.
    """
    random_state = check_random_state(self.random_state)

    X, event, time = check_arrays_survival(
        X, y, accept_sparse=['csr', 'csc', 'coo'], dtype=DTYPE)
    n_samples, self.n_features_ = X.shape
    X = X.astype(DTYPE)

    if sample_weight is None:
        sample_weight = numpy.ones(n_samples, dtype=numpy.float32)
    else:
        sample_weight = column_or_1d(sample_weight, warn=True)
        check_consistent_length(X, sample_weight)

    self._check_params()

    self.loss_ = LOSS_FUNCTIONS[self.loss](1)
    if isinstance(self.loss_, (CensoredSquaredLoss, IPCWLeastSquaresError)):
        # These losses model log(time).
        time = numpy.log(time)

    self._init_state()
    self.init_.fit(X, (event, time), sample_weight)
    y_pred = self.init_.predict(X)
    begin_at_stage = 0

    if self.presort is True and issparse(X):
        raise ValueError("Presorting is not supported for sparse matrices.")

    presort = self.presort
    if presort == 'auto':
        presort = not issparse(X)

    X_idx_sorted = None
    if presort:
        X_idx_sorted = numpy.asfortranarray(
            numpy.argsort(X, axis=0), dtype=numpy.int32)

    # Fix: ``numpy.bool`` alias was removed in NumPy 1.24; the builtin
    # ``bool`` yields the same structured-dtype field type.
    y = numpy.fromiter(zip(event, time),
                       dtype=[('event', bool), ('time', numpy.float64)])

    n_stages = self._fit_stages(X, y, y_pred, sample_weight, random_state,
                                begin_at_stage, monitor, X_idx_sorted)

    # Trim state if early stopping ended training before n_estimators.
    if n_stages != self.estimators_.shape[0]:
        self.estimators_ = self.estimators_[:n_stages]
        self.train_score_ = self.train_score_[:n_stages]
        if hasattr(self, 'oob_improvement_'):
            self.oob_improvement_ = self.oob_improvement_[:n_stages]

    self.n_estimators_ = n_stages
    return self
Fit the gradient boosting model .
38,805
def staged_predict(self, X):
    """Predict hazard at each stage for X, yielding one array per stage."""
    check_is_fitted(self, 'estimators_')
    if hasattr(self, "scale_"):
        # Model was trained with dropout; use the rescaled decision path.
        stages = self._dropout_staged_decision_function(X)
    else:
        stages = self._staged_decision_function(X)
    for decision in stages:
        yield self._scale_prediction(decision.ravel())
Predict hazard at each stage for X .
38,806
def fit(self, X, y):
    """Build a MINLIP survival model from training data.

    Validates the inputs, runs the internal fitting routine and returns
    the fitted estimator.
    """
    features, event, time = check_arrays_survival(X, y)
    self._fit(features, event, time)
    return self
Build a MINLIP survival model from training data .
38,807
def predict(self, X):
    """Predict risk score of experiencing an event.

    Computes the kernel between X and the training data and projects it
    onto the fitted coefficients (negated so higher means higher risk).
    """
    kernel = self._get_kernel(X, self.X_fit_)
    scores = -numpy.dot(self.coef_, kernel.T)
    return scores.ravel()
Predict risk score of experiencing an event .
38,808
def get_x_y(data_frame, attr_labels, pos_label=None, survival=True):
    """Split data frame into features and labels.

    In survival mode attr_labels must hold exactly the event and time
    column names and pos_label identifies the "event occurred" value.
    """
    if not survival:
        return _get_x_y_other(data_frame, attr_labels)
    if len(attr_labels) != 2:
        raise ValueError(
            "expected sequence of length two for attr_labels, but got %d"
            % len(attr_labels))
    if pos_label is None:
        raise ValueError("pos_label needs to be specified if survival=True")
    return _get_x_y_survival(data_frame, attr_labels[0], attr_labels[1],
                             pos_label)
Split data frame into features and labels .
38,809
def load_arff_files_standardized(path_training, attr_labels, pos_label=None,
                                 path_testing=None, survival=True,
                                 standardize_numeric=True, to_numeric=True):
    """Load dataset in ARFF format, optionally with a separate test file.

    Training and testing data are transformed jointly so both share the
    same columns, scaling and categorical encoding.  Returns the tuple
    (x_train, y_train, x_test, y_test); the test parts are None when no
    testing path is given.
    """
    dataset = loadarff(path_training)
    if "index" in dataset.columns:
        dataset.index = dataset["index"].astype(object)
        dataset.drop("index", axis=1, inplace=True)

    x_train, y_train = get_x_y(dataset, attr_labels, pos_label, survival)

    if path_testing is None:
        if standardize_numeric:
            x_train = standardize(x_train)
        if to_numeric:
            x_train = categorical_to_numeric(x_train)
        return x_train, y_train, None, None

    x_test, y_test = _load_arff_testing(path_testing, attr_labels,
                                        pos_label, survival)
    if len(x_train.columns.symmetric_difference(x_test.columns)) > 0:
        warnings.warn(
            "Restricting columns to intersection between training and testing data",
            stacklevel=2)
        cols = x_train.columns.intersection(x_test.columns)
        if len(cols) == 0:
            raise ValueError("columns of training and test data do not intersect")
        x_train = x_train.loc[:, cols]
        x_test = x_test.loc[:, cols]

    # Concatenate so both splits receive identical transforms.
    x = safe_concat((x_train, x_test), axis=0)
    if standardize_numeric:
        x = standardize(x)
    if to_numeric:
        x = categorical_to_numeric(x)

    n_train = x_train.shape[0]
    return x.iloc[:n_train, :], y_train, x.iloc[n_train:, :], y_test
Load dataset in ARFF format .
38,810
def load_aids(endpoint="aids"):
    """Load and return the AIDS Clinical Trial dataset.

    endpoint selects which outcome to use ('aids' or 'death'); the
    columns of the other outcome are dropped from the features.
    """
    label_map = {
        "aids": (['censor', 'time'], ['censor_d', 'time_d']),
        "death": (['censor_d', 'time_d'], ['censor', 'time']),
    }
    if endpoint not in label_map:
        raise ValueError("endpoint must be 'aids' or 'death'")
    attr_labels, drop_columns = label_map[endpoint]

    fn = resource_filename(__name__, 'data/actg320.arff')
    x, y = get_x_y(loadarff(fn), attr_labels=attr_labels, pos_label='1')
    x.drop(drop_columns, axis=1, inplace=True)
    return x, y
Load and return the AIDS Clinical Trial dataset
38,811
def _api_scrape(json_inp, ndx):
    """Internal helper to pull tabular data out of the stats JSON payload."""
    try:
        headers = json_inp['resultSets'][ndx]['headers']
        values = json_inp['resultSets'][ndx]['rowSet']
    except KeyError:
        # Some endpoints use the singular 'resultSet' key instead.
        try:
            headers = json_inp['resultSet'][ndx]['headers']
            values = json_inp['resultSet'][ndx]['rowSet']
        except KeyError:
            # And some endpoints return a single, unindexed result set.
            headers = json_inp['resultSet']['headers']
            values = json_inp['resultSet']['rowSet']
    if HAS_PANDAS:
        return DataFrame(values, columns=headers)
    return [dict(zip(headers, row)) for row in values]
Internal method to streamline the getting of data from the json
38,812
def get_player(first_name, last_name=None, season=constants.CURRENT_SEASON,
               only_current=0, just_id=True):
    """Look up a player through PlayerList.

    Returns just the player's id when just_id is true, otherwise the full
    row of player information.  Raises PlayerNotFoundException when no
    player matches the given name.
    """
    if last_name is None:
        name = first_name.lower()
    else:
        name = '{}, {}'.format(last_name, first_name).lower()

    players = PlayerList(season=season, only_current=only_current).info()
    hdr = 'DISPLAY_LAST_COMMA_FIRST'
    if HAS_PANDAS:
        item = players[players.DISPLAY_LAST_COMMA_FIRST.str.lower() == name]
    else:
        item = next(p for p in players if str(p[hdr]).lower() == name)

    if len(item) == 0:
        raise PlayerNotFoundException
    if just_id:
        return item['PERSON_ID']
    return item
Calls our PlayerList class to get a full list of players, then returns just an id if specified, or the full row of player information.
38,813
def respond_to_ask(self, message):
    """Called from Dealer when an ask message is received from RoundManager.

    Parses the message into (valid_actions, hole_card, round_state) and
    delegates the decision to declare_action.
    """
    parsed = self.__parse_ask_message(message)
    return self.declare_action(*parsed)
Called from Dealer when ask message received from RoundManager
38,814
def receive_notification(self, message):
    """Dispatch a RoundManager notification to its matching handler."""
    msg_type = message["message_type"]
    if msg_type == "game_start_message":
        info = self.__parse_game_start_message(message)
        self.receive_game_start_message(info)
    elif msg_type == "round_start_message":
        count, hole, seats = self.__parse_round_start_message(message)
        self.receive_round_start_message(count, hole, seats)
    elif msg_type == "street_start_message":
        self.receive_street_start_message(
            *self.__parse_street_start_message(message))
    elif msg_type == "game_update_message":
        self.receive_game_update_message(
            *self.__parse_game_update_message(message))
    elif msg_type == "round_result_message":
        self.receive_round_result_message(
            *self.__parse_round_result_message(message))
Called from Dealer when notification received from RoundManager
38,815
async def result_continuation(task):
    """Chained result processor: unpack the finished task's (num, res)
    pair after a short pause and return the pair with the result doubled.
    """
    await asyncio.sleep(0.1)
    num, value = task.result()
    return num, value * 2
A preliminary result processor we'll chain onto the original task. This will get executed wherever the source task was executed — in this case, one of the threads in the ThreadPoolExecutor.
38,816
async def result_processor(tasks):
    """Async aggregator: await each task and collect its (num, res) pair
    into a dict keyed by num.
    """
    gathered = {}
    for pending in tasks:
        key, value = await pending
        gathered[key] = value
    return gathered
An async result aggregator that combines all the results. This gets executed in unsync.loop and unsync.thread.
38,817
def read_union(fo, writer_schema, reader_schema=None):
    """Read a union value.

    A union is encoded by first writing a long indicating the zero-based
    position within the union of the schema of its value.
    """
    chosen = writer_schema[read_long(fo)]
    if not reader_schema:
        return read_data(fo, chosen)
    # Resolve against the reader's schema (which may itself be a union).
    candidates = (reader_schema if isinstance(reader_schema, list)
                  else [reader_schema])
    for candidate in candidates:
        if match_types(chosen, candidate):
            return read_data(fo, chosen, candidate)
    msg = 'schema mismatch: %s not found in %s' % (writer_schema, reader_schema)
    raise SchemaResolutionError(msg)
A union is encoded by first writing a long value indicating the zero - based position within the union of the schema of its value .
38,818
def read_data(fo, writer_schema, reader_schema=None):
    """Read data from file object according to schema."""
    record_type = extract_record_type(writer_schema)
    logical_type = extract_logical_type(writer_schema)

    if reader_schema and record_type in AVRO_TYPES:
        # Identical schemas need no resolution machinery.
        if writer_schema == reader_schema:
            reader_schema = None
        else:
            match_schemas(writer_schema, reader_schema)

    reader_fn = READERS.get(record_type)
    if reader_fn is None:
        # Named type: resolve through the schema registry and recurse.
        return read_data(fo, SCHEMA_DEFS[record_type],
                         SCHEMA_DEFS.get(reader_schema))

    try:
        data = reader_fn(fo, writer_schema, reader_schema)
    except StructError:
        raise EOFError('cannot read %s from %s' % (record_type, fo))

    if 'logicalType' in writer_schema:
        fn = LOGICAL_READERS.get(logical_type)
        if fn:
            return fn(data, writer_schema, reader_schema)

    if reader_schema is not None:
        return maybe_promote(data, record_type,
                             extract_record_type(reader_schema))
    return data
Read data from file object according to schema .
38,819
def _iter_avro_records(fo, header, codec, writer_schema, reader_schema):
    """Return iterator over avro records, decoding one block at a time."""
    sync_marker = header['sync']
    read_block = BLOCK_READERS.get(codec)
    if not read_block:
        raise ValueError('Unrecognized codec: %r' % codec)

    while True:
        try:
            block_count = read_long(fo)
        except StopIteration:
            # Clean end of the container file.
            return
        block_fo = read_block(fo)
        for _ in xrange(block_count):
            yield read_data(block_fo, writer_schema, reader_schema)
        skip_sync(fo, sync_marker)
Return iterator over avro records .
38,820
def _iter_avro_blocks(fo, header, codec, writer_schema, reader_schema):
    """Return iterator over raw avro blocks (not individual records)."""
    sync_marker = header['sync']
    read_block = BLOCK_READERS.get(codec)
    if not read_block:
        raise ValueError('Unrecognized codec: %r' % codec)

    while True:
        offset = fo.tell()
        try:
            num_block_records = read_long(fo)
        except StopIteration:
            return
        block_bytes = read_block(fo)
        skip_sync(fo, sync_marker)
        size = fo.tell() - offset
        yield Block(block_bytes, num_block_records, codec,
                    reader_schema, writer_schema, offset, size)
Return iterator over avro blocks .
38,821
def prepare_timestamp_millis(data, schema):
    """Convert datetime.datetime to an int millisecond timestamp.

    Non-datetime values are passed through unchanged.
    """
    if not isinstance(data, datetime.datetime):
        return data
    if data.tzinfo is not None:
        # Timezone-aware: measure against the (aware) epoch constant.
        return int((data - epoch).total_seconds() * MLS_PER_SECOND)
    # Naive datetimes are interpreted in local time via mktime.
    return (int(time.mktime(data.timetuple())) * MLS_PER_SECOND
            + int(data.microsecond / 1000))
Converts datetime . datetime object to int timestamp with milliseconds
38,822
def prepare_timestamp_micros(data, schema):
    """Convert datetime.datetime to an int microsecond timestamp.

    Non-datetime values are passed through unchanged.
    """
    if not isinstance(data, datetime.datetime):
        return data
    if data.tzinfo is not None:
        # Timezone-aware: measure against the (aware) epoch constant.
        return int((data - epoch).total_seconds() * MCS_PER_SECOND)
    # Naive datetimes are interpreted in local time via mktime.
    return (int(time.mktime(data.timetuple())) * MCS_PER_SECOND
            + data.microsecond)
Converts datetime . datetime to int timestamp with microseconds
38,823
def prepare_date(data, schema):
    """Convert datetime.date to an int day count (shifted by DAYS_SHIFT).

    Non-date values are passed through unchanged.
    """
    if not isinstance(data, datetime.date):
        return data
    return data.toordinal() - DAYS_SHIFT
Converts datetime . date to int timestamp
38,824
def prepare_uuid(data, schema):
    """Convert uuid.UUID to its canonical string form
    (xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx); pass other values through.
    """
    return str(data) if isinstance(data, uuid.UUID) else data
Converts uuid . UUID to string formatted UUID xxxxxxxx - xxxx - xxxx - xxxx - xxxxxxxxxxxx
38,825
def prepare_time_millis(data, schema):
    """Convert datetime.time to an int millisecond-of-day value.

    Non-time values are passed through unchanged.
    """
    if not isinstance(data, datetime.time):
        return data
    return int(data.hour * MLS_PER_HOUR
               + data.minute * MLS_PER_MINUTE
               + data.second * MLS_PER_SECOND
               + int(data.microsecond / 1000))
Convert datetime . time to int timestamp with milliseconds
38,826
def prepare_time_micros(data, schema):
    """Convert datetime.time to a long microsecond-of-day value.

    Non-time values are passed through unchanged.
    """
    if not isinstance(data, datetime.time):
        return data
    # ``long`` is the py2-compat alias defined at module level.
    return long(data.hour * MCS_PER_HOUR
                + data.minute * MCS_PER_MINUTE
                + data.second * MCS_PER_SECOND
                + data.microsecond)
Convert datetime . time to int timestamp with microseconds
38,827
def prepare_bytes_decimal(data, schema):
    """Convert decimal.Decimal to a big-endian two's-complement byte string.

    Non-Decimal values are passed through unchanged.  Raises ValueError
    when the value has more fractional digits than the schema's scale.
    """
    if not isinstance(data, decimal.Decimal):
        return data
    scale = schema.get('scale', 0)
    sign, digits, exp = data.as_tuple()

    if -exp > scale:
        raise ValueError('Scale provided in schema does not match the decimal')
    delta = exp + scale
    if delta > 0:
        # Pad with trailing zeros so the unscaled value matches the scale.
        digits = digits + (0,) * delta

    unscaled = 0
    for digit in digits:
        unscaled = unscaled * 10 + digit

    bits_req = unscaled.bit_length() + 1
    if sign:
        # Two's complement of the magnitude.
        unscaled = (1 << bits_req) - unscaled

    bytes_req = bits_req // 8
    padding = ~((1 << bits_req) - 1) if sign else 0
    packed = padding | unscaled
    if (bytes_req << 3) < bits_req:
        bytes_req += 1

    tmp = MemoryIO()
    for index in range(bytes_req - 1, -1, -1):
        tmp.write(mk_bits((packed >> (8 * index)) & 0xff))
    return tmp.getvalue()
Convert decimal . Decimal to bytes
38,828
# Serializes a decimal.Decimal into a fixed-size, big-endian two's-complement
# byte array of exactly schema['size'] bytes; non-Decimal inputs pass through
# unchanged.  Raises ValueError when the value has more fractional digits than
# the schema's 'scale'.
# NOTE(review): the code builds a sign-extension mask over the full
# size_in_bits and pads negative values to the fixed width; statement order in
# the bit-twiddling is significant, so the original is left byte-for-byte
# intact below (original is whitespace-mangled onto one line).
def prepare_fixed_decimal ( data , schema ) : if not isinstance ( data , decimal . Decimal ) : return data scale = schema . get ( 'scale' , 0 ) size = schema [ 'size' ] sign , digits , exp = data . as_tuple ( ) if - exp > scale : raise ValueError ( 'Scale provided in schema does not match the decimal' ) delta = exp + scale if delta > 0 : digits = digits + ( 0 , ) * delta unscaled_datum = 0 for digit in digits : unscaled_datum = ( unscaled_datum * 10 ) + digit bits_req = unscaled_datum . bit_length ( ) + 1 size_in_bits = size * 8 offset_bits = size_in_bits - bits_req mask = 2 ** size_in_bits - 1 bit = 1 for i in range ( bits_req ) : mask ^= bit bit <<= 1 if bits_req < 8 : bytes_req = 1 else : bytes_req = bits_req // 8 if bits_req % 8 != 0 : bytes_req += 1 tmp = MemoryIO ( ) if sign : unscaled_datum = ( 1 << bits_req ) - unscaled_datum unscaled_datum = mask | unscaled_datum for index in range ( size - 1 , - 1 , - 1 ) : bits_to_write = unscaled_datum >> ( 8 * index ) tmp . write ( mk_bits ( bits_to_write & 0xff ) ) else : for i in range ( offset_bits // 8 ) : tmp . write ( mk_bits ( 0 ) ) for index in range ( bytes_req - 1 , - 1 , - 1 ) : bits_to_write = unscaled_datum >> ( 8 * index ) tmp . write ( mk_bits ( bits_to_write & 0xff ) ) return tmp . getvalue ( )
Converts decimal . Decimal to fixed length bytes array
38,829
def write_crc32(fo, bytes):
    """Write a 4-byte, big-endian CRC32 checksum of *bytes* to *fo*."""
    checksum = crc32(bytes) & 0xFFFFFFFF
    fo.write(pack('>I', checksum))
A 4 - byte big - endian CRC32 checksum
38,830
def write_union(fo, datum, schema):
    """Write a union value: a long selecting the branch, then the datum.

    A (name, value) tuple selects the branch explicitly by schema name;
    otherwise the best-matching branch is chosen by validation, preferring
    the record candidate with the most fields.
    """
    if isinstance(datum, tuple):
        (name, datum) = datum
        for index, candidate in enumerate(schema):
            if extract_record_type(candidate) == 'record':
                schema_name = candidate['name']
            else:
                schema_name = candidate
            if name == schema_name:
                break
        else:
            msg = 'provided union type name %s not found in schema %s' \
                % (name, schema)
            raise ValueError(msg)
    else:
        pytype = type(datum)
        best_match_index = -1
        most_fields = -1
        for index, candidate in enumerate(schema):
            if not validate(datum, candidate, raise_errors=False):
                continue
            if extract_record_type(candidate) == 'record':
                # Ambiguous records: keep the candidate with most fields.
                fields = len(candidate['fields'])
                if fields > most_fields:
                    best_match_index = index
                    most_fields = fields
            else:
                best_match_index = index
                break
        if best_match_index < 0:
            msg = '%r (type %s) do not match %s' % (datum, pytype, schema)
            raise ValueError(msg)
        index = best_match_index

    write_long(fo, index)
    write_data(fo, datum, schema[index])
A union is encoded by first writing a long value indicating the zero - based position within the union of the schema of its value . The value is then encoded per the indicated schema within the union .
38,831
def write_data(fo, datum, schema):
    """Write a datum of data to output stream.

    Applies the matching logical-type preparer (if any) before delegating
    to the concrete writer for the schema's record type.
    """
    record_type = extract_record_type(schema)
    logical_type = extract_logical_type(schema)

    fn = WRITERS.get(record_type)
    if fn is None:
        # Named schema reference: resolve through the registry and recurse.
        return write_data(fo, datum, SCHEMA_DEFS[record_type])

    if logical_type:
        prepare = LOGICAL_WRITERS.get(logical_type)
        if prepare:
            datum = prepare(datum, schema)
    return fn(fo, datum, schema)
Write a datum of data to output stream .
38,832
def null_write_block(fo, block_bytes):
    """Write block in null codec: a length prefix, then the raw bytes."""
    write_long(fo, len(block_bytes))
    fo.write(block_bytes)
Write block in null codec .
38,833
def deflate_write_block(fo, block_bytes):
    """Write block in deflate codec.

    The slice strips the zlib framing so raw deflate data is written,
    as the Avro spec requires.
    """
    data = compress(block_bytes)[2:-1]
    write_long(fo, len(data))
    fo.write(data)
Write block in deflate codec .
38,834
def schemaless_writer(fo, schema, record):
    """Write a single record without the schema or header information."""
    write_data(fo, record, parse_schema(schema))
Write a single record without the schema or header information
38,835
def validate_int(datum, **kwargs):
    """Check that datum is a non-floating number fitting in Int32.

    Also accepts datetime/date/time objects so logicalType timestamp
    values validate.
    """
    is_int32 = (isinstance(datum, (int, long, numbers.Integral))
                and not isinstance(datum, bool)
                and INT_MIN_VALUE <= datum <= INT_MAX_VALUE)
    return is_int32 or isinstance(
        datum, (datetime.time, datetime.datetime, datetime.date))
Check that the data value is a non floating point number with size less that Int32 . Also support for logicalType timestamp validation with datetime .
38,836
def validate_float(datum, **kwargs):
    """Check that datum is a real number (bool is explicitly rejected)."""
    return (not isinstance(datum, bool)
            and isinstance(datum, (int, long, float, numbers.Real)))
Check that the data value is a floating point number or double precision .
38,837
def validate_record(datum, schema, parent_ns=None, raise_errors=True):
    """Check that datum is a Mapping whose schema fields all validate."""
    _, namespace = schema_name(schema, parent_ns)
    if not isinstance(datum, Mapping):
        return False
    return all(
        validate(datum=datum.get(field['name'], field.get('default', no_value)),
                 schema=field['type'],
                 field='{}.{}'.format(namespace, field['name']),
                 raise_errors=raise_errors)
        for field in schema['fields'])
Check that the data is a Mapping type with all schema defined fields validated as True .
38,838
def validate_union(datum, schema, parent_ns=None, raise_errors=True):
    """Check that datum validates against at least one union branch."""
    if isinstance(datum, tuple):
        # A (name, value) tuple names the record branch explicitly.
        (name, datum) = datum
        for candidate in schema:
            if (extract_record_type(candidate) == 'record'
                    and name == candidate["name"]):
                return validate(datum, schema=candidate, field=parent_ns,
                                raise_errors=raise_errors)
        return False

    errors = []
    for branch in schema:
        try:
            if validate(datum, schema=branch, field=parent_ns,
                        raise_errors=raise_errors):
                return True
        except ValidationError as exc:
            errors.extend(exc.errors)
    if raise_errors:
        raise ValidationError(*errors)
    return False
Check that the data is a list type with possible options to validate as True .
38,839
def validate_many(records, schema, raise_errors=True):
    """Validate a list of records against schema.

    Collects errors across all records and raises them together when
    raise_errors is true; otherwise returns whether all records validate.
    """
    errors = []
    outcomes = []
    for record in records:
        try:
            outcomes.append(validate(record, schema, raise_errors=raise_errors))
        except ValidationError as exc:
            errors.extend(exc.errors)
    if raise_errors and errors:
        raise ValidationError(*errors)
    return all(outcomes)
Validate a list of data!
38,840
def parse_schema(schema, _write_hint=True, _force=False):
    """Return a parsed avro schema.

    A dict carrying the "__fastavro_parsed" marker is returned as-is
    unless _force requests re-parsing.
    """
    already_parsed = isinstance(schema, dict) and "__fastavro_parsed" in schema
    if already_parsed and not _force:
        return schema
    return _parse_schema(schema, "", _write_hint)
Returns a parsed avro schema
38,841
def load_schema(schema_path):
    """Return a schema loaded from the file at schema_path.

    The schema's directory is passed along so named references can be
    resolved relative to it.
    """
    with open(schema_path) as fd:
        schema = json.load(fd)
    schema_dir = path.split(schema_path)[0]
    return _load_schema(schema, schema_dir)
Returns a schema loaded from the file at schema_path .
38,842
def showtip(self, text):
    """Display text in a tooltip window near the widget's insert cursor."""
    self.text = text
    if self.tipwindow or not self.text:
        return
    x, y, cx, cy = self.widget.bbox("insert")
    x = x + self.widget.winfo_rootx() + 27
    y = y + cy + self.widget.winfo_rooty() + 27
    self.tipwindow = tw = tk.Toplevel(self.widget)
    tw.wm_overrideredirect(1)
    tw.wm_geometry("+%d+%d" % (x, y))
    try:
        # Mark the window as a help balloon on macOS (no-op elsewhere).
        tw.tk.call("::tk::unsupported::MacWindowStyle",
                   "style", tw._w, "help", "noActivates")
    except tk.TclError:
        pass
    label = tk.Label(tw, text=self.text, justify=tk.LEFT,
                     background="#ffffe0", foreground="black",
                     relief=tk.SOLID, borderwidth=1,
                     font=("tahoma", "8", "normal"))
    label.pack(ipadx=1)
Display text in tooltip window
38,843
def run(self):
    """Execute the main loop, routing window-close to the private handler."""
    window = self.toplevel
    window.protocol("WM_DELETE_WINDOW", self.__on_window_close)
    window.mainloop()
Execute the main loop.
38,844
def create_regpoly(self, x0, y0, x1, y1, sides=0, start=90, extent=360, **kw):
    """Create a regular polygon on the canvas, bounded by (x0, y0, x1, y1)."""
    points = self.__regpoly_coords(x0, y0, x1, y1, sides, start, extent)
    return self.canvas.create_polygon(*points, **kw)
Create a regular polygon
38,845
def __regpoly_coords(self, x0, y0, x1, y1, sides, start, extent):
    """Create the coordinates of the regular polygon specified.

    The polygon/arc is inscribed in the ellipse bounded by (x0, y0) and
    (x1, y1); ``start`` and ``extent`` are degrees, ``sides`` may be 0 to
    pick a radius-dependent default.
    """
    coords = []
    if extent == 0:
        return coords
    # Ellipse center and radii.
    xm = (x0 + x1) / 2.
    ym = (y0 + y1) / 2.
    rx = xm - x0
    ry = ym - y0
    n = sides
    if n == 0:
        # Default side count scales with the average radius.
        n = round((rx + ry) * .5)
    if n < 2:
        n = 4
    dirv = 1 if extent > 0 else -1
    if abs(extent) > 360:
        extent = dirv * abs(extent) % 360
    step = dirv * 360 / n
    numsteps = 1 + extent / float(step)
    numsteps_int = int(numsteps)
    i = 0
    while i < numsteps_int:
        rad = (start - i * step) * DEG2RAD
        x = rx * math.cos(rad)
        y = ry * math.sin(rad)
        coords.append((xm + x, ym - y))
        i += 1
    if numsteps != numsteps_int:
        # Fractional final step: intersect the last chord direction with
        # the exact end angle to close the arc precisely.
        rad2 = (start - numsteps_int * step) * DEG2RAD
        x2 = rx * math.cos(rad2) - x
        y2 = ry * math.sin(rad2) - y
        rad3 = (start - extent) * DEG2RAD
        x3 = math.cos(rad3)
        y3 = math.sin(rad3)
        j = (x * y2 - x2 * y) / (x3 * y2 - x2 * y3)
        coords.append((xm + j * x3, ym - j * y3))
    return coords
Create the coordinates of the regular polygon specified
38,846
def get_image(self, path):
    """Return the tk image registered under the basename of *path*.

    Registers the image on first use; returns '' if it cannot be found.
    """
    name = os.path.basename(path)
    if not StockImage.is_registered(name):
        ipath = self.__find_image(path)
        if ipath is not None:
            StockImage.register(name, ipath)
        else:
            msg = "Image '{0}' not found in resource paths.".format(name)
            logger.warning(msg)
    try:
        return StockImage.get(name)
    except StockImageException:
        return ''
Return tk image corresponding to name which is taken form path .
38,847
def import_variables(self, container, varnames=None):
    """Copy tk variables onto *container* as attributes.

    With varnames=None every variable is exported; otherwise only the
    listed names that actually exist are copied.
    """
    wanted = self.tkvariables if varnames is None else varnames
    for keyword in wanted:
        if keyword in self.tkvariables:
            setattr(container, keyword, self.tkvariables[keyword])
Helper method to avoid call get_variable for every variable .
38,848
def create_variable(self, varname, vtype=None):
    """Create a tk variable; reuse a previously created one by name.

    varname may carry a type prefix or suffix ('int:x' or 'x:int');
    the default type is 'string'.  An explicit vtype callable overrides
    the name-derived type.
    """
    var_types = ('string', 'int', 'boolean', 'double')
    type_from_name = 'string'
    vname = varname
    if ':' in varname:
        type_from_name, vname = varname.split(':')
        if type_from_name not in var_types:
            # Allow 'name:type' order as well as 'type:name'.
            type_from_name, vname = vname, type_from_name
        if type_from_name not in var_types:
            raise Exception('Undefined variable type in "{0}"'.format(varname))

    if vname in self.tkvariables:
        return self.tkvariables[vname]

    if vtype is not None:
        var = vtype()
    elif type_from_name == 'int':
        var = tkinter.IntVar()
    elif type_from_name == 'boolean':
        var = tkinter.BooleanVar()
    elif type_from_name == 'double':
        var = tkinter.DoubleVar()
    else:
        var = tkinter.StringVar()
    self.tkvariables[vname] = var
    return var
Create a tk variable . If the variable was created previously return that instance .
38,849
def add_from_file(self, fpath):
    """Load the ui definition from a file (only on the first call)."""
    if self.tree is not None:
        # Already loaded; subsequent calls are ignored.
        return
    base, name = os.path.split(fpath)
    self.add_resource_path(base)
    self.tree = ET.parse(fpath)
    self.root = self.tree.getroot()
    self.objects = {}
Load ui definition from file .
38,850
def add_from_string(self, strdata):
    """Load the ui definition from an XML string (only on the first call)."""
    if self.tree is not None:
        # Already loaded; subsequent calls are ignored.
        return
    self.tree = ET.ElementTree(ET.fromstring(strdata))
    self.root = self.tree.getroot()
    self.objects = {}
Load ui definition from string .
38,851
def add_from_xmlnode(self, element):
    """Load the ui definition from an xml.etree Element (first call only)."""
    if self.tree is not None:
        # Already loaded; subsequent calls are ignored.
        return
    root = ET.Element('interface')
    root.append(element)
    self.tree = ET.ElementTree(root)
    self.root = self.tree.getroot()
    self.objects = {}
Load ui definition from xml . etree . Element node .
38,852
def get_object(self, name, master=None):
    """Find and create the widget named *name*, with *master* as parent.

    If the widget was already created, that instance is returned.
    Raises when no matching definition exists.
    """
    widget = None
    if name in self.objects:
        widget = self.objects[name].widget
    else:
        node = self.tree.find(".//object[@id='{0}']".format(name))
        if node is not None:
            root = BuilderObject(self, dict())
            root.widget = master
            widget = self._realize(root, node).widget
    if widget is None:
        raise Exception('Widget "{0}" not defined.'.format(name))
    return widget
Find and create the widget named name . Use master as parent . If widget was already created return that instance .
38,853
def _realize(self, master, element):
    """Build a widget (and its children, recursively) from an xml element,
    using *master* as parent.  Returns the builder object for the widget.
    """
    data = data_xmlnode_to_dict(element, self.translator)
    cname = data['class']
    uniqueid = data['id']
    if cname not in CLASS_MAP:
        # Try a lazy import before giving up on the class.
        self._import_class(cname)
    if cname not in CLASS_MAP:
        raise Exception('Class "{0}" not mapped'.format(cname))

    self._pre_process_data(data)
    builder = CLASS_MAP[cname].builder.factory(self, data)
    builder.realize(master)
    self.objects[uniqueid] = builder

    for child in element.findall("./child"):
        child_builder = self._realize(builder, child.find('./object'))
        builder.add_child(child_builder)

    builder.configure()
    builder.layout()
    return builder
Builds a widget from xml element using master as parent .
38,854
def connect_callbacks(self, callbacks_bag):
    """Connect command/binding callbacks defined in the ui to callbacks_bag.

    Returns a deduplicated list of callback names that could not be
    connected, or None when everything was connected.
    """
    notconnected = []
    for builderobj in self.objects.values():
        for missing in (builderobj.connect_commands(callbacks_bag),
                        builderobj.connect_bindings(callbacks_bag)):
            if missing is not None:
                notconnected.extend(missing)
    if not notconnected:
        return None
    notconnected = list(set(notconnected))
    msg = 'Missing callbacks for commands: {}'.format(notconnected)
    logger.warning(msg)
    return notconnected
Connect callbacks specified in callbacks_bag with callbacks defined in the ui definition . Return a list with the name of the callbacks not connected .
38,855
def _start_selecting(self, event):
    """Begin the rubber-band selection process at the event position."""
    self._selecting = True
    canvas = self._canvas
    x = canvas.canvasx(event.x)
    y = canvas.canvasy(event.y)
    self._sstart = (x, y)
    if not self._sobject:
        # Create the selection rectangle lazily, only once per canvas.
        self._sobject = canvas.create_rectangle(
            self._sstart[0], self._sstart[1], x, y,
            dash=(3, 5), outline='#0000ff')
    canvas.itemconfigure(self._sobject, state=tk.NORMAL)
Comienza con el proceso de seleccion .
38,856
def _keep_selecting(self, event):
    """Continue the selection: resize the rubber-band rectangle so it
    follows the current mouse position."""
    canvas = self._canvas
    x = canvas.canvasx(event.x)
    y = canvas.canvasy(event.y)
    canvas.coords(self._sobject, self._sstart[0], self._sstart[1], x, y)
Continua con el proceso de seleccion . Crea o redimensiona el cuadro de seleccion de acuerdo con la posicion del raton .
38,857
def _finish_selecting(self, event):
    """Finish the selection: hide the rubber band, store the selected
    region on the canvas and generate a <<RegionSelected>> event."""
    self._selecting = False
    canvas = self._canvas
    x = canvas.canvasx(event.x)
    y = canvas.canvasy(event.y)
    # Collapse and hide the selection rectangle.
    canvas.coords(self._sobject, -1, -1, -1, -1)
    canvas.itemconfigure(self._sobject, state=tk.HIDDEN)
    sel_region = self._sstart[0], self._sstart[1], x, y
    canvas.region_selected = sel_region
    canvas.event_generate('<<RegionSelected>>')
Finaliza la seleccion . Marca como seleccionados todos los objetos que se encuentran dentro del recuadro de seleccion .
38,858
def matrix_coords(rows, cols, rowh, colw, ox=0, oy=0):
    """Generate (index, x0, y0, x1, y1) coords for a rows x cols matrix
    of rectangles of size colw x rowh, offset by (ox, oy)."""
    for i, f, c in rowmajor(rows, cols):
        x = ox + c * colw
        y = oy + f * rowh
        x1 = x + colw
        y1 = y + rowh
        yield (i, x, y, x1, y1)
Generate coords for a matrix of rects
38,859
def get(self):
    """Return a dict mapping each element name to its variable's
    current value, representing the underlying Tcl array."""
    return {name: var.get() for name, var in self._elementvars.items()}
Return a dictionary that represents the Tcl array
38,860
def yview(self, *args):
    """Vertical scroll: reposition the inplace-edit widgets afterwards."""
    # Defer the reposition until the scroll has actually taken effect.
    self.after_idle(self.__updateWnds)
    ttk.Treeview.yview(self, *args)
Update inplace widgets position when doing vertical scroll
38,861
def xview(self, *args):
    """Horizontal scroll: reposition the inplace-edit widgets afterwards."""
    # Defer the reposition until the scroll has actually taken effect.
    self.after_idle(self.__updateWnds)
    ttk.Treeview.xview(self, *args)
Update inplace widgets position when doing horizontal scroll
38,862
def __check_focus(self, event):
    """Check whether the treeview focused item changed; if so, rebuild
    the inplace-edit widgets for the newly focused row."""
    changed = False
    if not self._curfocus:
        changed = True
    elif self._curfocus != self.focus():
        # Focus moved to another row: drop the old editors first.
        self.__clear_inplace_widgets()
        changed = True
    newfocus = self.focus()
    if changed:
        if newfocus:
            self._curfocus = newfocus
            self.__focus(newfocus)
        self.__updateWnds()
Checks if the focus has changed
38,863
def __focus(self, item):
    """Called when the focused item changed: generate the
    <<TreeviewInplaceEdit>> event per displayed column so the user code
    can create editors, then wire Tab/Shift-Tab navigation on them."""
    cols = self.__get_display_columns()
    for col in cols:
        # Expose (column, item) so the event handler knows what to edit.
        self.__event_info = (col, item)
        self.event_generate('<<TreeviewInplaceEdit>>')
        if col in self._inplace_widgets:
            w = self._inplace_widgets[col]
            # NOTE(review): these lambdas close over *w* late; with more
            # than one editable column they all act on the last widget
            # of the loop — confirm this is intended.
            w.bind('<Key-Tab>', lambda e: w.tk_focusNext().focus_set())
            w.bind('<Shift-Key-Tab>', lambda e: w.tk_focusPrev().focus_set())
Called when focus item has changed
38,864
def __clear_inplace_widgets(self):
    """Remove (unmap) all inplace edit widgets from the treeview."""
    cols = self.__get_display_columns()
    for c in cols:
        if c in self._inplace_widgets:
            widget = self._inplace_widgets[c]
            widget.place_forget()
            # Forget the "currently shown" flag as well, if present.
            self._inplace_widgets_show.pop(c, None)
Remove all inplace edit widgets .
38,865
def run(self):
    """Run the parent install step, then delete the stale pygubu
    launcher scripts left in the scripts directory."""
    install.run(self)
    spath = os.path.join(self.install_scripts, 'pygubu')
    for ext in ('.py', '.pyw'):
        filename = spath + ext
        if os.path.exists(filename):
            os.remove(filename)
    if platform.system() == 'Windows':
        # Windows installs an additional .bat launcher; remove it too.
        spath = os.path.join(self.install_scripts, 'pygubu-designer.bat')
        if os.path.exists(spath):
            os.remove(spath)
Run the parent install step and then remove the obsolete pygubu launcher scripts from the install scripts directory.
38,866
def hide_all(self):
    """Hide all property rows (label and editor widget) from the
    property editor and clear the current edited object."""
    self.current = None
    for _v, (label, widget) in self._propbag.items():
        label.grid_remove()
        widget.grid_remove()
Hide all properties from property editor .
38,867
def _get_init_args ( self ) : args = { } for rop in self . ro_properties : if rop in self . properties : args [ rop ] = self . properties [ rop ] return args
Creates dict with properties marked as readonly
38,868
def _calculate_menu_wh(self):
    """Estimate the menu width and height from its entries' labels and
    fonts, storing the result in self._cwidth / self._cheight."""
    w = iw = 50  # accumulated widths: menu font vs. per-item fonts
    h = ih = 0   # tallest line heights seen so far (doubled below)
    # index(END) is None for an empty menu.
    index = self._menu.index(tk.END)
    index = index if index is not None else 0
    count = index + 1

    font = self._menu.cget('font')
    font = self._get_font(font)
    for i in range(0, count):
        mtype = self._menu.type(i)
        if mtype == 'tearoff':
            continue
        label = 'default'
        ifont = 'TkMenuFont'
        if mtype != 'separator':
            label = self._menu.entrycget(i, 'label')
            ifont = self._menu.entrycget(i, 'font')
        # Measure with the menu-wide font ...
        wpx = font.measure(label)
        hpx = font.metrics('linespace')
        w += wpx
        if hpx > h:
            h = hpx * 2
        # ... and again with the entry's own font.
        ifont = self._get_font(ifont)
        wpx = ifont.measure(label)
        hpx = ifont.metrics('linespace')
        iw += wpx
        if hpx > ih:
            ih = hpx * 2
    # Use the larger of both estimates, with sane minimums.
    w = max(w, iw, 100)
    h = max(h, ih, 25)
    # Add a 25% margin.
    self._cwidth = w + int(w * 0.25)
    self._cheight = h + int(h * 0.25)
Calculate menu width and height.
38,869
def _over_resizer(self, x, y):
    """Return True if the mouse at canvas coords (x, y) is over a
    resizer handle (a canvas item tagged 'resizer')."""
    over_resizer = False
    c = self.canvas
    # Degenerate rectangle: hit-test only the single point (x, y).
    ids = c.find_overlapping(x, y, x, y)
    if ids:
        o = ids[0]
        tags = c.gettags(o)
        if 'resizer' in tags:
            over_resizer = True
    return over_resizer
Returns True if mouse is over a resizer
38,870
def resize_preview(self, dw, dh):
    """Resize by (dw, dh) the preview that is currently being dragged."""
    if self._objects_moving:
        id_ = self._objects_moving[0]
        tags = self.canvas.gettags(id_)
        for tag in tags:
            if tag.startswith('preview_'):
                # The tag suffix identifies which preview is dragged.
                _, ident = tag.split('preview_')
                preview = self.previews[ident]
                preview.resize_by(dw, dh)
                self.move_previews()
                break
    self._update_cregion()
Resizes preview that is currently dragged
38,871
def move_previews(self):
    """Reposition all previews vertically after a resize event."""
    # Recompute target y positions, then shift each preview by the delta.
    min_y = self._calc_preview_ypos()
    for idx, (key, p) in enumerate(self.previews.items()):
        new_dy = min_y[idx] - p.y
        self.previews[key].move_by(0, new_dy)
    self._update_cregion()
    self.show_selected(self._sel_id, self._sel_widget)
Move previews after a resize event
38,872
def _calc_preview_ypos ( self ) : "Calculates the previews positions on canvas" y = 10 min_y = [ y ] for k , p in self . previews . items ( ) : y += p . height ( ) + self . padding min_y . append ( y ) return min_y
Calculates the previews positions on canvas
38,873
def _get_slot ( self ) : "Returns the next coordinates for a preview" x = y = 10 for k , p in self . previews . items ( ) : y += p . height ( ) + self . padding return x , y
Returns the next coordinates for a preview
38,874
def clear_cache(cls):
    """Clear the image cache. Call this before closing the tk root so
    no cached PhotoImage outlives the interpreter."""
    # Null out each entry in place first (presumably so any alias of the
    # dict also drops its image references — TODO confirm), then rebind.
    for key in cls._cached:
        cls._cached[key] = None
    cls._cached = {}
Call this before closing tk root
38,875
def register(cls, key, filename):
    """Register the image file *filename* under *key*.

    An existing entry for *key* is replaced (a notice is logged).
    """
    if key in cls._stock:
        logger.info('Warning, replacing resource ' + str(key))
    cls._stock[key] = {'type': 'custom', 'filename': filename}
    logger.info('%s registered as %s' % (filename, key))
Register a image file using key
38,876
def register_from_data(cls, key, format, data):
    """Register inline image *data* (in the given *format*) under *key*.

    An existing entry for *key* is replaced (a notice is logged).
    """
    if key in cls._stock:
        logger.info('Warning, replacing resource ' + str(key))
    cls._stock[key] = {'type': 'data', 'data': data, 'format': format}
    # Log a placeholder instead of the (potentially large) data blob.
    logger.info('%s registered as %s' % ('data', key))
Register a image data using key
38,877
def register_created(cls, key, image):
    """Register an already created (in-memory) image under *key*.

    An existing entry for *key* is replaced (a notice is logged).
    """
    if key in cls._stock:
        logger.info('Warning, replacing resource ' + str(key))
    cls._stock[key] = {'type': 'created', 'image': image}
    # Fixed log message: it previously said 'data', copy-pasted from
    # register_from_data, which misreported what was registered.
    logger.info('%s registered as %s' % ('created', key))
Register an already created image using key
38,878
def _load_image(cls, rkey):
    """Load and cache the image registered under *rkey*.

    The source depends on how the resource was registered: inline data,
    an already created image, or an image file path.
    """
    v = cls._stock[rkey]
    img = None
    itype = v['type']
    if itype in ('stock', 'data'):
        img = tk.PhotoImage(format=v['format'], data=v['data'])
    elif itype == 'created':
        img = v['image']
    else:
        # 'custom' entries registered from a file path.
        img = tk.PhotoImage(file=v['filename'])
    cls._cached[rkey] = img
    logger.info('Loaded resource %s.' % rkey)
    return img
Load image from file or return the cached instance .
38,879
def get(cls, rkey):
    """Return the image previously registered under *rkey*, loading it
    on first use and serving the cached instance afterwards.

    Raises StockImageException when the key was never registered.
    """
    if rkey in cls._cached:
        logger.info('Resource %s is in cache.' % rkey)
        return cls._cached[rkey]
    if rkey in cls._stock:
        img = cls._load_image(rkey)
        return img
    else:
        raise StockImageException('StockImage: %s not registered.' % rkey)
Get image previously registered with key rkey . If key not exist raise StockImageException
38,880
def config_treeview(self):
    """Configure the treeview event bindings."""
    tree = self.treeview
    tree.bind('<Double-1>', self.on_treeview_double_click)
    # add='+' keeps any previously installed <<TreeviewSelect>> handler.
    tree.bind('<<TreeviewSelect>>', self.on_treeview_select, add='+')
Sets treeview columns and other params
38,881
def get_toplevel_parent(self, treeitem):
    """Return the top level (root) ancestor of *treeitem*; *treeitem*
    itself when it is already a top level item."""
    tree = self.treeview
    roots = tree.get_children()
    current = treeitem
    while current not in roots:
        current = tree.parent(current)
    return current
Returns the top level parent for treeitem .
38,882
def draw_widget(self, item):
    """Create a preview of the toplevel widget containing *item* and
    highlight *item* inside it."""
    if item:
        # Temporarily disable the tree filter so the full xml is built.
        self.filter_remove(remember=True)
        selected_id = self.treedata[item]['id']
        item = self.get_toplevel_parent(item)
        widget_id = self.treedata[item]['id']
        wclass = self.treedata[item]['class']
        xmlnode = self.tree_node_to_xml('', item)
        self.previewer.draw(item, widget_id, xmlnode, wclass)
        self.previewer.show_selected(item, selected_id)
        self.filter_restore()
Create a preview of the selected treeview item
38,883
def on_treeview_delete_selection(self, event=None):
    """Remove the selected items from the treeview and refresh the
    previews of their (still existing) parents."""
    tv = self.treeview
    selection = tv.selection()
    # Disable filtering while items are removed.
    self.filter_remove(remember=True)
    toplevel_items = tv.get_children()
    parents_to_redraw = set()
    for item in selection:
        try:
            parent = ''
            if item not in toplevel_items:
                parent = self.get_toplevel_parent(item)
            else:
                # A toplevel item: its preview disappears with it.
                self.previewer.delete(item)
            del self.treedata[item]
            tv.delete(item)
            self.app.set_changed()
            if parent:
                self._update_max_grid_rc(parent)
                parents_to_redraw.add(parent)
            self.widget_editor.hide_all()
        except tk.TclError:
            # The item may already be gone (deleted with its parent).
            pass
    for item in parents_to_redraw:
        self.draw_widget(item)
    self.filter_restore()
Removes selected items from treeview
38,884
def tree_to_xml(self):
    """Traverse the treeview and generate an ElementTree with the full
    ui definition (an <interface> root, one node per toplevel item)."""
    # Make sure filtered-out items are serialized too.
    self.filter_remove(remember=True)
    tree = self.treeview
    root = ET.Element('interface')
    items = tree.get_children()
    for item in items:
        node = self.tree_node_to_xml('', item)
        root.append(node)
    self.filter_restore()
    return ET.ElementTree(root)
Traverses treeview and generates a ElementTree object
38,885
def tree_node_to_xml(self, parent, item):
    """Convert the treeview *item* and its children to xml nodes,
    wrapping each child widget in a <child> element."""
    tree = self.treeview
    data = self.treedata[item]
    node = data.to_xml_node()
    children = tree.get_children(item)
    for child in children:
        cnode = ET.Element('child')
        cwidget = self.tree_node_to_xml(item, child)
        cnode.append(cwidget)
        node.append(cnode)
    return node
Converts a treeview item and children to xml nodes
38,886
def _insert_item(self, root, data, from_file=False):
    """Insert an item in the treeview under *root* and fill its columns
    from *data*. Returns the new treeview item id."""
    tree = self.treeview
    treelabel = data.get_id()
    row = col = ''
    if root != '' and 'layout' in data:
        row = data.get_layout_property('row')
        col = data.get_layout_property('column')

        # When pasting (not loading a file) into column 0, relocate the
        # item to a fresh row so it does not overlap an existing cell.
        row_count = self.get_max_row(root)
        if not from_file and (row_count > int(row) and int(col) == 0):
            row = str(row_count + 1)
            data.set_layout_property('row', row)

    image = ''
    # Start with the generic tk icon; keep '' when it is unavailable.
    try:
        image = StockImage.get('16x16-tk.default')
    except StockImageException:
        pass
    # Prefer the class-specific icon; keep the previous one otherwise.
    try:
        image = StockImage.get('16x16-{0}'.format(data.get_class()))
    except StockImageException:
        pass

    values = (data.get_class(), row, col)
    item = tree.insert(root, 'end', text=treelabel, values=values, image=image)
    data.attach(self)
    self.treedata[item] = data
    self._update_max_grid_rc(root, from_file=True)
    self.app.set_changed()
    return item
Insert an item on the treeview and fill its columns from data.
38,887
def copy_to_clipboard(self):
    """Serialize the selected items to xml and place the text on the
    clipboard."""
    tree = self.treeview
    selection = tree.selection()
    if selection:
        self.filter_remove(remember=True)
        root = ET.Element('selection')
        for item in selection:
            node = self.tree_node_to_xml('', item)
            root.append(node)
        # encoding='unicode' is not accepted everywhere; fall back to
        # UTF-8 bytes when it raises LookupError.
        try:
            text = ET.tostring(root, encoding='unicode')
        except LookupError:
            text = ET.tostring(root, encoding='UTF-8')
        tree.clipboard_clear()
        tree.clipboard_append(text)
        self.filter_restore()
Copies selected items to clipboard .
38,888
def add_widget(self, wclass):
    """Add a new item of class *wclass* to the treeview.

    The item is placed under the selected item when the hierarchy
    allows it, otherwise under that item's parent; the insertion is
    aborted when neither location accepts the new class.
    """
    tree = self.treeview
    # Insertion point: the selected item, or '' (toplevel).
    selected_item = ''
    tsel = tree.selection()
    if tsel:
        selected_item = tsel[0]

    # Filtered-out items must be visible while inserting.
    self.filter_remove()

    root = selected_item
    # Validate the insertion point; on failure retry with its parent.
    if not self._validate_add(root, wclass, False):
        parent = tree.parent(root)
        if parent != root:
            if self._validate_add(parent, wclass):
                root = parent
            else:
                return
        else:
            return

    # Build the widget description with default property values.
    widget_id = self.get_unique_id(wclass)
    data = WidgetDescr(wclass, widget_id)

    for pname in builder.CLASS_MAP[wclass].builder.properties:
        pdescription = {}
        if pname in properties.WIDGET_PROPERTIES:
            pdescription = properties.WIDGET_PROPERTIES[pname]
        if wclass in pdescription:
            # Class-specific settings override the generic ones.
            pdescription = dict(pdescription, **pdescription[wclass])
        default_value = str(pdescription.get('default', ''))
        data.set_property(pname, default_value)
        # Default text/label to the widget id so the widget is visible.
        if pname in ('text', 'label'):
            data.set_property(pname, widget_id)

    # Default grid layout properties.
    for prop_name in properties.GRID_PROPERTIES:
        pdescription = properties.LAYOUT_OPTIONS[prop_name]
        if wclass in pdescription:
            pdescription = dict(pdescription, **pdescription[wclass])
        default_value = str(pdescription.get('default', ''))
        data.set_layout_property(prop_name, default_value)

    # Place the new widget on the next free grid row, column 0.
    rownum = '0'
    if root:
        rownum = str(self.get_max_row(root) + 1)
    data.set_layout_property('row', rownum)
    data.set_layout_property('column', '0')

    item = self._insert_item(root, data)
    self.draw_widget(item)
    # Select and reveal the new item once tk is idle.
    tree.after_idle(lambda: tree.selection_set(item))
    tree.after_idle(lambda: tree.focus(item))
    tree.after_idle(lambda: tree.see(item))
Adds a new item to the treeview .
38,889
def load_file(self, filename):
    """Load the xml ui definition in *filename* into the treeview,
    replacing any current content."""
    self.counter.clear()
    try:
        etree = ET.parse(filename)
    except ET.ParseError:
        # Retry forcing UTF-8 (files lacking an xml encoding header).
        parser = ET.XMLParser(encoding='UTF-8')
        etree = ET.parse(filename, parser)
    eroot = etree.getroot()

    self.remove_all()
    self.previewer.remove_all()
    self.widget_editor.hide_all()
    # Resolve relative resource paths against the file's directory.
    self.previewer.resource_paths.append(os.path.dirname(filename))
    for element in eroot:
        self.populate_tree('', eroot, element, from_file=True)

    children = self.treeview.get_children('')
    for child in children:
        self.draw_widget(child)
    self.previewer.show_selected(None, None)
Load file into treeview
38,890
def populate_tree(self, master, parent, element, from_file=False):
    """Read the xml *element* and populate the treeview under *master*,
    recursing into child nodes. Returns the created item id; raises
    Exception when the widget class is not mapped."""
    data = WidgetDescr(None, None)
    data.from_xml_node(element)
    cname = data.get_class()
    # Re-issue the id so loaded ids stay unique in this session.
    uniqueid = self.get_unique_id(cname, data.get_id())
    data.set_property('id', uniqueid)

    if cname in builder.CLASS_MAP:
        pwidget = self._insert_item(master, data, from_file=from_file)
        xpath = "./child"
        children = element.findall(xpath)
        for child in children:
            child_object = child.find('./object')
            cwidget = self.populate_tree(pwidget, child, child_object, from_file=from_file)
        return pwidget
    else:
        raise Exception('Class "{0}" not mapped'.format(cname))
Reads xml nodes and populates tree item
38,891
def update_event(self, hint, obj):
    """Observer callback: refresh the tree columns and the preview when
    the item data *obj* changed."""
    tree = self.treeview
    data = obj
    item = self.get_item_by_data(obj)
    if item:
        if data.get_id() != tree.item(item, 'text'):
            tree.item(item, text=data.get_id())
        # Non-toplevel items also show row/column values.
        if tree.parent(item) != '':
            row = data.get_layout_property('row')
            col = data.get_layout_property('column')
            values = tree.item(item, 'values')
            if (row != values[1] or col != values[2]):
                values = (data.get_class(), row, col)
                tree.item(item, values=values)
        self.draw_widget(item)
        self.app.set_changed()
Updates tree colums when itemdata is changed .
38,892
def _reatach ( self ) : for item , p , idx in self . _detached : if self . treeview . exists ( item ) and self . treeview . exists ( p ) : self . treeview . move ( item , p , idx ) self . _detached = [ ]
Reinsert the hidden items .
38,893
def _detach(self, item):
    """Compute which nodes of the subtree rooted at *item* should be
    hidden because they do not match the filter string.

    Returns (match_found, to_detach) where match_found tells whether
    the item or any descendant matches, and to_detach is a list of
    (item, parent, index) tuples ready for detaching.
    """
    to_detach = []
    children_det = []
    children_match = False
    match_found = False

    value = self.filtervar.get()
    # Match against the item label first, then against its class name.
    txt = self.treeview.item(item, 'text').lower()
    if value in txt:
        match_found = True
    else:
        class_txt = self.treedata[item].get_class().lower()
        if value in class_txt:
            match_found = True

    # Remember where the item lives so it can be reattached later.
    parent = self.treeview.parent(item)
    idx = self.treeview.index(item)

    # Recurse into children, accumulating their detach candidates.
    children = self.treeview.get_children(item)
    if children:
        for child in children:
            match, detach = self._detach(child)
            children_match = children_match | match
            if detach:
                children_det.extend(detach)

    if match_found:
        # This item stays; only its non-matching children are detached.
        if children_det:
            to_detach.extend(children_det)
    else:
        if children_match:
            # Keep this item to preserve its matching descendants.
            if children_det:
                to_detach.extend(children_det)
        else:
            # Neither the item nor its subtree match: detach it whole.
            to_detach.append((item, parent, idx))
    match_found = match_found | children_match
    return match_found, to_detach
Hide items from treeview that do not match the search string .
38,894
def load_file(self, filename):
    """Load the xml ui file into the treeview, show its name in the ui
    and mark the project as unchanged."""
    self.tree_editor.load_file(filename)
    self.project_name.configure(text=filename)
    self.currentfile = filename
    self.is_changed = False
Load xml into treeview
38,895
def lower_ir(ir_blocks, query_metadata_table, type_equivalence_hints=None):
    """Lower the IR into a form that can be represented in Gremlin queries.

    Args:
        ir_blocks: list of IR blocks produced by the frontend
        query_metadata_table: metadata object describing the query
        type_equivalence_hints: optional dict mapping interface/object
            GraphQL types to the union types considered equivalent

    Returns:
        the lowered list of IR blocks
    """
    sanity_check_ir_blocks_from_frontend(ir_blocks, query_metadata_table)

    ir_blocks = lower_context_field_existence(ir_blocks, query_metadata_table)
    ir_blocks = optimize_boolean_expression_comparisons(ir_blocks)

    # Expand coercion targets using the hints before lowering coercions.
    if type_equivalence_hints:
        ir_blocks = lower_coerce_type_block_type_data(ir_blocks, type_equivalence_hints)

    ir_blocks = lower_coerce_type_blocks(ir_blocks)
    ir_blocks = rewrite_filters_in_optional_blocks(ir_blocks)
    ir_blocks = merge_consecutive_filter_clauses(ir_blocks)
    ir_blocks = lower_folded_outputs(ir_blocks)
    return ir_blocks
Lower the IR into an IR form that can be represented in Gremlin queries .
38,896
def lower_coerce_type_block_type_data(ir_blocks, type_equivalence_hints):
    """Rewrite CoerceType blocks so the coercion explicitly lists every
    type allowed by the equivalence hints.

    Raises GraphQLCompilationError on malformed hints.
    """
    allowed_key_type_spec = (GraphQLInterfaceType, GraphQLObjectType)
    allowed_value_type_spec = GraphQLUnionType

    # Validate that all hints have the expected GraphQL types.
    for key, value in six.iteritems(type_equivalence_hints):
        if (not isinstance(key, allowed_key_type_spec) or
                not isinstance(value, allowed_value_type_spec)):
            msg = (u'Invalid type equivalence hints received! Hint {} ({}) -> {} ({}) '
                   u'was unexpected, expected a hint in the form '
                   u'GraphQLInterfaceType -> GraphQLUnionType or '
                   u'GraphQLObjectType -> GraphQLUnionType'.format(key.name, str(type(key)),
                                                                   value.name, str(type(value))))
            raise GraphQLCompilationError(msg)

    # Map each hinted type name to the names of its equivalent members.
    equivalent_type_names = {
        key.name: {x.name for x in value.types}
        for key, value in six.iteritems(type_equivalence_hints)
    }

    new_ir_blocks = []
    for block in ir_blocks:
        new_block = block
        if isinstance(block, CoerceType):
            # Coercions are required to target exactly one type here.
            target_class = get_only_element_from_collection(block.target_class)
            if target_class in equivalent_type_names:
                new_block = CoerceType(equivalent_type_names[target_class])
        new_ir_blocks.append(new_block)

    return new_ir_blocks
Rewrite CoerceType blocks to explicitly state which types are allowed in the coercion .
38,897
def lower_coerce_type_blocks(ir_blocks):
    """Lower CoerceType blocks into Filter blocks with a type-check
    predicate on the vertex's @class field."""
    new_ir_blocks = []
    for block in ir_blocks:
        new_block = block
        if isinstance(block, CoerceType):
            # "target_class contains @class" keeps only matching vertices.
            predicate = BinaryComposition(
                u'contains', Literal(list(block.target_class)), LocalField('@class'))
            new_block = Filter(predicate)
        new_ir_blocks.append(new_block)
    return new_ir_blocks
Lower CoerceType blocks into Filter blocks with a type - check predicate .
38,898
def rewrite_filters_in_optional_blocks(ir_blocks):
    """Inside optional traversal scopes, add a null check to Filter
    blocks so non-existent optional data passes through."""
    new_ir_blocks = []
    # Tracks how deeply nested in optional Traverse scopes we are.
    optional_context_depth = 0

    for block in ir_blocks:
        new_block = block
        if isinstance(block, CoerceType):
            # CoerceType blocks must already be lowered at this point.
            raise AssertionError(u'Found a CoerceType block after all such blocks should have been '
                                 u'lowered to Filter blocks: {}'.format(ir_blocks))
        elif isinstance(block, Traverse) and block.optional:
            optional_context_depth += 1
        elif isinstance(block, Backtrack) and block.optional:
            optional_context_depth -= 1
            if optional_context_depth < 0:
                raise AssertionError(u'Reached negative optional context depth for blocks: '
                                     u'{}'.format(ir_blocks))
        elif isinstance(block, Filter) and optional_context_depth > 0:
            # A missing vertex (@this == null) must not be filtered out.
            null_check = BinaryComposition(u'=', LocalField('@this'), NullLiteral)
            new_block = Filter(BinaryComposition(u'||', null_check, block.predicate))
        else:
            pass
        new_ir_blocks.append(new_block)

    return new_ir_blocks
In optional contexts add a check for null that allows non - existent optional data through .
38,899
def lower_folded_outputs(ir_blocks):
    """Lower standard folded output fields into GremlinFoldedContextField
    objects embedded in the final ConstructResult block."""
    folds, remaining_ir_blocks = extract_folds_from_ir_blocks(ir_blocks)

    if not remaining_ir_blocks:
        raise AssertionError(u'Expected at least one non-folded block to remain: {} {} '
                             u'{}'.format(folds, remaining_ir_blocks, ir_blocks))
    # The last non-folded block must be the query's ConstructResult.
    output_block = remaining_ir_blocks[-1]
    if not isinstance(output_block, ConstructResult):
        raise AssertionError(u'Expected the last non-folded block to be ConstructResult, '
                             u'but instead was: {} {} '
                             u'{}'.format(type(output_block), output_block, ir_blocks))

    # Convert the folds' IR blocks, keyed by the fold root location name.
    converted_folds = {
        base_fold_location.get_location_name()[0]: _convert_folded_blocks(folded_ir_blocks)
        for base_fold_location, folded_ir_blocks in six.iteritems(folds)
    }

    new_output_fields = dict()
    for output_name, output_expression in six.iteritems(output_block.fields):
        new_output_expression = output_expression
        # Rewrite outputs coming from folds into Gremlin-specific fields.
        if isinstance(output_expression, FoldedContextField):
            base_fold_location_name = output_expression.fold_scope_location.get_location_name()[0]
            folded_ir_blocks = converted_folds[base_fold_location_name]
            new_output_expression = GremlinFoldedContextField(
                output_expression.fold_scope_location, folded_ir_blocks,
                output_expression.field_type)
        new_output_fields[output_name] = new_output_expression

    # Replace the final ConstructResult with one using the new fields.
    new_ir_blocks = remaining_ir_blocks[:-1]
    new_ir_blocks.append(ConstructResult(new_output_fields))
    return new_ir_blocks
Lower standard folded output fields into GremlinFoldedContextField objects .