idx
int64
0
251k
question
stringlengths
53
3.53k
target
stringlengths
5
1.23k
len_question
int64
20
893
len_target
int64
3
238
232,800
def _create_results_summary ( self ) : # Make sure we have all attributes needed to create the results summary needed_attributes = [ "params" , "standard_errors" , "tvalues" , "pvalues" , "robust_std_errs" , "robust_t_stats" , "robust_p_vals" ] try : assert all ( [ hasattr ( self , attr ) for attr in needed_attributes ] ) assert all ( [ isinstance ( getattr ( self , attr ) , pd . Series ) for attr in needed_attributes ] ) except AssertionError : msg = "Call this function only after setting/calculating all other" msg_2 = " estimation results attributes" raise NotImplementedError ( msg + msg_2 ) self . summary = pd . concat ( ( self . params , self . standard_errors , self . tvalues , self . pvalues , self . robust_std_errs , self . robust_t_stats , self . robust_p_vals ) , axis = 1 ) return None
Create the dataframe that displays the estimation results and store it on the model instance .
240
17
232,801
def _record_values_for_fit_summary_and_statsmodels ( self ) : # Make sure we have all attributes needed to create the results summary needed_attributes = [ "fitted_probs" , "params" , "log_likelihood" , "standard_errors" ] try : assert all ( [ hasattr ( self , attr ) for attr in needed_attributes ] ) assert all ( [ getattr ( self , attr ) is not None for attr in needed_attributes ] ) except AssertionError : msg = "Call this function only after setting/calculating all other" msg_2 = " estimation results attributes" raise NotImplementedError ( msg + msg_2 ) # Record the number of observations self . nobs = self . fitted_probs . shape [ 0 ] # This is the number of estimated parameters self . df_model = self . params . shape [ 0 ] # The number of observations minus the number of estimated parameters self . df_resid = self . nobs - self . df_model # This is just the log-likelihood. The opaque name is used for # conformance with statsmodels self . llf = self . log_likelihood # This is just a repeat of the standard errors self . bse = self . standard_errors # These are the penalized measures of fit used for model comparison self . aic = compute_aic ( self ) self . bic = compute_bic ( self ) return None
Store the various estimation results that are used to describe how well the estimated model fits the given dataset and record the values that are needed for the statsmodels estimation results table . All values are stored on the model instance .
319
43
232,802
def _store_inferential_results ( self , value_array , index_names , attribute_name , series_name = None , column_names = None ) : if len ( value_array . shape ) == 1 : assert series_name is not None new_attribute_value = pd . Series ( value_array , index = index_names , name = series_name ) elif len ( value_array . shape ) == 2 : assert column_names is not None new_attribute_value = pd . DataFrame ( value_array , index = index_names , columns = column_names ) setattr ( self , attribute_name , new_attribute_value ) return None
Store the estimation results that relate to statistical inference such as parameter estimates standard errors p - values etc .
147
20
232,803
def _store_generic_inference_results ( self , results_dict , all_params , all_names ) : # Store the utility coefficients self . _store_inferential_results ( results_dict [ "utility_coefs" ] , index_names = self . ind_var_names , attribute_name = "coefs" , series_name = "coefficients" ) # Store the gradient self . _store_inferential_results ( results_dict [ "final_gradient" ] , index_names = all_names , attribute_name = "gradient" , series_name = "gradient" ) # Store the hessian self . _store_inferential_results ( results_dict [ "final_hessian" ] , index_names = all_names , attribute_name = "hessian" , column_names = all_names ) # Store the variance-covariance matrix self . _store_inferential_results ( - 1 * scipy . linalg . inv ( self . hessian ) , index_names = all_names , attribute_name = "cov" , column_names = all_names ) # Store ALL of the estimated parameters self . _store_inferential_results ( np . concatenate ( all_params , axis = 0 ) , index_names = all_names , attribute_name = "params" , series_name = "parameters" ) # Store the standard errors self . _store_inferential_results ( np . sqrt ( np . diag ( self . cov ) ) , index_names = all_names , attribute_name = "standard_errors" , series_name = "std_err" ) # Store the t-stats of the estimated parameters self . tvalues = self . params / self . standard_errors self . tvalues . name = "t_stats" # Store the p-values p_vals = 2 * scipy . stats . norm . sf ( np . abs ( self . tvalues ) ) self . _store_inferential_results ( p_vals , index_names = all_names , attribute_name = "pvalues" , series_name = "p_values" ) # Store the fischer information matrix of estimated coefficients self . _store_inferential_results ( results_dict [ "fisher_info" ] , index_names = all_names , attribute_name = "fisher_information" , column_names = all_names ) # Store the 'robust' variance-covariance matrix robust_covariance = calc_asymptotic_covariance ( self . hessian , self . fisher_information ) self . _store_inferential_results ( robust_covariance , index_names = all_names , attribute_name = "robust_cov" , column_names = all_names ) # Store the 'robust' standard errors self . _store_inferential_results ( np . sqrt ( np . diag ( self . robust_cov ) ) , index_names = all_names , attribute_name = "robust_std_errs" , series_name = "robust_std_err" ) # Store the 'robust' t-stats of the estimated coefficients self . robust_t_stats = self . params / self . robust_std_errs self . robust_t_stats . name = "robust_t_stats" # Store the 'robust' p-values one_sided_p_vals = scipy . stats . norm . sf ( np . abs ( self . robust_t_stats ) ) self . _store_inferential_results ( 2 * one_sided_p_vals , index_names = all_names , attribute_name = "robust_p_vals" , series_name = "robust_p_values" ) return None
Store the model inference values that are common to all choice models . This includes things like index coefficients gradients hessians asymptotic covariance matrices t - values p - values and robust versions of these values .
851
45
232,804
def _store_optional_parameters ( self , optional_params , name_list_attr , default_name_str , all_names , all_params , param_attr_name , series_name ) : # Identify the number of optional parameters num_elements = optional_params . shape [ 0 ] # Get the names of the optional parameters parameter_names = getattr ( self , name_list_attr ) if parameter_names is None : parameter_names = [ default_name_str . format ( x ) for x in range ( 1 , num_elements + 1 ) ] # Store the names of the optional parameters in all_names all_names = list ( parameter_names ) + list ( all_names ) # Store the values of the optional parameters in all_params all_params . insert ( 0 , optional_params ) # Store the optional parameters on the model object self . _store_inferential_results ( optional_params , index_names = parameter_names , attribute_name = param_attr_name , series_name = series_name ) return all_names , all_params
Extract the optional parameters from the results_dict save them to the model object and update the list of all parameters and all parameter names .
237
28
232,805
def _adjust_inferential_results_for_parameter_constraints ( self , constraints ) : if constraints is not None : # Ensure the model object has inferential results inferential_attributes = [ "standard_errors" , "tvalues" , "pvalues" , "robust_std_errs" , "robust_t_stats" , "robust_p_vals" ] assert all ( [ hasattr ( self , x ) for x in inferential_attributes ] ) assert hasattr ( self , "params" ) all_names = self . params . index . tolist ( ) for series in [ getattr ( self , x ) for x in inferential_attributes ] : for pos in constraints : series . loc [ all_names [ pos ] ] = np . nan return None
Ensure that parameters that were constrained during estimation do not have any values showed for inferential results . After all no inference was performed .
177
27
232,806
def _check_result_dict_for_needed_keys ( self , results_dict ) : missing_cols = [ x for x in needed_result_keys if x not in results_dict ] if missing_cols != [ ] : msg = "The following keys are missing from results_dict\n{}" raise ValueError ( msg . format ( missing_cols ) ) return None
Ensure that results_dict has the needed keys to store all the estimation results . Raise a helpful ValueError otherwise .
85
24
232,807
def _add_mixing_variable_names_to_individual_vars ( self ) : assert isinstance ( self . ind_var_names , list ) # Note that if one estimates a mixed logit model, then the mixing # variables will be added to individual vars. And if one estimates # the model again (perhaps from different starting values), then # an error will be raised when creating the coefs series because we # will have added the mixing variables twice. The condition below # should prevent this error. already_included = any ( [ "Sigma " in x for x in self . ind_var_names ] ) if self . mixing_vars is not None and not already_included : new_ind_var_names = [ "Sigma " + x for x in self . mixing_vars ] self . ind_var_names += new_ind_var_names return None
Ensure that the model objects mixing variables are added to its list of individual variables .
194
17
232,808
def print_summaries ( self ) : if hasattr ( self , "fit_summary" ) and hasattr ( self , "summary" ) : print ( "\n" ) print ( self . fit_summary ) print ( "=" * 30 ) print ( self . summary ) else : msg = "This {} object has not yet been estimated so there " msg_2 = "are no estimation summaries to print." raise NotImplementedError ( msg . format ( self . model_type ) + msg_2 ) return None
Returns None . Will print the measures of fit and the estimation results for the model .
113
17
232,809
def prefix ( * kinds ) : def wrap ( fn ) : try : fn . prefix_kinds . extend ( kinds ) except AttributeError : fn . prefix_kinds = list ( kinds ) return fn return wrap
Decorate a method as handling prefix tokens of the given kinds
46
12
232,810
def infix ( * kinds ) : def wrap ( fn ) : try : fn . infix_kinds . extend ( kinds ) except AttributeError : fn . infix_kinds = list ( kinds ) return fn return wrap
Decorate a method as handling infix tokens of the given kinds
49
13
232,811
def attempt ( self , * kinds ) : if self . _error : raise self . _error token = self . next_token if not token : return None if kinds and token . kind not in kinds : return None self . _advance ( ) return token
Try to get the next token if it matches one of the kinds given otherwise returning None . If no kinds are given any kind is accepted .
54
28
232,812
def require ( self , * kinds ) : token = self . attempt ( ) if not token : raise SyntaxError ( 'Unexpected end of input' ) if kinds and token . kind not in kinds : raise SyntaxError . unexpected ( token , kinds ) return token
Get the next token raising an exception if it doesn t match one of the given kinds or the input ends . If no kinds are given returns the next token of any kind .
56
35
232,813
def local_symbol_table ( imports = None , symbols = ( ) ) : return SymbolTable ( table_type = LOCAL_TABLE_TYPE , symbols = symbols , imports = imports )
Constructs a local symbol table .
41
7
232,814
def shared_symbol_table ( name , version , symbols , imports = None ) : return SymbolTable ( table_type = SHARED_TABLE_TYPE , symbols = symbols , name = name , version = version , imports = imports )
Constructs a shared symbol table .
50
7
232,815
def placeholder_symbol_table ( name , version , max_id ) : if version <= 0 : raise ValueError ( 'Version must be grater than or equal to 1: %s' % version ) if max_id < 0 : raise ValueError ( 'Max ID must be zero or positive: %s' % max_id ) return SymbolTable ( table_type = SHARED_TABLE_TYPE , symbols = repeat ( None , max_id ) , name = name , version = version , is_substitute = True )
Constructs a shared symbol table that consists symbols that all have no known text .
114
16
232,816
def substitute_symbol_table ( table , version , max_id ) : if not table . table_type . is_shared : raise ValueError ( 'Symbol table to substitute from must be a shared table' ) if version <= 0 : raise ValueError ( 'Version must be grater than or equal to 1: %s' % version ) if max_id < 0 : raise ValueError ( 'Max ID must be zero or positive: %s' % max_id ) # TODO Recycle the symbol tokens from the source table into the substitute. if max_id <= table . max_id : symbols = ( token . text for token in islice ( table , max_id ) ) else : symbols = chain ( ( token . text for token in table ) , repeat ( None , max_id - table . max_id ) ) return SymbolTable ( table_type = SHARED_TABLE_TYPE , symbols = symbols , name = table . name , version = version , is_substitute = True )
Substitutes a given shared symbol table for another version .
217
12
232,817
def __add ( self , token ) : self . __symbols . append ( token ) text = token . text if text is not None and text not in self . __mapping : self . __mapping [ text ] = token
Unconditionally adds a token to the table .
50
10
232,818
def __add_shared ( self , original_token ) : sid = self . __new_sid ( ) token = SymbolToken ( original_token . text , sid , self . __import_location ( sid ) ) self . __add ( token ) return token
Adds a token normalizing the SID and import reference to this table .
55
15
232,819
def __add_import ( self , original_token ) : sid = self . __new_sid ( ) token = SymbolToken ( original_token . text , sid , original_token . location ) self . __add ( token ) return token
Adds a token normalizing only the SID
51
9
232,820
def __add_text ( self , text ) : if text is not None and not isinstance ( text , six . text_type ) : raise TypeError ( 'Local symbol definition must be a Unicode sequence or None: %r' % text ) sid = self . __new_sid ( ) location = None if self . table_type . is_shared : location = self . __import_location ( sid ) token = SymbolToken ( text , sid , location ) self . __add ( token ) return token
Adds the given Unicode text as a locally defined symbol .
107
11
232,821
def intern ( self , text ) : if self . table_type . is_shared : raise TypeError ( 'Cannot intern on shared symbol table' ) if not isinstance ( text , six . text_type ) : raise TypeError ( 'Cannot intern non-Unicode sequence into symbol table: %r' % text ) token = self . get ( text ) if token is None : token = self . __add_text ( text ) return token
Interns the given Unicode sequence into the symbol table .
97
11
232,822
def get ( self , key , default = None ) : if isinstance ( key , six . text_type ) : return self . __mapping . get ( key , None ) if not isinstance ( key , int ) : raise TypeError ( 'Key must be int or Unicode sequence.' ) # TODO determine if $0 should be returned for all symbol tables. if key == 0 : return SYMBOL_ZERO_TOKEN # Translate one-based SID to zero-based intern table index = key - 1 if index < 0 or key > len ( self ) : return default return self . __symbols [ index ]
Returns a token by text or local ID with a default .
135
12
232,823
def register ( self , table ) : if table . table_type . is_system : raise ValueError ( 'Cannot add system table to catalog' ) if not table . table_type . is_shared : raise ValueError ( 'Cannot add local table to catalog' ) if table . is_substitute : raise ValueError ( 'Cannot add substitute table to catalog' ) versions = self . __tables . get ( table . name ) if versions is None : versions = { } self . __tables [ table . name ] = versions versions [ table . version ] = table
Adds a shared table to the catalog .
125
8
232,824
def resolve ( self , name , version , max_id ) : if not isinstance ( name , six . text_type ) : raise TypeError ( 'Name must be a Unicode sequence: %r' % name ) if not isinstance ( version , int ) : raise TypeError ( 'Version must be an int: %r' % version ) if version <= 0 : raise ValueError ( 'Version must be positive: %s' % version ) if max_id is not None and max_id < 0 : raise ValueError ( 'Max ID must be zero or positive: %s' % max_id ) versions = self . __tables . get ( name ) if versions is None : if max_id is None : raise CannotSubstituteTable ( 'Found no table for %s, but no max_id' % name ) return placeholder_symbol_table ( name , version , max_id ) table = versions . get ( version ) if table is None : # TODO Replace the keys map with a search tree based dictionary. keys = list ( versions ) keys . sort ( ) table = versions [ keys [ - 1 ] ] if table . version == version and ( max_id is None or table . max_id == max_id ) : return table if max_id is None : raise CannotSubstituteTable ( 'Found match for %s, but not version %d, and no max_id' % ( name , version ) ) return substitute_symbol_table ( table , version , max_id )
Resolves the table for a given name and version .
324
11
232,825
def start_container ( self ) : self . __container_lengths . append ( self . current_container_length ) self . current_container_length = 0 new_container_node = _Node ( ) self . __container_node . add_child ( new_container_node ) self . __container_nodes . append ( self . __container_node ) self . __container_node = new_container_node
Add a node to the tree that represents the start of a container .
91
14
232,826
def end_container ( self , header_buf ) : if not self . __container_nodes : raise ValueError ( "Attempted to end container with none active." ) # Header needs to be the first node visited on this subtree. self . __container_node . add_leaf ( _Node ( header_buf ) ) self . __container_node = self . __container_nodes . pop ( ) parent_container_length = self . __container_lengths . pop ( ) self . current_container_length = parent_container_length + self . current_container_length + len ( header_buf )
Add a node containing the container s header to the current subtree .
133
14
232,827
def add_scalar_value ( self , value_buf ) : self . __container_node . add_child ( _Node ( value_buf ) ) self . current_container_length += len ( value_buf )
Add a node to the tree containing a scalar value .
49
12
232,828
def drain ( self ) : if self . __container_nodes : raise ValueError ( "Attempted to drain without ending all containers." ) for buf in self . __depth_traverse ( self . __root ) : if buf is not None : yield buf self . __reset ( )
Walk the BufferTree and reset it when finished .
61
10
232,829
def ion_equals ( a , b , timestamps_instants_only = False ) : if timestamps_instants_only : return _ion_equals_timestamps_instants ( a , b ) return _ion_equals_timestamps_data_model ( a , b )
Tests two objects for equivalence under the Ion data model .
69
13
232,830
def _ion_equals ( a , b , timestamp_comparison_func , recursive_comparison_func ) : for a , b in ( ( a , b ) , ( b , a ) ) : # Ensures that operand order does not matter. if isinstance ( a , _IonNature ) : if isinstance ( b , _IonNature ) : # Both operands have _IonNature. Their IonTypes and annotations must be equivalent. eq = a . ion_type is b . ion_type and _annotations_eq ( a , b ) else : # Only one operand has _IonNature. It cannot be equivalent to the other operand if it has annotations. eq = not a . ion_annotations if eq : if isinstance ( a , IonPyList ) : return _sequences_eq ( a , b , recursive_comparison_func ) elif isinstance ( a , IonPyDict ) : return _structs_eq ( a , b , recursive_comparison_func ) elif isinstance ( a , IonPyTimestamp ) : return timestamp_comparison_func ( a , b ) elif isinstance ( a , IonPyNull ) : return isinstance ( b , IonPyNull ) or ( b is None and a . ion_type is IonType . NULL ) elif isinstance ( a , IonPySymbol ) or ( isinstance ( a , IonPyText ) and a . ion_type is IonType . SYMBOL ) : return _symbols_eq ( a , b ) elif isinstance ( a , IonPyDecimal ) : return _decimals_eq ( a , b ) elif isinstance ( a , IonPyFloat ) : return _floats_eq ( a , b ) else : return a == b return False # Reaching this point means that neither operand has _IonNature. for a , b in ( ( a , b ) , ( b , a ) ) : # Ensures that operand order does not matter. if isinstance ( a , list ) : return _sequences_eq ( a , b , recursive_comparison_func ) elif isinstance ( a , dict ) : return _structs_eq ( a , b , recursive_comparison_func ) elif isinstance ( a , datetime ) : return timestamp_comparison_func ( a , b ) elif isinstance ( a , SymbolToken ) : return _symbols_eq ( a , b ) elif isinstance ( a , Decimal ) : return _decimals_eq ( a , b ) elif isinstance ( a , float ) : return _floats_eq ( a , b ) return a == b
Compares a and b according to the description of the ion_equals method .
595
17
232,831
def _timestamps_eq ( a , b ) : assert isinstance ( a , datetime ) if not isinstance ( b , datetime ) : return False # Local offsets must be equivalent. if ( a . tzinfo is None ) ^ ( b . tzinfo is None ) : return False if a . utcoffset ( ) != b . utcoffset ( ) : return False for a , b in ( ( a , b ) , ( b , a ) ) : if isinstance ( a , Timestamp ) : if isinstance ( b , Timestamp ) : # Both operands declare their precisions. They are only equivalent if their precisions are the same. if a . precision is b . precision and a . fractional_precision is b . fractional_precision : break return False elif a . precision is not TimestampPrecision . SECOND or a . fractional_precision != MICROSECOND_PRECISION : # Only one of the operands declares its precision. It is only equivalent to the other (a naive datetime) # if it has full microseconds precision. return False return a == b
Compares two timestamp operands for equivalence under the Ion data model .
246
15
232,832
def _timestamp_instants_eq ( a , b ) : assert isinstance ( a , datetime ) if not isinstance ( b , datetime ) : return False # datetime's __eq__ can't compare a None offset and a non-None offset. For these equivalence semantics, a None # offset (unknown local offset) is treated equivalently to a +00:00. if a . tzinfo is None : a = a . replace ( tzinfo = OffsetTZInfo ( ) ) if b . tzinfo is None : b = b . replace ( tzinfo = OffsetTZInfo ( ) ) # datetime's __eq__ implementation compares instants; offsets and precision need not be equal. return a == b
Compares two timestamp operands for point - in - time equivalence only .
161
16
232,833
def _parse_var_int_components ( buf , signed ) : value = 0 sign = 1 while True : ch = buf . read ( 1 ) if ch == '' : raise IonException ( 'Variable integer under-run' ) octet = ord ( ch ) if signed : if octet & _VAR_INT_SIGN_MASK : sign = - 1 value = octet & _VAR_INT_SIGN_VALUE_MASK signed = False else : value <<= _VAR_INT_VALUE_BITS value |= octet & _VAR_INT_VALUE_MASK if octet & _VAR_INT_SIGNAL_MASK : break return sign , value
Parses a VarInt or VarUInt field from a file - like object .
151
18
232,834
def _parse_signed_int_components ( buf ) : sign_bit = 0 value = 0 first = True while True : ch = buf . read ( 1 ) if ch == b'' : break octet = ord ( ch ) if first : if octet & _SIGNED_INT_SIGN_MASK : sign_bit = 1 value = octet & _SIGNED_INT_SIGN_VALUE_MASK first = False else : value <<= 8 value |= octet return sign_bit , value
Parses the remainder of a file - like object as a signed magnitude value .
110
17
232,835
def _parse_decimal ( buf ) : exponent = _parse_var_int ( buf , signed = True ) sign_bit , coefficient = _parse_signed_int_components ( buf ) if coefficient == 0 : # Handle the zero cases--especially negative zero value = Decimal ( ( sign_bit , ( 0 , ) , exponent ) ) else : coefficient *= sign_bit and - 1 or 1 value = Decimal ( coefficient ) . scaleb ( exponent ) return value
Parses the remainder of a file - like object as a decimal .
103
15
232,836
def _create_delegate_handler ( delegate ) : @ coroutine def handler ( * args ) : yield yield delegate . send ( Transition ( args , delegate ) ) return handler
Creates a handler function that creates a co - routine that can yield once with the given positional arguments to the delegate as a transition .
37
27
232,837
def _var_uint_field_handler ( handler , ctx ) : _ , self = yield queue = ctx . queue value = 0 while True : if len ( queue ) == 0 : # We don't know when the field ends, so read at least one byte. yield ctx . read_data_transition ( 1 , self ) octet = queue . read_byte ( ) value <<= _VAR_INT_VALUE_BITS value |= octet & _VAR_INT_VALUE_MASK if octet & _VAR_INT_SIGNAL_MASK : break yield ctx . immediate_transition ( handler ( value , ctx ) )
Handler co - routine for variable unsigned integer fields that .
146
11
232,838
def _length_scalar_handler ( scalar_factory , ion_type , length , ctx ) : _ , self = yield if length == 0 : data = b'' else : yield ctx . read_data_transition ( length , self ) data = ctx . queue . read ( length ) scalar = scalar_factory ( data ) event_cls = IonEvent if callable ( scalar ) : # TODO Wrap the exception to get context position. event_cls = IonThunkEvent yield ctx . event_transition ( event_cls , IonEventType . SCALAR , ion_type , scalar )
Handles scalars scalar_factory is a function that returns a value or thunk .
143
20
232,839
def _annotation_handler ( ion_type , length , ctx ) : _ , self = yield self_handler = _create_delegate_handler ( self ) if ctx . annotations is not None : raise IonException ( 'Annotation cannot be nested in annotations' ) # We have to replace our context for annotations specifically to encapsulate the limit ctx = ctx . derive_container_context ( length , add_depth = 0 ) # Immediately read the length field and the annotations ( ann_length , _ ) , _ = yield ctx . immediate_transition ( _var_uint_field_handler ( self_handler , ctx ) ) if ann_length < 1 : raise IonException ( 'Invalid annotation length subfield; annotation wrapper must have at least one annotation.' ) # Read/parse the annotations. yield ctx . read_data_transition ( ann_length , self ) ann_data = ctx . queue . read ( ann_length ) annotations = tuple ( _parse_sid_iter ( ann_data ) ) if ctx . limit - ctx . queue . position < 1 : # There is no space left for the 'value' subfield, which is required. raise IonException ( 'Incorrect annotation wrapper length.' ) # Go parse the start of the value but go back to the real parent container. yield ctx . immediate_transition ( _start_type_handler ( ctx . field_name , ctx . whence , ctx , annotations = annotations ) )
Handles annotations . ion_type is ignored .
320
10
232,840
def _ordered_struct_start_handler ( handler , ctx ) : _ , self = yield self_handler = _create_delegate_handler ( self ) ( length , _ ) , _ = yield ctx . immediate_transition ( _var_uint_field_handler ( self_handler , ctx ) ) if length < 2 : # A valid field name/value pair is at least two octets: one for the field name SID and one for the value. raise IonException ( 'Ordered structs (type ID 0xD1) must have at least one field name/value pair.' ) yield ctx . immediate_transition ( handler ( length , ctx ) )
Handles the special case of ordered structs specified by the type ID 0xD1 .
148
19
232,841
def _container_start_handler ( ion_type , length , ctx ) : _ , self = yield container_ctx = ctx . derive_container_context ( length ) if ctx . annotations and ctx . limit != container_ctx . limit : # 'ctx' is the annotation wrapper context. `container_ctx` represents the wrapper's 'value' subfield. Their # limits must match. raise IonException ( 'Incorrect annotation wrapper length.' ) delegate = _container_handler ( ion_type , container_ctx ) # We start the container, and transition to the new container processor. yield ctx . event_transition ( IonEvent , IonEventType . CONTAINER_START , ion_type , value = None , whence = delegate )
Handles container delegation .
162
5
232,842
def _bind_length_handlers ( tids , user_handler , lns ) : for tid in tids : for ln in lns : type_octet = _gen_type_octet ( tid , ln ) ion_type = _TID_VALUE_TYPE_TABLE [ tid ] if ln == 1 and ion_type is IonType . STRUCT : handler = partial ( _ordered_struct_start_handler , partial ( user_handler , ion_type ) ) elif ln < _LENGTH_FIELD_FOLLOWS : # Directly partially bind length. handler = partial ( user_handler , ion_type , ln ) else : # Delegate to length field parsing first. handler = partial ( _var_uint_field_handler , partial ( user_handler , ion_type ) ) _HANDLER_DISPATCH_TABLE [ type_octet ] = handler
Binds a set of handlers with the given factory .
197
11
232,843
def _bind_length_scalar_handlers ( tids , scalar_factory , lns = _NON_ZERO_LENGTH_LNS ) : handler = partial ( _length_scalar_handler , scalar_factory ) return _bind_length_handlers ( tids , handler , lns )
Binds a set of scalar handlers for an inclusive range of low - nibble values .
75
19
232,844
def remaining ( self ) : if self . depth == 0 : return _STREAM_REMAINING return self . limit - self . queue . position
Determines how many bytes are remaining in the current context .
32
13
232,845
def read_data_transition ( self , length , whence = None , skip = False , stream_event = ION_STREAM_INCOMPLETE_EVENT ) : if whence is None : whence = self . whence return Transition ( None , _read_data_handler ( length , whence , self , skip , stream_event ) )
Returns an immediate event_transition to read a specified number of bytes .
73
15
232,846
def _narrow_unichr ( code_point ) : try : if len ( code_point . char ) > 1 : return code_point . char except AttributeError : pass return six . unichr ( code_point )
Retrieves the unicode character representing any given code point in a way that won t break on narrow builds .
51
23
232,847
def reader_trampoline ( start , allow_flush = False ) : data_event = yield if data_event is None or data_event . type is not ReadEventType . NEXT : raise TypeError ( 'Reader must be started with NEXT' ) trans = Transition ( None , start ) while True : trans = trans . delegate . send ( Transition ( data_event , trans . delegate ) ) data_event = None if trans . event is not None : # Only yield if there is an event. data_event = ( yield trans . event ) if trans . event . event_type . is_stream_signal : if data_event . type is not ReadEventType . DATA : if not allow_flush or not ( trans . event . event_type is IonEventType . INCOMPLETE and data_event . type is ReadEventType . NEXT ) : raise TypeError ( 'Reader expected data: %r' % ( data_event , ) ) else : if data_event . type is ReadEventType . DATA : raise TypeError ( 'Reader did not expect data' ) if data_event . type is ReadEventType . DATA and len ( data_event . data ) == 0 : raise ValueError ( 'Empty data not allowed' ) if trans . event . depth == 0 and trans . event . event_type is not IonEventType . CONTAINER_START and data_event . type is ReadEventType . SKIP : raise TypeError ( 'Cannot skip at the top-level' )
Provides the co - routine trampoline for a reader state machine .
322
15
232,848
def blocking_reader ( reader , input , buffer_size = _DEFAULT_BUFFER_SIZE ) : ion_event = None while True : read_event = ( yield ion_event ) ion_event = reader . send ( read_event ) while ion_event is not None and ion_event . event_type . is_stream_signal : data = input . read ( buffer_size ) if len ( data ) == 0 : # End of file. if ion_event . event_type is IonEventType . INCOMPLETE : ion_event = reader . send ( NEXT_EVENT ) continue else : yield ION_STREAM_END_EVENT return ion_event = reader . send ( read_data_event ( data ) )
Provides an implementation of using the reader co - routine with a file - like object .
161
18
232,849
def read ( self , length , skip = False ) : if length > self . __size : raise IndexError ( 'Cannot pop %d bytes, %d bytes in buffer queue' % ( length , self . __size ) ) self . position += length self . __size -= length segments = self . __segments offset = self . __offset data = self . __data_cls ( ) while length > 0 : segment = segments [ 0 ] segment_off = offset segment_len = len ( segment ) segment_rem = segment_len - segment_off segment_read_len = min ( segment_rem , length ) if segment_off == 0 and segment_read_len == segment_rem : # consume an entire segment if skip : segment_slice = self . __element_type ( ) else : segment_slice = segment else : # Consume a part of the segment. if skip : segment_slice = self . __element_type ( ) else : segment_slice = segment [ segment_off : segment_off + segment_read_len ] offset = 0 segment_off += segment_read_len if segment_off == segment_len : segments . popleft ( ) self . __offset = 0 else : self . __offset = segment_off if length <= segment_rem and len ( data ) == 0 : return segment_slice data . extend ( segment_slice ) length -= segment_read_len if self . is_unicode : return data . as_text ( ) else : return data
Consumes the first length bytes from the accumulator .
322
11
232,850
def unread ( self , c ) : if self . position < 1 : raise IndexError ( 'Cannot unread an empty buffer queue.' ) if isinstance ( c , six . text_type ) : if not self . is_unicode : BufferQueue . _incompatible_types ( self . is_unicode , c ) else : c = self . __chr ( c ) num_code_units = self . is_unicode and len ( c ) or 1 if self . __offset == 0 : if num_code_units == 1 and six . PY3 : if self . is_unicode : segment = c else : segment = six . int2byte ( c ) else : segment = c self . __segments . appendleft ( segment ) else : self . __offset -= num_code_units def verify ( ch , idx ) : existing = self . __segments [ 0 ] [ self . __offset + idx ] if existing != ch : raise ValueError ( 'Attempted to unread %s when %s was expected.' % ( ch , existing ) ) if num_code_units == 1 : verify ( c , 0 ) else : for i in range ( num_code_units ) : verify ( c [ i ] , i ) self . __size += num_code_units self . position -= num_code_units
Unread the given character byte or code point .
291
10
232,851
def skip ( self , length ) : if length >= self . __size : skip_amount = self . __size rem = length - skip_amount self . __segments . clear ( ) self . __offset = 0 self . __size = 0 self . position += skip_amount else : rem = 0 self . read ( length , skip = True ) return rem
Removes length bytes and returns the number length still required to skip
76
13
232,852
def managed_reader ( reader , catalog = None ) : if catalog is None : catalog = SymbolTableCatalog ( ) ctx = _ManagedContext ( catalog ) symbol_trans = Transition ( None , None ) ion_event = None while True : if symbol_trans . delegate is not None and ion_event is not None and not ion_event . event_type . is_stream_signal : # We have a symbol processor active, do not yield to user. delegate = symbol_trans . delegate symbol_trans = delegate . send ( Transition ( ion_event , delegate ) ) if symbol_trans . delegate is None : # When the symbol processor terminates, the event is the context # and there is no delegate. ctx = symbol_trans . event data_event = NEXT_EVENT else : data_event = symbol_trans . event else : data_event = None if ion_event is not None : event_type = ion_event . event_type ion_type = ion_event . ion_type depth = ion_event . depth # System values only happen at the top-level if depth == 0 : if event_type is IonEventType . VERSION_MARKER : if ion_event != ION_VERSION_MARKER_EVENT : raise IonException ( 'Invalid IVM: %s' % ( ion_event , ) ) # Reset and swallow IVM ctx = _ManagedContext ( ctx . catalog ) data_event = NEXT_EVENT elif ion_type is IonType . SYMBOL and len ( ion_event . annotations ) == 0 and ion_event . value is not None and ctx . resolve ( ion_event . value ) . text == TEXT_ION_1_0 : assert symbol_trans . delegate is None # A faux IVM is a NOP data_event = NEXT_EVENT elif event_type is IonEventType . CONTAINER_START and ion_type is IonType . STRUCT and ctx . has_symbol_table_annotation ( ion_event . annotations ) : assert symbol_trans . delegate is None # Activate a new symbol processor. delegate = _local_symbol_table_handler ( ctx ) symbol_trans = Transition ( None , delegate ) data_event = NEXT_EVENT if data_event is None : # No system processing or we have to get data, yield control. if ion_event is not None : ion_event = _managed_thunk_event ( ctx , ion_event ) data_event = yield ion_event ion_event = reader . send ( data_event )
Managed reader wrapping another reader .
561
7
232,853
def _illegal_character ( c , ctx , message = '' ) : container_type = ctx . container . ion_type is None and 'top-level' or ctx . container . ion_type . name value_type = ctx . ion_type is None and 'unknown' or ctx . ion_type . name if c is None : header = 'Illegal token' else : c = 'EOF' if BufferQueue . is_eof ( c ) else _chr ( c ) header = 'Illegal character %s' % ( c , ) raise IonException ( '%s at position %d in %s value contained in %s. %s Pending value: %s' % ( header , ctx . queue . position , value_type , container_type , message , ctx . value ) )
Raises an IonException upon encountering the given illegal character in the given context .
180
16
232,854
def _defaultdict ( dct , fallback = _illegal_character ) : out = defaultdict ( lambda : fallback ) for k , v in six . iteritems ( dct ) : out [ k ] = v return out
Wraps the given dictionary such that the given fallback function will be called when a nonexistent key is accessed .
49
22
232,855
def _number_negative_start_handler ( c , ctx ) : assert c == _MINUS assert len ( ctx . value ) == 0 ctx . set_ion_type ( IonType . INT ) ctx . value . append ( c ) c , _ = yield yield ctx . immediate_transition ( _NEGATIVE_TABLE [ c ] ( c , ctx ) )
Handles numeric values that start with a negative sign . Branches to delegate co - routines according to _NEGATIVE_TABLE .
85
27
232,856
def _number_zero_start_handler ( c , ctx ) : assert c == _ZERO assert len ( ctx . value ) == 0 or ( len ( ctx . value ) == 1 and ctx . value [ 0 ] == _MINUS ) ctx . set_ion_type ( IonType . INT ) ctx . value . append ( c ) c , _ = yield if _ends_value ( c ) : trans = ctx . event_transition ( IonThunkEvent , IonEventType . SCALAR , ctx . ion_type , _parse_decimal_int ( ctx . value ) ) if c == _SLASH : trans = ctx . immediate_transition ( _number_slash_end_handler ( c , ctx , trans ) ) yield trans yield ctx . immediate_transition ( _ZERO_START_TABLE [ c ] ( c , ctx ) )
Handles numeric values that start with zero or negative zero . Branches to delegate co - routines according to _ZERO_START_TABLE .
201
30
232,857
def _number_or_timestamp_handler ( c , ctx ) : assert c in _DIGITS ctx . set_ion_type ( IonType . INT ) # If this is the last digit read, this value is an Int. val = ctx . value val . append ( c ) c , self = yield trans = ctx . immediate_transition ( self ) while True : if _ends_value ( c ) : trans = ctx . event_transition ( IonThunkEvent , IonEventType . SCALAR , ctx . ion_type , _parse_decimal_int ( ctx . value ) ) if c == _SLASH : trans = ctx . immediate_transition ( _number_slash_end_handler ( c , ctx , trans ) ) else : if c not in _DIGITS : trans = ctx . immediate_transition ( _NUMBER_OR_TIMESTAMP_TABLE [ c ] ( c , ctx ) ) else : val . append ( c ) c , _ = yield trans
Handles numeric values that start with digits 1 - 9 . May terminate a value in which case that value is an int . If it does not terminate a value it branches to delegate co - routines according to _NUMBER_OR_TIMESTAMP_TABLE .
230
54
232,858
def _number_slash_end_handler ( c , ctx , event ) : assert c == _SLASH c , self = yield next_ctx = ctx . derive_child_context ( ctx . whence ) comment = _comment_handler ( _SLASH , next_ctx , next_ctx . whence ) comment . send ( ( c , comment ) ) # If the previous line returns without error, it's a valid comment and the number may be emitted. yield _CompositeTransition ( event , ctx , comment , next_ctx , initialize_handler = False )
Handles numeric values that end in a forward slash . This is only legal if the slash begins a comment ; thus this co - routine either results in an error being raised or an event being yielded .
125
40
232,859
def _exponent_handler_factory ( ion_type , exp_chars , parse_func , first_char = None ) : def transition ( prev , c , ctx , trans ) : if c in _SIGN and prev in exp_chars : ctx . value . append ( c ) else : _illegal_character ( c , ctx ) return trans illegal = exp_chars + _SIGN return _numeric_handler_factory ( _DIGITS , transition , lambda c , ctx : c in exp_chars , illegal , parse_func , illegal_at_end = illegal , ion_type = ion_type , first_char = first_char )
Generates a handler co - routine which tokenizes an numeric exponent .
148
14
232,860
def _coefficient_handler_factory ( trans_table , parse_func , assertion = lambda c , ctx : True , ion_type = None , append_first_if_not = None ) : def transition ( prev , c , ctx , trans ) : if prev == _UNDERSCORE : _illegal_character ( c , ctx , 'Underscore before %s.' % ( _chr ( c ) , ) ) return ctx . immediate_transition ( trans_table [ c ] ( c , ctx ) ) return _numeric_handler_factory ( _DIGITS , transition , assertion , ( _DOT , ) , parse_func , ion_type = ion_type , append_first_if_not = append_first_if_not )
Generates a handler co - routine which tokenizes a numeric coefficient .
173
14
232,861
def _radix_int_handler_factory ( radix_indicators , charset , parse_func ) : def assertion ( c , ctx ) : return c in radix_indicators and ( ( len ( ctx . value ) == 1 and ctx . value [ 0 ] == _ZERO ) or ( len ( ctx . value ) == 2 and ctx . value [ 0 ] == _MINUS and ctx . value [ 1 ] == _ZERO ) ) and ctx . ion_type == IonType . INT return _numeric_handler_factory ( charset , lambda prev , c , ctx , trans : _illegal_character ( c , ctx ) , assertion , radix_indicators , parse_func , illegal_at_end = radix_indicators )
Generates a handler co - routine which tokenizes a integer of a particular radix .
175
18
232,862
def _timestamp_zero_start_handler ( c , ctx ) : val = ctx . value ctx . set_ion_type ( IonType . TIMESTAMP ) if val [ 0 ] == _MINUS : _illegal_character ( c , ctx , 'Negative year not allowed.' ) val . append ( c ) c , self = yield trans = ctx . immediate_transition ( self ) while True : if c in _TIMESTAMP_YEAR_DELIMITERS : trans = ctx . immediate_transition ( _timestamp_handler ( c , ctx ) ) elif c in _DIGITS : val . append ( c ) else : _illegal_character ( c , ctx ) c , _ = yield trans
Handles numeric values that start with a zero followed by another digit . This is either a timestamp or an error .
166
23
232,863
def _parse_timestamp ( tokens ) : def parse ( ) : precision = TimestampPrecision . YEAR off_hour = tokens [ _TimestampState . OFF_HOUR ] off_minutes = tokens [ _TimestampState . OFF_MINUTE ] microsecond = None fraction_digits = None if off_hour is not None : assert off_minutes is not None off_sign = - 1 if _MINUS in off_hour else 1 off_hour = int ( off_hour ) off_minutes = int ( off_minutes ) * off_sign if off_sign == - 1 and off_hour == 0 and off_minutes == 0 : # -00:00 (unknown UTC offset) is a naive datetime. off_hour = None off_minutes = None else : assert off_minutes is None year = tokens [ _TimestampState . YEAR ] assert year is not None year = int ( year ) month = tokens [ _TimestampState . MONTH ] if month is None : month = 1 else : month = int ( month ) precision = TimestampPrecision . MONTH day = tokens [ _TimestampState . DAY ] if day is None : day = 1 else : day = int ( day ) precision = TimestampPrecision . DAY hour = tokens [ _TimestampState . HOUR ] minute = tokens [ _TimestampState . MINUTE ] if hour is None : assert minute is None hour = 0 minute = 0 else : assert minute is not None hour = int ( hour ) minute = int ( minute ) precision = TimestampPrecision . MINUTE second = tokens [ _TimestampState . SECOND ] if second is None : second = 0 else : second = int ( second ) precision = TimestampPrecision . SECOND fraction = tokens [ _TimestampState . FRACTIONAL ] if fraction is not None : fraction_digits = len ( fraction ) if fraction_digits > MICROSECOND_PRECISION : for digit in fraction [ MICROSECOND_PRECISION : ] : if digit != _ZERO : raise ValueError ( 'Only six significant digits supported in timestamp fractional. Found %s.' % ( fraction , ) ) fraction_digits = MICROSECOND_PRECISION fraction = fraction [ 0 : MICROSECOND_PRECISION ] else : fraction . extend ( _ZEROS [ MICROSECOND_PRECISION - fraction_digits ] ) microsecond = int ( fraction ) return timestamp ( year , month , day , hour , minute , second , microsecond , off_hour , off_minutes , precision = precision , fractional_precision = fraction_digits ) return parse
Parses each token in the given _TimestampTokens and marshals the numeric components into a Timestamp .
580
23
232,864
def _comment_handler ( c , ctx , whence ) : assert c == _SLASH c , self = yield if c == _SLASH : ctx . set_line_comment ( ) block_comment = False elif c == _ASTERISK : if ctx . line_comment : # This happens when a block comment immediately follows a line comment. ctx . set_line_comment ( False ) block_comment = True else : _illegal_character ( c , ctx , 'Illegal character sequence "/%s".' % ( _chr ( c ) , ) ) done = False prev = None trans = ctx . immediate_transition ( self ) while not done : c , _ = yield trans if block_comment : if prev == _ASTERISK and c == _SLASH : done = True prev = c else : if c in _NEWLINES or BufferQueue . is_eof ( c ) : done = True yield ctx . set_self_delimiting ( True ) . immediate_transition ( whence )
Handles comments . Upon completion of the comment immediately transitions back to whence .
227
15
232,865
def _sexp_slash_handler ( c , ctx , whence = None , pending_event = None ) : assert c == _SLASH if whence is None : whence = ctx . whence c , self = yield ctx . queue . unread ( c ) if c == _ASTERISK or c == _SLASH : yield ctx . immediate_transition ( _comment_handler ( _SLASH , ctx , whence ) ) else : if pending_event is not None : # Since this is the start of a new value and not a comment, the pending event must be emitted. assert pending_event . event is not None yield _CompositeTransition ( pending_event , ctx , partial ( _operator_symbol_handler , _SLASH ) ) yield ctx . immediate_transition ( _operator_symbol_handler ( _SLASH , ctx ) )
Handles the special case of a forward - slash within an s - expression . This is either an operator or a comment .
193
25
232,866
def _typed_null_handler ( c , ctx ) : assert c == _DOT c , self = yield nxt = _NULL_STARTS i = 0 length = None done = False trans = ctx . immediate_transition ( self ) while True : if done : if _ends_value ( c ) or ( ctx . container . ion_type is IonType . SEXP and c in _OPERATORS ) : trans = ctx . event_transition ( IonEvent , IonEventType . SCALAR , nxt . ion_type , None ) else : _illegal_character ( c , ctx , 'Illegal null type.' ) elif length is None : if c not in nxt : _illegal_character ( c , ctx , 'Illegal null type.' ) nxt = nxt [ c ] if isinstance ( nxt , _NullSequence ) : length = len ( nxt . sequence ) else : if c != nxt [ i ] : _illegal_character ( c , ctx , 'Illegal null type.' ) i += 1 done = i == length c , _ = yield trans
Handles typed null values . Entered once null . has been found .
245
14
232,867
def _inf_or_operator_handler_factory ( c_start , is_delegate = True ) : @ coroutine def inf_or_operator_handler ( c , ctx ) : next_ctx = None if not is_delegate : ctx . value . append ( c_start ) c , self = yield else : assert ctx . value [ 0 ] == c_start assert c not in _DIGITS ctx . queue . unread ( c ) next_ctx = ctx _ , self = yield assert c == _ maybe_inf = True ctx . set_ion_type ( IonType . FLOAT ) match_index = 0 trans = ctx . immediate_transition ( self ) while True : if maybe_inf : if match_index < len ( _INF_SUFFIX ) : maybe_inf = c == _INF_SUFFIX [ match_index ] else : if _ends_value ( c ) or ( ctx . container . ion_type is IonType . SEXP and c in _OPERATORS ) : yield ctx . event_transition ( IonEvent , IonEventType . SCALAR , IonType . FLOAT , c_start == _MINUS and _NEG_INF or _POS_INF ) else : maybe_inf = False if maybe_inf : match_index += 1 else : ctx . set_unicode ( ) if match_index > 0 : next_ctx = ctx . derive_child_context ( ctx . whence ) for ch in _INF_SUFFIX [ 0 : match_index ] : next_ctx . value . append ( ch ) break c , self = yield trans if ctx . container is not _C_SEXP : _illegal_character ( c , next_ctx is None and ctx or next_ctx , 'Illegal character following %s.' % ( _chr ( c_start ) , ) ) if match_index == 0 : if c in _OPERATORS : yield ctx . immediate_transition ( _operator_symbol_handler ( c , ctx ) ) yield ctx . event_transition ( IonEvent , IonEventType . SCALAR , IonType . SYMBOL , ctx . value . as_symbol ( ) ) yield _CompositeTransition ( ctx . event_transition ( IonEvent , IonEventType . SCALAR , IonType . SYMBOL , ctx . value . as_symbol ( ) ) , ctx , partial ( _unquoted_symbol_handler , c ) , next_ctx ) return inf_or_operator_handler
Generates handler co - routines for values that may be + inf or - inf .
577
17
232,868
def _operator_symbol_handler ( c , ctx ) : assert c in _OPERATORS ctx . set_unicode ( ) val = ctx . value val . append ( c ) c , self = yield trans = ctx . immediate_transition ( self ) while c in _OPERATORS : val . append ( c ) c , _ = yield trans yield ctx . event_transition ( IonEvent , IonEventType . SCALAR , IonType . SYMBOL , val . as_symbol ( ) )
Handles operator symbol values within s - expressions .
116
10
232,869
def _symbol_token_end ( c , ctx , is_field_name , value = None ) : if value is None : value = ctx . value if is_field_name or c in _SYMBOL_TOKEN_TERMINATORS or ctx . quoted_text : # This might be an annotation or a field name. Mark it as self-delimiting because a symbol token termination # character has been found. ctx . set_self_delimiting ( ctx . quoted_text ) . set_pending_symbol ( value ) . set_quoted_text ( False ) trans = ctx . immediate_transition ( ctx . whence ) else : trans = ctx . event_transition ( IonEvent , IonEventType . SCALAR , IonType . SYMBOL , _as_symbol ( value ) ) return trans
Returns a transition which ends the current symbol token .
190
10
232,870
def _unquoted_symbol_handler ( c , ctx , is_field_name = False ) : in_sexp = ctx . container . ion_type is IonType . SEXP ctx . set_unicode ( ) if c not in _IDENTIFIER_CHARACTERS : if in_sexp and c in _OPERATORS : c_next , _ = yield ctx . queue . unread ( c_next ) assert ctx . value yield _CompositeTransition ( ctx . event_transition ( IonEvent , IonEventType . SCALAR , IonType . SYMBOL , ctx . value . as_symbol ( ) ) , ctx , partial ( _operator_symbol_handler , c ) ) _illegal_character ( c , ctx . set_ion_type ( IonType . SYMBOL ) ) val = ctx . value val . append ( c ) prev = c c , self = yield trans = ctx . immediate_transition ( self ) while True : if c not in _WHITESPACE : if prev in _WHITESPACE or _ends_value ( c ) or c == _COLON or ( in_sexp and c in _OPERATORS ) : break if c not in _IDENTIFIER_CHARACTERS : _illegal_character ( c , ctx . set_ion_type ( IonType . SYMBOL ) ) val . append ( c ) prev = c c , _ = yield trans yield _symbol_token_end ( c , ctx , is_field_name )
Handles identifier symbol tokens . If in an s - expression these may be followed without whitespace by operators .
347
22
232,871
def _single_quote_handler_factory ( on_single_quote , on_other ) : @ coroutine def single_quote_handler ( c , ctx , is_field_name = False ) : assert c == _SINGLE_QUOTE c , self = yield if c == _SINGLE_QUOTE and not _is_escaped ( c ) : yield on_single_quote ( c , ctx , is_field_name ) else : ctx . set_unicode ( quoted_text = True ) yield on_other ( c , ctx , is_field_name ) return single_quote_handler
Generates handlers used for classifying tokens that begin with one or more single quotes .
138
17
232,872
def _struct_or_lob_handler ( c , ctx ) : assert c == _OPEN_BRACE c , self = yield yield ctx . immediate_transition ( _STRUCT_OR_LOB_TABLE [ c ] ( c , ctx ) )
Handles tokens that begin with an open brace .
59
10
232,873
def _lob_start_handler ( c , ctx ) : assert c == _OPEN_BRACE c , self = yield trans = ctx . immediate_transition ( self ) quotes = 0 while True : if c in _WHITESPACE : if quotes > 0 : _illegal_character ( c , ctx ) elif c == _DOUBLE_QUOTE : if quotes > 0 : _illegal_character ( c , ctx ) ctx . set_ion_type ( IonType . CLOB ) . set_unicode ( quoted_text = True ) yield ctx . immediate_transition ( _short_string_handler ( c , ctx ) ) elif c == _SINGLE_QUOTE : if not quotes : ctx . set_ion_type ( IonType . CLOB ) . set_unicode ( quoted_text = True ) quotes += 1 if quotes == 3 : yield ctx . immediate_transition ( _long_string_handler ( c , ctx ) ) else : yield ctx . immediate_transition ( _blob_end_handler ( c , ctx ) ) c , _ = yield trans
Handles tokens that begin with two open braces .
252
10
232,874
def _lob_end_handler_factory ( ion_type , action , validate = lambda c , ctx , action_res : None ) : assert ion_type is IonType . BLOB or ion_type is IonType . CLOB @ coroutine def lob_end_handler ( c , ctx ) : val = ctx . value prev = c action_res = None if c != _CLOSE_BRACE and c not in _WHITESPACE : action_res = action ( c , ctx , prev , action_res , True ) c , self = yield trans = ctx . immediate_transition ( self ) while True : if c in _WHITESPACE : if prev == _CLOSE_BRACE : _illegal_character ( c , ctx . set_ion_type ( ion_type ) , 'Expected }.' ) elif c == _CLOSE_BRACE : if prev == _CLOSE_BRACE : validate ( c , ctx , action_res ) break else : action_res = action ( c , ctx , prev , action_res , False ) prev = c c , _ = yield trans ctx . set_self_delimiting ( True ) # Lob values are self-delimiting (they are terminated by '}}'). yield ctx . event_transition ( IonThunkEvent , IonEventType . SCALAR , ion_type , _parse_lob ( ion_type , val ) ) return lob_end_handler
Generates handlers for the end of blob or clob values .
323
13
232,875
def _blob_end_handler_factory ( ) : def expand_res ( res ) : if res is None : return 0 , 0 return res def action ( c , ctx , prev , res , is_first ) : num_digits , num_pads = expand_res ( res ) if c in _BASE64_DIGITS : if prev == _CLOSE_BRACE or prev == _BASE64_PAD : _illegal_character ( c , ctx . set_ion_type ( IonType . BLOB ) ) num_digits += 1 elif c == _BASE64_PAD : if prev == _CLOSE_BRACE : _illegal_character ( c , ctx . set_ion_type ( IonType . BLOB ) ) num_pads += 1 else : _illegal_character ( c , ctx . set_ion_type ( IonType . BLOB ) ) ctx . value . append ( c ) return num_digits , num_pads def validate ( c , ctx , res ) : num_digits , num_pads = expand_res ( res ) if num_pads > 3 or ( num_digits + num_pads ) % 4 != 0 : _illegal_character ( c , ctx , 'Incorrect number of pad characters (%d) for a blob of %d base-64 digits.' % ( num_pads , num_digits ) ) return _lob_end_handler_factory ( IonType . BLOB , action , validate )
Generates the handler for the end of a blob value . This includes the base - 64 data and the two closing braces .
337
25
232,876
def _clob_end_handler_factory ( ) : def action ( c , ctx , prev , res , is_first ) : if is_first and ctx . is_self_delimiting and c == _DOUBLE_QUOTE : assert c is prev return res _illegal_character ( c , ctx ) return _lob_end_handler_factory ( IonType . CLOB , action )
Generates the handler for the end of a clob value . This includes anything from the data s closing quote through the second closing brace .
93
28
232,877
def _container_start_handler_factory ( ion_type , before_yield = lambda c , ctx : None ) : assert ion_type . is_container @ coroutine def container_start_handler ( c , ctx ) : before_yield ( c , ctx ) yield yield ctx . event_transition ( IonEvent , IonEventType . CONTAINER_START , ion_type , value = None ) return container_start_handler
Generates handlers for tokens that begin with container start characters .
101
12
232,878
def _skip_trampoline ( handler ) : data_event , self = ( yield None ) delegate = handler event = None depth = 0 while True : def pass_through ( ) : _trans = delegate . send ( Transition ( data_event , delegate ) ) return _trans , _trans . delegate , _trans . event if data_event is not None and data_event . type is ReadEventType . SKIP : while True : trans , delegate , event = pass_through ( ) if event is not None : if event . event_type is IonEventType . CONTAINER_END and event . depth <= depth : break if event is None or event . event_type is IonEventType . INCOMPLETE : data_event , _ = yield Transition ( event , self ) else : trans , delegate , event = pass_through ( ) if event is not None and ( event . event_type is IonEventType . CONTAINER_START or event . event_type is IonEventType . CONTAINER_END ) : depth = event . depth data_event , _ = yield Transition ( event , self )
Intercepts events from container handlers emitting them only if they should not be skipped .
238
17
232,879
def read_data_event ( self , whence , complete = False , can_flush = False ) : return Transition ( None , _read_data_handler ( whence , self , complete , can_flush ) )
Creates a transition to a co - routine for retrieving data as bytes .
45
15
232,880
def set_unicode ( self , quoted_text = False ) : if isinstance ( self . value , CodePointArray ) : assert self . quoted_text == quoted_text return self self . value = CodePointArray ( self . value ) self . quoted_text = quoted_text self . line_comment = False return self
Converts the context s value to a sequence of unicode code points for holding text tokens indicating whether the text is quoted .
70
25
232,881
def set_quoted_text ( self , quoted_text ) : self . quoted_text = quoted_text self . line_comment = False return self
Sets the context s quoted_text flag . Useful when entering and exiting quoted text tokens .
33
19
232,882
def derive_container_context ( self , ion_type , whence ) : if ion_type is IonType . STRUCT : container = _C_STRUCT elif ion_type is IonType . LIST : container = _C_LIST elif ion_type is IonType . SEXP : container = _C_SEXP else : raise TypeError ( 'Cannot derive container context for non-container type %s.' % ( ion_type . name , ) ) return _HandlerContext ( container = container , queue = self . queue , field_name = self . field_name , annotations = self . annotations , depth = self . depth + 1 , whence = whence , value = None , # containers don't have a value ion_type = ion_type , pending_symbol = None )
Derives a container context as a child of the current context .
168
13
232,883
def derive_child_context ( self , whence ) : return _HandlerContext ( container = self . container , queue = self . queue , field_name = None , annotations = None , depth = self . depth , whence = whence , value = bytearray ( ) , # children start without a value ion_type = None , pending_symbol = None )
Derives a scalar context as a child of the current context .
77
14
232,884
def set_ion_type ( self , ion_type ) : if ion_type is self . ion_type : return self self . ion_type = ion_type self . line_comment = False return self
Sets context to the given IonType .
45
9
232,885
def set_annotation ( self ) : assert self . pending_symbol is not None assert not self . value annotations = ( _as_symbol ( self . pending_symbol , is_symbol_value = False ) , ) # pending_symbol becomes an annotation self . annotations = annotations if not self . annotations else self . annotations + annotations self . ion_type = None self . pending_symbol = None # reset pending symbol self . quoted_text = False self . line_comment = False self . is_self_delimiting = False return self
Appends the context s pending_symbol to its annotations sequence .
121
14
232,886
def set_field_name ( self ) : assert self . pending_symbol is not None assert not self . value self . field_name = _as_symbol ( self . pending_symbol , is_symbol_value = False ) # pending_symbol becomes field name self . pending_symbol = None # reset pending symbol self . quoted_text = False self . line_comment = False self . is_self_delimiting = False return self
Sets the context s pending_symbol as its field_name .
100
15
232,887
def set_pending_symbol ( self , pending_symbol = None ) : if pending_symbol is None : pending_symbol = CodePointArray ( ) self . value = bytearray ( ) # reset value self . pending_symbol = pending_symbol self . line_comment = False return self
Sets the context s pending_symbol with the given unicode sequence and resets the context s value .
70
23
232,888
def _write_base ( buf , value , bits_per_octet , end_bit = 0 , sign_bit = 0 , is_signed = False ) : if value == 0 : buf . append ( sign_bit | end_bit ) return 1 num_bits = bit_length ( value ) num_octets = num_bits // bits_per_octet # 'remainder' is the number of value bits in the first octet. remainder = num_bits % bits_per_octet if remainder != 0 or is_signed : # If signed, the first octet has one fewer bit available, requiring another octet. num_octets += 1 else : # This ensures that unsigned values that fit exactly are not shifted too far. remainder = bits_per_octet for i in range ( num_octets ) : octet = 0 if i == 0 : octet |= sign_bit if i == num_octets - 1 : octet |= end_bit # 'remainder' is used for alignment such that only the first octet # may contain insignificant zeros. octet |= ( ( value >> ( num_bits - ( remainder + bits_per_octet * i ) ) ) & _OCTET_MASKS [ bits_per_octet ] ) buf . append ( octet ) return num_octets
Write a field to the provided buffer .
293
8
232,889
def record ( * fields ) : @ six . add_metaclass ( _RecordMetaClass ) class RecordType ( object ) : _record_sentinel = True _record_fields = fields return RecordType
Constructs a type that can be extended to create immutable value types .
44
14
232,890
def coroutine ( func ) : def wrapper ( * args , * * kwargs ) : gen = func ( * args , * * kwargs ) val = next ( gen ) if val != None : raise TypeError ( 'Unexpected value from start of coroutine' ) return gen wrapper . __name__ = func . __name__ wrapper . __doc__ = func . __doc__ return wrapper
Wraps a PEP - 342 enhanced generator in a way that avoids boilerplate of the priming call to next .
85
24
232,891
def derive_field_name ( self , field_name ) : cls = type ( self ) # We use ordinals to avoid thunk materialization. return cls ( self [ 0 ] , self [ 1 ] , self [ 2 ] , field_name , self [ 4 ] , self [ 5 ] )
Derives a new event from this one setting the field_name attribute .
66
15
232,892
def derive_annotations ( self , annotations ) : cls = type ( self ) # We use ordinals to avoid thunk materialization. return cls ( self [ 0 ] , self [ 1 ] , self [ 2 ] , self [ 3 ] , annotations , self [ 5 ] )
Derives a new event from this one setting the annotations attribute .
61
13
232,893
def derive_value ( self , value ) : return IonEvent ( self . event_type , self . ion_type , value , self . field_name , self . annotations , self . depth )
Derives a new event from this one setting the value attribute .
42
13
232,894
def derive_depth ( self , depth ) : cls = type ( self ) # We use ordinals to avoid thunk materialization. return cls ( self [ 0 ] , self [ 1 ] , self [ 2 ] , self [ 3 ] , self [ 4 ] , depth )
Derives a new event from this one setting the depth attribute .
60
13
232,895
def adjust_from_utc_fields ( * args , * * kwargs ) : raw_ts = Timestamp ( * args , * * kwargs ) offset = raw_ts . utcoffset ( ) if offset is None or offset == timedelta ( ) : return raw_ts # XXX This returns a datetime, not a Timestamp (which has our precision if defined) adjusted = raw_ts + offset if raw_ts . precision is None : # No precision means we can just return a regular datetime return adjusted return Timestamp ( adjusted . year , adjusted . month , adjusted . day , adjusted . hour , adjusted . minute , adjusted . second , adjusted . microsecond , raw_ts . tzinfo , precision = raw_ts . precision , fractional_precision = raw_ts . fractional_precision )
Constructs a timestamp from UTC fields adjusted to the local offset if given .
179
15
232,896
def raw_writer ( indent = None ) : is_whitespace_str = isinstance ( indent , str ) and re . search ( r'\A\s*\Z' , indent , re . M ) is not None if not ( indent is None or is_whitespace_str ) : raise ValueError ( 'The indent parameter must either be None or a string containing only whitespace' ) indent_bytes = six . b ( indent ) if isinstance ( indent , str ) else indent return writer_trampoline ( _raw_writer_coroutine ( indent = indent_bytes ) )
Returns a raw text writer co - routine .
129
9
232,897
def writer_trampoline ( start ) : trans = Transition ( None , start ) while True : ion_event = ( yield trans . event ) if trans . event is None : if ion_event is None : raise TypeError ( 'Cannot start Writer with no event' ) else : if trans . event . type is WriteEventType . HAS_PENDING and ion_event is not None : raise TypeError ( 'Writer expected to receive no event: %r' % ( ion_event , ) ) if trans . event . type is not WriteEventType . HAS_PENDING and ion_event is None : raise TypeError ( 'Writer expected to receive event' ) if ion_event is not None and ion_event . event_type is IonEventType . INCOMPLETE : raise TypeError ( 'Writer cannot receive INCOMPLETE event' ) trans = trans . delegate . send ( Transition ( ion_event , trans . delegate ) )
Provides the co - routine trampoline for a writer state machine .
201
15
232,898
def _drain ( writer , ion_event ) : result_event = _WRITE_EVENT_HAS_PENDING_EMPTY while result_event . type is WriteEventType . HAS_PENDING : result_event = writer . send ( ion_event ) ion_event = None yield result_event
Drain the writer of its pending write events .
70
10
232,899
def blocking_writer ( writer , output ) : result_type = None while True : ion_event = ( yield result_type ) for result_event in _drain ( writer , ion_event ) : output . write ( result_event . data ) result_type = result_event . type
Provides an implementation of using the writer co - routine with a file - like object .
63
18