idx int64 0 63k | question stringlengths 61 4.03k | target stringlengths 6 1.23k |
|---|---|---|
44,900 | def resolve ( self , name , version , max_id ) : if not isinstance ( name , six . text_type ) : raise TypeError ( 'Name must be a Unicode sequence: %r' % name ) if not isinstance ( version , int ) : raise TypeError ( 'Version must be an int: %r' % version ) if version <= 0 : raise ValueError ( 'Version must be positive: %s' % version ) if max_id is not None and max_id < 0 : raise ValueError ( 'Max ID must be zero or positive: %s' % max_id ) versions = self . __tables . get ( name ) if versions is None : if max_id is None : raise CannotSubstituteTable ( 'Found no table for %s, but no max_id' % name ) return placeholder_symbol_table ( name , version , max_id ) table = versions . get ( version ) if table is None : keys = list ( versions ) keys . sort ( ) table = versions [ keys [ - 1 ] ] if table . version == version and ( max_id is None or table . max_id == max_id ) : return table if max_id is None : raise CannotSubstituteTable ( 'Found match for %s, but not version %d, and no max_id' % ( name , version ) ) return substitute_symbol_table ( table , version , max_id ) | Resolves the table for a given name and version . |
44,901 | def start_container ( self ) : self . __container_lengths . append ( self . current_container_length ) self . current_container_length = 0 new_container_node = _Node ( ) self . __container_node . add_child ( new_container_node ) self . __container_nodes . append ( self . __container_node ) self . __container_node = new_container_node | Add a node to the tree that represents the start of a container . |
44,902 | def end_container ( self , header_buf ) : if not self . __container_nodes : raise ValueError ( "Attempted to end container with none active." ) self . __container_node . add_leaf ( _Node ( header_buf ) ) self . __container_node = self . __container_nodes . pop ( ) parent_container_length = self . __container_lengths . pop ( ) self . current_container_length = parent_container_length + self . current_container_length + len ( header_buf ) | Add a node containing the container s header to the current subtree . |
44,903 | def add_scalar_value ( self , value_buf ) : self . __container_node . add_child ( _Node ( value_buf ) ) self . current_container_length += len ( value_buf ) | Add a node to the tree containing a scalar value . |
44,904 | def drain ( self ) : if self . __container_nodes : raise ValueError ( "Attempted to drain without ending all containers." ) for buf in self . __depth_traverse ( self . __root ) : if buf is not None : yield buf self . __reset ( ) | Walk the BufferTree and reset it when finished . |
44,905 | def ion_equals ( a , b , timestamps_instants_only = False ) : if timestamps_instants_only : return _ion_equals_timestamps_instants ( a , b ) return _ion_equals_timestamps_data_model ( a , b ) | Tests two objects for equivalence under the Ion data model . |
44,906 | def _ion_equals ( a , b , timestamp_comparison_func , recursive_comparison_func ) : for a , b in ( ( a , b ) , ( b , a ) ) : if isinstance ( a , _IonNature ) : if isinstance ( b , _IonNature ) : eq = a . ion_type is b . ion_type and _annotations_eq ( a , b ) else : eq = not a . ion_annotations if eq : if isinstance ( a , IonPyList ) : return _sequences_eq ( a , b , recursive_comparison_func ) elif isinstance ( a , IonPyDict ) : return _structs_eq ( a , b , recursive_comparison_func ) elif isinstance ( a , IonPyTimestamp ) : return timestamp_comparison_func ( a , b ) elif isinstance ( a , IonPyNull ) : return isinstance ( b , IonPyNull ) or ( b is None and a . ion_type is IonType . NULL ) elif isinstance ( a , IonPySymbol ) or ( isinstance ( a , IonPyText ) and a . ion_type is IonType . SYMBOL ) : return _symbols_eq ( a , b ) elif isinstance ( a , IonPyDecimal ) : return _decimals_eq ( a , b ) elif isinstance ( a , IonPyFloat ) : return _floats_eq ( a , b ) else : return a == b return False for a , b in ( ( a , b ) , ( b , a ) ) : if isinstance ( a , list ) : return _sequences_eq ( a , b , recursive_comparison_func ) elif isinstance ( a , dict ) : return _structs_eq ( a , b , recursive_comparison_func ) elif isinstance ( a , datetime ) : return timestamp_comparison_func ( a , b ) elif isinstance ( a , SymbolToken ) : return _symbols_eq ( a , b ) elif isinstance ( a , Decimal ) : return _decimals_eq ( a , b ) elif isinstance ( a , float ) : return _floats_eq ( a , b ) return a == b | Compares a and b according to the description of the ion_equals method . |
44,907 | def _timestamps_eq ( a , b ) : assert isinstance ( a , datetime ) if not isinstance ( b , datetime ) : return False if ( a . tzinfo is None ) ^ ( b . tzinfo is None ) : return False if a . utcoffset ( ) != b . utcoffset ( ) : return False for a , b in ( ( a , b ) , ( b , a ) ) : if isinstance ( a , Timestamp ) : if isinstance ( b , Timestamp ) : if a . precision is b . precision and a . fractional_precision is b . fractional_precision : break return False elif a . precision is not TimestampPrecision . SECOND or a . fractional_precision != MICROSECOND_PRECISION : return False return a == b | Compares two timestamp operands for equivalence under the Ion data model . |
44,908 | def _timestamp_instants_eq ( a , b ) : assert isinstance ( a , datetime ) if not isinstance ( b , datetime ) : return False if a . tzinfo is None : a = a . replace ( tzinfo = OffsetTZInfo ( ) ) if b . tzinfo is None : b = b . replace ( tzinfo = OffsetTZInfo ( ) ) return a == b | Compares two timestamp operands for point - in - time equivalence only . |
44,909 | def _parse_var_int_components ( buf , signed ) : value = 0 sign = 1 while True : ch = buf . read ( 1 ) if ch == '' : raise IonException ( 'Variable integer under-run' ) octet = ord ( ch ) if signed : if octet & _VAR_INT_SIGN_MASK : sign = - 1 value = octet & _VAR_INT_SIGN_VALUE_MASK signed = False else : value <<= _VAR_INT_VALUE_BITS value |= octet & _VAR_INT_VALUE_MASK if octet & _VAR_INT_SIGNAL_MASK : break return sign , value | Parses a VarInt or VarUInt field from a file - like object . |
44,910 | def _parse_signed_int_components ( buf ) : sign_bit = 0 value = 0 first = True while True : ch = buf . read ( 1 ) if ch == b'' : break octet = ord ( ch ) if first : if octet & _SIGNED_INT_SIGN_MASK : sign_bit = 1 value = octet & _SIGNED_INT_SIGN_VALUE_MASK first = False else : value <<= 8 value |= octet return sign_bit , value | Parses the remainder of a file - like object as a signed magnitude value . |
44,911 | def _parse_decimal ( buf ) : exponent = _parse_var_int ( buf , signed = True ) sign_bit , coefficient = _parse_signed_int_components ( buf ) if coefficient == 0 : value = Decimal ( ( sign_bit , ( 0 , ) , exponent ) ) else : coefficient *= sign_bit and - 1 or 1 value = Decimal ( coefficient ) . scaleb ( exponent ) return value | Parses the remainder of a file - like object as a decimal . |
44,912 | def _create_delegate_handler ( delegate ) : @ coroutine def handler ( * args ) : yield yield delegate . send ( Transition ( args , delegate ) ) return handler | Creates a handler function that creates a co - routine that can yield once with the given positional arguments to the delegate as a transition . |
44,913 | def _var_uint_field_handler ( handler , ctx ) : _ , self = yield queue = ctx . queue value = 0 while True : if len ( queue ) == 0 : yield ctx . read_data_transition ( 1 , self ) octet = queue . read_byte ( ) value <<= _VAR_INT_VALUE_BITS value |= octet & _VAR_INT_VALUE_MASK if octet & _VAR_INT_SIGNAL_MASK : break yield ctx . immediate_transition ( handler ( value , ctx ) ) | Handler co - routine for variable unsigned integer fields that . |
44,914 | def _length_scalar_handler ( scalar_factory , ion_type , length , ctx ) : _ , self = yield if length == 0 : data = b'' else : yield ctx . read_data_transition ( length , self ) data = ctx . queue . read ( length ) scalar = scalar_factory ( data ) event_cls = IonEvent if callable ( scalar ) : event_cls = IonThunkEvent yield ctx . event_transition ( event_cls , IonEventType . SCALAR , ion_type , scalar ) | Handles scalars scalar_factory is a function that returns a value or thunk . |
44,915 | def _annotation_handler ( ion_type , length , ctx ) : _ , self = yield self_handler = _create_delegate_handler ( self ) if ctx . annotations is not None : raise IonException ( 'Annotation cannot be nested in annotations' ) ctx = ctx . derive_container_context ( length , add_depth = 0 ) ( ann_length , _ ) , _ = yield ctx . immediate_transition ( _var_uint_field_handler ( self_handler , ctx ) ) if ann_length < 1 : raise IonException ( 'Invalid annotation length subfield; annotation wrapper must have at least one annotation.' ) yield ctx . read_data_transition ( ann_length , self ) ann_data = ctx . queue . read ( ann_length ) annotations = tuple ( _parse_sid_iter ( ann_data ) ) if ctx . limit - ctx . queue . position < 1 : raise IonException ( 'Incorrect annotation wrapper length.' ) yield ctx . immediate_transition ( _start_type_handler ( ctx . field_name , ctx . whence , ctx , annotations = annotations ) ) | Handles annotations . ion_type is ignored . |
44,916 | def _ordered_struct_start_handler ( handler , ctx ) : _ , self = yield self_handler = _create_delegate_handler ( self ) ( length , _ ) , _ = yield ctx . immediate_transition ( _var_uint_field_handler ( self_handler , ctx ) ) if length < 2 : raise IonException ( 'Ordered structs (type ID 0xD1) must have at least one field name/value pair.' ) yield ctx . immediate_transition ( handler ( length , ctx ) ) | Handles the special case of ordered structs specified by the type ID 0xD1 . |
44,917 | def _container_start_handler ( ion_type , length , ctx ) : _ , self = yield container_ctx = ctx . derive_container_context ( length ) if ctx . annotations and ctx . limit != container_ctx . limit : raise IonException ( 'Incorrect annotation wrapper length.' ) delegate = _container_handler ( ion_type , container_ctx ) yield ctx . event_transition ( IonEvent , IonEventType . CONTAINER_START , ion_type , value = None , whence = delegate ) | Handles container delegation . |
44,918 | def _bind_length_handlers ( tids , user_handler , lns ) : for tid in tids : for ln in lns : type_octet = _gen_type_octet ( tid , ln ) ion_type = _TID_VALUE_TYPE_TABLE [ tid ] if ln == 1 and ion_type is IonType . STRUCT : handler = partial ( _ordered_struct_start_handler , partial ( user_handler , ion_type ) ) elif ln < _LENGTH_FIELD_FOLLOWS : handler = partial ( user_handler , ion_type , ln ) else : handler = partial ( _var_uint_field_handler , partial ( user_handler , ion_type ) ) _HANDLER_DISPATCH_TABLE [ type_octet ] = handler | Binds a set of handlers with the given factory . |
44,919 | def _bind_length_scalar_handlers ( tids , scalar_factory , lns = _NON_ZERO_LENGTH_LNS ) : handler = partial ( _length_scalar_handler , scalar_factory ) return _bind_length_handlers ( tids , handler , lns ) | Binds a set of scalar handlers for an inclusive range of low - nibble values . |
44,920 | def remaining ( self ) : if self . depth == 0 : return _STREAM_REMAINING return self . limit - self . queue . position | Determines how many bytes are remaining in the current context . |
44,921 | def read_data_transition ( self , length , whence = None , skip = False , stream_event = ION_STREAM_INCOMPLETE_EVENT ) : if whence is None : whence = self . whence return Transition ( None , _read_data_handler ( length , whence , self , skip , stream_event ) ) | Returns an immediate event_transition to read a specified number of bytes . |
44,922 | def _narrow_unichr ( code_point ) : try : if len ( code_point . char ) > 1 : return code_point . char except AttributeError : pass return six . unichr ( code_point ) | Retrieves the unicode character representing any given code point in a way that won t break on narrow builds . |
44,923 | def reader_trampoline ( start , allow_flush = False ) : data_event = yield if data_event is None or data_event . type is not ReadEventType . NEXT : raise TypeError ( 'Reader must be started with NEXT' ) trans = Transition ( None , start ) while True : trans = trans . delegate . send ( Transition ( data_event , trans . delegate ) ) data_event = None if trans . event is not None : data_event = ( yield trans . event ) if trans . event . event_type . is_stream_signal : if data_event . type is not ReadEventType . DATA : if not allow_flush or not ( trans . event . event_type is IonEventType . INCOMPLETE and data_event . type is ReadEventType . NEXT ) : raise TypeError ( 'Reader expected data: %r' % ( data_event , ) ) else : if data_event . type is ReadEventType . DATA : raise TypeError ( 'Reader did not expect data' ) if data_event . type is ReadEventType . DATA and len ( data_event . data ) == 0 : raise ValueError ( 'Empty data not allowed' ) if trans . event . depth == 0 and trans . event . event_type is not IonEventType . CONTAINER_START and data_event . type is ReadEventType . SKIP : raise TypeError ( 'Cannot skip at the top-level' ) | Provides the co - routine trampoline for a reader state machine . |
44,924 | def blocking_reader ( reader , input , buffer_size = _DEFAULT_BUFFER_SIZE ) : ion_event = None while True : read_event = ( yield ion_event ) ion_event = reader . send ( read_event ) while ion_event is not None and ion_event . event_type . is_stream_signal : data = input . read ( buffer_size ) if len ( data ) == 0 : if ion_event . event_type is IonEventType . INCOMPLETE : ion_event = reader . send ( NEXT_EVENT ) continue else : yield ION_STREAM_END_EVENT return ion_event = reader . send ( read_data_event ( data ) ) | Provides an implementation of using the reader co - routine with a file - like object . |
44,925 | def read ( self , length , skip = False ) : if length > self . __size : raise IndexError ( 'Cannot pop %d bytes, %d bytes in buffer queue' % ( length , self . __size ) ) self . position += length self . __size -= length segments = self . __segments offset = self . __offset data = self . __data_cls ( ) while length > 0 : segment = segments [ 0 ] segment_off = offset segment_len = len ( segment ) segment_rem = segment_len - segment_off segment_read_len = min ( segment_rem , length ) if segment_off == 0 and segment_read_len == segment_rem : if skip : segment_slice = self . __element_type ( ) else : segment_slice = segment else : if skip : segment_slice = self . __element_type ( ) else : segment_slice = segment [ segment_off : segment_off + segment_read_len ] offset = 0 segment_off += segment_read_len if segment_off == segment_len : segments . popleft ( ) self . __offset = 0 else : self . __offset = segment_off if length <= segment_rem and len ( data ) == 0 : return segment_slice data . extend ( segment_slice ) length -= segment_read_len if self . is_unicode : return data . as_text ( ) else : return data | Consumes the first length bytes from the accumulator . |
44,926 | def unread ( self , c ) : if self . position < 1 : raise IndexError ( 'Cannot unread an empty buffer queue.' ) if isinstance ( c , six . text_type ) : if not self . is_unicode : BufferQueue . _incompatible_types ( self . is_unicode , c ) else : c = self . __chr ( c ) num_code_units = self . is_unicode and len ( c ) or 1 if self . __offset == 0 : if num_code_units == 1 and six . PY3 : if self . is_unicode : segment = c else : segment = six . int2byte ( c ) else : segment = c self . __segments . appendleft ( segment ) else : self . __offset -= num_code_units def verify ( ch , idx ) : existing = self . __segments [ 0 ] [ self . __offset + idx ] if existing != ch : raise ValueError ( 'Attempted to unread %s when %s was expected.' % ( ch , existing ) ) if num_code_units == 1 : verify ( c , 0 ) else : for i in range ( num_code_units ) : verify ( c [ i ] , i ) self . __size += num_code_units self . position -= num_code_units | Unread the given character byte or code point . |
44,927 | def skip ( self , length ) : if length >= self . __size : skip_amount = self . __size rem = length - skip_amount self . __segments . clear ( ) self . __offset = 0 self . __size = 0 self . position += skip_amount else : rem = 0 self . read ( length , skip = True ) return rem | Removes length bytes and returns the number length still required to skip |
44,928 | def managed_reader ( reader , catalog = None ) : if catalog is None : catalog = SymbolTableCatalog ( ) ctx = _ManagedContext ( catalog ) symbol_trans = Transition ( None , None ) ion_event = None while True : if symbol_trans . delegate is not None and ion_event is not None and not ion_event . event_type . is_stream_signal : delegate = symbol_trans . delegate symbol_trans = delegate . send ( Transition ( ion_event , delegate ) ) if symbol_trans . delegate is None : ctx = symbol_trans . event data_event = NEXT_EVENT else : data_event = symbol_trans . event else : data_event = None if ion_event is not None : event_type = ion_event . event_type ion_type = ion_event . ion_type depth = ion_event . depth if depth == 0 : if event_type is IonEventType . VERSION_MARKER : if ion_event != ION_VERSION_MARKER_EVENT : raise IonException ( 'Invalid IVM: %s' % ( ion_event , ) ) ctx = _ManagedContext ( ctx . catalog ) data_event = NEXT_EVENT elif ion_type is IonType . SYMBOL and len ( ion_event . annotations ) == 0 and ion_event . value is not None and ctx . resolve ( ion_event . value ) . text == TEXT_ION_1_0 : assert symbol_trans . delegate is None data_event = NEXT_EVENT elif event_type is IonEventType . CONTAINER_START and ion_type is IonType . STRUCT and ctx . has_symbol_table_annotation ( ion_event . annotations ) : assert symbol_trans . delegate is None delegate = _local_symbol_table_handler ( ctx ) symbol_trans = Transition ( None , delegate ) data_event = NEXT_EVENT if data_event is None : if ion_event is not None : ion_event = _managed_thunk_event ( ctx , ion_event ) data_event = yield ion_event ion_event = reader . send ( data_event ) | Managed reader wrapping another reader . |
44,929 | def _illegal_character ( c , ctx , message = '' ) : container_type = ctx . container . ion_type is None and 'top-level' or ctx . container . ion_type . name value_type = ctx . ion_type is None and 'unknown' or ctx . ion_type . name if c is None : header = 'Illegal token' else : c = 'EOF' if BufferQueue . is_eof ( c ) else _chr ( c ) header = 'Illegal character %s' % ( c , ) raise IonException ( '%s at position %d in %s value contained in %s. %s Pending value: %s' % ( header , ctx . queue . position , value_type , container_type , message , ctx . value ) ) | Raises an IonException upon encountering the given illegal character in the given context . |
44,930 | def _defaultdict ( dct , fallback = _illegal_character ) : out = defaultdict ( lambda : fallback ) for k , v in six . iteritems ( dct ) : out [ k ] = v return out | Wraps the given dictionary such that the given fallback function will be called when a nonexistent key is accessed . |
44,931 | def _number_negative_start_handler ( c , ctx ) : assert c == _MINUS assert len ( ctx . value ) == 0 ctx . set_ion_type ( IonType . INT ) ctx . value . append ( c ) c , _ = yield yield ctx . immediate_transition ( _NEGATIVE_TABLE [ c ] ( c , ctx ) ) | Handles numeric values that start with a negative sign . Branches to delegate co - routines according to _NEGATIVE_TABLE . |
44,932 | def _number_zero_start_handler ( c , ctx ) : assert c == _ZERO assert len ( ctx . value ) == 0 or ( len ( ctx . value ) == 1 and ctx . value [ 0 ] == _MINUS ) ctx . set_ion_type ( IonType . INT ) ctx . value . append ( c ) c , _ = yield if _ends_value ( c ) : trans = ctx . event_transition ( IonThunkEvent , IonEventType . SCALAR , ctx . ion_type , _parse_decimal_int ( ctx . value ) ) if c == _SLASH : trans = ctx . immediate_transition ( _number_slash_end_handler ( c , ctx , trans ) ) yield trans yield ctx . immediate_transition ( _ZERO_START_TABLE [ c ] ( c , ctx ) ) | Handles numeric values that start with zero or negative zero . Branches to delegate co - routines according to _ZERO_START_TABLE . |
44,933 | def _number_or_timestamp_handler ( c , ctx ) : assert c in _DIGITS ctx . set_ion_type ( IonType . INT ) val = ctx . value val . append ( c ) c , self = yield trans = ctx . immediate_transition ( self ) while True : if _ends_value ( c ) : trans = ctx . event_transition ( IonThunkEvent , IonEventType . SCALAR , ctx . ion_type , _parse_decimal_int ( ctx . value ) ) if c == _SLASH : trans = ctx . immediate_transition ( _number_slash_end_handler ( c , ctx , trans ) ) else : if c not in _DIGITS : trans = ctx . immediate_transition ( _NUMBER_OR_TIMESTAMP_TABLE [ c ] ( c , ctx ) ) else : val . append ( c ) c , _ = yield trans | Handles numeric values that start with digits 1 - 9 . May terminate a value in which case that value is an int . If it does not terminate a value it branches to delegate co - routines according to _NUMBER_OR_TIMESTAMP_TABLE . |
44,934 | def _number_slash_end_handler ( c , ctx , event ) : assert c == _SLASH c , self = yield next_ctx = ctx . derive_child_context ( ctx . whence ) comment = _comment_handler ( _SLASH , next_ctx , next_ctx . whence ) comment . send ( ( c , comment ) ) yield _CompositeTransition ( event , ctx , comment , next_ctx , initialize_handler = False ) | Handles numeric values that end in a forward slash . This is only legal if the slash begins a comment ; thus this co - routine either results in an error being raised or an event being yielded . |
44,935 | def _exponent_handler_factory ( ion_type , exp_chars , parse_func , first_char = None ) : def transition ( prev , c , ctx , trans ) : if c in _SIGN and prev in exp_chars : ctx . value . append ( c ) else : _illegal_character ( c , ctx ) return trans illegal = exp_chars + _SIGN return _numeric_handler_factory ( _DIGITS , transition , lambda c , ctx : c in exp_chars , illegal , parse_func , illegal_at_end = illegal , ion_type = ion_type , first_char = first_char ) | Generates a handler co - routine which tokenizes an numeric exponent . |
44,936 | def _coefficient_handler_factory ( trans_table , parse_func , assertion = lambda c , ctx : True , ion_type = None , append_first_if_not = None ) : def transition ( prev , c , ctx , trans ) : if prev == _UNDERSCORE : _illegal_character ( c , ctx , 'Underscore before %s.' % ( _chr ( c ) , ) ) return ctx . immediate_transition ( trans_table [ c ] ( c , ctx ) ) return _numeric_handler_factory ( _DIGITS , transition , assertion , ( _DOT , ) , parse_func , ion_type = ion_type , append_first_if_not = append_first_if_not ) | Generates a handler co - routine which tokenizes a numeric coefficient . |
44,937 | def _radix_int_handler_factory ( radix_indicators , charset , parse_func ) : def assertion ( c , ctx ) : return c in radix_indicators and ( ( len ( ctx . value ) == 1 and ctx . value [ 0 ] == _ZERO ) or ( len ( ctx . value ) == 2 and ctx . value [ 0 ] == _MINUS and ctx . value [ 1 ] == _ZERO ) ) and ctx . ion_type == IonType . INT return _numeric_handler_factory ( charset , lambda prev , c , ctx , trans : _illegal_character ( c , ctx ) , assertion , radix_indicators , parse_func , illegal_at_end = radix_indicators ) | Generates a handler co - routine which tokenizes a integer of a particular radix . |
44,938 | def _timestamp_zero_start_handler ( c , ctx ) : val = ctx . value ctx . set_ion_type ( IonType . TIMESTAMP ) if val [ 0 ] == _MINUS : _illegal_character ( c , ctx , 'Negative year not allowed.' ) val . append ( c ) c , self = yield trans = ctx . immediate_transition ( self ) while True : if c in _TIMESTAMP_YEAR_DELIMITERS : trans = ctx . immediate_transition ( _timestamp_handler ( c , ctx ) ) elif c in _DIGITS : val . append ( c ) else : _illegal_character ( c , ctx ) c , _ = yield trans | Handles numeric values that start with a zero followed by another digit . This is either a timestamp or an error . |
44,939 | def _parse_timestamp ( tokens ) : def parse ( ) : precision = TimestampPrecision . YEAR off_hour = tokens [ _TimestampState . OFF_HOUR ] off_minutes = tokens [ _TimestampState . OFF_MINUTE ] microsecond = None fraction_digits = None if off_hour is not None : assert off_minutes is not None off_sign = - 1 if _MINUS in off_hour else 1 off_hour = int ( off_hour ) off_minutes = int ( off_minutes ) * off_sign if off_sign == - 1 and off_hour == 0 and off_minutes == 0 : off_hour = None off_minutes = None else : assert off_minutes is None year = tokens [ _TimestampState . YEAR ] assert year is not None year = int ( year ) month = tokens [ _TimestampState . MONTH ] if month is None : month = 1 else : month = int ( month ) precision = TimestampPrecision . MONTH day = tokens [ _TimestampState . DAY ] if day is None : day = 1 else : day = int ( day ) precision = TimestampPrecision . DAY hour = tokens [ _TimestampState . HOUR ] minute = tokens [ _TimestampState . MINUTE ] if hour is None : assert minute is None hour = 0 minute = 0 else : assert minute is not None hour = int ( hour ) minute = int ( minute ) precision = TimestampPrecision . MINUTE second = tokens [ _TimestampState . SECOND ] if second is None : second = 0 else : second = int ( second ) precision = TimestampPrecision . SECOND fraction = tokens [ _TimestampState . FRACTIONAL ] if fraction is not None : fraction_digits = len ( fraction ) if fraction_digits > MICROSECOND_PRECISION : for digit in fraction [ MICROSECOND_PRECISION : ] : if digit != _ZERO : raise ValueError ( 'Only six significant digits supported in timestamp fractional. Found %s.' % ( fraction , ) ) fraction_digits = MICROSECOND_PRECISION fraction = fraction [ 0 : MICROSECOND_PRECISION ] else : fraction . 
extend ( _ZEROS [ MICROSECOND_PRECISION - fraction_digits ] ) microsecond = int ( fraction ) return timestamp ( year , month , day , hour , minute , second , microsecond , off_hour , off_minutes , precision = precision , fractional_precision = fraction_digits ) return parse | Parses each token in the given _TimestampTokens and marshals the numeric components into a Timestamp . |
44,940 | def _comment_handler ( c , ctx , whence ) : assert c == _SLASH c , self = yield if c == _SLASH : ctx . set_line_comment ( ) block_comment = False elif c == _ASTERISK : if ctx . line_comment : ctx . set_line_comment ( False ) block_comment = True else : _illegal_character ( c , ctx , 'Illegal character sequence "/%s".' % ( _chr ( c ) , ) ) done = False prev = None trans = ctx . immediate_transition ( self ) while not done : c , _ = yield trans if block_comment : if prev == _ASTERISK and c == _SLASH : done = True prev = c else : if c in _NEWLINES or BufferQueue . is_eof ( c ) : done = True yield ctx . set_self_delimiting ( True ) . immediate_transition ( whence ) | Handles comments . Upon completion of the comment immediately transitions back to whence . |
44,941 | def _sexp_slash_handler ( c , ctx , whence = None , pending_event = None ) : assert c == _SLASH if whence is None : whence = ctx . whence c , self = yield ctx . queue . unread ( c ) if c == _ASTERISK or c == _SLASH : yield ctx . immediate_transition ( _comment_handler ( _SLASH , ctx , whence ) ) else : if pending_event is not None : assert pending_event . event is not None yield _CompositeTransition ( pending_event , ctx , partial ( _operator_symbol_handler , _SLASH ) ) yield ctx . immediate_transition ( _operator_symbol_handler ( _SLASH , ctx ) ) | Handles the special case of a forward - slash within an s - expression . This is either an operator or a comment . |
44,942 | def _typed_null_handler ( c , ctx ) : assert c == _DOT c , self = yield nxt = _NULL_STARTS i = 0 length = None done = False trans = ctx . immediate_transition ( self ) while True : if done : if _ends_value ( c ) or ( ctx . container . ion_type is IonType . SEXP and c in _OPERATORS ) : trans = ctx . event_transition ( IonEvent , IonEventType . SCALAR , nxt . ion_type , None ) else : _illegal_character ( c , ctx , 'Illegal null type.' ) elif length is None : if c not in nxt : _illegal_character ( c , ctx , 'Illegal null type.' ) nxt = nxt [ c ] if isinstance ( nxt , _NullSequence ) : length = len ( nxt . sequence ) else : if c != nxt [ i ] : _illegal_character ( c , ctx , 'Illegal null type.' ) i += 1 done = i == length c , _ = yield trans | Handles typed null values . Entered once null . has been found . |
44,943 | def _inf_or_operator_handler_factory ( c_start , is_delegate = True ) : @ coroutine def inf_or_operator_handler ( c , ctx ) : next_ctx = None if not is_delegate : ctx . value . append ( c_start ) c , self = yield else : assert ctx . value [ 0 ] == c_start assert c not in _DIGITS ctx . queue . unread ( c ) next_ctx = ctx _ , self = yield assert c == _ maybe_inf = True ctx . set_ion_type ( IonType . FLOAT ) match_index = 0 trans = ctx . immediate_transition ( self ) while True : if maybe_inf : if match_index < len ( _INF_SUFFIX ) : maybe_inf = c == _INF_SUFFIX [ match_index ] else : if _ends_value ( c ) or ( ctx . container . ion_type is IonType . SEXP and c in _OPERATORS ) : yield ctx . event_transition ( IonEvent , IonEventType . SCALAR , IonType . FLOAT , c_start == _MINUS and _NEG_INF or _POS_INF ) else : maybe_inf = False if maybe_inf : match_index += 1 else : ctx . set_unicode ( ) if match_index > 0 : next_ctx = ctx . derive_child_context ( ctx . whence ) for ch in _INF_SUFFIX [ 0 : match_index ] : next_ctx . value . append ( ch ) break c , self = yield trans if ctx . container is not _C_SEXP : _illegal_character ( c , next_ctx is None and ctx or next_ctx , 'Illegal character following %s.' % ( _chr ( c_start ) , ) ) if match_index == 0 : if c in _OPERATORS : yield ctx . immediate_transition ( _operator_symbol_handler ( c , ctx ) ) yield ctx . event_transition ( IonEvent , IonEventType . SCALAR , IonType . SYMBOL , ctx . value . as_symbol ( ) ) yield _CompositeTransition ( ctx . event_transition ( IonEvent , IonEventType . SCALAR , IonType . SYMBOL , ctx . value . as_symbol ( ) ) , ctx , partial ( _unquoted_symbol_handler , c ) , next_ctx ) return inf_or_operator_handler | Generates handler co - routines for values that may be + inf or - inf . |
44,944 | def _operator_symbol_handler ( c , ctx ) : assert c in _OPERATORS ctx . set_unicode ( ) val = ctx . value val . append ( c ) c , self = yield trans = ctx . immediate_transition ( self ) while c in _OPERATORS : val . append ( c ) c , _ = yield trans yield ctx . event_transition ( IonEvent , IonEventType . SCALAR , IonType . SYMBOL , val . as_symbol ( ) ) | Handles operator symbol values within s - expressions . |
44,945 | def _symbol_token_end ( c , ctx , is_field_name , value = None ) : if value is None : value = ctx . value if is_field_name or c in _SYMBOL_TOKEN_TERMINATORS or ctx . quoted_text : ctx . set_self_delimiting ( ctx . quoted_text ) . set_pending_symbol ( value ) . set_quoted_text ( False ) trans = ctx . immediate_transition ( ctx . whence ) else : trans = ctx . event_transition ( IonEvent , IonEventType . SCALAR , IonType . SYMBOL , _as_symbol ( value ) ) return trans | Returns a transition which ends the current symbol token . |
44,946 | def _unquoted_symbol_handler ( c , ctx , is_field_name = False ) : in_sexp = ctx . container . ion_type is IonType . SEXP ctx . set_unicode ( ) if c not in _IDENTIFIER_CHARACTERS : if in_sexp and c in _OPERATORS : c_next , _ = yield ctx . queue . unread ( c_next ) assert ctx . value yield _CompositeTransition ( ctx . event_transition ( IonEvent , IonEventType . SCALAR , IonType . SYMBOL , ctx . value . as_symbol ( ) ) , ctx , partial ( _operator_symbol_handler , c ) ) _illegal_character ( c , ctx . set_ion_type ( IonType . SYMBOL ) ) val = ctx . value val . append ( c ) prev = c c , self = yield trans = ctx . immediate_transition ( self ) while True : if c not in _WHITESPACE : if prev in _WHITESPACE or _ends_value ( c ) or c == _COLON or ( in_sexp and c in _OPERATORS ) : break if c not in _IDENTIFIER_CHARACTERS : _illegal_character ( c , ctx . set_ion_type ( IonType . SYMBOL ) ) val . append ( c ) prev = c c , _ = yield trans yield _symbol_token_end ( c , ctx , is_field_name ) | Handles identifier symbol tokens . If in an s - expression these may be followed without whitespace by operators . |
44,947 | def _single_quote_handler_factory ( on_single_quote , on_other ) : @ coroutine def single_quote_handler ( c , ctx , is_field_name = False ) : assert c == _SINGLE_QUOTE c , self = yield if c == _SINGLE_QUOTE and not _is_escaped ( c ) : yield on_single_quote ( c , ctx , is_field_name ) else : ctx . set_unicode ( quoted_text = True ) yield on_other ( c , ctx , is_field_name ) return single_quote_handler | Generates handlers used for classifying tokens that begin with one or more single quotes . |
44,948 | def _struct_or_lob_handler ( c , ctx ) : assert c == _OPEN_BRACE c , self = yield yield ctx . immediate_transition ( _STRUCT_OR_LOB_TABLE [ c ] ( c , ctx ) ) | Handles tokens that begin with an open brace . |
44,949 | def _lob_start_handler ( c , ctx ) : assert c == _OPEN_BRACE c , self = yield trans = ctx . immediate_transition ( self ) quotes = 0 while True : if c in _WHITESPACE : if quotes > 0 : _illegal_character ( c , ctx ) elif c == _DOUBLE_QUOTE : if quotes > 0 : _illegal_character ( c , ctx ) ctx . set_ion_type ( IonType . CLOB ) . set_unicode ( quoted_text = True ) yield ctx . immediate_transition ( _short_string_handler ( c , ctx ) ) elif c == _SINGLE_QUOTE : if not quotes : ctx . set_ion_type ( IonType . CLOB ) . set_unicode ( quoted_text = True ) quotes += 1 if quotes == 3 : yield ctx . immediate_transition ( _long_string_handler ( c , ctx ) ) else : yield ctx . immediate_transition ( _blob_end_handler ( c , ctx ) ) c , _ = yield trans | Handles tokens that begin with two open braces . |
44,950 | def _lob_end_handler_factory ( ion_type , action , validate = lambda c , ctx , action_res : None ) : assert ion_type is IonType . BLOB or ion_type is IonType . CLOB @ coroutine def lob_end_handler ( c , ctx ) : val = ctx . value prev = c action_res = None if c != _CLOSE_BRACE and c not in _WHITESPACE : action_res = action ( c , ctx , prev , action_res , True ) c , self = yield trans = ctx . immediate_transition ( self ) while True : if c in _WHITESPACE : if prev == _CLOSE_BRACE : _illegal_character ( c , ctx . set_ion_type ( ion_type ) , 'Expected }.' ) elif c == _CLOSE_BRACE : if prev == _CLOSE_BRACE : validate ( c , ctx , action_res ) break else : action_res = action ( c , ctx , prev , action_res , False ) prev = c c , _ = yield trans ctx . set_self_delimiting ( True ) yield ctx . event_transition ( IonThunkEvent , IonEventType . SCALAR , ion_type , _parse_lob ( ion_type , val ) ) return lob_end_handler | Generates handlers for the end of blob or clob values . |
44,951 | def _blob_end_handler_factory ( ) : def expand_res ( res ) : if res is None : return 0 , 0 return res def action ( c , ctx , prev , res , is_first ) : num_digits , num_pads = expand_res ( res ) if c in _BASE64_DIGITS : if prev == _CLOSE_BRACE or prev == _BASE64_PAD : _illegal_character ( c , ctx . set_ion_type ( IonType . BLOB ) ) num_digits += 1 elif c == _BASE64_PAD : if prev == _CLOSE_BRACE : _illegal_character ( c , ctx . set_ion_type ( IonType . BLOB ) ) num_pads += 1 else : _illegal_character ( c , ctx . set_ion_type ( IonType . BLOB ) ) ctx . value . append ( c ) return num_digits , num_pads def validate ( c , ctx , res ) : num_digits , num_pads = expand_res ( res ) if num_pads > 3 or ( num_digits + num_pads ) % 4 != 0 : _illegal_character ( c , ctx , 'Incorrect number of pad characters (%d) for a blob of %d base-64 digits.' % ( num_pads , num_digits ) ) return _lob_end_handler_factory ( IonType . BLOB , action , validate ) | Generates the handler for the end of a blob value . This includes the base - 64 data and the two closing braces . |
44,952 | def _clob_end_handler_factory ( ) : def action ( c , ctx , prev , res , is_first ) : if is_first and ctx . is_self_delimiting and c == _DOUBLE_QUOTE : assert c is prev return res _illegal_character ( c , ctx ) return _lob_end_handler_factory ( IonType . CLOB , action ) | Generates the handler for the end of a clob value . This includes anything from the data s closing quote through the second closing brace . |
44,953 | def _container_start_handler_factory ( ion_type , before_yield = lambda c , ctx : None ) : assert ion_type . is_container @ coroutine def container_start_handler ( c , ctx ) : before_yield ( c , ctx ) yield yield ctx . event_transition ( IonEvent , IonEventType . CONTAINER_START , ion_type , value = None ) return container_start_handler | Generates handlers for tokens that begin with container start characters . |
44,954 | def _skip_trampoline ( handler ) : data_event , self = ( yield None ) delegate = handler event = None depth = 0 while True : def pass_through ( ) : _trans = delegate . send ( Transition ( data_event , delegate ) ) return _trans , _trans . delegate , _trans . event if data_event is not None and data_event . type is ReadEventType . SKIP : while True : trans , delegate , event = pass_through ( ) if event is not None : if event . event_type is IonEventType . CONTAINER_END and event . depth <= depth : break if event is None or event . event_type is IonEventType . INCOMPLETE : data_event , _ = yield Transition ( event , self ) else : trans , delegate , event = pass_through ( ) if event is not None and ( event . event_type is IonEventType . CONTAINER_START or event . event_type is IonEventType . CONTAINER_END ) : depth = event . depth data_event , _ = yield Transition ( event , self ) | Intercepts events from container handlers emitting them only if they should not be skipped . |
44,955 | def _next_code_point_handler ( whence , ctx ) : data_event , self = yield queue = ctx . queue unicode_escapes_allowed = ctx . ion_type is not IonType . CLOB escaped_newline = False escape_sequence = b'' low_surrogate_required = False while True : if len ( queue ) == 0 : yield ctx . read_data_event ( self ) queue_iter = iter ( queue ) code_point_generator = _next_code_point_iter ( queue , queue_iter ) code_point = next ( code_point_generator ) if code_point == _BACKSLASH : escape_sequence += six . int2byte ( _BACKSLASH ) num_digits = None while True : if len ( queue ) == 0 : yield ctx . read_data_event ( self ) code_point = next ( queue_iter ) if six . indexbytes ( escape_sequence , - 1 ) == _BACKSLASH : if code_point == _ord ( b'u' ) and unicode_escapes_allowed : num_digits = 12 if low_surrogate_required else 6 low_surrogate_required = False elif low_surrogate_required : _illegal_character ( code_point , ctx , 'Unpaired high surrogate escape sequence %s.' % ( escape_sequence , ) ) elif code_point == _ord ( b'x' ) : num_digits = 4 elif code_point == _ord ( b'U' ) and unicode_escapes_allowed : num_digits = 10 elif code_point in _COMMON_ESCAPES : if code_point == _SLASH or code_point == _QUESTION_MARK : escape_sequence = b'' escape_sequence += six . int2byte ( code_point ) break elif code_point in _NEWLINES : escaped_newline = True break else : _illegal_character ( code_point , ctx , 'Invalid escape sequence \\%s.' % ( _chr ( code_point ) , ) ) escape_sequence += six . int2byte ( code_point ) else : if code_point not in _HEX_DIGITS : _illegal_character ( code_point , ctx , 'Non-hex character %s found in unicode escape.' % ( _chr ( code_point ) , ) ) escape_sequence += six . int2byte ( code_point ) if len ( escape_sequence ) == num_digits : break if not escaped_newline : decoded_escape_sequence = escape_sequence . 
decode ( 'unicode-escape' ) cp_iter = _next_code_point_iter ( decoded_escape_sequence , iter ( decoded_escape_sequence ) , to_int = ord ) code_point = next ( cp_iter ) if code_point is None : low_surrogate_required = True continue code_point = CodePoint ( code_point ) code_point . char = decoded_escape_sequence code_point . is_escaped = True ctx . set_code_point ( code_point ) yield Transition ( None , whence ) elif low_surrogate_required : _illegal_character ( code_point , ctx , 'Unpaired high surrogate escape sequence %s.' % ( escape_sequence , ) ) if code_point == _CARRIAGE_RETURN : if len ( queue ) == 0 : yield ctx . read_data_event ( self ) code_point = next ( queue_iter ) if code_point != _NEWLINE : queue . unread ( code_point ) code_point = _NEWLINE while code_point is None : yield ctx . read_data_event ( self ) code_point = next ( code_point_generator ) if escaped_newline : code_point = CodePoint ( code_point ) code_point . char = _ESCAPED_NEWLINE code_point . is_escaped = True ctx . set_code_point ( code_point ) yield Transition ( None , whence ) | Retrieves the next code point from within a quoted string or symbol . |
44,956 | def read_data_event ( self , whence , complete = False , can_flush = False ) : return Transition ( None , _read_data_handler ( whence , self , complete , can_flush ) ) | Creates a transition to a co - routine for retrieving data as bytes . |
44,957 | def set_unicode ( self , quoted_text = False ) : if isinstance ( self . value , CodePointArray ) : assert self . quoted_text == quoted_text return self self . value = CodePointArray ( self . value ) self . quoted_text = quoted_text self . line_comment = False return self | Converts the context s value to a sequence of unicode code points for holding text tokens indicating whether the text is quoted . |
44,958 | def set_quoted_text ( self , quoted_text ) : self . quoted_text = quoted_text self . line_comment = False return self | Sets the context s quoted_text flag . Useful when entering and exiting quoted text tokens . |
44,959 | def derive_container_context ( self , ion_type , whence ) : if ion_type is IonType . STRUCT : container = _C_STRUCT elif ion_type is IonType . LIST : container = _C_LIST elif ion_type is IonType . SEXP : container = _C_SEXP else : raise TypeError ( 'Cannot derive container context for non-container type %s.' % ( ion_type . name , ) ) return _HandlerContext ( container = container , queue = self . queue , field_name = self . field_name , annotations = self . annotations , depth = self . depth + 1 , whence = whence , value = None , ion_type = ion_type , pending_symbol = None ) | Derives a container context as a child of the current context . |
44,960 | def derive_child_context ( self , whence ) : return _HandlerContext ( container = self . container , queue = self . queue , field_name = None , annotations = None , depth = self . depth , whence = whence , value = bytearray ( ) , ion_type = None , pending_symbol = None ) | Derives a scalar context as a child of the current context . |
44,961 | def set_ion_type ( self , ion_type ) : if ion_type is self . ion_type : return self self . ion_type = ion_type self . line_comment = False return self | Sets context to the given IonType . |
44,962 | def set_annotation ( self ) : assert self . pending_symbol is not None assert not self . value annotations = ( _as_symbol ( self . pending_symbol , is_symbol_value = False ) , ) self . annotations = annotations if not self . annotations else self . annotations + annotations self . ion_type = None self . pending_symbol = None self . quoted_text = False self . line_comment = False self . is_self_delimiting = False return self | Appends the context s pending_symbol to its annotations sequence . |
44,963 | def set_field_name ( self ) : assert self . pending_symbol is not None assert not self . value self . field_name = _as_symbol ( self . pending_symbol , is_symbol_value = False ) self . pending_symbol = None self . quoted_text = False self . line_comment = False self . is_self_delimiting = False return self | Sets the context s pending_symbol as its field_name . |
44,964 | def set_pending_symbol ( self , pending_symbol = None ) : if pending_symbol is None : pending_symbol = CodePointArray ( ) self . value = bytearray ( ) self . pending_symbol = pending_symbol self . line_comment = False return self | Sets the context s pending_symbol with the given unicode sequence and resets the context s value . |
44,965 | def _write_base ( buf , value , bits_per_octet , end_bit = 0 , sign_bit = 0 , is_signed = False ) : if value == 0 : buf . append ( sign_bit | end_bit ) return 1 num_bits = bit_length ( value ) num_octets = num_bits // bits_per_octet remainder = num_bits % bits_per_octet if remainder != 0 or is_signed : num_octets += 1 else : remainder = bits_per_octet for i in range ( num_octets ) : octet = 0 if i == 0 : octet |= sign_bit if i == num_octets - 1 : octet |= end_bit octet |= ( ( value >> ( num_bits - ( remainder + bits_per_octet * i ) ) ) & _OCTET_MASKS [ bits_per_octet ] ) buf . append ( octet ) return num_octets | Write a field to the provided buffer . |
44,966 | def record ( * fields ) : @ six . add_metaclass ( _RecordMetaClass ) class RecordType ( object ) : _record_sentinel = True _record_fields = fields return RecordType | Constructs a type that can be extended to create immutable value types . |
44,967 | def coroutine ( func ) : def wrapper ( * args , ** kwargs ) : gen = func ( * args , ** kwargs ) val = next ( gen ) if val != None : raise TypeError ( 'Unexpected value from start of coroutine' ) return gen wrapper . __name__ = func . __name__ wrapper . __doc__ = func . __doc__ return wrapper | Wraps a PEP - 342 enhanced generator in a way that avoids boilerplate of the priming call to next . |
44,968 | def derive_field_name ( self , field_name ) : cls = type ( self ) return cls ( self [ 0 ] , self [ 1 ] , self [ 2 ] , field_name , self [ 4 ] , self [ 5 ] ) | Derives a new event from this one setting the field_name attribute . |
44,969 | def derive_annotations ( self , annotations ) : cls = type ( self ) return cls ( self [ 0 ] , self [ 1 ] , self [ 2 ] , self [ 3 ] , annotations , self [ 5 ] ) | Derives a new event from this one setting the annotations attribute . |
44,970 | def derive_value ( self , value ) : return IonEvent ( self . event_type , self . ion_type , value , self . field_name , self . annotations , self . depth ) | Derives a new event from this one setting the value attribute . |
44,971 | def derive_depth ( self , depth ) : cls = type ( self ) return cls ( self [ 0 ] , self [ 1 ] , self [ 2 ] , self [ 3 ] , self [ 4 ] , depth ) | Derives a new event from this one setting the depth attribute . |
44,972 | def adjust_from_utc_fields ( * args , ** kwargs ) : raw_ts = Timestamp ( * args , ** kwargs ) offset = raw_ts . utcoffset ( ) if offset is None or offset == timedelta ( ) : return raw_ts adjusted = raw_ts + offset if raw_ts . precision is None : return adjusted return Timestamp ( adjusted . year , adjusted . month , adjusted . day , adjusted . hour , adjusted . minute , adjusted . second , adjusted . microsecond , raw_ts . tzinfo , precision = raw_ts . precision , fractional_precision = raw_ts . fractional_precision ) | Constructs a timestamp from UTC fields adjusted to the local offset if given . |
44,973 | def raw_writer ( indent = None ) : is_whitespace_str = isinstance ( indent , str ) and re . search ( r'\A\s*\Z' , indent , re . M ) is not None if not ( indent is None or is_whitespace_str ) : raise ValueError ( 'The indent parameter must either be None or a string containing only whitespace' ) indent_bytes = six . b ( indent ) if isinstance ( indent , str ) else indent return writer_trampoline ( _raw_writer_coroutine ( indent = indent_bytes ) ) | Returns a raw text writer co - routine . |
44,974 | def writer_trampoline ( start ) : trans = Transition ( None , start ) while True : ion_event = ( yield trans . event ) if trans . event is None : if ion_event is None : raise TypeError ( 'Cannot start Writer with no event' ) else : if trans . event . type is WriteEventType . HAS_PENDING and ion_event is not None : raise TypeError ( 'Writer expected to receive no event: %r' % ( ion_event , ) ) if trans . event . type is not WriteEventType . HAS_PENDING and ion_event is None : raise TypeError ( 'Writer expected to receive event' ) if ion_event is not None and ion_event . event_type is IonEventType . INCOMPLETE : raise TypeError ( 'Writer cannot receive INCOMPLETE event' ) trans = trans . delegate . send ( Transition ( ion_event , trans . delegate ) ) | Provides the co - routine trampoline for a writer state machine . |
44,975 | def _drain ( writer , ion_event ) : result_event = _WRITE_EVENT_HAS_PENDING_EMPTY while result_event . type is WriteEventType . HAS_PENDING : result_event = writer . send ( ion_event ) ion_event = None yield result_event | Drain the writer of its pending write events . |
44,976 | def blocking_writer ( writer , output ) : result_type = None while True : ion_event = ( yield result_type ) for result_event in _drain ( writer , ion_event ) : output . write ( result_event . data ) result_type = result_event . type | Provides an implementation of using the writer co - routine with a file - like object . |
44,977 | def from_event ( cls , ion_event ) : if ion_event . value is not None : args , kwargs = cls . _to_constructor_args ( ion_event . value ) else : args , kwargs = ( ) , { } value = cls ( * args , ** kwargs ) value . ion_event = ion_event value . ion_type = ion_event . ion_type value . ion_annotations = ion_event . annotations return value | Constructs the given native extension from the properties of an event . |
44,978 | def from_value ( cls , ion_type , value , annotations = ( ) ) : if value is None : value = IonPyNull ( ) else : args , kwargs = cls . _to_constructor_args ( value ) value = cls ( * args , ** kwargs ) value . ion_event = None value . ion_type = ion_type value . ion_annotations = annotations return value | Constructs a value as a copy with an associated Ion type and annotations . |
44,979 | def to_event ( self , event_type , field_name = None , depth = None ) : if self . ion_event is None : value = self if isinstance ( self , IonPyNull ) : value = None self . ion_event = IonEvent ( event_type , ion_type = self . ion_type , value = value , field_name = field_name , annotations = self . ion_annotations , depth = depth ) return self . ion_event | Constructs an IonEvent from this _IonNature value . |
44,980 | def _remove_sig ( signature , idempotent = False ) : try : signaturep = next ( signature . iterancestors ( ) ) except StopIteration : if idempotent : return raise ValueError ( "Can't remove the root signature node" ) if signature . tail is not None : try : signatures = next ( signature . itersiblings ( preceding = True ) ) except StopIteration : if signaturep . text is not None : signaturep . text = signaturep . text + signature . tail else : signaturep . text = signature . tail else : if signatures . tail is not None : signatures . tail = signatures . tail + signature . tail else : signatures . tail = signature . tail signaturep . remove ( signature ) | Remove the signature node from its parent keeping any tail element . This is needed for enveloped signatures . |
44,981 | def authorize ( self , scope = None , redirect_uri = None , state = None ) : _logger . debug ( "Called authorize()" ) params = { 'client_id' : self . client_id } if scope : params [ 'scope' ] = scope if redirect_uri : params [ 'redirect_uri' ] = redirect_uri if state : params [ 'state' ] = state url = self . auth_url + 'authorize?' + urlencode ( params ) _logger . debug ( "Redirecting to %s" , url ) return redirect ( url ) | Redirect to GitHub and request access to a user s data . |
44,982 | def authorized_handler ( self , f ) : @ wraps ( f ) def decorated ( * args , ** kwargs ) : if 'code' in request . args : data = self . _handle_response ( ) else : data = self . _handle_invalid_response ( ) return f ( * ( ( data , ) + args ) , ** kwargs ) return decorated | Decorator for the route that is used as the callback for authorizing with GitHub . This callback URL can be set in the settings for the app or passed in during authorization . |
44,983 | def _handle_response ( self ) : _logger . debug ( "Handling response from GitHub" ) params = { 'code' : request . args . get ( 'code' ) , 'client_id' : self . client_id , 'client_secret' : self . client_secret } url = self . auth_url + 'access_token' _logger . debug ( "POSTing to %s" , url ) _logger . debug ( params ) response = self . session . post ( url , data = params ) data = parse_qs ( response . content ) _logger . debug ( "response.content = %s" , data ) for k , v in data . items ( ) : if len ( v ) == 1 : data [ k ] = v [ 0 ] token = data . get ( b'access_token' , None ) if token is not None : token = token . decode ( 'ascii' ) return token | Handles response after the redirect to GitHub . This response determines if the user has allowed this application access . If access was granted , we send a POST request for the access_key used to authenticate requests to GitHub . |
44,984 | def decode_lazy ( rlp , sedes = None , ** sedes_kwargs ) : item , end = consume_item_lazy ( rlp , 0 ) if end != len ( rlp ) : raise DecodingError ( 'RLP length prefix announced wrong length' , rlp ) if isinstance ( item , LazyList ) : item . sedes = sedes item . sedes_kwargs = sedes_kwargs return item elif sedes : return sedes . deserialize ( item , ** sedes_kwargs ) else : return item | Decode an RLP encoded object in a lazy fashion . |
44,985 | def consume_item_lazy ( rlp , start ) : p , t , l , s = consume_length_prefix ( rlp , start ) if t is bytes : item , _ , end = consume_payload ( rlp , p , s , bytes , l ) return item , end else : assert t is list return LazyList ( rlp , s , s + l ) , s + l | Read an item from an RLP string lazily . |
44,986 | def peek ( rlp , index , sedes = None ) : ll = decode_lazy ( rlp ) if not isinstance ( index , Iterable ) : index = [ index ] for i in index : if isinstance ( ll , Atomic ) : raise IndexError ( 'Too many indices given' ) ll = ll [ i ] if sedes : return sedes . deserialize ( ll ) else : return ll | Get a specific element from an rlp encoded nested list . |
44,987 | def fixed_length ( cls , l , allow_empty = False ) : return cls ( l , l , allow_empty = allow_empty ) | Create a sedes for text data with exactly l encoded characters . |
44,988 | def _eq ( left , right ) : if isinstance ( left , ( tuple , list ) ) and isinstance ( right , ( tuple , list ) ) : return len ( left ) == len ( right ) and all ( _eq ( * pair ) for pair in zip ( left , right ) ) else : return left == right | Equality comparison that allows for equality between tuple and list types with equivalent elements . |
44,989 | def is_sequence ( obj ) : return isinstance ( obj , Sequence ) and not ( isinstance ( obj , str ) or BinaryClass . is_valid_type ( obj ) ) | Check if obj is a sequence but not a string or bytes . |
44,990 | def encode ( obj , sedes = None , infer_serializer = True , cache = True ) : if isinstance ( obj , Serializable ) : cached_rlp = obj . _cached_rlp if sedes is None and cached_rlp : return cached_rlp else : really_cache = ( cache and sedes is None ) else : really_cache = False if sedes : item = sedes . serialize ( obj ) elif infer_serializer : item = infer_sedes ( obj ) . serialize ( obj ) else : item = obj result = encode_raw ( item ) if really_cache : obj . _cached_rlp = result return result | Encode a Python object in RLP format . |
44,991 | def consume_payload ( rlp , prefix , start , type_ , length ) : if type_ is bytes : item = rlp [ start : start + length ] return ( item , [ prefix + item ] , start + length ) elif type_ is list : items = [ ] per_item_rlp = [ ] list_rlp = prefix next_item_start = start end = next_item_start + length while next_item_start < end : p , t , l , s = consume_length_prefix ( rlp , next_item_start ) item , item_rlp , next_item_start = consume_payload ( rlp , p , s , t , l ) per_item_rlp . append ( item_rlp ) list_rlp += item_rlp [ 0 ] items . append ( item ) per_item_rlp . insert ( 0 , list_rlp ) if next_item_start > end : raise DecodingError ( 'List length prefix announced a too small ' 'length' , rlp ) return ( items , per_item_rlp , next_item_start ) else : raise TypeError ( 'Type must be either list or bytes' ) | Read the payload of an item from an RLP string . |
44,992 | def consume_item ( rlp , start ) : p , t , l , s = consume_length_prefix ( rlp , start ) return consume_payload ( rlp , p , s , t , l ) | Read an item from an RLP string . |
44,993 | def decode ( rlp , sedes = None , strict = True , recursive_cache = False , ** kwargs ) : if not is_bytes ( rlp ) : raise DecodingError ( 'Can only decode RLP bytes, got type %s' % type ( rlp ) . __name__ , rlp ) try : item , per_item_rlp , end = consume_item ( rlp , 0 ) except IndexError : raise DecodingError ( 'RLP string too short' , rlp ) if end != len ( rlp ) and strict : msg = 'RLP string ends with {} superfluous bytes' . format ( len ( rlp ) - end ) raise DecodingError ( msg , rlp ) if sedes : obj = sedes . deserialize ( item , ** kwargs ) if is_sequence ( obj ) or hasattr ( obj , '_cached_rlp' ) : _apply_rlp_cache ( obj , per_item_rlp , recursive_cache ) return obj else : return item | Decode an RLP encoded object . |
44,994 | def infer_sedes ( obj ) : if is_sedes ( obj . __class__ ) : return obj . __class__ elif not isinstance ( obj , bool ) and isinstance ( obj , int ) and obj >= 0 : return big_endian_int elif BinaryClass . is_valid_type ( obj ) : return binary elif not isinstance ( obj , str ) and isinstance ( obj , collections . Sequence ) : return List ( map ( infer_sedes , obj ) ) elif isinstance ( obj , bool ) : return boolean elif isinstance ( obj , str ) : return text msg = 'Did not find sedes handling type {}' . format ( type ( obj ) . __name__ ) raise TypeError ( msg ) | Try to find a sedes object suitable for a given Python object . |
44,995 | def destinations ( self , cluster = 'main' ) : if not self . config . has_section ( cluster ) : raise SystemExit ( "Cluster '%s' not defined in %s" % ( cluster , self . config_file ) ) destinations = self . config . get ( cluster , 'destinations' ) return destinations . replace ( ' ' , '' ) . split ( ',' ) | Return a list of destinations for a cluster . |
44,996 | def replication_factor ( self , cluster = 'main' ) : if not self . config . has_section ( cluster ) : raise SystemExit ( "Cluster '%s' not defined in %s" % ( cluster , self . config_file ) ) return int ( self . config . get ( cluster , 'replication_factor' ) ) | Return the replication factor for a cluster as an integer . |
44,997 | def ssh_user ( self , cluster = 'main' ) : if not self . config . has_section ( cluster ) : raise SystemExit ( "Cluster '%s' not defined in %s" % ( cluster , self . config_file ) ) try : return self . config . get ( cluster , 'ssh_user' ) except NoOptionError : return pwd . getpwuid ( os . getuid ( ) ) . pw_name | Return the ssh user for a cluster or current user if undefined . |
44,998 | def whisper_lock_writes ( self , cluster = 'main' ) : if not self . config . has_section ( cluster ) : raise SystemExit ( "Cluster '%s' not defined in %s" % ( cluster , self . config_file ) ) try : return bool ( self . config . get ( cluster , 'whisper_lock_writes' ) ) except NoOptionError : return False | Lock whisper files during carbon - sync . |
44,999 | def hashing_type ( self , cluster = 'main' ) : if not self . config . has_section ( cluster ) : raise SystemExit ( "Cluster '%s' not defined in %s" % ( cluster , self . config_file ) ) hashing_type = 'carbon_ch' try : return self . config . get ( cluster , 'hashing_type' ) except NoOptionError : return hashing_type | Hashing type of cluster . |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.