idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
42,700
def capture_packet(self):
    """Read one packet from the socket and write it out via each handler's logger."""
    data = self.socket.recv(self._buffer_size)
    for handler in self.capture_handlers:
        handler['reads'] += 1
        handler['data_read'] += len(data)
        payload = data
        if 'pre_write_transforms' in handler:
            # Transforms are applied in registration order before logging.
            for transform in handler['pre_write_transforms']:
                payload = transform(payload)
        handler['logger'].write(payload)
Write packet data to the logger's log file.
42,701
def clean_up(self):
    """Close the capture socket and every handler's log file handle."""
    self.socket.close()
    for handler in self.capture_handlers:
        handler['logger'].close()
Clean up the socket and log file handles .
42,702
def socket_monitor_loop(self):
    """Block until the socket is readable, rotating logs and capturing packets.

    Runs forever; cleanup is guaranteed via the finally clause when the
    greenlet is killed or an exception escapes.
    """
    try:
        while True:
            gevent.socket.wait_read(self.socket.fileno())
            self._handle_log_rotations()
            self.capture_packet()
    finally:
        self.clean_up()
Monitor the socket and log captured data .
42,703
def add_handler(self, handler):
    """Register an additional capture handler, initializing its logger and stats."""
    handler['reads'] = 0
    handler['data_read'] = 0
    handler['logger'] = self._get_logger(handler)
    self.capture_handlers.append(handler)
Add an additional handler
42,704
def remove_handler(self, name):
    """Close and remove the capture handler whose 'name' matches.

    If several handlers share the name, the last match is removed (same as
    the original scan-to-end behavior).
    """
    match = None
    for position, handler in enumerate(self.capture_handlers):
        if handler['name'] == name:
            match = position
    if match is not None:
        self.capture_handlers[match]['logger'].close()
        del self.capture_handlers[match]
Remove a handler given a name
42,705
def dump_handler_config_data(self):
    """Return capture handler configuration data for every handler.

    Uses dict.items() instead of the Python 2-only iteritems() so the
    method also runs on Python 3 (identical iteration behavior).
    """
    ignored_keys = ['logger', 'log_rot_time', 'reads', 'data_read']
    config_data = []
    for h in self.capture_handlers:
        config_data.append({
            # Strip runtime-only keys so only user-supplied config remains.
            'handler': {k: v for k, v in h.items() if k not in ignored_keys},
            'log_file_path': h['logger']._stream.name,
            'conn_type': self.conn_type,
            'address': self.address,
        })
    return config_data
Return capture handler configuration data .
42,706
def dump_all_handler_stats(self):
    """Return read statistics for every capture handler.

    Bug fix: the original divided by the seconds elapsed since the last
    log rotation without guarding against zero, raising ZeroDivisionError
    when stats were requested within the same second as a rotation.  A
    zero delta is now treated as one second.
    """
    stats = []
    for h in self.capture_handlers:
        now = calendar.timegm(time.gmtime())
        rot_time = calendar.timegm(h['log_rot_time'])
        time_delta = now - rot_time
        # Guard: rotation may have happened this very second.
        rate = h['data_read'] / float(time_delta) if time_delta > 0 else float(h['data_read'])
        stats.append({
            'name': h['name'],
            'reads': h['reads'],
            'data_read_length': '{} bytes'.format(h['data_read']),
            'approx_data_rate': '{} bytes/second'.format(rate),
        })
    return stats
Return handler capture statistics
42,707
def _handle_log_rotations ( self ) : for h in self . capture_handlers : if self . _should_rotate_log ( h ) : self . _rotate_log ( h )
Rotate each handler's log file if necessary.
42,708
def _should_rotate_log ( self , handler ) : if handler [ 'rotate_log' ] : rotate_time_index = handler . get ( 'rotate_log_index' , 'day' ) try : rotate_time_index = self . _decode_time_rotation_index ( rotate_time_index ) except ValueError : rotate_time_index = 2 rotate_time_delta = handler . get ( 'rotate_log_delta' , 1 ) cur_t = time . gmtime ( ) first_different_index = 9 for i in range ( 9 ) : if cur_t [ i ] != handler [ 'log_rot_time' ] [ i ] : first_different_index = i break if first_different_index < rotate_time_index : return True else : time_delta = cur_t [ rotate_time_index ] - handler [ 'log_rot_time' ] [ rotate_time_index ] return time_delta >= rotate_time_delta return False
Determine if a log file rotation is necessary
42,709
def _decode_time_rotation_index ( self , time_rot_index ) : time_index_decode_table = { 'year' : 0 , 'years' : 0 , 'tm_year' : 0 , 'month' : 1 , 'months' : 1 , 'tm_mon' : 1 , 'day' : 2 , 'days' : 2 , 'tm_mday' : 2 , 'hour' : 3 , 'hours' : 3 , 'tm_hour' : 3 , 'minute' : 4 , 'minutes' : 4 , 'tm_min' : 4 , 'second' : 5 , 'seconds' : 5 , 'tm_sec' : 5 , } if time_rot_index not in time_index_decode_table . keys ( ) : raise ValueError ( 'Invalid time option specified for log rotation' ) return time_index_decode_table [ time_rot_index ]
Return the time struct index to use for log rotation checks
42,710
def _get_log_file ( self , handler ) : if 'file_name_pattern' not in handler : filename = '%Y-%m-%d-%H-%M-%S-{name}.pcap' else : filename = handler [ 'file_name_pattern' ] log_file = handler [ 'log_dir' ] if 'path' in handler : log_file = os . path . join ( log_file , handler [ 'path' ] , filename ) else : log_file = os . path . join ( log_file , filename ) log_file = time . strftime ( log_file , time . gmtime ( ) ) log_file = log_file . format ( ** handler ) return log_file
Generate log file path for a given handler
42,711
def _get_logger(self, handler):
    """Open a PCAP log stream (append mode) for the handler, creating directories.

    Side effect: records the rotation timestamp in handler['log_rot_time'].
    """
    log_file = self._get_log_file(handler)
    log_dir = os.path.dirname(log_file)
    if not os.path.isdir(log_dir):
        os.makedirs(log_dir)
    handler['log_rot_time'] = time.gmtime()
    return pcap.open(log_file, mode='a')
Initialize a PCAP stream for logging data
42,712
def add_logger(self, name, address, conn_type, log_dir_path=None, **kwargs):
    """Add a new stream capturer to the manager (or a handler to an existing one).

    Python 3 compatibility fixes: dict.has_key() replaced with the `in`
    operator and hasattr(x, '__call__') with callable() — both are exact
    behavioral equivalents on Python 2 as well.
    """
    capture_handler_conf = kwargs
    if not log_dir_path:
        log_dir_path = self._mngr_conf['root_log_directory']
    log_dir_path = os.path.normpath(os.path.expanduser(log_dir_path))
    capture_handler_conf['log_dir'] = log_dir_path
    capture_handler_conf['name'] = name
    if 'rotate_log' not in capture_handler_conf:
        capture_handler_conf['rotate_log'] = True
    transforms = []
    if 'pre_write_transforms' in capture_handler_conf:
        for transform in capture_handler_conf['pre_write_transforms']:
            if isinstance(transform, str):
                # Named transforms are looked up in this module's globals.
                if transform in globals():
                    transforms.append(globals().get(transform))
                else:
                    msg = ('Unable to load data transformation '
                           '"{}" for handler "{}"').format(
                        transform, capture_handler_conf['name'])
                    log.warn(msg)
            elif callable(transform):
                transforms.append(transform)
            else:
                msg = ('Unable to determine how to load data transform '
                       '"{}"').format(transform)
                log.warn(msg)
    capture_handler_conf['pre_write_transforms'] = transforms
    address_key = str(address)
    if address_key in self._stream_capturers:
        # Reuse the capturer already bound to this address.
        capturer = self._stream_capturers[address_key][0]
        capturer.add_handler(capture_handler_conf)
        return
    socket_logger = SocketStreamCapturer(capture_handler_conf, address, conn_type)
    greenlet = gevent.spawn(socket_logger.socket_monitor_loop)
    self._stream_capturers[address_key] = (socket_logger, greenlet)
    self._pool.add(greenlet)
Add a new stream capturer to the manager .
42,713
def stop_capture_handler(self, name):
    """Remove every handler named `name`; reap capturers left with no handlers.

    Uses dict.items() instead of Python 2-only iteritems() (same behavior,
    Python 3 compatible); also fixes the misspelled local 'indeces'.
    """
    empty_capturer_keys = []
    for key, sc in self._stream_capturers.items():
        stream_capturer = sc[0]
        stream_capturer.remove_handler(name)
        if stream_capturer.handler_count == 0:
            self._pool.killone(sc[1])
            empty_capturer_keys.append(key)
    # Deletion is deferred so the dict is not mutated while iterating.
    for key in empty_capturer_keys:
        del self._stream_capturers[key]
Remove all handlers with a given name
42,714
def stop_stream_capturer(self, address):
    """Stop the managed capturer bound to `address`; raise ValueError if unknown."""
    address = str(address)
    if address not in self._stream_capturers:
        raise ValueError('Capturer address does not match a managed capturer')
    capturer_pair = self._stream_capturers[address]
    self._pool.killone(capturer_pair[1])
    del self._stream_capturers[address]
Stop a capturer that the manager controls .
42,715
def rotate_capture_handler_log(self, name):
    """Force a log rotation for every handler with the given name.

    Uses dict.items() instead of Python 2-only iteritems().
    """
    for _, sc in self._stream_capturers.items():
        for handler in sc[0].capture_handlers:
            if handler['name'] == name:
                sc[0]._rotate_log(handler)
Force a rotation of a handler's log file.
42,716
def get_logger_data(self):
    """Return a dict mapping each capturer address to its handler config data.

    Uses dict.items() instead of Python 2-only iteritems().
    """
    return {
        address: capturer[0].dump_handler_config_data()
        for address, capturer in self._stream_capturers.items()
    }
Return data on managed loggers .
42,717
def get_handler_stats(self):
    """Return a dict mapping each capturer address to its handler read stats.

    Uses dict.items() instead of Python 2-only iteritems().
    """
    return {
        address: capturer[0].dump_all_handler_stats()
        for address, capturer in self._stream_capturers.items()
    }
Return handler read statistics
42,718
def get_capture_handler_config_by_name(self, name):
    """Return config data for all handlers matching `name` across all capturers.

    Uses dict.items() instead of Python 2-only iteritems().
    """
    handler_confs = []
    for address, stream_capturer in self._stream_capturers.items():
        for conf in stream_capturer[0].dump_handler_config_data():
            if conf['handler']['name'] == name:
                handler_confs.append(conf)
    return handler_confs
Return data for handlers of a given name .
42,719
def run_socket_event_loop(self):
    """Monitor managed loggers until interrupted, then kill the greenlet pool."""
    try:
        while True:
            self._pool.join()
            # Avoid a busy loop while no loggers are registered yet.
            if not self._logger_data:
                time.sleep(0.5)
    except KeyboardInterrupt:
        pass
    finally:
        self._pool.kill()
Start monitoring managed loggers .
42,720
def _route ( self ) : self . _app . route ( '/' , method = 'GET' , callback = self . _get_logger_list ) self . _app . route ( '/stats' , method = 'GET' , callback = self . _fetch_handler_stats ) self . _app . route ( '/<name>/start' , method = 'POST' , callback = self . _add_logger_by_name ) self . _app . route ( '/<name>/stop' , method = 'DELETE' , callback = self . _stop_logger_by_name ) self . _app . route ( '/<name>/config' , method = 'GET' , callback = self . _get_logger_conf ) self . _app . route ( '/<name>/rotate' , method = 'POST' , callback = self . _rotate_capturer_log )
Handles server route instantiation .
42,721
def _add_logger_by_name(self, name):
    """Handle POST: create a logger from form fields (port and conn_type required).

    Bug fix: the original coerced rotate_log with `data == 'true'`, which
    compares the entire form dict against the string 'true' and is
    therefore always False.  It now compares the field's value.
    """
    data = dict(request.forms)
    loc = data.pop('loc', '')
    port = data.pop('port', None)
    conn_type = data.pop('conn_type', None)
    if not port or not conn_type:
        e = 'Port and/or conn_type not set'
        raise ValueError(e)
    address = [loc, int(port)]
    if 'rotate_log' in data:
        data['rotate_log'] = data['rotate_log'] == 'true'
    if 'rotate_log_delta' in data:
        data['rotate_log_delta'] = int(data['rotate_log_delta'])
    self._logger_manager.add_logger(name, address, conn_type, **data)
Handles POST requests for adding a new logger .
42,722
def handle_includes(defns):
    """Flatten nested include-lists produced by the YAML reader into one flat list."""
    flattened = []
    for item in defns:
        if isinstance(item, list):
            # Includes arrive as nested lists; recurse to flatten them.
            flattened.extend(handle_includes(item))
        else:
            flattened.append(item)
    return flattened
Recursive handling of includes for any input list of defns. The assumption here is that when an include is handled by the pyyaml reader it adds them as a list which stands apart from the rest of the expected YAML definitions.
42,723
def eval(self, packet):
    """Evaluate this DNToEUConversion for `packet`; None when the guard fails.

    Removed the unused local `terms` present in the original.
    """
    result = None
    if self._when is None or self._when.eval(packet):
        result = self._equation.eval(packet)
    return result
Returns the result of evaluating this DNToEUConversion in the context of the given Packet .
42,724
def validate(self, value, messages=None):
    """Return True if `value` is a valid field value; errors go into `messages`.

    Python 3 compatibility: dict.keys()/values() are wrapped in list()
    before indexing, since on Python 3 they are views (not lists) and the
    original `self.enum.keys()[...]` would raise TypeError.
    """
    valid = True
    primitive = value

    def log(msg):
        if messages is not None:
            messages.append(msg)

    if self.enum:
        # Field enums map primitive value -> name, so validate against values().
        if value not in self.enum.values():
            valid = False
            flds = (self.name, str(value))
            log("%s value '%s' not in allowed enumerated values." % flds)
        else:
            keys = list(self.enum.keys())
            values = list(self.enum.values())
            primitive = int(keys[values.index(value)])
    if self.type:
        if self.type.validate(primitive, messages, self.name) is False:
            valid = False
    return valid
Returns True if the given field value is valid False otherwise . Validation error messages are appended to an optional messages array .
42,725
def decode(self, bytes, raw=False, index=None):
    """Decode this field from `bytes`, applying mask, shift, and enum mapping."""
    field_bytes = bytes[self.slice()]
    if index is not None and isinstance(self.type, dtype.ArrayType):
        value = self.type.decode(field_bytes, index, raw)
    else:
        value = self.type.decode(field_bytes, raw)
    # Mask first, then shift, matching how the field was packed.
    if self.mask is not None:
        value &= self.mask
    if self.shift > 0:
        value >>= self.shift
    if not raw and self.enum is not None:
        # Fall back to the numeric value when it has no enumerated name.
        value = self.enum.get(value, value)
    return value
Decodes the given bytes according to this Field Definition .
42,726
def encode(self, value):
    """Encode `value` for this field, resolving enum names and applying shift/mask."""
    if type(value) == str and self.enum and value in self.enum:
        value = self.enum[value]
    if type(value) == int:
        if self.shift > 0:
            value <<= self.shift
        if self.mask is not None:
            value &= self.mask
    if self.type:
        return self.type.encode(value)
    return bytearray()
Encodes the given value according to this FieldDefinition .
42,727
def _assertField ( self , fieldname ) : if not self . _hasattr ( fieldname ) : values = self . _defn . name , fieldname raise AttributeError ( "Packet '%s' has no field '%s'" % values )
Raise AttributeError when Packet has no field with the given name .
42,728
def _getattr(self, fieldname, raw=False, index=None):
    """Return the value of packet field `fieldname` (raw, decoded, or derived)."""
    self._assertField(fieldname)
    value = None
    if fieldname == 'raw':
        value = createRawPacket(self)
    elif fieldname == 'history':
        value = self._defn.history
    else:
        # Derived fields take precedence over plain fields of the same name.
        if fieldname in self._defn.derivationmap:
            defn = self._defn.derivationmap[fieldname]
        else:
            defn = self._defn.fieldmap[fieldname]
        if isinstance(defn.type, dtype.ArrayType) and index is None:
            return createFieldList(self, defn, raw)
        if defn.when is None or defn.when.eval(self):
            if isinstance(defn, DerivationDefinition):
                value = defn.equation.eval(self)
            elif raw or (defn.dntoeu is None and defn.expr is None):
                value = defn.decode(self._data, raw, index)
            elif defn.dntoeu is not None:
                value = defn.dntoeu.eval(self)
            elif defn.expr is not None:
                value = defn.expr.eval(self)
    return value
Returns the value of the given packet field name .
42,729
def _hasattr ( self , fieldname ) : special = 'history' , 'raw' return ( fieldname in special or fieldname in self . _defn . fieldmap or fieldname in self . _defn . derivationmap )
Returns True if this packet contains fieldname False otherwise .
42,730
def _update_bytes(self, defns, start=0):
    """Assign byte positions to FieldDefinitions marked '@prev' or left unset.

    Returns the byte offset just past the last field.
    """
    pos = slice(start, start)
    for fd in defns:
        if fd.bytes == '@prev' or fd.bytes is None:
            if fd.bytes == '@prev':
                # '@prev' overlays the previous field's starting byte.
                fd.bytes = None
                pos = fd.slice(pos.start)
            elif fd.bytes is None:
                pos = fd.slice(pos.stop)
            if pos.start == pos.stop - 1:
                fd.bytes = pos.start
            else:
                fd.bytes = [pos.start, pos.stop - 1]
        pos = fd.slice()
    return pos.stop
Updates the bytes field in all FieldDefinition .
42,731
def nbytes(self):
    """Total byte length of this packet: highest byte index used, plus one."""
    def last_byte(defn):
        # defn.bytes is either a single index or a [first, last] pair.
        return defn.bytes if type(defn.bytes) is int else max(defn.bytes)
    return max([-1] + [last_byte(d) for d in self.fields]) + 1
The number of bytes for this telemetry packet
42,732
def validate(self, pkt, messages=None):
    """Return True when every defined field of `pkt` is present and valid."""
    valid = True
    for f in self.fields:
        try:
            value = getattr(pkt, f.name)
        except AttributeError:
            # A missing attribute means the packet and definition disagree.
            valid = False
            if messages is not None:
                msg = ("Telemetry field mismatch for packet '%s'. "
                       "Unable to retrieve value for %s in Packet.")
                messages.append(msg % (self.name, f.name))
            break
        if f.validate(value, messages) is False:
            valid = False
    return valid
Returns True if the given Packet is valid False otherwise . Validation error messages are appended to an optional messages array .
42,733
def eval(self, packet):
    """Evaluate this PacketExpression against `packet`; None on division by zero."""
    result = None
    try:
        context = createPacketContext(packet)
        # The compiled expression sees packet fields via the created context.
        result = eval(self._code, packet._defn.globals, context)
    except ZeroDivisionError:
        pass
    return result
Returns the result of evaluating this PacketExpression in the context of the given Packet .
42,734
def _assertField ( self , name ) : if name not in self . _names : msg = 'PacketHistory "%s" has no field "%s"' values = self . _defn . name , name raise AttributeError ( msg % values )
Raise AttributeError when PacketHistory has no field with the given name .
42,735
def add(self, packet):
    """Record the current non-None value of each tracked field from `packet`."""
    for field_name in self._names:
        current = getattr(packet, field_name)
        if current is not None:
            self._dict[field_name] = current
Add the given Packet to this PacketHistory .
42,736
def add(self, defn):
    """Insert a packet definition; duplicate names raise util.YAMLError."""
    if defn.name in self:
        msg = "Duplicate packet name '%s'" % defn.name
        log.error(msg)
        raise util.YAMLError(msg)
    self[defn.name] = defn
Adds the given Packet Definition to this Telemetry Dictionary .
42,737
def create(self, name, data=None):
    """Instantiate the packet named `name` from `data`, or None if undefined."""
    if name not in self:
        return None
    return createPacket(self[name], data)
Creates a new packet with the given definition and raw data .
42,738
def load(self, content):
    """Load packet definitions from YAML `content` (a filename or YAML string).

    NOTE(review): yaml.load without an explicit Loader is unsafe on
    untrusted input — consider yaml.safe_load.  The `type(stream) is file`
    check relies on the Python 2 `file` builtin.
    """
    if self.filename is None:
        if os.path.isfile(content):
            self.filename = content
            stream = open(self.filename, 'rb')
        else:
            stream = content
    pkts = yaml.load(stream)
    pkts = handle_includes(pkts)
    for pkt in pkts:
        self.add(pkt)
    if type(stream) is file:
        stream.close()
Loads Packet Definitions from the given YAML content into this Telemetry Dictionary . Content may be either a filename containing YAML content or a YAML string .
42,739
def writeToCSV(self, output_path=None):
    """Write each packet's field definitions to <output_path>/<packet>.csv."""
    header = ['Name', 'First Byte', 'Last Byte', 'Bit Mask',
              'Endian', 'Type', 'Description', 'Values']
    if output_path is None:
        output_path = ait.config._directory
    for pkt_name in self.tlmdict:
        filename = os.path.join(output_path, pkt_name + '.csv')
        with open(filename, 'wb') as output:
            csvwriter = csv.writer(output, quoting=csv.QUOTE_ALL)
            csvwriter.writerow(header)
            for fld in self.tlmdict[pkt_name].fields:
                desc = fld.desc.replace('\n', ' ') if fld.desc is not None else ""
                mask = hex(fld.mask) if fld.mask is not None else ""
                enums = ('\n'.join("%s: %s" % (k, fld.enum[k]) for k in fld.enum)
                         if fld.enum is not None else "")
                csvwriter.writerow([fld.name, fld.slice().start,
                                    fld.slice().stop, mask, fld.type.endian,
                                    fld.type.name, desc, enums])
writeToCSV - write the telemetry dictionary to csv
42,740
def decode(self, bytes):
    """Decode `bytes` to a primitive value, mapped to its enumerated name if any."""
    value = self.type.decode(bytes)
    if self._enum is not None:
        for name, raw in self._enum.items():
            if value == raw:
                return name
    return value
Decodes the given bytes according to this AIT Argument Definition .
42,741
def encode(self, value):
    """Encode `value`, first resolving an enumerated name to its raw value."""
    if type(value) == str and self.enum and value in self.enum:
        value = self.enum[value]
    if not self.type:
        return bytearray()
    return self.type.encode(value)
Encodes the given value according to this AIT Argument Definition .
42,742
def validate(self, value, messages=None):
    """Return True if `value` is a valid argument value (enum, type, and range)."""
    valid = True
    primitive = value

    def log(msg):
        if messages is not None:
            messages.append(msg)

    if self.enum:
        # Argument enums map name -> primitive value, so check against keys().
        if value not in self.enum.keys():
            valid = False
            log("%s value '%s' not in allowed enumerated values."
                % (self.name, str(value)))
        else:
            primitive = int(self.enum[value])
    if self.type:
        if self.type.validate(primitive, messages, self.name) is False:
            valid = False
    if self.range:
        if primitive < self.range[0] or primitive > self.range[1]:
            valid = False
            log("%s value '%s' out of range [%d, %d]."
                % (self.name, str(primitive), self.range[0], self.range[1]))
    return valid
Returns True if the given Argument value is valid False otherwise . Validation error messages are appended to an optional messages array .
42,743
def encode(self, pad=106):
    """Encode this command (opcode, arg-size byte, arguments) into a bytearray.

    The result is zero-padded to at least `pad` bytes.
    """
    opcode = struct.pack('>H', self.defn.opcode)
    offset = len(opcode)
    size = max(offset + self.defn.argsize, pad)
    encoded = bytearray(size)
    encoded[0:offset] = opcode
    encoded[offset] = self.defn.argsize
    offset += 1
    index = 0
    for defn in self.defn.argdefns:
        if defn.fixed:
            value = defn.value
        else:
            # Only non-fixed arguments consume a caller-supplied value.
            value = self.args[index]
            index += 1
        encoded[defn.slice(offset)] = defn.encode(value)
    return encoded
Encodes this AIT command to binary .
42,744
def nbytes(self):
    """Bytes needed to encode this command: opcode + size byte + all arguments."""
    total = len(self.opcode) + 1
    for arg in self.argdefns:
        total += arg.nbytes
    return total
The number of bytes required to encode this command .
42,745
def argsize(self):
    """Total size in bytes of all command arguments (0 when there are none)."""
    if not self.argdefns:
        return 0
    return sum(arg.nbytes for arg in self.argdefns)
The total size in bytes of all the command arguments .
42,746
def validate(self, cmd, messages=None):
    """Return True when `cmd` has the right number of valid, recognized arguments."""
    valid = True
    supplied = [arg for arg in cmd.args if arg is not None]
    if self.nargs != len(supplied):
        valid = False
        if messages is not None:
            messages.append('Expected %d arguments, but received %d.'
                            % (self.nargs, len(supplied)))
    for defn, value in zip(self.args, cmd.args):
        if value is None:
            valid = False
            if messages is not None:
                messages.append('Argument "%s" is missing.' % defn.name)
        elif defn.validate(value, messages) is False:
            valid = False
    if len(cmd._unrecognized) > 0:
        valid = False
        if messages is not None:
            for name in cmd.unrecognized:
                messages.append('Argument "%s" is unrecognized.' % name)
    return valid
Returns True if the given Command is valid False otherwise . Validation error messages are appended to an optional messages array .
42,747
def create(self, name, *args, **kwargs):
    """Create a command by name; a multi-token name carries its own arguments."""
    tokens = name.split()
    if len(tokens) > 1 and (len(args) > 0 or len(kwargs) > 0):
        raise TypeError(
            'A Cmd may be created with either positional arguments '
            '(passed as a string or a Python list) or keyword '
            'arguments, but not both.')
    if len(tokens) > 1:
        # "NAME arg1 arg2" — split out the name and coerce args to numbers.
        name = tokens[0]
        args = [util.toNumber(t, t) for t in tokens[1:]]
    defn = self.get(name, None)
    if defn is None:
        raise TypeError('Unrecognized command: %s' % name)
    return createCmd(defn, *args, **kwargs)
Creates a new AIT command with the given arguments .
42,748
def decode(self, bytes):
    """Decode opcode and arguments from `bytes`, building the matching command."""
    opcode = struct.unpack(">H", bytes[0:2])[0]
    nbytes = struct.unpack("B", bytes[2:3])[0]
    name = None
    args = []
    if opcode in self.opcodes:
        defn = self.opcodes[opcode]
        name = defn.name
        stop = 3
        for arg in defn.argdefns:
            start, stop = stop, stop + arg.nbytes
            # Fixed arguments occupy space but carry no caller value.
            if not arg.fixed:
                args.append(arg.decode(bytes[start:stop]))
    return self.create(name, *args)
Decodes the given bytes according to this AIT Command Definition .
42,749
def load(self, content):
    """Load command definitions from YAML `content` (a filename or YAML string).

    NOTE(review): yaml.load without an explicit Loader is unsafe on
    untrusted input — consider yaml.safe_load.  The `type(stream) is file`
    check relies on the Python 2 `file` builtin.
    """
    if self.filename is None:
        if os.path.isfile(content):
            self.filename = content
            stream = open(self.filename, 'rb')
        else:
            stream = content
    for cmd in yaml.load(stream):
        self.add(cmd)
    if type(stream) is file:
        stream.close()
Loads Command Definitions from the given YAML content into into this Command Dictionary . Content may be either a filename containing YAML content or a YAML string .
42,750
def cbrt(x):
    """Return the real cube root of x, handling negative inputs."""
    magnitude = math.pow(abs(x), 1.0 / 3.0)
    return magnitude if x >= 0 else -magnitude
Returns the cube root of x .
42,751
def process(self, input_data, topic=None):
    """Run `input_data` through each handler in order, then publish the result."""
    current = input_data
    for handler in self.handlers:
        current = handler.handle(current)
    self.publish(current)
Invokes each handler in sequence . Publishes final output data .
42,752
def valid_workflow(self):
    """Return True when each handler's output type matches the next one's input type.

    A None type on either side of a boundary is treated as a wildcard.
    """
    for current, follower in zip(self.handlers, self.handlers[1:]):
        out_t = current.output_type
        in_t = follower.input_type
        if out_t is not None and in_t is not None and out_t != in_t:
            return False
    return True
Return true if each handler s output type is the same as the next handler s input type . Return False if not .
42,753
def contains(self, p):
    """Return True if point `p` lies inside this polygon (ray-casting test)."""
    inside = False
    if p in self.bounds():
        for s in self.segments():
            # Does the edge straddle the horizontal line through p?
            straddles = (s.p.y > p.y) != (s.q.y > p.y)
            if straddles:
                x_at_y = (s.q.x - s.p.x) * (p.y - s.p.y) / (s.q.y - s.p.y) + s.p.x
                if p.x < x_at_y:
                    inside = not inside
    return inside
Returns True if point is contained inside this Polygon False otherwise .
42,754
def segments(self):
    """Yield each edge of this polygon as a Line, including the closing edge.

    Uses range() in place of the Python 2-only xrange() (equivalent
    iteration; also runs on Python 3).
    """
    for n in range(len(self.vertices) - 1):
        yield Line(self.vertices[n], self.vertices[n + 1])
    # Close the polygon: last vertex back to the first.
    yield Line(self.vertices[-1], self.vertices[0])
Return the Line segments that comprise this Polygon .
42,755
def format_message(self, evr_hist_data):
    """Format this EVR's printf-style message using packed history data.

    Bug fix: the original used a bare `except:`, which also swallows
    KeyboardInterrupt/SystemExit; narrowed to `except Exception`.
    """
    size_formatter_info = {
        's': -1, 'c': 1, 'i': 4, 'd': 4, 'u': 4, 'x': 4,
        'hh': 1, 'h': 2, 'l': 4, 'll': 8, 'f': 8, 'g': 8, 'e': 8,
    }
    type_formatter_info = {
        'c': 'U{}', 'i': 'MSB_I{}', 'd': 'MSB_I{}', 'u': 'MSB_U{}',
        'f': 'MSB_D{}', 'e': 'MSB_D{}', 'g': 'MSB_D{}', 'x': 'MSB_U{}',
    }
    formatters = re.findall("%(?:\d+\$)?([cdieEfgGosuxXhlL]+)", self._message)
    cur_byte_index = 0
    data_chunks = []
    for f in formatters:
        f_size_char = f_type = f[-1]
        if len(f) > 1:
            f_size_char = f[:-1]
        fsize = size_formatter_info[f_size_char.lower()]
        try:
            if f_type != 's':
                end_index = cur_byte_index + fsize
                fstr = type_formatter_info[f_type.lower()].format(fsize * 8)
                # Single-byte fields have no byte order; drop the MSB_ prefix.
                if fsize == 1 and 'MSB_' in fstr:
                    fstr = fstr[4:]
                d = dtype.PrimitiveType(fstr).decode(
                    evr_hist_data[cur_byte_index:end_index])
            else:
                # Strings are NUL-terminated within the history data.
                end_index = str(evr_hist_data).index('\x00', cur_byte_index)
                d = str(evr_hist_data[cur_byte_index:end_index])
            data_chunks.append(d)
        except Exception:
            msg = "Unable to format EVR Message with data {}".format(evr_hist_data)
            log.error(msg)
            raise ValueError(msg)
        cur_byte_index = end_index
        # Step past the NUL terminator of string fields.
        if f == 's':
            cur_byte_index += 1
    if len(formatters) == 0:
        return self._message
    msg = self._message
    for f in formatters:
        # Strip length modifiers (e.g. %llu -> %u) so Python %-format accepts them.
        if len(f) > 1:
            msg = msg.replace('%{}'.format(f), '%{}'.format(f[-1]))
    return msg % tuple(data_chunks)
Format EVR message with EVR data
42,756
def append(self, cmd, delay=0.000, attrs=None):
    """Append `cmd` to this sequence with a relative time delay (seconds)."""
    entry = SeqCmd(cmd, delay, attrs)
    self.lines.append(entry)
Adds a new command with a relative time delay to this sequence .
42,757
def printText(self, stream=None):
    """Write a human-readable listing of this sequence to `stream` (default stdout)."""
    if stream is None:
        stream = sys.stdout
    header = [
        '# seqid : %u\n' % self.seqid,
        '# version : %u\n' % self.version,
        '# crc32 : 0x%04x\n' % self.crc32,
        '# ncmds : %u\n' % len(self.commands),
        '# duration: %.3fs\n' % self.duration,
        '\n',
    ]
    for text in header:
        stream.write(text)
    for line in self.lines:
        stream.write(str(line))
        stream.write('\n')
Prints a text representation of this sequence to the given stream or standard output .
42,758
def validate(self):
    """Validate this sequence; error messages accumulate in the message lists.

    Bug fixes: the missing-file message was appended to a nonexistent
    `self.message` attribute (AttributeError at runtime) and never had the
    filename interpolated into its "%s" placeholder.  It now goes to
    `self.messages` with the pathname filled in, matching the unreadable-
    file branch.
    """
    if not os.path.isfile(self.pathname):
        self.messages.append('Filename "%s" does not exist.' % self.pathname)
    else:
        try:
            with open(self.pathname, 'r') as stream:
                pass
        except IOError:
            self.messages.append(
                'Could not open "%s" for reading.' % self.pathname)
    for line in self.commands:
        messages = []
        if line.cmd and not line.cmd.validate(messages):
            msg = 'error: %s: %s' % (line.cmd.name, " ".join(messages))
            self.log.messages.append(msg)
    return len(self.log.messages) == 0
Returns True if this Sequence is valid False otherwise . Validation error messages are stored in self . messages .
42,759
def decode(cls, bytes, cmddict):
    """Decode a SeqCmd: 1 attribute byte, 3 delay bytes, then the command."""
    attrs = SeqCmdAttrs.decode(bytes[0:1])
    delay = SeqDelay.decode(bytes[1:4])
    cmd = cmddict.decode(bytes[4:])
    return cls(cmd, delay, attrs)
Decodes a sequence command from an array of bytes according to the given command dictionary and returns a new SeqCmd .
42,760
def encode(self):
    """Encode this SeqCmd as attribute bytes + delay bytes + command bytes."""
    pieces = [self.attrs.encode(), self.delay.encode(), self.cmd.encode()]
    return pieces[0] + pieces[1] + pieces[2]
Encodes this SeqCmd to binary and returns a bytearray .
42,761
def parse(cls, line, lineno, log, cmddict):
    """Parse one sequence-command line into a SeqCmd, logging any errors."""
    delay = SeqDelay.parse(line, lineno, log, cmddict)
    attrs = SeqCmdAttrs.parse(line, lineno, log, cmddict)
    comment = SeqComment.parse(line, lineno, log, cmddict)
    # The command text ends where a comment or attribute block begins.
    stop = len(line)
    if comment:
        stop = comment.pos.col.start - 1
    if attrs and attrs.pos.col.stop != -1:
        stop = attrs.pos.col.start - 1
    tokens = line[:stop].split()
    name = tokens[1]
    args = tokens[2:]
    start = line.find(name)
    pos = SeqPos(line, lineno, start + 1, stop)
    if name not in cmddict:
        log.error('Unrecognized command "%s".' % name, pos)
    elif cmddict[name].nargs != len(args):
        msg = 'Command argument size mismatch: expected %d, but encountered %d.'
        log.error(msg % (cmddict[name].nargs, len(args)), pos)
    args = [util.toNumber(a, a) for a in args]
    cmd = cmddict.create(name, *args)
    return cls(cmd, delay, attrs, comment, pos)
Parses the sequence command from a line of text according to the given command dictionary and returns a new SeqCmd .
42,762
def decode(cls, bytes, cmddict=None):
    """Decode one attribute byte into a SeqCmdAttrs, storing non-default bits."""
    byte = struct.unpack('B', bytes)[0]
    self = cls()
    defval = self.default
    for bit, name, value0, value1, default in SeqCmdAttrs.Table:
        mask = 1 << bit
        bitset = mask & byte
        # Only attributes that differ from the default byte are recorded.
        if bitset != (mask & defval):
            self.attrs[name] = value1 if bitset else value0
    return self
Decodes sequence command attributes from an array of bytes and returns a new SeqCmdAttrs .
42,763
def encode(self):
    """Encode these SeqCmdAttrs into a single attribute byte."""
    byte = self.default
    for bit, name, value0, value1, default in SeqCmdAttrs.Table:
        if name in self.attrs:
            byte = setBit(byte, bit, self.attrs[name] == value1)
    return struct.pack('B', byte)
Encodes this SeqCmdAttrs to binary and returns a bytearray .
42,764
def parse(cls, line, lineno, log, cmddict=None):
    """Parse '{name: value, ...}' command attributes from a line of text."""
    start = line.find('{')
    stop = line.find('}')
    pos = SeqPos(line, lineno, start + 1, stop)
    result = cls(None, pos)
    if start >= 0 and stop >= start:
        attrs = {}
        for item in line[start + 1:stop].split(','):
            ncolons = item.count(':')
            if ncolons == 0:
                log.error('Missing colon in command attribute "%s".' % item, pos)
            elif ncolons > 1:
                log.error('Too many colons in command attribute "%s".' % item, pos)
            else:
                name, value = (s.strip() for s in item.split(':'))
                attrs[name] = value
        result = cls(attrs, pos)
    elif start != -1 or stop != -1:
        # Exactly one brace present: malformed attribute block.
        log.error('Incorrect command attribute curly brace placement.', pos)
    return result
Parses a SeqCmdAttrs from a line of text and returns it or None . Warning and error messages are logged via the SeqMsgLog log .
42,765
def decode(cls, bytes, cmddict=None):
    """Decode a 3-byte delay: uint16 whole seconds plus uint8 fraction (x/255 s)."""
    seconds = struct.unpack('>H', bytes[0:2])[0]
    fraction = struct.unpack('B', bytes[2:3])[0]
    return cls(seconds + fraction / 255.0)
Decodes a sequence delay from an array of bytes according to the given command dictionary and returns a new SeqDelay .
42,766
def encode(self):
    """Encode this delay as uint16 whole seconds plus a uint8 scaled fraction."""
    whole = int(math.floor(self.delay))
    fraction = int((self.delay - whole) * 255.0)
    return struct.pack('>H', whole) + struct.pack('B', fraction)
Encodes this SeqDelay to a binary bytearray .
42,767
def parse(cls, line, lineno, log, cmddict=None):
    """Parse the leading numeric delay token from a sequence line.

    A non-numeric token logs an error and yields a delay of -1.
    """
    delay = -1
    token = line.split()[0]
    start = line.find(token)
    pos = SeqPos(line, lineno, start + 1, start + len(token))
    try:
        delay = float(token)
    except ValueError:
        msg = 'String "%s" could not be interpreted as a numeric time delay.'
        log.error(msg % token, pos)
    return cls(delay, pos)
Parses the SeqDelay from a line of text . Warning and error messages are logged via the SeqMsgLog log .
42,768
def parse(cls, line, lineno, log, cmddict=None):
    """Parse a '%'-prefixed meta-command from a line, or return None."""
    start = line.find('%')
    pos = SeqPos(line, lineno, start + 1, len(line))
    if start < 0:
        return None
    return cls(line[start:], pos)
Parses the SeqMetaCmd from a line of text . Warning and error messages are logged via the SeqMsgLog log .
42,769
def error(self, msg, pos=None):
    """Log `msg` as an error, tagged with the location of `pos`."""
    prefix = 'error: ' + self.location(pos)
    self.log(msg, prefix)
Logs an error message pertaining to the given SeqPos .
42,770
def log(self, msg, prefix=None):
    """Append `msg` to self.messages, applying an optional prefix.

    A separator is added unless the prefix already ends with a colon.
    """
    text = msg
    if prefix:
        separator = '' if prefix.strip().endswith(':') else ': '
        text = prefix + separator + msg
    self.messages.append(text)
Logs a message with an optional prefix .
42,771
def warning(self, msg, pos=None):
    """Log *msg* as a warning, prefixed with the location of *pos*."""
    prefix = 'warning: ' + self.location(pos)
    self.log(msg, prefix)
Logs a warning message pertaining to the given SeqPos .
42,772
def expandConfigPaths(config, prefix=None, datetime=None, pathvars=None,
                      parameter_key='', *keys):
    """Recursively expand, in place, every path-valued entry of *config*.

    Entries whose key appears in *keys* (defaulting to the module-level
    PATH_KEYS) are made absolute relative to *prefix* and have their
    ``${var}`` path variables and strftime directives substituted.  A
    warning is logged for any resulting path that does not exist.
    *parameter_key* accumulates the dotted config path for log messages.
    """
    if len(keys) == 0:
        keys = PATH_KEYS
    for name, value in config.items():
        if name in keys and type(name) is str:
            expanded = util.expandPath(value, prefix)
            cleaned = replaceVariables(expanded, datetime=datetime, pathvars=pathvars)
            # Warn (but do not fail) on paths that do not exist yet.
            for p in cleaned:
                if not os.path.exists(p):
                    msg = "Config parameter {}.{} specifies nonexistent path {}".format(parameter_key, name, p)
                    log.warn(msg)
            # A single result is stored as a scalar, multiple as a list.
            config[name] = cleaned[0] if len(cleaned) == 1 else cleaned
        elif type(value) is dict:
            # Recurse into nested config sections, extending the dotted key.
            param_key = name if parameter_key == '' else parameter_key + '.' + name
            expandConfigPaths(value, prefix, datetime, pathvars, param_key, *keys)
Updates all relative configuration paths in dictionary config which contain a key in keys by prepending prefix .
42,773
def replaceVariables(path, datetime=None, pathvars=None):
    """Return a list of paths with ``${var}`` variables and strftime
    directives replaced.

    *path* may be a single string or a list of strings.  Each ``${var}``
    whose name appears in *pathvars* is substituted; a list-valued variable
    fans out into one result path per value (a cross product when several
    list variables appear).  Finally every path is run through
    time.strftime with *datetime* (defaulting to the current UTC time).
    Raises TypeError for dict-valued path variables.
    """
    if datetime is None:
        datetime = time.gmtime()
    if pathvars is None:
        # NOTE(review): a list default means the `k in pathvars` test below
        # is always False when no pathvars are supplied, so no substitution
        # occurs -- presumably intentional.
        pathvars = []
    if isinstance(path, list):
        path_list = path
    else:
        path_list = [path]
    regex = re.compile('\$\{(.*?)\}')
    newpath_list = []
    for p in path_list:
        newpath_list.append(p)
        for k in regex.findall(p):
            if k in pathvars:
                v = pathvars[k]
                if type(v) is dict:
                    msg = "Path variable must refer to string, integer, or list"
                    raise TypeError(msg)
                value_list = v if type(v) is list else [v]
                # Expand the current result set by every value of this
                # variable (outer loop over values preserves value order).
                temp_list = []
                for v in value_list:
                    for newpath in newpath_list:
                        temp_list.append(newpath.replace('${%s}' % k, str(v)))
                newpath_list = temp_list
    # Apply strftime substitution to every expanded path.
    for index, newpath in enumerate(newpath_list):
        newpath_list[index] = time.strftime(newpath, datetime)
    return newpath_list
Return absolute path with path variables replaced as applicable
42,774
def flatten(d, *keys):
    """Merge the dictionaries stored under *keys* (popped from *d*) into a
    single new dict; later keys take precedence over earlier ones."""
    result = {}
    for key in keys:
        result = merge(result, d.pop(key, {}))
    return result
Flattens the dictionary d by merging keys in order such that later keys take precedence over earlier keys .
42,775
def loadYAML(filename=None, data=None):
    """Load YAML configuration from *filename* (preferred, when given) or
    directly from *data* (a string or open stream).

    Returns the parsed configuration, or None when nothing could be read.
    I/O errors are logged rather than raised.
    """
    config = None
    try:
        if filename:
            # BUGFIX: use a context manager so the file is closed even when
            # parsing raises (the original closed it only on success, and
            # its `type(data) is file` check is Python-2-only).
            with open(filename, 'rt') as stream:
                # NOTE(review): yaml.load without an explicit Loader is
                # unsafe on untrusted input; consider yaml.safe_load.
                config = yaml.load(stream)
        else:
            config = yaml.load(data)
    except IOError as e:
        msg = 'Could not read AIT configuration file "%s": %s'
        log.error(msg, filename, str(e))
    return config
Loads either the given YAML configuration file or YAML data .
42,776
def merge(d, o):
    """Recursively fold the entries of *o* into *d* and return *d*.

    Nested dicts present in both are merged in place; every other value in
    *o* overwrites the corresponding entry of *d*.
    """
    for key in o.keys():
        value = o[key]
        if type(value) is dict and key in d:
            merge(d[key], value)
        else:
            d[key] = value
    return d
Recursively merges keys from o into d and returns d .
42,777
def _directory ( self ) : if self . _filename is None : return os . path . join ( self . _ROOT_DIR , 'config' ) else : return os . path . dirname ( self . _filename )
The directory for this AitConfig .
42,778
def _datapaths ( self ) : paths = { } try : data = self . _config [ 'data' ] for k in data : paths [ k ] = data [ k ] [ 'path' ] except KeyError as e : raise AitConfigMissing ( e . message ) except Exception as e : raise AitConfigError ( 'Error reading data paths: %s' % e ) return paths
Returns a simple key - value map for easy access to data paths
42,779
def reload(self, filename=None, data=None):
    """Reload the AIT configuration from *filename* or raw *data*.

    With neither argument, re-reads the previously loaded file.  After
    loading, the platform/hostname-specific sections are flattened over the
    defaults and all configured paths are expanded in place.  On load
    failure the configuration becomes an empty dict.
    """
    if data is None and filename is None:
        filename = self._filename
    self._config = loadYAML(filename, data)
    self._filename = filename
    if self._config is not None:
        # Later keys win: host-specific settings override platform-specific
        # ones, which override the defaults.
        keys = 'default', self._platform, self._hostname
        self._config = flatten(self._config, *keys)
        if self._pathvars is None:
            self._pathvars = self.getDefaultPathVariables()
        # NOTE(review): merge() mutates self._config with the path
        # variables before path expansion -- confirm this side effect is
        # intentional rather than `merge(dict(...), ...)`.
        expandConfigPaths(self._config,
                          self._directory,
                          self._datetime,
                          merge(self._config, self._pathvars))
    else:
        self._config = {}
Reloads the AIT configuration .
42,780
def addPathVariables(self, pathvars):
    """Merge *pathvars* into this config's path-variable map.

    Arguments that are not plain dicts are silently ignored.
    """
    if type(pathvars) is dict:
        self._pathvars = merge(self._pathvars, pathvars)
Adds path variables to the pathvars map property
42,781
def _assertIndex ( self , index ) : if type ( index ) is not int : raise TypeError ( 'list indices must be integers' ) if index < 0 or index >= self . nelems : raise IndexError ( 'list index out of range' )
Raise TypeError or IndexError if index is not an integer or out of range for the number of elements in this array respectively .
42,782
def cmddict(self):
    """The command dictionary, loading the default dictionary on first
    access and caching it thereafter."""
    cached = self._cmddict
    if cached is None:
        cached = cmd.getDefaultDict()
        self._cmddict = cached
    return cached
Returns the command dictionary , creating the default dictionary on first access .
42,783
def evrs(self):
    """The EVR dictionary, loading the default dictionary on first access
    and caching it thereafter."""
    if self._evrs is None:
        # Imported lazily to avoid paying the load cost until needed.
        import ait.core.evr as evr
        self._evrs = evr.getDefaultDict()
    return self._evrs
Returns the EVR dictionary , creating the default dictionary on first access .
42,784
def load(self, ymlfile=None):
    """Load and process this processor's YAML file.

    When *ymlfile* is given it replaces self.ymlfile first.  In "clean"
    mode the file is preprocessed via self.process(); otherwise every YAML
    document in the file is appended to self.data.  Sets self.loaded on
    success and raises util.YAMLError on YAML scanner failures.
    """
    if ymlfile is not None:
        self.ymlfile = ymlfile
    try:
        if self._clean:
            self.data = self.process(self.ymlfile)
        else:
            # NOTE(review): yaml.load_all without an explicit Loader is
            # unsafe on untrusted input; consider yaml.safe_load_all.
            with open(self.ymlfile, 'rb') as stream:
                for document in yaml.load_all(stream):
                    self.data.append(document)
        self.loaded = True
    except ScannerError as e:  # BUGFIX: `except X, e` is invalid in Python 3
        # (also fixes the "formattting" typo in the original message)
        msg = "YAML formatting error - '" + self.ymlfile + ": '" + str(e) + "'"
        raise util.YAMLError(msg)
Load and process the YAML file
42,785
def process(self, ymlfile):
    """Strip YAML document and sequence tags from *ymlfile* so the result
    is JSON-Schema friendly, recording document start lines in
    self.doclines.

    Returns the cleaned YAML text.  Raises util.YAMLError for an empty
    file and IOError when the file cannot be read.
    """
    output = ""
    # BUGFIX-adjacent: compile the patterns once instead of once per line
    # (behavior unchanged, removes per-line recompilation overhead).
    doc_pattern = re.compile('(---) (![a-z]+)(.*$)', flags=re.I)
    seq_pattern = re.compile('(\s*)(-+) !([a-z]+)(.*$)', flags=re.I)
    try:
        self.doclines = []
        linenum = None
        with open(ymlfile, 'r') as txt:
            for linenum, line in enumerate(txt):
                if doc_pattern.match(line):
                    # Drop the document tag; remember where this doc starts.
                    line = doc_pattern.sub(r"---", line).lower()
                    self.doclines.append(linenum)
                elif seq_pattern.match(line):
                    # Rewrite "- !tag ..." into "- tag: line N ..." so the
                    # tag survives as plain data.
                    line = seq_pattern.sub(r"\1\2 \3: line " + str(linenum), line).lower()
                output = output + line
        if linenum is None:
            msg = "Empty YAML file: " + ymlfile
            raise util.YAMLError(msg)
        else:
            # Sentinel entry one past the last line closes the final doc.
            self.doclines.append(linenum + 1)
        return output
    except IOError as e:  # BUGFIX: `except X, e` is invalid in Python 3
        msg = "Could not process YAML file '" + ymlfile + "': '" + str(e) + "'"
        raise IOError(msg)
Cleans out all document tags from the YAML file to make it JSON - friendly to work with the JSON Schema .
42,786
def load(self, schemafile=None):
    """Load and parse this processor's JSON schema file.

    When *schemafile* is given it replaces self._schemafile first.  Sets
    self.data to the parsed schema and self.loaded to True.  Raises
    jsonschema.SchemaError when the file is missing or not valid JSON.
    """
    if schemafile is not None:
        self._schemafile = schemafile
    try:
        # BUGFIX: json.load(open(...)) leaked the file handle; a context
        # manager closes it deterministically.
        with open(self._schemafile) as stream:
            self.data = json.load(stream)
    except (IOError, ValueError) as e:  # BUGFIX: Py3-compatible except syntax
        msg = "Could not load schema file '" + self._schemafile + "': '" + str(e) + "'"
        raise jsonschema.SchemaError(msg)
    self.loaded = True
Load and process the schema file
42,787
def schema_val(self, messages=None):
    """Validate the processed YAML file against the JSON schema.

    Each YAML document is converted to plain JSON-compatible data and run
    through a Draft-4 validator; every violation is forwarded to the error
    handler (which may append to *messages*).  Returns True when all
    documents validate.  Raises util.YAMLError / jsonschema.SchemaError
    when the YAML or schema could not be loaded.
    """
    self._ymlproc = YAMLProcessor(self._ymlfile)
    self._schemaproc = SchemaProcessor(self._schemafile)
    valid = True
    log.debug("BEGIN: Schema-based validation for YAML '%s' with schema '%s'",
              self._ymlfile, self._schemafile)
    if self._ymlproc.loaded and self._schemaproc.loaded:
        for docnum, data in enumerate(yaml.load_all(self._ymlproc.data)):
            # Round-trip through JSON to normalize YAML-only types for the
            # JSON Schema validator.
            data = yaml.load(json.dumps(data))
            v = jsonschema.Draft4Validator(self._schemaproc.data)
            for error in sorted(v.iter_errors(data)):
                msg = "Schema-based validation failed for YAML file '" + self._ymlfile + "'"
                self.ehandler.process(docnum, self._ymlproc.doclines, error, messages)
                valid = False
        # `msg` is only bound when at least one error occurred, which is
        # exactly when valid is False.
        if not valid:
            log.error(msg)
    elif not self._ymlproc.loaded:
        raise util.YAMLError("YAML must be loaded in order to validate.")
    elif not self._schemaproc.loaded:
        raise jsonschema.SchemaError("Schema must be loaded in order to validate.")
    log.debug("END: Schema-based validation complete for '%s'", self._ymlfile)
    return valid
Perform validation with processed YAML and Schema
42,788
def content_val(self, ymldata=None, messages=None):
    """Validate the command dictionary contents.

    Checks command-level uniqueness (name, opcode) and, per command,
    argument name uniqueness plus argument type, size, enum, and byte-order
    rules.  Violations are appended to *messages* by the rule objects.
    Returns True only when every rule passed; returns False after
    forwarding a util.YAMLValidationError to the error handler.
    """
    self._ymlproc = YAMLProcessor(self._ymlfile, False)
    log.debug("BEGIN: Content-based validation of Command dictionary")
    # Prefer explicitly supplied data; fall back to the processed YAML.
    if ymldata is not None:
        cmddict = ymldata
    elif ymldata is None and self._ymlproc.loaded:
        cmddict = self._ymlproc.data
    elif not self._ymlproc.loaded:
        raise util.YAMLError("YAML failed to load.")
    try:
        docnum = 0
        argsvalid = True
        # Dictionary-wide rules (stateful: they remember seen values).
        rules = []
        rules.append(UniquenessRule('name', "Duplicate command name: %s", messages))
        rules.append(UniquenessRule('opcode', "Duplicate opcode: %s", messages))
        for cmdcnt, cmddefn in enumerate(cmddict[0]):
            for rule in rules:
                rule.check(cmddefn)
            # Per-command argument rules (fresh for every command).
            argrules = []
            argrules.append(UniquenessRule('name', "Duplicate argument name: " + cmddefn.name + ".%s", messages))
            argrules.append(TypeRule('type', "Invalid argument type for argument: " + cmddefn.name + ".%s", messages))
            argrules.append(TypeSizeRule('nbytes', "Invalid argument size for argument: " + cmddefn.name + ".%s", messages))
            argrules.append(EnumRule('enum', "Invalid enum value for argument: " + cmddefn.name + ".%s", messages))
            argrules.append(ByteOrderRule('bytes', "Invalid byte order for argument: " + cmddefn.name + ".%s", messages))
            argdefns = cmddefn.argdefns
            for arg in argdefns:
                for rule in argrules:
                    rule.check(arg)
            if not all(r.valid is True for r in argrules):
                argsvalid = False
        log.debug("END: Content-based validation complete for '%s'", self._ymlfile)
        return all(rule.valid is True for rule in rules) and argsvalid
    except util.YAMLValidationError, e:
        if messages is not None:
            # Keep log lines readable: only include short exception text.
            if len(e.message) < 128:
                msg = "Validation Failed for YAML file '" + self._ymlfile + "': '" + str(e.message) + "'"
            else:
                msg = "Validation Failed for YAML file '" + self._ymlfile + "'"
            log.error(msg)
            self.ehandler.process(docnum, self.ehandler.doclines, e, messages)
        return False
Validates the Command Dictionary to ensure the contents for each of the fields meets specific criteria regarding the expected types byte ranges etc .
42,789
def validate(self, ymldata=None, messages=None):
    """Validate the dictionary: schema first, then contents.

    Content validation runs only when schema validation produced no
    messages.  Returns True only when every stage that ran passed.
    """
    schema_valid = self.schema_val(messages)
    content_valid = True
    # BUGFIX: the original read an unbound local (`content_val`) when
    # messages was non-empty and schema_val returned True, and crashed on
    # len(None) when messages was omitted.
    if messages is not None and len(messages) == 0:
        content_valid = self.content_val(ymldata, messages)
    return schema_valid and content_valid
Validates the Telemetry Dictionary definitions
42,790
def content_val(self, ymldata=None, messages=None):
    """Validate the telemetry dictionary contents.

    Checks packet name uniqueness and, per packet, field name uniqueness
    plus field type, size, and enum rules.  Violations are appended to
    *messages* by the rule objects.  Returns True only when every rule
    passed; returns False after forwarding a util.YAMLValidationError to
    the error handler.
    """
    log.debug("BEGIN: Content-based validation of Telemetry dictionary")
    if ymldata is not None:
        tlmdict = ymldata
    else:
        tlmdict = tlm.TlmDict(self._ymlfile)
    try:
        docnum = 0
        fldsvalid = True
        # Dictionary-wide rules (stateful: they remember seen values).
        rules = []
        rules.append(UniquenessRule('name', "Duplicate packet name: %s", messages))
        for key in tlmdict.keys():
            pktdefn = tlmdict[key]
            for rule in rules:
                rule.check(pktdefn)
            # Per-packet field rules (fresh for every packet).
            fldrules = []
            fldrules.append(UniquenessRule('name', "Duplicate field name: " + pktdefn.name + ".%s", messages))
            fldrules.append(TypeRule('type', "Invalid field type for field: " + pktdefn.name + ".%s", messages))
            fldrules.append(TypeSizeRule('nbytes', "Invalid field size for field: " + pktdefn.name + ".%s", messages))
            fldrules.append(EnumRule('enum', "Invalid enum value for field: " + pktdefn.name + ".%s", messages))
            flddefns = pktdefn.fields
            for fld in flddefns:
                for rule in fldrules:
                    rule.check(fld)
            if not all(r.valid is True for r in fldrules):
                fldsvalid = False
        log.debug("END: Content-based validation complete for '%s'", self._ymlfile)
        return all(rule.valid is True for rule in rules) and fldsvalid
    except util.YAMLValidationError, e:
        if messages is not None:
            if len(e.message) < 128:
                msg = "Validation Failed for YAML file '" + self._ymlfile + "': '" + str(e.message) + "'"
            else:
                msg = "Validation Failed for YAML file '" + self._ymlfile + "'"
            log.error(msg)
            # NOTE(review): unlike the command-dictionary version, this call
            # omits the docnum argument -- confirm ehandler.process accepts
            # this arity.
            self.ehandler.process(self.ehandler.doclines, e, messages)
        return False
Validates the Telemetry Dictionary to ensure the contents for each of the fields meets specific criteria regarding the expected types byte ranges etc .
42,791
def check(self, defn):
    """Check this rule's attribute on *defn* for uniqueness.

    A previously-seen non-None value marks the rule invalid and records a
    message; an unseen value is remembered for later checks.
    """
    value = getattr(defn, self.attr)
    if value is not None:
        if value in self.val_list:
            self.messages.append(self.msg % str(value))
            self.valid = False
        else:
            self.val_list.append(value)
    log.debug(self.val_list)
Performs the uniqueness check against the value list maintained in this rule object .
42,792
def check(self, defn):
    """Mark this rule invalid unless *defn*'s type is a primitive or array
    type, recording a message naming the offending definition."""
    if not isinstance(defn.type, (dtype.PrimitiveType, dtype.ArrayType)):
        self.messages.append(self.msg % str(defn.name))
        self.valid = False
Performs isinstance check for the definitions data type .
42,793
def check(self, defn, msg=None):
    """Verify that the byte span declared for *defn* matches its primitive
    type's size; definitions with non-primitive types are ignored."""
    if not isinstance(defn.type, dtype.PrimitiveType):
        return
    expected = defn.type.nbytes
    span = defn.slice()
    actual = span.stop - span.start
    if expected != actual:
        self.messages.append(self.msg % defn.name)
        self.messages.append("Definition size of (" + str(actual) +
                             " bytes) does not match size of data" +
                             " type " + str(defn.type.name) + " (" +
                             str(expected) + " byte(s))")
        self.valid = False
Uses the byte range in the object definition to determine the number of bytes and compares to the size defined in the type .
42,794
def _pop(self, block=True, timeout=None, left=False):
    """Remove and return an item from this GeventDeque.

    Pops from the left end when *left* is True, otherwise from the right.
    When *block* is False an empty deque raises IndexError immediately;
    otherwise the call waits on the notEmpty event, raising IndexError via
    a gevent.Timeout after *timeout* seconds (wait forever when timeout is
    None).  The notEmpty event is cleared whenever the deque is left empty.
    """
    item = None
    timer = None
    deque = self._deque
    empty = IndexError('pop from an empty deque')
    if block is False:
        if len(self._deque) > 0:
            item = deque.popleft() if left else deque.pop()
        else:
            raise empty
    else:
        try:
            if timeout is not None:
                # The timer raises `empty` in this greenlet on expiry.
                timer = gevent.Timeout(timeout, empty)
                timer.start()
            # Loop: another consumer may win the race after notEmpty fires.
            while True:
                self.notEmpty.wait()
                if len(deque) > 0:
                    item = deque.popleft() if left else deque.pop()
                    break
        finally:
            if timer is not None:
                timer.cancel()
    # Keep the event state consistent with the deque contents.
    if len(deque) == 0:
        self.notEmpty.clear()
    return item
Removes and returns an item from this GeventDeque .
42,795
def append(self, item):
    """Append *item* on the right side and signal waiting consumers."""
    self._deque.append(item)
    self.notEmpty.set()
Add item to the right side of the GeventDeque .
42,796
def appendleft(self, item):
    """Append *item* on the left side and signal waiting consumers."""
    self._deque.appendleft(item)
    self.notEmpty.set()
Add item to the left side of the GeventDeque .
42,797
def extend(self, iterable):
    """Append every element of *iterable* on the right side; signal
    waiting consumers when the deque ends up non-empty."""
    self._deque.extend(iterable)
    if self._deque:
        self.notEmpty.set()
Extend the right side of this GeventDeque by appending elements from the iterable argument .
42,798
def extendleft(self, iterable):
    """Append every element of *iterable* on the left side; signal waiting
    consumers when the deque ends up non-empty.

    Note that successive left-appends reverse the order of *iterable*.
    """
    self._deque.extendleft(iterable)
    if self._deque:
        self.notEmpty.set()
Extend the left side of this GeventDeque by appending elements from the iterable argument . Note the series of left appends results in reversing the order of elements in the iterable argument .
42,799
def popleft(self, block=True, timeout=None):
    """Remove and return an item from the left side of the GeventDeque.

    When *block* is True, wait up to *timeout* seconds for an item to
    arrive; raises IndexError when the deque is (or stays) empty.
    """
    return self._pop(block=block, timeout=timeout, left=True)
Remove and return an item from the left side of the GeventDeque . If no elements are present raises an IndexError .