idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
16,800
def add(self, revision, operators):
    """Add a revision to the index and return the new IndexedRevision."""
    operator_names = [operator.name for operator in operators]
    indexed = IndexedRevision(revision=revision, operators=operator_names)
    self._revisions[revision.key] = indexed
    return indexed
Add a revision to the index .
16,801
def save(self):
    """Persist the index data back to the wily cache."""
    serialized = [revision.asdict() for revision in self._revisions.values()]
    logger.debug("Saving data")
    cache.store_archiver_index(self.config, self.archiver, serialized)
Save the index data back to the wily cache .
16,802
def ensure_exists(self):
    """Ensure the cache directory exists, creating it when missing."""
    if cache.exists(self.config):
        logger.debug(f"Cache {self.config.cache_path} exists")
        return
    logger.debug("Wily cache not found, creating.")
    cache.create(self.config)
    logger.debug("Created wily cache")
Ensure that cache directory exists .
16,803
def add_version(f):
    """Prepend the wily version to *f*'s help text and return *f*."""
    original_doc = f.__doc__
    f.__doc__ = "Version: " + __version__ + "\n\n" + original_doc
    return f
Add the version of wily to the help heading .
16,804
def resolve_metric_as_tuple(metric):
    """Resolve a metric key (optionally "operator.metric") to an (operator, metric) pair.

    :raises ValueError: if the metric name is not in ALL_METRICS.
    """
    if "." in metric:
        _, metric = metric.split(".")
    matches = [
        (operator, match) for operator, match in ALL_METRICS if match[0] == metric
    ]
    if not matches:
        raise ValueError(f"Metric {metric} not recognised.")
    return matches[0]
Resolve metric key to a given target .
16,805
def get_metric(revision, operator, path, key):
    """Fetch a metric value from a cached revision mapping.

    ``path`` may be ``"part:entry"`` to address a nested entry.
    """
    node = revision[operator]
    if ":" in path:
        part, entry = path.split(":")
        node = node[part][entry]
    else:
        node = node[path]
    return node[key]
Get a metric from the cache .
16,806
def create_index(config):
    """Write the root index file into the cache directory."""
    index = {"version": __version__}
    target = pathlib.Path(config.cache_path) / "index.json"
    with open(target, "w") as out:
        out.write(json.dumps(index, indent=2))
Create the root index .
16,807
def create(config):
    """Create a wily cache (directory plus root index) and return its path.

    An existing cache is reused untouched.
    """
    cache_path = config.cache_path
    if exists(config):
        logger.debug("Wily cache exists, skipping")
    else:
        logger.debug(f"Creating wily cache {config.cache_path}")
        pathlib.Path(cache_path).mkdir(parents=True, exist_ok=True)
        create_index(config)
    return cache_path
Create a wily cache .
16,808
def clean(config):
    """Delete the wily cache directory if it is present."""
    if exists(config):
        shutil.rmtree(config.cache_path)
        logger.debug("Deleted wily cache")
    else:
        logger.debug("Wily cache does not exist, skipping")
Delete a wily cache .
16,809
def store(config, archiver, revision, stats):
    """Store a revision record within an archiver folder.

    When the scanned path is not the current directory, operator-data keys
    are rewritten relative to ``config.path`` so the cache is portable.

    :raises RuntimeError: if the revision file already exists (index may be
        corrupt).
    :return: the path of the JSON file written.
    """
    root = pathlib.Path(config.cache_path) / archiver.name
    if not root.exists():
        logger.debug("Creating wily cache")
        root.mkdir()
    if config.path != ".":
        # Re-key every operator's data with paths relative to config.path.
        for operator, operator_data in list(stats["operator_data"].items()):
            if operator_data:
                new_operator_data = operator_data.copy()
                for k, v in list(operator_data.items()):
                    new_key = os.path.relpath(str(k), str(config.path))
                    del new_operator_data[k]
                    new_operator_data[new_key] = v
                del stats["operator_data"][operator]
                stats["operator_data"][operator] = new_operator_data
    logger.debug(f"Creating {revision.key} output")
    filename = root / (revision.key + ".json")
    if filename.exists():
        # BUG FIX: the message previously read "File (unknown)" — it lost the
        # {filename} placeholder identifying the offending file.
        raise RuntimeError(f"File {filename} already exists, index may be corrupt.")
    with open(filename, "w") as out:
        out.write(json.dumps(stats, indent=2))
    return filename
Store a revision record within an archiver folder .
16,810
def store_archiver_index(config, archiver, index):
    """Store an archiver's index record, sorted newest-first, for faster search.

    :return: the path of the index file written.
    """
    root = pathlib.Path(config.cache_path) / archiver.name
    if not root.exists():
        root.mkdir()
        logger.debug("Created archiver directory")
    index = sorted(index, key=lambda k: k["date"], reverse=True)
    filename = root / "index.json"
    with open(filename, "w") as out:
        out.write(json.dumps(index, indent=2))
    # Was an f-string with no placeholder; plain string is equivalent.
    logger.debug("Created index output")
    return filename
Store an archiver's index record for faster search.
16,811
def list_archivers(config):
    """Return the names of archivers that have data in the cache."""
    root = pathlib.Path(config.cache_path)
    return [name for name in ALL_ARCHIVERS.keys() if (root / name).exists()]
List the names of archivers with data .
16,812
def get_default_metrics(config):
    """Return the default metric names ("operator.metric") for a configuration.

    Reads the newest index record of every archiver with data; returns an
    empty list (with a warning) if any archiver's index has no records.
    """
    default_metrics = []
    for archiver in list_archivers(config):
        index = get_archiver_index(config, archiver)
        if len(index) == 0:
            logger.warning("No records found in the index, no metrics available")
            return []
        for operator in index[0]["operators"]:
            resolved = resolve_operator(operator)
            if resolved.cls.default_metric_index is not None:
                metric = resolved.cls.metrics[resolved.cls.default_metric_index]
                default_metrics.append(
                    "{0}.{1}".format(resolved.cls.name, metric.name)
                )
    return default_metrics
Get the default metrics for a configuration .
16,813
def has_archiver_index(config, archiver):
    """Return True if this archiver has an index file in the cache."""
    index_path = pathlib.Path(config.cache_path) / archiver / "index.json"
    return index_path.exists()
Check if this archiver has an index file .
16,814
def get_archiver_index(config, archiver):
    """Load and return the contents of the archiver's index file."""
    index_path = pathlib.Path(config.cache_path) / archiver / "index.json"
    with index_path.open("r") as index_f:
        return json.load(index_f)
Get the contents of the archiver index file .
16,815
def get(config, archiver, revision):
    """Load and return the stored data for a given revision."""
    root = pathlib.Path(config.cache_path) / archiver
    with (root / f"{revision}.json").open("r") as rev_f:
        return json.load(rev_f)
Get the data for a given revision .
16,816
def mode(data):
    """Return the modal (most common) value of an iterable of discrete values.

    If there is more than one modal value, arbitrarily return the first one
    produced by ``Counter.most_common``.

    :raises IndexError: if *data* is empty.
    """
    # The frequency returned alongside the value was unused in the original.
    return Counter(data).most_common(1)[0][0]
Return the modal value of an iterable with discrete values. If there is more than one modal value, arbitrarily return the first.
16,817
def load(fp, encode_nominal=False, return_type=DENSE):
    """Load an ARFF document from a file-like object into a Python object."""
    return ArffDecoder().decode(
        fp, encode_nominal=encode_nominal, return_type=return_type
    )
Load a file - like object containing the ARFF document and convert it into a Python object .
16,818
def loads(s, encode_nominal=False, return_type=DENSE):
    """Convert a string containing an ARFF document into a Python object."""
    return ArffDecoder().decode(
        s, encode_nominal=encode_nominal, return_type=return_type
    )
Convert a string instance containing the ARFF document into a Python object .
16,819
def dump(obj, fp):
    """Serialize an ARFF document object to a file-like object and return it.

    Rows are newline-joined; the final row is written without a trailing
    newline.
    """
    rows = ArffEncoder().iter_encode(obj)
    pending = next(rows)
    for row in rows:
        fp.write(pending + u'\n')
        pending = row
    fp.write(pending)
    return fp
Serialize an object representing the ARFF document to a given file - like object .
16,820
# Decode an ARFF document into {'description', 'relation', 'attributes', 'data'}.
# A state machine driven by section tokens (_TK_*): comments accumulate into
# the description until @RELATION, attributes must follow the relation, and
# @DATA terminates the header scan. Each attribute registers a conversor
# (nominal, possibly label-encoded, or a numeric/string cast) used later by
# decode_rows. Duplicate attribute names raise BadAttributeName; out-of-order
# sections raise BadLayout. Data rows are streamed lazily via stream(),
# skipping blanks and comments, with _current_line tracked for error reports.
# NOTE(review): source is flattened/tokenized text, left byte-identical.
def _decode ( self , s , encode_nominal = False , matrix_type = DENSE ) : self . _current_line = 0 if isinstance ( s , basestring ) : s = s . strip ( '\r\n ' ) . replace ( '\r\n' , '\n' ) . split ( '\n' ) obj = { u'description' : u'' , u'relation' : u'' , u'attributes' : [ ] , u'data' : [ ] } attribute_names = { } data = _get_data_object_for_decoding ( matrix_type ) STATE = _TK_DESCRIPTION s = iter ( s ) for row in s : self . _current_line += 1 row = row . strip ( ' \r\n' ) if not row : continue u_row = row . upper ( ) if u_row . startswith ( _TK_DESCRIPTION ) and STATE == _TK_DESCRIPTION : obj [ 'description' ] += self . _decode_comment ( row ) + '\n' elif u_row . startswith ( _TK_RELATION ) : if STATE != _TK_DESCRIPTION : raise BadLayout ( ) STATE = _TK_RELATION obj [ 'relation' ] = self . _decode_relation ( row ) elif u_row . startswith ( _TK_ATTRIBUTE ) : if STATE != _TK_RELATION and STATE != _TK_ATTRIBUTE : raise BadLayout ( ) STATE = _TK_ATTRIBUTE attr = self . _decode_attribute ( row ) if attr [ 0 ] in attribute_names : raise BadAttributeName ( attr [ 0 ] , attribute_names [ attr [ 0 ] ] ) else : attribute_names [ attr [ 0 ] ] = self . _current_line obj [ 'attributes' ] . append ( attr ) if isinstance ( attr [ 1 ] , ( list , tuple ) ) : if encode_nominal : conversor = EncodedNominalConversor ( attr [ 1 ] ) else : conversor = NominalConversor ( attr [ 1 ] ) else : CONVERSOR_MAP = { 'STRING' : unicode , 'INTEGER' : lambda x : int ( float ( x ) ) , 'NUMERIC' : float , 'REAL' : float } conversor = CONVERSOR_MAP [ attr [ 1 ] ] self . _conversors . append ( conversor ) elif u_row . startswith ( _TK_DATA ) : if STATE != _TK_ATTRIBUTE : raise BadLayout ( ) break elif u_row . startswith ( _TK_COMMENT ) : pass else : raise BadLayout ( ) def stream ( ) : for row in s : self . _current_line += 1 row = row . strip ( ) if row and not row . startswith ( _TK_COMMENT ) : yield row obj [ 'data' ] = data . decode_rows ( stream ( ) , self . 
_conversors ) if obj [ 'description' ] . endswith ( '\n' ) : obj [ 'description' ] = obj [ 'description' ] [ : - 1 ] return obj
Decode an ARFF document (the inverse of encode).
16,821
def decode(self, s, encode_nominal=False, return_type=DENSE):
    """Return the Python representation of a given ARFF document.

    Any ArffException is annotated with the line number where it occurred.
    """
    try:
        return self._decode(
            s, encode_nominal=encode_nominal, matrix_type=return_type
        )
    except ArffException as e:
        e.line = self._current_line
        raise e
Returns the Python representation of a given ARFF file .
16,822
def encode(self, obj):
    """Encode *obj* as the complete text of an ARFF file."""
    return u'\n'.join(row for row in self.iter_encode(obj))
Encodes a given object to an ARFF file .
16,823
def iter_encode(self, obj):
    """The iterative (generator) version of ``arff.ArffEncoder.encode``.

    Yields the document line by line: description comments, the relation,
    attribute declarations, then the data section.

    :raises BadObject: on a missing relation, malformed or duplicate
        attributes, or an invalid attribute type.
    """
    if obj.get('description', None):
        for row in obj['description'].split('\n'):
            yield self._encode_comment(row)

    if not obj.get('relation'):
        raise BadObject('Relation name not found or with invalid value.')
    yield self._encode_relation(obj['relation'])
    yield u''

    if not obj.get('attributes'):
        raise BadObject('Attributes not found.')
    attribute_names = set()
    for attr in obj['attributes']:
        # Each attribute must be a (name, type) pair with a string name.
        if not isinstance(attr, (tuple, list)) or len(attr) != 2 or not isinstance(attr[0], basestring):
            raise BadObject('Invalid attribute declaration "%s"' % str(attr))
        if isinstance(attr[1], basestring):
            if attr[1] not in _SIMPLE_TYPES:
                raise BadObject('Invalid attribute type "%s"' % str(attr))
        elif not isinstance(attr[1], (tuple, list)):
            raise BadObject('Invalid attribute type "%s"' % str(attr))
        if attr[0] in attribute_names:
            raise BadObject('Trying to use attribute name "%s" for the '
                            'second time.' % str(attr[0]))
        else:
            attribute_names.add(attr[0])
        yield self._encode_attribute(attr[0], attr[1])
    yield u''
    attributes = obj['attributes']

    yield _TK_DATA
    if 'data' in obj:
        data = _get_data_object_for_encoding(obj.get('data'))
        for line in data.encode_data(obj.get('data'), attributes):
            yield line
    yield u''
The iterative version of arff . ArffEncoder . encode .
16,824
def begin(self):
    """Initialize the library; must be called once before other functions.

    :raises RuntimeError: if ws2811_init reports a non-zero code.
    """
    code = ws.ws2811_init(self._leds)
    if code != 0:
        message = ws.ws2811_get_return_t_str(code)
        raise RuntimeError('ws2811_init failed with code {0} ({1})'.format(code, message))
Initialize library must be called once before other functions are called .
16,825
def show(self):
    """Update the display with the data from the LED buffer.

    :raises RuntimeError: if ws2811_render reports a non-zero code.
    """
    code = ws.ws2811_render(self._leds)
    if code != 0:
        message = ws.ws2811_get_return_t_str(code)
        raise RuntimeError('ws2811_render failed with code {0} ({1})'.format(code, message))
Update the display with the data from the LED buffer .
16,826
def startService(self):
    """Start the service, spawn the writer thread, and register as a destination."""
    Service.startService(self)
    writer_thread = threading.Thread(target=self._writer)
    self._thread = writer_thread
    writer_thread.start()
    addDestination(self)
Start the writer thread .
16,827
def stopService(self):
    """Stop the writer thread and wait for it to finish."""
    Service.stopService(self)
    removeDestination(self)
    # Ask the private reactor to stop from its own thread, then join the
    # writer thread via the main reactor's thread pool.
    self._reactor.callFromThread(self._reactor.stop)
    return deferToThreadPool(
        self._mainReactor, self._mainReactor.getThreadPool(), self._thread.join
    )
Stop the writer thread and wait for it to finish.
16,828
def write(self, logger=None, action=None):
    """Write this message to *logger* (default: the global default logger)."""
    destination = logger if logger is not None else _output._DEFAULT_LOGGER
    destination.write(self._freeze(action=action), self._serializer)
Write the message to the given logger .
16,829
# Add Eliot logging to a Dask graph: every callable task is wrapped in a
# _RunWithEliotContext carrying a fresh serialized task id from the current
# action, a human-readable key name, and the names of its dependencies.
# Non-callable entries (aliases/raw values) are copied through unchanged;
# alias keys reuse the name of the value they point to. The rebuilt graph
# keeps exactly the original key set (asserted at the end).
# NOTE(review): `ignore` is accepted but unused here — presumably for API
# compatibility; confirm against callers. Source is flattened/tokenized
# text, left byte-identical.
def _add_logging ( dsk , ignore = None ) : ctx = current_action ( ) result = { } keys = toposort ( dsk ) def simplify ( k ) : if isinstance ( k , str ) : return k return "-" . join ( str ( o ) for o in k ) key_names = { } for key in keys : value = dsk [ key ] if not callable ( value ) and value in keys : key_names [ key ] = key_names [ value ] else : key_names [ key ] = simplify ( key ) key_to_action_id = { key : str ( ctx . serialize_task_id ( ) , "utf-8" ) for key in keys } for key in keys : func = dsk [ key ] [ 0 ] args = dsk [ key ] [ 1 : ] if not callable ( func ) : result [ key ] = dsk [ key ] continue wrapped_func = _RunWithEliotContext ( task_id = key_to_action_id [ key ] , func = func , key = key_names [ key ] , dependencies = [ key_names [ k ] for k in get_dependencies ( dsk , key ) ] , ) result [ key ] = ( wrapped_func , ) + tuple ( args ) assert result . keys ( ) == dsk . keys ( ) return result
Add logging to a Dask graph .
16,830
# Decorate a generator function so the Eliot action context is preserved
# across yield expressions: the wrapper captures a copy_context() at start
# and drives the inner generator entirely inside context.run(), forwarding
# send()/throw() from the caller. `ok`/`value_in` carry either the value to
# send or the exc_info to re-throw into the inner generator; StopIteration
# from the inner generator ends the loop. When wrapper.debug is set, a
# "yielded" message is logged at each yield. The trampoline structure is
# order-sensitive, so the code is left byte-identical (flattened/tokenized
# source).
def eliot_friendly_generator_function ( original ) : @ wraps ( original ) def wrapper ( * a , ** kw ) : ok = True value_in = None gen = original ( * a , ** kw ) context = copy_context ( ) while True : try : def go ( ) : if ok : value_out = gen . send ( value_in ) else : value_out = gen . throw ( * value_in ) if wrapper . debug : Message . log ( message_type = u"yielded" ) return value_out value_out = context . run ( go ) except StopIteration : break else : try : value_in = yield value_out except : ok = False value_in = exc_info ( ) else : ok = True wrapper . debug = False return wrapper
Decorate a generator function so that the Eliot action context is preserved across yield expressions .
16,831
def sd_journal_send(**kwargs):
    """Send a structured message to the journald log.

    :raises IOError: if the underlying sd_journal_send call fails.
    """
    fields = []
    for key, value in kwargs.items():
        # journald treats '%' as a printf escape, so double it.
        escaped = value.replace(b"%", b"%%")
        fields.append(_ffi.new("char[]", key.encode("ascii") + b'=' + escaped))
    fields.append(_ffi.NULL)
    result = _journald.sd_journal_send(*fields)
    if result != 0:
        raise IOError(-result, strerror(-result))
Send a message to the journald log .
16,832
def inline_callbacks(original, debug=False):
    """Like Twisted's inlineCallbacks, but Eliot action contexts survive yields."""
    wrapped = eliot_friendly_generator_function(original)
    if debug:
        wrapped.debug = True
    return inlineCallbacks(wrapped)
Decorate a function like inlineCallbacks would but in a more Eliot - friendly way . Use it just like inlineCallbacks but where you want Eliot action contexts to Do The Right Thing inside the decorated function .
16,833
def addCallbacks(self, callback, errback=None,
                 callbackArgs=None, callbackKeywords=None,
                 errbackArgs=None, errbackKeywords=None):
    """Add a callback/errback pair that run inside the Eliot action's context.

    :raises AlreadyFinished: if addActionFinish has already been called.
    :return: self, for chaining.
    """
    if self._finishAdded:
        raise AlreadyFinished()
    if errback is None:
        errback = _passthrough

    def in_context(fn):
        # Run fn through self._action so the Eliot context is active.
        def bound(*args, **kwargs):
            return self._action.run(fn, *args, **kwargs)
        return bound

    self.result.addCallbacks(
        in_context(callback), in_context(errback),
        callbackArgs, callbackKeywords, errbackArgs, errbackKeywords,
    )
    return self
Add a pair of callbacks that will be run in the context of an eliot action .
16,834
def addCallback(self, callback, *args, **kw):
    """Add a success callback that runs in the Eliot action's context."""
    return self.addCallbacks(
        callback, _passthrough, callbackArgs=args, callbackKeywords=kw
    )
Add a success callback that will be run in the context of an eliot action .
16,835
def addErrback(self, errback, *args, **kw):
    """Add a failure callback that runs in the Eliot action's context."""
    return self.addCallbacks(
        _passthrough, errback, errbackArgs=args, errbackKeywords=kw
    )
Add a failure callback that will be run in the context of an eliot action .
16,836
def addBoth(self, callback, *args, **kw):
    """Add one callback as both the success and failure handler."""
    return self.addCallbacks(
        callback, callback,
        callbackArgs=args, callbackKeywords=kw,
        errbackArgs=args, errbackKeywords=kw,
    )
Add a single callback as both success and failure callbacks .
16,837
def addActionFinish(self):
    """Finish the action once all previously-added callbacks have fired.

    :raises AlreadyFinished: if called more than once.
    :return: the underlying Deferred.
    """
    if self._finishAdded:
        raise AlreadyFinished()
    self._finishAdded = True

    def finish_action(result):
        # A Failure carries the exception; success finishes with None.
        exception = result.value if isinstance(result, Failure) else None
        self._action.finish(exception)
        return result

    self.result.addBoth(finish_action)
    return self.result
Indicates all callbacks that should run within the action s context have been added and that the action should therefore finish once those callbacks have fired .
16,838
def _loads ( s ) : if isinstance ( s , bytes ) : s = s . decode ( "utf-8" ) return pyjson . loads ( s )
Support decoding bytes .
16,839
def _dumps ( obj , cls = pyjson . JSONEncoder ) : class WithBytes ( cls ) : def default ( self , o ) : if isinstance ( o , bytes ) : warnings . warn ( "Eliot will soon stop supporting encoding bytes in JSON" " on Python 3" , DeprecationWarning ) return o . decode ( "utf-8" ) return cls . default ( self , o ) return pyjson . dumps ( obj , cls = WithBytes ) . encode ( "utf-8" )
Encode to bytes and presume bytes in inputs are UTF - 8 encoded strings .
16,840
def load_module(name, original_module):
    """Load a fresh copy of *original_module* under *name*.

    The returned module object is distinct from what importing the module
    directly would give you.

    :raises NotImplementedError: on frozen (PyInstaller) Python 2.
    """
    module = ModuleType(name)
    if PY3:
        import importlib.util
        spec = importlib.util.find_spec(original_module.__name__)
        source = spec.loader.get_code(original_module.__name__)
    else:
        if getattr(sys, "frozen", False):
            raise NotImplementedError("Can't load modules on Python 2 with PyInstaller")
        path = original_module.__file__
        # Prefer the .py source next to a compiled .pyc/.pyo file.
        if path.endswith(".pyc") or path.endswith(".pyo"):
            path = path[:-1]
        with open(path) as f:
            source = f.read()
    exec_(source, module.__dict__, module.__dict__)
    return module
Load a copy of a module distinct from what you d get if you imported it directly .
16,841
def _writeTracebackMessage(logger, typ, exception, traceback):
    """Write a traceback message, with extracted exception fields, to the log."""
    message = TRACEBACK_MESSAGE(
        reason=exception, traceback=traceback, exception=typ
    )
    extra = _error_extraction.get_fields_for_exception(logger, exception)
    message.bind(**extra).write(logger)
Write a traceback to the log .
16,842
def write_traceback(logger=None, exc_info=None):
    """Write the current (or given) traceback to the log."""
    if exc_info is None:
        exc_info = sys.exc_info()
    typ, exception, tb = exc_info
    rendered = "".join(_traceback_no_io.format_exception(typ, exception, tb))
    _writeTracebackMessage(logger, typ, exception, rendered)
Write the latest traceback to the log .
16,843
def exclusively(f):
    """Make a method thread-safe by serializing calls through ``self._lock``."""
    @wraps(f)
    def locked(self, *a, **kw):
        with self._lock:
            return f(self, *a, **kw)
    return locked
Decorate a function to make it thread - safe by serializing invocations using a per - instance lock .
16,844
def to_file(output_file, encoder=EliotJSONEncoder):
    """Register a destination writing one JSON message per line to *output_file*."""
    destination = FileDestination(file=output_file, encoder=encoder)
    Logger._destinations.add(destination)
Add a destination that writes a JSON message per line to the given file .
16,845
def send(self, message):
    """Deliver *message* (updated with the global fields) to every destination.

    :raises _DestinationsSendError: with all collected errors, if any
        destination raised.
    """
    message.update(self._globalFields)
    failures = []
    for destination in self._destinations:
        try:
            destination(message)
        except:
            failures.append(sys.exc_info())
    if failures:
        raise _DestinationsSendError(failures)
Deliver a message to all destinations .
16,846
def add(self, *destinations):
    """Add new destinations, flushing any buffered startup messages to them."""
    buffered_messages = None
    if not self._any_added:
        # First real destination: capture messages buffered so far and drop
        # the buffering placeholder destination.
        self._any_added = True
        buffered_messages = self._destinations[0].messages
        self._destinations = []
    self._destinations.extend(destinations)
    if buffered_messages:
        for message in buffered_messages:
            self.send(message)
Adds new destinations .
16,847
def encode(timestamp):
    """Convert seconds since epoch (float) to a TAI64N string."""
    whole = int(timestamp)
    nanoseconds = int((timestamp - whole) * 1000000000)
    packed = struct.pack(_STRUCTURE, whole + _OFFSET, nanoseconds)
    return "@" + b2a_hex(packed).decode("ascii")
Convert seconds since epoch to TAI64N string .
16,848
def decode(tai64n):
    """Convert a TAI64N string to (float) seconds since epoch."""
    seconds, nanoseconds = struct.unpack(_STRUCTURE, a2b_hex(tai64n[1:]))
    return (seconds - _OFFSET) + (nanoseconds / 1000000000.0)
Convert TAI64N string to seconds since epoch .
16,849
def preserve_context(f):
    """Capture the current Eliot context; restore it when the result is called.

    Allows continuing the action context in a different thread. The returned
    callable may be invoked only once.

    :raises TooManyCalls: on a second invocation (raised by the wrapper).
    """
    action = current_action()
    if action is None:
        return f
    task_id = action.serialize_task_id()
    called = threading.Lock()

    def restore_eliot_context(*args, **kwargs):
        # Non-blocking acquire succeeds only on the first invocation.
        if not called.acquire(False):
            raise TooManyCalls(f)
        with Action.continue_task(task_id=task_id):
            return f(*args, **kwargs)

    return restore_eliot_context
Package up the given function with the current Eliot context and then restore context and call given function when the resulting callable is run . This allows continuing the action context within a different thread .
16,850
def serialize_task_id(self):
    """Return a unique ``uuid@level`` byte identifier for the current task location."""
    uuid = self._identification[TASK_UUID_FIELD]
    level = self._nextTaskLevel().toString()
    return "{}@{}".format(uuid, level).encode("ascii")
Create a unique identifier for the current location within the task .
16,851
def continue_task(cls, logger=None, task_id=_TASK_ID_NOT_SUPPLIED):
    """Start a new action continuing a serialized task id.

    :raises RuntimeError: if *task_id* was not supplied.
    """
    if task_id is _TASK_ID_NOT_SUPPLIED:
        raise RuntimeError("You must supply a task_id keyword argument.")
    if isinstance(task_id, bytes):
        task_id = task_id.decode("ascii")
    uuid, task_level = task_id.split("@")
    action = cls(
        logger, uuid, TaskLevel.fromString(task_level), "eliot:remote_task"
    )
    action._start({})
    return action
Start a new action which is part of a serialized task .
16,852
def _start(self, fields):
    """Log this action's start message."""
    fields[ACTION_STATUS_FIELD] = STARTED_STATUS
    fields.update(self._identification)
    serializer = None if self._serializers is None else self._serializers.start
    Message(fields, serializer).write(self._logger, self)
Log the start message .
16,853
def finish(self, exception=None):
    """Log the finish message for this action (success or failure).

    Idempotent: calls after the first are no-ops.
    """
    if self._finished:
        return
    self._finished = True
    serializer = None
    if exception is None:
        fields = self._successFields
        fields[ACTION_STATUS_FIELD] = SUCCEEDED_STATUS
        if self._serializers is not None:
            serializer = self._serializers.success
    else:
        fields = _error_extraction.get_fields_for_exception(self._logger, exception)
        fields[EXCEPTION_FIELD] = "%s.%s" % (
            exception.__class__.__module__,
            exception.__class__.__name__,
        )
        fields[REASON_FIELD] = safeunicode(exception)
        fields[ACTION_STATUS_FIELD] = FAILED_STATUS
        if self._serializers is not None:
            serializer = self._serializers.failure
    fields.update(self._identification)
    Message(fields, serializer).write(self._logger, self)
Log the finish message .
16,854
def context(self):
    """Context manager making this action current for the duration."""
    token = _ACTION_CONTEXT.set(self)
    try:
        yield self
    finally:
        _ACTION_CONTEXT.reset(token)
Create a context manager that ensures code runs within action s context .
16,855
def children(self):
    """Child messages and actions sorted by task level (start/end excluded)."""
    ordered = sorted(self._children.values(), key=lambda m: m.task_level)
    return pvector(ordered)
The list of child messages and actions sorted by task level excluding the start and end messages .
16,856
def _start(self, start_message):
    """Attach a validated start message to this action.

    :raises InvalidStartMessage: on a wrong status or a non-initial task level.
    """
    status = start_message.contents.get(ACTION_STATUS_FIELD, None)
    if status != STARTED_STATUS:
        raise InvalidStartMessage.wrong_status(start_message)
    if start_message.task_level.level[-1] != 1:
        raise InvalidStartMessage.wrong_task_level(start_message)
    return self.set(start_message=start_message)
Start this action given its start message .
16,857
def serialize(self, message):
    """Serialize *message* in place, converting each declared field's value."""
    for name, field in self.fields.items():
        message[name] = field.serialize(message[name])
Serialize the given message in - place converting inputs to outputs .
16,858
def validate(self, message):
    """Validate *message* against this schema.

    :raises ValidationError: on a missing field, a field-level validation
        failure, or (unless additional fields are allowed) an unexpected field.
    """
    for key, field in self.fields.items():
        if key not in message:
            raise ValidationError(message, "Field %r is missing" % (key,))
        field.validate(message[key])
    if self.allow_additional_fields:
        return
    # Reject any field not declared in the schema or reserved by Eliot.
    known = set(self.fields) | set(RESERVED_FIELDS)
    for key in message:
        if key not in known:
            raise ValidationError(message, "Unexpected field %r" % (key,))
Validate the given message .
16,859
# Render an Eliot message dict as a human-readable string: a header line
# "uuid -> /task/level", a UTC timestamp line, then one indented
# "  key: value" line per field. Type/status fields are printed first in a
# fixed order; remaining fields are sorted, with structural fields skipped.
# Multi-line pformat output is continued with a "| "-prefixed indent aligned
# under the key. Layout-sensitive string building — left byte-identical
# (flattened/tokenized source).
def pretty_format ( message ) : skip = { TIMESTAMP_FIELD , TASK_UUID_FIELD , TASK_LEVEL_FIELD , MESSAGE_TYPE_FIELD , ACTION_TYPE_FIELD , ACTION_STATUS_FIELD } def add_field ( previous , key , value ) : value = unicode ( pprint . pformat ( value , width = 40 ) ) . replace ( "\\n" , "\n " ) . replace ( "\\t" , "\t" ) lines = value . split ( "\n" ) indent = "{}| " . format ( " " * ( 2 + len ( key ) ) ) value = "\n" . join ( [ lines [ 0 ] ] + [ indent + l for l in lines [ 1 : ] ] ) return " %s: %s\n" % ( key , value ) remaining = "" for field in [ ACTION_TYPE_FIELD , MESSAGE_TYPE_FIELD , ACTION_STATUS_FIELD ] : if field in message : remaining += add_field ( remaining , field , message [ field ] ) for ( key , value ) in sorted ( message . items ( ) ) : if key not in skip : remaining += add_field ( remaining , key , value ) level = "/" + "/" . join ( map ( unicode , message [ TASK_LEVEL_FIELD ] ) ) return "%s -> %s\n%sZ\n%s" % ( message [ TASK_UUID_FIELD ] , level , datetime . utcfromtimestamp ( message [ TIMESTAMP_FIELD ] ) . isoformat ( sep = str ( " " ) ) , remaining , )
Convert a message dictionary into a human - readable string .
16,860
def _main():
    """Read JSON messages from stdin and pretty-print them to stdout.

    Non-JSON lines and lines missing the required Eliot fields are reported
    and skipped. Any command-line argument prints the help text and exits.
    """
    if argv[1:]:
        stdout.write(_CLI_HELP)
        raise SystemExit()
    for line in stdin:
        try:
            message = loads(line)
        except ValueError:
            stdout.write("Not JSON: {}\n\n".format(line.rstrip(b"\n")))
            continue
        if REQUIRED_FIELDS - set(message.keys()):
            stdout.write("Not an Eliot message: {}\n\n".format(line.rstrip(b"\n")))
            continue
        result = pretty_format(message) + "\n"
        if PY2:
            result = result.encode("utf-8")
        stdout.write(result)
Command - line program that reads in JSON from stdin and writes out pretty - printed messages to stdout .
16,861
def get_fields_for_exception(self, logger, exception):
    """Return extra fields for a failed-action message for *exception*.

    Walks the exception's MRO looking for a registered extractor; an
    extractor that itself raises is logged and yields no fields.
    """
    for klass in getmro(exception.__class__):
        if klass in self.registry:
            extractor = self.registry[klass]
            try:
                return extractor(exception)
            except:
                from ._traceback import write_traceback
                write_traceback(logger)
                return {}
    return {}
Given an exception instance return fields to add to the failed action message .
16,862
def run(self):
    """Decode each incoming JSON line, evaluate it, and write JSON output.

    Messages whose evaluation yields the SKIP sentinel are dropped.
    """
    for line in self.incoming:
        message = loads(line)
        result = self._evaluate(message)
        if result is self._SKIP:
            continue
        self.output.write(dumps(result, cls=_DatetimeJSONEncoder) + b"\n")
For each incoming message decode the JSON evaluate expression encode as JSON and write that to the output file .
16,863
def _evaluate ( self , message ) : return eval ( self . code , globals ( ) , { "J" : message , "timedelta" : timedelta , "datetime" : datetime , "SKIP" : self . _SKIP } )
Evaluate the expression with the given Python object in its locals .
16,864
def respond(self):
    """Run the WSGI app for the current request and stream its body.

    :raises ValueError: if the application yields non-bytes chunks.
    """
    response = self.req.server.wsgi_app(self.env, self.start_response)
    try:
        for chunk in filter(None, response):
            # PEP 3333 requires byte strings from the application.
            if not isinstance(chunk, six.binary_type):
                raise ValueError('WSGI Applications must yield bytes')
            self.write(chunk)
    finally:
        self.req.ensure_headers_sent()
        if hasattr(response, 'close'):
            response.close()
Process the current request .
16,865
def _encode_status(status):
    """Cast a WSGI status line to this Python version's bytes representation.

    :raises TypeError: on Python 3 if *status* is not str.
    """
    if six.PY2:
        return status
    if not isinstance(status, str):
        raise TypeError('WSGI response status is not of type str.')
    # Per PEP 3333, the status line is Latin-1 encodable.
    return status.encode('ISO-8859-1')
Cast status to bytes representation of current Python version .
16,866
def write(self, chunk):
    """WSGI callable writing unbuffered data to the client, policing Content-Length.

    :raises AssertionError: if called before start_response.
    :raises ValueError: if the body exceeds the declared Content-Length.
    """
    if not self.started_response:
        raise AssertionError('WSGI write called before start_response.')
    chunklen = len(chunk)
    rbo = self.remaining_bytes_out
    if rbo is not None and chunklen > rbo:
        if not self.req.sent_headers:
            self.req.simple_response(
                '500 Internal Server Error',
                'The requested resource returned more bytes than the '
                'declared Content-Length.',
            )
        else:
            # Headers already sent: truncate to the declared length.
            chunk = chunk[:rbo]
    self.req.ensure_headers_sent()
    self.req.write(chunk)
    if rbo is not None:
        rbo -= chunklen
        if rbo < 0:
            raise ValueError(
                'Response body exceeds the declared Content-Length.',
            )
WSGI callable to write unbuffered data to the client .
16,867
def write(self, b):
    """Append bytes to the write buffer, flush, and return len(b).

    :raises TypeError: if *b* is str rather than bytes.
    """
    self._checkClosed()
    if isinstance(b, str):
        raise TypeError("can't write str to binary stream")
    with self._write_lock:
        self._write_buf.extend(b)
        self._flush_unlocked()
    return len(b)
Write bytes to buffer .
16,868
def write(self, data):
    """sendall() for non-blocking sockets: retry until every byte is sent."""
    sent = 0
    view = memoryview(data)
    total = len(view)
    while sent < total:
        try:
            sent += self.send(view[sent:sent + SOCK_WRITE_BLOCKSIZE])
        except socket.error as e:
            # Retry on EAGAIN-style errors; re-raise anything else.
            if e.args[0] not in errors.socket_errors_nonblocking:
                raise
Sendall for non - blocking sockets .
16,869
def flush(self):
    """Flush the write buffer to the socket and reset it."""
    if self._wbuf:
        pending = ''.join(self._wbuf)
        self._wbuf = []
        self.write(pending)
Write all data from buffer to socket and reset write buffer .
16,870
def parse_wsgi_bind_location(bind_addr_string):
    """Convert a bind-address string to a BindLocation.

    Tries a TCP host/port first, then an abstract socket (leading '@'),
    falling back to a filesystem UNIX socket path.
    """
    match = six.moves.urllib.parse.urlparse('//{}'.format(bind_addr_string))
    try:
        # match.port raises ValueError for a non-numeric port.
        addr, port = match.hostname, match.port
        if addr is not None or port is not None:
            return TCPSocket(addr, port)
    except ValueError:
        pass
    if bind_addr_string.startswith('@'):
        return AbstractSocket(bind_addr_string[1:])
    return UnixSocket(path=bind_addr_string)
Convert bind address string to a BindLocation .
16,871
def main():
    """Create and start a Cheroot server instance from command-line arguments."""
    parser = argparse.ArgumentParser(
        description='Start an instance of the Cheroot WSGI/HTTP server.',
    )
    for arg, spec in _arg_spec.items():
        parser.add_argument(arg, **spec)
    raw_args = parser.parse_args()
    # Make the current directory importable, as an interactive shell would.
    if '' not in sys.path:
        sys.path.insert(0, '')
    raw_args._wsgi_app.server(raw_args).safe_start()
Create a new Cheroot instance with arguments from the command line .
16,872
def server_args(self, parsed_args):
    """Return keyword arguments for the Server class.

    Public, non-None parsed CLI options, overridden by this adapter's own
    attributes.
    """
    args = {
        name: value
        for name, value in vars(parsed_args).items()
        if not name.startswith('_') and value is not None
    }
    args.update(vars(self))
    return args
Return keyword args for Server class .
16,873
def plat_specific_errors(*errnames):
    """Return the error numbers for the named errno constants on this platform.

    Names this platform does not define are silently skipped; aliased names
    that share a number are collapsed.
    """
    numbers = {getattr(errno, name, None) for name in errnames}
    numbers.discard(None)  # drop the sentinel for undefined names
    return list(numbers)
Return error numbers for all errors in errnames on this platform .
16,874
def _assert_ssl_exc_contains ( exc , * msgs ) : if len ( msgs ) < 1 : raise TypeError ( '_assert_ssl_exc_contains() requires ' 'at least one message to be passed.' , ) err_msg_lower = str ( exc ) . lower ( ) return any ( m . lower ( ) in err_msg_lower for m in msgs )
Check whether SSL exception contains either of messages provided .
16,875
def env_dn_dict(self, env_prefix, cert_value):
    """Return WSGI environment variables for a client certificate DN.

    Only attributes with a known LDAP code mapping are included.
    """
    if not cert_value:
        return {}
    env = {}
    for rdn in cert_value:
        for attr_name, val in rdn:
            code = self.CERT_KEY_TO_LDAP_CODE.get(attr_name)
            if code:
                env['%s_%s' % (env_prefix, code)] = val
    return env
Return a dict of WSGI environment variables for a client cert DN .
16,876
def _safe_call(self, is_reader, call, *args, **kwargs):
    """Wrap the given call with SSL error-trapping.

    When ``is_reader`` is true, EOF-like SSL errors return ``b''`` to
    emulate a plain socket at end-of-stream; otherwise they propagate.
    Retryable SSL want-read/want-write states sleep ``self.ssl_retry``
    and loop.
    """
    start = time.time()
    while True:
        try:
            return call(*args, **kwargs)
        except SSL.WantReadError:
            # The handshake/renegotiation needs more data; back off
            # briefly and retry the same call.
            time.sleep(self.ssl_retry)
        except SSL.WantWriteError:
            time.sleep(self.ssl_retry)
        except SSL.SysCallError as e:
            if is_reader and e.args == (-1, 'Unexpected EOF'):
                return b''

            errnum = e.args[0]
            if is_reader and errnum in errors.socket_errors_to_ignore:
                return b''
            # Re-raise as a plain socket error for upper layers.
            raise socket.error(errnum)
        except SSL.Error as e:
            if is_reader and e.args == (-1, 'Unexpected EOF'):
                return b''

            thirdarg = None
            try:
                thirdarg = e.args[0][0][2]
            except IndexError:
                pass

            if thirdarg == 'http request':
                # The client is speaking plain HTTP to an HTTPS port.
                raise errors.NoSSLError()

            raise errors.FatalSSLAlert(*e.args)

        # NOTE(review): this check is reached only after a retryable
        # error above; a call that blocks indefinitely is not
        # interrupted by this timeout.
        if time.time() - start > self.ssl_timeout:
            raise socket.timeout('timed out')
Wrap the given call with SSL error - trapping .
16,877
def sendall(self, *args, **kwargs):
    """Send whole message to the socket."""
    # Delegate to the parent implementation, guarded by SSL
    # error-trapping (False: this is a writer, not a reader).
    parent_sendall = super(SSLFileobjectMixin, self).sendall
    return self._safe_call(False, parent_sendall, *args, **kwargs)
Send whole message to the socket .
16,878
def bind(self, sock):
    """Wrap and return the given socket."""
    # Build the SSL context lazily on first bind.
    if self.context is None:
        self.context = self.get_context()
    wrapped = SSLConnection(self.context, sock)
    self._environ = self.get_environ()
    return wrapped
Wrap and return the given socket .
16,879
def get_context(self):
    """Return an SSL.Context from self attributes."""
    ctx = SSL.Context(SSL.SSLv23_METHOD)
    ctx.use_privatekey_file(self.private_key)
    # The chain, when present, doubles as the verification store.
    if self.certificate_chain:
        ctx.load_verify_locations(self.certificate_chain)
    ctx.use_certificate_file(self.certificate)
    return ctx
Return an SSL . Context from self attributes .
16,880
def get_environ(self):
    """Return WSGI environ entries to be merged into each request.

    Always sets ``HTTPS``; when a server certificate is configured,
    also exposes its version, serial number, and the issuer/subject DN
    components as ``SSL_SERVER_*`` variables.
    """
    ssl_environ = {
        'HTTPS': 'on',
    }

    if self.certificate:
        # Server certificate attributes.
        # Fix: the original leaked the file handle (open().read()
        # with no close); use a context manager instead.
        with open(self.certificate, 'rb') as cert_file:
            cert = crypto.load_certificate(
                crypto.FILETYPE_PEM, cert_file.read(),
            )
        ssl_environ.update({
            'SSL_SERVER_M_VERSION': cert.get_version(),
            'SSL_SERVER_M_SERIAL': cert.get_serial_number(),
        })

        for prefix, dn in [
            ('I', cert.get_issuer()),
            ('S', cert.get_subject()),
        ]:
            # str(dn) wraps the '/key=value' pairs in
            # "<X509Name object '...'>"; slice off the wrapper.
            dnstr = str(dn)[18:-2]

            wsgikey = 'SSL_SERVER_%s_DN' % prefix
            ssl_environ[wsgikey] = dnstr

            # Walk the DN string from the right, peeling off
            # '=value' then '/key' each iteration.
            while dnstr:
                pos = dnstr.rfind('=')
                dnstr, value = dnstr[:pos], dnstr[pos + 1:]
                pos = dnstr.rfind('/')
                dnstr, key = dnstr[:pos], dnstr[pos + 1:]
                if key and value:
                    wsgikey = 'SSL_SERVER_%s_DN_%s' % (prefix, key)
                    ssl_environ[wsgikey] = value

    return ssl_environ
Return WSGI environ entries to be merged into each request .
16,881
def get_ssl_adapter_class(name='builtin'):
    """Return an SSL adapter class for the given name.

    Looks *name* up in the ``ssl_adapters`` registry; string entries
    are lazily resolved to classes via a dotted-path import and
    attribute lookup.

    Raises AttributeError if the dotted path does not resolve.
    """
    adapter = ssl_adapters[name.lower()]
    if isinstance(adapter, six.string_types):
        mod_path, _, attr_name = adapter.rpartition('.')

        # Fix: replace the deprecated ``__import__(mod, g, l, [''])``
        # call and the contrived KeyError re-raise with
        # importlib.import_module, which also consults sys.modules.
        import importlib
        mod = sys.modules.get(mod_path)
        if mod is None:
            mod = importlib.import_module(mod_path)

        try:
            adapter = getattr(mod, attr_name)
        except AttributeError:
            raise AttributeError(
                "'%s' object has no attribute '%s'"
                % (mod_path, attr_name),
            )

    return adapter
Return an SSL adapter class for the given name .
16,882
def read_trailer_lines(self):
    """Read HTTP trailer lines and yield them.

    Raises ValueError if called before the request body has been fully
    read, on a truncated trailer section, or on a line that is not
    CRLF-terminated; raises IOError once ``self.maxlen`` total bytes
    have been exceeded.
    """
    if not self.closed:
        raise ValueError(
            'Cannot read trailers until the request body has been read.',
        )

    while True:
        line = self.rfile.readline()
        if not line:
            # EOF before the blank terminator line: truncated trailers.
            raise ValueError('Illegal end of headers.')

        self.bytes_read += len(line)
        if self.maxlen and self.bytes_read > self.maxlen:
            raise IOError('Request Entity Too Large')

        if line == CRLF:
            # Blank line: normal end of the trailer section.
            break
        if not line.endswith(CRLF):
            raise ValueError('HTTP requires CRLF terminators')

        yield line
Read HTTP trailer lines that follow a chunked request body and yield them .
16,883
def send_headers(self):
    """Assert, process, and send the HTTP response message-headers.

    Expects ``self.status`` and ``self.outheaders`` to be set.  Decides
    chunking vs. connection-close when no Content-Length is present,
    fills in Connection/Date/Server defaults, then writes the complete
    status line and header block to the connection.
    """
    hkeys = [key.lower() for key, value in self.outheaders]
    status = int(self.status[:3])

    if status == 413:
        # Request Entity Too Large: drop the connection rather than
        # read the oversized remainder of the body.
        self.close_connection = True
    elif b'content-length' not in hkeys:
        # 1xx, 204, 205 and 304 responses carry no message body, so
        # neither chunking nor closing is needed to delimit them.
        if status < 200 or status in (204, 205, 304):
            pass
        else:
            needs_chunked = (
                self.response_protocol == 'HTTP/1.1'
                and self.method != b'HEAD'
            )
            if needs_chunked:
                # Use the chunked transfer-coding to delimit the body.
                self.chunked_write = True
                self.outheaders.append((b'Transfer-Encoding', b'chunked'))
            else:
                # Closing the connection is the only remaining way for
                # the client to find the end of the body.
                self.close_connection = True

    if b'connection' not in hkeys:
        if self.response_protocol == 'HTTP/1.1':
            # HTTP/1.1 defaults to persistent connections; only the
            # close case needs to be announced.
            if self.close_connection:
                self.outheaders.append((b'Connection', b'close'))
        else:
            # Pre-1.1 defaults to closing; announce keep-alive instead.
            if not self.close_connection:
                self.outheaders.append((b'Connection', b'Keep-Alive'))

    if (not self.close_connection) and (not self.chunked_read):
        # Drain any unread request body so the next request on this
        # persistent connection starts at the right byte.
        remaining = getattr(self.rfile, 'remaining', 0)
        if remaining > 0:
            self.rfile.read(remaining)

    if b'date' not in hkeys:
        self.outheaders.append((
            b'Date',
            email.utils.formatdate(usegmt=True).encode('ISO-8859-1'),
        ))

    if b'server' not in hkeys:
        self.outheaders.append((
            b'Server',
            self.server.server_name.encode('ISO-8859-1'),
        ))

    proto = self.server.protocol.encode('ascii')
    buf = [proto + SPACE + self.status + CRLF]
    for k, v in self.outheaders:
        buf.append(k + COLON + SPACE + v + CRLF)
    buf.append(CRLF)
    self.conn.wfile.write(EMPTY.join(buf))
Assert process and send the HTTP response message - headers .
16,884
def _conditional_error(self, req, response):
    """Respond with an error.

    Nothing is written when the request is absent or its headers have
    already been sent.
    """
    can_respond = req and not req.sent_headers
    if not can_respond:
        return

    try:
        req.simple_response(response)
    except errors.NoSSLError:
        self._handle_no_ssl(req)
    except errors.FatalSSLAlert:
        # The TLS channel is dead; nothing sane can be written.
        pass
Respond with an error .
16,885
def resolve_peer_creds(self):
    """Return the username and group tuple of the peercreds if available.

    Raises NotImplementedError when UID/GID lookup is impossible on the
    current platform, or RuntimeError when it is disabled for this
    server.
    """
    if not IS_UID_GID_RESOLVABLE:
        raise NotImplementedError(
            'UID/GID lookup is unavailable under current platform. '
            'It can only be done under UNIX-like OS '
            'but not under the Google App Engine',
        )
    if not self.peercreds_resolve_enabled:
        raise RuntimeError(
            'UID/GID lookup is disabled within this server',
        )

    user_name = pwd.getpwuid(self.peer_uid).pw_name
    group_name = grp.getgrgid(self.peer_gid).gr_name
    return user_name, group_name
Return the username and group tuple of the peercreds if available .
16,886
def _close_kernel_socket(self):
    """Close kernel socket in outdated Python versions.

    Under Python 2 the wrapped socket object keeps the real kernel
    socket alive in ``_sock``; close it explicitly.
    """
    if not six.PY2:
        return
    if hasattr(self.socket, '_sock'):
        self.socket._sock.close()
Close kernel socket in outdated Python versions .
16,887
def clear_stats(self):
    """Reset server stat counters.

    Rebuilds ``self.stats`` and registers it in the module-level
    ``logging.statistics`` registry under a key unique to this server
    instance.
    """
    self._start_time = None
    self._run_time = 0
    # Most entries are callables taking the stats dict itself ('s'),
    # so derived values are computed lazily when stats are rendered.
    # A value of -1 marks "stats disabled" for the aggregate entries.
    self.stats = {
        'Enabled': False,
        'Bind Address': lambda s: repr(self.bind_addr),
        'Run time': lambda s: (not s['Enabled']) and -1 or self.runtime(),
        'Accepts': 0,
        'Accepts/sec': lambda s: s['Accepts'] / self.runtime(),
        'Queue': lambda s: getattr(self.requests, 'qsize', None),
        'Threads': lambda s: len(getattr(self.requests, '_threads', [])),
        'Threads Idle': lambda s: getattr(self.requests, 'idle', None),
        'Socket Errors': 0,
        'Requests': lambda s: (not s['Enabled']) and -1 or sum(
            [w['Requests'](w) for w in s['Worker Threads'].values()], 0,
        ),
        'Bytes Read': lambda s: (not s['Enabled']) and -1 or sum(
            [w['Bytes Read'](w) for w in s['Worker Threads'].values()], 0,
        ),
        'Bytes Written': lambda s: (not s['Enabled']) and -1 or sum(
            [w['Bytes Written'](w) for w in s['Worker Threads'].values()], 0,
        ),
        'Work Time': lambda s: (not s['Enabled']) and -1 or sum(
            [w['Work Time'](w) for w in s['Worker Threads'].values()], 0,
        ),
        # The 'or 1e-6' below guards against division by zero when a
        # worker has recorded no work time yet.
        'Read Throughput': lambda s: (not s['Enabled']) and -1 or sum(
            [w['Bytes Read'](w) / (w['Work Time'](w) or 1e-6)
             for w in s['Worker Threads'].values()], 0,
        ),
        'Write Throughput': lambda s: (not s['Enabled']) and -1 or sum(
            [w['Bytes Written'](w) / (w['Work Time'](w) or 1e-6)
             for w in s['Worker Threads'].values()], 0,
        ),
        'Worker Threads': {},
    }
    # Keyed by id() so multiple server instances can coexist.
    logging.statistics['Cheroot HTTPServer %d' % id(self)] = self.stats
Reset server stat counters .
16,888
def runtime(self):
    """Return server uptime."""
    if self._start_time is None:
        # Server is not running; report accumulated run time only.
        return self._run_time
    return self._run_time + (time.time() - self._start_time)
Return server uptime .
16,889
def bind_addr(self, value):
    """Set the interface on which to listen for connections."""
    is_tcp_addr = isinstance(value, tuple)
    if is_tcp_addr and value[0] in ('', None):
        # An empty/None host does not behave as "all interfaces";
        # force callers to be explicit.
        raise ValueError(
            "Host values of '' or None are not allowed. "
            "Use '0.0.0.0' (IPv4) or '::' (IPv6) instead "
            'to listen on all active interfaces.',
        )
    self._bind_addr = value
Set the interface on which to listen for connections .
16,890
def safe_start(self):
    """Run the server forever and stop it cleanly on exit."""
    try:
        self.start()
    except SystemExit:
        self.error_log('SystemExit raised: shutting down')
        self.stop()
        raise
    except (KeyboardInterrupt, IOError):
        # Blocking calls may surface Ctrl-C as IOError
        # ("Interrupted function call") as well.
        self.error_log('Keyboard Interrupt: shutting down')
        self.stop()
        raise
Run the server forever and stop it cleanly on exit .
16,891
def prepare(self):
    """Prepare server to serving requests.

    Selects and configures the listening socket (inherited fd via
    systemd socket activation, an AF_UNIX socket, or AF_INET/AF_INET6),
    puts it into listening mode, starts the worker thread pool, and
    marks the server ready.

    Raises socket.error when no socket could be created/bound.
    """
    self._interrupt = None

    if self.software is None:
        self.software = '%s Server' % self.version

    # Select the appropriate socket.
    self.socket = None
    msg = 'No socket could be created'
    if os.getenv('LISTEN_PID', None):
        # systemd socket activation: fd 3 is the pre-bound socket.
        self.socket = socket.fromfd(3, socket.AF_INET, socket.SOCK_STREAM)
    elif isinstance(self.bind_addr, six.string_types):
        # AF_UNIX socket (bind_addr is a filesystem path).
        try:
            self.bind_unix_socket(self.bind_addr)
        except socket.error as serr:
            msg = '%s -- (%s: %s)' % (msg, self.bind_addr, serr)
            six.raise_from(socket.error(msg), serr)
    else:
        # AF_INET or AF_INET6 socket: resolve all candidate address
        # families for the host and try each in turn.
        host, port = self.bind_addr
        try:
            info = socket.getaddrinfo(
                host, port, socket.AF_UNSPEC,
                socket.SOCK_STREAM, 0, socket.AI_PASSIVE,
            )
        except socket.gaierror:
            # Resolution failed; guess the family from the host text
            # (a ':' implies an IPv6 literal needing the 4-tuple form).
            sock_type = socket.AF_INET
            bind_addr = self.bind_addr

            if ':' in host:
                sock_type = socket.AF_INET6
                bind_addr = bind_addr + (0, 0)

            info = [(sock_type, socket.SOCK_STREAM, 0, '', bind_addr)]

        for res in info:
            af, socktype, proto, canonname, sa = res
            try:
                self.bind(af, socktype, proto)
                break
            except socket.error as serr:
                # Accumulate each failure into the final error message.
                msg = '%s -- (%s: %s)' % (msg, sa, serr)
                if self.socket:
                    self.socket.close()
                self.socket = None

        if not self.socket:
            raise socket.error(msg)

    # Short timeout so the accept loop wakes periodically (lets
    # KeyboardInterrupt be caught, notably on Windows).
    self.socket.settimeout(1)
    self.socket.listen(self.request_queue_size)

    # Spin up the worker threads before accepting connections.
    self.requests.start()

    self.ready = True
    self._start_time = time.time()
Prepare server to serving requests .
16,892
def error_log(self, msg='', level=20, traceback=False):
    """Write error message to log.

    ``level`` is accepted for interface compatibility but not used
    here; everything is written to stderr.  When ``traceback`` is true,
    the current exception's traceback is appended.
    """
    # Override this in subclasses as desired.
    sys.stderr.write(msg + '\n')
    sys.stderr.flush()
    if traceback:
        # The parameter shadows the module, hence the traceback_ alias.
        sys.stderr.write(traceback_.format_exc())
        sys.stderr.flush()
Write error message to log .
16,893
def prepare_socket(bind_addr, family, type, proto, nodelay, ssl_adapter):
    """Create and prepare the socket object.

    Returns the configured (and, when ``ssl_adapter`` is given,
    SSL-wrapped) socket.  The socket is not yet bound or listening.
    """
    sock = socket.socket(family, type, proto)
    prevent_socket_inheritance(sock)

    host, port = bind_addr[:2]
    IS_EPHEMERAL_PORT = port == 0

    # NOTE(review): SO_REUSEADDR is skipped on Windows and for
    # ephemeral ports — presumably because of Windows' different
    # address-reuse semantics; confirm before changing.
    if not (IS_WINDOWS or IS_EPHEMERAL_PORT):
        sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    # TCP_NODELAY only makes sense for TCP; a str bind_addr denotes
    # an AF_UNIX socket path.
    if nodelay and not isinstance(bind_addr, str):
        sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)

    if ssl_adapter is not None:
        sock = ssl_adapter.bind(sock)

    # When listening on the IPv6 any-address, clear IPV6_V6ONLY so the
    # socket also accepts IPv4 connections (dual-stack).
    listening_ipv6 = (
        hasattr(socket, 'AF_INET6')
        and family == socket.AF_INET6
        and host in ('::', '::0', '::0.0.0.0')
    )
    if listening_ipv6:
        try:
            sock.setsockopt(
                socket.IPPROTO_IPV6, socket.IPV6_V6ONLY, 0,
            )
        except (AttributeError, socket.error):
            # The option is not available on this TCP stack; a
            # single-stack socket is still usable.
            pass

    return sock
Create and prepare the socket object .
16,894
def resolve_real_bind_addr(socket_):
    """Retrieve actual bind addr from bound socket."""
    bound_to = socket_.getsockname()
    tcp_families = (
        socket.AF_INET,
        socket.AF_INET6,
    )
    if socket_.family in tcp_families:
        # IPv6 getsockname() may return extra flowinfo/scope members;
        # callers only need (host, port).
        return bound_to[:2]
    # e.g. an AF_UNIX path is returned unchanged.
    return bound_to
Retrieve actual bind addr from bound socket .
16,895
def interrupt(self, interrupt):
    """Perform the shutdown of this server and save the exception.

    A truthy placeholder is stored first so code observing
    ``self._interrupt`` during ``stop()`` sees a pending shutdown;
    the real exception object is stored afterwards.
    """
    self._interrupt = True
    self.stop()
    self._interrupt = interrupt
Perform the shutdown of this server and save the exception .
16,896
def run(self):
    """Process incoming HTTP connections.

    Pulls connections from the thread pool's queue until the shutdown
    sentinel is received; records per-thread statistics when enabled.
    """
    # Register this worker's stats under its thread name.
    self.server.stats['Worker Threads'][self.getName()] = self.stats
    try:
        self.ready = True
        while True:
            conn = self.server.requests.get()
            if conn is _SHUTDOWNREQUEST:
                return

            self.conn = conn
            if self.server.stats['Enabled']:
                self.start_time = time.time()
            try:
                conn.communicate()
            finally:
                # Close and account for the connection even when
                # communicate() raised.
                conn.close()
                if self.server.stats['Enabled']:
                    self.requests_seen += self.conn.requests_seen
                    self.bytes_read += self.conn.rfile.bytes_read
                    self.bytes_written += self.conn.wfile.bytes_written
                    self.work_time += time.time() - self.start_time
                    self.start_time = None
            self.conn = None
    except (KeyboardInterrupt, SystemExit) as ex:
        # Propagate the interrupt to the server so it can shut down.
        self.server.interrupt = ex
Process incoming HTTP connections .
16,897
def put(self, obj):
    """Put request into queue.

    Blocks until a queue slot is available, up to
    ``self._queue_put_timeout`` seconds.

    Args:
        obj: the connection to enqueue (or the shutdown sentinel)
    """
    # Fix: dropped the original trailing
    # ``if obj is _SHUTDOWNREQUEST: return`` — it was a no-op, since
    # the function returned immediately afterwards anyway.
    self._queue.put(obj, block=True, timeout=self._queue_put_timeout)
Put request into queue .
16,898
def stop(self, timeout=5):
    """Terminate all worker threads.

    Args:
        timeout (int): time (in seconds) to wait for each thread to
            stop gracefully; ``None`` or a negative value waits
            forever.
    """
    # One sentinel per worker wakes every thread out of queue.get().
    for worker in self._threads:
        self._queue.put(_SHUTDOWNREQUEST)

    # Don't join the current thread (stop may be called from a worker).
    current = threading.currentThread()
    if timeout is not None and timeout >= 0:
        endtime = time.time() + timeout
    while self._threads:
        worker = self._threads.pop()
        if worker is not current and worker.isAlive():
            try:
                if timeout is None or timeout < 0:
                    worker.join()
                else:
                    remaining_time = endtime - time.time()
                    if remaining_time > 0:
                        worker.join(remaining_time)
                    if worker.isAlive():
                        # Join timed out; the worker is likely blocked
                        # on a socket read.  Shut down the read side of
                        # its connection to unblock it, then join for
                        # real.
                        c = worker.conn
                        if c and not c.rfile.closed:
                            try:
                                c.socket.shutdown(socket.SHUT_RD)
                            except TypeError:
                                # Some socket wrappers take no
                                # argument; retry without one.
                                c.socket.shutdown()
                        worker.join()
            except (
                    AssertionError,
                    KeyboardInterrupt,
            ):
                pass
Terminate all worker threads .
16,899
def _get_last_child_with_lineno(node):
    """Return the last direct child of *node* that has a lineno attribute.

    Return None when *node* has no such children.  Fields that never
    contribute meaningful position info (context, decorators, alias
    names, return annotations) are skipped.
    """
    ignored_fields = {'ctx', 'decorator_list', 'names', 'returns'}
    fields = node._fields
    # ast.Call's declared field order does not put the rightmost child
    # last; enumerate its fields explicitly instead.
    if isinstance(node, ast.Call):
        fields = ('func', 'args', 'starargs', 'keywords', 'kwargs')

    for field_name in reversed(fields):
        if field_name in ignored_fields:
            continue
        child = getattr(node, field_name, None)
        if isinstance(child, ast.AST):
            return child
        if isinstance(child, list) and child:
            return child[-1]
    return None
Return the last direct child of node that has a lineno attribute or None if node has no such children .