idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
42,000
def dumps(self):
    """Merge this object's bucket/file metadata into ``self.data`` and return it."""
    obj = self.obj
    metadata = {
        'bucket': str(obj.bucket_id),
        'checksum': obj.file.checksum,
        'key': obj.key,
        'size': obj.file.size,
        'version_id': str(obj.version_id),
    }
    self.data.update(metadata)
    return self.data
Create a dump of the metadata associated to the record .
42,001
def flush(self):
    """Flush the serialized files into the record's ``_files`` key.

    The key is only written when there are files, or when it already exists
    (so an emptied file set still overwrites a stale value).
    """
    dumped = self.dumps()
    if dumped or '_files' in self.record:
        self.record['_files'] = dumped
Flush changes to record .
42,002
def sort_by(self, *ids):
    """Update files order to match the given sequence of file IDs."""
    # Map each file's id to its key; unknown ids fall back to being used as keys.
    id_to_key = {str(f.file_id): f.key for f in self}
    ordered = OrderedDict()
    for id_ in ids:
        key = id_to_key.get(id_, id_)
        ordered[key] = self[key].dumps()
    self.filesmap = ordered
    self.flush()
Update files order .
42,003
def dumps(self, bucket=None):
    """Serialize files from a bucket (defaults to this iterator's bucket), honoring key order."""
    source = bucket or self.bucket
    dumped = []
    for obj in sorted_files_from_bucket(source, self.keys):
        dumped.append(self.file_cls(obj, self.filesmap.get(obj.key, {})).dumps())
    return dumped
Serialize files from a bucket .
42,004
def files(self):
    """Get a files iterator, creating and linking the record's bucket on first access.

    Raises:
        MissingModelError: when the record has no backing model.
    """
    if self.model is None:
        raise MissingModelError()
    link = RecordsBuckets.query.filter_by(record_id=self.id).first()
    if link:
        bucket = link.bucket
    else:
        bucket = self._create_bucket()
        if not bucket:
            return None
        RecordsBuckets.create(record=self.model, bucket=bucket)
    return self.files_iter_cls(self, bucket=bucket, file_cls=self.file_cls)
Get files iterator .
42,005
def files(self, data):
    """Set files from a mapping of key -> file data.

    Raises:
        RuntimeError: if the record already has files.
    """
    current_files = self.files
    if current_files:
        raise RuntimeError('Can not update existing files.')
    for key, value in data.items():
        current_files[key] = value
Set files from data .
42,006
def delete(self, force=False):
    """Delete a record, removing its RecordsBuckets link when force-deleting."""
    if force:
        # Hard delete must also drop the bucket association row.
        RecordsBuckets.query.filter_by(
            record=self.model, bucket=self.files.bucket).delete()
    return super(Record, self).delete(force)
Delete a record and also remove the RecordsBuckets if necessary .
42,007
def create(cls, record, bucket):
    """Create a new RecordsBuckets row and add it to the session."""
    instance = cls(record=record, bucket=bucket)
    db.session.add(instance)
    return instance
Create a new RecordsBuckets and add it to the session .
42,008
def sorted_files_from_bucket(bucket, keys=None):
    """Return the bucket's object versions sorted by the given key order.

    Keys not present in ``keys`` sort after all known keys, keeping their
    relative order (stable sort).
    """
    keys = keys or []
    rank = {key: position for position, key in enumerate(keys)}
    unknown_rank = len(keys)
    objects = ObjectVersion.get_by_bucket(bucket).all()
    return sorted(objects, key=lambda obj: rank.get(obj.key, unknown_rank))
Return files from bucket sorted by given keys .
42,009
def record_file_factory(pid, record, filename):
    """Get a file from a record; return None when the record has no such file."""
    try:
        has_files = hasattr(record, 'files') and record.files
    except MissingModelError:
        return None
    if not has_files:
        return None
    try:
        return record.files[filename]
    except KeyError:
        return None
Get file from a record .
42,010
def file_download_ui(pid, record, _record_file_factory=None, **kwargs):
    """File download view for a given record; 404s when the file is missing."""
    factory = _record_file_factory or record_file_factory
    fileobj = factory(pid, record, kwargs.get('filename'))
    if not fileobj:
        abort(404)
    obj = fileobj.obj
    # Permission check happens before any bytes are streamed.
    ObjectResource.check_object_permission(obj)
    return ObjectResource.send_object(
        obj.bucket, obj,
        expected_chksum=fileobj.get('checksum'),
        logger_data={
            'bucket_id': obj.bucket_id,
            'pid_type': pid.pid_type,
            'pid_value': pid.pid_value,
        },
        as_attachment=('download' in request.args))
File download view for a given record .
42,011
def default_bucket_link_factory(pid):
    """Factory for a record's bucket link; None when no bucket is available."""
    try:
        record = Record.get_record(pid.get_assigned_object())
        bucket = record.files.bucket
    except AttributeError:
        return None
    return url_for('invenio_files_rest.bucket_api', bucket_id=bucket.id, _external=True)
Factory for record bucket generation .
42,012
def flake(self, message):
    """Print an error message, followed by a newline, to stdout."""
    out = self.stdout
    out.write(str(message))
    out.write('\n')
Print an error message to stdout .
42,013
def names(self):
    """Return a list of the string names referenced by this binding.

    Only string literals inside a list literal are collected; any other
    source shape yields an empty list.
    """
    if not isinstance(self.source, ast.List):
        return []
    return [node.s for node in self.source.elts if isinstance(node, ast.Str)]
Return a list of the names referenced by this binding .
42,014
def unusedAssignments(self):
    """Yield (name, binding) pairs for assignments that were never used."""
    for name, binding in self.items():
        is_unused = (not binding.used
                     and name not in self.globals
                     and not self.uses_locals
                     and isinstance(binding, Assignment))
        if is_unused:
            yield name, binding
Return a generator for the assignments which have not been used .
42,015
def check_plugins(self):
    """Collect plugins from the 'frosted.plugins' entry point group and run them."""
    checkers = {}
    for entry_point in pkg_resources.iter_entry_points(group='frosted.plugins'):
        checkers[entry_point.name] = entry_point.load()
    for plugin_name, plugin in checkers.items():
        # Plugins only make sense for real files on disk.
        if self.filename != '(none)':
            for message, loc, args, kwargs in plugin.check(self.filename):
                self.report(message, loc, *args, **kwargs)
Collect plugins from the entry point frosted.plugins .
42,016
def run_deferred(self, deferred):
    """Run each deferred callable after restoring its associated scope stack and offset."""
    for entry in deferred:
        handler, self.scope_stack, self.offset = entry
        handler()
Run the callables in deferred using their associated scope stack .
42,017
def find_return_with_argument(self, node):
    """Find and return a return statement that has an argument.

    Searches nested statement bodies recursively but does not descend into
    nested function definitions. Returns None when no such return exists.
    """
    for child in node.body:
        if isinstance(child, ast.Return):
            if child.value:
                return child
        elif hasattr(child, 'body') and not isinstance(child, ast.FunctionDef):
            found = self.find_return_with_argument(child)
            if found:
                return found
Finds and returns a return statement that has an argument .
42,018
def is_generator(self, node):
    """Check whether a function is a generator by looking for a yield.

    Only yields appearing as an expression statement or the value of an
    assignment count; nested function definitions are not searched.
    """
    if not isinstance(node.body, list):
        return False
    for statement in node.body:
        if isinstance(statement, (ast.Assign, ast.Expr)):
            if isinstance(statement.value, ast.Yield):
                return True
        elif hasattr(statement, 'body') and not isinstance(statement, ast.FunctionDef):
            if self.is_generator(statement):
                return True
    return False
Checks whether a function is a generator by looking for a yield statement or expression .
42,019
def FOR(self, node):
    """Process bindings for loop variables, warning when one shadows a used import."""
    loop_vars = []

    def collect(n):
        # Gather every Name bound by the loop target, skipping context nodes.
        if isinstance(n, ast.Name):
            loop_vars.append(n.id)
        elif isinstance(n, ast.expr_context):
            return
        else:
            for child in ast.iter_child_nodes(n):
                collect(child)

    collect(node.target)
    for var_name in loop_vars:
        if (isinstance(self.scope.get(var_name), Importation)
                and self.scope[var_name].used):
            self.report(messages.ImportShadowedByLoopVar,
                        node, var_name, self.scope[var_name].source)
    self.handle_children(node)
Process bindings for loop variables .
42,020
def CLASSDEF(self, node):
    """Check names used in a class definition: decorators, bases, keywords, and body."""
    for decorator in node.decorator_list:
        self.handleNode(decorator, node)
    for base in node.bases:
        self.handleNode(base, node)
    if not PY2:
        # Keyword arguments (e.g. metaclass=...) only exist on Python 3 class nodes.
        for keyword_node in node.keywords:
            self.handleNode(keyword_node, node)
    self.push_scope(ClassScope)
    if self.settings.get('run_doctests', False):
        self.defer_function(lambda: self.handle_doctests(node))
    for statement in node.body:
        self.handleNode(statement, node)
    self.pop_scope()
    self.add_binding(node, ClassDefinition(node.name, node))
Check names used in a class definition including its decorators base classes and the body of its definition .
42,021
def check(codeString, filename, reporter=modReporter.Default, settings_path=None, **setting_overrides):
    """Check the Python source given by codeString for unfrosted flakes.

    Returns the number of messages reported; skip/syntax-error paths report a
    single flake and return 0 or 1.
    """
    # Resolve settings relative to the checked file unless a path is given.
    if not settings_path and filename:
        settings_path = os.path.dirname(os.path.abspath(filename))
    settings_path = settings_path or os.getcwd()
    active_settings = settings.from_path(settings_path).copy()
    # Merge overrides: 'not_'-prefixed keys subtract from list-valued settings,
    # plain keys add to list-valued settings or replace scalar settings.
    for key, value in itemsview(setting_overrides):
        access_key = key.replace('not_', '').lower()
        if type(active_settings.get(access_key)) in (list, tuple):
            if key.startswith('not_'):
                active_settings[access_key] = list(set(active_settings[access_key]).difference(value))
            else:
                active_settings[access_key] = list(set(active_settings[access_key]).union(value))
        else:
            active_settings[key] = value
    active_settings.update(setting_overrides)
    if _should_skip(filename, active_settings.get('skip', [])):
        if active_settings.get('directly_being_checked', None) == 1:
            # File was requested explicitly, so report the skip as a flake.
            reporter.flake(FileSkipped(filename))
            return 1
        elif active_settings.get('verbose', False):
            ignore = active_settings.get('ignore_frosted_errors', [])
            if (not "W200" in ignore and not "W201" in ignore):
                reporter.flake(FileSkipped(filename, None, verbose=active_settings.get('verbose')))
        return 0
    try:
        tree = compile(codeString, filename, "exec", _ast.PyCF_ONLY_AST)
    except SyntaxError:
        value = sys.exc_info()[1]
        msg = value.args[0]
        (lineno, offset, text) = value.lineno, value.offset, value.text
        if text is None:
            # No source text on the error usually means an encoding problem.
            reporter.unexpected_error(filename, 'problem decoding source')
        else:
            reporter.flake(PythonSyntaxError(filename, msg, lineno, offset, text,
                                             verbose=active_settings.get('verbose')))
        return 1
    except Exception:
        reporter.unexpected_error(filename, 'problem decoding source')
        return 1
    # NOTE(review): this call spanned a line break in the flattened source.
    w = checker.Checker(tree, filename, None,
                        ignore_lines=_noqa_lines(codeString), **active_settings)
    w.messages.sort(key=lambda m: m.lineno)
    for warning in w.messages:
        reporter.flake(warning)
    return len(w.messages)
Check the Python source given by codeString for unfrosted flakes .
42,022
def check_recursive(paths, reporter=modReporter.Default, settings_path=None, **setting_overrides):
    """Recursively check all source files found under ``paths``.

    Returns the total number of warnings across all checked files.
    """
    warnings = 0
    for source_path in iter_source_code(paths):
        # Bug fix: forward the caller's settings_path instead of hard-coding None,
        # which silently discarded an explicitly supplied settings directory.
        warnings += check_path(source_path, reporter,
                               settings_path=settings_path, **setting_overrides)
    return warnings
Recursively check all source files defined in paths .
42,023
def get_files_from_storage(paths):
    """Yield S3 files whose ``name`` is the basename (no path prefix).

    Missing files are logged and skipped rather than aborting the generator.
    """
    for path in paths:
        # Bug fix: the open call is the operation that raises ClientError; in the
        # original it sat *outside* the try, so the handler never fired.
        try:
            f = default_storage.open(path)
        except ClientError:
            logger.exception("File not found: %s", path)
        else:
            f.name = os.path.basename(path)
            yield f
Return S3 file where the name does not include the path .
42,024
def echo(bot, update):
    """Echo the user's message back through the bot."""
    bot.reply(update, update.get_effective_message())
Echo the user message .
42,025
def error(bot, update, error):
    """Log errors caused by updates, tagged for filtering."""
    detail = 'Update {} caused error {}'.format(update, error)
    logger.error(detail, extra={"tag": "err"})
Log Errors caused by Updates .
42,026
def main():
    """Start the bot: register command, message, and error handlers, then run."""
    updater = Updater("TOKEN")
    dispatcher = updater.dispatcher
    dispatcher.add_handler(CommandHandler("start", start))
    dispatcher.add_handler(CommandHandler("help", help))
    dispatcher.add_handler(MessageHandler(DefaultFilter(), echo))
    dispatcher.add_error_handler(error)
    updater.run()
Start the bot .
42,027
def main(config, host, port, logfile, debug, daemon, uid, gid, pidfile, umask, rundir):
    """Main entry point for running a socket server from the command line."""
    # Forward every CLI option to the real implementation by name.
    _main(config=config, host=host, port=port, logfile=logfile, debug=debug,
          daemon=daemon, uid=uid, gid=gid, pidfile=pidfile, umask=umask,
          rundir=rundir)
Main entry point for running a socket server from the commandline .
42,028
def make_sa():
    """Factory that creates a SQLAlchemy queue store from CoilMQ configuration."""
    options = dict(config.items('coilmq'))
    engine = engine_from_config(options, 'qstore.sqlalchemy.')
    init_model(engine)
    return SAQueue()
Factory that creates a SQLAlchemy queue store pulling config values from the CoilMQ configuration .
42,029
def subscribe(self, connection, destination):
    """Subscribe a connection to the specified topic destination."""
    self.log.debug("Subscribing %s to %s" % (connection, destination))
    subscribers = self._topics[destination]
    subscribers.add(connection)
Subscribes a connection to the specified topic destination .
42,030
def unsubscribe(self, connection, destination):
    """Unsubscribe a connection from a topic, dropping the topic when it empties."""
    self.log.debug("Unsubscribing %s from %s" % (connection, destination))
    subscribers = self._topics[destination]
    if connection in subscribers:
        subscribers.remove(connection)
    if not subscribers:
        del self._topics[destination]
Unsubscribes a connection from the specified topic destination .
42,031
def disconnect(self, connection):
    """Remove a subscriber connection from every topic, dropping emptied topics."""
    self.log.debug("Disconnecting %s" % connection)
    # Iterate over a snapshot of the keys since entries may be deleted.
    for dest in list(self._topics.keys()):
        subscribers = self._topics[dest]
        if connection in subscribers:
            subscribers.remove(connection)
        if not subscribers:
            del self._topics[dest]
Removes a subscriber connection .
42,032
def send(self, message):
    """Send a message to all subscribers of its destination topic.

    Subscribers whose ``send_frame`` raises are logged and disconnected after
    the delivery loop completes.

    Raises:
        ValueError: if the frame carries no ``destination`` header.
    """
    dest = message.headers.get('destination')
    if not dest:
        raise ValueError("Cannot send frame with no destination: %s" % message)
    message.cmd = 'message'
    message.headers.setdefault('message-id', str(uuid.uuid4()))
    bad_subscribers = set()
    for subscriber in self._topics[dest]:
        try:
            subscriber.send_frame(message)
        except Exception:
            # Bug fix: was a bare `except:`, which also swallowed
            # SystemExit/KeyboardInterrupt.
            self.log.exception("Error delivering message to subscriber %s; client will be disconnected." % subscriber)
            bad_subscribers.add(subscriber)
    for subscriber in bad_subscribers:
        self.disconnect(subscriber)
Sends a message to all subscribers of destination .
42,033
def subscriber_count(self, destination=None):
    """Return a count of subscribers, for one destination or across all queues."""
    if destination:
        return len(self._queues[destination])
    return sum(len(subscribers) for subscribers in self._queues.values())
Returns a count of the number of subscribers .
42,034
def disconnect(self, connection):
    """Remove a subscriber connection, requeueing any pending (unacked) frame."""
    self.log.debug("Disconnecting %s" % connection)
    pending_frame = self._pending.get(connection)
    if pending_frame is not None:
        # Hand the unacknowledged frame back to the store for redelivery.
        self.store.requeue(pending_frame.headers.get('destination'), pending_frame)
        del self._pending[connection]
    for dest in list(self._queues.keys()):
        subscribers = self._queues[dest]
        if connection in subscribers:
            subscribers.remove(connection)
        if not subscribers:
            del self._queues[dest]
Removes a subscriber connection ensuring that any pending commands get requeued .
42,035
def send(self, message):
    """Send a MESSAGE frame to an eligible subscriber, or enqueue it when none exist.

    Raises:
        ValueError: if the frame carries no ``destination`` header.
    """
    dest = message.headers.get('destination')
    if not dest:
        raise ValueError("Cannot send frame with no destination: %s" % message)
    message.cmd = 'message'
    message.headers.setdefault('message-id', str(uuid.uuid4()))
    # Subscribers that still have an unacked frame are not eligible.
    eligible = [sub for sub in self._queues[dest] if sub not in self._pending]
    if eligible:
        target = self.subscriber_scheduler.choice(eligible, message)
        self.log.debug("Delivering message %s to subscriber %s" % (message, target))
        self._send_frame(target, message)
    else:
        self.log.debug("No eligible subscribers; adding message %s to queue %s" % (message, dest))
        self.store.enqueue(dest, message)
Sends a MESSAGE frame to an eligible subscriber connection .
42,036
def ack(self, connection, frame, transaction=None):
    """Acknowledge receipt of a message.

    A mismatched message-id logs a warning and requeues the pending frame;
    either way the pending slot is cleared and the backlog is drained.
    """
    self.log.debug("ACK %s for %s" % (frame, connection))
    if connection not in self._pending:
        self.log.debug("No pending messages for %s" % connection)
        return
    pending_frame = self._pending[connection]
    if pending_frame.headers.get('message-id') != frame.headers.get('message-id'):
        # Bug fix: read the id via headers.get (as the comparison above does)
        # instead of the unreliable `frame.message_id` attribute.
        self.log.warning("Got a ACK for unexpected message-id: %s" % frame.headers.get('message-id'))
        self.store.requeue(pending_frame.destination, pending_frame)
    if transaction is not None:
        # Remember the frame so an aborted transaction can redeliver it.
        self._transaction_frames[connection][transaction].append(pending_frame)
    del self._pending[connection]
    self._send_backlog(connection)
Acknowledge receipt of a message .
42,037
def resend_transaction_frames(self, connection, transaction):
    """Resend the messages that were ACK'd within the specified transaction."""
    frames = self._transaction_frames[connection][transaction]
    for frame in frames:
        self.send(frame)
Resend the messages that were ACK'd in the specified transaction .
42,038
def _send_frame(self, connection, frame):
    """Send a frame to one subscriber, tracking it as pending for reliable clients.

    Raises:
        RuntimeError: if a reliable subscriber already has an unacked frame.
    """
    assert connection is not None
    assert frame is not None
    self.log.debug("Delivering frame %s to connection %s" % (frame, connection))
    if connection.reliable_subscriber:
        # A reliable subscriber may only have one outstanding frame at a time.
        if connection in self._pending:
            raise RuntimeError("Connection already has a pending frame.")
        self.log.debug("Tracking frame %s as pending for connection %s" % (frame, connection))
        self._pending[connection] = frame
    connection.send_frame(frame)
Sends a frame to a specific subscriber connection .
42,039
def make_dbm():
    """Create a DBM queue store from CoilMQ configuration.

    Raises:
        ConfigError: when a required option is missing or the data directory
            is absent or not read/writable.
    """
    try:
        data_dir = config.get('coilmq', 'qstore.dbm.data_dir')
        cp_ops = config.getint('coilmq', 'qstore.dbm.checkpoint_operations')
        cp_timeout = config.getint('coilmq', 'qstore.dbm.checkpoint_timeout')
    except ConfigParser.NoOptionError as e:
        raise ConfigError('Missing configuration parameter: %s' % e)
    if not os.path.exists(data_dir):
        raise ConfigError('DBM directory does not exist: %s' % data_dir)
    if not os.access(data_dir, os.R_OK | os.W_OK):
        raise ConfigError('Cannot read and write DBM directory: %s' % data_dir)
    return DbmQueue(data_dir, checkpoint_operations=cp_ops,
                    checkpoint_timeout=cp_timeout)
Creates a DBM queue store pulling config values from the CoilMQ configuration .
42,040
def _sync(self):
    """Synchronize cached queue metadata with the underlying database.

    Syncs only when the operation count or elapsed time exceeds the
    configured checkpoint thresholds.
    """
    due = (self._opcount > self.checkpoint_operations
           or datetime.now() > self._last_sync + self.checkpoint_timeout)
    if due:
        self.log.debug("Synchronizing queue metadata.")
        self.queue_metadata.sync()
        self._last_sync = datetime.now()
        self._opcount = 0
    else:
        self.log.debug("NOT synchronizing queue metadata.")
Synchronize the cached data with the underlying database .
42,041
def read(config_values):
    """Read an ordered list of configuration values and deep-merge the results.

    Raises:
        RheaError: when the input is empty or a required config value cannot
            be read.
    """
    if not config_values:
        raise RheaError('Cannot read config_value: `{}`'.format(config_values))
    merged = {}
    for raw_value in to_list(config_values):
        spec = ConfigSpec.get_from(value=raw_value)
        spec.check_type()
        results = spec.read()
        if results and isinstance(results, Mapping):
            merged = deep_update(merged, results)
        elif spec.check_if_exists:
            raise RheaError('Cannot read config_value: `{}`'.format(spec))
    return merged
Reads an ordered list of configuration values and deep merge the values in reverse order .
42,042
def parse_headers(buff):
    """Parse a frame preamble from ``buff`` and return (command, headers).

    Raises:
        EmptyBuffer: when no preamble lines could be read.
    """
    lines = [six.u(raw).decode()
             for raw in iter(lambda: buff.readline().strip(), b'')]
    if not lines:
        raise EmptyBuffer()
    command = lines[0]
    headers = OrderedDict(line.split(':') for line in lines[1:])
    return command, headers
Parses buffer and returns command and headers as strings
42,043
def pack(self):
    """Create the wire-format byte string for this frame (NUL-terminated)."""
    self.headers.setdefault('content-length', len(self.body))
    header_block = "".join("{0}:{1}\n".format(key, value)
                           for key, value in self.headers.items())
    body = self.body if isinstance(self.body, six.binary_type) else six.b(self.body)
    return six.b("{0}\n{1}\n".format(self.cmd, header_block)) + body + six.b('\x00')
Create a string representation from object state .
42,044
def extract_frame(self):
    """Pull one complete frame off the buffer and return it, or None if incomplete."""
    self._buffer.seek(self._pointer, 0)
    try:
        frame = Frame.from_buffer(self._buffer)
    except (IncompleteFrame, EmptyBuffer):
        # Rewind so a later call retries once more data has arrived.
        self._buffer.seek(self._pointer, 0)
        return None
    self._pointer = self._buffer.tell()
    return frame
Pulls one complete frame off the buffer and returns it .
42,045
def choice(self, subscribers, message):
    """Choose a random connection from the pool, favoring reliable subscribers."""
    if not subscribers:
        return None
    reliable = [sub for sub in subscribers if sub.reliable_subscriber]
    pool = reliable if reliable else subscribers
    return random.choice(pool)
Choose a random connection favoring those that are reliable from subscriber pool to deliver specified message .
42,046
def choice(self, queues, connection):
    """Choose a random queue (by name) for messages to the specified connection."""
    if not queues:
        return None
    # Iterating a dict yields its keys, so no explicit .keys() call is needed.
    return random.choice(list(queues))
Chooses a random queue for messages to specified connection .
42,047
def get_uri(self, key, is_list=False, is_optional=False, is_secret=False, is_local=False, default=None, options=None):
    """Get the value for ``key`` converted to UriSpec (or a list of UriSpec)."""
    getter = self._get_typed_list_value if is_list else self._get_typed_value
    return getter(key=key,
                  target_type=UriSpec,
                  type_convert=self.parse_uri_spec,
                  is_optional=is_optional,
                  is_secret=is_secret,
                  is_local=is_local,
                  default=default,
                  options=options)
Get the value corresponding to the key and convert it to UriSpec .
42,048
def get_auth(self, key, is_list=False, is_optional=False, is_secret=False, is_local=False, default=None, options=None):
    """Get the value for ``key`` converted to AuthSpec (or a list of AuthSpec)."""
    getter = self._get_typed_list_value if is_list else self._get_typed_value
    return getter(key=key,
                  target_type=AuthSpec,
                  type_convert=self.parse_auth_spec,
                  is_optional=is_optional,
                  is_secret=is_secret,
                  is_local=is_local,
                  default=default,
                  options=options)
Get the value corresponding to the key and convert it to AuthSpec .
42,049
def get_list(self, key, is_optional=False, is_secret=False, is_local=False, default=None, options=None):
    """Get the value for ``key``, splitting comma-separated text into a list.

    Items are stripped of whitespace and empty items are dropped.
    """
    def split_csv(value):
        return [item.strip() for item in value.split(',') if item.strip()]

    return self._get_typed_value(key=key,
                                 target_type=list,
                                 type_convert=split_csv,
                                 is_optional=is_optional,
                                 is_secret=is_secret,
                                 is_local=is_local,
                                 default=default,
                                 options=options)
Get the value corresponding to the key and convert comma separated values to a list .
42,050
def _get_typed_value(self, key, target_type, type_convert, is_optional=False, is_secret=False, is_local=False, default=None, options=None):
    """Return the value for ``key`` converted to ``target_type``.

    String values go through ``type_convert``; values already of the target
    type pass through unchanged.

    Raises:
        RheaError: for missing non-optional keys or unconvertible values.
    """
    try:
        value = self._get(key)
    except KeyError:
        if not is_optional:
            raise RheaError('No value was provided for the non optional key `{}`.'.format(key))
        return default

    def _register():
        # Record the key and validate the value against allowed options.
        self._add_key(key, is_secret=is_secret, is_local=is_local)
        self._check_options(key=key, value=value, options=options)

    if isinstance(value, six.string_types):
        try:
            _register()
            return type_convert(value)
        except ValueError:
            raise RheaError("Cannot convert value `{}` (key: `{}`) "
                            "to `{}`".format(value, key, target_type))

    if isinstance(value, target_type):
        _register()
        return value

    raise RheaError("Cannot convert value `{}` (key: `{}`) "
                    "to `{}`".format(value, key, target_type))
Return the value corresponding to the key converted to the given type .
42,051
def _get_typed_list_value(self, key, target_type, type_convert, is_optional=False, is_secret=False, is_local=False, default=None, options=None):
    """Return the value for ``key`` parsed as a JSON list, converting each element.

    Raises:
        RheaError: when the value is not a list or an element cannot be
            converted to ``target_type``.
    """
    value = self._get_typed_value(key=key,
                                  target_type=list,
                                  type_convert=json.loads,
                                  is_optional=is_optional,
                                  is_secret=is_secret,
                                  is_local=is_local,
                                  default=default,
                                  options=options)
    if not value:
        return default
    # Report Mapping as 'dict' so error messages stay readable.
    raise_type = 'dict' if target_type == Mapping else target_type
    if not isinstance(value, list):
        raise RheaError("Cannot convert value `{}` (key: `{}`) "
                        "to `{}`".format(value, key, raise_type))
    converted = []
    for element in value:
        if isinstance(element, six.string_types):
            try:
                converted.append(type_convert(element))
            except ValueError:
                raise RheaError("Cannot convert value `{}` (found in list key: `{}`) "
                                "to `{}`".format(element, key, raise_type))
        elif isinstance(element, target_type):
            converted.append(element)
        else:
            raise RheaError("Cannot convert value `{}` (found in list key: `{}`) "
                            "to `{}`".format(element, key, raise_type))
    return converted
Return the value corresponding to the key converted first to list than each element to the given type .
42,052
def from_configfile(self, configfile):
    """Initialize the authentication store from a config-style file.

    Accepts either a path or a file-like object; the file must contain an
    [auth] section of login/passcode pairs.

    Raises:
        ValueError: when the file cannot be parsed or lacks an [auth] section.
    """
    cfg = ConfigParser()
    if hasattr(configfile, 'read'):
        cfg.read_file(configfile)
    elif not cfg.read(configfile):
        raise ValueError('Could not parse auth file: %s' % configfile)
    if not cfg.has_section('auth'):
        raise ValueError('Config file contains no [auth] section.')
    self.store = dict(cfg.items('auth'))
Initialize the authentication store from a config - style file .
42,053
def authenticate(self, login, passcode):
    """Authenticate: True only when the login exists and the passcode matches."""
    if login not in self.store:
        return False
    return self.store[login] == passcode
Authenticate the login and passcode .
42,054
def process_frame(self, frame):
    """Dispatch a received frame to the appropriate internal method.

    Frames inside an open transaction (other than BEGIN/COMMIT/ABORT) are
    buffered rather than executed immediately. Processing errors are reported
    back to the client as an ERROR frame; on success a RECEIPT frame is sent
    when one was requested.
    """
    cmd_method = frame.cmd.lower()
    if not cmd_method in VALID_COMMANDS:
        raise ProtocolError("Invalid STOMP command: {}".format(frame.cmd))
    method = getattr(self, cmd_method, None)
    # Only CONNECT/STOMP are permitted before a session is established.
    if not self.engine.connected and method not in (self.connect, self.stomp):
        raise ProtocolError("Not connected.")
    try:
        transaction = frame.headers.get('transaction')
        if not transaction or method in (self.begin, self.commit, self.abort):
            method(frame)
        else:
            # Buffer transactional frames until COMMIT/ABORT.
            if not transaction in self.engine.transactions:
                raise ProtocolError("Invalid transaction specified: %s" % transaction)
            self.engine.transactions[transaction].append(frame)
    except Exception as e:
        self.engine.log.error("Error processing STOMP frame: %s" % e)
        self.engine.log.exception(e)
        try:
            self.engine.connection.send_frame(ErrorFrame(str(e), str(e)))
        except Exception as e:
            self.engine.log.error("Could not send error frame: %s" % e)
            self.engine.log.exception(e)
    else:
        # CONNECT has its own receipt semantics, so it is excluded here.
        if frame.headers.get('receipt') and method != self.connect:
            self.engine.connection.send_frame(ReceiptFrame(receipt=frame.headers.get('receipt')))
Dispatches a received frame to the appropriate internal method .
42,055
def init_config(config_file=None):
    """Initialize the module-level configuration from a config file, when present.

    Raises:
        ValueError: when the given file exists but cannot be read as config.
    """
    global config
    if config_file and os.path.exists(config_file):
        files_read = config.read([config_file])
        if not files_read:
            raise ValueError("Could not read configuration from file: %s" % config_file)
Initialize the configuration from a config file .
42,056
def init_logging(logfile=None, loglevel=logging.INFO, configfile=None):
    """Configure logging from either a logfile+level pair or a config file.

    A config file containing a [loggers] section takes precedence over an
    explicitly specified logfile.
    """
    use_configfile = False
    if configfile and os.path.exists(configfile):
        testcfg = ConfigParser()
        read = testcfg.read(configfile)
        use_configfile = read and testcfg.has_section('loggers')
    if use_configfile:
        logging.config.fileConfig(configfile)
        if logfile:
            msg = "Config file conflicts with explicitly specified logfile; config file takes precedence."
            # Bug fix: logging.warn is a deprecated alias of logging.warning.
            logging.warning(msg)
    else:
        # Renamed from `format` to avoid shadowing the builtin.
        log_format = '%(asctime)s [%(threadName)s] %(name)s - %(levelname)s - %(message)s'
        if logfile:
            logging.basicConfig(filename=logfile, level=loglevel, format=log_format)
        else:
            logging.basicConfig(level=loglevel, format=log_format)
Configures the logging using either basic filename + loglevel or passed config file path .
42,057
def send_frame(self, frame):
    """Send a frame to the connected socket client."""
    packed = frame.pack()
    if self.debug:
        self.log.debug("SEND: %r" % packed)
    self.request.sendall(packed)
Sends a frame to connected socket client .
42,058
def server_close(self):
    """Close the socket server and release associated resources."""
    self.log.debug("Closing the socket server connection.")
    TCPServer.server_close(self)
    self.queue_manager.close()
    self.topic_manager.close()
    authenticator = self.authenticator
    # Authenticators are not required to expose a close() method.
    if hasattr(authenticator, 'close'):
        authenticator.close()
    self.shutdown()
Closes the socket server and any associated resources .
42,059
def serve_forever(self, poll_interval=0.5):
    """Handle one request at a time until shutdown, signalling serving state via events."""
    self._serving_event.set()
    self._shutdown_request_event.clear()
    TCPServer.serve_forever(self, poll_interval=poll_interval)
Handle one request at a time until shutdown .
42,060
def randomize(self, period=None):
    """Randomize the permutation table used by the noise functions.

    This makes them generate a different noise pattern for the same inputs.
    Optionally updates the table period first.
    """
    if period is not None:
        self.period = period
    table = list(range(self.period))
    last = self.period - 1
    # Swap each slot with a randomly chosen one (the loop visits 0..period-1).
    for position in range(self.period):
        other = self.randint_function(0, last)
        table[position], table[other] = table[other], table[position]
    # Double the table so lattice lookups can index past the end.
    self.permutation = tuple(table) * 2
Randomize the permutation table used by the noise functions . This makes them generate a different noise pattern for the same inputs .
42,061
def noise3(self, x, y, z, repeat, base=0.0):
    """Tileable 3D noise.

    ``repeat`` is the integer interval in each dimension at which the noise
    pattern repeats; ``base`` offsets the permutation lookups so a different
    texture can be generated for the same repeat interval.
    """
    # Integer lattice coordinates, wrapped so the pattern tiles every `repeat` units.
    i = int(fmod(floor(x), repeat))
    j = int(fmod(floor(y), repeat))
    k = int(fmod(floor(z), repeat))
    ii = (i + 1) % repeat
    jj = (j + 1) % repeat
    kk = (k + 1) % repeat
    if base:
        # Shift all lattice indices into a different permutation slice.
        i += base
        j += base
        k += base
        ii += base
        jj += base
        kk += base
    # Fractional position within the current lattice cell.
    x -= floor(x)
    y -= floor(y)
    z -= floor(z)
    # Quintic fade curves: 6t^5 - 15t^4 + 10t^3.
    fx = x ** 3 * (x * (x * 6 - 15) + 10)
    fy = y ** 3 * (y * (y * 6 - 15) + 10)
    fz = z ** 3 * (z * (z * 6 - 15) + 10)
    # Hash the cell corner coordinates through the permutation table.
    perm = self.permutation
    A = perm[i]
    AA = perm[A + j]
    AB = perm[A + jj]
    B = perm[ii]
    BA = perm[B + j]
    BB = perm[B + jj]
    # Trilinear interpolation of the gradients at the 8 cell corners.
    return lerp(fz, lerp(fy, lerp(fx, grad3(perm[AA + k], x, y, z),
                                  grad3(perm[BA + k], x - 1, y, z)),
                         lerp(fx, grad3(perm[AB + k], x, y - 1, z),
                              grad3(perm[BB + k], x - 1, y - 1, z))),
                lerp(fy, lerp(fx, grad3(perm[AA + kk], x, y, z - 1),
                              grad3(perm[BA + kk], x - 1, y, z - 1)),
                     lerp(fx, grad3(perm[AB + kk], x, y - 1, z - 1),
                          grad3(perm[BB + kk], x - 1, y - 1, z - 1))))
Tileable 3D noise . repeat specifies the integer interval in each dimension when the noise pattern repeats . base allows a different texture to be generated for the same repeat interval .
42,062
def load(self):
    """Load the noise texture data into the currently bound 3D texture unit."""
    glTexImage3D(GL_TEXTURE_3D, 0, GL_LUMINANCE16_ALPHA16,
                 self.width, self.width, self.width,
                 0, GL_LUMINANCE_ALPHA, GL_UNSIGNED_SHORT,
                 ctypes.byref(self.data))
Load the noise texture data into the current texture unit
42,063
def enable(self):
    """Enable 3D texturing with repeat wrapping and linear filtering.

    Convenience method so the texture may be used by the ffpnoise shader
    function.
    """
    glEnable(GL_TEXTURE_3D)
    for wrap_axis in (GL_TEXTURE_WRAP_S, GL_TEXTURE_WRAP_T, GL_TEXTURE_WRAP_R):
        glTexParameteri(GL_TEXTURE_3D, wrap_axis, GL_REPEAT)
    glTexParameteri(GL_TEXTURE_3D, GL_TEXTURE_MAG_FILTER, GL_LINEAR)
    glTexParameteri(GL_TEXTURE_3D, GL_TEXTURE_MIN_FILTER, GL_LINEAR)
Convenience method to enable 3D texturing state so the texture may be used by the ffpnoise shader function
42,064
def main():
    """Used for development and testing of the alarm expression parser.

    NOTE(review): this is Python 2 code — the ``str.decode('utf8')`` calls
    below do not exist on Python 3 ``str`` objects.
    """
    # A mix of valid and intentionally tricky expressions (unicode metric and
    # dimension names, nested boolean operators, deterministic counts, ...).
    expr_list = ["max(-_.千幸福的笑脸{घोड़ा=馬, "
                 "dn2=dv2,千幸福的笑脸घ=千幸福的笑脸घ}) gte 100 "
                 "times 3 && "
                 "(min(ເຮືອນ{dn3=dv3,家=дом}) < 10 or sum(biz{dn5=dv5}) >99 and "
                 "count(fizzle) lt 0or count(baz) > 1)".decode('utf8'),
                 "max(foo{hostname=mini-mon,千=千}, 120) > 100 and (max(bar)>100 "
                 " or max(biz)>100)".decode('utf8'),
                 "max(foo)>=100",
                 "test_metric{this=that, that = this} < 1",
                 "max ( 3test_metric5 { this = that }) lt 5 times 3",
                 "3test_metric5 lt 3",
                 "ntp.offset > 1 or ntp.offset < -5",
                 "max(3test_metric5{it's this=that's it}) lt 5 times 3",
                 "count(log.error{test=1}, deterministic) > 1.0",
                 "count(log.error{test=1}, deterministic, 120) > 1.0",
                 "last(test_metric{hold=here}) < 13",
                 "count(log.error{test=1}, deterministic, 130) > 1.0",
                 "count(log.error{test=1}, deterministic) > 1.0 times 0",
                 ]
    for expr in expr_list:
        print('orig expr: {}'.format(expr.encode('utf8')))
        sub_exprs = []
        try:
            alarm_expr_parser = AlarmExprParser(expr)
            sub_exprs = alarm_expr_parser.sub_expr_list
        except Exception as ex:
            # Parse failures are expected for the malformed samples above.
            print("Parse failed: {}".format(ex))
        for sub_expr in sub_exprs:
            print('sub expr: {}'.format(sub_expr.fmtd_sub_expr_str.encode('utf8')))
            print('sub_expr dimensions: {}'.format(sub_expr.dimensions_str.encode('utf8')))
            print('sub_expr deterministic: {}'.format(sub_expr.deterministic))
            print('sub_expr period: {}'.format(sub_expr.period))
            print("")
        print("")
Used for development and testing .
42,065
def fmtd_sub_expr_str(self):
    """Return this sub-expression as a single formatted unicode string.

    Period and periods are included only when they differ from the
    module defaults.
    """
    pieces = [u"{}({}".format(self.normalized_func, self._metric_name)]
    if self._dimensions is not None:
        pieces.append("{" + self.dimensions_str + "}")
    if self._period != _DEFAULT_PERIOD:
        pieces.append(", {}".format(self._period))
    pieces.append(")")
    pieces.append(" {} {}".format(self._operator, self._threshold))
    if self._periods != _DEFAULT_PERIODS:
        pieces.append(" times {}".format(self._periods))
    return u"".join(pieces)
Get the entire sub expressions as a string with spaces .
42,066
def normalized_operator(self):
    """Return the comparison operator normalized to LT, GT, LTE or GTE.

    Accepts either the word form (case-insensitive) or the symbol form;
    returns None for unrecognized operators, as the original chain did.
    """
    canonical = {
        "lt": u"LT", "<": u"LT",
        "gt": u"GT", ">": u"GT",
        "lte": u"LTE", "<=": u"LTE",
        "gte": u"GTE", ">=": u"GTE",
    }
    return canonical.get(self._operator.lower())
Get the operator as one of LT GT LTE or GTE .
42,067
def validate_basic_smoother():
    """Reproduce Friedman's Figure 2b test, overlaying pyace and FORTRAN smooths."""
    x, y = sort_data(
        *smoother_friedman82.build_sample_smoother_problem_friedman82())
    plt.figure()
    for window in smoother.DEFAULT_SPANS:
        pyace_smooth = smoother.perform_smooth(x, y, window)
        fortran_smooth, _resids = run_friedman_smooth(x, y, window)
        plt.plot(x, pyace_smooth.smooth_result, '.-',
                 label='pyace span = {0}'.format(window))
        plt.plot(x, fortran_smooth, '.-',
                 label='Friedman span = {0}'.format(window))
    finish_plot()
Run Friedman s test from Figure 2b .
42,068
def validate_basic_smoother_resid():
    """Compare cross-validated residuals between pyace and the FORTRAN smoother."""
    x, y = sort_data(
        *smoother_friedman82.build_sample_smoother_problem_friedman82())
    plt.figure()
    for window in smoother.DEFAULT_SPANS:
        pyace_smooth = smoother.perform_smooth(x, y, window)
        _fortran_smooth, fortran_resids = run_friedman_smooth(x, y, window)
        plt.plot(x, pyace_smooth.cross_validated_residual, '.-',
                 label='pyace span = {0}'.format(window))
        plt.plot(x, fortran_resids, '.-',
                 label='Friedman span = {0}'.format(window))
    finish_plot()
Compare residuals .
42,069
def validate_supersmoother():
    """Validate the Python supersmoother against FORTRAN SUPSMU and SMOOTH."""
    x, y = smoother_friedman82.build_sample_smoother_problem_friedman82()
    x, y = sort_data(x, y)
    pyace_smooth = smoother.perform_smooth(
        x, y, smoother_cls=supersmoother.SuperSmootherWithPlots)
    supsmu_result = run_freidman_supsmu(x, y, bass_enhancement=0.0)
    mace_result = run_mace_smothr(x, y, bass_enhancement=0.0)
    # Overlay raw data and all three smooths on one figure.
    for series, fmt, label in ((y, '.', 'Data'),
                               (pyace_smooth.smooth_result, '-', 'pyace'),
                               (supsmu_result, '--', 'SUPSMU'),
                               (mace_result, ':', 'SMOOTH')):
        plt.plot(x, series, fmt, label=label)
    plt.legend()
    plt.savefig('supersmoother_validation.png')
Validate the supersmoother .
42,070
def validate_supersmoother_bass():
    """Show the effect of increasing bass enhancement on the supersmoother."""
    x, y = smoother_friedman82.build_sample_smoother_problem_friedman82()
    plt.figure()
    plt.plot(x, y, '.', label='Data')
    for bass in range(0, 10, 3):
        bass_smooth = supersmoother.SuperSmoother()
        bass_smooth.set_bass_enhancement(bass)
        bass_smooth.specify_data_set(x, y)
        bass_smooth.compute()
        plt.plot(x, bass_smooth.smooth_result, '.',
                 label='Bass = {0}'.format(bass))
    finish_plot()
Validate the supersmoother with extra bass .
42,071
def validate_average_best_span():
    """Average the smoothed best-span curve over many trials (Friedman Fig. 2d)."""
    N = 200
    num_trials = 400
    avg = numpy.zeros(N)
    for trial in range(num_trials):
        x, y = smoother_friedman82.build_sample_smoother_problem_friedman82(N=N)
        trial_smooth = smoother.perform_smooth(
            x, y, smoother_cls=supersmoother.SuperSmoother)
        avg += trial_smooth._smoothed_best_spans.smooth_result
        # Progress indicator every 20 trials.
        if not (trial + 1) % 20:
            print(trial + 1)
    avg /= num_trials
    # Plot against the last trial's x grid, matching the original behavior.
    plt.plot(trial_smooth.x, avg, '.', label='Average JCV')
    finish_plot()
Figure 2d? from Friedman .
42,072
def validate_known_curve():
    """Compare the slow- and fast-update basic smoothers on a sine curve."""
    plt.figure()
    N = 100
    x = numpy.linspace(-1, 1, N)
    y = numpy.sin(4 * x)
    # Run the supersmoother once with each underlying basic smoother.
    for basic_cls, label in (
            (smoother.BasicFixedSpanSmootherSlowUpdate, 'Slow'),
            (smoother.BasicFixedSpanSmoother, 'Fast')):
        smoother.DEFAULT_BASIC_SMOOTHER = basic_cls
        smooth = smoother.perform_smooth(
            x, y, smoother_cls=supersmoother.SuperSmoother)
        plt.plot(x, smooth.smooth_result, label=label)
    plt.plot(x, y, '.', label='data')
    plt.legend()
    plt.show()
Validate on a sin function .
42,073
def finish_plot():
    """Apply the standard legend, grid and axis labels, then show the plot."""
    plt.legend()
    plt.grid(color='0.7')
    plt.xlabel('x')
    plt.ylabel('y')
    plt.show()
Helper for plotting .
42,074
def run_freidman_supsmu(x, y, bass_enhancement=0.0):
    """Call the FORTRAN SUPSMU routine and return the smoothed values."""
    n_points = len(x)
    weights = numpy.ones(n_points)
    smoothed = numpy.zeros(n_points)
    # Scratch workspace required by the FORTRAN interface.
    scratch = numpy.zeros((n_points, 7))
    mace.supsmu(x, y, weights, 1, 0.0, bass_enhancement, smoothed, scratch)
    return smoothed
Run the FORTRAN supersmoother .
42,075
def run_friedman_smooth(x, y, span):
    """Call the FORTRAN SMOOTH routine.

    Returns (smoothed values, cross-validated residuals).
    """
    n_points = len(x)
    weights = numpy.ones(n_points)
    smoothed = numpy.zeros(n_points)
    residuals = numpy.zeros(n_points)
    mace.smooth(x, y, weights, span, 1, 1e-7, smoothed, residuals)
    return smoothed, residuals
Run the FORTRAN smoother .
42,076
def run_mace_smothr(x, y, bass_enhancement=0.0):
    """Run the FORTRAN SMOTHR routine and return the smoothed values.

    NOTE(review): ``bass_enhancement`` is accepted for signature parity with
    the other runners but is never passed to ``mace.smothr`` -- confirm
    whether that is intentional.
    """
    N = len(x)
    weight = numpy.ones(N)
    results = numpy.zeros(N)
    # Scratch workspace required by the FORTRAN interface.
    flags = numpy.zeros((N, 7))
    mace.smothr(1, x, y, weight, results, flags)
    return results
Run the FORTRAN SMOTHR .
42,077
def sort_data(x, y):
    """Sort paired data by x value (ties broken by y).

    :param x: iterable of x values.
    :param y: iterable of matching y values.
    :returns: two tuples (x_sorted, y_sorted).

    Handles empty input by returning a pair of empty tuples; the original
    ``zip(*xy)`` unpacking raised ValueError on empty data.
    """
    xy = sorted(zip(x, y))
    if not xy:
        return (), ()
    x, y = zip(*xy)
    return x, y
Sort the data .
42,078
def compute(self):
    """Run the FORTRAN smoother over the stored data set."""
    smooth, residual = run_friedman_smooth(self.x, self.y, self._span)
    self.smooth_result = smooth
    self.cross_validated_residual = residual
Run smoother .
42,079
def compute(self):
    """Run the FORTRAN SuperSmoother and record the (unsorted) results."""
    smoothed = run_freidman_supsmu(self.x, self.y)
    self.smooth_result = smoothed
    self._store_unsorted_results(smoothed, numpy.zeros(len(smoothed)))
Run SuperSmoother .
42,080
def _partition(self):
    """Acquire ownership of a subset of Kafka partitions via ZooKeeper.

    Blocks until this consumer holds locks on a partition set, then
    recreates the underlying Kafka consumer restricted to those
    partitions. Raises on partitioner failure or any fatal error.
    """
    # Lazily create and start the ZooKeeper client on first use.
    if not self._kazoo_client:
        self._kazoo_client = KazooClient(hosts=self._zookeeper_url)
        self._kazoo_client.start()

    state_change_event = threading.Event()

    self._set_partitioner = (
        SetPartitioner(self._kazoo_client,
                       path=self._zookeeper_path,
                       set=self._consumer.fetch_offsets.keys(),
                       state_change_event=state_change_event,
                       identifier=str(datetime.datetime.now())))

    try:
        while 1:
            if self._set_partitioner.failed:
                raise Exception("Failed to acquire partition")
            elif self._set_partitioner.release:
                # Another member changed the group; give up our locks
                # so the set can be re-partitioned.
                log.info("Releasing locks on partition set {} "
                         "for topic {}".format(self._partitions,
                                               self._kafka_topic))
                self._set_partitioner.release_set()
                self._partitions = []
            elif self._set_partitioner.acquired:
                if not self._partitions:
                    self._partitions = [p for p in self._set_partitioner]
                if not self._partitions:
                    # Acquired but got nothing -- wait for the partitioner
                    # state to change before trying again.
                    log.info("Not assigned any partitions on topic {},"
                             " waiting for a Partitioner state change".format(
                                 self._kafka_topic))
                    state_change_event.wait()
                    state_change_event.clear()
                    continue
                log.info("Acquired locks on partition set {} "
                         "for topic {}".format(self._partitions,
                                               self._kafka_topic))
                # Restart the consumer bound to only the acquired partitions.
                self._consumer.stop()
                self._consumer = self._create_kafka_consumer(self._partitions)
                return
            elif self._set_partitioner.allocating:
                log.info("Waiting to acquire locks on partition set")
                self._set_partitioner.wait_for_acquire()
    except Exception:
        log.exception('KafkaConsumer encountered fatal exception '
                      'processing messages.')
        raise
Consume messages from kafka
42,081
def connect_mysql(host, port, user, password, database):
    """Open and return a pymysql connection.

    NOTE(review): the accompanying summary says "with retries", but no
    retry logic is implemented here -- presumably handled by the caller;
    confirm.
    """
    return pymysql.connect(host=host, port=port, user=user,
                           passwd=password, db=database)
Connect to MySQL (retry handling, if any, is done by the caller).
42,082
def unsort_vector(data, indices_of_increasing):
    """Un-permute 1-D data that was sorted by ``indices_of_increasing``.

    Element ``j`` of ``data`` is placed at position
    ``indices_of_increasing[j]`` of the result, inverting the permutation
    applied by the companion sort.

    The original implementation called ``list.index`` once per element
    (O(n^2)); this builds the inverse permutation in O(n) with a single
    fancy-indexed assignment.
    """
    data = numpy.asarray(data)
    result = numpy.empty(len(data), dtype=data.dtype)
    result[numpy.asarray(indices_of_increasing)] = data
    return result
Un-permute 1-D data that was sorted by indices_of_increasing .
42,083
def specify_data_set(self, x_input, y_input):
    """Define the input data for ACE.

    :param x_input: independent variable data; iterated elsewhere as a
        list of columns.
    :param y_input: dependent variable values.
    """
    self.x = x_input
    self.y = y_input
Define input to ACE .
42,084
def solve(self):
    """Run the ACE calculational loop.

    Alternates inner backfitting iterations (updating the x transforms)
    with updates of the y transform, until the outer error stops
    decreasing or MAX_OUTERS iterations are reached.
    """
    self._initialize()
    while self._outer_error_is_decreasing() and self._outer_iters < MAX_OUTERS:
        print('* Starting outer iteration {0:03d}. Current err = {1:12.5E}'
              ''.format(self._outer_iters, self._last_outer_error))
        self._iterate_to_update_x_transforms()
        self._update_y_transform()
        self._outer_iters += 1
Run the ACE calculational loop .
42,085
def _initialize(self):
    """Normalize y (zero mean, unit variance) and zero the x transforms."""
    centered_y = self.y - numpy.mean(self.y)
    self.y_transform = centered_y / numpy.std(centered_y)
    self.x_transforms = [numpy.zeros(len(self.y)) for _column in self.x]
    self._compute_sorted_indices()
Set up and normalize initial data once input data is specified .
42,086
def _compute_sorted_indices ( self ) : sorted_indices = [ ] for to_sort in [ self . y ] + self . x : data_w_indices = [ ( val , i ) for ( i , val ) in enumerate ( to_sort ) ] data_w_indices . sort ( ) sorted_indices . append ( [ i for val , i in data_w_indices ] ) self . _yi_sorted = sorted_indices [ 0 ] self . _xi_sorted = sorted_indices [ 1 : ]
The smoothers need sorted data . This sorts it from the perspective of each column .
42,087
def _outer_error_is_decreasing ( self ) : is_decreasing , self . _last_outer_error = self . _error_is_decreasing ( self . _last_outer_error ) return is_decreasing
True if outer iteration error is decreasing .
42,088
def _error_is_decreasing ( self , last_error ) : current_error = self . _compute_error ( ) is_decreasing = current_error < last_error return is_decreasing , current_error
True if current error is less than last_error .
42,089
def _compute_error ( self ) : sum_x = sum ( self . x_transforms ) err = sum ( ( self . y_transform - sum_x ) ** 2 ) / len ( sum_x ) return err
Compute unexplained error .
42,090
def _iterate_to_update_x_transforms(self):
    """Perform the inner backfitting iteration.

    Repeats the x-transform update until the inner error stops decreasing.
    """
    self._inner_iters = 0
    self._last_inner_error = float('inf')
    while self._inner_error_is_decreasing():
        print(' Starting inner iteration {0:03d}. Current err = {1:12.5E}'
              ''.format(self._inner_iters, self._last_inner_error))
        self._update_x_transforms()
        self._inner_iters += 1
Perform the inner iteration .
42,091
def _update_x_transforms(self):
    """Compute a new set of x-transform functions phi_k (backfitting sweep).

    For each k, smooths theta(y) - sum_{j != k} phi_j(x_j) against x_k,
    mean-centers the result, and stores it as the new phi_k. The residual
    is updated incrementally rather than recomputed from the full sum.
    """
    theta_minus_phis = self.y_transform - numpy.sum(self.x_transforms, axis=0)
    for xtransform_index in range(len(self.x_transforms)):
        xtransform = self.x_transforms[xtransform_index]

        # Work in x_k-sorted order, as the smoothers require.
        sorted_data_indices = self._xi_sorted[xtransform_index]
        xk_sorted = sort_vector(self.x[xtransform_index], sorted_data_indices)
        xtransform_sorted = sort_vector(xtransform, sorted_data_indices)
        theta_minus_phis_sorted = sort_vector(theta_minus_phis,
                                              sorted_data_indices)

        # Adding phi_k back in makes the smooth target
        # theta - sum of the OTHER transforms.
        to_smooth = theta_minus_phis_sorted + xtransform_sorted
        smoother = perform_smooth(xk_sorted, to_smooth,
                                  smoother_cls=self._smoother_cls)
        updated_x_transform_smooth = smoother.smooth_result
        # Keep each transform zero-mean.
        updated_x_transform_smooth -= numpy.mean(updated_x_transform_smooth)

        # Return to original data order before storing.
        unsorted_xt = unsort_vector(updated_x_transform_smooth,
                                    sorted_data_indices)
        self.x_transforms[xtransform_index] = unsorted_xt

        # Incremental residual update for the next k.
        tmp_unsorted = unsort_vector(to_smooth, sorted_data_indices)
        theta_minus_phis = tmp_unsorted - unsorted_xt
Compute a new set of x - transform functions phik .
42,092
def write_input_to_file(self, fname='ace_input.txt'):
    """Write the y and x input columns used in this run to a space-delimited text file."""
    self._write_columns(fname, self.x, self.y)
Write y and x values used in this run to a space - delimited txt file .
42,093
def write_transforms_to_file(self, fname='ace_transforms.txt'):
    """Write the y and x transforms computed in this run to a space-delimited text file."""
    self._write_columns(fname, self.x_transforms, self.y_transform)
Write y and x transforms used in this run to a space - delimited txt file .
42,094
def build_sample_smoother_problem_friedman82(N=200):
    """Generate the noisy sine sample problem from Friedman's 1982 paper.

    :param N: number of sample points.
    :returns: (x, y) arrays with heteroscedastic noise growing with x.
    """
    x = numpy.random.uniform(size=N)
    noise = numpy.random.standard_normal(N)
    y = numpy.sin(2 * math.pi * (1 - x) ** 2) + x * noise
    return x, y
Sample problem from supersmoother publication .
42,095
def add_moving_element(self, element):
    """Add a moving element to the board, initializing it against the board's canvas."""
    element.initialize(self.canvas)
    self.elements.append(element)
Add elements to the board
42,096
def on_key_pressed(self, event):
    """Translate arrow-key and space-bar events into joystick / mode commands.

    NOTE(review): the bare ``return`` below disables this entire handler;
    every branch after it is dead code. Confirm whether this is an
    intentional temporary disable.
    """
    return
    if event.keysym == "Up":
        self.manager.set_joystick(0.0, -1.0, 0)
    elif event.keysym == "Down":
        self.manager.set_joystick(0.0, 1.0, 0)
    elif event.keysym == "Left":
        self.manager.set_joystick(-1.0, 0.0, 0)
    elif event.keysym == "Right":
        self.manager.set_joystick(1.0, 0.0, 0)
    elif event.char == " ":
        # Space toggles between disabled and operator-control modes.
        mode = self.manager.get_mode()
        if mode == self.manager.MODE_DISABLED:
            self.manager.set_mode(self.manager.MODE_OPERATOR_CONTROL)
        else:
            self.manager.set_mode(self.manager.MODE_DISABLED)
likely to take in a set of parameters to treat as up down left right likely to actually be based on a joystick event ... not sure yet
42,097
def perform_smooth(x_values, y_values, span=None, smoother_cls=None):
    """Build, configure, and run a smoother over (x, y).

    :param span: smoothing span passed through to the smoother (may be None).
    :param smoother_cls: smoother class to instantiate; defaults to
        DEFAULT_BASIC_SMOOTHER.
    :returns: the computed smoother instance.
    """
    cls = DEFAULT_BASIC_SMOOTHER if smoother_cls is None else smoother_cls
    instance = cls()
    instance.specify_data_set(x_values, y_values)
    instance.set_span(span)
    instance.compute()
    return instance
Convenience function to run the basic smoother .
42,098
def add_data_point_xy(self, x, y):
    """Append a single (x, y) observation to the data set to be smoothed."""
    self.x.append(x)
    self.y.append(y)
Add a new data point to the data set to be smoothed .
42,099
def specify_data_set(self, x_input, y_input, sort_data=False):
    """Fully define data by lists of x values and y values.

    :param x_input: sequence of x values.
    :param y_input: sequence of matching y values.
    :param sort_data: if True, sort the pairs by x and remember the
        original positions so results can be un-sorted later.
    :raises RuntimeError: if sort_data is True and x values are not unique.
    """
    if sort_data:
        pairs = sorted(zip(x_input, y_input))
        x, y = zip(*pairs)
        # Map each x value to its first position in the caller's ordering
        # in O(n), replacing the quadratic per-element list.index() scan
        # (setdefault keeps the FIRST occurrence, matching list.index).
        position_of_value = {}
        for position, value in enumerate(x_input):
            position_of_value.setdefault(value, position)
        self._original_index_of_xvalue = [position_of_value[xi] for xi in x]
        # Duplicate x values collapse to the same first position, so the
        # set is smaller than x and the duplication is detected here.
        if len(set(self._original_index_of_xvalue)) != len(x):
            raise RuntimeError('There are some non-unique x-values')
    else:
        x, y = x_input, y_input
    self.x = x
    self.y = y
Fully define data by lists of x values and y values .