idx
int64
0
251k
question
stringlengths
53
3.53k
target
stringlengths
5
1.23k
len_question
int64
20
893
len_target
int64
3
238
239,200
def serve_file(self, private_file):
    """Serve the file that was retrieved from the storage.

    The relative path can be found with ``private_file.relative_name``.
    """
    response = self.server_class().serve(private_file)
    if self.content_disposition:
        # Join syntax works in all Python versions. Python 3 doesn't support
        # b'..'.format(), and % formatting was added for bytes only in 3.5
        # (https://bugs.python.org/issue3982), so join the byte strings.
        filename = self.get_content_disposition_filename(private_file)
        header_parts = [
            self.content_disposition.encode(),
            self._encode_filename_header(filename),
        ]
        response['Content-Disposition'] = b'; '.join(header_parts)
    return response
Serve the file that was retrieved from the storage . The relative path can be found with private_file . relative_name .
141
26
239,201
def get_content_disposition_filename(self, private_file):
    """Return the filename to use in the download header."""
    if self.content_disposition_filename:
        return self.content_disposition_filename
    return os.path.basename(private_file.relative_name)
Return the filename in the download header .
42
8
239,202
def _encode_filename_header ( self , filename ) : # Based on https://www.djangosnippets.org/snippets/1710/ user_agent = self . request . META . get ( 'HTTP_USER_AGENT' , None ) if 'WebKit' in user_agent : # Support available for UTF-8 encoded strings. # This also matches Edgee. return u'filename={}' . format ( filename ) . encode ( "utf-8" ) elif 'MSIE' in user_agent : # IE does not support RFC2231 for internationalized headers, but somehow # percent-decodes it so this can be used instead. Note that using the word # "attachment" anywhere in the filename overrides an inline content-disposition. url_encoded = quote ( filename . encode ( "utf-8" ) ) . replace ( 'attachment' , "a%74tachment" ) return "filename={}" . format ( url_encoded ) . encode ( "utf-8" ) else : # For others like Firefox, we follow RFC2231 (encoding extension in HTTP headers). rfc2231_filename = quote ( filename . encode ( "utf-8" ) ) return "filename*=UTF-8''{}" . format ( rfc2231_filename ) . encode ( "utf-8" )
The filename encoded to use in a Content - Disposition header .
298
13
239,203
def add_no_cache_headers(func):
    """Make sure the retrieved file is not cached on disk or by proxies.

    Caching anywhere in between would circumvent any checking whether the
    user may even access the file.
    """
    @wraps(func)
    def _dec(*args, **kwargs):
        response = func(*args, **kwargs)
        # HTTP 1.0 proxies
        response['Expires'] = 'Thu, 01 Jan 1970 00:00:00 GMT'
        # HTTP 1.1
        response['Cache-Control'] = 'max-age=0, no-cache, must-revalidate, proxy-revalidate'
        return response
    return _dec
Makes sure the retrieved file is not cached on disk or cached by proxy servers in between . This would circumvent any checking whether the user may even access the file .
113
33
239,204
def readTrainingData(file_locations, GROUP_LABEL):
    """Used in downstream tests: yield training examples from XML files."""
    class Mock(object):
        pass

    # TrainingData expects a module-like object carrying PARENT_LABEL.
    mock_module = Mock()
    mock_module.PARENT_LABEL = GROUP_LABEL
    for location in file_locations:
        with open(location) as handle:
            xml_root = etree.parse(handle).getroot()
            for example in data_prep_utils.TrainingData(xml_root, mock_module):
                yield example
Used in downstream tests
98
4
239,205
def device(value):
    """Transform a User Agent into human readable text."""
    def _first_match(patterns):
        # Return the label of the first pattern matching the UA string.
        for regex, label in patterns:
            if regex.search(value):
                return label
        return None

    browser = _first_match(BROWSERS)
    hardware = _first_match(DEVICES)
    if browser and hardware:
        return _('%(browser)s on %(device)s') % {'browser': browser, 'device': hardware}
    return browser or hardware or None
Transform a User Agent into human readable text .
102
9
239,206
def location(value):
    """Transform an IP address into an approximate location."""
    try:
        data = geoip() and geoip().city(value)
    except Exception:
        # City database unavailable; fall back to the coarser country lookup.
        try:
            data = geoip() and geoip().country(value)
        except Exception as exc:
            warnings.warn(str(exc))
            data = None
    if data and data['country_name']:
        if 'city' in data and data['city']:
            return '{}, {}'.format(data['city'], data['country_name'])
        return data['country_name']
    return None
Transform an IP address into an approximate location .
124
9
239,207
def before(func):
    """Run a function before the handler is invoked.

    The function is passed the event & context and must return an
    event & context too.
    """
    class BeforeDecorator(LambdaDecorator):
        def before(self, event, context):
            return func(event, context)

    return BeforeDecorator
Run a function before the handler is invoked is passed the event & context and must return an event & context too .
41
23
239,208
def after(func):
    """Run a function after the handler is invoked.

    The function is passed the response and must return a response too.
    """
    class AfterDecorator(LambdaDecorator):
        def after(self, retval):
            return func(retval)

    return AfterDecorator
Run a function after the handler is invoked is passed the response and must return an response too .
39
19
239,209
def on_exception(func):
    """Run a function when a handler throws an exception.

    Its return value is returned to AWS.
    """
    class OnExceptionDecorator(LambdaDecorator):
        def on_exception(self, exception):
            return func(exception)

    return OnExceptionDecorator
Run a function when a handler throws an exception . Its return value is returned to AWS .
45
20
239,210
def async_handler(handler):
    """Allow async handlers by automatically running them in an event loop.

    The loop is added to the context object in case the handler needs it.
    """
    @wraps(handler)
    def wrapper(event, context):
        # BUG FIX: on modern Python, get_event_loop() raises RuntimeError
        # outside a running loop when none was set; create one explicitly.
        try:
            loop = asyncio.get_event_loop()
        except RuntimeError:
            loop = asyncio.new_event_loop()
            asyncio.set_event_loop(loop)
        context.loop = loop
        return context.loop.run_until_complete(handler(event, context))
    return wrapper
This decorator allows for use of async handlers by automatically running them in an event loop . The loop is added to the context object for if the handler needs it .
55
33
239,211
def dump_json_body(handler):
    """Automatically serialize response bodies with json.dumps."""
    @wraps(handler)
    def wrapper(event, context):
        response = handler(event, context)
        if 'body' not in response:
            return response
        try:
            response['body'] = json.dumps(response['body'])
        except Exception as exception:
            return {'statusCode': 500, 'body': str(exception)}
        return response
    return wrapper
Automatically serialize response bodies with json . dumps .
85
11
239,212
def json_http_resp(handler):
    """Automatically serialize the return value into the body of a
    successful HTTP response."""
    @wraps(handler)
    def wrapper(event, context):
        result = handler(event, context)
        try:
            serialized = json.dumps(result)
        except Exception as exception:
            return {'statusCode': 500, 'body': str(exception)}
        return {'statusCode': 200, 'body': serialized}
    return wrapper
Automatically serialize return value to the body of a successful HTTP response .
81
16
239,213
def load_json_body(handler):
    """Automatically deserialize event bodies with json.loads.

    Returns a 400 response when the body is not valid JSON.
    """
    @wraps(handler)
    def wrapper(event, context):
        if isinstance(event.get('body'), str):
            try:
                event['body'] = json.loads(event['body'])
            except ValueError:
                # BUG FIX: a bare ``except:`` also swallowed KeyboardInterrupt
                # and SystemExit; json.JSONDecodeError subclasses ValueError.
                return {'statusCode': 400, 'body': 'BAD REQUEST'}
        return handler(event, context)
    return wrapper
Automatically deserialize event bodies with json . loads .
90
12
239,214
def json_schema_validator(request_schema=None, response_schema=None):
    """Validate your request & response payloads against a JSONSchema."""
    def wrapper_wrapper(handler):
        @wraps(handler)
        def wrapper(event, context):
            # Validate the incoming event before invoking the handler.
            if request_schema is not None:
                if jsonschema is None:
                    logger.error('jsonschema is not installed, skipping request validation')
                else:
                    try:
                        jsonschema.validate(event, request_schema)
                    except jsonschema.ValidationError as exception:
                        return {
                            'statusCode': 400,
                            'body': 'RequestValidationError: {}'.format(exception.message),
                        }
            response = handler(event, context)
            # Validate what the handler produced before returning it.
            if response_schema is not None:
                if jsonschema is None:
                    logger.error('jsonschema is not installed, skipping response validation')
                else:
                    try:
                        jsonschema.validate(response, response_schema)
                    except jsonschema.ValidationError as exception:
                        return {
                            'statusCode': 500,
                            'body': 'ResponseValidationError: {}'.format(exception.message),
                        }
            return response
        return wrapper
    return wrapper_wrapper
Validate your request & response payloads against a JSONSchema .
249
14
239,215
def no_retry_on_failure(handler):
    """AWS Lambda retries scheduled lambdas that don't execute successfully.

    Short-circuit any retry attempt by remembering seen request ids.
    """
    seen_request_ids = set()

    @wraps(handler)
    def wrapper(event, context):
        request_id = context.aws_request_id
        if request_id in seen_request_ids:
            logger.critical('Retry attempt on request id %s detected.', request_id)
            return {'statusCode': 200}
        seen_request_ids.add(request_id)
        return handler(event, context)
    return wrapper
AWS Lambda retries scheduled lambdas that don't execute successfully .
112
18
239,216
def _wrap_thing ( self , thing , kind ) : thing [ 'created' ] = self . _epoch_utc_to_local ( thing [ 'created_utc' ] ) thing [ 'd_' ] = copy . deepcopy ( thing ) ThingType = namedtuple ( kind , thing . keys ( ) ) thing = ThingType ( * * thing ) return thing
Mimic praw . Submission and praw . Comment API
84
13
239,217
# Mutates *payload* in place: defaults 'limit' to max_results_per_request and
# 'sort' to 'desc', and when a 'filter' projection is present, coerces it to a
# list that always includes 'created_utc' (apparently needed for paging).
# NOTE(review): does nothing when self._limited(payload) — paging semantics of
# that endpoint are unclear per the inline comment.
def _add_nec_args ( self , payload ) : if self . _limited ( payload ) : # Do nothing I guess? Not sure how paging works on this endpoint... return if 'limit' not in payload : payload [ 'limit' ] = self . max_results_per_request if 'sort' not in payload : # Getting weird results if this is not made explicit. Unclear why. payload [ 'sort' ] = 'desc' if 'filter' in payload : #and payload.get('created_utc', None) is None: if not isinstance ( payload [ 'filter' ] , list ) : if isinstance ( payload [ 'filter' ] , str ) : payload [ 'filter' ] = [ payload [ 'filter' ] ] else : payload [ 'filter' ] = list ( payload [ 'filter' ] ) if 'created_utc' not in payload [ 'filter' ] : payload [ 'filter' ] . append ( 'created_utc' )
Adds limit and created_utc arguments to the payload as necessary .
213
14
239,218
def pretty_path(path):
    """Prettify path for logging."""
    text = fmt.to_utf8(path)
    home = os.path.expanduser("~")
    if text.startswith(home):
        # Abbreviate the home directory prefix for readability.
        text = "~" + text[len(home):]
    return '"%s"' % (text,)
Prettify path for logging .
73
7
239,219
# Log the call with prettified path arguments, then execute it unless dry-run
# mode is active; filesystem and Unicode errors abort via self.fatal().
def guarded ( self , call , * args ) : self . LOG . debug ( '%s(%s)' % ( call . __name__ , ', ' . join ( [ pretty_path ( i ) for i in args ] ) , ) ) if not self . options . dry_run : try : call ( * args ) except ( EnvironmentError , UnicodeError ) as exc : self . fatal ( '%s(%s) failed [%s]' % ( call . __name__ , ', ' . join ( [ pretty_path ( i ) for i in args ] ) , exc , ) )
Catch exceptions thrown by filesystem calls and don t really execute them in dry - run mode .
128
19
239,220
def run():
    """Module level test."""
    logging.basicConfig(level=logging.DEBUG)
    load_config.ConfigLoader().load()
    config.debug = True
    # Look up the item passed on the command line and dump its repr.
    print(repr(config.engine.item(sys.argv[1])))
Module level test .
53
4
239,221
# Run a sequence of XMLRPC calls against the current torrent (identified by
# its info hash), optionally feeding each result to an observer callback.
# Calls starting with ':' (or whose first two chars end in '.') go to the root
# RPC namespace, others to the 'd' (download) namespace. XMLRPC faults are
# wrapped into error.EngineError.
def _make_it_so ( self , command , calls , * args , * * kwargs ) : observer = kwargs . pop ( 'observer' , False ) args = ( self . _fields [ "hash" ] , ) + args try : for call in calls : self . _engine . LOG . debug ( "%s%s torrent #%s (%s)" % ( command [ 0 ] . upper ( ) , command [ 1 : ] , self . _fields [ "hash" ] , call ) ) if call . startswith ( ':' ) or call [ : 2 ] . endswith ( '.' ) : namespace = self . _engine . _rpc else : namespace = self . _engine . _rpc . d result = getattr ( namespace , call . lstrip ( ':' ) ) ( * args ) if observer : observer ( result ) except xmlrpc . ERRORS as exc : raise error . EngineError ( "While %s torrent #%s: %s" % ( command , self . _fields [ "hash" ] , exc ) )
Perform some error - checked XMLRPC calls .
233
11
239,222
# Lazily resolve a field value: derived fields ('done', 'files', 'kind_N',
# 'custom_*') are computed, everything else is proxied to an rTorrent getter
# (via PYRO2RT_MAPPING or an explicit engine_name, '=' prefix suppressing the
# 'get_' prefix), then cached in self._fields.
# NOTE(review): uses the Python 2-only name `long` — this block is py2-only
# as written.
def fetch ( self , name , engine_name = None ) : # TODO: Get each on-demand field in a multicall for all other items, since # we likely need it anyway; another (more easy) way would be to pre-fetch dynamically # with the list of fields from filters and output formats try : return self . _fields [ name ] except KeyError : if isinstance ( name , ( int , long ) ) : name = "custom_%d" % name if name == "done" : val = float ( self . fetch ( "completed_chunks" ) ) / self . fetch ( "size_chunks" ) elif name == "files" : val = self . _get_files ( ) elif name . startswith ( "kind_" ) and name [ 5 : ] . isdigit ( ) : val = self . _get_kind ( int ( name [ 5 : ] , 10 ) ) elif name . startswith ( "custom_" ) : key = name [ 7 : ] try : if len ( key ) == 1 and key in "12345" : val = getattr ( self . _engine . _rpc . d , "custom" + key ) ( self . _fields [ "hash" ] ) else : val = self . _engine . _rpc . d . custom ( self . _fields [ "hash" ] , key ) except xmlrpc . ERRORS as exc : raise error . EngineError ( "While accessing field %r: %s" % ( name , exc ) ) else : getter_name = engine_name if engine_name else RtorrentEngine . PYRO2RT_MAPPING . get ( name , name ) if getter_name [ 0 ] == '=' : getter_name = getter_name [ 1 : ] else : getter_name = "get_" + getter_name getter = getattr ( self . _engine . _rpc . d , getter_name ) try : val = getter ( self . _fields [ "hash" ] ) except xmlrpc . ERRORS as exc : raise error . EngineError ( "While accessing field %r: %s" % ( name , exc ) ) # TODO: Currently, NOT caching makes no sense; in a demon, it does! #if isinstance(FieldDefinition.FIELDS.get(name), engine.ConstantField): self . _fields [ name ] = val return val
Get a field on demand .
536
6
239,223
def datapath(self):
    """Get an item's data path."""
    path = self._fields['path']
    if not path:
        # Stopped item with no base_dir? Fall back to the directory field.
        path = self.fetch('directory')
    if path and not self._fields['is_multi_file']:
        # Single-file items store their payload inside the directory.
        path = os.path.join(path, self._fields['name'])
    return os.path.expanduser(fmt.to_unicode(path))
Get an item s data path .
98
7
239,224
def announce_urls(self, default=[]):  # pylint: disable=dangerous-default-value
    """Get a list of all announce URLs.

    Returns *default* if no trackers are found at all.
    """
    infohash = self._fields["hash"]
    try:
        response = self._engine._rpc.t.multicall(infohash, 0, "t.url=", "t.is_enabled=")
    except xmlrpc.ERRORS as exc:
        raise error.EngineError("While getting announce URLs for #%s: %s" % (infohash, exc))
    # Only report trackers that are actually enabled.
    return [row[0] for row in response if row[1]] if response else default
Get a list of all announce URLs . Returns default if no trackers are found at all .
138
19
239,225
# Parse a space/comma separated tag list where '-tag' removes and '+tag' or a
# bare 'tag' adds, then — only when the set actually changed — persist the new
# space-joined set via the 'custom.set' XMLRPC command and the local cache.
def tag ( self , tags ) : # Get tag list and add/remove given tags tags = tags . lower ( ) previous = self . tagged tagset = previous . copy ( ) for tag in tags . replace ( ',' , ' ' ) . split ( ) : if tag . startswith ( '-' ) : tagset . discard ( tag [ 1 : ] ) elif tag . startswith ( '+' ) : tagset . add ( tag [ 1 : ] ) else : tagset . add ( tag ) # Write back new tagset, if changed tagset . discard ( '' ) if tagset != previous : tagset = ' ' . join ( sorted ( tagset ) ) self . _make_it_so ( "setting tags %r on" % ( tagset , ) , [ "custom.set" ] , "tags" , tagset ) self . _fields [ "custom_tags" ] = tagset
Add or remove tags .
199
5
239,226
# Assign the torrent to a throttle group: maps 'null'/'none' aliases, lazily
# validates unknown names against the engine (both up and down max == -1 means
# the throttle is undefined), and stops/restarts an active torrent around the
# throttle change since rTorrent won't apply it while running.
def set_throttle ( self , name ) : if name . lower ( ) == "null" : name = "NULL" if name . lower ( ) == "none" : name = '' if name not in self . _engine . known_throttle_names : if self . _engine . _rpc . throttle . up . max ( xmlrpc . NOHASH , name ) == - 1 : if self . _engine . _rpc . throttle . down . max ( xmlrpc . NOHASH , name ) == - 1 : raise error . UserError ( "Unknown throttle name '{}'" . format ( name ) ) self . _engine . known_throttle_names . add ( name ) if ( name or "NONE" ) == self . throttle : self . _engine . LOG . debug ( "Keeping throttle %r on torrent #%s" % ( self . throttle , self . _fields [ "hash" ] ) ) return active = self . is_active if active : self . _engine . LOG . debug ( "Torrent #%s stopped for throttling" % ( self . _fields [ "hash" ] , ) ) self . stop ( ) self . _make_it_so ( "setting throttle %r on" % ( name , ) , [ "throttle_name.set" ] , name ) if active : self . _engine . LOG . debug ( "Torrent #%s restarted after throttling" % ( self . _fields [ "hash" ] , ) ) self . start ( )
Assign to throttle group .
330
6
239,227
def purge(self):
    """Delete PARTIAL data files and remove torrent from client."""
    def partial_file(item):
        "Filter out partial files"
        return item.completed_chunks < item.size_chunks

    self.cull(file_filter=partial_file,
              attrs=["get_completed_chunks", "get_size_chunks"])
Delete PARTIAL data files and remove torrent from client .
79
11
239,228
# Extract the SCGI connection URL from an rtorrent.rc file: skips comments,
# continuations and blank lines, maps aliased keys via RTORRENT_RC_ALIASES,
# collects scgi_local / scgi_port, normalizes them into scgi:// URLs, and
# stores the result into namespace.scgi_url — preferring UNIX domain sockets
# over TCP. No-op when the namespace already has a connection URL.
def load_config ( self , namespace = None , rcfile = None ) : if namespace is None : namespace = config if namespace . scgi_url : return # already have the connection to rTorrent # Get and check config file name if not rcfile : rcfile = getattr ( config , "rtorrent_rc" , None ) if not rcfile : raise error . UserError ( "No 'rtorrent_rc' path defined in configuration!" ) if not os . path . isfile ( rcfile ) : raise error . UserError ( "Config file %r doesn't exist!" % ( rcfile , ) ) # Parse the file self . LOG . debug ( "Loading rtorrent config from %r" % ( rcfile , ) ) rc_vals = Bunch ( scgi_local = '' , scgi_port = '' ) with open ( rcfile ) as handle : continued = False for line in handle . readlines ( ) : # Skip comments, continuations, and empty lines line = line . strip ( ) continued , was_continued = line . endswith ( '\\' ) , continued if not line or was_continued or line . startswith ( "#" ) : continue # Be lenient about errors, after all it's not our own config file try : key , val = line . split ( "=" , 1 ) except ValueError : self . LOG . warning ( "Ignored invalid line %r in %r!" % ( line , rcfile ) ) continue key , val = key . strip ( ) , val . strip ( ) key = self . RTORRENT_RC_ALIASES . get ( key , key ) . replace ( '.' , '_' ) # Copy values we're interested in if key in self . RTORRENT_RC_KEYS : self . LOG . debug ( "rtorrent.rc: %s = %s" % ( key , val ) ) rc_vals [ key ] = val # Validate fields if rc_vals . scgi_local : rc_vals . scgi_local = os . path . expanduser ( rc_vals . scgi_local ) if rc_vals . scgi_local . startswith ( '/' ) : rc_vals . scgi_local = "scgi://" + rc_vals . scgi_local if rc_vals . scgi_port and not rc_vals . scgi_port . startswith ( "scgi://" ) : rc_vals . scgi_port = "scgi://" + rc_vals . scgi_port # Prefer UNIX domain sockets over TCP sockets namespace . scgi_url = rc_vals . scgi_local or rc_vals . scgi_port
Load file given in rcfile .
591
7
239,229
def _resolve_viewname(self, viewname):
    """Check for special view names and return existing rTorrent one."""
    if viewname != "-":
        return viewname
    try:
        # Only works with rTorrent-PS at this time!
        return self.open().ui.current_view()
    except xmlrpc.ERRORS as exc:
        raise error.EngineError("Can't get name of current view: %s" % (exc))
Check for special view names and return existing rTorrent one .
88
12
239,230
# Connect to rTorrent via XMLRPC exactly once (cached in self._rpc): loads the
# SCGI URL from rtorrent.rc if needed, fetches the session name / time to
# verify connectivity and 64-bit integer support in xmlrpc-c, and records
# versions, session/download directories, and the startup time (taken from
# the rtorrent.lock mtime, or from rTorrent itself for '+ssh:' URLs).
def open ( self ) : # Only connect once if self . _rpc is not None : return self . _rpc # Get connection URL from rtorrent.rc self . load_config ( ) # Reading abilities are on the downfall, so... if not config . scgi_url : raise error . UserError ( "You need to configure a XMLRPC connection, read" " https://pyrocore.readthedocs.io/en/latest/setup.html" ) # Connect and get instance ID (also ensures we're connectable) self . _rpc = xmlrpc . RTorrentProxy ( config . scgi_url ) self . versions , self . version_info = self . _rpc . _set_mappings ( ) self . engine_id = self . _rpc . session . name ( ) time_usec = self . _rpc . system . time_usec ( ) # Make sure xmlrpc-c works as expected if time_usec < 2 ** 32 : self . LOG . warn ( "Your xmlrpc-c is broken (64 bit integer support missing," " %r returned instead)" % ( type ( time_usec ) , ) ) # Get other manifest values self . engine_software = "rTorrent %s/%s" % self . versions if "+ssh:" in config . scgi_url : self . startup = int ( self . _rpc . startup_time ( ) or time . time ( ) ) else : self . _session_dir = self . _rpc . session . path ( ) if not self . _session_dir : raise error . UserError ( "You need a session directory, read" " https://pyrocore.readthedocs.io/en/latest/setup.html" ) if not os . path . exists ( self . _session_dir ) : raise error . UserError ( "Non-existing session directory %r" % self . _session_dir ) self . _download_dir = os . path . expanduser ( self . _rpc . directory . default ( ) ) if not os . path . exists ( self . _download_dir ) : raise error . UserError ( "Non-existing download directory %r" % self . _download_dir ) self . startup = os . path . getmtime ( os . path . join ( self . _session_dir , "rtorrent.lock" ) ) # Return connection self . LOG . debug ( repr ( self ) ) return self . _rpc
Open connection .
547
3
239,231
def multicall(self, viewname, fields):
    """Query the given fields of items in the given view."""
    commands = tuple('d.{}='.format(field) for field in fields)
    # Dots are invalid in attribute names, so map them to underscores.
    result_type = namedtuple('DownloadItem',
                             [field.replace('.', '_') for field in fields])
    rows = self.open().d.multicall(viewname, *commands)
    return [result_type(*row) for row in rows]
Query the given fields of items in the given view .
96
11
239,232
def item(self, infohash, prefetch=None, cache=False):
    """Fetch a single item by its info hash."""
    # items() yields matching items; take the first one.
    return next(self.items(infohash, prefetch, cache))
Fetch a single item by its info hash .
36
10
239,233
# Build SweepRule entries from '[sweep_rules_<ruleset>]' config sections:
# every '<name>.filter' entry becomes a rule, with optional '<name>.prio'
# (default 999) and '<name>.order' (default self.default_order) siblings.
# The accumulated rules are sorted by (prio, name) and returned.
def _load_rules ( self ) : for ruleset in self . active_rulesets : section_name = 'sweep_rules_' + ruleset . lower ( ) try : ruledefs = getattr ( self . config , section_name ) except AttributeError : raise error . UserError ( "There is no [{}] section in your configuration" . format ( section_name . upper ( ) ) ) for ruledef , filtercond in ruledefs . items ( ) : if ruledef . endswith ( '.filter' ) : rulename = ruledef . rsplit ( '.' , 1 ) [ 0 ] rule = SweepRule ( ruleset , rulename , int ( ruledefs . get ( rulename + '.prio' , '999' ) ) , ruledefs . get ( rulename + '.order' , self . default_order ) , parse_cond ( filtercond ) ) self . rules . append ( rule ) self . rules . sort ( key = lambda x : ( x . prio , x . name ) ) return self . rules
Load rule definitions from config .
231
6
239,234
def _parse_schedule ( self , schedule ) : result = { } for param in shlex . split ( str ( schedule ) ) : # do not feed unicode to shlex try : key , val = param . split ( '=' , 1 ) except ( TypeError , ValueError ) : self . fatal ( "Bad param '%s' in job schedule '%s'" % ( param , schedule ) ) else : result [ key ] = val return result
Parse a job schedule .
98
6
239,235
# Normalize [torque] config values (auto-convert ints and booleans), group the
# 'job.*' and 'httpd.*' keys into Bunch structures, then validate the HTTP
# server settings (scheme, port range) and each job's required 'handler' and
# 'schedule' parameters; active job handlers are resolved via pymagic.
# NOTE(review): uses dict.iteritems() — Python 2 only as written.
def _validate_config ( self ) : groups = dict ( job = defaultdict ( Bunch ) , httpd = defaultdict ( Bunch ) , ) for key , val in config . torque . items ( ) : # Auto-convert numbers and bools if val . isdigit ( ) : config . torque [ key ] = val = int ( val ) elif val . lower ( ) in ( matching . TRUE | matching . FALSE ) : val = matching . truth ( str ( val ) , key ) # Assemble grouped parameters stem = key . split ( '.' , 1 ) [ 0 ] if key == "httpd.active" : groups [ stem ] [ "active" ] = val elif stem in groups : try : stem , name , param = key . split ( '.' , 2 ) except ( TypeError , ValueError ) : self . fatal ( "Bad %s configuration key %r (expecting %s.NAME.PARAM)" % ( stem , key , stem ) ) else : groups [ stem ] [ name ] [ param ] = val for key , val in groups . iteritems ( ) : setattr ( self , key . replace ( "job" , "jobs" ) , Bunch ( val ) ) # Validate httpd config if self . httpd . active : if self . httpd . waitress . url_scheme not in ( "http" , "https" ) : self . fatal ( "HTTP URL scheme must be either 'http' or 'https'" ) if not isinstance ( self . httpd . waitress . port , int ) or not ( 1024 <= self . httpd . waitress . port < 65536 ) : self . fatal ( "HTTP port must be a 16 bit number >= 1024" ) # Validate jobs for name , params in self . jobs . items ( ) : for key in ( "handler" , "schedule" ) : if key not in params : self . fatal ( "Job '%s' is missing the required 'job.%s.%s' parameter" % ( name , name , key ) ) bool_param = lambda k , default , p = params : matching . truth ( p . get ( k , default ) , "job.%s.%s" % ( name , k ) ) params . job_name = name params . dry_run = bool_param ( "dry_run" , False ) or self . options . dry_run params . active = bool_param ( "active" , True ) params . schedule = self . _parse_schedule ( params . schedule ) if params . active : try : params . handler = pymagic . import_name ( params . handler ) except ImportError as exc : self . fatal ( "Bad handler name '%s' for job '%s':\n %s" % ( params . 
handler , name , exc ) )
Handle and check configuration .
609
5
239,236
def _add_jobs(self):
    """Add configured jobs."""
    for name, params in self.jobs.items():
        if params.active:
            # Instantiate the handler class with its own parameter bunch.
            params.handler = params.handler(params)
            self.sched.add_cron_job(params.handler.run, **params.schedule)
Add configured jobs .
60
4
239,237
# Set up the WSGI HTTP server when httpd is enabled: waitress and the webapp
# module are imported lazily so inactive daemons don't need them installed;
# the server is built from the configured waitress options and logged.
def _init_wsgi_server ( self ) : self . wsgi_server = None if self . httpd . active : # Only import dependencies when server is active from waitress . server import WSGIServer from pyrocore . daemon import webapp # Set up WSGI stack wsgi_app = webapp . make_app ( self . httpd ) # try: # import wsgilog # except ImportError: # self.LOG.info("'wsgilog' middleware not installed") # else: # wsgi_app = wsgilog.WsgiLog(wsgi_app, **self.httpd.wsgilog) ##logging.getLogger('waitress').setLevel(logging.DEBUG) self . LOG . debug ( "Waitress config: %r" % self . httpd . waitress ) self . wsgi_server = WSGIServer ( wsgi_app , * * self . httpd . waitress ) self . LOG . info ( "Started web server at %s://%s:%d/" % ( self . httpd . waitress . url_scheme , self . wsgi_server . get_server_name ( self . wsgi_server . effective_host ) , int ( self . wsgi_server . effective_port ) , ) )
Set up WSGI HTTP server .
292
7
239,238
# Main loop: drive asyncore with POLL_TIMEOUT, then sleep away the remainder
# of the poll cycle (capped at POLL_TIMEOUT to be robust against clock
# shifts). Exits on Ctrl-C, on SystemExit (propagating the exit code), or
# when the configured guard file disappears.
def _run_forever ( self ) : while True : try : tick = time . time ( ) asyncore . loop ( timeout = self . POLL_TIMEOUT , use_poll = True ) # Sleep for remaining poll cycle time tick += self . POLL_TIMEOUT - time . time ( ) if tick > 0 : # wait POLL_TIMEOUT at most (robust against time shifts) time . sleep ( min ( tick , self . POLL_TIMEOUT ) ) except KeyboardInterrupt as exc : self . LOG . info ( "Termination request received (%s)" % exc ) break except SystemExit as exc : self . return_code = exc . code or 0 self . LOG . info ( "System exit (RC=%r)" % self . return_code ) break else : # Idle work #self.LOG.warn("IDLE %s %r" % (self.options.guard_file, os.path.exists(self.options.guard_file))) if self . options . guard_file and not os . path . exists ( self . options . guard_file ) : self . LOG . warn ( "Guard file '%s' disappeared, exiting!" % self . options . guard_file ) break
Run configured jobs until termination request .
265
7
239,239
def read_blob(arg):
    """Read a BLOB from the given @-prefixed argument."""
    if arg == '@-':
        # Read the payload from standard input.
        return sys.stdin.read()

    if any(arg.startswith('@{}://'.format(scheme))
           for scheme in {'http', 'https', 'ftp', 'file'}):
        # Fetch the payload from a URL (requires the 'requests' package).
        if not requests:
            raise error.UserError("You must 'pip install requests' to support @URL arguments.")
        try:
            response = requests.get(arg[1:])
            response.raise_for_status()
            return response.content
        except requests.RequestException as exc:
            raise error.UserError(str(exc))

    # Otherwise treat the argument as a local file path.
    with open(os.path.expanduser(arg[1:]), 'rb') as handle:
        return handle.read()
Read a BLOB from the given @ - prefixed argument ( stdin , URL , or file path ) .
182
6
239,240
def open(self):
    """Open connection and return proxy."""
    if not self.proxy:
        if not config.scgi_url:
            # Let the engine read the SCGI URL from rtorrent.rc first.
            config.engine.load_config()
        if not config.scgi_url:
            self.LOG.error("You need to configure a XMLRPC connection, read"
                           " https://pyrocore.readthedocs.io/en/latest/setup.html")
        self.proxy = xmlrpc.RTorrentProxy(config.scgi_url)
        self.proxy._set_mappings()
    return self.proxy
Open connection and return proxy .
117
6
239,241
# Perform a single XMLRPC call on the proxy, mapping faults to exit codes
# (EX_NOINPUT when the fault string contains 'not find', EX_DATAERR
# otherwise). On success the result is printed unless --quiet is set,
# pretty-printed for --repr or non-string iterables.
def execute ( self , proxy , method , args ) : try : result = getattr ( proxy , method ) ( raw_xml = self . options . xml , * tuple ( args ) ) except xmlrpc . ERRORS as exc : self . LOG . error ( "While calling %s(%s): %s" % ( method , ", " . join ( repr ( i ) for i in args ) , exc ) ) self . return_code = error . EX_NOINPUT if "not find" in getattr ( exc , "faultString" , "" ) else error . EX_DATAERR else : if not self . options . quiet : if self . options . repr : # Pretty-print if requested, or it's a collection and not a scalar result = pformat ( result ) elif hasattr ( result , "__iter__" ) : result = '\n' . join ( i if isinstance ( i , basestring ) else pformat ( i ) for i in result ) print ( fmt . to_console ( result ) )
Execute given XMLRPC call .
227
8
239,242
# Interactive prompt-toolkit REPL for rTorrent XMLRPC: offers word completion
# over known methods, persistent history, and the 'help'/'stats'/'exit'
# commands; 'method=arg1,arg2' lines are cooked and dispatched via
# self.execute(). Ctrl-D (EOFError) leaves the loop.
def do_repl ( self ) : from prompt_toolkit import prompt from prompt_toolkit . history import FileHistory from prompt_toolkit . auto_suggest import AutoSuggestFromHistory from prompt_toolkit . contrib . completers import WordCompleter self . options . quiet = False proxy = self . open ( ) ps1 = proxy . session . name ( ) + u'> ' words = [ 'help' , 'stats' , 'exit' ] words += [ x + '=' for x in proxy . system . listMethods ( ) ] history_file = os . path . join ( config . config_dir , '.rtxmlrpc_history' ) while True : try : try : cmd = prompt ( ps1 , completer = WordCompleter ( words ) , auto_suggest = AutoSuggestFromHistory ( ) , history = FileHistory ( history_file ) ) except KeyboardInterrupt : cmd = '' if not cmd : print ( "Enter '?' or 'help' for usage information, 'Ctrl-D' to exit." ) if cmd in { '?' , 'help' } : self . repl_usage ( ) continue elif cmd in { '' , 'stats' } : print ( repr ( proxy ) . split ( None , 1 ) [ 1 ] ) continue elif cmd in { 'exit' } : raise EOFError ( ) try : method , raw_args = cmd . split ( '=' , 1 ) except ValueError : print ( "ERROR: '=' not found" ) continue raw_args = raw_args . split ( ',' ) args = self . cooked ( raw_args ) self . execute ( proxy , method , args ) except EOFError : print ( 'Bye from {!r}' . format ( proxy ) ) break
REPL for rTorrent XMLRPC commands .
378
10
239,243
# Import rtorrent.rc commands: an existing '@file' argument is passed to the
# 'import' command directly; otherwise the argument lines (or stdin for
# '@-') are written to a temporary .rc file which is removed afterwards.
def do_import ( self ) : tmp_import = None try : if self . args [ 0 ] . startswith ( '@' ) and self . args [ 0 ] != '@-' : import_file = os . path . expanduser ( self . args [ 0 ] [ 1 : ] ) if not os . path . isfile ( import_file ) : self . parser . error ( "File not found (or not a file): {}" . format ( import_file ) ) args = ( xmlrpc . NOHASH , os . path . abspath ( import_file ) ) else : script_text = '\n' . join ( self . args + [ '' ] ) if script_text == '@-\n' : script_text = sys . stdin . read ( ) with tempfile . NamedTemporaryFile ( suffix = '.rc' , prefix = 'rtxmlrpc-' , delete = False ) as handle : handle . write ( script_text ) tmp_import = handle . name args = ( xmlrpc . NOHASH , tmp_import ) self . execute ( self . open ( ) , 'import' , args ) finally : if tmp_import and os . path . exists ( tmp_import ) : os . remove ( tmp_import )
Handle import files or streams passed with - i .
276
10
239,244
def do_command(self):
    """Call a single command with arguments."""
    method = self.args[0]
    raw_args = self.args[1:]
    if '=' in method:
        # rTorrent-style "method=arg1,arg2" syntax — must not be mixed
        # with extra shell-style positional arguments.
        if raw_args:
            self.parser.error("Please don't mix rTorrent and shell argument styles!")
        method, raw_args = method.split('=', 1)
        raw_args = raw_args.split(',')
    self.execute(self.open(), method, self.cooked(raw_args))
Call a single command with arguments .
106
7
239,245
# Download download_url into *target* (made absolute relative to config_dir),
# skipping the download when the *guard* file already exists there. ZIP
# payloads are unpacked into the target directory, anything else is streamed
# into the guard file itself.
# NOTE(review): uses urllib2 / StringIO — Python 2 only as written.
def download_resource ( self , download_url , target , guard ) : download_url = download_url . strip ( ) if not os . path . isabs ( target ) : target = os . path . join ( config . config_dir , target ) if os . path . exists ( os . path . join ( target , guard ) ) : self . LOG . info ( "Already have '%s' in '%s'..." % ( download_url , target ) ) return if not os . path . isdir ( target ) : os . makedirs ( target ) self . LOG . info ( "Downloading '%s' to '%s'..." % ( download_url , target ) ) with closing ( urllib2 . urlopen ( download_url ) ) as url_handle : if download_url . endswith ( ".zip" ) : with closing ( ZipFile ( StringIO ( url_handle . read ( ) ) ) ) as zip_handle : # pylint: disable=no-member zip_handle . extractall ( target ) # pylint: disable=no-member else : with open ( os . path . join ( target , guard ) , "wb" ) as file_handle : shutil . copyfileobj ( url_handle , file_handle )
Helper to download and install external resources .
279
8
239,246
def fmt_duration(secs):
    """Format a duration given in seconds."""
    duration = fmt.human_duration(secs, 0, precision=2, short=True)
    # Collapse any internal whitespace runs into single spaces.
    return ' '.join(duration.strip().split())
Format a duration in seconds .
43
6
239,247
def disk_free(path):
    """Return free bytes on the partition holding *path*."""
    stats = os.statvfs(path)
    # Blocks available to unprivileged users times the fundamental block size.
    return stats.f_bavail * stats.f_frsize
Return free bytes on partition holding path .
34
8
239,248
def truth(val, context):
    """Convert truth value in *val* to a boolean."""
    try:
        0 + val
    except TypeError:
        # Not a number — interpret the string as a truth word.
        lower_val = val.lower()
        if lower_val in TRUE:
            return True
        if lower_val in FALSE:
            return False
        raise FilterError("Bad boolean value %r in %r (expected one of '%s', or '%s')" % (
            val, context, "' '".join(TRUE), "' '".join(FALSE)
        ))
    else:
        return bool(val)
Convert truth value in val to a boolean .
106
10
239,249
def _time_ym_delta ( timestamp , delta , months ) : timestamp = list ( time . localtime ( timestamp ) ) timestamp [ int ( months ) ] += delta return time . mktime ( timestamp )
Helper to add a year or month delta to a timestamp .
45
12
239,250
def unquote_pre_filter(pre_filter, _regex=re.compile(r'[\\]+')):
    """Unquote a pre-filter condition: strip one outer level of double quotes and halve backslash runs."""
    if pre_filter.startswith('"') and pre_filter.endswith('"'):
        # Remove the outer quoting level
        pre_filter = pre_filter[1:-1]

    def _halve(match):
        "Replace a run of backslashes by one of half its length."
        run = match.group(0)
        return run[:len(run) // 2]

    return _regex.sub(_halve, pre_filter)
Unquote a pre - filter condition .
114
8
239,251
def _create_filter(self, condition):
    """Create a filter object from a single textual condition."""
    # "Normal" comparison operators?
    comparison = re.match(r"^(%s)(<[>=]?|>=?|!=|~)(.*)$" % self.ident_re, condition)
    if comparison:
        name, comparison, values = comparison.groups()
        if values and values[0] in "+-":
            raise FilterError("Comparison operator cannot be followed by '%s' in '%s'" % (values[0], condition))
        values = self.COMPARISON_OPS[comparison] % values
    else:
        # Split name from value(s)
        try:
            name, values = condition.split('=', 1)
        except ValueError:
            if self.default_field:
                name, values = self.default_field, condition
            else:
                raise FilterError("Field name missing in '%s' (expected '=')" % condition)

    # Try to find field definition
    field = self.lookup(name)
    if not field:
        raise FilterError("Unknown field %r in %r" % (name, condition))
    if field.get("matcher") is None:
        raise FilterError("Field %r cannot be used as a filter" % (name,))

    # Make filters from values (split on commas outside of /…/)
    filters = []
    split_values = re.findall(r'(!?/[^/]*/|[^,]+)(?:,|$)', values) if values else ['']
    if not split_values:
        raise FilterError("Internal Error: Cannot split %r into match values" % (values,))

    for value in split_values:
        wrapper = None
        if value.startswith('!'):
            # Leading '!' negates the single match value
            wrapper = NegateFilter
            value = value[1:]
        field_matcher = field["matcher"](name, value)
        filters.append(wrapper(field_matcher) if wrapper else field_matcher)

    # Multiple values are OR-ed together
    return CompoundFilterAny(filters) if len(filters) > 1 else filters[0]
Create a filter object from a textual condition .
450
9
239,252
def parse(self, conditions):
    """Parse filter conditions (a string or a pre-parsed tree) into a filter object."""
    conditions_text = conditions
    try:
        conditions = shlex.split(fmt.to_utf8(conditions))
    except AttributeError:
        # Not a string, assume parsed tree
        conditions_text = self._tree2str(conditions)

    # Empty list?
    if not conditions:
        raise FilterError("No conditions given at all!")

    # NOT *must* appear at the start of a group
    negate = conditions[:1] == ["NOT"]
    if negate:
        conditions = conditions[1:]
        if not conditions:
            raise FilterError("NOT must be followed by some conditions!")

    # Handle grouping
    if '[' in conditions:
        tree = [[]]
        for term in conditions:
            if term == '[':
                tree.append([])  # new grouping
            elif term == ']':
                subtree = tree.pop()
                if not tree:
                    raise FilterError("Unbalanced brackets, too many closing ']' in condition %r" % (conditions_text,))
                tree[-1].append(subtree)  # append finished group to containing level
            else:
                tree[-1].append(term)  # append to current level
        if len(tree) > 1:
            raise FilterError("Unbalanced brackets, too many open '[' in condition %r" % (conditions_text,))
        conditions = tree[0]

    # Prepare root matcher
    conditions = list(conditions)
    matcher = CompoundFilterAll()
    if "OR" in conditions:
        root = CompoundFilterAny()
        root.append(matcher)
    else:
        root = matcher

    # Go through conditions and parse them
    for condition in conditions:
        if condition == "OR":
            # Leading OR, or OR OR in sequence?
            if not matcher:
                raise FilterError("Left-hand side of OR missing in %r!" % (conditions_text,))

            # Start next run of AND conditions
            matcher = CompoundFilterAll()
            root.append(matcher)
        elif isinstance(condition, list):
            matcher.append(self.parse(condition))
        else:
            matcher.append(self._create_filter(condition))

    # Trailing OR?
    if not matcher:
        raise FilterError("Right-hand side of OR missing in %r!" % (conditions_text,))

    return NegateFilter(root) if negate else root
Parse filter conditions .
502
5
239,253
def _flux_engine_data(engine):
    """Return a flattened rTorrent data set for pushing to InfluxDB."""
    data = stats.engine_data(engine)

    # Make it flat: promote nested values to scalar columns
    data["up_rate"] = data["upload"][0]
    data["up_limit"] = data["upload"][1]
    data["down_rate"] = data["download"][0]
    data["down_limit"] = data["download"][1]
    data["version"] = data["versions"][0]
    views = data["views"]

    # Drop the nested originals
    for nested_key in ("upload", "download", "versions", "views"):
        del data[nested_key]

    return data, views
Return rTorrent data set for pushing to InfluxDB .
154
12
239,254
def run(self):
    """Statistics logger job callback: log engine id, uptime and proxy info."""
    try:
        proxy = config_ini.engine.open()
        self.LOG.info("Stats for %s - up %s, %s" % (
            config_ini.engine.engine_id,
            fmt.human_duration(proxy.system.time() - config_ini.engine.startup, 0, 2, True).strip(),
            proxy))
    # FIX: use modern `except ... as exc` syntax (the old comma form is
    # Python-2-only and inconsistent with the other handlers in this file)
    except (error.LoggableError, xmlrpc.ERRORS) as exc:
        self.LOG.warn(str(exc))
Statistics logger job callback .
116
5
239,255
def _influxdb_url ( self ) : url = "{0}/db/{1}/series" . format ( self . influxdb . url . rstrip ( '/' ) , self . config . dbname ) if self . influxdb . user and self . influxdb . password : url += "?u={0}&p={1}" . format ( self . influxdb . user , self . influxdb . password ) return url
Return REST API URL to access time series .
96
9
239,256
def _push_data(self):
    """Push stats data to InfluxDB via its HTTP API."""
    if not (self.config.series or self.config.series_host):
        self.LOG.info("Misconfigured InfluxDB job, neither 'series' nor 'series_host' is set!")
        return

    # Assemble data
    fluxdata = []

    if self.config.series:
        try:
            config_ini.engine.open()
            data, views = _flux_engine_data(config_ini.engine)
            fluxdata.append(dict(
                name=self.config.series,
                columns=data.keys(),
                points=[data.values()]
            ))
            fluxdata.append(dict(
                name=self.config.series + '_views',
                columns=views.keys(),
                points=[views.values()]
            ))
        # FIX: use modern `except ... as exc` syntax (old comma form is Python-2-only)
        except (error.LoggableError, xmlrpc.ERRORS) as exc:
            self.LOG.warn("InfluxDB stats: {0}".format(exc))

    # TODO: also push per-host data into `self.config.series_host`

    if not fluxdata:
        self.LOG.debug("InfluxDB stats: no data (previous errors?)")
        return

    # Encode into InfluxDB data packet
    fluxurl = self._influxdb_url()
    fluxjson = json.dumps(fluxdata)
    self.LOG.debug("POST to {0} with {1}".format(fluxurl.split('?')[0], fluxjson))

    # Push it!
    try:
        # TODO: Use a session
        requests.post(fluxurl, data=fluxjson, timeout=self.influxdb.timeout)
    # FIX: modern `except ... as exc` syntax here as well
    except RequestException as exc:
        self.LOG.info("InfluxDB POST error: {0}".format(exc))
Push stats data to InfluxDB .
416
8
239,257
def run(self):
    """Filter job callback."""
    from pyrocore import config
    try:
        config.engine.open()
        items = []  # TODO: select view into items
        self.run_filter(items)
    except (error.LoggableError, xmlrpc.ERRORS) as exc:
        self.LOG.warn(str(exc))
Filter job callback .
71
4
239,258
def replace_fields(meta, patterns):
    """Apply regex substitutions to fields of a (nested) metadata dict.

    Each pattern has the form ``field<DELIM>regex<DELIM>subst<DELIM>`` where the
    delimiter is the pattern's last character; dots in the field name descend
    into sub-dicts (``..`` escapes a literal dot). Returns the mutated `meta`.
    """
    for pattern in patterns:
        try:
            field, regex, subst, _ = pattern.split(pattern[-1])
            # TODO: Allow numerical indices, and "+" for append
            namespace = meta
            keypath = [part.replace('\0', '.') for part in field.replace('..', '\0').split('.')]
            for key in keypath[:-1]:
                namespace = namespace[key]
            namespace[keypath[-1]] = re.sub(regex, subst, namespace[keypath[-1]])
        except (KeyError, IndexError, TypeError, ValueError) as exc:
            raise error.UserError("Bad substitution '%s' (%s)!" % (pattern, exc))
    return meta
Replace patterns in fields .
178
6
239,259
def connect(config_dir=None, optional_config_files=None, cron_cfg="cron"):
    """Initialize everything for interactive use and return the opened engine."""
    from pyrocore.scripts.base import ScriptBase
    from pyrocore.util import load_config

    ScriptBase.setup(cron_cfg=cron_cfg)
    load_config.ConfigLoader(config_dir).load(optional_config_files or [])

    from pyrocore import config
    config.engine.open()
    return config.engine
Initialize everything for interactive use .
104
7
239,260
def setup(cls, cron_cfg="cron"):
    """Set up the runtime environment (RNG seed and logging configuration)."""
    random.seed()

    logging_cfg = cls.LOGGING_CFG
    if "%s" in logging_cfg:
        # Pick the cron-specific logging config when running under --cron
        logging_cfg = logging_cfg % (cron_cfg if "--cron" in sys.argv[1:] else "scripts",)
    logging_cfg = os.path.expanduser(logging_cfg)

    if os.path.exists(logging_cfg):
        logging.HERE = os.path.dirname(logging_cfg)
        logging.config.fileConfig(logging_cfg)
    else:
        logging.basicConfig(level=logging.INFO)

    logging.getLogger().debug("Logging config read from '%s'" % logging_cfg)
Set up the runtime environment .
169
6
239,261
def _get_pkg_meta(self):
    """Try to find package metadata (dist-info METADATA or egg-info PKG-INFO) for this package."""
    logger = logging.getLogger('pyrocore.scripts.base.version_info')
    pkg_info = None
    warnings = []

    for info_ext, info_name in (('.dist-info', 'METADATA'), ('.egg-info', 'PKG-INFO')):
        try:
            # Development setup
            pkg_path = os.path.join(
                __file__.split(__name__.replace('.', os.sep))[0],  # containing path
                __name__.split(".")[0]  # package name
            )
            if os.path.exists(pkg_path + info_ext):
                pkg_path += info_ext
            else:
                # Look for a release-specific metadata directory first
                globbed_paths = glob.glob(pkg_path + "-*-py%d.%d" % sys.version_info[:2] + info_ext)
                if len(globbed_paths) == 1:
                    pkg_path = globbed_paths[0]
                elif globbed_paths:
                    warnings.append("Found {} release-specific candidate versions in *{}".format(
                        len(globbed_paths), info_ext))
                    pkg_path = None
                else:
                    globbed_paths = glob.glob(pkg_path + "-*" + info_ext)
                    if len(globbed_paths) == 1:
                        pkg_path = globbed_paths[0]
                    else:
                        warnings.append("Found {} candidate versions in *{}".format(
                            len(globbed_paths), info_ext))
                        pkg_path = None

            if pkg_path:
                with open(os.path.join(pkg_path, info_name)) as handle:
                    pkg_info = handle.read()
                break
        except IOError:
            continue

    if not pkg_info:
        logger.warn("Software version cannot be determined! ({})".format(', '.join(warnings)))
    return pkg_info or "Version: 0.0.0\n"
Try to find package metadata .
463
6
239,262
def add_bool_option(self, *args, **kwargs):
    """Register a boolean (store_true) command line option."""
    # Derive the destination attribute from the long option name
    long_opt = [opt for opt in args if opt.startswith("--")][0]
    dest = long_opt.replace("--", "").replace("-", "_")
    self.parser.add_option(dest=dest, action="store_true", default=False, help=kwargs['help'], *args)
Add a boolean option .
98
5
239,263
def add_value_option(self, *args, **kwargs):
    """Register a command line option taking a value; the last positional arg is the metavar."""
    kwargs['metavar'] = args[-1]
    if 'dest' not in kwargs:
        # Derive the destination from the first long option name
        kwargs['dest'] = [opt for opt in args if opt.startswith("--")][0].replace("--", "").replace("-", "_")
    if 'default' in kwargs and kwargs['default']:
        # Show non-empty defaults in the help text
        kwargs['help'] += " [%s]" % kwargs['default']
    self.parser.add_option(*args[:-1], **kwargs)
Add a value option .
156
5
239,264
def handle_completion(self):
    """Handle shell completion requests (--help-completion-*) and exit."""
    # We don't want these in the help, so handle them explicitely
    if len(sys.argv) > 1 and sys.argv[1].startswith("--help-completion-"):
        handler = getattr(self, sys.argv[1][2:].replace('-', '_'), None)
        if handler:
            # FIX: use print as a function — the bare Python 2 print statement
            # is a syntax error on Python 3; with a single argument the call
            # form behaves identically on both versions.
            print('\n'.join(sorted(handler())))
            self.STD_LOG_LEVEL = logging.DEBUG
            sys.exit(error.EX_OK)
Handle shell completion stuff .
128
5
239,265
def help_completion_options(self):
    """Yield all long option names of this command (for shell completion)."""
    for option in self.parser.option_list:
        for long_opt in option._long_opts:
            yield long_opt
Return options of this command .
37
6
239,266
def fatal(self, msg, exc=None):
    """Log a fatal error and exit — unless --debug is active and an exception
    was given, in which case return so the caller can re-raise it."""
    if exc is not None:
        self.LOG.fatal("%s (%s)" % (msg, exc))
        if self.options.debug:
            return  # let the caller re-raise it
    else:
        self.LOG.fatal(msg)
    sys.exit(error.EX_SOFTWARE)
Exit on a fatal error .
73
6
239,267
def run(self):
    """The main program skeleton: option parsing, main loop, error handling, shutdown.

    Maps known error classes to exit codes, handles CTRL-C and broken pipes
    gracefully, and always logs the total run time before shutting logging down.
    """
    log_total = True
    try:
        try:
            # Preparation steps
            self.get_options()

            # Template method with the tool's main loop
            self.mainloop()
        # FIX: use modern `except ... as exc` syntax throughout (the old comma
        # form is Python-2-only and inconsistent with the rest of the file)
        except error.LoggableError as exc:
            if self.options.debug:
                raise

            # Log errors caused by invalid user input
            try:
                msg = str(exc)
            except UnicodeError:
                msg = unicode(exc, "UTF-8")
            self.LOG.error(msg)
            sys.exit(error.EX_SOFTWARE)
        except KeyboardInterrupt as exc:
            if self.options.debug:
                raise

            sys.stderr.write("\n\nAborted by CTRL-C!\n")
            sys.stderr.flush()
            sys.exit(error.EX_TEMPFAIL)
        except IOError as exc:
            # [Errno 32] Broken pipe?
            if exc.errno == errno.EPIPE:
                sys.stderr.write("\n%s, exiting!\n" % exc)
                sys.stderr.flush()

                # Monkey patch to prevent an exception during logging shutdown
                try:
                    handlers = logging._handlerList
                except AttributeError:
                    pass
                else:
                    for handler in handlers:
                        try:
                            handler.flush = lambda *_: None
                        except AttributeError:
                            pass  # skip special handlers

                log_total = False
                sys.exit(error.EX_IOERR)
            else:
                raise
    finally:
        # Shut down
        if log_total and self.options:  # no time logging on --version and such
            running_time = time.time() - self.startup
            self.LOG.log(self.STD_LOG_LEVEL, "Total time: %.3f seconds." % running_time)
        logging.shutdown()

    # Special exit code?
    if self.return_code:
        sys.exit(self.return_code)
The main program skeleton .
396
5
239,268
def add_options(self):
    """Add configuration-related command line options."""
    super(ScriptBaseWithConfig, self).add_options()
    self.add_value_option("--config-dir", "DIR",
        help="configuration directory [{}]".format(
            os.environ.get('PYRO_CONFIG_DIR', self.CONFIG_DIR_DEFAULT)))
    self.add_value_option("--config-file", "PATH", action="append", default=[],
        help="additional config file(s) to read")
    self.add_value_option("-D", "--define", "KEY=VAL [-D ...]",
        default=[], action="append", dest="defines",
        help="override configuration attributes")
Add configuration options .
181
4
239,269
def check_for_connection(self):
    """Scan CLI arguments for a '@name' connection alias and switch to it.

    The first alias argument found is consumed (removed from self.args);
    unknown aliases abort via the option parser's error handling.
    """
    for idx, arg in enumerate(self.args):
        if arg.startswith('@'):
            alias = arg[1:]
            if alias not in config.connections:
                self.parser.error("Undefined connection '{}'!".format(alias))
            config.scgi_url = config.connections[alias]
            self.LOG.debug("Switched to connection %s (%s)", alias, config.scgi_url)
            del self.args[idx]
            break
Scan arguments for a connection alias and switch to it .
129
4
239,270
def quit(self):
    """Exit the program because the user chose to abort."""
    self.script.LOG.warn("Abort due to user choice!")
    sys.exit(self.QUIT_RC)
Exit the program due to the user's choice .
34
9
239,271
def redirect(req, _log=pymagic.get_lazy_logger("redirect")):
    """Redirect controller: emit a HTTP 301 to the route's `to` target."""
    log = req.environ.get("wsgilog.logger", _log)
    target = req.relative_url(req.urlvars.to)
    log.info("Redirecting '%s' to '%s'" % (req.url, target))
    return exc.HTTPMovedPermanently(location=target)
Redirect controller to emit a HTTP 301 .
105
9
239,272
def make_app(httpd_config):
    """Factory for the monitoring webapp (routes plus static file serving)."""
    #mimetypes.add_type('image/vnd.microsoft.icon', '.ico')

    # Default paths to serve static files from
    htdocs_paths = [
        os.path.realpath(os.path.join(config.config_dir, "htdocs")),
        os.path.join(os.path.dirname(config.__file__), "data", "htdocs"),
    ]

    return (Router()
        .add_route("/", controller=redirect, to="/static/index.html")
        .add_route("/favicon.ico", controller=redirect, to="/static/favicon.ico")
        .add_route("/static/{filepath:.+}", controller=StaticFolders(htdocs_paths))
        .add_route("/json/{action}", controller=JsonController(**httpd_config.json))
    )
Factory for the monitoring webapp .
224
7
239,273
def guarded(self, func, *args, **kwargs):
    """Call `func` and return its result, or None on known errors.

    Each failing callable is only logged once (tracked in ERRORS_LOGGED).
    """
    try:
        return func(*args, **kwargs)
    except (EnvironmentError, error.LoggableError, xmlrpc.ERRORS) as g_exc:
        if func.__name__ not in self.ERRORS_LOGGED:
            self.LOG.warn("While calling '%s': %s" % (func.__name__, g_exc))
            self.ERRORS_LOGGED.add(func.__name__)
        return None
Call a function return None on errors .
120
8
239,274
def json_engine(self, req):  # pylint: disable=R0201,W0613
    """Return torrent engine data, mapping known errors to HTTP 500."""
    try:
        return stats.engine_data(config.engine)
    except (error.LoggableError, xmlrpc.ERRORS) as torrent_exc:
        raise exc.HTTPInternalServerError(str(torrent_exc))
Return torrent engine data .
73
5
239,275
def json_charts(self, req):
    """Return charting data (host metrics plus engine statistics)."""
    # Sum disk usage over all configured paths; keep per-path detail
    disk_used, disk_total, disk_detail = 0, 0, []
    for disk_usage_path in self.cfg.disk_usage_path.split(os.pathsep):
        disk_usage = self.guarded(psutil.disk_usage, os.path.expanduser(disk_usage_path.strip()))
        if disk_usage:
            disk_used += disk_usage.used
            disk_total += disk_usage.total
            disk_detail.append((disk_usage.used, disk_usage.total))

    data = dict(
        engine=self.json_engine(req),
        uptime=time.time() - psutil.BOOT_TIME,  # pylint: disable=no-member
        fqdn=self.guarded(socket.getfqdn),
        cpu_usage=self.guarded(psutil.cpu_percent, 0),
        ram_usage=self.guarded(psutil.virtual_memory),
        swap_usage=self.guarded(psutil.swap_memory),
        disk_usage=(disk_used, disk_total, disk_detail) if disk_total else None,
        disk_io=self.guarded(psutil.disk_io_counters),
        net_io=self.guarded(psutil.net_io_counters),
    )
    return data
Return charting data .
305
5
239,276
def parse_route(cls, template):
    """Compile a route template like ``/json/{action}`` into a regex with named groups."""
    parts = []
    last_pos = 0
    for match in cls.ROUTES_RE.finditer(template):
        # Literal text between placeholders is matched verbatim
        parts.append(re.escape(template[last_pos:match.start()]))
        var_name = match.group(1)
        expr = match.group(2) or '[^/]+'  # default: a single path segment
        parts.append('(?P<%s>%s)' % (var_name, expr))
        last_pos = match.end()
    parts.append(re.escape(template[last_pos:]))
    return re.compile('^%s$' % ''.join(parts))
Parse a route definition and return the compiled regex that matches it .
143
14
239,277
def add_route(self, template, controller, **kwargs):
    """Add a route definition; `controller` may be a dotted import name. Returns self (fluent)."""
    # FIX: `basestring` does not exist on Python 3; fall back to `str` there
    # while keeping the Python 2 behavior (str + unicode) intact.
    try:
        string_types = basestring  # noqa: F821 -- Python 2 only
    except NameError:
        string_types = str
    if isinstance(controller, string_types):
        controller = pymagic.import_name(controller)

    self.routes.append((self.parse_route(template), controller, kwargs))
    return self
Add a route definition
66
4
239,278
def _duration ( start , end ) : if start and end : if start > end : return None else : return end - start elif start : return time . time ( ) - start else : return None
Return time delta .
43
4
239,279
def _fmt_files(filelist):
    """Produce an indented, tree-style file listing for `filelist`."""
    depth = max(i.path.count('/') for i in filelist)
    # Sentinel that sorts after real names and never matches a directory
    pad = ['\uFFFE'] * depth

    base_indent = ' ' * 38
    indent = 0
    result = []
    prev_path = pad
    sorted_files = sorted(
        (i.path.split('/')[:-1] + pad, i.path.rsplit('/', 1)[-1], i)
        for i in filelist)

    for path, name, fileinfo in sorted_files:
        path = path[:depth]
        if path != prev_path:
            # Length of the common directory prefix with the previous entry
            common = min([depth] + [
                idx for idx, (dirname, prev_name) in enumerate(zip(path, prev_path))
                if dirname != prev_name])

            # Close directories we are leaving ...
            while indent > common:
                indent -= 1
                result.append("%s%s/" % (base_indent, ' ' * indent))
            # ... and open the new ones
            for dirname in path[common:]:
                if dirname == '\uFFFE':
                    break
                result.append("%s%s\\ %s" % (base_indent, ' ' * indent, dirname))
                indent += 1

        result.append(" %s %s %s %s| %s" % (
            {0: "off ", 1: " ", 2: "high"}.get(fileinfo.prio, "????"),
            fmt.iso_datetime(fileinfo.mtime),
            fmt.human_size(fileinfo.size),
            ' ' * indent,
            name,
        ))
        prev_path = path

    # Close any directories still open, then append the summary line
    while indent > 0:
        indent -= 1
        result.append("%s%s/" % (base_indent, ' ' * indent))
    result.append("%s= %d file(s)" % (base_indent, len(filelist)))

    return '\n'.join(result)
Produce a file listing .
489
6
239,280
def detect_traits ( item ) : return traits . detect_traits ( name = item . name , alias = item . alias , filetype = ( list ( item . fetch ( "kind_51" ) ) or [ None ] ) . pop ( ) , )
Build traits list from attributes of the passed item . Currently kind_51 name and alias are considered .
57
20
239,281
def add_manifold_attribute(cls, name):
    """Register a manifold engine attribute (``custom_*`` or ``kind_N``) on the fly."""
    if name.startswith("custom_"):
        try:
            return FieldDefinition.FIELDS[name]
        except KeyError:
            field = OnDemandField(fmt.to_unicode, name,
                "custom attribute %r" % name.split('_', 1)[1],
                matcher=matching.PatternFilter)
            setattr(cls, name, field)  # add field to all proxy objects
            return field
    elif name.startswith("kind_") and name[5:].isdigit():
        try:
            return FieldDefinition.FIELDS[name]
        except KeyError:
            limit = int(name[5:].lstrip('0') or '0', 10)
            if limit > 100:
                raise error.UserError("kind_N: N > 100 in %r" % name)
            field = OnDemandField(set, name,
                "kinds of files that make up more than %d%% of this item's size" % limit,
                matcher=matching.TaggedAsFilter, formatter=_fmt_tags,
                engine_name="kind_%d" % limit)
            setattr(cls, name, field)
            return field
Register a manifold engine attribute .
274
6
239,282
def add_custom_fields(cls, *args, **kw):
    """Attach all custom fields produced by the configured field factories to `cls`."""
    for factory in config.custom_field_factories:
        for field in factory():
            setattr(cls, field.name, field)
Add any custom fields defined in the configuration .
50
9
239,283
def _fetch_items ( self ) : if self . _items is None : self . _items = list ( self . engine . items ( self ) ) return self . _items
Fetch items and cache them in an attribute .
39
5
239,284
def _check_hash_view ( self ) : infohash = None if self . viewname . startswith ( '#' ) : infohash = self . viewname [ 1 : ] elif len ( self . viewname ) == 40 : try : int ( self . viewname , 16 ) except ( TypeError , ValueError ) : pass else : infohash = self . viewname return infohash
Return infohash if view name refers to a single item else None .
90
15
239,285
def size(self):
    """Return the total unfiltered size of the view."""
    #return len(self._fetch_items())
    if self._check_hash_view():
        return 1  # single-item (infohash) pseudo view
    return self.engine.open().view.size(xmlrpc.NOHASH, self.viewname)
Total unfiltered size of view .
59
8
239,286
def group_by(self, fields, items=None):
    """Return a dict mapping group keys (derived from `fields`) to lists of items."""
    grouped = defaultdict(list)
    if items is None:
        items = self.items()

    try:
        # A single field name: use a fast attrgetter (fails for non-string `fields`)
        key = operator.attrgetter(fields + '')
    except TypeError:
        def key(obj, names=tuple(fields)):
            'Helper to return group key tuple'
            return tuple(getattr(obj, x) for x in names)

    for item in items:
        grouped[key(item)].append(item)
    return grouped
Returns a dict of lists of items grouped by the given fields .
107
13
239,287
def _set_mappings(self):
    """Set command mappings according to the connected rTorrent version.

    Returns a (versions, version_info) pair; raises LoggableError when the
    client cannot be reached.
    """
    try:
        self._versions = (self.system.client_version(), self.system.library_version(),)
        self._version_info = tuple(int(i) for i in self._versions[0].split('.'))
        self._use_deprecated = self._version_info < (0, 8, 7)

        # Merge mappings for this version (config attributes named "xmlrpc_X_Y_Z")
        self._mapping = self._mapping.copy()
        for key, val in sorted(i for i in vars(config).items() if i[0].startswith("xmlrpc_")):
            map_version = tuple(int(i) for i in key.split('_')[1:])
            if map_version <= self._version_info:
                if config.debug:
                    self.LOG.debug("MAPPING for %r added: %r" % (map_version, val))
                self._mapping.update(val)
        self._fix_mappings()
    except ERRORS as exc:
        raise error.LoggableError("Can't connect to %s (%s)" % (self._url, exc))

    return self._versions, self._version_info
Set command mappings according to rTorrent version .
287
10
239,288
def _fix_mappings(self):
    """Add computed entries (setter variants ending in '=') to the command mappings."""
    # FIX: materialize items() before updating — on Python 3, .items() is a
    # live view and mutating the dict while the generator iterates it raises
    # "RuntimeError: dictionary changed size during iteration".
    self._mapping.update(
        (key + '=', val + '=')
        for key, val in list(self._mapping.items()) if not key.endswith('='))
    if config.debug:
        self.LOG.debug("CMD MAPPINGS ARE: %r" % (self._mapping,))
Add computed stuff to mappings .
87
7
239,289
def _map_call(self, cmd):
    """Map an old command name to its new form, per version mappings and naming rules."""
    if config.debug and cmd != self._mapping.get(cmd, cmd):
        self.LOG.debug("MAP %s ==> %s" % (cmd, self._mapping[cmd]))
    cmd = self._mapping.get(cmd, cmd)

    # These we do by code, to avoid lengthy lists in the config:
    # strip the "get_" part from accessor commands on modern clients
    if not self._use_deprecated and any(cmd.startswith(i) for i in ("d.get_", "f.get_", "p.get_", "t.get_")):
        cmd = cmd[:2] + cmd[6:]
    return cmd
Map old to new command names .
157
7
239,290
def parse(self):
    """Parse the metafile and check pre-conditions; return True if the item should be loaded."""
    try:
        if not os.path.getsize(self.ns.pathname):
            # Ignore 0-byte dummy files (Firefox creates these while downloading)
            self.job.LOG.warn("Ignoring 0-byte metafile '%s'" % (self.ns.pathname,))
            return
        self.metadata = metafile.checked_open(self.ns.pathname)
    except EnvironmentError as exc:
        self.job.LOG.error("Can't read metafile '%s' (%s)" % (
            self.ns.pathname,
            str(exc).replace(": '%s'" % self.ns.pathname, ""),
        ))
        return
    except ValueError as exc:
        self.job.LOG.error("Invalid metafile '%s': %s" % (self.ns.pathname, exc))
        return

    self.ns.info_hash = metafile.info_hash(self.metadata)
    self.ns.info_name = self.metadata["info"]["name"]
    self.job.LOG.info("Loaded '%s' from metafile '%s'" % (self.ns.info_name, self.ns.pathname))

    # Check whether item is already loaded
    try:
        name = self.job.proxy.d.name(self.ns.info_hash, fail_silently=True)
    except xmlrpc.HashNotFound:
        pass
    except xmlrpc.ERRORS as exc:
        if exc.faultString != "Could not find info-hash.":
            self.job.LOG.error("While checking for #%s: %s" % (self.ns.info_hash, exc))
            return
    else:
        self.job.LOG.warn("Item #%s '%s' already added to client" % (self.ns.info_hash, name))
        return

    return True
Parse metafile and check pre - conditions .
421
11
239,291
def addinfo(self):
    """Add known facts about the metafile to the templating namespace."""
    # Basic values
    self.ns.watch_path = self.job.config.path
    self.ns.relpath = None
    for watch in self.job.config.path:
        if self.ns.pathname.startswith(watch.rstrip('/') + '/'):
            self.ns.relpath = os.path.dirname(self.ns.pathname)[len(watch.rstrip('/')) + 1:]
            break

    # Build indicator flags for target state from filename
    flags = self.ns.pathname.split(os.sep)
    flags.extend(flags[-1].split('.'))
    self.ns.flags = set(i for i in flags if i)

    # Metafile stuff
    announce = self.metadata.get("announce", None)
    if announce:
        self.ns.tracker_alias = configuration.map_announce2alias(announce)

    main_file = self.ns.info_name
    if "files" in self.metadata["info"]:
        # The largest file determines the filetype
        main_file = list(sorted(
            (i["length"], i["path"][-1])
            for i in self.metadata["info"]["files"]))[-1][1]
    self.ns.filetype = os.path.splitext(main_file)[1]

    # Add name traits
    kind, info = traits.name_trait(self.ns.info_name, add_info=True)
    self.ns.traits = Bunch(info)
    self.ns.traits.kind = kind
    self.ns.label = '/'.join(traits.detect_traits(
        name=self.ns.info_name, alias=self.ns.tracker_alias,
        filetype=self.ns.filetype)).strip('/')

    # Finally, expand commands from templates
    self.ns.commands = []
    for key, cmd in sorted(self.job.custom_cmds.items()):
        try:
            self.ns.commands.append(formatting.expand_template(cmd, self.ns))
        except error.LoggableError as exc:
            self.job.LOG.error("While expanding '%s' custom command: %s" % (key, exc))
Add known facts to templating namespace .
505
9
239,292
def load(self):
    """Load the parsed metafile into the client."""
    if not self.ns.info_hash and not self.parse():
        return

    self.addinfo()

    # TODO: dry_run
    try:
        # TODO: Scrub metafile if requested

        # Determine target state, starting from the configured defaults ...
        start_it = self.job.config.load_mode.lower() in ("start", "started")
        queue_it = self.job.config.queued

        # ... then let path/filename flags override them
        if "start" in self.ns.flags:
            start_it = True
        elif "load" in self.ns.flags:
            start_it = False
        if "queue" in self.ns.flags:
            queue_it = True

        # Load metafile into client
        load_cmd = self.job.proxy.load.verbose
        if queue_it:
            if not start_it:
                self.ns.commands.append("d.priority.set=0")
        elif start_it:
            load_cmd = self.job.proxy.load.start_verbose

        self.job.LOG.debug("Templating values are:\n %s" % "\n ".join(
            "%s=%s" % (key, repr(val)) for key, val in sorted(self.ns.items())))
        load_cmd(xmlrpc.NOHASH, self.ns.pathname, *tuple(self.ns.commands))
        time.sleep(.05)  # let things settle

        # Announce new item
        if not self.job.config.quiet:
            msg = "%s: Loaded '%s' from '%s/'%s%s" % (
                self.job.__class__.__name__,
                fmt.to_utf8(self.job.proxy.d.name(self.ns.info_hash, fail_silently=True)),
                os.path.dirname(self.ns.pathname).rstrip(os.sep),
                " [queued]" if queue_it else "",
                (" [startable]" if queue_it else " [started]") if start_it else " [normal]",
            )
            self.job.proxy.log(xmlrpc.NOHASH, msg)

        # TODO: Evaluate fields and set client values
        # TODO: Add metadata to tied file if requested
        # TODO: Execute commands AFTER adding the item, with full templating
        # Example: Labeling - add items to a persistent view, i.e. "postcmd = view.set_visible={{label}}"
        # could also be done automatically from the path, see above under "flags" (autolabel = True)
        # and add traits to the flags, too, in that case
    except xmlrpc.ERRORS as exc:
        self.job.LOG.error("While loading #%s: %s" % (self.ns.info_hash, exc))
Load metafile into client .
623
7
239,293
def handle_path(self, event):
    """Handle a path-related inotify event (metafiles and watch config changes)."""
    self.job.LOG.debug("Notification %r" % event)
    if event.dir:
        return

    if any(event.pathname.endswith(ext) for ext in self.METAFILE_EXT):
        MetafileHandler(self.job, event.pathname).handle()
    elif os.path.basename(event.pathname) == "watch.ini":
        self.job.LOG.info("NOT YET Reloading watch config for '%s'" % event.path)
Handle a path - related event .
124
7
239,294
def setup(self):
    """Set up the inotify manager, handler and notifier for all watched paths."""
    if not pyinotify.WatchManager:
        raise error.UserError("You need to install 'pyinotify' to use %s (%s)!" % (
            self.__class__.__name__, pyinotify._import_error))  # pylint: disable=E1101, W0212

    self.manager = pyinotify.WatchManager()
    self.handler = TreeWatchHandler(job=self)
    self.notifier = pyinotify.AsyncNotifier(self.manager, self.handler)

    if self.LOG.isEnabledFor(logging.DEBUG):
        mask = pyinotify.ALL_EVENTS
    else:
        mask = pyinotify.IN_CLOSE_WRITE | pyinotify.IN_MOVED_TO  # bogus pylint: disable=E1101

    # Add all configured base dirs (recursively, auto-adding new subdirs)
    for path in self.config.path:
        self.manager.add_watch(path.strip(), mask, rec=True, auto_add=True)
Set up inotify manager .
231
7
239,295
def get_filetypes(filelist, path=None, size=os.path.getsize):
    """Return a sorted list of (weight%, extension) pairs for an iterable of file names.

    `path` maps an entry to its file name (identity by default); `size` maps an
    entry to its byte size (on-disk size by default).
    """
    path = path or (lambda _: _)

    # Accumulate total size per (normalized) file extension
    histo = defaultdict(int)
    for entry in filelist:
        ext = os.path.splitext(path(entry))[1].lstrip('.').lower()
        if ext and ext[0] == 'r' and ext[1:].isdigit():
            ext = "rar"  # fold rNN multi-part volumes into one bucket
        elif ext == "jpeg":
            ext = "jpg"
        elif ext == "mpeg":
            ext = "mpg"
        histo[ext] += size(entry)

    # Normalize values to integer percent
    total = sum(histo.values())
    if total:
        for ext, val in histo.items():
            histo[ext] = int(val * 100.0 / total + .499)

    return sorted(zip(histo.values(), histo.keys()), reverse=True)
Get a sorted list of file types and their weight in percent from an iterable of file names .
226
20
239,296
def name_trait ( name , add_info = False ) : kind , info = None , { } # Anything to check against? if name and not name . startswith ( "VTS_" ) : lower_name = name . lower ( ) trait_patterns = ( ( "tv" , TV_PATTERNS , "show" ) , ( "movie" , MOVIE_PATTERNS , "title" ) ) # TV check if any ( i in lower_name for i in _DEFINITELY_TV ) : kind = "tv" trait_patterns = trait_patterns [ : 1 ] # Regex checks re_name = '.' . join ( [ i . lstrip ( '[(' ) . rstrip ( ')]' ) for i in name . split ( ' .' ) ] ) for trait , patterns , title_group in trait_patterns : matched , patname = None , None for patname , pattern in patterns : matched = pattern . match ( re_name ) ##print matched, patname, re_name; print " ", pattern.pattern if matched and not any ( i in matched . groupdict ( ) [ title_group ] . lower ( ) for i in BAD_TITLE_WORDS ) : kind , info = trait , matched . groupdict ( ) break if matched : info [ "pattern" ] = patname # Fold auxiliary groups into main one for key , val in list ( info . items ( ) ) : if key [ - 1 ] . isdigit ( ) : del info [ key ] if val : key = re . sub ( "[0-9]+$" , "" , key ) info [ key ] = ( "%s %s" % ( info . get ( key ) or "" , val ) ) . strip ( ) break # TODO: Split by "dvdrip", year, etc. to get to the title and then # do a imdb / tvdb lookup; cache results, hits for longer, misses # for a day at max. # Return requested result return ( kind , info ) if add_info else kind
Determine content type from name.
449
8
239,297
def detect_traits(name=None, alias=None, filetype=None):
    """Build traits list from passed attributes.

    Returns a 2-item list ``[kind, filetype]``, or an empty list when
    nothing could be detected.
    """
    result = []
    if filetype:
        filetype = filetype.lstrip('.')

    # Check for "themed" trackers — a configured alias theme wins outright
    theme = config.traits_by_alias.get(alias)
    if alias and theme:
        result = [theme, filetype or "other"]

    # Guess from file extension and name
    elif filetype in KIND_AUDIO:
        result = ["audio", filetype]
    elif filetype in KIND_VIDEO:
        result = ["video", filetype]

        # Refine generic "video" to "tv"/"movie" when the name gives it away
        contents = name_trait(name)
        if contents:
            result = [contents, filetype]
    elif filetype in KIND_IMAGE:
        result = ["img", filetype]
    elif filetype in KIND_DOCS:
        result = ["docs", filetype]
    elif filetype in KIND_ARCHIVE:
        result = ["misc", filetype]

        # Archives often wrap video releases — refine by name here too
        contents = name_trait(name)
        if contents:
            result = [contents, filetype]

    return result
Build traits list from passed attributes.
233
7
239,298
def console_progress():
    """Return a progress callback for interactive consoles, or None when stdout is not a tty."""
    def progress(totalhashed, totalsize):
        """Write the percentage done (or blanks once complete) to stdout, staying on one line."""
        if totalhashed < totalsize:
            msg = "%5.1f%% complete" % (totalhashed * 100.0 / totalsize)
        else:
            msg = " " * 30
        sys.stdout.write(msg + " \r")
        sys.stdout.flush()

    # stdout may be replaced by an object without isatty() (e.g. when captured)
    try:
        interactive = sys.stdout.isatty()
    except AttributeError:
        return None
    return progress if interactive else None
Return a progress indicator for consoles if stdout is a tty.
104
14
239,299
def check_info(info):
    """Validate info dict.

    Checks a bittorrent metainfo ``info`` dictionary for structural sanity
    and path-traversal safety, raising ``ValueError`` on any violation.
    Returns the (unmodified) *info* dict on success.

    NOTE(review): uses ``basestring`` / ``long``, so this is Python 2 code;
    ``ALLOWED_ROOT_NAME``, ``ALLOWED_PATH_NAME`` and ``fmt`` are assumed to be
    module-level regexes / helpers defined elsewhere in this file — confirm.
    """
    if not isinstance(info, dict):
        raise ValueError("bad metainfo - not a dictionary")

    # "pieces" is a concatenation of 20-byte SHA1 digests, hence the % 20 check
    pieces = info.get("pieces")
    if not isinstance(pieces, basestring) or len(pieces) % 20 != 0:
        raise ValueError("bad metainfo - bad pieces key")

    piece_size = info.get("piece length")
    if not isinstance(piece_size, (int, long)) or piece_size <= 0:
        raise ValueError("bad metainfo - illegal piece length")

    name = info.get("name")
    if not isinstance(name, basestring):
        raise ValueError("bad metainfo - bad name (type is %r)" % type(name).__name__)
    if not ALLOWED_ROOT_NAME.match(name):
        raise ValueError("name %s disallowed for security reasons" % name)

    # Exactly one of "files" (multi-file) and "length" (single-file) must be present
    if ("files" in info) == ("length" in info):
        raise ValueError("single/multiple file mix")

    if "length" in info:
        # Single-file torrent: just check the length
        length = info.get("length")
        if not isinstance(length, (int, long)) or length < 0:
            raise ValueError("bad metainfo - bad length")
    else:
        # Multi-file torrent: validate each entry's length and path components
        files = info.get("files")
        if not isinstance(files, (list, tuple)):
            raise ValueError("bad metainfo - bad file list")
        for item in files:
            if not isinstance(item, dict):
                raise ValueError("bad metainfo - bad file value")

            length = item.get("length")
            if not isinstance(length, (int, long)) or length < 0:
                raise ValueError("bad metainfo - bad length")

            path = item.get("path")
            if not isinstance(path, (list, tuple)) or not path:
                raise ValueError("bad metainfo - bad path")
            for part in path:
                if not isinstance(part, basestring):
                    raise ValueError("bad metainfo - bad path dir")
                part = fmt.to_unicode(part)
                # Reject ".." components to prevent writing outside the torrent root
                if part == '..':
                    raise ValueError("relative path in %s disallowed for security reasons" % '/'.join(path))
                if part and not ALLOWED_PATH_NAME.match(part):
                    raise ValueError("path %s disallowed for security reasons" % part)

        # No two files may resolve to the same on-disk path
        file_paths = [os.sep.join(item["path"]) for item in files]
        if len(set(file_paths)) != len(file_paths):
            raise ValueError("bad metainfo - duplicate path")

    return info
Validate info dict.
613
5