idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
37,300
def escape ( html , force = False ) : if hasattr ( html , '__html__' ) and not force : return html if html in NOTHING : return '' else : return to_string ( html ) . replace ( '&' , '&amp;' ) . replace ( '<' , '&lt;' ) . replace ( '>' , '&gt;' ) . replace ( "'" , '&#39;' ) . replace ( '"' , '&quot;' )
Returns the given HTML with ampersands quotes and angle brackets encoded .
37,301
def capfirst ( x ) : x = to_string ( x ) . strip ( ) if x : return x [ 0 ] . upper ( ) + x [ 1 : ] . lower ( ) else : return x
Capitalise the first letter of x .
37,302
def nicename ( name ) : name = to_string ( name ) return capfirst ( ' ' . join ( name . replace ( '-' , ' ' ) . replace ( '_' , ' ' ) . split ( ) ) )
Make name a more user friendly string .
37,303
def set ( self , key , value ) : task = Task . current_task ( ) try : context = task . _context except AttributeError : task . _context = context = { } context [ key ] = value
Set a value in the task context
37,304
def stack_pop ( self , key ) : task = Task . current_task ( ) try : context = task . _context_stack except AttributeError : raise KeyError ( 'pop from empty stack' ) from None value = context [ key ] stack_value = value . pop ( ) if not value : context . pop ( key ) return stack_value
Remove a value in a task context stack
37,305
def module_attribute ( dotpath , default = None , safe = False ) : if dotpath : bits = str ( dotpath ) . split ( ':' ) try : if len ( bits ) == 2 : attr = bits [ 1 ] module_name = bits [ 0 ] else : bits = bits [ 0 ] . split ( '.' ) if len ( bits ) > 1 : attr = bits [ - 1 ] module_name = '.' . join ( bits [ : - 1 ] ) else : raise ValueError ( 'Could not find attribute in %s' % dotpath ) module = import_module ( module_name ) return getattr ( module , attr ) except Exception : if not safe : raise return default else : if not safe : raise ImportError return default
Load an attribute from a module .
37,306
def start ( self , exit = True ) : if self . state == ACTOR_STATES . INITIAL : self . _concurrency . before_start ( self ) self . _concurrency . add_events ( self ) try : self . cfg . when_ready ( self ) except Exception : self . logger . exception ( 'Unhandled exception in when_ready hook' ) self . _started = self . _loop . time ( ) self . _exit = exit self . state = ACTOR_STATES . STARTING self . _run ( )
Called after forking to start the actor s life .
37,307
def send ( self , target , action , * args , ** kwargs ) : target = self . monitor if target == 'monitor' else target mailbox = self . mailbox if isinstance ( target , ActorProxyMonitor ) : mailbox = target . mailbox else : actor = self . get_actor ( target ) if isinstance ( actor , Actor ) : return command_in_context ( action , self , actor , args , kwargs ) elif isinstance ( actor , ActorProxyMonitor ) : mailbox = actor . mailbox if hasattr ( mailbox , 'send' ) : return mailbox . send ( action , self , target , args , kwargs ) else : raise CommandError ( 'Cannot execute "%s" in %s. Unknown actor %s.' % ( action , self , target ) )
Send a message to target to perform action with given positional args and key - valued kwargs . Returns a coroutine or a Future .
37,308
def stream ( self , request , counter = 0 ) : if self . _children : for child in self . _children : if isinstance ( child , String ) : yield from child . stream ( request , counter + 1 ) else : yield child
Returns an iterable over strings .
37,309
def to_bytes ( self , request = None ) : data = bytearray ( ) for chunk in self . stream ( request ) : if isinstance ( chunk , str ) : chunk = chunk . encode ( self . charset ) data . extend ( chunk ) return bytes ( data )
Called to transform the collection of streams into the content string . This method can be overwritten by derived classes .
37,310
def attr ( self , * args ) : attr = self . _attr if not args : return attr or { } result , adding = self . _attrdata ( 'attr' , * args ) if adding : for key , value in result . items ( ) : if DATARE . match ( key ) : self . data ( key [ 5 : ] , value ) else : if attr is None : self . _extra [ 'attr' ] = attr = { } attr [ key ] = value result = self return result
Add the specific attribute to the attribute dictionary with key name and value value and return self .
37,311
def addClass ( self , cn ) : if cn : if isinstance ( cn , ( tuple , list , set , frozenset ) ) : add = self . addClass for c in cn : add ( c ) else : classes = self . _classes if classes is None : self . _extra [ 'classes' ] = classes = set ( ) add = classes . add for cn in cn . split ( ) : add ( slugify ( cn ) ) return self
Add the specific class names to the class set and return self .
37,312
def flatatt ( self , ** attr ) : cs = '' attr = self . _attr classes = self . _classes data = self . _data css = self . _css attr = attr . copy ( ) if attr else { } if classes : cs = ' ' . join ( classes ) attr [ 'class' ] = cs if css : attr [ 'style' ] = ' ' . join ( ( '%s:%s;' % ( k , v ) for k , v in css . items ( ) ) ) if data : for k , v in data . items ( ) : attr [ 'data-%s' % k ] = dump_data_value ( v ) if attr : return '' . join ( attr_iter ( attr ) ) else : return ''
Return a string with attributes to add to the tag
37,313
def css ( self , mapping = None ) : css = self . _css if mapping is None : return css elif isinstance ( mapping , Mapping ) : if css is None : self . _extra [ 'css' ] = css = { } css . update ( mapping ) return self else : return css . get ( mapping ) if css else None
Update the css dictionary if mapping is a dictionary otherwise return the css value at mapping .
37,314
def absolute_path ( self , path , minify = True ) : if minify : ending = '.%s' % self . mediatype if not path . endswith ( ending ) : if self . minified : path = '%s.min' % path path = '%s%s' % ( path , ending ) if self . is_relative ( path ) and self . media_path : return '%s%s' % ( self . media_path , path ) elif self . asset_protocol and path . startswith ( '//' ) : return '%s%s' % ( self . asset_protocol , path ) else : return path
Return a suitable absolute url for path .
37,315
def insert ( self , index , child , rel = None , type = None , media = None , condition = None , ** kwargs ) : if child : srel = 'stylesheet' stype = 'text/css' minify = rel in ( None , srel ) and type in ( None , stype ) path = self . absolute_path ( child , minify = minify ) if path . endswith ( '.css' ) : rel = rel or srel type = type or stype value = Html ( 'link' , href = path , rel = rel , ** kwargs ) if type : value . attr ( 'type' , type ) if media not in ( None , 'all' ) : value . attr ( 'media' , media ) if condition : value = Html ( None , '<!--[if %s]>\n' % condition , value , '<![endif] ) value = value . to_string ( ) if value not in self . children : if index is None : self . children . append ( value ) else : self . children . insert ( index , value )
Append a link to this container .
37,316
def insert ( self , index , child , ** kwargs ) : if child : script = self . script ( child , ** kwargs ) if script not in self . children : if index is None : self . children . append ( script ) else : self . children . insert ( index , script )
add a new script to the container .
37,317
def get_meta ( self , name , meta_key = None ) : meta_key = meta_key or 'name' for child in self . meta . _children : if isinstance ( child , Html ) and child . attr ( meta_key ) == name : return child . attr ( 'content' )
Get the content attribute of a meta tag name .
37,318
def replace_meta ( self , name , content = None , meta_key = None ) : children = self . meta . _children if not content : children = tuple ( children ) meta_key = meta_key or 'name' for child in children : if child . attr ( meta_key ) == name : if content : child . attr ( 'content' , content ) else : self . meta . _children . remove ( child ) return if content : self . add_meta ( ** { meta_key : name , 'content' : content } )
Replace the content attribute of meta tag name
37,319
def randompaths ( request , num_paths = 1 , size = 250 , mu = 0 , sigma = 1 ) : r = [ ] for p in range ( num_paths ) : v = 0 path = [ v ] r . append ( path ) for t in range ( size ) : v += normalvariate ( mu , sigma ) path . append ( v ) return r
Lists of random walks .
37,320
def setup ( self , environ ) : json_handler = Root ( ) . putSubHandler ( 'calc' , Calculator ( ) ) middleware = wsgi . Router ( '/' , post = json_handler , accept_content_types = JSON_CONTENT_TYPES ) response = [ wsgi . GZipMiddleware ( 200 ) ] return wsgi . WsgiHandler ( middleware = [ wsgi . wait_for_body_middleware , middleware ] , response_middleware = response )
Called once to setup the list of wsgi middleware .
37,321
def AsyncResponseMiddleware ( environ , resp ) : future = create_future ( ) future . _loop . call_soon ( future . set_result , resp ) return future
This is just for testing the asynchronous response middleware
37,322
def encode ( self , message ) : if not isinstance ( message , dict ) : message = { 'message' : message } message [ 'time' ] = time . time ( ) return json . dumps ( message )
Encode a message when publishing .
37,323
def on_message ( self , websocket , msg ) : if msg : lines = [ ] for li in msg . split ( '\n' ) : li = li . strip ( ) if li : lines . append ( li ) msg = ' ' . join ( lines ) if msg : return self . pubsub . publish ( self . channel , msg )
When a new message arrives it publishes to all listening clients .
37,324
async def rpc_message ( self , request , message ) : await self . pubsub . publish ( self . channel , message ) return 'OK'
Publish a message via JSON - RPC
37,325
def setup ( self , environ ) : request = wsgi_request ( environ ) cfg = request . cache . cfg loop = request . cache . _loop self . store = create_store ( cfg . data_store , loop = loop ) pubsub = self . store . pubsub ( protocol = Protocol ( ) ) channel = '%s_webchat' % self . name ensure_future ( pubsub . subscribe ( channel ) , loop = loop ) return WsgiHandler ( [ Router ( '/' , get = self . home_page ) , WebSocket ( '/message' , Chat ( pubsub , channel ) ) , Router ( '/rpc' , post = Rpc ( pubsub , channel ) , response_content_types = JSON_CONTENT_TYPES ) ] , [ AsyncResponseMiddleware , GZipMiddleware ( min_length = 20 ) ] )
Called once only to setup the WSGI application handler .
37,326
def _get_headers ( self , environ ) : headers = self . headers method = environ [ 'REQUEST_METHOD' ] if has_empty_content ( self . status_code , method ) and method != HEAD : headers . pop ( 'content-type' , None ) headers . pop ( 'content-length' , None ) self . _content = ( ) else : if not self . is_streamed ( ) : cl = reduce ( count_len , self . _content , 0 ) headers [ 'content-length' ] = str ( cl ) ct = headers . get ( 'content-type' ) if self . encoding : ct = ct or 'text/plain' if ';' not in ct : ct = '%s; charset=%s' % ( ct , self . encoding ) if ct : headers [ 'content-type' ] = ct if method == HEAD : self . _content = ( ) if ( self . status_code < 400 and self . _can_store_cookies and self . _cookies ) : for c in self . cookies . values ( ) : headers . add ( 'set-cookie' , c . OutputString ( ) ) return headers . items ( )
The list of headers for this response
37,327
def rpc_method ( func , doc = None , format = 'json' , request_handler = None ) : def _ ( self , * args , ** kwargs ) : request = args [ 0 ] if request_handler : kwargs = request_handler ( request , format , kwargs ) try : return func ( * args , ** kwargs ) except TypeError : msg = checkarity ( func , args , kwargs ) if msg : raise InvalidParams ( 'Invalid Parameters. %s' % msg ) else : raise _ . __doc__ = doc or func . __doc__ _ . __name__ = func . __name__ _ . FromApi = True return _
A decorator which exposes a function func as an rpc function .
37,328
def clean_path_middleware ( environ , start_response = None ) : path = environ [ 'PATH_INFO' ] if path and '//' in path : url = re . sub ( "/+" , '/' , path ) if not url . startswith ( '/' ) : url = '/%s' % url qs = environ [ 'QUERY_STRING' ] if qs : url = '%s?%s' % ( url , qs ) raise HttpRedirect ( url )
Clean url from double slashes and redirect if needed .
37,329
def authorization_middleware ( environ , start_response = None ) : key = 'http.authorization' c = environ . get ( key ) if c is None : code = 'HTTP_AUTHORIZATION' if code in environ : environ [ key ] = parse_authorization_header ( environ [ code ] )
Parse the HTTP_AUTHORIZATION key in the environ .
37,330
async def wait_for_body_middleware ( environ , start_response = None ) : if environ . get ( 'wsgi.async' ) : try : chunk = await environ [ 'wsgi.input' ] . read ( ) except TypeError : chunk = b'' environ [ 'wsgi.input' ] = BytesIO ( chunk ) environ . pop ( 'wsgi.async' )
Use this middleware to wait for the full body .
37,331
def middleware_in_executor ( middleware ) : @ wraps ( middleware ) def _ ( environ , start_response ) : loop = get_event_loop ( ) return loop . run_in_executor ( None , middleware , environ , start_response ) return _
Use this middleware to run a synchronous middleware in the event loop executor .
37,332
async def release_forks ( self , philosopher ) : forks = self . forks self . forks = [ ] self . started_waiting = 0 for fork in forks : philosopher . logger . debug ( 'Putting down fork %s' , fork ) await philosopher . send ( 'monitor' , 'putdown_fork' , fork ) await sleep ( self . cfg . waiting_period )
The philosopher has just eaten and is ready to release both forks .
37,333
def hello ( environ , start_response ) : if environ [ 'REQUEST_METHOD' ] == 'GET' : data = b'Hello World!\n' status = '200 OK' response_headers = [ ( 'Content-type' , 'text/plain' ) , ( 'Content-Length' , str ( len ( data ) ) ) ] start_response ( status , response_headers ) return iter ( [ data ] ) else : raise MethodNotAllowed
The WSGI_ application handler which returns an iterable over the Hello World! message .
37,334
async def parse_headers ( fp , _class = HTTPMessage ) : headers = [ ] while True : line = await fp . readline ( ) headers . append ( line ) if len ( headers ) > _MAXHEADERS : raise HttpException ( "got more than %d headers" % _MAXHEADERS ) if line in ( b'\r\n' , b'\n' , b'' ) : break hstring = b'' . join ( headers ) . decode ( 'iso-8859-1' ) return email . parser . Parser ( _class = _class ) . parsestr ( hstring )
Parses only RFC2822 headers from a file pointer . email Parser wants to see strings rather than bytes . But a TextIOWrapper around self . rfile would buffer too many bytes from the stream bytes which we later need to read as bytes . So we read the correct bytes here as bytes for email Parser to parse .
37,335
def _waiting_expect ( self ) : if self . _expect_sent is None : if self . environ . get ( 'HTTP_EXPECT' , '' ) . lower ( ) == '100-continue' : return True self . _expect_sent = '' return False
True when the client is waiting for 100 Continue .
37,336
def base64 ( self , charset = None ) : return b64encode ( self . bytes ( ) ) . decode ( charset or self . charset )
Data encoded as base 64
37,337
def feed_data ( self , data ) : if data : self . _bytes . append ( data ) if self . parser . stream : self . parser . stream ( self ) else : self . parser . buffer . extend ( data )
Feed new data into the MultiPart parser or the data stream
37,338
def server ( name = 'proxy-server' , headers_middleware = None , server_software = None , ** kwargs ) : if headers_middleware is None : headers_middleware = [ x_forwarded_for ] wsgi_proxy = ProxyServerWsgiHandler ( headers_middleware ) kwargs [ 'server_software' ] = server_software or SERVER_SOFTWARE return wsgi . WSGIServer ( wsgi_proxy , name = name , ** kwargs )
Function to Create a WSGI Proxy Server .
37,339
async def request ( self ) : request_headers = self . request_headers ( ) environ = self . environ method = environ [ 'REQUEST_METHOD' ] data = None if method in ENCODE_BODY_METHODS : data = DataIterator ( self ) http = self . wsgi . http_client try : await http . request ( method , environ [ 'RAW_URI' ] , data = data , headers = request_headers , version = environ [ 'SERVER_PROTOCOL' ] , pre_request = self . pre_request ) except Exception as exc : self . error ( exc )
Perform the Http request to the upstream server
37,340
def pre_request ( self , response , exc = None ) : if response . request . method == 'CONNECT' : self . start_response ( '200 Connection established' , [ ( 'content-length' , '0' ) ] ) self . future . set_result ( [ b'' ] ) upstream = response . connection dostream = self . connection dostream . upgrade ( partial ( StreamTunnel . create , upstream ) ) upstream . upgrade ( partial ( StreamTunnel . create , dostream ) ) response . fire_event ( 'post_request' ) raise AbortEvent else : response . event ( 'data_processed' ) . bind ( self . data_processed ) response . event ( 'post_request' ) . bind ( self . post_request )
Start the tunnel .
37,341
def connection_lost ( self , exc = None ) : if self . _loop . get_debug ( ) : self . producer . logger . debug ( 'connection lost %s' , self ) self . event ( 'connection_lost' ) . fire ( exc = exc )
Fires the connection_lost event .
37,342
def start ( self , request = None ) : self . connection . processed += 1 self . producer . requests_processed += 1 self . event ( 'post_request' ) . bind ( self . finished_reading ) self . request = request or self . create_request ( ) try : self . fire_event ( 'pre_request' ) except AbortEvent : if self . _loop . get_debug ( ) : self . producer . logger . debug ( 'Abort request %s' , request ) else : self . start_request ( )
Starts processing the request for this protocol consumer .
37,343
def process_global ( name , val = None , setval = False ) : p = current_process ( ) if not hasattr ( p , '_pulsar_globals' ) : p . _pulsar_globals = { 'lock' : Lock ( ) } if setval : p . _pulsar_globals [ name ] = val else : return p . _pulsar_globals . get ( name )
Access and set global variables for the current process .
37,344
def get_environ_proxies ( ) : proxy_keys = [ 'all' , 'http' , 'https' , 'ftp' , 'socks' , 'ws' , 'wss' , 'no' ] def get_proxy ( k ) : return os . environ . get ( k ) or os . environ . get ( k . upper ( ) ) proxies = [ ( key , get_proxy ( key + '_proxy' ) ) for key in proxy_keys ] return dict ( [ ( key , val ) for ( key , val ) in proxies if val ] )
Return a dict of environment proxies . From requests_ .
37,345
def has_vary_header ( response , header_query ) : if not response . has_header ( 'Vary' ) : return False vary_headers = cc_delim_re . split ( response [ 'Vary' ] ) existing_headers = set ( [ header . lower ( ) for header in vary_headers ] ) return header_query . lower ( ) in existing_headers
Checks to see if the response has a given header name in its Vary header .
37,346
def execute ( self , request ) : handle = None if request : request [ 0 ] = command = to_string ( request [ 0 ] ) . lower ( ) info = COMMANDS_INFO . get ( command ) if info : handle = getattr ( self . store , info . method_name ) if self . channels or self . patterns : if command not in self . store . SUBSCRIBE_COMMANDS : return self . reply_error ( self . store . PUBSUB_ONLY ) if self . blocked : return self . reply_error ( 'Blocked client cannot request' ) if self . transaction is not None and command not in 'exec' : self . transaction . append ( ( handle , request ) ) return self . connection . write ( self . store . QUEUED ) self . execute_command ( handle , request )
Execute a new request .
37,347
def handle_cookies ( response , exc = None ) : if exc : return headers = response . headers request = response . request client = request . client response . _cookies = c = SimpleCookie ( ) if 'set-cookie' in headers or 'set-cookie2' in headers : for cookie in ( headers . get ( 'set-cookie2' ) , headers . get ( 'set-cookie' ) ) : if cookie : c . load ( cookie ) if client . store_cookies : client . cookies . extract_cookies ( response , request )
Handle response cookies .
37,348
def on_headers ( self , response , exc = None ) : if response . status_code == 101 : connection = response . connection request = response . request handler = request . websocket_handler if not handler : handler = WS ( ) parser = request . client . frame_parser ( kind = 1 ) consumer = partial ( WebSocketClient . create , response , handler , parser ) connection . upgrade ( consumer ) response . event ( 'post_request' ) . fire ( ) websocket = connection . current_consumer ( ) response . request_again = lambda r : websocket
Websocket upgrade as on_headers event .
37,349
def add2python ( self , module = None , up = 0 , down = None , front = False , must_exist = True ) : if module : try : return import_module ( module ) except ImportError : pass dir = self . dir ( ) . ancestor ( up ) if down : dir = dir . join ( * down ) if dir . isdir ( ) : if dir not in sys . path : if front : sys . path . insert ( 0 , dir ) else : sys . path . append ( dir ) elif must_exist : raise ImportError ( 'Directory {0} not available' . format ( dir ) ) else : return None if module : try : return import_module ( module ) except ImportError : if must_exist : raise
Add a directory to the python path .
37,350
def get ( self , request ) : ul = Html ( 'ul' ) for router in sorted ( self . routes , key = lambda r : r . creation_count ) : a = router . link ( escape ( router . route . path ) ) a . addClass ( router . name ) for method in METHODS : if router . getparam ( method ) : a . addClass ( method ) li = Html ( 'li' , a , ' %s' % router . getparam ( 'title' , '' ) ) ul . append ( li ) title = 'Pulsar' html = request . html_document html . head . title = title html . head . links . append ( 'httpbin.css' ) html . head . links . append ( 'favicon.ico' , rel = "icon" , type = 'image/x-icon' ) html . head . scripts . append ( 'httpbin.js' ) ul = ul . to_string ( request ) templ = asset ( 'template.html' ) body = templ % ( title , JAPANESE , CHINESE , version , pyversion , ul ) html . body . append ( body ) return html . http_response ( request )
The home page of this router
37,351
def stats ( self , request ) : doc = HtmlDocument ( title = 'Live server stats' , media_path = '/assets/' ) return doc . http_response ( request )
Live stats for the server .
37,352
def get_preparation_data ( name ) : d = dict ( name = name , sys_path = sys . path , sys_argv = sys . argv , log_to_stderr = _log_to_stderr , orig_dir = process . ORIGINAL_DIR , authkey = process . current_process ( ) . authkey , ) if _logger is not None : d [ 'log_level' ] = _logger . getEffectiveLevel ( ) if not WINEXE : main_path = getattr ( sys . modules [ '__main__' ] , '__file__' , None ) if not main_path and sys . argv [ 0 ] not in ( '' , '-c' ) : main_path = sys . argv [ 0 ] if main_path is not None : if ( not os . path . isabs ( main_path ) and process . ORIGINAL_DIR is not None ) : main_path = os . path . join ( process . ORIGINAL_DIR , main_path ) if not main_path . endswith ( '.exe' ) : d [ 'main_path' ] = os . path . normpath ( main_path ) return d
Return info about parent needed by child to unpickle process object . Monkey - patch from
37,353
def remote_call ( request , cls , method , args , kw ) : actor = request . actor name = 'remote_%s' % cls . __name__ if not hasattr ( actor , name ) : object = cls ( actor ) setattr ( actor , name , object ) else : object = getattr ( actor , name ) method_name = '%s%s' % ( PREFIX , method ) return getattr ( object , method_name ) ( request , * args , ** kw )
Command for executing remote calls on a remote object
37,354
def clear ( self ) : self . _size = 0 self . _level = 1 self . _head = Node ( 'HEAD' , None , [ None ] * SKIPLIST_MAXLEVEL , [ 1 ] * SKIPLIST_MAXLEVEL )
Clear the container from all data .
37,355
def extend ( self , iterable ) : i = self . insert for score_values in iterable : i ( * score_values )
Extend this skiplist with an iterable over score value pairs .
37,356
def remove_range ( self , start , end , callback = None ) : N = len ( self ) if start < 0 : start = max ( N + start , 0 ) if start >= N : return 0 if end is None : end = N elif end < 0 : end = max ( N + end , 0 ) else : end = min ( end , N ) if start >= end : return 0 node = self . _head index = 0 chain = [ None ] * self . _level for i in range ( self . _level - 1 , - 1 , - 1 ) : while node . next [ i ] and ( index + node . width [ i ] ) <= start : index += node . width [ i ] node = node . next [ i ] chain [ i ] = node node = node . next [ 0 ] initial = self . _size while node and index < end : next = node . next [ 0 ] self . _remove_node ( node , chain ) index += 1 if callback : callback ( node . score , node . value ) node = next return initial - self . _size
Remove a range by rank .
37,357
def remove_range_by_score ( self , minval , maxval , include_min = True , include_max = True , callback = None ) : node = self . _head chain = [ None ] * self . _level if include_min : for i in range ( self . _level - 1 , - 1 , - 1 ) : while node . next [ i ] and node . next [ i ] . score < minval : node = node . next [ i ] chain [ i ] = node else : for i in range ( self . _level - 1 , - 1 , - 1 ) : while node . next [ i ] and node . next [ i ] . score <= minval : node = node . next [ i ] chain [ i ] = node node = node . next [ 0 ] initial = self . _size while node and node . score >= minval : if ( ( include_max and node . score > maxval ) or ( not include_max and node . score >= maxval ) ) : break next = node . next [ 0 ] self . _remove_node ( node , chain ) if callback : callback ( node . score , node . value ) node = next return initial - self . _size
Remove a range with scores between minval and maxval .
37,358
def count ( self , minval , maxval , include_min = True , include_max = True ) : rank1 = self . rank ( minval ) if rank1 < 0 : rank1 = - rank1 - 1 elif not include_min : rank1 += 1 rank2 = self . rank ( maxval ) if rank2 < 0 : rank2 = - rank2 - 1 elif include_max : rank2 += 1 return max ( rank2 - rank1 , 0 )
Returns the number of elements in the skiplist with a score between min and max .
37,359
def quality ( self , key ) : for item , quality in self : if self . _value_matches ( key , item ) : return quality return 0
Returns the quality of the key .
37,360
def to_header ( self ) : result = [ ] for value , quality in self : if quality != 1 : value = '%s;q=%s' % ( value , quality ) result . append ( value ) return ',' . join ( result )
Convert the header set into an HTTP header string .
37,361
def best_match ( self , matches , default = None ) : if matches : best_quality = - 1 result = default for client_item , quality in self : for server_item in matches : if quality <= best_quality : break if self . _value_matches ( server_item , client_item ) : best_quality = quality result = server_item return result else : return self . best
Returns the best match from a list of possible matches based on the quality of the client . If two items have the same quality the one is returned that comes first .
37,362
def convert_bytes ( b ) : if b is None : return '#NA' for s in reversed ( memory_symbols ) : if b >= memory_size [ s ] : value = float ( b ) / memory_size [ s ] return '%.1f%sB' % ( value , s ) return "%sB" % b
Convert a number of bytes into a human readable memory usage bytes kilo mega giga tera peta exa zetta yotta
37,363
def process_info ( pid = None ) : if psutil is None : return { } pid = pid or os . getpid ( ) try : p = psutil . Process ( pid ) except psutil . NoSuchProcess : return { } else : mem = p . memory_info ( ) return { 'memory' : convert_bytes ( mem . rss ) , 'memory_virtual' : convert_bytes ( mem . vms ) , 'cpu_percent' : p . cpu_percent ( ) , 'nice' : p . nice ( ) , 'num_threads' : p . num_threads ( ) }
Returns a dictionary of system information for the process pid .
37,364
async def start_serving ( self , address = None , sockets = None , backlog = 100 , sslcontext = None ) : if self . _server : raise RuntimeError ( 'Already serving' ) create_server = self . _loop . create_server server = None if sockets : for sock in sockets : srv = await create_server ( self . create_protocol , sock = sock , backlog = backlog , ssl = sslcontext ) if server : server . sockets . extend ( srv . sockets ) else : server = srv elif isinstance ( address , tuple ) : server = await create_server ( self . create_protocol , host = address [ 0 ] , port = address [ 1 ] , backlog = backlog , ssl = sslcontext ) else : raise RuntimeError ( 'sockets or address must be supplied' ) self . _set_server ( server )
Start serving .
37,365
def _close_connections ( self , connection = None , timeout = 5 ) : all = [ ] if connection : waiter = connection . event ( 'connection_lost' ) . waiter ( ) if waiter : all . append ( waiter ) connection . close ( ) else : connections = list ( self . _concurrent_connections ) self . _concurrent_connections = set ( ) for connection in connections : waiter = connection . event ( 'connection_lost' ) . waiter ( ) if waiter : all . append ( waiter ) connection . close ( ) if all : self . logger . info ( '%s closing %d connections' , self , len ( all ) ) return asyncio . wait ( all , timeout = timeout , loop = self . _loop )
Close connection if specified otherwise close all connections .
37,366
async def start_serving ( self , address = None , sockets = None , ** kw ) : if self . _server : raise RuntimeError ( 'Already serving' ) server = DGServer ( self . _loop ) loop = self . _loop if sockets : for sock in sockets : transport , _ = await loop . create_datagram_endpoint ( self . create_protocol , sock = sock ) server . transports . append ( transport ) elif isinstance ( address , tuple ) : transport , _ = await loop . create_datagram_endpoint ( self . create_protocol , local_addr = address ) server . transports . append ( transport ) else : raise RuntimeError ( 'sockets or address must be supplied' ) self . _set_server ( server )
create the server endpoint .
37,367
async def monitor_start ( self , monitor ) : cfg = self . cfg if ( not platform . has_multiprocessing_socket or cfg . concurrency == 'thread' ) : cfg . set ( 'workers' , 0 ) servers = await self . binds ( monitor ) if not servers : raise ImproperlyConfigured ( 'Could not open a socket. ' 'No address to bind to' ) addresses = [ ] for server in servers . values ( ) : addresses . extend ( server . addresses ) self . cfg . addresses = addresses
Create the socket listening to the bind address .
37,368
async def create_server ( self , worker , protocol_factory , address = None , sockets = None , idx = 0 ) : cfg = self . cfg max_requests = cfg . max_requests if max_requests : max_requests = int ( lognormvariate ( log ( max_requests ) , 0.2 ) ) server = self . server_factory ( protocol_factory , loop = worker . _loop , max_requests = max_requests , keep_alive = cfg . keep_alive , name = self . name , logger = self . logger , server_software = cfg . server_software , cfg = cfg , idx = idx ) for event in ( 'connection_made' , 'pre_request' , 'post_request' , 'connection_lost' ) : callback = getattr ( cfg , event ) if callback != pass_through : server . event ( event ) . bind ( callback ) await server . start_serving ( sockets = sockets , address = address , backlog = cfg . backlog , sslcontext = self . sslcontext ( ) ) return server
Create the Server which will listen for requests .
37,369
def channels ( self , pattern = None ) : if pattern : return self . store . execute ( 'PUBSUB' , 'CHANNELS' , pattern ) else : return self . store . execute ( 'PUBSUB' , 'CHANNELS' )
Lists the currently active channels matching pattern
37,370
def lock ( self , name , ** kwargs ) : return self . pubsub . store . client ( ) . lock ( self . prefixed ( name ) , ** kwargs )
Global distributed lock
37,371
async def publish(self, channel, event, data=None):
    """Publish *event* (with optional *data*) on *channel*.

    The message is a dict with ``event`` and ``channel`` keys plus an
    optional ``data`` payload.  A refused connection is recorded on
    ``self.connection_error`` and logged as critical; on success the
    ``connection_ok`` hook resets the error state.
    """
    msg = {'event': event, 'channel': channel}
    if data:
        # NOTE(review): falsy payloads (0, '', {}) are silently dropped
        # here - confirm this should not be `data is not None`.
        msg['data'] = data
    try:
        await self.pubsub.publish(self.prefixed(channel), msg)
    except ConnectionRefusedError:
        self.connection_error = True
        self.logger.critical(
            '%s cannot publish on "%s" channel - connection error',
            self, channel)
    else:
        self.connection_ok()
Publish a new event on a channel
37,372
async def close(self):
    """Close channels and the underlying pubsub handler.

    The status is flipped to ``closed`` first so reconnect logic does
    not kick in while tearing down; the ``connection_lost`` callback is
    unbound from the push connection (when present) and finally the
    pubsub client itself is closed.
    """
    push_connection = self.pubsub.push_connection
    self.status = self.statusType.closed
    if push_connection:
        push_connection.event('connection_lost').unbind(
            self._connection_lost)
    await self.pubsub.close()
Close channels and underlying pubsub handler
37,373
def origin_req_host(self):
    """Origin host of the request (required by cookie handlers).

    For a redirected request this is the host of the first request in
    the redirect history; otherwise it is derived from the url.
    """
    history = self.history
    if history:
        return history[0].request.origin_req_host
    return scheme_host_port(self.url)[1]
Required by Cookies handlers
37,374
def get_header(self, header_name, default=None):
    """Retrieve *header_name* from this request's headers.

    Falls back to the unredirected headers and then to *default*.
    """
    fallback = self.unredirected_headers.get(header_name, default)
    return self.headers.get(header_name, fallback)
Retrieve header_name from this request headers .
37,375
def remove_header(self, header_name):
    """Remove *header_name* from both header stores of this request.

    Returns the value removed from the main headers when present,
    otherwise the value removed from the unredirected headers.
    """
    from_headers = self.headers.pop(header_name, None)
    from_unredirected = self.unredirected_headers.pop(header_name, None)
    return from_headers or from_unredirected
Remove header_name from this request .
37,376
def raw(self):
    """Lazily-created raw asynchronous HTTP response stream."""
    stream = self._raw
    if stream is None:
        stream = self._raw = HttpStream(self)
    return stream
A raw asynchronous Http response
37,377
def links(self):
    """Parsed ``Link`` header of the response, keyed by rel (or url).

    Returns an empty dict when the response carries no link header.
    """
    header = (self.headers or {}).get('link')
    if not header:
        return {}
    return {
        link.get('rel') or link.get('url'): link
        for link in parse_header_links(header)
    }
Returns the parsed header links of the response if any
37,378
def text(self):
    """Decode the response content as a string.

    Uses the detected encoding, defaulting to utf-8; an empty or
    missing body decodes to the empty string.
    """
    content = self.content
    if not content:
        return ''
    return content.decode(self.encoding or 'utf-8')
Decode content as a string .
37,379
def decode_content(self):
    """Return the best possible representation of the response body.

    Dispatches on the ``content-type`` header:

    * JSON content types are decoded via :meth:`json`;
    * ``text/*`` types return the decoded :attr:`text`;
    * form-url-encoded bodies are parsed into a list of pairs;
    * anything else (or no content type) returns the raw bytes.
    """
    ct = self.headers.get('content-type')
    if ct:
        ct, options = parse_options_header(ct)
        # Fall back to utf-8 when the header carries no charset;
        # previously a missing charset made bytes.decode(None) raise
        # TypeError in the form-urlencoded branch.
        charset = options.get('charset') or 'utf-8'
        if ct in JSON_CONTENT_TYPES:
            return self.json()
        elif ct.startswith('text/'):
            return self.text
        elif ct == FORM_URL_ENCODED:
            return parse_qsl(self.content.decode(charset),
                             keep_blank_values=True)
    return self.content
Return the best possible representation of the response body .
37,380
def request(self, method, url, **params):
    """Construct and send a request to a remote server.

    When the event loop is not running, the request is executed
    synchronously and its result returned; otherwise the awaitable
    response is returned for the caller to await.
    """
    response = self._request(method, url, **params)
    if self._loop.is_running():
        return response
    return self._loop.run_until_complete(response)
Constructs and sends a request to a remote server .
37,381
def ssl_context(self, verify=True, cert_reqs=None, check_hostname=False,
                certfile=None, keyfile=None, cafile=None, capath=None,
                cadata=None, **kw):
    """Create an SSL context object for client connections.

    :param verify: ``True`` enables certificate verification against
        the default CA bundle; a string is treated as a path to either
        a CA bundle file or a directory of CA certificates.
    :param cert_reqs: explicit certificate requirement, overridden by
        *verify* when that is ``True`` or a string.
    :param check_hostname: whether to match the server hostname.
    :param certfile: client certificate file.
    :param keyfile: client private key file.
    :param cafile: CA bundle file (defaults to the bundled path).
    :param capath: directory of CA certificates.
    :param cadata: in-memory CA data.
    """
    assert ssl, 'SSL not supported'
    cafile = cafile or DEFAULT_CA_BUNDLE_PATH
    if verify is True:
        cert_reqs = ssl.CERT_REQUIRED
        check_hostname = True
    if isinstance(verify, str):
        cert_reqs = ssl.CERT_REQUIRED
        if os.path.isfile(verify):
            cafile = verify
        elif os.path.isdir(verify):
            capath = verify
    # NOTE(review): this uses the private ssl._create_unverified_context
    # helper (configured here with explicit cert_reqs/check_hostname);
    # the public ssl.create_default_context API may be preferable -
    # confirm intent before changing. Extra **kw is accepted but unused.
    return ssl._create_unverified_context(cert_reqs=cert_reqs,
                                          check_hostname=check_hostname,
                                          certfile=certfile,
                                          keyfile=keyfile,
                                          cafile=cafile, capath=capath,
                                          cadata=cadata)
Create a SSL context object .
37,382
async def create_tunnel_connection(self, req):
    """Establish a connection through an HTTP tunnel (CONNECT proxy).

    Connects to the tunnel (proxy) address, performs the tunnel
    handshake and, once the proxy replies 200, duplicates the raw
    socket and re-creates the connection over it with TLS towards the
    target host.

    :raises ConnectionRefusedError: if the proxy does not answer 200.
    :raises RuntimeError: if the transport exposes no raw socket.
    """
    tunnel_address = req.tunnel_address
    connection = await self.create_connection(tunnel_address)
    response = connection.current_consumer()
    # Clear previously fired events so the waiters below block until
    # this tunnel exchange completes.
    for event in response.events().values():
        event.clear()
    response.start(HttpTunnel(self, req))
    await response.event('post_request').waiter()
    if response.status_code != 200:
        raise ConnectionRefusedError(
            'Cannot connect to tunnel: status code %s' %
            response.status_code)
    raw_sock = connection.transport.get_extra_info('socket')
    if raw_sock is None:
        raise RuntimeError('Transport without socket')
    # Duplicate the socket so it survives closing the proxy transport,
    # then wait for that transport to be fully torn down.
    raw_sock = raw_sock.dup()
    connection.transport.close()
    await connection.event('connection_lost').waiter()
    # Undo the bookkeeping of the throw-away proxy connection.
    self.sessions -= 1
    self.requests_processed -= 1
    connection = await self.create_connection(
        sock=raw_sock, ssl=req.ssl(self), server_hostname=req.netloc)
    return connection
Create a tunnel connection
37,383
def python_path(self, script):
    """Return the real path of the application script.

    When *script* is falsy the ``__main__`` module file is used (or
    ``None`` if it cannot be determined).  Unless the ``python_path``
    config flag is off, the script directory is prepended to
    ``sys.path``.
    """
    if not script:
        try:
            import __main__
            script = getfile(__main__)
        except Exception:
            return None
    script = os.path.realpath(script)
    if self.cfg.get('python_path', True):
        directory = os.path.dirname(script)
        if directory not in sys.path:
            sys.path.insert(0, directory)
    return script
Called during initialisation to obtain the script name .
37,384
def start(self, exit=True):
    """Invoke the application callable and start the arbiter.

    The arbiter is started only when the application callable returned
    a truthy value and an arbiter is available.  Returns the arbiter
    exit code when one is set, otherwise the value returned by the
    callable.
    """
    on_start = self()
    actor = arbiter()
    if actor and on_start:
        # This blocks until the arbiter stops (when exit is True the
        # process exits from within).
        actor.start(exit=exit)
        if actor.exit_code is not None:
            return actor.exit_code
    return on_start
Invoke the application callable and start the arbiter if it wasn't already started.
37,385
def stop(self, actor=None):
    """Stop the application monitor.

    Must run in (or be given) the arbiter; looks up the monitor
    registered under this application's name and stops it.

    :raises RuntimeError: when no arbiter or monitor is available.
    """
    if actor is None:
        actor = get_actor()
    if actor and actor.is_arbiter():
        monitor = actor.get_actor(self.name)
        if monitor:
            return monitor.stop()
    raise RuntimeError('Cannot stop application')
Stop the application
37,386
def set_owner_process(uid, gid):
    """Set the user and group of worker processes.

    The group is changed first so the process still has permission to
    change it after dropping the user id.
    """
    if gid:
        try:
            os.setgid(gid)
        except OverflowError:
            # gid may arrive as an unsigned value too large for the
            # C-level API; reinterpret it as a signed 32-bit int.
            os.setgid(-ctypes.c_int(-gid).value)
    if uid:
        os.setuid(uid)
set user and group of workers processes
37,387
def wait(value, must_be_child=False):
    """Wait for a possibly asynchronous *value* to complete.

    When running inside a child greenlet the value is handed to the
    parent (event-loop) greenlet to be awaited; in the main greenlet
    the value is returned unchanged, unless *must_be_child* is set in
    which case :class:`MustBeInChildGreenlet` is raised.
    """
    parent = getcurrent().parent
    if not parent:
        if must_be_child:
            raise MustBeInChildGreenlet('Cannot wait on main greenlet')
        return value
    return parent.switch(value)
Wait for a possible asynchronous value to complete .
37,388
def run_in_greenlet(callable):
    """Decorator running *callable* on a new greenlet.

    The returned coroutine starts the greenlet; every time the greenlet
    switches out an awaitable (via a ``wait``-style call), it is
    awaited on the asyncio side and the result - or the raised
    exception - is switched back in, until the greenlet produces a
    non-awaitable result.
    """
    @wraps(callable)
    async def _(*args, **kwargs):
        green = greenlet(callable)
        # Start the greenlet; it runs until it yields an awaitable or
        # finishes outright.
        result = green.switch(*args, **kwargs)
        while isawaitable(result):
            try:
                result = green.switch((await result))
            except Exception:
                # Re-raise the failure inside the greenlet so the
                # callable can handle it at its wait() call site.
                exc_info = sys.exc_info()
                result = green.throw(*exc_info)
        # NOTE(review): the greenlet is expected to be finished here,
        # so this final switch returns `result` - confirm no callable
        # yields a non-awaitable mid-run.
        return green.switch(result)
    return _
Decorator to run a callable on a new greenlet .
37,389
def build_response(content, code=200):
    """Build a JSON response with permissive CORS headers.

    :param content: mapping serialised to JSON; its ``'code'`` entry,
        when present, determines the HTTP status.
    :param code: fallback HTTP status used when *content* has no
        ``'code'`` key (previously this argument was ignored and a
        missing key raised ``KeyError``).
    """
    response = make_response(jsonify(content), content.get('code', code))
    response.headers['Access-Control-Allow-Origin'] = '*'
    response.headers['Access-Control-Allow-Headers'] = 'Origin, X-Requested-With, Content-Type, Accept, Authorization'
    return response
Build response add headers
37,390
def post(self):
    """Format or execute raw SQL and return the result to the client.

    The JSON payload carries ``options`` (``'format'``, ``'all'`` or
    ``'selected'``) and the raw SQL text ``sql_raw``.
    """
    data = request.get_json()
    options, sql_raw = data.get('options'), data.get('sql_raw')
    if options == 'format':
        # Pretty-print only; nothing is executed.
        sql_formmated = sqlparse.format(sql_raw, keyword_case='upper',
                                        reindent=True)
        return build_response(dict(data=sql_formmated, code=200))
    elif options in ('all', 'selected'):
        conn = SQL(config.sql_host, config.sql_port, config.sql_user,
                   config.sql_pwd, config.sql_db)
        result = conn.run(sql_raw)
        return build_response(dict(data=result, code=200))
    # Previously unknown options fell through two dead `pass`
    # statements and returned None (a server error for the client);
    # report the problem explicitly instead.
    return build_response(dict(data='unknown options: %r' % options,
                               code=400))
Return the executed SQL result to the client.
37,391
def get(self, page=0, size=10):
    """Return dashboard meta info for page *page* with page size *size*.

    Dashboard ids are read (newest first) from the ranked id set, the
    requested slice is taken and the corresponding meta records are
    fetched and JSON-decoded.
    """
    ranked = r_db.zrevrange(config.DASH_ID_KEY, 0, -1, True)
    start = page * size
    id_list = ranked[start:start + size]
    data = []
    if id_list:
        metas = r_db.hmget(config.DASH_META_KEY,
                           [pair[0] for pair in id_list])
        data = [json.loads(meta) for meta in metas]
    return build_response(dict(data=data, code=200))
Get dashboard meta info from in page page and page size is size .
37,392
def get(self):
    """Return the sorted list of keys in the key-value storage."""
    return build_response(dict(data=sorted(r_kv.keys()), code=200))
Get key list in storage .
37,393
def get(self, key):
    """Return the value stored under *key* in the key-value storage."""
    value = r_kv.get(key)
    return build_response(dict(data=value, code=200))
Get a key - value from storage according to the key name .
37,394
def get(self, dash_id):
    """Render the dashboard page for *dash_id*.

    Only the id and API root are injected; content is loaded by the
    client from the API.
    """
    html = render_template('dashboard.html', dash_id=dash_id,
                           api_root=config.app_host)
    return make_response(html)
Just return the dashboard id in the rendering html .
37,395
def get(self, dash_id):
    """Read and return the dashboard content for *dash_id*."""
    raw = r_db.hmget(config.DASH_CONTENT_KEY, dash_id)[0]
    return build_response(dict(data=json.loads(raw), code=200))
Read dashboard content .
37,396
def put(self, dash_id=0):
    """Update a dashboard's meta and content from the JSON payload.

    Returns the updated dashboard content.
    """
    payload = request.get_json()
    updated = self._update_dash(dash_id, payload)
    return build_response(dict(data=updated, code=200))
Update a dash meta and content return updated dash content .
37,397
def delete(self, dash_id):
    """Delete a dashboard's meta and content.

    The current score, meta and content are snapshotted before removal
    and returned under ``removed_info``.
    """
    removed_info = {
        'time_modified': r_db.zscore(config.DASH_ID_KEY, dash_id),
        'meta': r_db.hget(config.DASH_META_KEY, dash_id),
        'content': r_db.hget(config.DASH_CONTENT_KEY, dash_id),
    }
    # Snapshot first, then remove the id from every index.
    for remover, key in ((r_db.zrem, config.DASH_ID_KEY),
                         (r_db.hdel, config.DASH_META_KEY),
                         (r_db.hdel, config.DASH_CONTENT_KEY)):
        remover(key, dash_id)
    return {'removed_info': removed_info}
Delete a dash meta and content return updated dash content .
37,398
def main(lang='deu', n=900, epochs=50, batch_size=64, num_neurons=256,
         encoder_input_data=None, decoder_input_data=None,
         decoder_target_data=None,
         checkpoint_dir=os.path.join(BIGDATA_PATH, 'checkpoints'),
         ):
    """Train an LSTM encoder-decoder sequence-to-sequence translation
    model on Anki flashcards.

    Cached one-hot training arrays are loaded from *checkpoint_dir*
    when present and large enough; otherwise they are regenerated and
    truncated to *n* samples.  Returns the fitted model.
    """
    mkdir_p(checkpoint_dir)
    encoder_input_path = os.path.join(
        checkpoint_dir,
        'nlpia-ch10-translate-input-{}.npy'.format(lang))
    decoder_input_path = os.path.join(
        checkpoint_dir,
        'nlpia-ch10-translate-decoder-input-{}.npy'.format(lang))
    decoder_target_path = os.path.join(
        checkpoint_dir,
        'nlpia-ch10-translate-target-{}.npy'.format('eng'))
    data_paths = (encoder_input_path, decoder_input_path,
                  decoder_target_path)
    # NOTE(review): the encoder_input_data/decoder_* arguments are
    # effectively ignored - they are unconditionally overwritten below;
    # confirm whether passing data in was meant to skip the cache.
    encoder_input_data = []
    if all([os.path.isfile(p) for p in data_paths]):
        encoder_input_data = np.load(encoder_input_path)
        decoder_input_data = np.load(decoder_input_path)
        decoder_target_data = np.load(decoder_target_path)
    if len(encoder_input_data) < n:
        # Cache missing or too small: rebuild the one-hot arrays.
        encoder_input_data, decoder_input_data, decoder_target_data = onehot_char_training_data(lang=lang, n=n, data_paths=data_paths)
    encoder_input_data = encoder_input_data[:n]
    decoder_input_data = decoder_input_data[:n]
    decoder_target_data = decoder_target_data[:n]
    model = fit(data_paths=data_paths, epochs=epochs,
                batch_size=batch_size, num_neurons=num_neurons)
    return model
Train an LSTM encoder-decoder sequence-to-sequence model on Anki flashcards for machine translation between languages.
37,399
def energy(self, v, h=None):
    """Compute the global energy for the current joint state of all nodes.

    ``E = -(v.bv + h.bh + sum_{i<j} v_i v_j Wvv[i,j]
            + sum_{i,k} v_i h_k Wvh[i,k] + sum_{k<l} h_k h_l Whh[k,l])``

    The original triple-nested Python loops are replaced with
    equivalent vectorized NumPy expressions (upper-triangular masks
    select the i<j / k<l pairs, matching the original loop bounds).

    :param v: visible unit state vector (length ``self.Nv``).
    :param h: hidden unit state vector (length ``self.Nh``); defaults
        to all zeros.
    :returns: the scalar energy of the configuration.
    """
    h = np.zeros(self.Nh) if h is None else h
    v = np.asarray(v)
    h = np.asarray(h)
    negE = np.dot(v, self.bv) + np.dot(h, self.bh)
    # Pairwise visible-visible interactions, upper triangle (i < j).
    negE += np.sum(np.triu(np.asarray(self.Wvv), k=1) * np.outer(v, v))
    # All visible-hidden interactions.
    negE += np.dot(v, np.dot(np.asarray(self.Wvh), h))
    # Pairwise hidden-hidden interactions, upper triangle (k < l).
    negE += np.sum(np.triu(np.asarray(self.Whh), k=1) * np.outer(h, h))
    return -negE
Compute the global energy for the current joint state of all nodes