idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
37,000
def limit(self, limit):
    """Apply a LIMIT to the query and return the resulting new Query."""
    clone = self._copy()
    clone._limit = limit
    return clone
Apply a LIMIT to the query and return the newly resulting Query .
37,001
def offset(self, offset):
    """Apply an OFFSET to the query and return the resulting new Query."""
    clone = self._copy()
    clone._offset = offset
    return clone
Apply an OFFSET to the query and return the newly resulting Query .
37,002
def one(self):
    """Return exactly one matching record.

    Raises NoResultFound when there is no match and MultipleResultsFound
    when more than one record matches (only two rows are fetched to test).
    """
    records = self.rpc_model.search_read(
        self.domain, 2, None, self._order_by, self.fields,
        context=self.context)
    if not records:
        raise fulfil_client.exc.NoResultFound
    if len(records) > 1:
        raise fulfil_client.exc.MultipleResultsFound
    return records[0]
Return exactly one result or raise an exception .
37,003
def order_by(self, *criterion):
    """Apply one or more ORDER BY criteria and return the new Query."""
    clone = self._copy()
    clone._order_by = criterion
    return clone
apply one or more ORDER BY criterion to the query and return the newly resulting Query
37,004
def delete(self):
    """Delete every record matched by the query's domain (no-op if none)."""
    matching = self.rpc_model.search(self.domain, context=self.context)
    if not matching:
        return
    self.rpc_model.delete(matching)
Delete all records matching the query .
37,005
def _logged_in_successful ( data ) : if re . match ( r'^:(testserver\.local|tmi\.twitch\.tv)' r' NOTICE \* :' r'(Login unsuccessful|Error logging in)*$' , data . strip ( ) ) : return False else : return True
Test the login status from the returned communication of the server .
37,006
def connect(self):
    """Connect and authenticate to the Twitch IRC server.

    Opens a TCP socket, sends PASS/NICK, verifies the login response,
    switches the socket to non-blocking mode and joins the user's own
    channel. Raises IOError if Twitch rejects the credentials.
    """
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    connect_host = "irc.twitch.tv"
    connect_port = 6667
    try:
        s.connect((connect_host, connect_port))
    except (Exception, IOError):
        print("Unable to create a socket to %s:%s" % (connect_host, connect_port))
        raise
    # Authenticate: OAuth token as PASS, then the nickname.
    s.send(('PASS %s\r\n' % self.oauth).encode('utf-8'))
    s.send(('NICK %s\r\n' % self.username).encode('utf-8'))
    if self.verbose:
        print('PASS %s\r\n' % self.oauth)
        print('NICK %s\r\n' % self.username)
    received = s.recv(1024).decode()
    if self.verbose:
        print(received)
    if not TwitchChatStream._logged_in_successful(received):
        raise IOError("Twitch did not accept the username-oauth "
                      "combination")
    else:
        # Non-blocking so later receive polls do not stall the stream.
        fcntl.fcntl(s, fcntl.F_SETFL, os.O_NONBLOCK)
        # Replace any previously open socket before adopting the new one.
        if self.s is not None:
            self.s.close()
        self.s = s
        self.join_channel(self.username)
        # Busy-poll until the server confirms the channel switch.
        while self.current_channel != self.username:
            self.twitch_receive_messages()
Connect to Twitch
37,007
def _push_from_buffer ( self ) : if len ( self . buffer ) > 0 : if time . time ( ) - self . last_sent_time > 5 : try : message = self . buffer . pop ( 0 ) self . s . send ( message . encode ( 'utf-8' ) ) if self . verbose : print ( message ) finally : self . last_sent_time = time . time ( )
Push a message on the stack to the IRC stream . This is necessary to avoid Twitch overflow control .
37,008
def join_channel(self, channel):
    """Join a Twitch chat channel.

    Returns immediately; the actual switch may take a moment server-side.
    """
    message = 'JOIN #%s\r\n' % channel
    self.s.send(message.encode('utf-8'))
    if self.verbose:
        print(message)
Join a different chat channel on Twitch . Note this function returns immediately but the switch might take a moment
37,009
def _parse_message(self, data):
    """Parse one raw IRC line received from the socket.

    Handles PING (answers with PONG), tracks JOIN confirmations to update
    the current channel, and returns a dict with 'channel', 'username' and
    'message' for PRIVMSG lines, or None otherwise.
    """
    if TwitchChatStream._check_has_ping(data):
        self._send_pong()
    if TwitchChatStream._check_has_channel(data):
        self.current_channel = TwitchChatStream._check_has_channel(data)[0]
    if TwitchChatStream._check_has_message(data):
        return {
            'channel': re.findall(r'^:.+![a-zA-Z0-9_]+'
                                  r'@[a-zA-Z0-9_]+'
                                  r'.+ '
                                  r'PRIVMSG (.*?) :', data)[0],
            'username': re.findall(r'^:([a-zA-Z0-9_]+)!', data)[0],
            # NOTE(review): .decode('utf8') on a findall() result implies
            # *data* is bytes here under Python 2; on Python 3 (str input)
            # this would raise AttributeError — confirm intended runtime.
            'message': re.findall(r'PRIVMSG #[a-zA-Z0-9_]+ :(.+)', data)[0]
            .decode('utf8')
        }
    else:
        return None
Parse the bytes received from the socket .
37,010
def reset(self):
    """Reset the video stream by (re)starting the ffmpeg subprocess."""
    if self.ffmpeg_process is not None:
        # Ask any previous encoder to shut down cleanly; ignore if it is
        # already gone.
        try:
            self.ffmpeg_process.send_signal(signal.SIGINT)
        except OSError:
            pass
    command = []
    # Input 0: raw RGB frames read from the video named pipe.
    command.extend([
        self.ffmpeg_binary,
        '-loglevel', 'verbose',
        '-y',
        '-analyzeduration', '1',
        '-f', 'rawvideo',
        '-r', '%d' % self.fps,
        '-vcodec', 'rawvideo',
        '-s', '%dx%d' % (self.width, self.height),
        '-pix_fmt', 'rgb24',
        '-thread_queue_size', '1024',
        '-i', '/tmp/videopipe',
    ])
    if self.audio_enabled:
        # Input 1: signed 16-bit little-endian stereo PCM from the audio pipe.
        command.extend([
            '-ar', '%d' % AUDIORATE,
            '-ac', '2',
            '-f', 's16le',
            '-thread_queue_size', '1024',
            '-i', '/tmp/audiopipe'
        ])
    else:
        # No audio source configured: feed silence from /dev/zero instead.
        command.extend([
            '-ar', '8000',
            '-ac', '1',
            '-f', 's16le',
            '-i', '/dev/zero',
        ])
    # Output: H.264 video + MP3 audio muxed as FLV, pushed to the Twitch
    # RTMP ingest endpoint. Constant 3000k video bitrate, low-latency tune.
    command.extend([
        '-vcodec', 'libx264',
        '-r', '%d' % self.fps,
        '-b:v', '3000k',
        '-s', '%dx%d' % (self.width, self.height),
        '-preset', 'faster',
        '-tune', 'zerolatency',
        '-crf', '23',
        '-pix_fmt', 'yuv420p',
        '-minrate', '3000k',
        '-maxrate', '3000k',
        '-bufsize', '12000k',
        '-g', '60',
        '-keyint_min', '1',
        '-acodec', 'libmp3lame',
        '-ar', '44100',
        '-b:a', '160k',
        '-ac', '1',
        '-map', '0:v',
        '-map', '1:a',
        '-threads', '2',
        '-f', 'flv',
        'rtmp://live-ams.twitch.tv/app/%s' % self.twitch_stream_key
    ])
    # Silence ffmpeg's chatter unless verbose mode is on.
    devnullpipe = open("/dev/null", "w")
    if self.verbose:
        devnullpipe = None
    self.ffmpeg_process = subprocess.Popen(
        command, stdin=subprocess.PIPE, stderr=devnullpipe, stdout=devnullpipe)
Reset the videostream by restarting ffmpeg
37,011
def send_audio(self, left_channel, right_channel):
    """Write one frame of stereo audio samples to the stream.

    Both channels must be 1-D numpy arrays of equal shape with samples in
    [-1.0, 1.0]; they are interleaved and written as int16 PCM. Raises
    OSError when the stream (FIFO) is closed.
    """
    if self.audio_pipe is None:
        # Lazily create and open the named pipe on first use.
        if not os.path.exists('/tmp/audiopipe'):
            os.mkfifo('/tmp/audiopipe')
        self.audio_pipe = os.open('/tmp/audiopipe', os.O_WRONLY)
    assert len(left_channel.shape) == 1
    assert left_channel.shape == right_channel.shape
    # Interleave L/R samples and convert to clipped 16-bit integers.
    frame = np.column_stack((left_channel, right_channel)).flatten()
    frame = np.clip(32767 * frame, -32767, 32767).astype('int16')
    # tobytes() replaces the deprecated (and since removed) ndarray.tostring()
    # alias; the original bare `except OSError: raise` added nothing.
    os.write(self.audio_pipe, frame.tobytes())
Add the audio samples to the stream . The left and the right channel should have the same shape . Raises an OSError when the stream is closed .
37,012
def import_attribute(self, path):
    """Import and return the attribute named by the dotted *path*."""
    module_path, _, attr_name = path.rpartition('.')
    module = importlib.import_module(module_path)
    return getattr(module, attr_name)
Import an attribute from a module .
37,013
def inactive_response(self, request):
    """Redirect to LOGIN_INACTIVE_REDIRECT_URL if configured, else show an error."""
    inactive_url = getattr(settings, 'LOGIN_INACTIVE_REDIRECT_URL', '')
    if not inactive_url:
        return self.error_to_response(
            request,
            {'error': _("This user account is marked as inactive.")})
    return HttpResponseRedirect(inactive_url)
Return an inactive message .
37,014
def create_profile(self, user, save=False, **kwargs):
    """Build a profile model instance for *user*; persist it only when *save*."""
    model = self.get_model()
    profile = model(user=user, **kwargs)
    if save:
        profile.save()
    return profile
Create a profile model .
37,015
def get_or_create_profile(self, user, save=False, **kwargs):
    """Return ``(profile, created)`` — fetch from the DB or create a new one."""
    model = self.get_model()
    try:
        return model.objects.get(user=user, **kwargs), False
    except model.DoesNotExist:
        return self.create_profile(user, save=save, **kwargs), True
Return a profile from DB or if there is none create a new one .
37,016
def request_access_token(self, params):
    """Fetch the access token with a GET request.

    Foursquare rejects POST requests for token retrieval, hence GET.
    """
    endpoint = self.access_token_url
    return self.request(endpoint, method="GET", params=params)
Foursquare does not accept POST requests to retrieve an access token, so we'll be doing a GET request instead.
37,017
def get_initial_data(self, request, user, profile, client):
    """Return initial data for the setup form.

    Delegates to the function configured via
    SOCIALREGISTRATION_INITIAL_DATA_FUNCTION, or returns {} when unset.
    """
    if not INITAL_DATA_FUNCTION:
        return {}
    func = self.import_attribute(INITAL_DATA_FUNCTION)
    return func(request, user, profile, client)
Return initial data for the setup form . The function can be controlled with SOCIALREGISTRATION_INITIAL_DATA_FUNCTION .
37,018
def get_context(self, request, user, profile, client):
    """Return extra context for the setup view.

    Delegates to the function configured via
    SOCIALREGISTRATION_SETUP_CONTEXT_FUNCTION, or returns {} when unset.
    """
    if not CONTEXT_FUNCTION:
        return {}
    func = self.import_attribute(CONTEXT_FUNCTION)
    return func(request, user, profile, client)
Return additional context for the setup view . The function can be controlled with SOCIALREGISTRATION_SETUP_CONTEXT_FUNCTION .
37,019
def generate_username_and_redirect(self, request, user, profile, client):
    """Auto-generate a username, finish sign-up, and redirect.

    Used when SOCIALREGISTRATION_GENERATE_USERNAME is set: saves the user
    with an unusable password, links and saves the profile, logs the user
    in, fires the connect/login signals, clears session data and redirects.
    """
    func = self.get_username_function()
    user.username = func(user, profile, client)
    # No local password — authentication happens via the social profile.
    user.set_unusable_password()
    user.save()
    profile.user = user
    profile.save()
    # Re-authenticate so the user object carries the auth backend.
    user = profile.authenticate()
    self.send_connect_signal(request, user, profile, client)
    self.login(request, user)
    self.send_login_signal(request, user, profile, client)
    self.delete_session_data(request)
    return HttpResponseRedirect(self.get_next(request))
Generate a username and then redirect the user to the correct place . This method is called when SOCIALREGISTRATION_GENERATE_USERNAME is set .
37,020
def get(self, request):
    """Sign a new user up: show the setup form or auto-generate a username."""
    # Already signed in — nothing to set up.
    if request.user.is_authenticated():
        return HttpResponseRedirect(self.get_next(request))
    try:
        user, profile, client = self.get_session_data(request)
    except KeyError:
        return self.error_to_response(request, dict(
            error=_("Social profile is missing from your session.")))
    if GENERATE_USERNAME:
        return self.generate_username_and_redirect(request, user, profile, client)
    form = self.get_form()(initial=self.get_initial_data(request, user, profile, client))
    additional_context = self.get_context(request, user, profile, client)
    return self.render_to_response(dict({'form': form}, **additional_context))
When signing a new user up - either display a setup form or generate the username automatically .
37,021
def post(self, request):
    """Validate the setup form, save user and profile, log in, send signals."""
    if request.user.is_authenticated():
        return self.error_to_response(request, dict(
            error=_("You are already logged in.")))
    try:
        user, profile, client = self.get_session_data(request)
    except KeyError:
        return self.error_to_response(request, dict(
            error=_("A social profile is missing from your session.")))
    form = self.get_form()(request.POST, request.FILES,
                           initial=self.get_initial_data(request, user, profile, client))
    if not form.is_valid():
        # Redisplay the form with errors plus any extra context.
        additional_context = self.get_context(request, user, profile, client)
        return self.render_to_response(dict({'form': form}, **additional_context))
    user, profile = form.save(request, user, profile, client)
    # Re-authenticate so the user object carries the auth backend.
    user = profile.authenticate()
    self.send_connect_signal(request, user, profile, client)
    self.login(request, user)
    self.send_login_signal(request, user, profile, client)
    self.delete_session_data(request)
    return HttpResponseRedirect(self.get_next(request))
Save the user and profile login and send the right signals .
37,022
def post(self, request):
    """Create an API client, stash it in the session, and redirect the user
    to the provider's authorization page.

    NOTE(review): Python 2 `except ... , err` syntax — this module targets
    Python 2 only.
    """
    request.session['next'] = self.get_next(request)
    client = self.get_client()()
    request.session[self.get_client().get_session_key()] = client
    url = client.get_redirect_url(request=request)
    logger.debug("Redirecting to %s", url)
    try:
        return HttpResponseRedirect(url)
    except OAuthError, error:
        return self.error_to_response(request, {'error': error})
    except socket.timeout:
        return self.error_to_response(request, {
            'error': _('Could not connect to service (timed out)')})
Create a client store it in the user s session and redirect the user to the API provider to authorize our app and permissions .
37,023
def client(self, verifier=None):
    """Return an oauth client appropriate for the current stage of the flow.

    Access token wins over request token; with neither, a bare consumer
    client is returned. The verifier is attached only at the request-token
    stage.
    """
    if self._access_token:
        return oauth.Client(self.consumer, self._access_token, timeout=TIMEOUT)
    if self._request_token:
        if verifier is not None:
            self._request_token.set_verifier(verifier)
        return oauth.Client(self.consumer, self._request_token, timeout=TIMEOUT)
    return oauth.Client(self.consumer, timeout=TIMEOUT)
Return the correct client depending on which stage of the OAuth process we re in .
37,024
def _get_request_token(self):
    """Fetch an OAuth1 request token from ``self.request_token_url``.

    Raises OAuthError on any non-200 response.
    """
    params = {'oauth_callback': self.get_callback_url()}
    response, content = self.client().request(
        self.request_token_url, "POST", body=urllib.urlencode(params))
    content = smart_unicode(content)
    if not response['status'] == '200':
        raise OAuthError(_(
            u"Invalid status code %s while obtaining request token from %s: %s") % (
            response['status'], self.request_token_url, content))
    # Response body is a urlencoded token pair.
    token = dict(urlparse.parse_qsl(content))
    return oauth.Token(token['oauth_token'], token['oauth_token_secret'])
Fetch a request token from self . request_token_url .
37,025
def _get_access_token(self, verifier=None):
    """Fetch an OAuth1 access token from ``self.access_token_url``.

    Returns ``(oauth.Token, raw_token_dict)``; raises OAuthError on any
    non-200 response.
    """
    response, content = self.client(verifier).request(self.access_token_url, "POST")
    content = smart_unicode(content)
    if not response['status'] == '200':
        raise OAuthError(_(
            u"Invalid status code %s while obtaining access token from %s: %s") % (
            response['status'], self.access_token_url, content))
    # Response body is a urlencoded token pair; keep the raw dict too.
    token = dict(urlparse.parse_qsl(content))
    return (oauth.Token(token['oauth_token'], token['oauth_token_secret']), token)
Fetch an access token from self . access_token_url .
37,026
def get_request_token(self):
    """Return the request token, fetching and caching it on first access."""
    token = self._request_token
    if token is None:
        token = self._get_request_token()
        self._request_token = token
    return token
Return the request token for this API. If we've not fetched it yet, go out, request it, and memoize it.
37,027
def get_access_token(self, verifier=None):
    """Return the access token, fetching and memoizing it on first access.

    Also stores the raw provider response in ``self._access_token_dict``.
    """
    if self._access_token is None:
        token, raw = self._get_access_token(verifier)
        self._access_token = token
        self._access_token_dict = raw
    return self._access_token
Return the access token for this API. If we've not fetched it yet, go out, request it, and memoize it.
37,028
def complete(self, GET):
    """Finish the OAuth1 flow by exchanging the verifier for an access token.

    Raises OAuthError (from get_access_token) if the exchange fails.
    """
    verifier = GET.get('oauth_verifier', None)
    return self.get_access_token(verifier=verifier)
When redirected back to our application, try to complete the flow by requesting an access token; if the access token request fails, it'll throw an OAuthError. Tries to complete the flow by validating against the GET parameters received.
37,029
def request(self, url, method="GET", params=None, headers=None):
    """Make a signed OAuth1 request against *url*.

    Returns the decoded response body; raises OAuthError on any non-200
    status.
    """
    params = params or {}
    headers = headers or {}
    logger.debug("URL: %s", url)
    logger.debug("Method: %s", method)
    logger.debug("Headers: %s", headers)
    logger.debug("Params: %s", params)
    response, content = self.client().request(
        url, method, headers=headers, body=urllib.urlencode(params))
    content = smart_unicode(content)
    logger.debug("Status: %s", response['status'])
    logger.debug("Content: %s", content)
    if response['status'] != '200':
        raise OAuthError(_(
            u"Invalid status code %s while requesting %s: %s") % (
            response['status'], url, content))
    return content
Make signed requests against url .
37,030
def get_redirect_url(self, state='', **kwargs):
    """Assemble the provider authorization URL the user is redirected to.

    Uses the OAuth2 authorization-code flow parameters; *state* is passed
    through for CSRF protection.
    """
    params = {
        'response_type': 'code',
        'client_id': self.client_id,
        'redirect_uri': self.get_callback_url(**kwargs),
        'scope': self.scope or '',
        'state': state,
    }
    return '%s?%s' % (self.auth_url, urllib.urlencode(params))
Assemble the URL to where we ll be redirecting the user to to request permissions .
37,031
def request_access_token(self, params):
    """Request the access token from ``self.access_token_url`` via POST.

    Some providers require GET instead; clients can override this method.
    The request is unsigned because no token exists yet.
    """
    endpoint = self.access_token_url
    return self.request(endpoint, method="POST", params=params, is_signed=False)
Request the access token from self . access_token_url . The default behaviour is to use a POST request but some services use GET requests . Individual clients can override this method to use the correct HTTP method .
37,032
def _get_access_token(self, code, **params):
    """Exchange the authorization *code* for an access-token dict.

    Raises OAuthError when the provider reports an error.
    """
    params.update({
        'code': code,
        'client_id': self.client_id,
        'client_secret': self.secret,
        'redirect_uri': self.get_callback_url(),
    })
    logger.debug("Params: %s", params)
    resp, content = self.request_access_token(params=params)
    content = smart_unicode(content)
    logger.debug("Status: %s", resp['status'])
    logger.debug("Content: %s", content)
    # Provider-specific parsing (urlencoded vs JSON bodies).
    content = self.parse_access_token(content)
    if 'error' in content:
        raise OAuthError(_(
            u"Received error while obtaining access token from %s: %s") % (
            self.access_token_url, content['error']))
    return content
Fetch an access token with the provided code .
37,033
def get_access_token(self, code=None, **params):
    """Return the memoized access token, fetching one with *code* if needed.

    NOTE(review): Python 2 `except ... , e` syntax — this module targets
    Python 2 only.
    """
    if self._access_token is None:
        if code is None:
            raise ValueError(_('Invalid code.'))
        self.access_token_dict = self._get_access_token(code, **params)
        try:
            self._access_token = self.access_token_dict['access_token']
        except KeyError, e:
            raise OAuthError("Credentials could not be validated, the provider returned no access token.")
    return self._access_token
Return the memoized access token or go out and fetch one .
37,034
def complete(self, GET):
    """Complete the OAuth2 flow: exchange the ``code`` GET parameter for a token.

    Raises OAuthError if the provider reported an error in the callback.
    """
    if 'error' in GET:
        message = _("Received error while obtaining access token from %s: %s")
        raise OAuthError(message % (self.access_token_url, GET['error']))
    code = GET.get('code')
    return self.get_access_token(code=code)
Complete the OAuth2 flow by fetching an access token with the provided code in the GET parameters .
37,035
def request(self, url, method="GET", params=None, headers=None, is_signed=True):
    """Make a request against *url*, signed with the access token by default.

    Pass ``is_signed=False`` to skip signing. GET params go in the query
    string; other methods send a urlencoded body.
    """
    params = params or {}
    headers = headers or {}
    if is_signed:
        params.update(self.get_signing_params())
    if method.upper() == "GET":
        url = '%s?%s' % (url, urllib.urlencode(params))
        return self.client().request(url, method=method, headers=headers)
    return self.client().request(url, method, body=urllib.urlencode(params),
                                 headers=headers)
Make a request against url . By default the request is signed with an access token but can be turned off by passing is_signed = False .
37,036
def request_access_token(self, params):
    """POST the token request with an explicit form content-type.

    Google requires the correct Content-Type header on this POST.
    """
    return self.client().request(
        self.access_token_url, method="POST",
        body=urllib.urlencode(params),
        headers={'Content-Type': 'application/x-www-form-urlencoded'})
Google requires correct content - type for POST requests
37,037
def readInternalC(self):
    """Return the internal (cold-junction) temperature in degrees Celsius.

    The internal temperature is a 12-bit two's-complement field in bits
    15..4 of the raw 32-bit read, scaled at 0.0625 deg C per LSB.
    """
    v = self._read32()
    # Drop the thermocouple/fault bits below the internal-temperature field.
    v >>= 4
    # Keep the whole 12-bit field including its sign bit, then sign-extend.
    # (The previous code masked with 0x7FF before subtracting 4096, which
    # mis-decoded every negative reading by 2048 counts.)
    internal = v & 0xFFF
    if internal & 0x800:
        internal -= 4096
    return internal * 0.0625
Return internal temperature value in degrees celsius .
37,038
def readTempC(self):
    """Return the thermocouple temperature in degrees Celsius.

    Returns NaN when any fault bit is set in the raw reading.
    """
    raw = self._read32()
    # Bits 2..0 are the fault flags; any set bit means an invalid reading.
    if raw & 0x7:
        return float('NaN')
    # 14-bit two's-complement value in bits 31..18, 0.25 deg C per LSB.
    value = raw >> 18
    if raw & 0x80000000:
        value -= 16384
    return value * 0.25
Return the thermocouple temperature value in degrees celsius .
37,039
def escape_quotes(self, val):
    """Backslash-escape embedded quote characters in an already-quoted string."""
    if self.is_string(val) and self._in_quotes(val, self.quote):
        # Unescape first so quotes already escaped are not escaped twice.
        inner = self.remove_quotes(val).replace("\\" + self.quote, self.quote)
        inner = inner.replace(self.quote, "\\" + self.quote)
        val = self.add_quotes(inner)
    return val
Escape any quotes in a value
37,040
def standardise_quotes(self, val):
    """Rewrap a value quoted with the alternate quote char using the default quote,
    then escape any embedded quotes."""
    if self._in_quotes(val, self.altquote):
        val = self.add_quotes(self.remove_quotes(val))
    return self.escape_quotes(val)
Change the quotes used to wrap a value to the pprint default, e.g. 'val' to "val".
37,041
def process_dict(self, d, level, comments):
    """Format the key/value pairs within a block as quoted, indented lines."""
    output = []
    for attr, value in d.items():
        if self.__is_metadata(attr):
            continue  # metadata pseudo-keys are rendered elsewhere
        quoted_key = self.quoter.add_quotes(attr)
        quoted_value = self.quoter.add_quotes(value)
        line = self.__format_line(self.whitespace(level, 2), quoted_key, quoted_value)
        line += self.process_attribute_comment(comments, attr)
        output.append(line)
    return output
Process keys and values within a block
37,042
def process_config_dict(self, key, d, level):
    """Format the CONFIG block: keys uppercased and quoted, values quoted."""
    output = []
    for name, value in d.items():
        config_key = "CONFIG {}".format(self.quoter.add_quotes(name.upper()))
        config_value = self.quoter.add_quotes(value)
        output.append(self.__format_line(self.whitespace(level, 1), config_key, config_value))
    return output
Process the CONFIG block
37,043
def is_hidden_container(self, key, val):
    """Return True when *key* is a pseudo-key (not a Mapfile keyword) whose
    value is a list of composites.

    Rewritten to return the boolean expression directly instead of the
    redundant ``if ...: return True else: return False``.
    """
    hidden_keys = ("layers", "classes", "styles", "symbols", "labels",
                   "outputformats", "features", "scaletokens", "composites")
    return key in hidden_keys and isinstance(val, list)
The key is not one of the Mapfile keywords and its values are a list
37,044
def pprint(self, composites):
    """Return the composites rendered as a nicely indented Mapfile string."""
    # Accept a single composite dict as well as a list of them.
    if composites and not isinstance(composites, list):
        composites = [composites]
    lines = []
    for composite in composites:
        kind = composite["__type__"]
        if kind in ("metadata", "validation"):
            lines += self.process_key_dict(kind, composite, level=0)
        else:
            lines += self._format(composite)
    return str(self.newlinechar.join(lines))
Print out a nicely indented Mapfile
37,045
def process_composite_comment(self, level, comments, key):
    """Render the comment(s) attached to a composite key, or '' when absent."""
    if key not in comments:
        return ""
    value = comments[key]
    spacer = self.whitespace(level, 0)
    if isinstance(value, list):
        rendered = [self.format_comment(spacer, item) for item in value]
        return self.newlinechar.join(rendered)
    return self.format_comment(spacer, value)
Process comments for composites such as MAP LAYER etc .
37,046
def start(self, children):
    """Top-level parse rule: collect composites from a Mapfile parse.

    A single composite is returned bare; multiple composites are returned
    as a list (supports partial Mapfiles and composite lists).
    """
    composites = []
    for composite_dict in children:
        # NOTE(review): deliberately disabled branch (`if False and ...`) —
        # position tracking for top-level composites appears switched off.
        if False and self.include_position:
            key_token = composite_dict[1]
            key_name = key_token.value.lower()
            composites_position = self.get_position_dict(composite_dict)
            composites_position[key_name] = self.create_position_dict(key_token, None)
        composites.append(composite_dict)
    if len(composites) == 1:
        return composites[0]
    else:
        return composites
Parses a MapServer Mapfile Parsing of partial Mapfiles or lists of composites is also possible
37,047
def check_composite_tokens(self, name, tokens):
    """Return ``(key, body_tokens)`` for a KEY .. END block.

    Used for PATTERN, POINTS and PROJECTION blocks; dict entries in the
    body are unwrapped to their "__tokens__" value.
    """
    assert len(tokens) >= 2
    key = tokens[0]
    assert key.value.lower() == name
    assert tokens[-1].value.lower() == "end"
    body = tokens[1:-1] if len(tokens) > 2 else []
    body_tokens = [t["__tokens__"] if isinstance(t, dict) else t for t in body]
    return key, body_tokens
Return the key and contents of a KEY .. END block for PATTERN POINTS and PROJECTION
37,048
def process_value_pairs(self, tokens, type_):
    """Build a dict from a METADATA/VALUES/VALIDATION block of string pairs.

    Attributes in the body are assumed to be pre-processed. Duplicate keys
    are overwritten by the last occurrence (with a warning).
    """
    key, body = self.check_composite_tokens(type_, tokens)
    key_name = self.key_name(key)
    d = CaseInsensitiveOrderedDict(CaseInsensitiveOrderedDict)
    for t in body:
        k = self.clean_string(t[0].value).lower()
        v = self.clean_string(t[1].value)
        if k in d.keys():
            log.warning("A duplicate key ({}) was found in {}. Only the last value ({}) will be used. ".format(k, type_, v))
        d[k] = v
    if self.include_position:
        pd = self.create_position_dict(key, body)
        d["__position__"] = pd
    d["__type__"] = key_name
    return d
Metadata Values and Validation blocks can either have string pairs or attributes Attributes will already be processed
37,049
def add_metadata_comments(self, d, metadata):
    """Attach per-key comments from a metadata parse subtree to dict *d*.

    Iterates the string pairs between the block's KEY and END tokens; each
    pair's key must already exist in *d* (duplicates were resolved to the
    last occurrence earlier).
    """
    if len(metadata) > 2:
        # Skip the surrounding KEY and END tokens.
        string_pairs = metadata[1:-1]
        for sp in string_pairs:
            if isinstance(sp.children[0], Token):
                token = sp.children[0]
                assert token.type == "UNQUOTED_STRING"
                key = token.value
            else:
                # Quoted strings are nested one level deeper in the tree.
                token = sp.children[0].children[0]
                key = token.value
            key = self._mapfile_todict.clean_string(key).lower()
            assert key in d.keys()
            key_comments = self.get_comments(sp.meta)
            d["__comments__"][key] = key_comments
    return d
Any duplicate keys will be replaced with the last duplicate along with comments
37,050
def assign_comments(self, tree, comments):
    """Capture lexer comments and index them by line for tree assignment.

    Builds ``self.comments`` (stripped comment texts, sorted by line) and
    ``self.idx`` (for each source line up to the last comment, the index of
    the first comment at-or-after that line), then walks the tree.
    """
    comments = list(comments)
    comments.sort(key=lambda c: c.line)
    # Map each comment's line to the index of its first comment.
    idx_by_line = {0: 0}
    for i, c in enumerate(comments):
        if c.line not in idx_by_line:
            idx_by_line[c.line] = i
    idx = []
    self.comments = [c.value.strip() for c in comments]
    last_comment_line = max(idx_by_line.keys())
    # Walk lines backwards so gaps inherit the following comment's index.
    for i in range(last_comment_line, 0, -1):
        if i in idx_by_line:
            idx.append(idx_by_line[i])
        else:
            idx.append(idx[-1])
    idx.append(0)
    idx.reverse()
    self.idx = idx
    self._assign_comments(tree, 0)
Capture any comments in the tree
37,051
def parse(self, text, fn=None):
    """Parse Mapfile *text* and return the parse tree.

    Expands INCLUDE directives and attaches comments when configured.
    *fn* is only used to improve error messages. Re-raises parser errors
    after logging.
    """
    # Python 2 compatibility: ensure we parse unicode text.
    if PY2 and not isinstance(text, unicode):
        text = unicode(text, 'utf-8')
    if self.expand_includes:
        text = self.load_includes(text, fn=fn)
    try:
        # Clear comments gathered by any previous parse (in-place so the
        # lexer callback keeps referencing the same list).
        self._comments[:] = []
        tree = self.lalr.parse(text)
        if self.include_comments:
            self.assign_comments(tree, self._comments)
        return tree
    except (ParseError, UnexpectedInput) as ex:
        if fn:
            log.error("Parsing of {} unsuccessful".format(fn))
        else:
            log.error("Parsing of Mapfile unsuccessful")
        log.info(ex)
        raise
Parse the Mapfile
37,052
def validate(self, value, add_comments=False, schema_name="map"):
    """Validate a Mapfile dict (or list of dicts) against a JSON schema.

    Returns the accumulated list of validation error messages.
    """
    validator = self.get_schema_validator(schema_name)
    if not isinstance(value, list):
        return self._validate(value, validator, add_comments, schema_name)
    errors = []
    for item in value:
        errors += self._validate(item, validator, add_comments, schema_name)
    return errors
verbose - also return the jsonschema error details
37,053
def output(s):
    """Parse *s*, pretty-print the AST, transform it and dump the result as JSON."""
    parser = Parser()
    transformer = ExpressionsTransformer()
    ast = parser.parse(s)
    logging.debug(ast.pretty())
    print(ast.pretty())
    result = transformer.transform(ast)
    print(json.dumps(result, indent=4))
    return result
Parse transform and pretty print the result
37,054
def main(ctx, verbose, quiet):
    """Entry point for the mappyfile CLI: configure logging and shared context."""
    verbosity = verbose - quiet
    configure_logging(verbosity)
    ctx.obj = {'verbosity': verbosity}
Execute the main mappyfile command
37,055
def format(ctx, input_mapfile, output_mapfile, indent, spacer, quote, newlinechar, expand, comments):
    """CLI command: format *input_mapfile* and save it as *output_mapfile*.

    The output file is overwritten if it already exists. Exits with
    status 0 on success.
    """
    # Decode escape sequences (e.g. "\\t", "\\n") passed on the command line.
    quote = codecs.decode(quote, 'unicode_escape')
    spacer = codecs.decode(spacer, 'unicode_escape')
    newlinechar = codecs.decode(newlinechar, 'unicode_escape')
    d = mappyfile.open(input_mapfile, expand_includes=expand,
                       include_comments=comments, include_position=True)
    mappyfile.save(d, output_mapfile, indent=indent, spacer=spacer,
                   quote=quote, newlinechar=newlinechar)
    sys.exit(0)
Format the input-mapfile and save it as output-mapfile. Note: output-mapfile will be overwritten if it already exists.
37,056
def open(fn, expand_includes=True, include_comments=False, include_position=False, **kwargs):
    """Load a Mapfile from the filename *fn* into a Python dictionary."""
    parser = Parser(expand_includes=expand_includes,
                    include_comments=include_comments, **kwargs)
    transformer = MapfileToDict(include_position=include_position,
                                include_comments=include_comments, **kwargs)
    ast = parser.parse_file(fn)
    return transformer.transform(ast)
Load a Mapfile from the supplied filename into a Python dictionary .
37,057
def load(fp, expand_includes=True, include_position=False, include_comments=False, **kwargs):
    """Load a Mapfile from an open file or file-like object into a dict."""
    parser = Parser(expand_includes=expand_includes,
                    include_comments=include_comments, **kwargs)
    transformer = MapfileToDict(include_position=include_position,
                                include_comments=include_comments, **kwargs)
    ast = parser.load(fp)
    return transformer.transform(ast)
Load a Mapfile from an open file or file - like object .
37,058
def loads(s, expand_includes=True, include_position=False, include_comments=False, **kwargs):
    """Load a Mapfile from the string *s* into a dict."""
    parser = Parser(expand_includes=expand_includes,
                    include_comments=include_comments, **kwargs)
    transformer = MapfileToDict(include_position=include_position,
                                include_comments=include_comments, **kwargs)
    ast = parser.parse(s)
    return transformer.transform(ast)
Load a Mapfile from a string
37,059
def save(d, output_file, indent=4, spacer=" ", quote='"', newlinechar="\n", end_comment=False, **kwargs):
    """Serialize dict *d* as a Mapfile and write it to *output_file*.

    Returns the output filename.
    """
    text = _pprint(d, indent, spacer, quote, newlinechar, end_comment)
    _save(output_file, text)
    return output_file
Write a dictionary to an output Mapfile on disk
37,060
def dumps(d, indent=4, spacer=" ", quote='"', newlinechar="\n", end_comment=False, **kwargs):
    """Return the Mapfile dictionary *d* rendered as a string."""
    return _pprint(d, indent, spacer, quote, newlinechar, end_comment, **kwargs)
Output a Mapfile dictionary as a string
37,061
def find(lst, key, value):
    """Return the first dict in *lst* whose *key* equals *value*, else None.

    *key* is lowercased before lookup; a missing key raises KeyError.
    """
    field = key.lower()
    for item in lst:
        if item[field] == value:
            return item
    return None
Find an item in a list of dicts using a key and a value
37,062
def findall(lst, key, value):
    """Return all dicts in *lst* whose *key* value is contained in *value*.

    E.g. all LAYERs of a MAP whose GROUP is in a set of group names.
    """
    field = key.lower()
    return [item for item in lst if item[field] in value]
Find all items in lst where key matches value . For example find all LAYER s in a MAP where GROUP equals VALUE
37,063
def findunique(lst, key):
    """Return the sorted unique values of *key* across the dicts in *lst*."""
    field = key.lower()
    return sorted({item[field] for item in lst})
Find all unique key values for items in lst .
37,064
def update(d1, d2):
    """Recursively merge dict *d2* into *d1* (mutating and returning *d1*).

    Special "__delete__" markers control removals: a truthy "__delete__"
    at the top of *d2* empties the result; a child dict with "__delete__"
    removes that key; the string value "__delete__" removes a scalar key;
    list items with "__delete__" are dropped from merged lists.
    """
    NoneType = type(None)
    if d2.get("__delete__", False):
        return {}
    for k, v in d2.items():
        if isinstance(v, dict):
            if v.get("__delete__", False):
                del d1[k]
            else:
                d1[k] = update(d1.get(k, {}), v)
        elif isinstance(v, (tuple, list)) and all(isinstance(li, (NoneType, dict)) for li in v):
            # Merge lists of dicts element-wise, padding the shorter side.
            orig_list = d1.get(k, [])
            new_list = []
            pairs = list(zip_longest(orig_list, v, fillvalue=None))
            for orig_item, new_item in pairs:
                if orig_item is None:
                    orig_item = {}
                if new_item is None:
                    new_item = {}
                if new_item.get("__delete__", False):
                    d = None
                else:
                    d = update(orig_item, new_item)
                if d is not None:
                    new_list.append(d)
            d1[k] = new_list
        else:
            if k in d1 and v == "__delete__":
                del d1[k]
            else:
                d1[k] = v
    return d1
Update dict d1 with properties from d2
37,065
def erosion(mapfile, dilated):
    """Demo step: erode the dilated geometry and add it as a new layer.

    Continues working with the already-modified Mapfile dict (re-reading
    it would start from scratch). Hides the line layer, clones the polygon
    layer with the eroded geometry, and restyles the original polygon.
    """
    ll = mappyfile.find(mapfile["layers"], "name", "line")
    ll["status"] = "OFF"
    pl = mappyfile.find(mapfile["layers"], "name", "polygon")
    # Clone the polygon layer so the original styling stays available.
    pl2 = deepcopy(pl)
    pl2["name"] = "newpolygon"
    mapfile["layers"].append(pl2)
    # Negative buffer shrinks (erodes) the previously dilated shape.
    dilated = dilated.buffer(-0.3)
    pl2["features"][0]["wkt"] = dilated.wkt
    style = pl["classes"][0]["styles"][0]
    style["color"] = "#999999"
    style["outlinecolor"] = "#b2b2b2"
We will continue to work with the modified Mapfile If we wanted to start from scratch we could simply reread it
37,066
def translate_kwargs(self, **kwargs):
    """Merge instance kwargs with call kwargs, promoting dict ``data`` to ``json``
    and filling in default headers.

    Raises ValueError when both ``data`` and ``json`` are supplied.
    """
    merged = self.kwargs.copy()
    merged.update(kwargs)
    if "data" in merged and "json" in merged:
        raise ValueError("Cannot use data and json together")
    if "data" in merged and isinstance(merged["data"], dict):
        # requests-style convenience: dict payloads are sent as JSON.
        merged["json"] = merged.pop("data")
    headers = DEFAULT_HEADERS.copy()
    if "headers" in kwargs:
        headers.update(kwargs["headers"])
    if "json" in merged:
        headers["Content-Type"] = "application/json;charset=UTF-8"
    merged["headers"] = headers
    return merged
Translate kwargs replacing data with json if necessary .
37,067
def post(self, endpoint, return_response=False, **kwargs):
    """Send an HTTP POST to *endpoint*; optionally also return the raw response."""
    args = self.translate_kwargs(**kwargs)
    raw = self.session.post(self.make_url(endpoint), **args)
    decoded = _decode_response(raw)
    if return_response:
        return decoded, raw
    return decoded
Send HTTP POST to the endpoint .
37,068
def escape_string(string):
    """Return *string* backslash-escaped and double-quoted for Gerrit commands."""
    escaped = string.replace('\\', '\\\\').replace('"', '\\"')
    return '"%s"' % escaped
Escape a string for use in Gerrit commands .
37,069
def append(self, data):
    """Add *data* to the output as a new paragraph.

    A list becomes a bulleted paragraph (one "* item" line per
    non-empty item, newlines flattened to spaces); a string becomes a
    plain paragraph.  Falsy input is ignored; other types raise
    ValueError.
    """
    if not data:
        return
    if isinstance(data, list):
        cleaned = (
            item.replace("\n", " ").strip().lstrip('*').strip()
            for item in data
        )
        bullets = "\n".join("* %s" % item for item in cleaned if item)
        if bullets:
            self.paragraphs.append(bullets)
    elif isinstance(data, str):
        text = data.strip()
        if text:
            self.paragraphs.append(text)
    else:
        raise ValueError('Data must be a list or a string')
Append the given data to the output .
37,070
def format(self):
    """Assemble header, paragraphs and footer into the final message.

    Returns the empty string when there are no paragraphs.  Header and
    footer are included only when truthy; all parts are separated by
    blank lines.
    """
    if not self.paragraphs:
        return ""
    segments = []
    if self.header:
        segments.append(self.header)
    segments.append("\n\n".join(self.paragraphs))
    if self.footer:
        segments.append(self.footer)
    return "\n\n".join(segments)
Format the message parts to a string .
37,071
def enable_batch(self, onerror="continue"):
    """Switch the request into batch-gathering mode.

    Subsequent sub-requests are collected under one batch node;
    *onerror* controls the server's behaviour when one of them fails.
    """
    self.batch_request_id = 1
    self.batch_request = True
    self._create_batch_node(onerror)
Enables batch request gathering .
37,072
def _filter_response ( self , response_dict ) : filtered_dict = { } for key , value in response_dict . items ( ) : if key == "_jsns" : continue if key == "xmlns" : continue if type ( value ) == list and len ( value ) == 1 : filtered_dict [ key ] = value [ 0 ] elif type ( value ) == dict and len ( value . keys ( ) ) == 1 and "_content" in value . keys ( ) : filtered_dict [ key ] = value [ "_content" ] elif type ( value ) == dict : tmp_dict = self . _filter_response ( value ) filtered_dict [ key ] = tmp_dict else : filtered_dict [ key ] = value return filtered_dict
Add additional filters to the response dictionary
37,073
def create_preauth(byval, key, by='name', expires=0, timestamp=None):
    """Generate a Zimbra preauth value (hex HMAC-SHA1).

    :param byval: the account identifier value (e.g. account name)
    :param key: the domain preauth key
    :param by: identifier type the server should match on
    :param expires: expiry (0 = server default)
    :param timestamp: creation time in milliseconds since the epoch;
        defaults to the current time
    :returns: hex digest string usable as the preauth token
    """
    if timestamp is None:
        # strftime("%s") is a non-portable glibc extension (fails on
        # Windows); timestamp() yields the same epoch seconds portably.
        timestamp = int(datetime.now().timestamp()) * 1000
    mac = hmac.new(
        codecs.latin_1_encode(key)[0],
        ('%s|%s|%s|%s' % (byval, by, expires, timestamp)).encode("utf-8"),
        hashlib.sha1,
    )
    return mac.hexdigest()
Generates a zimbra preauth value
37,074
def zimbra_to_python(zimbra_dict, key_attribute="n", content_attribute="_content"):
    """Convert a flat Zimbra attribute list to a plain dict.

    Each entry contributes ``entry[key_attribute] ->
    entry[content_attribute]``; later duplicates overwrite earlier ones.
    """
    return {
        entry[key_attribute]: entry[content_attribute]
        for entry in zimbra_dict
    }
Converts single level Zimbra dicts to a standard python dict
37,075
def get_value(haystack, needle, key_attribute="n", content_attribute="_content"):
    """Fetch a value from a Zimbra-style attribute list.

    Returns the ``content_attribute`` of the first entry whose
    ``key_attribute`` equals *needle*, or None when no entry matches.
    """
    return next(
        (entry[content_attribute] for entry in haystack
         if entry[key_attribute] == needle),
        None,
    )
Fetch a value from a Zimbra-like JSON dict (keys are "n" entries, values are "_content" entries).
37,076
def dict_to_dom(root_node, xml_dict):
    """Populate *root_node* from *xml_dict*, recursing into sub-dicts.

    A "_content" key becomes a text node; dict values become child
    elements (recursively); list values become repeated child elements;
    any other value becomes an attribute on *root_node*.
    """
    doc = root_node.ownerDocument
    if '_content' in xml_dict:
        doc_text = doc.createTextNode(convert_to_str(xml_dict['_content']))
        root_node.appendChild(doc_text)
    for key, value in xml_dict.items():
        if key == '_content':
            continue
        # type()== is kept deliberately: only plain dict/list trigger
        # element creation, matching the original behaviour.
        if type(value) == dict:
            child = doc.createElement(key)
            dict_to_dom(child, value)
            root_node.appendChild(child)
        elif type(value) == list:
            for entry in value:
                child = doc.createElement(key)
                dict_to_dom(child, entry)
                root_node.appendChild(child)
        else:
            root_node.setAttribute(key, convert_to_str(value))
Create a DOM node and optionally several subnodes from a dictionary .
37,077
def dom_to_dict(root_node):
    """Serialize a DOM element tree into nested dicts.

    Returns ``{local_tag: contents}`` where contents holds attributes,
    a "_content" entry for text, and child elements keyed by their
    local tag.  Repeated child tags are collected into a list.
    """
    def _local(name):
        # Drop the namespace prefix, e.g. "soap:Body" -> "Body".
        return name.split(":")[1] if ":" in name else name

    tag = _local(root_node.tagName)
    node = {}
    result = {tag: node}

    if root_node.hasAttributes():
        for attr in list(root_node.attributes.keys()):
            node[attr] = root_node.getAttribute(attr)

    for child in root_node.childNodes:
        if child.nodeType == root_node.TEXT_NODE:
            node['_content'] = child.data
            continue
        child_tag = _local(child.tagName)
        value = dom_to_dict(child)[child_tag]
        if child_tag in node:
            existing = node[child_tag]
            if not isinstance(existing, list):
                node[child_tag] = [existing]
            node[child_tag].append(value)
        else:
            node[child_tag] = value
    return result
Serializes the given node to the dictionary
37,078
def connect(self):
    """Open the connection, wrapping the socket with TLS 1.2.

    Overrides HTTPSConnection.connect so the TLS version is pinned to
    TLSv1.2 rather than whatever default the library would negotiate.
    """
    sock = socket.create_connection(
        (self.host, self.port), self.timeout, self.source_address)
    if getattr(self, '_tunnel_host', None):
        # Establish the CONNECT tunnel on the plain socket first.
        self.sock = sock
        self._tunnel()
    # ssl.wrap_socket() was deprecated in Python 3.7 and removed in
    # 3.12; build an explicit context instead.  Like wrap_socket(),
    # this context performs no certificate/hostname verification.
    context = ssl.SSLContext(ssl.PROTOCOL_TLSv1_2)
    if self.cert_file:
        context.load_cert_chain(self.cert_file, self.key_file)
    self.sock = context.wrap_socket(sock)
Overrides HTTPSConnection . connect to specify TLS version
37,079
def gen_request(self, request_type="json", token=None, set_batch=False,
                batch_onerror=None):
    """Build and return a Request object of the given type.

    Optionally pre-sets an auth token and enables batch mode.  Raises
    UnknownRequestType for anything other than "json" or "xml".
    """
    factories = {"json": RequestJson, "xml": RequestXml}
    try:
        request = factories[request_type]()
    except KeyError:
        raise UnknownRequestType()
    if token is not None:
        request.set_auth_token(token)
    if set_batch:
        request.enable_batch(batch_onerror)
    return request
Convenience method to quickly generate a request object.
37,080
def send_request(self, request, response=None):
    """Send *request* to the server and populate a response object.

    When *response* is None a new Response of the matching type is
    created and returned; otherwise the supplied object is filled in
    and nothing is returned.  An HTTP 500 is treated as a valid SOAP
    fault body; other HTTP errors propagate.
    """
    if response is None:
        if request.request_type == "json":
            target = ResponseJson()
        elif request.request_type == "xml":
            target = ResponseXml()
        else:
            raise UnknownRequestType()
    else:
        target = response

    try:
        raw = ur.urlopen(
            self.url,
            request.get_request().encode("utf-8"),
            self.timeout,
        ).read()
    except ue.HTTPError as e:
        if e.code != 500:
            raise e
        # Zimbra returns SOAP faults with status 500; the body still
        # carries a parseable response.
        raw = e.fp.read()

    if isinstance(raw, bytes):
        raw = raw.decode("utf-8")
    target.set_response(raw)

    if response is None:
        return target
Send the request .
37,081
def authenticate(url, account, key, by='name', expires=0, timestamp=None,
                 timeout=None, request_type="xml", admin_auth=False,
                 use_password=False, raise_on_error=False):
    """Authenticate against a Zimbra server and return the auth token.

    Uses preauth by default; admin auth or a plain password when the
    corresponding flags are set.  Returns None on failure unless
    *raise_on_error* is set, in which case AuthenticationFailed is
    raised with the server's fault code and message.
    """
    if timestamp is None:
        timestamp = int(time.time()) * 1000

    use_xml = request_type == 'xml'
    request = RequestXml() if use_xml else RequestJson()

    payload = {'account': {'by': by, '_content': account}}
    if admin_auth:
        namespace = "urn:zimbraAdmin"
        payload['password'] = key
    else:
        namespace = "urn:zimbraAccount"
        if use_password:
            payload['password'] = {"_content": key}
        else:
            payload['preauth'] = {
                'timestamp': timestamp,
                'expires': expires,
                '_content': preauth.create_preauth(
                    account, key, by, expires, timestamp),
            }
    request.add_request('AuthRequest', payload, namespace)

    response = ResponseXml() if use_xml else ResponseJson()
    Communication(url, timeout).send_request(request, response)

    if response.is_fault():
        if raise_on_error:
            raise AuthenticationFailed(
                "Cannot authenticate user: (%s) %s"
                % (response.get_fault_code(), response.get_fault_message()))
        return None
    return response.get_response()['AuthResponse']['authToken']
Authenticate to the Zimbra server
37,082
def read_dbf(dbf_path, index=None, cols=False, incl_index=False):
    """Read a dbf file into a pandas.DataFrame.

    :param dbf_path: path to the .dbf file
    :param index: column name to use as the DataFrame index (optional)
    :param cols: list of column names to load, or False to load all
    :param incl_index: if True (and *cols* given), also load *index*
        as a regular column
    :returns: pandas.DataFrame with the selected columns
    """
    db = ps.open(dbf_path)
    try:
        if cols:
            # Copy before extending: the original appended to the
            # caller's list, mutating it as a side effect.
            vars_to_read = list(cols)
            if incl_index:
                vars_to_read.append(index)
        else:
            vars_to_read = db.header
        data = {var: db.by_col(var) for var in vars_to_read}
        index_values = db.by_col(index) if index else None
    finally:
        # Close the dbf even if a column read fails.
        db.close()
    return pd.DataFrame(data, index=index_values)
Read a dbf file as a pandas . DataFrame optionally selecting the index variable and which columns are to be loaded .
37,083
def column_mask(self):
    """ndarray: True where the unweighted column margin < min base size.

    The result has the same shape as the slice; a 1-D margin is
    broadcast across all rows.
    """
    margin = compress_pruned(
        self._slice.margin(
            axis=0,
            weighted=False,
            include_transforms_for_dims=self._hs_dims,
            prune=self._prune,
        )
    )
    mask = margin < self._size
    if margin.shape != self._shape:
        # Broadcast the per-column mask up to the full slice shape.
        mask = np.logical_or(np.zeros(self._shape, dtype=bool), mask)
    return mask
ndarray: True where column margin < min_base_size; same shape as slice.
37,084
def table_mask(self):
    """ndarray: True where the unweighted table margin < min base size.

    The result has the same shape as the slice.  For an MR first
    dimension the 1-D margin varies by row, so it is broadcast down
    the columns; otherwise it is broadcast across the rows.
    """
    margin = compress_pruned(
        self._slice.margin(
            axis=None,
            weighted=False,
            include_transforms_for_dims=self._hs_dims,
            prune=self._prune,
        )
    )
    mask = margin < self._size
    if margin.shape == self._shape:
        return mask
    if self._slice.dim_types[0] == DT.MR:
        # Per-row margins: add a trailing axis so broadcasting goes
        # along the columns.
        mask = mask[:, None]
    return np.logical_or(np.zeros(self._shape, dtype=bool), mask)
ndarray: True where table margin < min_base_size; same shape as slice.
37,085
def values(self):
    """list of _ColumnPairwiseSignificance tests, one per column."""
    n_cols = self._slice.get_shape(hs_dims=self._hs_dims)[1]
    return [
        _ColumnPairwiseSignificance(
            self._slice,
            idx,
            self._axis,
            self._weighted,
            self._alpha,
            self._only_larger,
            self._hs_dims,
        )
        for idx in range(n_cols)
    ]
list of _ColumnPairwiseSignificance tests .
37,086
def pairwise_indices(self):
    """ndarray of pairwise-index tuples, transposed to column order."""
    per_test = [sig.pairwise_indices for sig in self.values]
    return np.array(per_test).T
ndarray containing tuples of pairwise indices .
37,087
def summary_pairwise_indices(self):
    """ndarray (dtype=object) of pairwise-index tuples for the summary.

    One entry per column; dtype=object keeps the tuples intact instead
    of letting numpy expand them into extra array dimensions.
    """
    result = np.empty(self.values[0].t_stats.shape[1], dtype=object)
    result[:] = [sig.summary_pairwise_indices for sig in self.values]
    return result
ndarray containing tuples of pairwise indices for the column summary .
37,088
def reset(self):
    """Restore the weight variable to its pre-calibration values."""
    holder = self.survey_scenario.simulation.get_holder(self.weight_name)
    # Rebuild the array with the holder's own dtype so the simulation
    # keeps a consistent variable type.
    holder.array = numpy.array(
        self.initial_weight, dtype=holder.variable.dtype)
Reset the calibration to it initial state
37,089
def _set_survey_scenario(self, survey_scenario):
    """Attach *survey_scenario* and cache filter, weights and totals.

    Creates a simulation on the scenario when none exists, then reads
    the filter variable, the household weight (and a copy kept as the
    initial weight) and the initial filtered population total.
    """
    self.survey_scenario = survey_scenario
    if survey_scenario.simulation is None:
        # Variables below require a live simulation.
        survey_scenario.simulation = survey_scenario.new_simulation()

    period = self.period
    self.filter_by = survey_scenario.calculate_variable(
        variable=self.filter_by_name, period=period)

    weight_name = survey_scenario.weight_column_name_by_entity['menage']
    self.weight_name = weight_name
    self.initial_weight_name = weight_name + "_ini"

    initial_weight = survey_scenario.calculate_variable(
        variable=weight_name, period=period)
    self.initial_weight = initial_weight
    self.initial_total_population = sum(initial_weight * self.filter_by)
    self.weight = survey_scenario.calculate_variable(
        variable=weight_name, period=period)
Set survey scenario
37,090
def set_parameters(self, parameter, value):
    """Set calibration parameter *parameter* to *value*.

    The lower-bound parameter 'lo' is stored as its reciprocal
    (1 / value); every other parameter is stored as given.
    """
    stored = 1 / value if parameter == 'lo' else value
    self.parameters[parameter] = stored
Set parameters value
37,091
def _build_calmar_data(self):
    """Return the DataFrame handed to calmar as its input data.

    Contains the filtered initial weights plus one column per margin
    variable (except 'total_population', which calmar handles
    separately).
    """
    assert self.initial_weight_name is not None
    data = pd.DataFrame()
    data[self.initial_weight_name] = self.initial_weight * self.filter_by
    period = self.period
    for variable in self.margins_by_variable:
        if variable == 'total_population':
            continue
        assert variable in self.survey_scenario.tax_benefit_system.variables
        data[variable] = self.survey_scenario.calculate_variable(
            variable=variable, period=period)
    return data
Builds the data dictionnary used as calmar input argument
37,092
def _update_weights(self, margins, parameters=None):
    """Run calmar, store the new weights and return adjusted margins.

    :param margins: target margins passed through to calmar
    :param parameters: optional dict of calmar options (not mutated)
    :returns: the margins as adjusted by calmar
    """
    # Copy instead of mutating: the original signature used a mutable
    # default ({}) and wrote into it, leaking 'initial_weight' across
    # calls and into callers' dicts.
    parameters = {} if parameters is None else dict(parameters)
    data = self._build_calmar_data()
    assert self.initial_weight_name is not None
    parameters['initial_weight'] = self.initial_weight_name
    val_pondfin, lambdasol, updated_margins = calmar(
        data, margins, **parameters)
    # Calibrated weights inside the filter, untouched weights outside.
    self.weight = (
        val_pondfin * self.filter_by
        + self.weight * logical_not(self.filter_by))
    return updated_margins
Run calmar stores new weights and returns adjusted margins
37,093
def set_calibrated_weights(self):
    """Write the calibrated weights into both simulations.

    Applies them to the scenario's main simulation and, when present,
    to the baseline simulation as well.
    """
    survey_scenario = self.survey_scenario
    assert survey_scenario.simulation is not None
    targets = (
        survey_scenario.simulation,
        survey_scenario.baseline_simulation,
    )
    for simulation in targets:
        if simulation is not None:
            simulation.set_input(self.weight_name, self.period, self.weight)
Modify the weights to use the calibrated weights
37,094
def get_parameter_action(action):
    """Return a set of generic flags describing an argparse action.

    Behaviour is described generically so other parsers can map onto
    the same schema: an append action means each value must be given
    as its own argument, so SPECIFY_EVERY_PARAM is flagged.  A set is
    returned because one action may imply several flags.
    """
    if isinstance(action, argparse._AppendAction):
        return {SPECIFY_EVERY_PARAM}
    return set()
To foster a general schema that can accomodate multiple parsers the general behavior here is described rather than the specific language of a given parser . For instance the append action of an argument is collapsing each argument given to a single argument . It also returns a set of actions as well since presumably some actions can impact multiple parameter options
37,095
def wishart_pfaffian(self):
    """ndarray of Wishart Pfaffian CDF values (before normalization).

    One Pfaffian is evaluated per chi-square entry; the result keeps
    the shape of the chi-square array.
    """
    flat = [
        Pfaffian(self, val).value
        for _, val in np.ndenumerate(self._chisq)
    ]
    return np.array(flat).reshape(self._chisq.shape)
ndarray of wishart pfaffian CDF before normalization
37,096
def other_ind(self):
    """ndarray of length n_min filled with the last index of square A."""
    # np.int was deprecated in NumPy 1.20 and removed in 1.24; the
    # builtin int yields the same default integer dtype.
    return np.full(self.n_min, self.size - 1, dtype=int)
last row or column of square A
37,097
def K(self):
    """float: normalizing constant for the Wishart CDF.

    K1 is the Wishart density constant (ratio of powers and
    multivariate gamma terms); K2 accumulates the per-index gamma
    factors for the chi-square part.
    """
    n_min = self.n_min
    K1 = np.float_power(pi, 0.5 * n_min * n_min)
    K1 /= (
        np.float_power(2, 0.5 * n_min * self._n_max)
        * self._mgamma(0.5 * self._n_max, n_min)
        * self._mgamma(0.5 * n_min, n_min)
    )
    K2 = np.float_power(
        2, self.alpha * self.size + 0.5 * self.size * (self.size + 1))
    # range, not xrange: keeps this working on Python 3.
    for i in range(self.size):
        K2 *= gamma(self.alpha + i + 1)
    return K1 * K2
Normalizing constant for wishart CDF .
37,098
def value(self):
    """float: the (unnormalized) cumulative distribution function value.

    Fills the skew-symmetric matrix A with the pairwise integrals of
    the target density and returns sqrt(det(A)).  Returns 0 when any
    entry is NaN (numerically degenerate case).
    """
    wishart = self._wishart_cdf
    A = self.A
    p = self._gammainc_a
    g = gamma(wishart.alpha_vec)

    # q[k] holds the incomplete-gamma tail term shared by index pairs
    # with i + j == k.
    q_ind = np.arange(2 * wishart.n_min - 2)
    q_vec = 2 * wishart.alpha + q_ind + 2
    q = (np.float_power(0.5, q_vec) * gamma(q_vec)
         * gammainc(q_vec, self._chisq_val))

    # range, not xrange: keeps this working on Python 3.
    for i in range(wishart.n_min):
        b = 0.5 * p[i] * p[i]
        for j in range(i, wishart.n_min - 1):
            b -= q[i + j] / (g[i] * g[j + 1])
            A[j + 1, i] = p[i] * p[j + 1] - 2 * b
            # Keep A skew-symmetric.
            A[i, j + 1] = -A[j + 1, i]

    if np.any(np.isnan(A)):
        return 0
    return np.sqrt(det(A))
return float Cumulative Distribution Function .
37,099
def A(self):
    """ndarray: skew-symmetric matrix for integrating the distribution.

    Starts as a size x size zero matrix; when n_min is odd it is
    padded to an even size (a Pfaffian needs an even-dimensioned
    matrix).
    """
    wishart = self._wishart_cdf
    base = np.zeros([wishart.size, wishart.size])
    return self._make_size_even(base) if wishart.n_min % 2 else base
ndarray - a skew - symmetric matrix for integrating the target distribution