idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
25,500
def read(self, size=-1):
    """Read bytes from the buffer and advance the read position.

    Returns the bytes in a bytestring.
    """
    chunk = self.peek(size)
    self._pos = self._pos + len(chunk)
    return chunk
25,501
def peek(self, size=-1):
    """Get bytes from the buffer without advancing the read position.

    Returns the bytes in a bytestring.
    """
    available = len(self)
    # A negative size, or one past the end, means "everything".
    if not 0 <= size <= available:
        size = available
    return self._bytes[self._pos:self._pos + size]
25,502
def register_compressor(ext, callback):
    """Register a callback for transparently decompressing files with a
    specific extension.

    :param str ext: extension, must start with a dot (e.g. ``'.gz'``)
    :param callable callback: the decompression wrapper to invoke
    """
    looks_like_extension = bool(ext) and ext[0] == '.'
    if not looks_like_extension:
        raise ValueError('ext must be a string starting with ., not %r' % ext)
    if ext in _COMPRESSOR_REGISTRY:
        # Re-registration is allowed, but noisy: it silently changes behavior
        # for everyone opening files with this extension.
        logger.warning('overriding existing compression handler for %r', ext)
    _COMPRESSOR_REGISTRY[ext] = callback
25,503
def _check_kwargs(kallable, kwargs):
    """Check which keyword arguments the callable supports.

    Returns only the supported subset of *kwargs*; warns about (and drops)
    the rest.
    """
    supported = set(_inspect_kwargs(kallable))
    rejected = [k for k in sorted(kwargs) if k not in supported]
    if rejected:
        logger.warning('ignoring unsupported keyword arguments: %r', rejected)
    return {k: v for k, v in kwargs.items() if k in supported}
25,504
def open(
        uri,
        mode='r',
        buffering=-1,
        encoding=None,
        errors=None,
        newline=None,
        closefd=True,
        opener=None,
        ignore_ext=False,
        transport_params=None,
        ):
    r"""Open the URI object, returning a file-like object.

    Mirrors the builtin ``open`` signature; ``newline``, ``closefd`` and
    ``opener`` are accepted for compatibility but not used below.
    ``transport_params`` is a dict of scheme-specific options.
    """
    logger.debug('%r', locals())
    if not isinstance(mode, six.string_types):
        raise TypeError('mode should be a string')
    if transport_params is None:
        transport_params = {}
    # Fast path: plain local files with no compression get the builtin open.
    fobj = _shortcut_open(
        uri,
        mode,
        ignore_ext=ignore_ext,
        buffering=buffering,
        encoding=encoding,
        errors=errors,
    )
    if fobj is not None:
        return fobj
    # An explicit encoding implies text mode, so drop the 'b' flag.
    if encoding is not None and 'b' in mode:
        mode = mode.replace('b', '')
    if PATHLIB_SUPPORT and isinstance(uri, pathlib.Path):
        uri = str(uri)
    explicit_encoding = encoding
    encoding = explicit_encoding if explicit_encoding else SYSTEM_ENCODING
    # The underlying transport always works in binary; map text modes to
    # their binary equivalents and decode afterwards.
    try:
        binary_mode = {'r': 'rb', 'r+': 'rb+',
                       'w': 'wb', 'w+': 'wb+',
                       'a': 'ab', 'a+': 'ab+'}[mode]
    except KeyError:
        binary_mode = mode
    binary, filename = _open_binary_stream(uri, binary_mode, transport_params)
    if ignore_ext:
        decompressed = binary
    else:
        decompressed = _compression_wrapper(binary, filename, mode)
    if 'b' not in mode or explicit_encoding is not None:
        decoded = _encoding_wrapper(decompressed, mode,
                                    encoding=encoding, errors=errors)
    else:
        decoded = decompressed
    return decoded
25,505
def smart_open(uri, mode="rb", **kw):
    """Deprecated, use smart_open.open instead.

    Translates the legacy keyword arguments (``host``, ``s3_upload``,
    ``profile_name``, ``s3_session``, ...) into ``transport_params`` and
    delegates to :func:`open`.
    """
    logger.warning('this function is deprecated, use smart_open.open instead')
    ignore_extension = kw.pop('ignore_extension', False)
    expected_kwargs = _inspect_kwargs(open)
    scrubbed_kwargs = {}
    transport_params = {}
    # Legacy S3-specific keyword arguments.
    if 'host' in kw or 's3_upload' in kw:
        transport_params['multipart_upload_kwargs'] = {}
        transport_params['resource_kwargs'] = {}
    if 'host' in kw:
        url = kw.pop('host')
        if not url.startswith('http'):
            url = 'http://' + url
        transport_params['resource_kwargs'].update(endpoint_url=url)
    if 's3_upload' in kw and kw['s3_upload']:
        transport_params['multipart_upload_kwargs'].update(**kw.pop('s3_upload'))
    # profile_name and s3_session both set the boto3 session; the latter wins.
    if 'profile_name' in kw and 's3_session' in kw:
        logger.error('profile_name and s3_session are mutually exclusive, ignoring the former')
    if 'profile_name' in kw:
        transport_params['session'] = boto3.Session(profile_name=kw.pop('profile_name'))
    if 's3_session' in kw:
        transport_params['session'] = kw.pop('s3_session')
    # Anything open() does not understand is assumed to be a transport param.
    for key, value in kw.items():
        if key in expected_kwargs:
            scrubbed_kwargs[key] = value
        else:
            transport_params[key] = value
    return open(uri, mode, ignore_ext=ignore_extension,
                transport_params=transport_params, **scrubbed_kwargs)
25,506
def _shortcut_open(
        uri,
        mode,
        ignore_ext=False,
        buffering=-1,
        encoding=None,
        errors=None,
        ):
    """Try to open the URI using the standard library io.open function.

    Returns a file object on success, or None when this URI needs the full
    transport/compression machinery instead.
    """
    if not isinstance(uri, six.string_types):
        return None
    parsed_uri = _parse_uri(uri)
    if parsed_uri.scheme != 'file':
        return None
    _, extension = P.splitext(parsed_uri.uri_path)
    if extension in _COMPRESSOR_REGISTRY and not ignore_ext:
        return None
    open_kwargs = {}
    if encoding is not None:
        open_kwargs['encoding'] = encoding
        # NOTE(review): reconstructed nesting — an explicit encoding implies
        # text mode, so the 'b' flag is stripped here; confirm against upstream.
        mode = mode.replace('b', '')
    if errors and 'b' not in mode:
        open_kwargs['errors'] = errors
    if six.PY3:
        return _builtin_open(parsed_uri.uri_path, mode,
                             buffering=buffering, **open_kwargs)
    elif not open_kwargs:
        return _builtin_open(parsed_uri.uri_path, mode, buffering=buffering)
    # PY2 builtin open has no encoding/errors kwargs; io.open does.
    return io.open(parsed_uri.uri_path, mode, buffering=buffering, **open_kwargs)
25,507
def _open_binary_stream(uri, mode, transport_params):
    """Open an arbitrary URI in the specified binary mode.

    Returns a tuple of (file object, filename); the filename is later used
    for extension-based compression detection.
    """
    if mode not in ('rb', 'rb+', 'wb', 'wb+', 'ab', 'ab+'):
        raise NotImplementedError('unsupported mode: %r' % mode)
    if isinstance(uri, six.string_types):
        filename = uri.split('/')[-1]
        parsed_uri = _parse_uri(uri)
        if parsed_uri.scheme == "file":
            fobj = io.open(parsed_uri.uri_path, mode)
            return fobj, filename
        elif parsed_uri.scheme in smart_open_ssh.SCHEMES:
            fobj = smart_open_ssh.open(
                parsed_uri.uri_path,
                mode,
                host=parsed_uri.host,
                user=parsed_uri.user,
                port=parsed_uri.port,
            )
            return fobj, filename
        elif parsed_uri.scheme in smart_open_s3.SUPPORTED_SCHEMES:
            return _s3_open_uri(parsed_uri, mode, transport_params), filename
        elif parsed_uri.scheme == "hdfs":
            _check_kwargs(smart_open_hdfs.open, transport_params)
            return smart_open_hdfs.open(parsed_uri.uri_path, mode), filename
        elif parsed_uri.scheme == "webhdfs":
            kw = _check_kwargs(smart_open_webhdfs.open, transport_params)
            return smart_open_webhdfs.open(parsed_uri.uri_path, mode, **kw), filename
        elif parsed_uri.scheme.startswith('http'):
            # For HTTP the key-like filename above is wrong; use the URL path.
            filename = P.basename(urlparse.urlparse(uri).path)
            kw = _check_kwargs(smart_open_http.open, transport_params)
            return smart_open_http.open(uri, mode, **kw), filename
        else:
            # BUG FIX: was NotImplementedError("scheme %r ...", scheme) — the
            # comma passed the scheme as a second exception argument instead
            # of interpolating it into the message.
            raise NotImplementedError(
                "scheme %r is not supported" % parsed_uri.scheme)
    elif hasattr(uri, 'read'):
        # Already a file-like object.
        filename = getattr(uri, 'name', 'unknown')
        return uri, filename
    else:
        raise TypeError("don't know how to handle uri %r" % uri)
25,508
def _my_urlsplit ( url ) : if '?' not in url : return urlsplit ( url , allow_fragments = False ) sr = urlsplit ( url . replace ( '?' , '\n' ) , allow_fragments = False ) SplitResult = collections . namedtuple ( 'SplitResult' , 'scheme netloc path query fragment' ) return SplitResult ( sr . scheme , sr . netloc , sr . path . replace ( '\n' , '?' ) , '' , '' )
This is a hack to prevent the regular urlsplit from splitting around question marks .
25,509
def _parse_uri(uri_as_string):
    """Parse the given URI from a string into a Uri namedtuple."""
    # On Windows, bare paths like C:\foo would otherwise parse the drive
    # letter as a scheme.
    if os.name == 'nt' and '://' not in uri_as_string:
        uri_as_string = 'file://' + uri_as_string
    parsed_uri = _my_urlsplit(uri_as_string)
    scheme = parsed_uri.scheme
    if scheme == "hdfs":
        return _parse_uri_hdfs(parsed_uri)
    if scheme == "webhdfs":
        return _parse_uri_webhdfs(parsed_uri)
    if scheme in smart_open_s3.SUPPORTED_SCHEMES:
        return _parse_uri_s3x(parsed_uri)
    if scheme == 'file':
        return _parse_uri_file(parsed_uri.netloc + parsed_uri.path)
    if scheme in ('', None):
        return _parse_uri_file(uri_as_string)
    if scheme.startswith('http'):
        return Uri(scheme=scheme, uri_path=uri_as_string)
    if scheme in smart_open_ssh.SCHEMES:
        return _parse_uri_ssh(parsed_uri)
    raise NotImplementedError(
        "unknown URI scheme %r in %r" % (scheme, uri_as_string))
25,510
def _parse_uri_ssh(unt):
    """Parse a Uri from a urllib SplitResult-like namedtuple for ssh/scp."""
    netloc = unt.netloc
    # [user@]host[:port]
    if '@' in netloc:
        user, host_port = netloc.split('@', 1)
    else:
        user, host_port = None, netloc
    host, _, port = host_port.partition(':')
    return Uri(
        scheme=unt.scheme,
        uri_path=unt.path,
        user=user or None,
        host=host,
        port=int(port) if port else smart_open_ssh.DEFAULT_PORT,
    )
25,511
def _need_to_buffer(file_obj, mode, ext):
    """Returns True if we need to buffer the whole file in memory in order
    to proceed (PY2 decompressors require a seekable stream)."""
    try:
        seekable = file_obj.seekable()
    except AttributeError:
        # Older file-like objects: fall back to sniffing for a seek method.
        seekable = hasattr(file_obj, 'seek')
    is_read = mode.startswith('r')
    is_compressed = ext in _COMPRESSOR_REGISTRY
    return six.PY2 and is_read and is_compressed and not seekable
25,512
def _encoding_wrapper(fileobj, mode, encoding=None, errors=None):
    """Decode bytes into text, if necessary.

    Wraps *fileobj* in codecs reader/writer objects according to *mode*.
    """
    logger.debug('encoding_wrapper: %r', locals())
    # Binary mode with no explicit encoding: pass the stream through as-is.
    if 'b' in mode and encoding is None:
        return fileobj
    if encoding is None:
        encoding = SYSTEM_ENCODING
    kw = {'errors': errors} if errors else {}
    readable = mode[0] == 'r' or mode.endswith('+')
    writable = mode[0] in ('w', 'a') or mode.endswith('+')
    if readable:
        fileobj = codecs.getreader(encoding)(fileobj, **kw)
    if writable:
        fileobj = codecs.getwriter(encoding)(fileobj, **kw)
    return fileobj
25,513
def get_zone():
    """Make an HTTP request to the AcraServer API to generate a new zone.

    Returns a tuple of (zone id, public key bytes).
    """
    url = '{}/getNewZone'.format(ACRA_CONNECTOR_API_ADDRESS)
    with closing(urlopen(url)) as response:
        payload = response.read().decode('utf-8')
    zone_data = json.loads(payload)
    return zone_data['id'], b64decode(zone_data['public_key'])
25,514
def iqr(a):
    """Calculate the IQR (inter-quartile range) for an array of numbers."""
    arr = np.asarray(a)
    q1, q3 = (stats.scoreatpercentile(arr, p) for p in (25, 75))
    return q3 - q1
25,515
def freedman_diaconis_bins(a):
    """Calculate number of hist bins using the Freedman-Diaconis rule.

    :param a: array-like of numbers
    :returns: int, the number of bins
    """
    a = np.asarray(a)
    h = 2 * iqr(a) / (len(a) ** (1 / 3))
    if h == 0:
        # Degenerate IQR (e.g. heavily repeated values): fall back to
        # sqrt(n) bins.
        bins = np.ceil(np.sqrt(a.size))
    else:
        bins = np.ceil((np.nanmax(a) - np.nanmin(a)) / h)
    # BUG FIX: np.int was deprecated in NumPy 1.20 and removed in 1.24;
    # the builtin int is the correct spelling.
    return int(bins)
25,516
def assign_bins(x, breaks, weight=None, pad=False, closed='right'):
    """Assign values in x to bins demarcated by the break points.

    :param x: values to bin
    :param breaks: sorted bin edges
    :param weight: optional per-value weights (NaNs count as 0)
    :param pad: if True, add an empty bin at each end
    :param closed: which side of each bin is closed, 'right' or 'left'
    :returns: DataFrame from result_dataframe (count, x, width, ...)
    """
    right = closed == 'right'
    if weight is None:
        weight = np.ones(len(x))
    else:
        weight = np.asarray(weight)
        weight[np.isnan(weight)] = 0
    # Bin index (0-based) of each value; NaN for out-of-range values.
    bin_idx = pd.cut(x, bins=breaks, labels=False,
                     right=right, include_lowest=True)
    bin_widths = np.diff(breaks)
    # Midpoint of each bin.
    bin_x = (breaks[:-1] + breaks[1:]) * 0.5
    df = pd.DataFrame({'bin_idx': bin_idx, 'weight': weight})
    # Total weight landing in each occupied bin.
    wftable = df.pivot_table(
        'weight', index=['bin_idx'], aggfunc=np.sum)['weight']
    if len(wftable) < len(bin_x):
        # Fill in zero counts for the empty bins so every bin is represented.
        empty_bins = set(range(len(bin_x))) - set(bin_idx)
        for b in empty_bins:
            wftable.loc[b] = 0
        wftable = wftable.sort_index()
    bin_count = wftable.tolist()
    if pad:
        # Mirror the outermost bin widths when padding.
        bw0 = bin_widths[0]
        bwn = bin_widths[-1]
        bin_count = np.hstack([0, bin_count, 0])
        bin_widths = np.hstack([bw0, bin_widths, bwn])
        bin_x = np.hstack([bin_x[0] - bw0, bin_x, bin_x[-1] + bwn])
    return result_dataframe(bin_count, bin_x, bin_widths)
25,517
def result_dataframe(count, x, width, xmin=None, xmax=None):
    """Create a dataframe to hold bin information.

    All array arguments must be numpy arrays of equal length.
    """
    if xmin is None:
        xmin = x - width / 2
    if xmax is None:
        xmax = x + width / 2
    # Make adjacent bins share an edge.
    xmin[1:] = xmax[:-1]
    total = np.sum(np.abs(count))
    density = (count / width) / total
    peak_count = np.max(np.abs(count))
    peak_density = np.max(np.abs(density))
    # NOTE(review): ndensity divides *count* (not density) by the density
    # peak — looks suspect but is preserved as-is; confirm against upstream.
    return pd.DataFrame({
        'count': count,
        'x': x,
        'xmin': xmin,
        'xmax': xmax,
        'width': width,
        'density': density,
        'ncount': count / peak_count,
        'ndensity': count / peak_density,
    })
25,518
def fuzzybreaks(scale, breaks=None, boundary=None,
                binwidth=None, bins=30, right=True):
    """Compute fuzzy breaks.

    For a discrete scale, returns breaks half-way between the levels.
    For a continuous scale, computes evenly spaced breaks from the scale
    limits, the requested bin count/width and an optional boundary.
    """
    if isinstance(scale, scale_discrete):
        breaks = scale.get_breaks()
        return -0.5 + np.arange(1, len(breaks) + 2)
    else:
        if breaks is not None:
            breaks = scale.transform(breaks)
    if breaks is not None:
        return breaks
    # When the user specified a binwidth, the number of bins is derived
    # from it below.
    recompute_bins = binwidth is not None
    srange = scale.limits
    if binwidth is None or np.isnan(binwidth):
        binwidth = (srange[1] - srange[0]) / bins
    if boundary is None or np.isnan(boundary):
        boundary = round_any(srange[0], binwidth, np.floor)
    if recompute_bins:
        # BUG FIX: np.int was removed in NumPy 1.24; use the builtin int.
        bins = int(np.ceil((srange[1] - boundary) / binwidth))
    breaks = np.arange(boundary, srange[1] + binwidth, binwidth)
    return _adjust_breaks(breaks, right)
25,519
def build(self, plot):
    """Build the guides.

    Reads the legend-related themeables off the plot theme, trains the
    guides from the plot scales, renders them and assembles them into a
    single box.  Returns None when there is nothing to show.
    """
    get_property = plot.theme.themeables.property
    # Each themeable may be absent from the theme; fall back to defaults.
    with suppress(KeyError):
        self.box_direction = get_property('legend_box')
    if self.box_direction is None:
        self.box_direction = 'vertical'
    with suppress(KeyError):
        self.position = get_property('legend_position')
    if self.position == 'none':
        # Legends explicitly disabled.
        return
    with suppress(KeyError):
        self.box_align = get_property('legend_box_just')
    if self.box_align is None:
        if self.position in {'left', 'right'}:
            tmp = 'left'
        else:
            tmp = 'center'
        self.box_align = tmp
    with suppress(KeyError):
        self.box_margin = get_property('legend_box_margin')
    if self.box_margin is None:
        self.box_margin = 10
    with suppress(KeyError):
        self.spacing = get_property('legend_spacing')
    if self.spacing is None:
        self.spacing = 10
    gdefs = self.train(plot)
    if not gdefs:
        return
    gdefs = self.merge(gdefs)
    gdefs = self.create_geoms(gdefs, plot)
    if not gdefs:
        return
    gboxes = self.draw(gdefs, plot.theme)
    bigbox = self.assemble(gboxes, gdefs, plot.theme)
    return bigbox
25,520
def train(self, plot):
    """Compute all the required guides.

    Walks every aesthetic of every scale, validates its guide, fills in a
    title if none was given, and trains the guide against the scale.
    Returns the list of trained guide definitions.
    """
    gdefs = []
    for scale in plot.scales:
        for output in scale.aesthetics:
            # A guide of None or False means "no guide for this aesthetic".
            guide = self.get(output, scale.guide)
            if guide is None or guide is False:
                continue
            guide = self.validate(guide)
            if (guide.available_aes != 'any'
                    and scale.aesthetics[0] not in guide.available_aes):
                raise PlotnineError(
                    "{} cannot be used for {}".format(
                        guide.__class__.__name__, scale.aesthetics))
            if is_waive(guide.title):
                if scale.name:
                    guide.title = scale.name
                else:
                    try:
                        guide.title = str(plot.labels[output])
                    except KeyError:
                        # No label to derive a title from: skip this guide.
                        warn("Cannot generate legend for the {!r} "
                             "aesthetic. Make sure you have mapped a "
                             "variable to it".format(output),
                             PlotnineWarning)
                        continue
            guide = guide.train(scale, output)
            if guide is not None:
                gdefs.append(guide)
    return gdefs
25,521
def validate(self, guide):
    """Validate guide object, constructing it from a name if necessary."""
    if is_string(guide):
        # Look up e.g. 'legend' -> guide_legend and instantiate it.
        key = 'guide_{}'.format(guide)
        guide = Registry[key]()
    if isinstance(guide, guide_class):
        return guide
    raise PlotnineError("Unknown guide: {}".format(guide))
25,522
def create_geoms(self, gdefs, plot):
    """Add geoms to the guide definitions.

    Definitions whose create_geoms() returns a falsy value are dropped.
    """
    realised = (gdef.create_geoms(plot) for gdef in gdefs)
    return [gdef for gdef in realised if gdef]
25,523
def draw(self, gdefs, theme):
    """Draw out each guide definition.

    All definitions are given the theme and their defaults first, then
    every one is drawn.
    """
    def prepare(gdef):
        gdef.theme = theme
        gdef._set_defaults()
        return gdef
    prepared = [prepare(g) for g in gdefs]
    return [g.draw() for g in prepared]
25,524
def assemble(self, gboxes, gdefs, theme):
    """Put together all the guide boxes into one packed box."""
    DEFAULT_ORDER = 100
    for gdef in gdefs:
        if gdef.order == 0:
            # 0 means "unspecified"; push unspecified guides to the end.
            gdef.order = DEFAULT_ORDER
        elif not 0 <= gdef.order <= 99:
            raise PlotnineError("'order' for a guide should be "
                                "between 0 and 99")
    idx = np.argsort([gdef.order for gdef in gdefs])
    ordered_boxes = [gboxes[i] for i in idx]
    if self.box_direction == 'vertical':
        packer = VPacker
    elif self.box_direction == 'horizontal':
        packer = HPacker
    else:
        raise PlotnineError("'legend_box' should be either "
                            "'vertical' or 'horizontal'")
    return packer(children=ordered_boxes, align=self.box_align,
                  pad=self.box_margin, sep=self.spacing)
25,525
def add_entries_to_gallery(app, doctree, docname):
    """Add entries to the gallery node."""
    if docname != 'gallery' or not has_gallery(app.builder.name):
        return
    # NOTE(review): guarding [0] with TypeError (not IndexError) is odd but
    # preserved — confirm what doctree.traverse returns when empty.
    try:
        node = doctree.traverse(gallery)[0]
    except TypeError:
        return
    content = [
        nodes.raw('', text=entry.html, format='html')
        for entry in app.env.gallery_entries
    ]
    node.replace_self(content)
25,526
def html(self):
    """Return html for the entry."""
    tooltip = ('tooltip="{}"'.format(self.description)
               if self.description else '')
    return entry_html(
        title=self.title,
        thumbnail=self.thumbnail,
        link=self.html_link,
        tooltip=tooltip,
    )
25,527
def _check_log_scale(base, sides, scales, coord):
    """Check the log transforms.

    Warns when annotation_logticks is used on a non-log axis, or when the
    requested base disagrees with the scale's base.  Returns the resolved
    (base_x, base_y).
    """
    def is_log(trans):
        return (trans.__class__.__name__.startswith('log')
                and hasattr(trans, 'base'))
    base_x, base_y = base, base
    x_is_log = is_log(scales.x.trans)
    y_is_log = is_log(scales.y.trans)
    if isinstance(coord, coord_flip):
        x_is_log, y_is_log = y_is_log, x_is_log
    # Ticks on top/bottom annotate the x axis.
    if 't' in sides or 'b' in sides:
        if base_x is None:
            base_x = scales.x.trans.base
        if not x_is_log:
            warnings.warn(
                "annotation_logticks for x-axis which does not have "
                "a log scale. The logticks may not make sense.",
                PlotnineWarning)
        elif x_is_log and base_x != scales.x.trans.base:
            warnings.warn(
                "The x-axis is log transformed in base {} ,"
                "but the annotation_logticks are computed in base {}"
                "".format(base_x, scales.x.trans.base), PlotnineWarning)
    # Ticks on left/right annotate the y axis.
    if 'l' in sides or 'r' in sides:
        if base_y is None:
            base_y = scales.y.trans.base
        if not y_is_log:
            warnings.warn(
                "annotation_logticks for y-axis which does not have "
                "a log scale. The logticks may not make sense.",
                PlotnineWarning)
        # BUG FIX: this branch compared (and reported) scales.x.trans.base
        # instead of scales.y.trans.base — a copy-paste error from the x
        # branch above.
        elif y_is_log and base_y != scales.y.trans.base:
            warnings.warn(
                "The y-axis is log transformed in base {} ,"
                "but the annotation_logticks are computed in base {}"
                "".format(base_y, scales.y.trans.base), PlotnineWarning)
    return base_x, base_y
25,528
def _calc_ticks(value_range, base):
    """Calculate tick marks within a range.

    :param value_range: (min, max) in already-log-transformed coordinates
    :param base: the logarithm base (int)
    :returns: (major, middle, minor) tick locations
    """
    def _minor(x, mid_idx):
        # All interior ticks except the middle one.
        return np.hstack([x[1:mid_idx], x[mid_idx + 1:-1]])
    # Snap the range outward to whole powers of the base.
    low = np.floor(value_range[0])
    high = np.ceil(value_range[1])
    arr = base ** np.arange(low, float(high + 1))
    n_ticks = base - 1
    # For each decade [b1, b2], the positions of base-1 evenly spaced
    # values, mapped back to log space (log is a module-level helper
    # taking (x, base)).
    breaks = [log(np.linspace(b1, b2, n_ticks + 1), base)
              for (b1, b2) in list(zip(arr, arr[1:]))]
    major = np.array([x[0] for x in breaks] + [breaks[-1][-1]])
    if n_ticks % 2:
        # Odd interior count: single out the middle tick of each decade.
        mid_idx = n_ticks // 2
        middle = [x[mid_idx] for x in breaks]
        minor = np.hstack([_minor(x, mid_idx) for x in breaks])
    else:
        middle = []
        minor = np.hstack([x[1:-1] for x in breaks])
    return major, middle, minor
25,529
def get_option(name):
    """Get package option.

    Options live as module-level globals; the accessor names themselves
    are not valid options.
    """
    d = globals()
    invalid = name in {'get_option', 'set_option'} or name not in d
    if invalid:
        from ..exceptions import PlotnineError
        raise PlotnineError("Unknown option {}".format(name))
    return d[name]
25,530
def set_option(name, value):
    """Set package option and return the previous value."""
    d = globals()
    invalid = name in {'get_option', 'set_option'} or name not in d
    if invalid:
        from ..exceptions import PlotnineError
        raise PlotnineError("Unknown option {}".format(name))
    old, d[name] = d[name], value
    return old
25,531
def expand_limits(**kwargs):
    """Expand the limits of any aesthetic using data.

    Returns an invisible geom_blank layer that carries the data.
    """
    def listify(key):
        # Scalars must become single-element lists for DataFrame creation.
        with suppress(KeyError):
            if isinstance(kwargs[key], (int, float, str)):
                kwargs[key] = [kwargs[key]]
    if isinstance(kwargs, dict):
        listify('x')
        listify('y')
        data = pd.DataFrame(kwargs)
    else:
        data = kwargs
    mapping = {ae: ae for ae in set(kwargs) & all_aesthetics}
    return geom_blank(mapping=mapping, data=data, inherit_aes=False)
25,532
def get_scale(self, gg):
    """Create a scale for this limit's aesthetic.

    If any layer maps the aesthetic to a factor()/Categorical() expression,
    the limits are treated as categorical.
    """
    ae = self.aesthetic
    series = self.limits_series
    ae_values = []
    # Collect the string mapping expressions for this aesthetic.
    for layer in gg.layers:
        with suppress(KeyError):
            value = layer.mapping[ae]
            # NOTE(review): reconstructed nesting — the isinstance check is
            # assumed to sit inside the suppress block; confirm upstream.
            if isinstance(value, str):
                ae_values.append(value)
    for value in ae_values:
        if ('factor(' in value or 'Categorical(' in value):
            series = pd.Categorical(self.limits_series)
            break
    return make_scale(self.aesthetic, series,
                      limits=self.limits, trans=self.trans)
25,533
def from_stat(stat):
    """Return an instantiated geom object for the given stat.

    The stat's 'geom' parameter may be a geom instance, a geom class, or
    a geom name (with or without the 'geom_' prefix).
    """
    name = stat.params['geom']
    if issubclass(type(name), geom):
        # Already an instantiated geom.
        return name
    if is_string(name):
        if not name.startswith('geom_'):
            name = 'geom_{}'.format(name)
        klass = Registry[name]
    elif isinstance(name, type) and issubclass(name, geom):
        klass = name
    else:
        raise PlotnineError('Unknown geom of type {}'.format(type(name)))
    return klass(stat=stat, **stat._kwargs)
25,534
def aesthetics(cls):
    """Return all the aesthetics for this geom.

    Includes the British spellings of any American-spelled color aesthetics.
    """
    main = cls.DEFAULT_AES.keys() | cls.REQUIRED_AES
    extra = {'group'}
    for us_spelling, gb_spelling in (('color', 'colour'),
                                     ('outlier_color', 'outlier_colour')):
        if us_spelling in main:
            extra.add(gb_spelling)
    return main | extra
25,535
def draw_layer(self, data, layout, coord, **params):
    """Draw layer across all panels.

    Dispatches each panel's subset of the data to draw_panel.
    """
    for pid, pdata in data.groupby('PANEL'):
        if not len(pdata):
            continue
        # PANEL ids are 1-based; layout containers are 0-based.
        ploc = pid - 1
        self.draw_panel(pdata, layout.panel_params[ploc], coord,
                        layout.axs[ploc], **params)
25,536
def draw_panel(self, data, panel_params, coord, ax, **params):
    """Plot all groups in the panel, one draw_group call per group."""
    for _, group_data in data.groupby('group'):
        # Give each group a clean 0..n-1 index before drawing.
        group_data.reset_index(inplace=True, drop=True)
        self.draw_group(group_data, panel_params, coord, ax, **params)
25,537
def _verify_arguments ( self , kwargs ) : geom_stat_args = kwargs . keys ( ) | self . _stat . _kwargs . keys ( ) unknown = ( geom_stat_args - self . aesthetics ( ) - self . DEFAULT_PARAMS . keys ( ) - self . _stat . aesthetics ( ) - self . _stat . DEFAULT_PARAMS . keys ( ) - { 'data' , 'mapping' , 'show_legend' , 'inherit_aes' } ) if unknown : msg = ( "Parameters {}, are not understood by " "either the geom, stat or layer." ) raise PlotnineError ( msg . format ( unknown ) )
Verify arguments passed to the geom
25,538
def handle_na(self, data):
    """Remove rows with NaN values in the required/non-missing aesthetics.

    Delegates to remove_missing, honoring the layer's na_rm parameter;
    the class name is passed along for the warning message.
    """
    return remove_missing(data,
                          self.params['na_rm'],
                          list(self.REQUIRED_AES | self.NON_MISSING_AES),
                          self.__class__.__name__)
25,539
def setup_params(self, data):
    """Verify, modify & return a copy of the params.

    Determines which column ('ymax' or 'y') drives the stacking.
    """
    if 'ymax' in data:
        if any((data['ymin'] != 0) & (data['ymax'] != 0)):
            warn("Stacking not well defined when not "
                 "anchored on the axis.", PlotnineWarning)
        var = 'ymax'
    elif 'y' in data:
        var = 'y'
    else:
        warn("Stacking requires either ymin & ymax or y "
             "aesthetics. Maybe you want position = 'identity'?",
             PlotnineWarning)
        var = None
    new_params = self.params.copy()
    new_params.update(var=var, fill=self.fill)
    return new_params
25,540
def strategy(data, params):
    """Stack overlapping intervals.

    Assumes that each set has the same horizontal position.
    """
    vjust = params['vjust']
    y = data['y'].copy()
    y[np.isnan(y)] = 0
    # Running totals, starting from the axis.
    heights = np.append(0, y.cumsum())
    if params['fill']:
        # Normalize so the stack spans [0, 1] (or [-1, 0]).
        heights = heights / np.abs(heights[-1])
    lower, upper = heights[:-1], heights[1:]
    data['ymin'] = np.minimum(lower, upper)
    data['ymax'] = np.maximum(lower, upper)
    data['y'] = (1 - vjust) * data['ymin'] + vjust * data['ymax']
    return data
25,541
def densitybin(x, weight=None, binwidth=None, bins=None, rangee=None):
    """Do density binning.

    It does not collapse each bin with a count; each data point gets the
    id of the bin it falls into and the center of that bin.
    """
    if all(pd.isnull(x)):
        return pd.DataFrame()
    if weight is None:
        weight = np.ones(len(x))
    weight = np.asarray(weight)
    weight[np.isnan(weight)] = 0
    if rangee is None:
        rangee = np.min(x), np.max(x)
    if bins is None:
        bins = 30
    if binwidth is None:
        binwidth = np.ptp(rangee) / bins
    # Work on the sorted data so a single sweep assigns bins.
    order = np.argsort(x)
    x, weight = x[order], weight[order]
    bin_ids = []
    current_bin = 0
    bin_end = -np.inf
    for value in x:
        if value >= bin_end:
            # Start a new bin anchored at this value.
            bin_end = value + binwidth
            current_bin += 1
        bin_ids.append(current_bin)
    results = pd.DataFrame({
        'x': x,
        'bin': bin_ids,
        'binwidth': binwidth,
        'weight': weight,
    })
    results['bincenter'] = results.groupby('bin')['x'].transform(
        lambda s: (s.min() + s.max()) / 2)
    return results
25,542
def theme_get():
    """Return the default theme.

    The default theme is the one set (by option) as the current theme, or
    theme_gray if none is set.
    """
    from .theme_gray import theme_gray
    current = get_option('current_theme')
    # The option may hold a theme class rather than an instance.
    if isinstance(current, type):
        current = current()
    return current or theme_gray()
25,543
def apply(self, ax):
    """Apply this theme: apply each themeable to the axes, in order."""
    for themeable in self.themeables.values():
        themeable.apply(ax)
25,544
def apply_rcparams(self):
    """Set the rcParams of matplotlib from this theme."""
    from matplotlib import rcParams
    for key, val in self.rcParams.items():
        try:
            rcParams[key] = val
        except Exception as e:
            # BUG FIX: the message template had a single placeholder but was
            # formatted with three arguments, so the offending key/value
            # were silently dropped from the error.
            msg = "Setting rcParams {!r} to {!r} raised an Exception: {}"
            raise PlotnineError(msg.format(key, val, e))
25,545
def rcParams(self):
    """Return rcParams dict for this theme.

    The base params are deep-copied when possible, then each themeable's
    params are merged on top.
    """
    try:
        params = deepcopy(self._rcParams)
    except NotImplementedError:
        # Some values are not deep-copyable; fall back to a shallow copy.
        params = copy(self._rcParams)
    for themeable in self.themeables.values():
        params.update(themeable.rcParams)
    return params
25,546
def add_theme(self, other, inplace=False):
    """Add themes together.

    A complete theme replaces this one entirely; otherwise the other
    theme's themeables are merged in.
    """
    if other.complete:
        return other
    target = self if inplace else deepcopy(self)
    target.themeables.update(deepcopy(other.themeables))
    return target
25,547
def _draw_plots(self, plots):
    """Plot and return the figure and artists.

    The first plot creates the figure and axes; every subsequent plot is
    drawn onto the same figure, and only its newly created artists are
    collected as one animation frame.
    """
    # Running counts of how many artists each axes already holds per
    # artist category, so each frame can pick out only its own artists.
    artist_offsets = {
        'collections': [],
        'patches': [],
        'lines': [],
        'texts': [],
        'artists': []
    }
    scale_limits = dict()

    def initialise_artist_offsets(n):
        # One counter per axes, per artist category.
        for artist_type in artist_offsets:
            artist_offsets[artist_type] = [0] * n

    def get_frame_artists(plot):
        # Collect artists added since the previous frame and advance the
        # offsets past them.
        frame_artists = []
        for i, ax in enumerate(plot.axs):
            for name in artist_offsets:
                start = artist_offsets[name][i]
                new_artists = getattr(ax, name)[start:]
                frame_artists.extend(new_artists)
                artist_offsets[name][i] += len(new_artists)
        return frame_artists

    def set_scale_limits(plot):
        # Record the limits of the first frame; all other frames must match.
        for sc in plot.scales:
            ae = sc.aesthetics[0]
            scale_limits[ae] = sc.limits

    def check_scale_limits(plot, frame_no):
        # Animation frames must share scales, otherwise artists jump around.
        if len(scale_limits) != len(plot.scales):
            raise PlotnineError(
                "All plots must have the same number of scales "
                "as the first plot of the animation.")
        for sc in plot.scales:
            ae = sc.aesthetics[0]
            if ae not in scale_limits:
                raise PlotnineError(
                    "The plot for frame {} does not have a scale "
                    "for the {} aesthetic.".format(frame_no, ae))
            if sc.limits != scale_limits[ae]:
                raise PlotnineError(
                    "The {} scale of plot for frame {} has different "
                    "limits from those of the first frame."
                    "".format(ae, frame_no))

    figure = None
    axs = None
    artists = []
    for frame_no, p in enumerate(plots):
        if figure is None:
            # First frame: create the figure and remember the scale limits.
            figure, plot = p.draw(return_ggplot=True)
            axs = plot.axs
            initialise_artist_offsets(len(axs))
            set_scale_limits(plot)
        else:
            # Subsequent frames draw onto the existing figure; copy the
            # plot first so drawing does not mutate the caller's object.
            p = copy(p)
            plot = p._draw_using_figure(figure, axs)
            try:
                check_scale_limits(plot, frame_no)
            except PlotnineError as err:
                # Close the half-built figure before propagating.
                plt.close(figure)
                raise err
        artists.append(get_frame_artists(plot))
    if figure is None:
        # No frames at all: return an empty figure.
        figure = plt.figure()
    return figure, artists
25,548
def flip_labels(obj):
    """Rename fields x to y and y to x (swap), using z as a temporary.

    Operates in place on any dict-like with keys(), pop() and item
    assignment; returns the same object.
    """
    def rename_prefix(old, new):
        for label in list(obj.keys()):
            if label.startswith(old):
                obj[new + label[1:]] = obj.pop(label)
    # Three-step swap: x -> z, y -> x, z -> y.
    rename_prefix('x', 'z')
    rename_prefix('y', 'x')
    rename_prefix('z', 'y')
    return obj
25,549
def bootstrap_statistics(series, statistic, n_samples=1000,
                         confidence_interval=0.95, random_state=None):
    """Bootstrap a statistic with percentile confidence limits.

    Default parameters taken from R's Hmisc smean.cl.boot.
    """
    rng = np.random if random_state is None else random_state
    alpha = 1 - confidence_interval
    size = (n_samples, len(series))
    # Resample with replacement, one row per bootstrap sample.
    inds = rng.randint(0, len(series), size=size)
    sampled_stats = np.sort(statistic(series.values[inds], axis=1))
    lower = sampled_stats[int((alpha / 2) * n_samples)]
    upper = sampled_stats[int((1 - alpha / 2) * n_samples)]
    return pd.DataFrame({'ymin': lower,
                         'ymax': upper,
                         'y': [statistic(series)]})
25,550
def mean_cl_boot(series, n_samples=1000, confidence_interval=0.95,
                 random_state=None):
    """Bootstrapped mean with confidence limits.

    Thin wrapper over bootstrap_statistics with np.mean as the statistic.
    """
    return bootstrap_statistics(series, np.mean,
                                n_samples=n_samples,
                                confidence_interval=confidence_interval,
                                random_state=random_state)
25,551
def mean_sdl(series, mult=2):
    """Mean plus or minus a constant times the standard deviation."""
    center = series.mean()
    spread = mult * series.std()
    return pd.DataFrame({'y': [center],
                         'ymin': center - spread,
                         'ymax': center + spread})
25,552
def median_hilow(series, confidence_interval=0.95):
    """Median and a selected pair of outer quantiles having equal tail areas."""
    tail = (1 - confidence_interval) / 2
    low, high = np.percentile(series, [100 * tail, 100 * (1 - tail)])
    return pd.DataFrame({'y': [np.median(series)],
                         'ymin': low,
                         'ymax': high})
25,553
def mean_se(series, mult=1):
    """Calculate mean and standard errors on either side."""
    center = np.mean(series)
    # Standard error of the mean (population variance, ddof=0).
    se = mult * np.sqrt(np.var(series) / len(series))
    return pd.DataFrame({'y': [center],
                         'ymin': center - se,
                         'ymax': center + se})
25,554
def set_breaks_and_labels(self, ranges, layout_info, pidx):
    """Add breaks and labels to the axes of panel *pidx*."""
    ax = self.axs[pidx]
    # Delegate the heavy lifting to the base facet implementation.
    facet.set_breaks_and_labels(self, ranges, layout_info, pidx)
    for axis, side in ((ax.xaxis, 'bottom'), (ax.yaxis, 'left')):
        axis.set_ticks_position(side)
25,555
def spaceout_and_resize_panels(self):
    """Adjust the space between the panels.

    Resizes the figure height so the panels honor the requested aspect
    ratio (from the theme, or derived from the coordinate system).
    """
    figure = self.figure
    theme = self.theme
    try:
        aspect_ratio = theme.themeables.property('aspect_ratio')
    except KeyError:
        # No themed aspect ratio: ask the coordinate system (may be None).
        aspect_ratio = self.coordinates.aspect(self.layout.panel_params[0])
    if aspect_ratio is None:
        return
    # Current subplot margins as fractions of the figure size.
    left = figure.subplotpars.left
    right = figure.subplotpars.right
    top = figure.subplotpars.top
    bottom = figure.subplotpars.bottom
    W, H = figure.get_size_inches()
    # Panel width in inches, then the height that gives the aspect ratio.
    w = (right - left) * W
    h = w * aspect_ratio
    # Total figure height needed so the panel area has height h.
    H = h / (top - bottom)
    figure.set_figheight(H)
25,556
def _draw_segments(data, ax, **params):
    """Draw independent line segments between all the points.

    Builds one LineCollection for all groups; each consecutive pair of
    points within a group becomes one segment styled by the first point
    of the pair.
    """
    color = to_rgba(data['color'], data['alpha'])
    # Positional indices of the first point of each segment.
    indices = []
    segments = []
    for _, df in data.groupby('group'):
        idx = df.index
        # A group of n points yields n-1 segments.
        indices.extend(idx[:-1])
        # NOTE(review): iloc with the group's index labels assumes `data`
        # has a default RangeIndex — confirm at the call sites.
        x = data['x'].iloc[idx]
        y = data['y'].iloc[idx]
        segments.append(make_line_segments(x, y, ispath=True))
    segments = np.vstack(segments)
    if color is None:
        edgecolor = color
    else:
        edgecolor = [color[i] for i in indices]
    linewidth = data.loc[indices, 'size']
    linestyle = data.loc[indices, 'linetype']
    coll = mcoll.LineCollection(segments,
                                edgecolor=edgecolor,
                                linewidth=linewidth,
                                linestyle=linestyle,
                                zorder=params['zorder'])
    ax.add_collection(coll)
25,557
def _draw_lines(data, ax, **params):
    """Draw a path with the same characteristics from the first
    point to the last point.

    All aesthetics are taken from the first row of ``data``.
    """
    color = to_rgba(data['color'].iloc[0], data['alpha'].iloc[0])
    join_style = _get_joinstyle(data, params)
    lines = mlines.Line2D(data['x'],
                          data['y'],
                          color=color,
                          linewidth=data['size'].iloc[0],
                          linestyle=data['linetype'].iloc[0],
                          zorder=params['zorder'],
                          **join_style)
    ax.add_artist(lines)
Draw a path with the same characteristics from the first point to the last point
25,558
def get_paths(self, x1, y1, x2, y2, panel_params, coord, ax):
    """Compute paths that create the arrow heads.

    Returns a list of ``matplotlib.path.Path``, one 2-line arrow
    head per segment, anchored at the (x1, y1) ends.
    """
    Path = mpath.Path

    # Reusable vertex/code buffers; only the first 3 vertices change
    verts = [None, None, None, (0, 0)]
    codes = [Path.MOVETO, Path.LINETO, Path.LINETO, Path.STOP]
    slc = slice(0, 3)

    # Physical figure size (inches) and data-space panel ranges,
    # used to keep the arrow head size independent of the scales
    fig = ax.get_figure()
    width, height = fig.get_size_inches()
    ranges = coord.range(panel_params)
    width_ = np.ptp(ranges.x)
    height_ = np.ptp(ranges.y)

    # Arrow length in data units along each dimension
    lx = self.length * width_ / width
    ly = self.length * height_ / height

    # Half-angle of the head, in radians
    a = self.angle * np.pi / 180

    xdiff, ydiff = x2 - x1, y2 - y1
    rotations = np.arctan2(ydiff / ly, xdiff / lx)

    # The two outer vertices of each head
    v1x = x1 + lx * np.cos(rotations + a)
    v1y = y1 + ly * np.sin(rotations + a)
    v2x = x1 + lx * np.cos(rotations - a)
    v2y = y1 + ly * np.sin(rotations - a)

    paths = []
    for pt in zip(v1x, v1y, x1, y1, v2x, v2y):
        verts[slc] = [pt[:2], pt[2:4], pt[4:]]
        paths.append(Path(verts, codes))
    return paths
Compute paths that create the arrow heads
25,559
def combine_vars(data, environment=None, vars=None, drop=True):
    """Generate all combinations of data needed for facetting.

    The first data frame in the list should be the default data
    for the plot. Other data frames in the list are ones that are
    added to the layers.

    Parameters
    ----------
    data : list of dataframe
        Layer data (entries may be None).
    environment : environment
        Evaluation environment for facet expressions.
    vars : list of str
        Facetting variables/expressions.
    drop : bool
        Whether to drop combinations that do not appear in the data.

    Returns
    -------
    pandas.DataFrame
        One row per panel combination.
    """
    if not vars:
        return pd.DataFrame()

    # For each layer, compute the facet values
    values = [eval_facet_vars(df, vars, environment)
              for df in data if df is not None]

    # Layers that contain all the facetting variables can
    # determine the panels
    has_all = [x.shape[1] == len(vars) for x in values]
    if not any(has_all):
        raise PlotnineError(
            "At least one layer must contain all variables " +
            "used for facetting")

    base = pd.concat([x for i, x in enumerate(values) if has_all[i]],
                     axis=0)
    base = base.drop_duplicates()

    if not drop:
        base = unique_combs(base)

    # sorts according to order of factor levels
    base = base.sort_values(list(base.columns))

    # Systematically add on missing combinations from layers that
    # only have a subset of the facetting variables.
    # (Index.difference/intersection replace the `-`/`&` operators,
    # whose set semantics were removed from pandas; DataFrame.append
    # was removed in pandas 2.0 in favour of pd.concat.)
    for i, value in enumerate(values):
        if has_all[i] or len(value.columns) == 0:
            continue
        old = base.loc[:, base.columns.difference(value.columns)]
        new = value.loc[
            :, base.columns.intersection(value.columns)
        ].drop_duplicates()
        if not drop:
            new = unique_combs(new)
        base = pd.concat([base, cross_join(old, new)],
                         ignore_index=True)

    if len(base) == 0:
        raise PlotnineError(
            "Faceting variables must have at least one value")

    base = base.reset_index(drop=True)
    return base
Base layout function that generates all combinations of data needed for facetting The first data frame in the list should be the default data for the plot . Other data frames in the list are ones that are added to the layers .
25,560
def unique_combs(df):
    """Return a dataframe with all possible combinations of the
    values in the columns."""
    uniques = (df[col].unique() for col in df)
    combos = list(itertools.product(*uniques))
    out = pd.DataFrame(combos, columns=df.columns)

    # itertools.product loses the dtypes; restore them per column
    for col in df:
        out[col] = out[col].astype(df[col].dtype, copy=False)
    return out
Return data frame with all possible combinations of the values in the columns
25,561
def eval_facet_vars(data, vars, env):
    """Evaluate facet variables.

    Returns a dataframe (indexed like ``data``) with one column per
    facet variable/expression that could be resolved.
    """
    # Allow the identity function in facet expressions
    def I(value):
        return value

    env = env.with_outer_namespace({'I': I})
    facet_vals = pd.DataFrame(index=data.index)

    for name in vars:
        if name in data:
            # Plain column reference
            res = data[name]
        elif str.isidentifier(name):
            # A bare name that is not a column — nothing to evaluate
            continue
        else:
            # An expression; evaluate it against the data columns
            try:
                res = env.eval(name, inner_namespace=data)
            except NameError:
                continue
        facet_vals[name] = res

    return facet_vals
Evaluate facet variables
25,562
def map(self, data, layout):
    """Assign data points to panels.

    Subclasses must override this method.

    Raises
    ------
    NotImplementedError
        Always; this is an abstract method.
    """
    msg = "{} should implement this method."
    # Bug fix: was `self.__class.__name__` — inside a class body that
    # name-mangles to `self._<Class>__class` and raises AttributeError
    # instead of the intended NotImplementedError.
    raise NotImplementedError(msg.format(self.__class__.__name__))
Assign data points to panels
25,563
def train_position_scales(self, layout, layers):
    """Compute ranges for the x and y scales.

    Trains the panel x/y scales on every layer's data and
    returns ``self``.
    """
    _layout = layout.layout
    panel_scales_x = layout.panel_scales_x
    panel_scales_y = layout.panel_scales_y

    for layer in layers:
        data = layer.data
        # Map each data row onto its panel's row in the layout table
        match_id = match(data['PANEL'], _layout['PANEL'])

        if panel_scales_x:
            x_vars = list(set(panel_scales_x[0].aesthetics) &
                          set(data.columns))
            SCALE_X = _layout['SCALE_X'].iloc[match_id].tolist()
            panel_scales_x.train(data, x_vars, SCALE_X)

        if panel_scales_y:
            y_vars = list(set(panel_scales_y[0].aesthetics) &
                          set(data.columns))
            SCALE_Y = _layout['SCALE_Y'].iloc[match_id].tolist()
            panel_scales_y.train(data, y_vars, SCALE_Y)

    return self
Compute ranges for the x and y scales
25,564
def _create_subplots(self, fig, layout):
    """Create subplots and return axs.

    Creates an ``nrow x ncol`` grid of axes, reorders it to match
    the panel direction (``dir``) and table orientation
    (``as_table``), and deletes any axes beyond the panel count.
    """
    num_panels = len(layout)
    axsarr = np.empty((self.nrow, self.ncol), dtype=object)

    # Fill the grid row by row, in subplot order
    n = 1
    for row in range(self.nrow):
        for col in range(self.ncol):
            axsarr[row, col] = fig.add_subplot(self.nrow, self.ncol, n)
            n += 1

    # Rearrange so that raveling yields panel order
    if self.dir == 'h':
        order = 'C'
        if not self.as_table:
            axsarr = axsarr[::-1]
    elif self.dir == 'v':
        order = 'F'
        if not self.as_table:
            axsarr = np.array([row[::-1] for row in axsarr])

    axs = axsarr.ravel(order)

    # Remove unused axes (grid cells beyond the panel count)
    for ax in axs[num_panels:]:
        fig.delaxes(ax)
    axs = axs[:num_panels]
    return axs
Create subplots and return axs
25,565
def make_axes(self, figure, layout, coordinates):
    """Create and return Matplotlib axes."""
    axs = self._create_subplots(figure, layout)

    # Keep handles to the corner axes and the figure on the facet
    self.first_ax = figure.axes[0]
    self.last_ax = figure.axes[-1]
    self.figure = figure
    self.axs = axs
    return axs
Create and return Matplotlib axes
25,566
def strip_size(self, location='top', num_lines=None):
    """Breadth of the strip background in inches.

    Parameters
    ----------
    location : str
        'top' or 'right'.
    num_lines : int, optional
        Number of text lines; defaults to the number of facet
        variables on that side.
    """
    dpi = 72
    theme = self.theme
    get_property = theme.themeables.property

    if location == 'right':
        strip_name = 'strip_text_y'
        num_lines = num_lines or self.num_vars_y
    else:
        strip_name = 'strip_text_x'
        num_lines = num_lines or self.num_vars_x

    if not num_lines:
        return 0

    # Fall back to the theme's rcParams font size when the strip
    # text themeable does not specify one
    try:
        fontsize = get_property(strip_name, 'size')
    except KeyError:
        fontsize = float(theme.rcParams.get('font.size', 10))

    try:
        linespacing = get_property(strip_name, 'linespacing')
    except KeyError:
        linespacing = 1

    # Text height plus the inner margins on either side
    m1, m2 = self.inner_strip_margins(location)
    breadth = (linespacing * fontsize) * num_lines / dpi
    breadth = breadth + (m1 + m2) / dpi
    return breadth
Breadth of the strip background in inches
25,567
def strip_dimensions(self, text_lines, location, pid):
    """Calculate the dimensions of the strip for panel ``pid``.

    Returns a SimpleNamespace with the text position (``x``, ``y``)
    and the background box geometry (``box_x``, ``box_y``,
    ``box_width``, ``box_height``), all in axes coordinates.
    """
    dpi = 72
    num_lines = len(text_lines)
    get_property = self.theme.themeables.property
    ax = self.axs[pid]
    bbox = ax.get_window_extent().transformed(
        self.figure.dpi_scale_trans.inverted())
    # Panel size in inches
    ax_width, ax_height = bbox.width, bbox.height
    strip_size = self.strip_size(location, num_lines)
    m1, m2 = self.inner_strip_margins(location)
    m1, m2 = m1 / dpi, m2 / dpi
    margin = 0

    if location == 'right':
        box_x = 1
        box_y = 0
        box_width = strip_size / ax_width
        box_height = 1
        # Themeable overrides for the background geometry & margin
        with suppress(KeyError):
            box_y = get_property('strip_background_y', 'y')
        with suppress(KeyError):
            box_height = get_property('strip_background_y', 'height')
        with suppress(KeyError):
            margin = get_property('strip_margin_y')
        x = 1 + (strip_size - m2 + m1) / (2 * ax_width)
        y = (2 * box_y + box_height) / 2
        # Slide the strip horizontally by the margin
        hslide = 1 + margin * strip_size / ax_width
        x *= hslide
        box_x *= hslide
    else:
        box_x = 0
        box_y = 1
        box_width = 1
        box_height = strip_size / ax_height
        with suppress(KeyError):
            box_x = get_property('strip_background_x', 'x')
        with suppress(KeyError):
            box_width = get_property('strip_background_x', 'width')
        with suppress(KeyError):
            margin = get_property('strip_margin_x')
        x = (2 * box_x + box_width) / 2
        y = 1 + (strip_size - m1 + m2) / (2 * ax_height)
        # Slide the strip vertically by the margin
        vslide = 1 + margin * strip_size / ax_height
        y *= vslide
        box_y *= vslide

    return types.SimpleNamespace(x=x, y=y,
                                 box_x=box_x, box_y=box_y,
                                 box_width=box_width,
                                 box_height=box_height)
Calculate the dimension
25,568
def draw_strip_text(self, text_lines, location, pid):
    """Create a background patch and put a label on it."""
    ax = self.axs[pid]
    themeable = self.figure._themeable
    dim = self.strip_dimensions(text_lines, location, pid)

    if location == 'right':
        rotation = -90
        label = '\n'.join(reversed(text_lines))
    else:
        rotation = 0
        label = '\n'.join(text_lines)

    rect = mpatch.FancyBboxPatch((dim.box_x, dim.box_y),
                                 width=dim.box_width,
                                 height=dim.box_height,
                                 facecolor='lightgrey',
                                 edgecolor='None',
                                 transform=ax.transAxes,
                                 zorder=2.2,
                                 boxstyle='square, pad=0',
                                 clip_on=False)

    text = mtext.Text(dim.x, dim.y, label,
                      rotation=rotation,
                      verticalalignment='center',
                      horizontalalignment='center',
                      transform=ax.transAxes,
                      zorder=3.3,  # above the background patch
                      clip_on=False)

    ax.add_artist(rect)
    ax.add_artist(text)

    # Register the artists with the theme targets so themeables
    # can restyle them later
    for key in ('strip_text_x', 'strip_text_y',
                'strip_background_x', 'strip_background_y'):
        if key not in themeable:
            themeable[key] = []

    if location == 'right':
        themeable['strip_background_y'].append(rect)
        themeable['strip_text_y'].append(text)
    else:
        themeable['strip_background_x'].append(rect)
        themeable['strip_text_x'].append(text)
Create a background patch and put a label on it
25,569
def format_data(self, value):
    """Return a formatted string representation of a number.

    Scientific notation with 10 digits after the decimal point,
    honouring the current locale.
    """
    s = locale.format_string('%1.10e', (value,))
    return self.fix_minus(s)
Return a formatted string representation of a number .
25,570
def add_interpolated_colorbar(da, colors, direction):
    """Add rastered colorbar to DrawingArea.

    Uses a gouraud-shaded QuadMesh so colors are interpolated
    between the breaks.
    """
    # A single color cannot be interpolated; duplicate it
    if len(colors) == 1:
        colors = [colors[0], colors[0]]
    nbreak = len(colors)

    if direction == 'vertical':
        mesh_width = 1
        mesh_height = nbreak - 1
        linewidth = da.height / mesh_height
        x = np.array([0, da.width])
        y = np.arange(0, nbreak) * linewidth
        X, Y = np.meshgrid(x, y)
        # Normalised color value at each vertex
        Z = Y / y.max()
    else:
        mesh_width = nbreak - 1
        mesh_height = 1
        linewidth = da.width / mesh_width
        x = np.arange(0, nbreak) * linewidth
        y = np.array([0, da.height])
        X, Y = np.meshgrid(x, y)
        Z = X / x.max()

    # Flatten the vertex grid into the (n, 2) coordinate array
    coordinates = np.zeros(
        ((mesh_width + 1) * (mesh_height + 1), 2), dtype=float)
    coordinates[:, 0] = X.ravel()
    coordinates[:, 1] = Y.ravel()

    cmap = ListedColormap(colors)
    # NOTE(review): QuadMesh(width, height, coords, ...) is the
    # pre-3.5 Matplotlib signature — confirm against the pinned
    # matplotlib version.
    coll = mcoll.QuadMesh(mesh_width, mesh_height,
                          coordinates,
                          antialiased=False,
                          shading='gouraud',
                          linewidth=0,
                          cmap=cmap,
                          array=Z.ravel())
    da.add_artist(coll)
Add rastered colorbar to DrawingArea
25,571
def add_segmented_colorbar(da, colors, direction):
    """Add non-rastered colorbar to DrawingArea.

    One flat rectangle per color, tiled along the bar direction.
    """
    nbreak = len(colors)
    verts = []
    if direction == 'vertical':
        linewidth = da.height / nbreak
        x1, x2 = 0, da.width
        for i in range(nbreak):
            y1 = i * linewidth
            y2 = y1 + linewidth
            verts.append(((x1, y1), (x1, y2), (x2, y2), (x2, y1)))
    else:
        linewidth = da.width / nbreak
        y1, y2 = 0, da.height
        for i in range(nbreak):
            x1 = i * linewidth
            x2 = x1 + linewidth
            verts.append(((x1, y1), (x1, y2), (x2, y2), (x2, y1)))

    coll = mcoll.PolyCollection(verts,
                                facecolors=colors,
                                linewidth=0,
                                antialiased=False)
    da.add_artist(coll)
Add non - rastered colorbar to DrawingArea
25,572
def create_labels(da, labels, locations, direction):
    """Return an OffsetBox with label texts.

    Parameters
    ----------
    da : DrawingArea
        Area occupied by the colorbar; its width/height anchor two
        empty texts that fix the extents of the box.
    labels : sequence of str
        Tick labels.
    locations : sequence of float
        Position of each label along the bar.
    direction : str
        'vertical' or 'horizontal'.

    Returns
    -------
    tuple
        ``(labels_box, legend_text)`` — the offset box and the list
        of label Text artists.
    """
    fontsize = 9
    aux_transform = mtransforms.IdentityTransform()
    labels_box = MyAuxTransformBox(aux_transform)
    xs, ys = [0] * len(labels), locations
    ha, va = 'left', 'center'

    # Two empty dummy texts at either end so the box spans the
    # whole bar, not just the labelled portion
    x1, y1 = 0, 0
    x2, y2 = 0, da.height
    if direction == 'horizontal':
        xs, ys = ys, xs
        ha, va = 'center', 'top'
        x2, y2 = da.width, 0

    txt1 = mtext.Text(x1, y1, '',
                      horizontalalignment=ha, verticalalignment=va)
    txt2 = mtext.Text(x2, y2, '',
                      horizontalalignment=ha, verticalalignment=va)
    labels_box.add_artist(txt1)
    labels_box.add_artist(txt2)

    legend_text = []
    # The enumerate index in the original was unused; iterate directly
    for x, y, text in zip(xs, ys, labels):
        txt = mtext.Text(x, y, text, size=fontsize,
                         horizontalalignment=ha,
                         verticalalignment=va)
        labels_box.add_artist(txt)
        legend_text.append(txt)
    return labels_box, legend_text
Return an OffsetBox with label texts
25,573
def create_geoms(self, plot):
    """This guide is not geom based.

    Return ``self`` if at least one layer maps an aesthetic covered
    by this guide (and does not explicitly hide it in
    ``show_legend``), otherwise ``None``.
    """
    for layer in plot.layers:
        exclude = set()
        if isinstance(layer.show_legend, dict):
            layer.show_legend = rename_aesthetics(layer.show_legend)
            exclude = {ae for ae, val in layer.show_legend.items()
                       if not val}
        elif layer.show_legend not in (None, True):
            # Legend explicitly turned off for this layer
            continue

        matched = self.legend_aesthetics(layer, plot)
        if set(matched) - exclude:
            break
    else:
        # No layer had a matching, visible aesthetic
        return None

    return self
This guide is not geom based
25,574
def draw(self, renderer):
    """Draw the children, scaled for the renderer's dpi."""
    dpi_cor = renderer.points_to_pixels(1.)
    self.dpi_transform.clear()
    self.dpi_transform.scale(dpi_cor, dpi_cor)

    for child in self._children:
        child.draw(renderer)

    self.stale = False
Draw the children
25,575
def from_class_name(name, theme_element):
    """Create a themeable by name.

    Raises
    ------
    PlotnineError
        If ``name`` is not a registered themeable class.
    """
    msg = "No such themeable element {}".format(name)
    try:
        klass = themeable._registry[name]
    except KeyError:
        raise PlotnineError(msg)

    # Registry may hold non-themeable entries; reject those too
    if not issubclass(klass, themeable):
        raise PlotnineError(msg)

    return klass(theme_element)
Create an themeable by name
25,576
def merge(self, other):
    """Merge the properties of ``other`` into ``self``.

    Raises
    ------
    ValueError
        If either themeable is blank — merging is undefined then.
    """
    if self.is_blank() or other.is_blank():
        raise ValueError('Cannot merge if there is a blank.')
    self.properties.update(other.properties)
Merge properties of other into self
25,577
def update(self, other):
    """Update themeables with those from ``other``.

    A new themeable is merged into itself and into every ancestor
    themeable already present, so more specific settings override
    inherited ones.
    """
    for new in other.values():
        new_key = new.__class__.__name__

        # Merge into the ancestor themeables; mro()[1:-2] skips the
        # class itself and the (themeable, object) roots
        for child in new.__class__.mro()[1:-2]:
            child_key = child.__name__
            try:
                self[child_key].merge(new)
            except KeyError:
                # Ancestor not present
                pass
            except ValueError:
                # Blank ancestor cannot be merged into; drop it
                del self[child_key]

        try:
            self[new_key].merge(new)
        except (KeyError, ValueError):
            # New themeable type, or could not merge a blank element
            self[new_key] = new
Update themeables with those from other
25,578
def values(self):
    """Return the themeables sorted in reverse by their depth in
    the inheritance hierarchy (deepest first)."""
    def depth(th):
        return len(th.__class__.__mro__)

    return sorted(dict.values(self), key=depth, reverse=True)
Return a list of themeables sorted in reverse based on their depth in the inheritance hierarchy .
25,579
def setup_data(self, data, params):
    """Verify that the required aesthetics are present, then
    return ``data`` unchanged."""
    check_required_aesthetics(
        self.REQUIRED_AES,
        data.columns,
        self.__class__.__name__)
    return data
Verify & return data
25,580
def compute_layer(cls, data, params, layout):
    """Compute position for the layer in all panels."""
    def fn(pdata):
        # Compute position for a single panel
        if len(pdata) == 0:
            return pdata
        scales = layout.get_scales(pdata['PANEL'].iat[0])
        return cls.compute_panel(pdata, scales, params)

    return groupby_apply(data, 'PANEL', fn)
Compute position for the layer in all panels
25,581
def compute_panel(cls, data, scales, params):
    """Compute positions for a single panel.

    Position subclasses must override this method.
    """
    template = '{} needs to implement this method'
    raise NotImplementedError(template.format(cls.__name__))
Positions must override this function
25,582
def transform_position(data, trans_x=None, trans_y=None):
    """Transform all the variables that map onto the x and y scales.

    Mutates and returns ``data``.
    """
    x_aes = {'x', 'xmin', 'xmax', 'xend', 'xintercept'}
    y_aes = {'y', 'ymin', 'ymax', 'yend', 'yintercept'}

    if trans_x:
        cols = [c for c in data.columns if c in x_aes]
        data[cols] = data[cols].apply(trans_x)

    if trans_y:
        cols = [c for c in data.columns if c in y_aes]
        data[cols] = data[cols].apply(trans_y)

    return data
Transform all the variables that map onto the x and y scales .
25,583
def from_geom(geom):
    """Create and return a position object for the geom."""
    name = geom.params['position']

    # Already an instantiated position
    if issubclass(type(name), position):
        return name

    if isinstance(name, type) and issubclass(name, position):
        klass = name
    elif is_string(name):
        if not name.startswith('position_'):
            name = 'position_{}'.format(name)
        klass = Registry[name]
    else:
        raise PlotnineError(
            'Unknown position of type {}'.format(type(name)))

    return klass()
Create and return a position object for the geom
25,584
def make_scale(ae, series, *args, **kwargs):
    """Return a proper scale object for the series."""
    stype = scale_type(series)

    # Discrete scales do not have a transform
    if stype == 'discrete':
        with suppress(KeyError):
            del kwargs['trans']

    scale_name = 'scale_{}_{}'.format(ae, stype)
    scale_klass = Registry[scale_name]
    return scale_klass(*args, **kwargs)
Return a proper scale object for the series
25,585
def append(self, sc):
    """Add scale ``sc``, removing any previous scale that covers
    the same aesthetics (with a warning)."""
    ae = sc.aesthetics[0]
    cover_ae = self.find(ae)
    if any(cover_ae):
        warn(_TPL_DUPLICATE_SCALE.format(ae), PlotnineWarning)
        self.pop(cover_ae.index(True))
    # Bypass our own append to avoid recursing
    list.append(self, sc)
Add scale sc and remove any previous scales that cover the same aesthetics
25,586
def input(self):
    """Return a list of all the aesthetics covered by the scales."""
    return list(itertools.chain(*(s.aesthetics for s in self)))
Return a list of all the aesthetics covered by the scales .
25,587
def get_scales(self, aesthetic):
    """Return the scale for the aesthetic or None if there
    isn't one."""
    matches = self.find(aesthetic)
    try:
        return self[matches.index(True)]
    except ValueError:
        # No scale covers this aesthetic
        return None
Return the scale for the aesthetic or None if there isn t one .
25,588
def non_position_scales(self):
    """Return the non-position scales that are present."""
    others = [s for s in self
              if 'x' not in s.aesthetics and 'y' not in s.aesthetics]
    return Scales(others)
Return a list of the non - position scales that are present
25,589
def position_scales(self):
    """Return the position (x/y) scales that are present."""
    pos = [s for s in self
           if 'x' in s.aesthetics or 'y' in s.aesthetics]
    return Scales(pos)
Return a list of the position scales that are present
25,590
def train_df(self, df, drop=False):
    """Train scales from a dataframe and return it."""
    # Nothing to do without data or without scales
    if len(df) == 0 or len(self) == 0:
        return df

    for sc in self:
        sc.train_df(df)
    return df
Train scales from a dataframe
25,591
def map_df(self, df):
    """Map values from a dataframe through every scale."""
    # Nothing to do without data or without scales
    if len(df) == 0 or len(self) == 0:
        return df

    for sc in self:
        df = sc.map_df(df)
    return df
Map values from a dataframe .
25,592
def transform_df(self, df):
    """Transform values in a dataframe through every scale."""
    # Nothing to do without data or without scales
    if len(df) == 0 or len(self) == 0:
        return df

    for sc in self:
        df = sc.transform_df(df)
    return df
Transform values in a dataframe .
25,593
def add_defaults(self, data, aesthetics):
    """Add default scales for the aesthetics if none are present.

    Parameters
    ----------
    data : dataframe
        Data from which column types determine the scale types.
    aesthetics : dict
        Mapping of aesthetic name to column name/expression.
    """
    if not aesthetics:
        return

    # Aesthetics already covered by existing scales
    aws = set()
    if self:
        for s in (set(sc.aesthetics) for sc in self):
            aws.update(s)

    new_aesthetics = [x for x in aesthetics.keys() if x not in aws]
    if not new_aesthetics:
        return

    # (Removed a dead write-only local `seen` that was never read.)
    for ae in new_aesthetics:
        col = aesthetics[ae]
        # Fall back to the aesthetic name itself when the mapped
        # column is not in the data (e.g. a computed column)
        if col not in data:
            col = ae
        scale_var = aes_to_scale(ae)
        if self.get_scales(scale_var):
            continue

        try:
            sc = make_scale(scale_var, data[col])
        except PlotnineError:
            # Skip aesthetics with no scale ('group', 'shape', ...)
            continue
        self.append(sc)
Add default scales for the aesthetics if none are present
25,594
def add_missing(self, aesthetics):
    """Add missing but required scales, defaulting to continuous."""
    # Only aesthetics not already covered by a scale
    for ae in set(aesthetics) - set(self.input()):
        scale_name = 'scale_{}_continuous'.format(ae)
        self.append(Registry[scale_name]())
Add missing but required scales .
25,595
def label_value(label_info, multi_line=True):
    """Convert series values to str and maybe concatenate them.

    When ``multi_line`` is False the values are collapsed onto a
    single line.
    """
    out = label_info.astype(str)
    if multi_line:
        return out
    return collapse_label_lines(out)
Convert series values to str and maybe concatenate them
25,596
def label_both(label_info, multi_line=True, sep=': '):
    """Concatenate the index and the value of the series.

    Each entry becomes ``"<variable><sep><value>"``.
    """
    label_info = label_info.astype(str)
    for name in label_info.index:
        label_info[name] = '{0}{1}{2}'.format(name, sep,
                                              label_info[name])
    if not multi_line:
        label_info = collapse_label_lines(label_info)
    return label_info
Concatenate the index and the value of the series .
25,597
def label_context(label_info, multi_line=True, sep=': '):
    """Create an unambiguous label string.

    With a single facetting variable only the value is shown;
    otherwise each value is prefixed with its variable name.
    """
    if len(label_info) == 1:
        return label_value(label_info, multi_line)
    return label_both(label_info, multi_line, sep)
Create an unambiguous label string
25,598
def as_labeller(x, default=label_value, multi_line=True):
    """Coerce ``x`` to a labeller function.

    ``x`` may be None (use ``default``), a registered labeller name,
    an existing labeller closure, a lookup table, or a plain
    callable applied to each value.
    """
    if x is None:
        x = default

    # A registered labeller name, e.g. 'label_both'
    with suppress(KeyError, TypeError):
        x = LABELLERS[x]

    # Already a labeller closure — return as is
    with suppress(AttributeError):
        if x.__name__ == '_labeller':
            return x

    def _labeller(label_info):
        label_info = pd.Series(label_info).astype(str)

        if callable(x) and x.__name__ in LABELLERS:
            # One of the registered labeller functions
            return x(label_info)
        elif hasattr(x, '__contains__'):
            # Lookup table: replace known values, keep the rest
            for var in label_info.index:
                if label_info[var] in x:
                    label_info[var] = x[label_info[var]]
            return label_info
        elif callable(x):
            # Generic function applied per value
            for var in label_info.index:
                label_info[var] = x(label_info[var])
            return label_info
        else:
            msg = "Could not use '{0}' for labelling."
            raise PlotnineError(msg.format(x))

    return _labeller
Coerce to a labeller function
25,599
def labeller(rows=None, cols=None, multi_line=True,
             default=label_value, **kwargs):
    """Return a labeller function.

    Parameters
    ----------
    rows : labeller-like, optional
        Labeller for the row facet variables.
    cols : labeller-like, optional
        Labeller for the column facet variables.
    multi_line : bool
        Whether to place each variable on its own line.
    default : callable
        Labeller used when none is specified for a dimension.
    **kwargs
        Per-variable labeller overrides, keyed by variable name.
    """
    rows_labeller = as_labeller(rows, default, multi_line)
    cols_labeller = as_labeller(cols, default, multi_line)

    def _labeller(label_info):
        # Pick the labeller for this facet dimension.
        # NOTE(review): relies on a `_meta` attribute attached to the
        # series by the caller — read it before astype() copies it away.
        if label_info._meta['dimension'] == 'rows':
            margin_labeller = rows_labeller
        else:
            margin_labeller = cols_labeller

        label_info = label_info.astype(str)
        # `Series.iteritems()` was removed in pandas 2.0 (and the loop
        # value was unused) — iterate the index instead.
        for name in label_info.index:
            func = as_labeller(kwargs.get(name), margin_labeller)
            new_info = func(label_info[[name]])
            label_info[name] = new_info[name]

        if not multi_line:
            label_info = collapse_label_lines(label_info)

        return label_info

    return _labeller
Return a labeller function