id
int32
0
252k
repo
stringlengths
7
55
path
stringlengths
4
127
func_name
stringlengths
1
88
original_string
stringlengths
75
19.8k
language
stringclasses
1 value
code
stringlengths
51
19.8k
code_tokens
list
docstring
stringlengths
3
17.3k
docstring_tokens
list
sha
stringlengths
40
40
url
stringlengths
87
242
233,300
dddomodossola/remi
remi/gui.py
TableWidget.item_at
def item_at(self, row, column): """Returns the TableItem instance at row, column cordinates Args: row (int): zero based index column (int): zero based index """ return self.children[str(row)].children[str(column)]
python
def item_at(self, row, column): return self.children[str(row)].children[str(column)]
[ "def", "item_at", "(", "self", ",", "row", ",", "column", ")", ":", "return", "self", ".", "children", "[", "str", "(", "row", ")", "]", ".", "children", "[", "str", "(", "column", ")", "]" ]
Returns the TableItem instance at row, column cordinates Args: row (int): zero based index column (int): zero based index
[ "Returns", "the", "TableItem", "instance", "at", "row", "column", "cordinates" ]
85206f62220662bb7ecd471042268def71ccad28
https://github.com/dddomodossola/remi/blob/85206f62220662bb7ecd471042268def71ccad28/remi/gui.py#L2468-L2475
233,301
dddomodossola/remi
remi/gui.py
TableWidget.set_row_count
def set_row_count(self, count): """Sets the table row count. Args: count (int): number of rows """ current_row_count = self.row_count() current_column_count = self.column_count() if count > current_row_count: cl = TableEditableItem if self._editable else TableItem for i in range(current_row_count, count): tr = TableRow() for c in range(0, current_column_count): tr.append(cl(), str(c)) if self._editable: tr.children[str(c)].onchange.connect( self.on_item_changed, int(i), int(c)) self.append(tr, str(i)) self._update_first_row() elif count < current_row_count: for i in range(count, current_row_count): self.remove_child(self.children[str(i)])
python
def set_row_count(self, count): current_row_count = self.row_count() current_column_count = self.column_count() if count > current_row_count: cl = TableEditableItem if self._editable else TableItem for i in range(current_row_count, count): tr = TableRow() for c in range(0, current_column_count): tr.append(cl(), str(c)) if self._editable: tr.children[str(c)].onchange.connect( self.on_item_changed, int(i), int(c)) self.append(tr, str(i)) self._update_first_row() elif count < current_row_count: for i in range(count, current_row_count): self.remove_child(self.children[str(i)])
[ "def", "set_row_count", "(", "self", ",", "count", ")", ":", "current_row_count", "=", "self", ".", "row_count", "(", ")", "current_column_count", "=", "self", ".", "column_count", "(", ")", "if", "count", ">", "current_row_count", ":", "cl", "=", "TableEdit...
Sets the table row count. Args: count (int): number of rows
[ "Sets", "the", "table", "row", "count", "." ]
85206f62220662bb7ecd471042268def71ccad28
https://github.com/dddomodossola/remi/blob/85206f62220662bb7ecd471042268def71ccad28/remi/gui.py#L2500-L2521
233,302
dddomodossola/remi
remi/gui.py
TableWidget.set_column_count
def set_column_count(self, count): """Sets the table column count. Args: count (int): column of rows """ current_row_count = self.row_count() current_column_count = self.column_count() if count > current_column_count: cl = TableEditableItem if self._editable else TableItem for r_key in self.children.keys(): row = self.children[r_key] for i in range(current_column_count, count): row.append(cl(), str(i)) if self._editable: row.children[str(i)].onchange.connect( self.on_item_changed, int(r_key), int(i)) self._update_first_row() elif count < current_column_count: for row in self.children.values(): for i in range(count, current_column_count): row.remove_child(row.children[str(i)]) self._column_count = count
python
def set_column_count(self, count): current_row_count = self.row_count() current_column_count = self.column_count() if count > current_column_count: cl = TableEditableItem if self._editable else TableItem for r_key in self.children.keys(): row = self.children[r_key] for i in range(current_column_count, count): row.append(cl(), str(i)) if self._editable: row.children[str(i)].onchange.connect( self.on_item_changed, int(r_key), int(i)) self._update_first_row() elif count < current_column_count: for row in self.children.values(): for i in range(count, current_column_count): row.remove_child(row.children[str(i)]) self._column_count = count
[ "def", "set_column_count", "(", "self", ",", "count", ")", ":", "current_row_count", "=", "self", ".", "row_count", "(", ")", "current_column_count", "=", "self", ".", "column_count", "(", ")", "if", "count", ">", "current_column_count", ":", "cl", "=", "Tab...
Sets the table column count. Args: count (int): column of rows
[ "Sets", "the", "table", "column", "count", "." ]
85206f62220662bb7ecd471042268def71ccad28
https://github.com/dddomodossola/remi/blob/85206f62220662bb7ecd471042268def71ccad28/remi/gui.py#L2523-L2545
233,303
dddomodossola/remi
remi/gui.py
TableWidget.on_item_changed
def on_item_changed(self, item, new_value, row, column): """Event for the item change. Args: emitter (TableWidget): The emitter of the event. item (TableItem): The TableItem instance. new_value (str): New text content. row (int): row index. column (int): column index. """ return (item, new_value, row, column)
python
def on_item_changed(self, item, new_value, row, column): return (item, new_value, row, column)
[ "def", "on_item_changed", "(", "self", ",", "item", ",", "new_value", ",", "row", ",", "column", ")", ":", "return", "(", "item", ",", "new_value", ",", "row", ",", "column", ")" ]
Event for the item change. Args: emitter (TableWidget): The emitter of the event. item (TableItem): The TableItem instance. new_value (str): New text content. row (int): row index. column (int): column index.
[ "Event", "for", "the", "item", "change", "." ]
85206f62220662bb7ecd471042268def71ccad28
https://github.com/dddomodossola/remi/blob/85206f62220662bb7ecd471042268def71ccad28/remi/gui.py#L2549-L2559
233,304
dddomodossola/remi
remi/gui.py
Svg.set_viewbox
def set_viewbox(self, x, y, w, h): """Sets the origin and size of the viewbox, describing a virtual view area. Args: x (int): x coordinate of the viewbox origin y (int): y coordinate of the viewbox origin w (int): width of the viewbox h (int): height of the viewbox """ self.attributes['viewBox'] = "%s %s %s %s" % (x, y, w, h) self.attributes['preserveAspectRatio'] = 'none'
python
def set_viewbox(self, x, y, w, h): self.attributes['viewBox'] = "%s %s %s %s" % (x, y, w, h) self.attributes['preserveAspectRatio'] = 'none'
[ "def", "set_viewbox", "(", "self", ",", "x", ",", "y", ",", "w", ",", "h", ")", ":", "self", ".", "attributes", "[", "'viewBox'", "]", "=", "\"%s %s %s %s\"", "%", "(", "x", ",", "y", ",", "w", ",", "h", ")", "self", ".", "attributes", "[", "'p...
Sets the origin and size of the viewbox, describing a virtual view area. Args: x (int): x coordinate of the viewbox origin y (int): y coordinate of the viewbox origin w (int): width of the viewbox h (int): height of the viewbox
[ "Sets", "the", "origin", "and", "size", "of", "the", "viewbox", "describing", "a", "virtual", "view", "area", "." ]
85206f62220662bb7ecd471042268def71ccad28
https://github.com/dddomodossola/remi/blob/85206f62220662bb7ecd471042268def71ccad28/remi/gui.py#L3396-L3406
233,305
dddomodossola/remi
remi/gui.py
SvgShape.set_position
def set_position(self, x, y): """Sets the shape position. Args: x (int): the x coordinate y (int): the y coordinate """ self.attributes['x'] = str(x) self.attributes['y'] = str(y)
python
def set_position(self, x, y): self.attributes['x'] = str(x) self.attributes['y'] = str(y)
[ "def", "set_position", "(", "self", ",", "x", ",", "y", ")", ":", "self", ".", "attributes", "[", "'x'", "]", "=", "str", "(", "x", ")", "self", ".", "attributes", "[", "'y'", "]", "=", "str", "(", "y", ")" ]
Sets the shape position. Args: x (int): the x coordinate y (int): the y coordinate
[ "Sets", "the", "shape", "position", "." ]
85206f62220662bb7ecd471042268def71ccad28
https://github.com/dddomodossola/remi/blob/85206f62220662bb7ecd471042268def71ccad28/remi/gui.py#L3423-L3431
233,306
dddomodossola/remi
remi/gui.py
SvgRectangle.set_size
def set_size(self, w, h): """ Sets the rectangle size. Args: w (int): width of the rectangle h (int): height of the rectangle """ self.attributes['width'] = str(w) self.attributes['height'] = str(h)
python
def set_size(self, w, h): self.attributes['width'] = str(w) self.attributes['height'] = str(h)
[ "def", "set_size", "(", "self", ",", "w", ",", "h", ")", ":", "self", ".", "attributes", "[", "'width'", "]", "=", "str", "(", "w", ")", "self", ".", "attributes", "[", "'height'", "]", "=", "str", "(", "h", ")" ]
Sets the rectangle size. Args: w (int): width of the rectangle h (int): height of the rectangle
[ "Sets", "the", "rectangle", "size", "." ]
85206f62220662bb7ecd471042268def71ccad28
https://github.com/dddomodossola/remi/blob/85206f62220662bb7ecd471042268def71ccad28/remi/gui.py#L3478-L3486
233,307
dddomodossola/remi
remi/server.py
start
def start(main_gui_class, **kwargs): """This method starts the webserver with a specific App subclass.""" debug = kwargs.pop('debug', False) standalone = kwargs.pop('standalone', False) logging.basicConfig(level=logging.DEBUG if debug else logging.INFO, format='%(name)-16s %(levelname)-8s %(message)s') logging.getLogger('remi').setLevel( level=logging.DEBUG if debug else logging.INFO) if standalone: s = StandaloneServer(main_gui_class, start=True, **kwargs) else: s = Server(main_gui_class, start=True, **kwargs)
python
def start(main_gui_class, **kwargs): debug = kwargs.pop('debug', False) standalone = kwargs.pop('standalone', False) logging.basicConfig(level=logging.DEBUG if debug else logging.INFO, format='%(name)-16s %(levelname)-8s %(message)s') logging.getLogger('remi').setLevel( level=logging.DEBUG if debug else logging.INFO) if standalone: s = StandaloneServer(main_gui_class, start=True, **kwargs) else: s = Server(main_gui_class, start=True, **kwargs)
[ "def", "start", "(", "main_gui_class", ",", "*", "*", "kwargs", ")", ":", "debug", "=", "kwargs", ".", "pop", "(", "'debug'", ",", "False", ")", "standalone", "=", "kwargs", ".", "pop", "(", "'standalone'", ",", "False", ")", "logging", ".", "basicConf...
This method starts the webserver with a specific App subclass.
[ "This", "method", "starts", "the", "webserver", "with", "a", "specific", "App", "subclass", "." ]
85206f62220662bb7ecd471042268def71ccad28
https://github.com/dddomodossola/remi/blob/85206f62220662bb7ecd471042268def71ccad28/remi/server.py#L877-L890
233,308
dddomodossola/remi
remi/server.py
App._instance
def _instance(self): global clients global runtimeInstances """ This method is used to get the Application instance previously created managing on this, it is possible to switch to "single instance for multiple clients" or "multiple instance for multiple clients" execution way """ self.session = 0 #checking previously defined session if 'cookie' in self.headers: self.session = parse_session_cookie(self.headers['cookie']) #if not a valid session id if self.session == None: self.session = 0 if not self.session in clients.keys(): self.session = 0 #if no session id if self.session == 0: if self.server.multiple_instance: self.session = int(time.time()*1000) #send session to browser del self.headers['cookie'] #if the client instance doesn't exist if not(self.session in clients): self.update_interval = self.server.update_interval from remi import gui head = gui.HEAD(self.server.title) # use the default css, but append a version based on its hash, to stop browser caching head.add_child('internal_css', "<link href='/res:style.css' rel='stylesheet' />\n") body = gui.BODY() body.onload.connect(self.onload) body.onerror.connect(self.onerror) body.ononline.connect(self.ononline) body.onpagehide.connect(self.onpagehide) body.onpageshow.connect(self.onpageshow) body.onresize.connect(self.onresize) self.page = gui.HTML() self.page.add_child('head', head) self.page.add_child('body', body) if not hasattr(self, 'websockets'): self.websockets = [] self.update_lock = threading.RLock() if not hasattr(self, '_need_update_flag'): self._need_update_flag = False self._stop_update_flag = False if self.update_interval > 0: self._update_thread = threading.Thread(target=self._idle_loop) self._update_thread.setDaemon(True) self._update_thread.start() runtimeInstances[str(id(self))] = self clients[self.session] = self else: #restore instance attributes client = clients[self.session] self.websockets = client.websockets self.page = client.page self.update_lock = client.update_lock 
self.update_interval = client.update_interval self._need_update_flag = client._need_update_flag if hasattr(client, '_update_thread'): self._update_thread = client._update_thread net_interface_ip = self.headers.get('Host', "%s:%s"%(self.connection.getsockname()[0],self.server.server_address[1])) websocket_timeout_timer_ms = str(self.server.websocket_timeout_timer_ms) pending_messages_queue_length = str(self.server.pending_messages_queue_length) self.page.children['head'].set_internal_js(net_interface_ip, pending_messages_queue_length, websocket_timeout_timer_ms)
python
def _instance(self): global clients global runtimeInstances self.session = 0 #checking previously defined session if 'cookie' in self.headers: self.session = parse_session_cookie(self.headers['cookie']) #if not a valid session id if self.session == None: self.session = 0 if not self.session in clients.keys(): self.session = 0 #if no session id if self.session == 0: if self.server.multiple_instance: self.session = int(time.time()*1000) #send session to browser del self.headers['cookie'] #if the client instance doesn't exist if not(self.session in clients): self.update_interval = self.server.update_interval from remi import gui head = gui.HEAD(self.server.title) # use the default css, but append a version based on its hash, to stop browser caching head.add_child('internal_css', "<link href='/res:style.css' rel='stylesheet' />\n") body = gui.BODY() body.onload.connect(self.onload) body.onerror.connect(self.onerror) body.ononline.connect(self.ononline) body.onpagehide.connect(self.onpagehide) body.onpageshow.connect(self.onpageshow) body.onresize.connect(self.onresize) self.page = gui.HTML() self.page.add_child('head', head) self.page.add_child('body', body) if not hasattr(self, 'websockets'): self.websockets = [] self.update_lock = threading.RLock() if not hasattr(self, '_need_update_flag'): self._need_update_flag = False self._stop_update_flag = False if self.update_interval > 0: self._update_thread = threading.Thread(target=self._idle_loop) self._update_thread.setDaemon(True) self._update_thread.start() runtimeInstances[str(id(self))] = self clients[self.session] = self else: #restore instance attributes client = clients[self.session] self.websockets = client.websockets self.page = client.page self.update_lock = client.update_lock self.update_interval = client.update_interval self._need_update_flag = client._need_update_flag if hasattr(client, '_update_thread'): self._update_thread = client._update_thread net_interface_ip = self.headers.get('Host', 
"%s:%s"%(self.connection.getsockname()[0],self.server.server_address[1])) websocket_timeout_timer_ms = str(self.server.websocket_timeout_timer_ms) pending_messages_queue_length = str(self.server.pending_messages_queue_length) self.page.children['head'].set_internal_js(net_interface_ip, pending_messages_queue_length, websocket_timeout_timer_ms)
[ "def", "_instance", "(", "self", ")", ":", "global", "clients", "global", "runtimeInstances", "self", ".", "session", "=", "0", "#checking previously defined session", "if", "'cookie'", "in", "self", ".", "headers", ":", "self", ".", "session", "=", "parse_sessi...
This method is used to get the Application instance previously created managing on this, it is possible to switch to "single instance for multiple clients" or "multiple instance for multiple clients" execution way
[ "This", "method", "is", "used", "to", "get", "the", "Application", "instance", "previously", "created", "managing", "on", "this", "it", "is", "possible", "to", "switch", "to", "single", "instance", "for", "multiple", "clients", "or", "multiple", "instance", "f...
85206f62220662bb7ecd471042268def71ccad28
https://github.com/dddomodossola/remi/blob/85206f62220662bb7ecd471042268def71ccad28/remi/server.py#L318-L397
233,309
dddomodossola/remi
remi/server.py
App.do_gui_update
def do_gui_update(self): """ This method gets called also by Timer, a new thread, and so needs to lock the update """ with self.update_lock: changed_widget_dict = {} self.root.repr(changed_widget_dict) for widget in changed_widget_dict.keys(): html = changed_widget_dict[widget] __id = str(widget.identifier) self._send_spontaneous_websocket_message(_MSG_UPDATE + __id + ',' + to_websocket(html)) self._need_update_flag = False
python
def do_gui_update(self): with self.update_lock: changed_widget_dict = {} self.root.repr(changed_widget_dict) for widget in changed_widget_dict.keys(): html = changed_widget_dict[widget] __id = str(widget.identifier) self._send_spontaneous_websocket_message(_MSG_UPDATE + __id + ',' + to_websocket(html)) self._need_update_flag = False
[ "def", "do_gui_update", "(", "self", ")", ":", "with", "self", ".", "update_lock", ":", "changed_widget_dict", "=", "{", "}", "self", ".", "root", ".", "repr", "(", "changed_widget_dict", ")", "for", "widget", "in", "changed_widget_dict", ".", "keys", "(", ...
This method gets called also by Timer, a new thread, and so needs to lock the update
[ "This", "method", "gets", "called", "also", "by", "Timer", "a", "new", "thread", "and", "so", "needs", "to", "lock", "the", "update" ]
85206f62220662bb7ecd471042268def71ccad28
https://github.com/dddomodossola/remi/blob/85206f62220662bb7ecd471042268def71ccad28/remi/server.py#L436-L446
233,310
dddomodossola/remi
remi/server.py
App.do_GET
def do_GET(self): # check here request header to identify the type of req, if http or ws # if this is a ws req, instance a ws handler, add it to App's ws list, return if "Upgrade" in self.headers: if self.headers['Upgrade'] == 'websocket': #passing arguments to websocket handler, otherwise it will lost the last message, # and will be unable to handshake ws = WebSocketsHandler(self.headers, self.request, self.client_address, self.server) return """Handler for the GET requests.""" do_process = False if self.server.auth is None: do_process = True else: if not ('Authorization' in self.headers) or self.headers['Authorization'] is None: self._log.info("Authenticating") self.do_AUTHHEAD() self.wfile.write(encode_text('no auth header received')) elif self.headers['Authorization'] == 'Basic ' + self.server.auth.decode(): do_process = True else: self.do_AUTHHEAD() self.wfile.write(encode_text(self.headers['Authorization'])) self.wfile.write(encode_text('not authenticated')) if do_process: path = str(unquote(self.path)) # noinspection PyBroadException try: self._instance() # build the page (call main()) in user code, if not built yet with self.update_lock: # build the root page once if necessary if not 'root' in self.page.children['body'].children.keys(): self._log.info('built UI (path=%s)' % path) self.set_root_widget(self.main(*self.server.userdata)) self._process_all(path) except: self._log.error('error processing GET request', exc_info=True)
python
def do_GET(self): # check here request header to identify the type of req, if http or ws # if this is a ws req, instance a ws handler, add it to App's ws list, return if "Upgrade" in self.headers: if self.headers['Upgrade'] == 'websocket': #passing arguments to websocket handler, otherwise it will lost the last message, # and will be unable to handshake ws = WebSocketsHandler(self.headers, self.request, self.client_address, self.server) return do_process = False if self.server.auth is None: do_process = True else: if not ('Authorization' in self.headers) or self.headers['Authorization'] is None: self._log.info("Authenticating") self.do_AUTHHEAD() self.wfile.write(encode_text('no auth header received')) elif self.headers['Authorization'] == 'Basic ' + self.server.auth.decode(): do_process = True else: self.do_AUTHHEAD() self.wfile.write(encode_text(self.headers['Authorization'])) self.wfile.write(encode_text('not authenticated')) if do_process: path = str(unquote(self.path)) # noinspection PyBroadException try: self._instance() # build the page (call main()) in user code, if not built yet with self.update_lock: # build the root page once if necessary if not 'root' in self.page.children['body'].children.keys(): self._log.info('built UI (path=%s)' % path) self.set_root_widget(self.main(*self.server.userdata)) self._process_all(path) except: self._log.error('error processing GET request', exc_info=True)
[ "def", "do_GET", "(", "self", ")", ":", "# check here request header to identify the type of req, if http or ws", "# if this is a ws req, instance a ws handler, add it to App's ws list, return", "if", "\"Upgrade\"", "in", "self", ".", "headers", ":", "if", "self", ".", "headers",...
Handler for the GET requests.
[ "Handler", "for", "the", "GET", "requests", "." ]
85206f62220662bb7ecd471042268def71ccad28
https://github.com/dddomodossola/remi/blob/85206f62220662bb7ecd471042268def71ccad28/remi/server.py#L551-L590
233,311
dddomodossola/remi
remi/server.py
App.on_close
def on_close(self): """ Called by the server when the App have to be terminated """ self._stop_update_flag = True for ws in self.websockets: ws.close()
python
def on_close(self): self._stop_update_flag = True for ws in self.websockets: ws.close()
[ "def", "on_close", "(", "self", ")", ":", "self", ".", "_stop_update_flag", "=", "True", "for", "ws", "in", "self", ".", "websockets", ":", "ws", ".", "close", "(", ")" ]
Called by the server when the App have to be terminated
[ "Called", "by", "the", "server", "when", "the", "App", "have", "to", "be", "terminated" ]
85206f62220662bb7ecd471042268def71ccad28
https://github.com/dddomodossola/remi/blob/85206f62220662bb7ecd471042268def71ccad28/remi/server.py#L680-L685
233,312
dddomodossola/remi
editor/editor_widgets.py
SignalConnectionManager.update
def update(self, widget, widget_tree): """ for the selected widget are listed the relative signals for each signal there is a dropdown containing all the widgets the user will select the widget that have to listen a specific event """ self.listeners_list = [] self.build_widget_list_from_tree(widget_tree) self.label.set_text('Signal connections: ' + widget.attributes['editor_varname']) #del self.container self.container = gui.VBox(width='100%', height='90%') self.container.style['justify-content'] = 'flex-start' self.container.style['overflow-y'] = 'scroll' self.append(self.container, 'container') ##for all the events of this widget #isclass instead of ismethod because event methods are replaced with ClassEventConnector for (setOnEventListenerFuncname,setOnEventListenerFunc) in inspect.getmembers(widget): #if the member is decorated by decorate_set_on_listener and the function is referred to this event if hasattr(setOnEventListenerFunc, '_event_info'): self.container.append( SignalConnection(widget, self.listeners_list, setOnEventListenerFuncname, setOnEventListenerFunc, width='100%') )
python
def update(self, widget, widget_tree): self.listeners_list = [] self.build_widget_list_from_tree(widget_tree) self.label.set_text('Signal connections: ' + widget.attributes['editor_varname']) #del self.container self.container = gui.VBox(width='100%', height='90%') self.container.style['justify-content'] = 'flex-start' self.container.style['overflow-y'] = 'scroll' self.append(self.container, 'container') ##for all the events of this widget #isclass instead of ismethod because event methods are replaced with ClassEventConnector for (setOnEventListenerFuncname,setOnEventListenerFunc) in inspect.getmembers(widget): #if the member is decorated by decorate_set_on_listener and the function is referred to this event if hasattr(setOnEventListenerFunc, '_event_info'): self.container.append( SignalConnection(widget, self.listeners_list, setOnEventListenerFuncname, setOnEventListenerFunc, width='100%') )
[ "def", "update", "(", "self", ",", "widget", ",", "widget_tree", ")", ":", "self", ".", "listeners_list", "=", "[", "]", "self", ".", "build_widget_list_from_tree", "(", "widget_tree", ")", "self", ".", "label", ".", "set_text", "(", "'Signal connections: '", ...
for the selected widget are listed the relative signals for each signal there is a dropdown containing all the widgets the user will select the widget that have to listen a specific event
[ "for", "the", "selected", "widget", "are", "listed", "the", "relative", "signals", "for", "each", "signal", "there", "is", "a", "dropdown", "containing", "all", "the", "widgets", "the", "user", "will", "select", "the", "widget", "that", "have", "to", "listen...
85206f62220662bb7ecd471042268def71ccad28
https://github.com/dddomodossola/remi/blob/85206f62220662bb7ecd471042268def71ccad28/editor/editor_widgets.py#L183-L206
233,313
dddomodossola/remi
editor/editor_widgets.py
ProjectConfigurationDialog.confirm_dialog
def confirm_dialog(self, emitter): """event called pressing on OK button. """ #here the user input is transferred to the dict, ready to use self.from_fields_to_dict() return super(ProjectConfigurationDialog,self).confirm_dialog(self)
python
def confirm_dialog(self, emitter): #here the user input is transferred to the dict, ready to use self.from_fields_to_dict() return super(ProjectConfigurationDialog,self).confirm_dialog(self)
[ "def", "confirm_dialog", "(", "self", ",", "emitter", ")", ":", "#here the user input is transferred to the dict, ready to use", "self", ".", "from_fields_to_dict", "(", ")", "return", "super", "(", "ProjectConfigurationDialog", ",", "self", ")", ".", "confirm_dialog", ...
event called pressing on OK button.
[ "event", "called", "pressing", "on", "OK", "button", "." ]
85206f62220662bb7ecd471042268def71ccad28
https://github.com/dddomodossola/remi/blob/85206f62220662bb7ecd471042268def71ccad28/editor/editor_widgets.py#L256-L261
233,314
dddomodossola/remi
editor/editor_widgets.py
ProjectConfigurationDialog.show
def show(self, baseAppInstance): """Allows to show the widget as root window""" self.from_dict_to_fields(self.configDict) super(ProjectConfigurationDialog, self).show(baseAppInstance)
python
def show(self, baseAppInstance): self.from_dict_to_fields(self.configDict) super(ProjectConfigurationDialog, self).show(baseAppInstance)
[ "def", "show", "(", "self", ",", "baseAppInstance", ")", ":", "self", ".", "from_dict_to_fields", "(", "self", ".", "configDict", ")", "super", "(", "ProjectConfigurationDialog", ",", "self", ")", ".", "show", "(", "baseAppInstance", ")" ]
Allows to show the widget as root window
[ "Allows", "to", "show", "the", "widget", "as", "root", "window" ]
85206f62220662bb7ecd471042268def71ccad28
https://github.com/dddomodossola/remi/blob/85206f62220662bb7ecd471042268def71ccad28/editor/editor_widgets.py#L263-L266
233,315
dddomodossola/remi
editor/editor_widgets.py
CssSizeInput.set_value
def set_value(self, value): """The value have to be in the form '10px' or '10%', so numeric value plus measure unit """ v = 0 measure_unit = 'px' try: v = int(float(value.replace('px', ''))) except ValueError: try: v = int(float(value.replace('%', ''))) measure_unit = '%' except ValueError: pass self.numInput.set_value(v) self.dropMeasureUnit.set_value(measure_unit)
python
def set_value(self, value): v = 0 measure_unit = 'px' try: v = int(float(value.replace('px', ''))) except ValueError: try: v = int(float(value.replace('%', ''))) measure_unit = '%' except ValueError: pass self.numInput.set_value(v) self.dropMeasureUnit.set_value(measure_unit)
[ "def", "set_value", "(", "self", ",", "value", ")", ":", "v", "=", "0", "measure_unit", "=", "'px'", "try", ":", "v", "=", "int", "(", "float", "(", "value", ".", "replace", "(", "'px'", ",", "''", ")", ")", ")", "except", "ValueError", ":", "try...
The value have to be in the form '10px' or '10%', so numeric value plus measure unit
[ "The", "value", "have", "to", "be", "in", "the", "form", "10px", "or", "10%", "so", "numeric", "value", "plus", "measure", "unit" ]
85206f62220662bb7ecd471042268def71ccad28
https://github.com/dddomodossola/remi/blob/85206f62220662bb7ecd471042268def71ccad28/editor/editor_widgets.py#L629-L643
233,316
dddomodossola/remi
examples/examples_from_contributors/remi_ext.py
SingleRowSelectionTable.on_table_row_click
def on_table_row_click(self, row, item): ''' Highlight selected row.''' if hasattr(self, "last_clicked_row"): del self.last_clicked_row.style['outline'] self.last_clicked_row = row self.last_clicked_row.style['outline'] = "2px dotted blue" return (row, item)
python
def on_table_row_click(self, row, item): ''' Highlight selected row.''' if hasattr(self, "last_clicked_row"): del self.last_clicked_row.style['outline'] self.last_clicked_row = row self.last_clicked_row.style['outline'] = "2px dotted blue" return (row, item)
[ "def", "on_table_row_click", "(", "self", ",", "row", ",", "item", ")", ":", "if", "hasattr", "(", "self", ",", "\"last_clicked_row\"", ")", ":", "del", "self", ".", "last_clicked_row", ".", "style", "[", "'outline'", "]", "self", ".", "last_clicked_row", ...
Highlight selected row.
[ "Highlight", "selected", "row", "." ]
85206f62220662bb7ecd471042268def71ccad28
https://github.com/dddomodossola/remi/blob/85206f62220662bb7ecd471042268def71ccad28/examples/examples_from_contributors/remi_ext.py#L11-L17
233,317
RaRe-Technologies/smart_open
smart_open/doctools.py
extract_kwargs
def extract_kwargs(docstring): """Extract keyword argument documentation from a function's docstring. Parameters ---------- docstring: str The docstring to extract keyword arguments from. Returns ------- list of (str, str, list str) str The name of the keyword argument. str Its type. str Its documentation as a list of lines. Notes ----- The implementation is rather fragile. It expects the following: 1. The parameters are under an underlined Parameters section 2. Keyword parameters have the literal ", optional" after the type 3. Names and types are not indented 4. Descriptions are indented with 4 spaces 5. The Parameters section ends with an empty line. Examples -------- >>> docstring = '''The foo function. ... Parameters ... ---------- ... bar: str, optional ... This parameter is the bar. ... baz: int, optional ... This parameter is the baz. ... ... ''' >>> kwargs = extract_kwargs(docstring) >>> kwargs[0] ('bar', 'str, optional', ['This parameter is the bar.']) """ lines = inspect.cleandoc(docstring).split('\n') retval = [] # # 1. Find the underlined 'Parameters' section # 2. Once there, continue parsing parameters until we hit an empty line # while lines[0] != 'Parameters': lines.pop(0) lines.pop(0) lines.pop(0) while lines and lines[0]: name, type_ = lines.pop(0).split(':', 1) description = [] while lines and lines[0].startswith(' '): description.append(lines.pop(0).strip()) if 'optional' in type_: retval.append((name.strip(), type_.strip(), description)) return retval
python
def extract_kwargs(docstring): lines = inspect.cleandoc(docstring).split('\n') retval = [] # # 1. Find the underlined 'Parameters' section # 2. Once there, continue parsing parameters until we hit an empty line # while lines[0] != 'Parameters': lines.pop(0) lines.pop(0) lines.pop(0) while lines and lines[0]: name, type_ = lines.pop(0).split(':', 1) description = [] while lines and lines[0].startswith(' '): description.append(lines.pop(0).strip()) if 'optional' in type_: retval.append((name.strip(), type_.strip(), description)) return retval
[ "def", "extract_kwargs", "(", "docstring", ")", ":", "lines", "=", "inspect", ".", "cleandoc", "(", "docstring", ")", ".", "split", "(", "'\\n'", ")", "retval", "=", "[", "]", "#", "# 1. Find the underlined 'Parameters' section", "# 2. Once there, continue parsing p...
Extract keyword argument documentation from a function's docstring. Parameters ---------- docstring: str The docstring to extract keyword arguments from. Returns ------- list of (str, str, list str) str The name of the keyword argument. str Its type. str Its documentation as a list of lines. Notes ----- The implementation is rather fragile. It expects the following: 1. The parameters are under an underlined Parameters section 2. Keyword parameters have the literal ", optional" after the type 3. Names and types are not indented 4. Descriptions are indented with 4 spaces 5. The Parameters section ends with an empty line. Examples -------- >>> docstring = '''The foo function. ... Parameters ... ---------- ... bar: str, optional ... This parameter is the bar. ... baz: int, optional ... This parameter is the baz. ... ... ''' >>> kwargs = extract_kwargs(docstring) >>> kwargs[0] ('bar', 'str, optional', ['This parameter is the bar.'])
[ "Extract", "keyword", "argument", "documentation", "from", "a", "function", "s", "docstring", "." ]
2dc8d60f223fc7b00a2000c56362a7bd6cd0850e
https://github.com/RaRe-Technologies/smart_open/blob/2dc8d60f223fc7b00a2000c56362a7bd6cd0850e/smart_open/doctools.py#L20-L86
233,318
RaRe-Technologies/smart_open
smart_open/doctools.py
to_docstring
def to_docstring(kwargs, lpad=''): """Reconstruct a docstring from keyword argument info. Basically reverses :func:`extract_kwargs`. Parameters ---------- kwargs: list Output from the extract_kwargs function lpad: str, optional Padding string (from the left). Returns ------- str The docstring snippet documenting the keyword arguments. Examples -------- >>> kwargs = [ ... ('bar', 'str, optional', ['This parameter is the bar.']), ... ('baz', 'int, optional', ['This parameter is the baz.']), ... ] >>> print(to_docstring(kwargs), end='') bar: str, optional This parameter is the bar. baz: int, optional This parameter is the baz. """ buf = io.StringIO() for name, type_, description in kwargs: buf.write('%s%s: %s\n' % (lpad, name, type_)) for line in description: buf.write('%s %s\n' % (lpad, line)) return buf.getvalue()
python
def to_docstring(kwargs, lpad=''): buf = io.StringIO() for name, type_, description in kwargs: buf.write('%s%s: %s\n' % (lpad, name, type_)) for line in description: buf.write('%s %s\n' % (lpad, line)) return buf.getvalue()
[ "def", "to_docstring", "(", "kwargs", ",", "lpad", "=", "''", ")", ":", "buf", "=", "io", ".", "StringIO", "(", ")", "for", "name", ",", "type_", ",", "description", "in", "kwargs", ":", "buf", ".", "write", "(", "'%s%s: %s\\n'", "%", "(", "lpad", ...
Reconstruct a docstring from keyword argument info. Basically reverses :func:`extract_kwargs`. Parameters ---------- kwargs: list Output from the extract_kwargs function lpad: str, optional Padding string (from the left). Returns ------- str The docstring snippet documenting the keyword arguments. Examples -------- >>> kwargs = [ ... ('bar', 'str, optional', ['This parameter is the bar.']), ... ('baz', 'int, optional', ['This parameter is the baz.']), ... ] >>> print(to_docstring(kwargs), end='') bar: str, optional This parameter is the bar. baz: int, optional This parameter is the baz.
[ "Reconstruct", "a", "docstring", "from", "keyword", "argument", "info", "." ]
2dc8d60f223fc7b00a2000c56362a7bd6cd0850e
https://github.com/RaRe-Technologies/smart_open/blob/2dc8d60f223fc7b00a2000c56362a7bd6cd0850e/smart_open/doctools.py#L89-L125
233,319
RaRe-Technologies/smart_open
smart_open/doctools.py
extract_examples_from_readme_rst
def extract_examples_from_readme_rst(indent=' '): """Extract examples from this project's README.rst file. Parameters ---------- indent: str Prepend each line with this string. Should contain some number of spaces. Returns ------- str The examples. Notes ----- Quite fragile, depends on named labels inside the README.rst file. """ curr_dir = os.path.dirname(os.path.abspath(__file__)) readme_path = os.path.join(curr_dir, '..', 'README.rst') try: with open(readme_path) as fin: lines = list(fin) start = lines.index('.. _doctools_before_examples:\n') end = lines.index(".. _doctools_after_examples:\n") lines = lines[start+4:end-2] return ''.join([indent + re.sub('^ ', '', l) for l in lines]) except Exception: return indent + 'See README.rst'
python
def extract_examples_from_readme_rst(indent=' '): curr_dir = os.path.dirname(os.path.abspath(__file__)) readme_path = os.path.join(curr_dir, '..', 'README.rst') try: with open(readme_path) as fin: lines = list(fin) start = lines.index('.. _doctools_before_examples:\n') end = lines.index(".. _doctools_after_examples:\n") lines = lines[start+4:end-2] return ''.join([indent + re.sub('^ ', '', l) for l in lines]) except Exception: return indent + 'See README.rst'
[ "def", "extract_examples_from_readme_rst", "(", "indent", "=", "' '", ")", ":", "curr_dir", "=", "os", ".", "path", ".", "dirname", "(", "os", ".", "path", ".", "abspath", "(", "__file__", ")", ")", "readme_path", "=", "os", ".", "path", ".", "join", ...
Extract examples from this project's README.rst file. Parameters ---------- indent: str Prepend each line with this string. Should contain some number of spaces. Returns ------- str The examples. Notes ----- Quite fragile, depends on named labels inside the README.rst file.
[ "Extract", "examples", "from", "this", "project", "s", "README", ".", "rst", "file", "." ]
2dc8d60f223fc7b00a2000c56362a7bd6cd0850e
https://github.com/RaRe-Technologies/smart_open/blob/2dc8d60f223fc7b00a2000c56362a7bd6cd0850e/smart_open/doctools.py#L128-L155
233,320
RaRe-Technologies/smart_open
smart_open/s3.py
open
def open( bucket_id, key_id, mode, buffer_size=DEFAULT_BUFFER_SIZE, min_part_size=DEFAULT_MIN_PART_SIZE, session=None, resource_kwargs=None, multipart_upload_kwargs=None, ): """Open an S3 object for reading or writing. Parameters ---------- bucket_id: str The name of the bucket this object resides in. key_id: str The name of the key within the bucket. mode: str The mode for opening the object. Must be either "rb" or "wb". buffer_size: int, optional The buffer size to use when performing I/O. min_part_size: int, optional The minimum part size for multipart uploads. For writing only. session: object, optional The S3 session to use when working with boto3. resource_kwargs: dict, optional Keyword arguments to use when accessing the S3 resource for reading or writing. multipart_upload_kwargs: dict, optional Additional parameters to pass to boto3's initiate_multipart_upload function. For writing only. """ logger.debug('%r', locals()) if mode not in MODES: raise NotImplementedError('bad mode: %r expected one of %r' % (mode, MODES)) if resource_kwargs is None: resource_kwargs = {} if multipart_upload_kwargs is None: multipart_upload_kwargs = {} if mode == READ_BINARY: fileobj = SeekableBufferedInputBase( bucket_id, key_id, buffer_size=buffer_size, session=session, resource_kwargs=resource_kwargs, ) elif mode == WRITE_BINARY: fileobj = BufferedOutputBase( bucket_id, key_id, min_part_size=min_part_size, session=session, multipart_upload_kwargs=multipart_upload_kwargs, resource_kwargs=resource_kwargs, ) else: assert False, 'unexpected mode: %r' % mode return fileobj
python
def open( bucket_id, key_id, mode, buffer_size=DEFAULT_BUFFER_SIZE, min_part_size=DEFAULT_MIN_PART_SIZE, session=None, resource_kwargs=None, multipart_upload_kwargs=None, ): logger.debug('%r', locals()) if mode not in MODES: raise NotImplementedError('bad mode: %r expected one of %r' % (mode, MODES)) if resource_kwargs is None: resource_kwargs = {} if multipart_upload_kwargs is None: multipart_upload_kwargs = {} if mode == READ_BINARY: fileobj = SeekableBufferedInputBase( bucket_id, key_id, buffer_size=buffer_size, session=session, resource_kwargs=resource_kwargs, ) elif mode == WRITE_BINARY: fileobj = BufferedOutputBase( bucket_id, key_id, min_part_size=min_part_size, session=session, multipart_upload_kwargs=multipart_upload_kwargs, resource_kwargs=resource_kwargs, ) else: assert False, 'unexpected mode: %r' % mode return fileobj
[ "def", "open", "(", "bucket_id", ",", "key_id", ",", "mode", ",", "buffer_size", "=", "DEFAULT_BUFFER_SIZE", ",", "min_part_size", "=", "DEFAULT_MIN_PART_SIZE", ",", "session", "=", "None", ",", "resource_kwargs", "=", "None", ",", "multipart_upload_kwargs", "=", ...
Open an S3 object for reading or writing. Parameters ---------- bucket_id: str The name of the bucket this object resides in. key_id: str The name of the key within the bucket. mode: str The mode for opening the object. Must be either "rb" or "wb". buffer_size: int, optional The buffer size to use when performing I/O. min_part_size: int, optional The minimum part size for multipart uploads. For writing only. session: object, optional The S3 session to use when working with boto3. resource_kwargs: dict, optional Keyword arguments to use when accessing the S3 resource for reading or writing. multipart_upload_kwargs: dict, optional Additional parameters to pass to boto3's initiate_multipart_upload function. For writing only.
[ "Open", "an", "S3", "object", "for", "reading", "or", "writing", "." ]
2dc8d60f223fc7b00a2000c56362a7bd6cd0850e
https://github.com/RaRe-Technologies/smart_open/blob/2dc8d60f223fc7b00a2000c56362a7bd6cd0850e/smart_open/s3.py#L69-L131
233,321
RaRe-Technologies/smart_open
smart_open/s3.py
BufferedInputBase.read
def read(self, size=-1): """Read up to size bytes from the object and return them.""" if size == 0: return b'' elif size < 0: from_buf = self._read_from_buffer() self._current_pos = self._content_length return from_buf + self._raw_reader.read() # # Return unused data first # if len(self._buffer) >= size: return self._read_from_buffer(size) # # If the stream is finished, return what we have. # if self._eof: return self._read_from_buffer() # # Fill our buffer to the required size. # # logger.debug('filling %r byte-long buffer up to %r bytes', len(self._buffer), size) self._fill_buffer(size) return self._read_from_buffer(size)
python
def read(self, size=-1): if size == 0: return b'' elif size < 0: from_buf = self._read_from_buffer() self._current_pos = self._content_length return from_buf + self._raw_reader.read() # # Return unused data first # if len(self._buffer) >= size: return self._read_from_buffer(size) # # If the stream is finished, return what we have. # if self._eof: return self._read_from_buffer() # # Fill our buffer to the required size. # # logger.debug('filling %r byte-long buffer up to %r bytes', len(self._buffer), size) self._fill_buffer(size) return self._read_from_buffer(size)
[ "def", "read", "(", "self", ",", "size", "=", "-", "1", ")", ":", "if", "size", "==", "0", ":", "return", "b''", "elif", "size", "<", "0", ":", "from_buf", "=", "self", ".", "_read_from_buffer", "(", ")", "self", ".", "_current_pos", "=", "self", ...
Read up to size bytes from the object and return them.
[ "Read", "up", "to", "size", "bytes", "from", "the", "object", "and", "return", "them", "." ]
2dc8d60f223fc7b00a2000c56362a7bd6cd0850e
https://github.com/RaRe-Technologies/smart_open/blob/2dc8d60f223fc7b00a2000c56362a7bd6cd0850e/smart_open/s3.py#L240-L266
233,322
RaRe-Technologies/smart_open
smart_open/s3.py
BufferedInputBase.readline
def readline(self, limit=-1): """Read up to and including the next newline. Returns the bytes read.""" if limit != -1: raise NotImplementedError('limits other than -1 not implemented yet') the_line = io.BytesIO() while not (self._eof and len(self._buffer) == 0): # # In the worst case, we're reading the unread part of self._buffer # twice here, once in the if condition and once when calling index. # # This is sub-optimal, but better than the alternative: wrapping # .index in a try..except, because that is slower. # remaining_buffer = self._buffer.peek() if self._line_terminator in remaining_buffer: next_newline = remaining_buffer.index(self._line_terminator) the_line.write(self._read_from_buffer(next_newline + 1)) break else: the_line.write(self._read_from_buffer()) self._fill_buffer() return the_line.getvalue()
python
def readline(self, limit=-1): if limit != -1: raise NotImplementedError('limits other than -1 not implemented yet') the_line = io.BytesIO() while not (self._eof and len(self._buffer) == 0): # # In the worst case, we're reading the unread part of self._buffer # twice here, once in the if condition and once when calling index. # # This is sub-optimal, but better than the alternative: wrapping # .index in a try..except, because that is slower. # remaining_buffer = self._buffer.peek() if self._line_terminator in remaining_buffer: next_newline = remaining_buffer.index(self._line_terminator) the_line.write(self._read_from_buffer(next_newline + 1)) break else: the_line.write(self._read_from_buffer()) self._fill_buffer() return the_line.getvalue()
[ "def", "readline", "(", "self", ",", "limit", "=", "-", "1", ")", ":", "if", "limit", "!=", "-", "1", ":", "raise", "NotImplementedError", "(", "'limits other than -1 not implemented yet'", ")", "the_line", "=", "io", ".", "BytesIO", "(", ")", "while", "no...
Read up to and including the next newline. Returns the bytes read.
[ "Read", "up", "to", "and", "including", "the", "next", "newline", ".", "Returns", "the", "bytes", "read", "." ]
2dc8d60f223fc7b00a2000c56362a7bd6cd0850e
https://github.com/RaRe-Technologies/smart_open/blob/2dc8d60f223fc7b00a2000c56362a7bd6cd0850e/smart_open/s3.py#L281-L302
233,323
RaRe-Technologies/smart_open
smart_open/s3.py
BufferedInputBase._read_from_buffer
def _read_from_buffer(self, size=-1): """Remove at most size bytes from our buffer and return them.""" # logger.debug('reading %r bytes from %r byte-long buffer', size, len(self._buffer)) size = size if size >= 0 else len(self._buffer) part = self._buffer.read(size) self._current_pos += len(part) # logger.debug('part: %r', part) return part
python
def _read_from_buffer(self, size=-1): # logger.debug('reading %r bytes from %r byte-long buffer', size, len(self._buffer)) size = size if size >= 0 else len(self._buffer) part = self._buffer.read(size) self._current_pos += len(part) # logger.debug('part: %r', part) return part
[ "def", "_read_from_buffer", "(", "self", ",", "size", "=", "-", "1", ")", ":", "# logger.debug('reading %r bytes from %r byte-long buffer', size, len(self._buffer))", "size", "=", "size", "if", "size", ">=", "0", "else", "len", "(", "self", ".", "_buffer", ")", "p...
Remove at most size bytes from our buffer and return them.
[ "Remove", "at", "most", "size", "bytes", "from", "our", "buffer", "and", "return", "them", "." ]
2dc8d60f223fc7b00a2000c56362a7bd6cd0850e
https://github.com/RaRe-Technologies/smart_open/blob/2dc8d60f223fc7b00a2000c56362a7bd6cd0850e/smart_open/s3.py#L311-L318
233,324
RaRe-Technologies/smart_open
smart_open/ssh.py
open
def open(path, mode='r', host=None, user=None, port=DEFAULT_PORT): """Open a file on a remote machine over SSH. Expects authentication to be already set up via existing keys on the local machine. Parameters ---------- path: str The path to the file to open on the remote machine. mode: str, optional The mode to use for opening the file. host: str, optional The hostname of the remote machine. May not be None. user: str, optional The username to use to login to the remote machine. If None, defaults to the name of the current user. port: int, optional The port to connect to. Returns ------- A file-like object. Important --------- If you specify a previously unseen host, then its host key will be added to the local ~/.ssh/known_hosts *automatically*. """ if not host: raise ValueError('you must specify the host to connect to') if not user: user = getpass.getuser() conn = _connect(host, user, port) sftp_client = conn.get_transport().open_sftp_client() return sftp_client.open(path, mode)
python
def open(path, mode='r', host=None, user=None, port=DEFAULT_PORT): if not host: raise ValueError('you must specify the host to connect to') if not user: user = getpass.getuser() conn = _connect(host, user, port) sftp_client = conn.get_transport().open_sftp_client() return sftp_client.open(path, mode)
[ "def", "open", "(", "path", ",", "mode", "=", "'r'", ",", "host", "=", "None", ",", "user", "=", "None", ",", "port", "=", "DEFAULT_PORT", ")", ":", "if", "not", "host", ":", "raise", "ValueError", "(", "'you must specify the host to connect to'", ")", "...
Open a file on a remote machine over SSH. Expects authentication to be already set up via existing keys on the local machine. Parameters ---------- path: str The path to the file to open on the remote machine. mode: str, optional The mode to use for opening the file. host: str, optional The hostname of the remote machine. May not be None. user: str, optional The username to use to login to the remote machine. If None, defaults to the name of the current user. port: int, optional The port to connect to. Returns ------- A file-like object. Important --------- If you specify a previously unseen host, then its host key will be added to the local ~/.ssh/known_hosts *automatically*.
[ "Open", "a", "file", "on", "a", "remote", "machine", "over", "SSH", "." ]
2dc8d60f223fc7b00a2000c56362a7bd6cd0850e
https://github.com/RaRe-Technologies/smart_open/blob/2dc8d60f223fc7b00a2000c56362a7bd6cd0850e/smart_open/ssh.py#L62-L97
233,325
RaRe-Technologies/smart_open
smart_open/http.py
open
def open(uri, mode, kerberos=False, user=None, password=None): """Implement streamed reader from a web site. Supports Kerberos and Basic HTTP authentication. Parameters ---------- url: str The URL to open. mode: str The mode to open using. kerberos: boolean, optional If True, will attempt to use the local Kerberos credentials user: str, optional The username for authenticating over HTTP password: str, optional The password for authenticating over HTTP Note ---- If neither kerberos or (user, password) are set, will connect unauthenticated. """ if mode == 'rb': return BufferedInputBase(uri, mode, kerberos=kerberos, user=user, password=password) else: raise NotImplementedError('http support for mode %r not implemented' % mode)
python
def open(uri, mode, kerberos=False, user=None, password=None): if mode == 'rb': return BufferedInputBase(uri, mode, kerberos=kerberos, user=user, password=password) else: raise NotImplementedError('http support for mode %r not implemented' % mode)
[ "def", "open", "(", "uri", ",", "mode", ",", "kerberos", "=", "False", ",", "user", "=", "None", ",", "password", "=", "None", ")", ":", "if", "mode", "==", "'rb'", ":", "return", "BufferedInputBase", "(", "uri", ",", "mode", ",", "kerberos", "=", ...
Implement streamed reader from a web site. Supports Kerberos and Basic HTTP authentication. Parameters ---------- url: str The URL to open. mode: str The mode to open using. kerberos: boolean, optional If True, will attempt to use the local Kerberos credentials user: str, optional The username for authenticating over HTTP password: str, optional The password for authenticating over HTTP Note ---- If neither kerberos or (user, password) are set, will connect unauthenticated.
[ "Implement", "streamed", "reader", "from", "a", "web", "site", "." ]
2dc8d60f223fc7b00a2000c56362a7bd6cd0850e
https://github.com/RaRe-Technologies/smart_open/blob/2dc8d60f223fc7b00a2000c56362a7bd6cd0850e/smart_open/http.py#L25-L51
233,326
RaRe-Technologies/smart_open
smart_open/http.py
BufferedInputBase.read
def read(self, size=-1): """ Mimics the read call to a filehandle object. """ logger.debug("reading with size: %d", size) if self.response is None: return b'' if size == 0: return b'' elif size < 0 and len(self._read_buffer) == 0: retval = self.response.raw.read() elif size < 0: retval = self._read_buffer.read() + self.response.raw.read() else: while len(self._read_buffer) < size: logger.debug("http reading more content at current_pos: %d with size: %d", self._current_pos, size) bytes_read = self._read_buffer.fill(self._read_iter) if bytes_read == 0: # Oops, ran out of data early. retval = self._read_buffer.read() self._current_pos += len(retval) return retval # If we got here, it means we have enough data in the buffer # to return to the caller. retval = self._read_buffer.read(size) self._current_pos += len(retval) return retval
python
def read(self, size=-1): logger.debug("reading with size: %d", size) if self.response is None: return b'' if size == 0: return b'' elif size < 0 and len(self._read_buffer) == 0: retval = self.response.raw.read() elif size < 0: retval = self._read_buffer.read() + self.response.raw.read() else: while len(self._read_buffer) < size: logger.debug("http reading more content at current_pos: %d with size: %d", self._current_pos, size) bytes_read = self._read_buffer.fill(self._read_iter) if bytes_read == 0: # Oops, ran out of data early. retval = self._read_buffer.read() self._current_pos += len(retval) return retval # If we got here, it means we have enough data in the buffer # to return to the caller. retval = self._read_buffer.read(size) self._current_pos += len(retval) return retval
[ "def", "read", "(", "self", ",", "size", "=", "-", "1", ")", ":", "logger", ".", "debug", "(", "\"reading with size: %d\"", ",", "size", ")", "if", "self", ".", "response", "is", "None", ":", "return", "b''", "if", "size", "==", "0", ":", "return", ...
Mimics the read call to a filehandle object.
[ "Mimics", "the", "read", "call", "to", "a", "filehandle", "object", "." ]
2dc8d60f223fc7b00a2000c56362a7bd6cd0850e
https://github.com/RaRe-Technologies/smart_open/blob/2dc8d60f223fc7b00a2000c56362a7bd6cd0850e/smart_open/http.py#L104-L134
233,327
RaRe-Technologies/smart_open
smart_open/bytebuffer.py
ByteBuffer.read
def read(self, size=-1): """Read bytes from the buffer and advance the read position. Returns the bytes in a bytestring. Parameters ---------- size: int, optional Maximum number of bytes to read. If negative or not supplied, read all unread bytes in the buffer. Returns ------- bytes """ part = self.peek(size) self._pos += len(part) return part
python
def read(self, size=-1): part = self.peek(size) self._pos += len(part) return part
[ "def", "read", "(", "self", ",", "size", "=", "-", "1", ")", ":", "part", "=", "self", ".", "peek", "(", "size", ")", "self", ".", "_pos", "+=", "len", "(", "part", ")", "return", "part" ]
Read bytes from the buffer and advance the read position. Returns the bytes in a bytestring. Parameters ---------- size: int, optional Maximum number of bytes to read. If negative or not supplied, read all unread bytes in the buffer. Returns ------- bytes
[ "Read", "bytes", "from", "the", "buffer", "and", "advance", "the", "read", "position", ".", "Returns", "the", "bytes", "in", "a", "bytestring", "." ]
2dc8d60f223fc7b00a2000c56362a7bd6cd0850e
https://github.com/RaRe-Technologies/smart_open/blob/2dc8d60f223fc7b00a2000c56362a7bd6cd0850e/smart_open/bytebuffer.py#L67-L83
233,328
RaRe-Technologies/smart_open
smart_open/bytebuffer.py
ByteBuffer.peek
def peek(self, size=-1): """Get bytes from the buffer without advancing the read position. Returns the bytes in a bytestring. Parameters ---------- size: int, optional Maximum number of bytes to return. If negative or not supplied, return all unread bytes in the buffer. Returns ------- bytes """ if size < 0 or size > len(self): size = len(self) part = self._bytes[self._pos:self._pos+size] return part
python
def peek(self, size=-1): if size < 0 or size > len(self): size = len(self) part = self._bytes[self._pos:self._pos+size] return part
[ "def", "peek", "(", "self", ",", "size", "=", "-", "1", ")", ":", "if", "size", "<", "0", "or", "size", ">", "len", "(", "self", ")", ":", "size", "=", "len", "(", "self", ")", "part", "=", "self", ".", "_bytes", "[", "self", ".", "_pos", "...
Get bytes from the buffer without advancing the read position. Returns the bytes in a bytestring. Parameters ---------- size: int, optional Maximum number of bytes to return. If negative or not supplied, return all unread bytes in the buffer. Returns ------- bytes
[ "Get", "bytes", "from", "the", "buffer", "without", "advancing", "the", "read", "position", ".", "Returns", "the", "bytes", "in", "a", "bytestring", "." ]
2dc8d60f223fc7b00a2000c56362a7bd6cd0850e
https://github.com/RaRe-Technologies/smart_open/blob/2dc8d60f223fc7b00a2000c56362a7bd6cd0850e/smart_open/bytebuffer.py#L85-L103
233,329
RaRe-Technologies/smart_open
smart_open/smart_open_lib.py
register_compressor
def register_compressor(ext, callback): """Register a callback for transparently decompressing files with a specific extension. Parameters ---------- ext: str The extension. callback: callable The callback. It must accept two position arguments, file_obj and mode. Examples -------- Instruct smart_open to use the identity function whenever opening a file with a .xz extension (see README.rst for the complete example showing I/O): >>> def _handle_xz(file_obj, mode): ... import lzma ... return lzma.LZMAFile(filename=file_obj, mode=mode, format=lzma.FORMAT_XZ) >>> >>> register_compressor('.xz', _handle_xz) """ if not (ext and ext[0] == '.'): raise ValueError('ext must be a string starting with ., not %r' % ext) if ext in _COMPRESSOR_REGISTRY: logger.warning('overriding existing compression handler for %r', ext) _COMPRESSOR_REGISTRY[ext] = callback
python
def register_compressor(ext, callback): if not (ext and ext[0] == '.'): raise ValueError('ext must be a string starting with ., not %r' % ext) if ext in _COMPRESSOR_REGISTRY: logger.warning('overriding existing compression handler for %r', ext) _COMPRESSOR_REGISTRY[ext] = callback
[ "def", "register_compressor", "(", "ext", ",", "callback", ")", ":", "if", "not", "(", "ext", "and", "ext", "[", "0", "]", "==", "'.'", ")", ":", "raise", "ValueError", "(", "'ext must be a string starting with ., not %r'", "%", "ext", ")", "if", "ext", "i...
Register a callback for transparently decompressing files with a specific extension. Parameters ---------- ext: str The extension. callback: callable The callback. It must accept two position arguments, file_obj and mode. Examples -------- Instruct smart_open to use the identity function whenever opening a file with a .xz extension (see README.rst for the complete example showing I/O): >>> def _handle_xz(file_obj, mode): ... import lzma ... return lzma.LZMAFile(filename=file_obj, mode=mode, format=lzma.FORMAT_XZ) >>> >>> register_compressor('.xz', _handle_xz)
[ "Register", "a", "callback", "for", "transparently", "decompressing", "files", "with", "a", "specific", "extension", "." ]
2dc8d60f223fc7b00a2000c56362a7bd6cd0850e
https://github.com/RaRe-Technologies/smart_open/blob/2dc8d60f223fc7b00a2000c56362a7bd6cd0850e/smart_open/smart_open_lib.py#L70-L97
233,330
RaRe-Technologies/smart_open
smart_open/smart_open_lib.py
_check_kwargs
def _check_kwargs(kallable, kwargs): """Check which keyword arguments the callable supports. Parameters ---------- kallable: callable A function or method to test kwargs: dict The keyword arguments to check. If the callable doesn't support any of these, a warning message will get printed. Returns ------- dict A dictionary of argument names and values supported by the callable. """ supported_keywords = sorted(_inspect_kwargs(kallable)) unsupported_keywords = [k for k in sorted(kwargs) if k not in supported_keywords] supported_kwargs = {k: v for (k, v) in kwargs.items() if k in supported_keywords} if unsupported_keywords: logger.warning('ignoring unsupported keyword arguments: %r', unsupported_keywords) return supported_kwargs
python
def _check_kwargs(kallable, kwargs): supported_keywords = sorted(_inspect_kwargs(kallable)) unsupported_keywords = [k for k in sorted(kwargs) if k not in supported_keywords] supported_kwargs = {k: v for (k, v) in kwargs.items() if k in supported_keywords} if unsupported_keywords: logger.warning('ignoring unsupported keyword arguments: %r', unsupported_keywords) return supported_kwargs
[ "def", "_check_kwargs", "(", "kallable", ",", "kwargs", ")", ":", "supported_keywords", "=", "sorted", "(", "_inspect_kwargs", "(", "kallable", ")", ")", "unsupported_keywords", "=", "[", "k", "for", "k", "in", "sorted", "(", "kwargs", ")", "if", "k", "not...
Check which keyword arguments the callable supports. Parameters ---------- kallable: callable A function or method to test kwargs: dict The keyword arguments to check. If the callable doesn't support any of these, a warning message will get printed. Returns ------- dict A dictionary of argument names and values supported by the callable.
[ "Check", "which", "keyword", "arguments", "the", "callable", "supports", "." ]
2dc8d60f223fc7b00a2000c56362a7bd6cd0850e
https://github.com/RaRe-Technologies/smart_open/blob/2dc8d60f223fc7b00a2000c56362a7bd6cd0850e/smart_open/smart_open_lib.py#L172-L195
233,331
RaRe-Technologies/smart_open
smart_open/smart_open_lib.py
open
def open( uri, mode='r', buffering=-1, encoding=None, errors=None, newline=None, closefd=True, opener=None, ignore_ext=False, transport_params=None, ): r"""Open the URI object, returning a file-like object. The URI is usually a string in a variety of formats: 1. a URI for the local filesystem: `./lines.txt`, `/home/joe/lines.txt.gz`, `file:///home/joe/lines.txt.bz2` 2. a URI for HDFS: `hdfs:///some/path/lines.txt` 3. a URI for Amazon's S3 (can also supply credentials inside the URI): `s3://my_bucket/lines.txt`, `s3://my_aws_key_id:key_secret@my_bucket/lines.txt` The URI may also be one of: - an instance of the pathlib.Path class - a stream (anything that implements io.IOBase-like functionality) This function supports transparent compression and decompression using the following codec: - ``.gz`` - ``.bz2`` The function depends on the file extension to determine the appropriate codec. Parameters ---------- uri: str or object The object to open. mode: str, optional Mimicks built-in open parameter of the same name. buffering: int, optional Mimicks built-in open parameter of the same name. encoding: str, optional Mimicks built-in open parameter of the same name. errors: str, optional Mimicks built-in open parameter of the same name. newline: str, optional Mimicks built-in open parameter of the same name. closefd: boolean, optional Mimicks built-in open parameter of the same name. Ignored. opener: object, optional Mimicks built-in open parameter of the same name. Ignored. ignore_ext: boolean, optional Disable transparent compression/decompression based on the file extension. transport_params: dict, optional Additional parameters for the transport layer (see notes below). Returns ------- A file-like object. Notes ----- smart_open has several implementations for its transport layer (e.g. S3, HTTP). Each transport layer has a different set of keyword arguments for overriding default behavior. 
If you specify a keyword argument that is *not* supported by the transport layer being used, smart_open will ignore that argument and log a warning message. S3 (for details, see :mod:`smart_open.s3` and :func:`smart_open.s3.open`): %(s3)s HTTP (for details, see :mod:`smart_open.http` and :func:`smart_open.http.open`): %(http)s WebHDFS (for details, see :mod:`smart_open.webhdfs` and :func:`smart_open.webhdfs.open`): %(webhdfs)s SSH (for details, see :mod:`smart_open.ssh` and :func:`smart_open.ssh.open`): %(ssh)s Examples -------- %(examples)s See Also -------- - `Standard library reference <https://docs.python.org/3.7/library/functions.html#open>`__ - `smart_open README.rst <https://github.com/RaRe-Technologies/smart_open/blob/master/README.rst>`__ """ logger.debug('%r', locals()) if not isinstance(mode, six.string_types): raise TypeError('mode should be a string') if transport_params is None: transport_params = {} fobj = _shortcut_open( uri, mode, ignore_ext=ignore_ext, buffering=buffering, encoding=encoding, errors=errors, ) if fobj is not None: return fobj # # This is a work-around for the problem described in Issue #144. # If the user has explicitly specified an encoding, then assume they want # us to open the destination in text mode, instead of the default binary. # # If we change the default mode to be text, and match the normal behavior # of Py2 and 3, then the above assumption will be unnecessary. # if encoding is not None and 'b' in mode: mode = mode.replace('b', '') # Support opening ``pathlib.Path`` objects by casting them to strings. if PATHLIB_SUPPORT and isinstance(uri, pathlib.Path): uri = str(uri) explicit_encoding = encoding encoding = explicit_encoding if explicit_encoding else SYSTEM_ENCODING # # This is how we get from the filename to the end result. Decompression is # optional, but it always accepts bytes and returns bytes. # # Decoding is also optional, accepts bytes and returns text. 
The diagram # below is for reading, for writing, the flow is from right to left, but # the code is identical. # # open as binary decompress? decode? # filename ---------------> bytes -------------> bytes ---------> text # binary decompressed decode # try: binary_mode = {'r': 'rb', 'r+': 'rb+', 'w': 'wb', 'w+': 'wb+', 'a': 'ab', 'a+': 'ab+'}[mode] except KeyError: binary_mode = mode binary, filename = _open_binary_stream(uri, binary_mode, transport_params) if ignore_ext: decompressed = binary else: decompressed = _compression_wrapper(binary, filename, mode) if 'b' not in mode or explicit_encoding is not None: decoded = _encoding_wrapper(decompressed, mode, encoding=encoding, errors=errors) else: decoded = decompressed return decoded
python
def open( uri, mode='r', buffering=-1, encoding=None, errors=None, newline=None, closefd=True, opener=None, ignore_ext=False, transport_params=None, ): r"""Open the URI object, returning a file-like object. The URI is usually a string in a variety of formats: 1. a URI for the local filesystem: `./lines.txt`, `/home/joe/lines.txt.gz`, `file:///home/joe/lines.txt.bz2` 2. a URI for HDFS: `hdfs:///some/path/lines.txt` 3. a URI for Amazon's S3 (can also supply credentials inside the URI): `s3://my_bucket/lines.txt`, `s3://my_aws_key_id:key_secret@my_bucket/lines.txt` The URI may also be one of: - an instance of the pathlib.Path class - a stream (anything that implements io.IOBase-like functionality) This function supports transparent compression and decompression using the following codec: - ``.gz`` - ``.bz2`` The function depends on the file extension to determine the appropriate codec. Parameters ---------- uri: str or object The object to open. mode: str, optional Mimicks built-in open parameter of the same name. buffering: int, optional Mimicks built-in open parameter of the same name. encoding: str, optional Mimicks built-in open parameter of the same name. errors: str, optional Mimicks built-in open parameter of the same name. newline: str, optional Mimicks built-in open parameter of the same name. closefd: boolean, optional Mimicks built-in open parameter of the same name. Ignored. opener: object, optional Mimicks built-in open parameter of the same name. Ignored. ignore_ext: boolean, optional Disable transparent compression/decompression based on the file extension. transport_params: dict, optional Additional parameters for the transport layer (see notes below). Returns ------- A file-like object. Notes ----- smart_open has several implementations for its transport layer (e.g. S3, HTTP). Each transport layer has a different set of keyword arguments for overriding default behavior. 
If you specify a keyword argument that is *not* supported by the transport layer being used, smart_open will ignore that argument and log a warning message. S3 (for details, see :mod:`smart_open.s3` and :func:`smart_open.s3.open`): %(s3)s HTTP (for details, see :mod:`smart_open.http` and :func:`smart_open.http.open`): %(http)s WebHDFS (for details, see :mod:`smart_open.webhdfs` and :func:`smart_open.webhdfs.open`): %(webhdfs)s SSH (for details, see :mod:`smart_open.ssh` and :func:`smart_open.ssh.open`): %(ssh)s Examples -------- %(examples)s See Also -------- - `Standard library reference <https://docs.python.org/3.7/library/functions.html#open>`__ - `smart_open README.rst <https://github.com/RaRe-Technologies/smart_open/blob/master/README.rst>`__ """ logger.debug('%r', locals()) if not isinstance(mode, six.string_types): raise TypeError('mode should be a string') if transport_params is None: transport_params = {} fobj = _shortcut_open( uri, mode, ignore_ext=ignore_ext, buffering=buffering, encoding=encoding, errors=errors, ) if fobj is not None: return fobj # # This is a work-around for the problem described in Issue #144. # If the user has explicitly specified an encoding, then assume they want # us to open the destination in text mode, instead of the default binary. # # If we change the default mode to be text, and match the normal behavior # of Py2 and 3, then the above assumption will be unnecessary. # if encoding is not None and 'b' in mode: mode = mode.replace('b', '') # Support opening ``pathlib.Path`` objects by casting them to strings. if PATHLIB_SUPPORT and isinstance(uri, pathlib.Path): uri = str(uri) explicit_encoding = encoding encoding = explicit_encoding if explicit_encoding else SYSTEM_ENCODING # # This is how we get from the filename to the end result. Decompression is # optional, but it always accepts bytes and returns bytes. # # Decoding is also optional, accepts bytes and returns text. 
The diagram # below is for reading, for writing, the flow is from right to left, but # the code is identical. # # open as binary decompress? decode? # filename ---------------> bytes -------------> bytes ---------> text # binary decompressed decode # try: binary_mode = {'r': 'rb', 'r+': 'rb+', 'w': 'wb', 'w+': 'wb+', 'a': 'ab', 'a+': 'ab+'}[mode] except KeyError: binary_mode = mode binary, filename = _open_binary_stream(uri, binary_mode, transport_params) if ignore_ext: decompressed = binary else: decompressed = _compression_wrapper(binary, filename, mode) if 'b' not in mode or explicit_encoding is not None: decoded = _encoding_wrapper(decompressed, mode, encoding=encoding, errors=errors) else: decoded = decompressed return decoded
[ "def", "open", "(", "uri", ",", "mode", "=", "'r'", ",", "buffering", "=", "-", "1", ",", "encoding", "=", "None", ",", "errors", "=", "None", ",", "newline", "=", "None", ",", "closefd", "=", "True", ",", "opener", "=", "None", ",", "ignore_ext", ...
r"""Open the URI object, returning a file-like object. The URI is usually a string in a variety of formats: 1. a URI for the local filesystem: `./lines.txt`, `/home/joe/lines.txt.gz`, `file:///home/joe/lines.txt.bz2` 2. a URI for HDFS: `hdfs:///some/path/lines.txt` 3. a URI for Amazon's S3 (can also supply credentials inside the URI): `s3://my_bucket/lines.txt`, `s3://my_aws_key_id:key_secret@my_bucket/lines.txt` The URI may also be one of: - an instance of the pathlib.Path class - a stream (anything that implements io.IOBase-like functionality) This function supports transparent compression and decompression using the following codec: - ``.gz`` - ``.bz2`` The function depends on the file extension to determine the appropriate codec. Parameters ---------- uri: str or object The object to open. mode: str, optional Mimicks built-in open parameter of the same name. buffering: int, optional Mimicks built-in open parameter of the same name. encoding: str, optional Mimicks built-in open parameter of the same name. errors: str, optional Mimicks built-in open parameter of the same name. newline: str, optional Mimicks built-in open parameter of the same name. closefd: boolean, optional Mimicks built-in open parameter of the same name. Ignored. opener: object, optional Mimicks built-in open parameter of the same name. Ignored. ignore_ext: boolean, optional Disable transparent compression/decompression based on the file extension. transport_params: dict, optional Additional parameters for the transport layer (see notes below). Returns ------- A file-like object. Notes ----- smart_open has several implementations for its transport layer (e.g. S3, HTTP). Each transport layer has a different set of keyword arguments for overriding default behavior. If you specify a keyword argument that is *not* supported by the transport layer being used, smart_open will ignore that argument and log a warning message. 
S3 (for details, see :mod:`smart_open.s3` and :func:`smart_open.s3.open`): %(s3)s HTTP (for details, see :mod:`smart_open.http` and :func:`smart_open.http.open`): %(http)s WebHDFS (for details, see :mod:`smart_open.webhdfs` and :func:`smart_open.webhdfs.open`): %(webhdfs)s SSH (for details, see :mod:`smart_open.ssh` and :func:`smart_open.ssh.open`): %(ssh)s Examples -------- %(examples)s See Also -------- - `Standard library reference <https://docs.python.org/3.7/library/functions.html#open>`__ - `smart_open README.rst <https://github.com/RaRe-Technologies/smart_open/blob/master/README.rst>`__
[ "r", "Open", "the", "URI", "object", "returning", "a", "file", "-", "like", "object", "." ]
2dc8d60f223fc7b00a2000c56362a7bd6cd0850e
https://github.com/RaRe-Technologies/smart_open/blob/2dc8d60f223fc7b00a2000c56362a7bd6cd0850e/smart_open/smart_open_lib.py#L201-L359
233,332
RaRe-Technologies/smart_open
smart_open/smart_open_lib.py
smart_open
def smart_open(uri, mode="rb", **kw): """Deprecated, use smart_open.open instead.""" logger.warning('this function is deprecated, use smart_open.open instead') # # The new function uses a shorter name for this parameter, handle it separately. # ignore_extension = kw.pop('ignore_extension', False) expected_kwargs = _inspect_kwargs(open) scrubbed_kwargs = {} transport_params = {} # # Handle renamed keyword arguments. This is required to maintain backward # compatibility. See test_smart_open_old.py for tests. # if 'host' in kw or 's3_upload' in kw: transport_params['multipart_upload_kwargs'] = {} transport_params['resource_kwargs'] = {} if 'host' in kw: url = kw.pop('host') if not url.startswith('http'): url = 'http://' + url transport_params['resource_kwargs'].update(endpoint_url=url) if 's3_upload' in kw and kw['s3_upload']: transport_params['multipart_upload_kwargs'].update(**kw.pop('s3_upload')) # # Providing the entire Session object as opposed to just the profile name # is more flexible and powerful, and thus preferable in the case of # conflict. # if 'profile_name' in kw and 's3_session' in kw: logger.error('profile_name and s3_session are mutually exclusive, ignoring the former') if 'profile_name' in kw: transport_params['session'] = boto3.Session(profile_name=kw.pop('profile_name')) if 's3_session' in kw: transport_params['session'] = kw.pop('s3_session') for key, value in kw.items(): if key in expected_kwargs: scrubbed_kwargs[key] = value else: # # Assume that anything not explicitly supported by the new function # is a transport layer keyword argument. This is safe, because if # the argument ends up being unsupported in the transport layer, # it will only cause a logging warning, not a crash. # transport_params[key] = value return open(uri, mode, ignore_ext=ignore_extension, transport_params=transport_params, **scrubbed_kwargs)
python
def smart_open(uri, mode="rb", **kw): logger.warning('this function is deprecated, use smart_open.open instead') # # The new function uses a shorter name for this parameter, handle it separately. # ignore_extension = kw.pop('ignore_extension', False) expected_kwargs = _inspect_kwargs(open) scrubbed_kwargs = {} transport_params = {} # # Handle renamed keyword arguments. This is required to maintain backward # compatibility. See test_smart_open_old.py for tests. # if 'host' in kw or 's3_upload' in kw: transport_params['multipart_upload_kwargs'] = {} transport_params['resource_kwargs'] = {} if 'host' in kw: url = kw.pop('host') if not url.startswith('http'): url = 'http://' + url transport_params['resource_kwargs'].update(endpoint_url=url) if 's3_upload' in kw and kw['s3_upload']: transport_params['multipart_upload_kwargs'].update(**kw.pop('s3_upload')) # # Providing the entire Session object as opposed to just the profile name # is more flexible and powerful, and thus preferable in the case of # conflict. # if 'profile_name' in kw and 's3_session' in kw: logger.error('profile_name and s3_session are mutually exclusive, ignoring the former') if 'profile_name' in kw: transport_params['session'] = boto3.Session(profile_name=kw.pop('profile_name')) if 's3_session' in kw: transport_params['session'] = kw.pop('s3_session') for key, value in kw.items(): if key in expected_kwargs: scrubbed_kwargs[key] = value else: # # Assume that anything not explicitly supported by the new function # is a transport layer keyword argument. This is safe, because if # the argument ends up being unsupported in the transport layer, # it will only cause a logging warning, not a crash. # transport_params[key] = value return open(uri, mode, ignore_ext=ignore_extension, transport_params=transport_params, **scrubbed_kwargs)
[ "def", "smart_open", "(", "uri", ",", "mode", "=", "\"rb\"", ",", "*", "*", "kw", ")", ":", "logger", ".", "warning", "(", "'this function is deprecated, use smart_open.open instead'", ")", "#", "# The new function uses a shorter name for this parameter, handle it separatel...
Deprecated, use smart_open.open instead.
[ "Deprecated", "use", "smart_open", ".", "open", "instead", "." ]
2dc8d60f223fc7b00a2000c56362a7bd6cd0850e
https://github.com/RaRe-Technologies/smart_open/blob/2dc8d60f223fc7b00a2000c56362a7bd6cd0850e/smart_open/smart_open_lib.py#L383-L439
233,333
RaRe-Technologies/smart_open
smart_open/smart_open_lib.py
_shortcut_open
def _shortcut_open( uri, mode, ignore_ext=False, buffering=-1, encoding=None, errors=None, ): """Try to open the URI using the standard library io.open function. This can be much faster than the alternative of opening in binary mode and then decoding. This is only possible under the following conditions: 1. Opening a local file 2. Ignore extension is set to True If it is not possible to use the built-in open for the specified URI, returns None. :param str uri: A string indicating what to open. :param str mode: The mode to pass to the open function. :param dict kw: :returns: The opened file :rtype: file """ if not isinstance(uri, six.string_types): return None parsed_uri = _parse_uri(uri) if parsed_uri.scheme != 'file': return None _, extension = P.splitext(parsed_uri.uri_path) if extension in _COMPRESSOR_REGISTRY and not ignore_ext: return None open_kwargs = {} if encoding is not None: open_kwargs['encoding'] = encoding mode = mode.replace('b', '') # # binary mode of the builtin/stdlib open function doesn't take an errors argument # if errors and 'b' not in mode: open_kwargs['errors'] = errors # # Under Py3, the built-in open accepts kwargs, and it's OK to use that. # Under Py2, the built-in open _doesn't_ accept kwargs, but we still use it # whenever possible (see issue #207). If we're under Py2 and have to use # kwargs, then we have no option other to use io.open. # if six.PY3: return _builtin_open(parsed_uri.uri_path, mode, buffering=buffering, **open_kwargs) elif not open_kwargs: return _builtin_open(parsed_uri.uri_path, mode, buffering=buffering) return io.open(parsed_uri.uri_path, mode, buffering=buffering, **open_kwargs)
python
def _shortcut_open( uri, mode, ignore_ext=False, buffering=-1, encoding=None, errors=None, ): if not isinstance(uri, six.string_types): return None parsed_uri = _parse_uri(uri) if parsed_uri.scheme != 'file': return None _, extension = P.splitext(parsed_uri.uri_path) if extension in _COMPRESSOR_REGISTRY and not ignore_ext: return None open_kwargs = {} if encoding is not None: open_kwargs['encoding'] = encoding mode = mode.replace('b', '') # # binary mode of the builtin/stdlib open function doesn't take an errors argument # if errors and 'b' not in mode: open_kwargs['errors'] = errors # # Under Py3, the built-in open accepts kwargs, and it's OK to use that. # Under Py2, the built-in open _doesn't_ accept kwargs, but we still use it # whenever possible (see issue #207). If we're under Py2 and have to use # kwargs, then we have no option other to use io.open. # if six.PY3: return _builtin_open(parsed_uri.uri_path, mode, buffering=buffering, **open_kwargs) elif not open_kwargs: return _builtin_open(parsed_uri.uri_path, mode, buffering=buffering) return io.open(parsed_uri.uri_path, mode, buffering=buffering, **open_kwargs)
[ "def", "_shortcut_open", "(", "uri", ",", "mode", ",", "ignore_ext", "=", "False", ",", "buffering", "=", "-", "1", ",", "encoding", "=", "None", ",", "errors", "=", "None", ",", ")", ":", "if", "not", "isinstance", "(", "uri", ",", "six", ".", "st...
Try to open the URI using the standard library io.open function. This can be much faster than the alternative of opening in binary mode and then decoding. This is only possible under the following conditions: 1. Opening a local file 2. Ignore extension is set to True If it is not possible to use the built-in open for the specified URI, returns None. :param str uri: A string indicating what to open. :param str mode: The mode to pass to the open function. :param dict kw: :returns: The opened file :rtype: file
[ "Try", "to", "open", "the", "URI", "using", "the", "standard", "library", "io", ".", "open", "function", "." ]
2dc8d60f223fc7b00a2000c56362a7bd6cd0850e
https://github.com/RaRe-Technologies/smart_open/blob/2dc8d60f223fc7b00a2000c56362a7bd6cd0850e/smart_open/smart_open_lib.py#L442-L501
233,334
RaRe-Technologies/smart_open
smart_open/smart_open_lib.py
_open_binary_stream
def _open_binary_stream(uri, mode, transport_params): """Open an arbitrary URI in the specified binary mode. Not all modes are supported for all protocols. :arg uri: The URI to open. May be a string, or something else. :arg str mode: The mode to open with. Must be rb, wb or ab. :arg transport_params: Keyword argumens for the transport layer. :returns: A file object and the filename :rtype: tuple """ if mode not in ('rb', 'rb+', 'wb', 'wb+', 'ab', 'ab+'): # # This should really be a ValueError, but for the sake of compatibility # with older versions, which raise NotImplementedError, we do the same. # raise NotImplementedError('unsupported mode: %r' % mode) if isinstance(uri, six.string_types): # this method just routes the request to classes handling the specific storage # schemes, depending on the URI protocol in `uri` filename = uri.split('/')[-1] parsed_uri = _parse_uri(uri) unsupported = "%r mode not supported for %r scheme" % (mode, parsed_uri.scheme) if parsed_uri.scheme == "file": fobj = io.open(parsed_uri.uri_path, mode) return fobj, filename elif parsed_uri.scheme in smart_open_ssh.SCHEMES: fobj = smart_open_ssh.open( parsed_uri.uri_path, mode, host=parsed_uri.host, user=parsed_uri.user, port=parsed_uri.port, ) return fobj, filename elif parsed_uri.scheme in smart_open_s3.SUPPORTED_SCHEMES: return _s3_open_uri(parsed_uri, mode, transport_params), filename elif parsed_uri.scheme == "hdfs": _check_kwargs(smart_open_hdfs.open, transport_params) return smart_open_hdfs.open(parsed_uri.uri_path, mode), filename elif parsed_uri.scheme == "webhdfs": kw = _check_kwargs(smart_open_webhdfs.open, transport_params) return smart_open_webhdfs.open(parsed_uri.uri_path, mode, **kw), filename elif parsed_uri.scheme.startswith('http'): # # The URI may contain a query string and fragments, which interfere # with our compressed/uncompressed estimation, so we strip them. 
# filename = P.basename(urlparse.urlparse(uri).path) kw = _check_kwargs(smart_open_http.open, transport_params) return smart_open_http.open(uri, mode, **kw), filename else: raise NotImplementedError("scheme %r is not supported", parsed_uri.scheme) elif hasattr(uri, 'read'): # simply pass-through if already a file-like # we need to return something as the file name, but we don't know what # so we probe for uri.name (e.g., this works with open() or tempfile.NamedTemporaryFile) # if the value ends with COMPRESSED_EXT, we will note it in _compression_wrapper() # if there is no such an attribute, we return "unknown" - this effectively disables any compression filename = getattr(uri, 'name', 'unknown') return uri, filename else: raise TypeError("don't know how to handle uri %r" % uri)
python
def _open_binary_stream(uri, mode, transport_params): if mode not in ('rb', 'rb+', 'wb', 'wb+', 'ab', 'ab+'): # # This should really be a ValueError, but for the sake of compatibility # with older versions, which raise NotImplementedError, we do the same. # raise NotImplementedError('unsupported mode: %r' % mode) if isinstance(uri, six.string_types): # this method just routes the request to classes handling the specific storage # schemes, depending on the URI protocol in `uri` filename = uri.split('/')[-1] parsed_uri = _parse_uri(uri) unsupported = "%r mode not supported for %r scheme" % (mode, parsed_uri.scheme) if parsed_uri.scheme == "file": fobj = io.open(parsed_uri.uri_path, mode) return fobj, filename elif parsed_uri.scheme in smart_open_ssh.SCHEMES: fobj = smart_open_ssh.open( parsed_uri.uri_path, mode, host=parsed_uri.host, user=parsed_uri.user, port=parsed_uri.port, ) return fobj, filename elif parsed_uri.scheme in smart_open_s3.SUPPORTED_SCHEMES: return _s3_open_uri(parsed_uri, mode, transport_params), filename elif parsed_uri.scheme == "hdfs": _check_kwargs(smart_open_hdfs.open, transport_params) return smart_open_hdfs.open(parsed_uri.uri_path, mode), filename elif parsed_uri.scheme == "webhdfs": kw = _check_kwargs(smart_open_webhdfs.open, transport_params) return smart_open_webhdfs.open(parsed_uri.uri_path, mode, **kw), filename elif parsed_uri.scheme.startswith('http'): # # The URI may contain a query string and fragments, which interfere # with our compressed/uncompressed estimation, so we strip them. 
# filename = P.basename(urlparse.urlparse(uri).path) kw = _check_kwargs(smart_open_http.open, transport_params) return smart_open_http.open(uri, mode, **kw), filename else: raise NotImplementedError("scheme %r is not supported", parsed_uri.scheme) elif hasattr(uri, 'read'): # simply pass-through if already a file-like # we need to return something as the file name, but we don't know what # so we probe for uri.name (e.g., this works with open() or tempfile.NamedTemporaryFile) # if the value ends with COMPRESSED_EXT, we will note it in _compression_wrapper() # if there is no such an attribute, we return "unknown" - this effectively disables any compression filename = getattr(uri, 'name', 'unknown') return uri, filename else: raise TypeError("don't know how to handle uri %r" % uri)
[ "def", "_open_binary_stream", "(", "uri", ",", "mode", ",", "transport_params", ")", ":", "if", "mode", "not", "in", "(", "'rb'", ",", "'rb+'", ",", "'wb'", ",", "'wb+'", ",", "'ab'", ",", "'ab+'", ")", ":", "#", "# This should really be a ValueError, but fo...
Open an arbitrary URI in the specified binary mode. Not all modes are supported for all protocols. :arg uri: The URI to open. May be a string, or something else. :arg str mode: The mode to open with. Must be rb, wb or ab. :arg transport_params: Keyword argumens for the transport layer. :returns: A file object and the filename :rtype: tuple
[ "Open", "an", "arbitrary", "URI", "in", "the", "specified", "binary", "mode", "." ]
2dc8d60f223fc7b00a2000c56362a7bd6cd0850e
https://github.com/RaRe-Technologies/smart_open/blob/2dc8d60f223fc7b00a2000c56362a7bd6cd0850e/smart_open/smart_open_lib.py#L504-L568
233,335
RaRe-Technologies/smart_open
smart_open/smart_open_lib.py
_my_urlsplit
def _my_urlsplit(url): """This is a hack to prevent the regular urlsplit from splitting around question marks. A question mark (?) in a URL typically indicates the start of a querystring, and the standard library's urlparse function handles the querystring separately. Unfortunately, question marks can also appear _inside_ the actual URL for some schemas like S3. Replaces question marks with newlines prior to splitting. This is safe because: 1. The standard library's urlsplit completely ignores newlines 2. Raw newlines will never occur in innocuous URLs. They are always URL-encoded. See Also -------- https://github.com/python/cpython/blob/3.7/Lib/urllib/parse.py https://github.com/RaRe-Technologies/smart_open/issues/285 """ if '?' not in url: return urlsplit(url, allow_fragments=False) sr = urlsplit(url.replace('?', '\n'), allow_fragments=False) SplitResult = collections.namedtuple('SplitResult', 'scheme netloc path query fragment') return SplitResult(sr.scheme, sr.netloc, sr.path.replace('\n', '?'), '', '')
python
def _my_urlsplit(url): if '?' not in url: return urlsplit(url, allow_fragments=False) sr = urlsplit(url.replace('?', '\n'), allow_fragments=False) SplitResult = collections.namedtuple('SplitResult', 'scheme netloc path query fragment') return SplitResult(sr.scheme, sr.netloc, sr.path.replace('\n', '?'), '', '')
[ "def", "_my_urlsplit", "(", "url", ")", ":", "if", "'?'", "not", "in", "url", ":", "return", "urlsplit", "(", "url", ",", "allow_fragments", "=", "False", ")", "sr", "=", "urlsplit", "(", "url", ".", "replace", "(", "'?'", ",", "'\\n'", ")", ",", "...
This is a hack to prevent the regular urlsplit from splitting around question marks. A question mark (?) in a URL typically indicates the start of a querystring, and the standard library's urlparse function handles the querystring separately. Unfortunately, question marks can also appear _inside_ the actual URL for some schemas like S3. Replaces question marks with newlines prior to splitting. This is safe because: 1. The standard library's urlsplit completely ignores newlines 2. Raw newlines will never occur in innocuous URLs. They are always URL-encoded. See Also -------- https://github.com/python/cpython/blob/3.7/Lib/urllib/parse.py https://github.com/RaRe-Technologies/smart_open/issues/285
[ "This", "is", "a", "hack", "to", "prevent", "the", "regular", "urlsplit", "from", "splitting", "around", "question", "marks", "." ]
2dc8d60f223fc7b00a2000c56362a7bd6cd0850e
https://github.com/RaRe-Technologies/smart_open/blob/2dc8d60f223fc7b00a2000c56362a7bd6cd0850e/smart_open/smart_open_lib.py#L632-L655
233,336
RaRe-Technologies/smart_open
smart_open/smart_open_lib.py
_parse_uri
def _parse_uri(uri_as_string): """ Parse the given URI from a string. Supported URI schemes are: * file * hdfs * http * https * s3 * s3a * s3n * s3u * webhdfs .s3, s3a and s3n are treated the same way. s3u is s3 but without SSL. Valid URI examples:: * s3://my_bucket/my_key * s3://my_key:my_secret@my_bucket/my_key * s3://my_key:my_secret@my_server:my_port@my_bucket/my_key * hdfs:///path/file * hdfs://path/file * webhdfs://host:port/path/file * ./local/path/file * ~/local/path/file * local/path/file * ./local/path/file.gz * file:///home/user/file * file:///home/user/file.bz2 * [ssh|scp|sftp]://username@host//path/file * [ssh|scp|sftp]://username@host/path/file """ if os.name == 'nt': # urlsplit doesn't work on Windows -- it parses the drive as the scheme... if '://' not in uri_as_string: # no protocol given => assume a local file uri_as_string = 'file://' + uri_as_string parsed_uri = _my_urlsplit(uri_as_string) if parsed_uri.scheme == "hdfs": return _parse_uri_hdfs(parsed_uri) elif parsed_uri.scheme == "webhdfs": return _parse_uri_webhdfs(parsed_uri) elif parsed_uri.scheme in smart_open_s3.SUPPORTED_SCHEMES: return _parse_uri_s3x(parsed_uri) elif parsed_uri.scheme == 'file': return _parse_uri_file(parsed_uri.netloc + parsed_uri.path) elif parsed_uri.scheme in ('', None): return _parse_uri_file(uri_as_string) elif parsed_uri.scheme.startswith('http'): return Uri(scheme=parsed_uri.scheme, uri_path=uri_as_string) elif parsed_uri.scheme in smart_open_ssh.SCHEMES: return _parse_uri_ssh(parsed_uri) else: raise NotImplementedError( "unknown URI scheme %r in %r" % (parsed_uri.scheme, uri_as_string) )
python
def _parse_uri(uri_as_string): if os.name == 'nt': # urlsplit doesn't work on Windows -- it parses the drive as the scheme... if '://' not in uri_as_string: # no protocol given => assume a local file uri_as_string = 'file://' + uri_as_string parsed_uri = _my_urlsplit(uri_as_string) if parsed_uri.scheme == "hdfs": return _parse_uri_hdfs(parsed_uri) elif parsed_uri.scheme == "webhdfs": return _parse_uri_webhdfs(parsed_uri) elif parsed_uri.scheme in smart_open_s3.SUPPORTED_SCHEMES: return _parse_uri_s3x(parsed_uri) elif parsed_uri.scheme == 'file': return _parse_uri_file(parsed_uri.netloc + parsed_uri.path) elif parsed_uri.scheme in ('', None): return _parse_uri_file(uri_as_string) elif parsed_uri.scheme.startswith('http'): return Uri(scheme=parsed_uri.scheme, uri_path=uri_as_string) elif parsed_uri.scheme in smart_open_ssh.SCHEMES: return _parse_uri_ssh(parsed_uri) else: raise NotImplementedError( "unknown URI scheme %r in %r" % (parsed_uri.scheme, uri_as_string) )
[ "def", "_parse_uri", "(", "uri_as_string", ")", ":", "if", "os", ".", "name", "==", "'nt'", ":", "# urlsplit doesn't work on Windows -- it parses the drive as the scheme...", "if", "'://'", "not", "in", "uri_as_string", ":", "# no protocol given => assume a local file", "ur...
Parse the given URI from a string. Supported URI schemes are: * file * hdfs * http * https * s3 * s3a * s3n * s3u * webhdfs .s3, s3a and s3n are treated the same way. s3u is s3 but without SSL. Valid URI examples:: * s3://my_bucket/my_key * s3://my_key:my_secret@my_bucket/my_key * s3://my_key:my_secret@my_server:my_port@my_bucket/my_key * hdfs:///path/file * hdfs://path/file * webhdfs://host:port/path/file * ./local/path/file * ~/local/path/file * local/path/file * ./local/path/file.gz * file:///home/user/file * file:///home/user/file.bz2 * [ssh|scp|sftp]://username@host//path/file * [ssh|scp|sftp]://username@host/path/file
[ "Parse", "the", "given", "URI", "from", "a", "string", "." ]
2dc8d60f223fc7b00a2000c56362a7bd6cd0850e
https://github.com/RaRe-Technologies/smart_open/blob/2dc8d60f223fc7b00a2000c56362a7bd6cd0850e/smart_open/smart_open_lib.py#L658-L719
233,337
RaRe-Technologies/smart_open
smart_open/smart_open_lib.py
_parse_uri_ssh
def _parse_uri_ssh(unt): """Parse a Uri from a urllib namedtuple.""" if '@' in unt.netloc: user, host_port = unt.netloc.split('@', 1) else: user, host_port = None, unt.netloc if ':' in host_port: host, port = host_port.split(':', 1) else: host, port = host_port, None if not user: user = None if not port: port = smart_open_ssh.DEFAULT_PORT else: port = int(port) return Uri(scheme=unt.scheme, uri_path=unt.path, user=user, host=host, port=port)
python
def _parse_uri_ssh(unt): if '@' in unt.netloc: user, host_port = unt.netloc.split('@', 1) else: user, host_port = None, unt.netloc if ':' in host_port: host, port = host_port.split(':', 1) else: host, port = host_port, None if not user: user = None if not port: port = smart_open_ssh.DEFAULT_PORT else: port = int(port) return Uri(scheme=unt.scheme, uri_path=unt.path, user=user, host=host, port=port)
[ "def", "_parse_uri_ssh", "(", "unt", ")", ":", "if", "'@'", "in", "unt", ".", "netloc", ":", "user", ",", "host_port", "=", "unt", ".", "netloc", ".", "split", "(", "'@'", ",", "1", ")", "else", ":", "user", ",", "host_port", "=", "None", ",", "u...
Parse a Uri from a urllib namedtuple.
[ "Parse", "a", "Uri", "from", "a", "urllib", "namedtuple", "." ]
2dc8d60f223fc7b00a2000c56362a7bd6cd0850e
https://github.com/RaRe-Technologies/smart_open/blob/2dc8d60f223fc7b00a2000c56362a7bd6cd0850e/smart_open/smart_open_lib.py#L807-L826
233,338
RaRe-Technologies/smart_open
smart_open/smart_open_lib.py
_need_to_buffer
def _need_to_buffer(file_obj, mode, ext): """Returns True if we need to buffer the whole file in memory in order to proceed.""" try: is_seekable = file_obj.seekable() except AttributeError: # # Under Py2, built-in file objects returned by open do not have # .seekable, but have a .seek method instead. # is_seekable = hasattr(file_obj, 'seek') return six.PY2 and mode.startswith('r') and ext in _COMPRESSOR_REGISTRY and not is_seekable
python
def _need_to_buffer(file_obj, mode, ext): try: is_seekable = file_obj.seekable() except AttributeError: # # Under Py2, built-in file objects returned by open do not have # .seekable, but have a .seek method instead. # is_seekable = hasattr(file_obj, 'seek') return six.PY2 and mode.startswith('r') and ext in _COMPRESSOR_REGISTRY and not is_seekable
[ "def", "_need_to_buffer", "(", "file_obj", ",", "mode", ",", "ext", ")", ":", "try", ":", "is_seekable", "=", "file_obj", ".", "seekable", "(", ")", "except", "AttributeError", ":", "#", "# Under Py2, built-in file objects returned by open do not have", "# .seekable, ...
Returns True if we need to buffer the whole file in memory in order to proceed.
[ "Returns", "True", "if", "we", "need", "to", "buffer", "the", "whole", "file", "in", "memory", "in", "order", "to", "proceed", "." ]
2dc8d60f223fc7b00a2000c56362a7bd6cd0850e
https://github.com/RaRe-Technologies/smart_open/blob/2dc8d60f223fc7b00a2000c56362a7bd6cd0850e/smart_open/smart_open_lib.py#L829-L839
233,339
RaRe-Technologies/smart_open
smart_open/smart_open_lib.py
_encoding_wrapper
def _encoding_wrapper(fileobj, mode, encoding=None, errors=None): """Decode bytes into text, if necessary. If mode specifies binary access, does nothing, unless the encoding is specified. A non-null encoding implies text mode. :arg fileobj: must quack like a filehandle object. :arg str mode: is the mode which was originally requested by the user. :arg str encoding: The text encoding to use. If mode is binary, overrides mode. :arg str errors: The method to use when handling encoding/decoding errors. :returns: a file object """ logger.debug('encoding_wrapper: %r', locals()) # # If the mode is binary, but the user specified an encoding, assume they # want text. If we don't make this assumption, ignore the encoding and # return bytes, smart_open behavior will diverge from the built-in open: # # open(filename, encoding='utf-8') returns a text stream in Py3 # smart_open(filename, encoding='utf-8') would return a byte stream # without our assumption, because the default mode is rb. # if 'b' in mode and encoding is None: return fileobj if encoding is None: encoding = SYSTEM_ENCODING kw = {'errors': errors} if errors else {} if mode[0] == 'r' or mode.endswith('+'): fileobj = codecs.getreader(encoding)(fileobj, **kw) if mode[0] in ('w', 'a') or mode.endswith('+'): fileobj = codecs.getwriter(encoding)(fileobj, **kw) return fileobj
python
def _encoding_wrapper(fileobj, mode, encoding=None, errors=None): logger.debug('encoding_wrapper: %r', locals()) # # If the mode is binary, but the user specified an encoding, assume they # want text. If we don't make this assumption, ignore the encoding and # return bytes, smart_open behavior will diverge from the built-in open: # # open(filename, encoding='utf-8') returns a text stream in Py3 # smart_open(filename, encoding='utf-8') would return a byte stream # without our assumption, because the default mode is rb. # if 'b' in mode and encoding is None: return fileobj if encoding is None: encoding = SYSTEM_ENCODING kw = {'errors': errors} if errors else {} if mode[0] == 'r' or mode.endswith('+'): fileobj = codecs.getreader(encoding)(fileobj, **kw) if mode[0] in ('w', 'a') or mode.endswith('+'): fileobj = codecs.getwriter(encoding)(fileobj, **kw) return fileobj
[ "def", "_encoding_wrapper", "(", "fileobj", ",", "mode", ",", "encoding", "=", "None", ",", "errors", "=", "None", ")", ":", "logger", ".", "debug", "(", "'encoding_wrapper: %r'", ",", "locals", "(", ")", ")", "#", "# If the mode is binary, but the user specifie...
Decode bytes into text, if necessary. If mode specifies binary access, does nothing, unless the encoding is specified. A non-null encoding implies text mode. :arg fileobj: must quack like a filehandle object. :arg str mode: is the mode which was originally requested by the user. :arg str encoding: The text encoding to use. If mode is binary, overrides mode. :arg str errors: The method to use when handling encoding/decoding errors. :returns: a file object
[ "Decode", "bytes", "into", "text", "if", "necessary", "." ]
2dc8d60f223fc7b00a2000c56362a7bd6cd0850e
https://github.com/RaRe-Technologies/smart_open/blob/2dc8d60f223fc7b00a2000c56362a7bd6cd0850e/smart_open/smart_open_lib.py#L869-L903
233,340
cossacklabs/acra
examples/python/example_with_zone.py
get_zone
def get_zone(): """make http response to AcraServer api to generate new zone and return tuple of zone id and public key """ response = urlopen('{}/getNewZone'.format(ACRA_CONNECTOR_API_ADDRESS)) json_data = response.read().decode('utf-8') zone_data = json.loads(json_data) return zone_data['id'], b64decode(zone_data['public_key'])
python
def get_zone(): response = urlopen('{}/getNewZone'.format(ACRA_CONNECTOR_API_ADDRESS)) json_data = response.read().decode('utf-8') zone_data = json.loads(json_data) return zone_data['id'], b64decode(zone_data['public_key'])
[ "def", "get_zone", "(", ")", ":", "response", "=", "urlopen", "(", "'{}/getNewZone'", ".", "format", "(", "ACRA_CONNECTOR_API_ADDRESS", ")", ")", "json_data", "=", "response", ".", "read", "(", ")", ".", "decode", "(", "'utf-8'", ")", "zone_data", "=", "js...
make http response to AcraServer api to generate new zone and return tuple of zone id and public key
[ "make", "http", "response", "to", "AcraServer", "api", "to", "generate", "new", "zone", "and", "return", "tuple", "of", "zone", "id", "and", "public", "key" ]
e30741e2dfb2f3320a08ff78450c618afcb195e4
https://github.com/cossacklabs/acra/blob/e30741e2dfb2f3320a08ff78450c618afcb195e4/examples/python/example_with_zone.py#L34-L41
233,341
has2k1/plotnine
plotnine/stats/binning.py
iqr
def iqr(a): """ Calculate the IQR for an array of numbers. """ a = np.asarray(a) q1 = stats.scoreatpercentile(a, 25) q3 = stats.scoreatpercentile(a, 75) return q3 - q1
python
def iqr(a): a = np.asarray(a) q1 = stats.scoreatpercentile(a, 25) q3 = stats.scoreatpercentile(a, 75) return q3 - q1
[ "def", "iqr", "(", "a", ")", ":", "a", "=", "np", ".", "asarray", "(", "a", ")", "q1", "=", "stats", ".", "scoreatpercentile", "(", "a", ",", "25", ")", "q3", "=", "stats", ".", "scoreatpercentile", "(", "a", ",", "75", ")", "return", "q3", "-"...
Calculate the IQR for an array of numbers.
[ "Calculate", "the", "IQR", "for", "an", "array", "of", "numbers", "." ]
566e579af705367e584fb27a74e6c5199624ca89
https://github.com/has2k1/plotnine/blob/566e579af705367e584fb27a74e6c5199624ca89/plotnine/stats/binning.py#L14-L21
233,342
has2k1/plotnine
plotnine/stats/binning.py
freedman_diaconis_bins
def freedman_diaconis_bins(a): """ Calculate number of hist bins using Freedman-Diaconis rule. """ # From http://stats.stackexchange.com/questions/798/ a = np.asarray(a) h = 2 * iqr(a) / (len(a) ** (1 / 3)) # fall back to sqrt(a) bins if iqr is 0 if h == 0: bins = np.ceil(np.sqrt(a.size)) else: bins = np.ceil((np.nanmax(a) - np.nanmin(a)) / h) return np.int(bins)
python
def freedman_diaconis_bins(a): # From http://stats.stackexchange.com/questions/798/ a = np.asarray(a) h = 2 * iqr(a) / (len(a) ** (1 / 3)) # fall back to sqrt(a) bins if iqr is 0 if h == 0: bins = np.ceil(np.sqrt(a.size)) else: bins = np.ceil((np.nanmax(a) - np.nanmin(a)) / h) return np.int(bins)
[ "def", "freedman_diaconis_bins", "(", "a", ")", ":", "# From http://stats.stackexchange.com/questions/798/", "a", "=", "np", ".", "asarray", "(", "a", ")", "h", "=", "2", "*", "iqr", "(", "a", ")", "/", "(", "len", "(", "a", ")", "**", "(", "1", "/", ...
Calculate number of hist bins using Freedman-Diaconis rule.
[ "Calculate", "number", "of", "hist", "bins", "using", "Freedman", "-", "Diaconis", "rule", "." ]
566e579af705367e584fb27a74e6c5199624ca89
https://github.com/has2k1/plotnine/blob/566e579af705367e584fb27a74e6c5199624ca89/plotnine/stats/binning.py#L24-L38
233,343
has2k1/plotnine
plotnine/stats/binning.py
assign_bins
def assign_bins(x, breaks, weight=None, pad=False, closed='right'): """ Assign value in x to bins demacated by the break points Parameters ---------- x : array_like Values to be binned. breaks : array_like Sequence of break points. weight : array_like Weight of each value in `x`. Used in creating the frequency table. If `None`, then each value in `x` has a weight of 1. pad : bool If `True`, add empty bins at either end of `x`. closed : str in ``['right', 'left']`` Whether the right or left edges of the bins are part of the bin. Returns ------- out : dataframe Bin count and density information. """ right = closed == 'right' # If weight not supplied to, use one (no weight) if weight is None: weight = np.ones(len(x)) else: weight = np.asarray(weight) weight[np.isnan(weight)] = 0 bin_idx = pd.cut(x, bins=breaks, labels=False, right=right, include_lowest=True) bin_widths = np.diff(breaks) bin_x = (breaks[:-1] + breaks[1:]) * 0.5 # Create a dataframe with two columns: # - the bins to which each x is assigned # - the weight of each x value # Then create a weighted frequency table df = pd.DataFrame({'bin_idx': bin_idx, 'weight': weight}) wftable = df.pivot_table( 'weight', index=['bin_idx'], aggfunc=np.sum)['weight'] # Empty bins get no value in the computed frequency table. # We need to add the zeros and since frequency table is a # Series object, we need to keep it ordered if len(wftable) < len(bin_x): empty_bins = set(range(len(bin_x))) - set(bin_idx) for b in empty_bins: wftable.loc[b] = 0 wftable = wftable.sort_index() bin_count = wftable.tolist() if pad: bw0 = bin_widths[0] bwn = bin_widths[-1] bin_count = np.hstack([0, bin_count, 0]) bin_widths = np.hstack([bw0, bin_widths, bwn]) bin_x = np.hstack([bin_x[0]-bw0, bin_x, bin_x[-1]+bwn]) return result_dataframe(bin_count, bin_x, bin_widths)
python
def assign_bins(x, breaks, weight=None, pad=False, closed='right'): right = closed == 'right' # If weight not supplied to, use one (no weight) if weight is None: weight = np.ones(len(x)) else: weight = np.asarray(weight) weight[np.isnan(weight)] = 0 bin_idx = pd.cut(x, bins=breaks, labels=False, right=right, include_lowest=True) bin_widths = np.diff(breaks) bin_x = (breaks[:-1] + breaks[1:]) * 0.5 # Create a dataframe with two columns: # - the bins to which each x is assigned # - the weight of each x value # Then create a weighted frequency table df = pd.DataFrame({'bin_idx': bin_idx, 'weight': weight}) wftable = df.pivot_table( 'weight', index=['bin_idx'], aggfunc=np.sum)['weight'] # Empty bins get no value in the computed frequency table. # We need to add the zeros and since frequency table is a # Series object, we need to keep it ordered if len(wftable) < len(bin_x): empty_bins = set(range(len(bin_x))) - set(bin_idx) for b in empty_bins: wftable.loc[b] = 0 wftable = wftable.sort_index() bin_count = wftable.tolist() if pad: bw0 = bin_widths[0] bwn = bin_widths[-1] bin_count = np.hstack([0, bin_count, 0]) bin_widths = np.hstack([bw0, bin_widths, bwn]) bin_x = np.hstack([bin_x[0]-bw0, bin_x, bin_x[-1]+bwn]) return result_dataframe(bin_count, bin_x, bin_widths)
[ "def", "assign_bins", "(", "x", ",", "breaks", ",", "weight", "=", "None", ",", "pad", "=", "False", ",", "closed", "=", "'right'", ")", ":", "right", "=", "closed", "==", "'right'", "# If weight not supplied to, use one (no weight)", "if", "weight", "is", "...
Assign value in x to bins demacated by the break points Parameters ---------- x : array_like Values to be binned. breaks : array_like Sequence of break points. weight : array_like Weight of each value in `x`. Used in creating the frequency table. If `None`, then each value in `x` has a weight of 1. pad : bool If `True`, add empty bins at either end of `x`. closed : str in ``['right', 'left']`` Whether the right or left edges of the bins are part of the bin. Returns ------- out : dataframe Bin count and density information.
[ "Assign", "value", "in", "x", "to", "bins", "demacated", "by", "the", "break", "points" ]
566e579af705367e584fb27a74e6c5199624ca89
https://github.com/has2k1/plotnine/blob/566e579af705367e584fb27a74e6c5199624ca89/plotnine/stats/binning.py#L120-L182
233,344
has2k1/plotnine
plotnine/stats/binning.py
result_dataframe
def result_dataframe(count, x, width, xmin=None, xmax=None): """ Create a dataframe to hold bin information """ if xmin is None: xmin = x-width/2 if xmax is None: xmax = x+width/2 # Eliminate any numerical roundoff discrepancies # between the edges xmin[1:] = xmax[:-1] density = (count/width) / np.sum(np.abs(count)) out = pd.DataFrame({ 'count': count, 'x': x, 'xmin': xmin, 'xmax': xmax, 'width': width, 'density': density, 'ncount': count/np.max(np.abs(count)), 'ndensity': count/np.max(np.abs(density))}) return out
python
def result_dataframe(count, x, width, xmin=None, xmax=None): if xmin is None: xmin = x-width/2 if xmax is None: xmax = x+width/2 # Eliminate any numerical roundoff discrepancies # between the edges xmin[1:] = xmax[:-1] density = (count/width) / np.sum(np.abs(count)) out = pd.DataFrame({ 'count': count, 'x': x, 'xmin': xmin, 'xmax': xmax, 'width': width, 'density': density, 'ncount': count/np.max(np.abs(count)), 'ndensity': count/np.max(np.abs(density))}) return out
[ "def", "result_dataframe", "(", "count", ",", "x", ",", "width", ",", "xmin", "=", "None", ",", "xmax", "=", "None", ")", ":", "if", "xmin", "is", "None", ":", "xmin", "=", "x", "-", "width", "/", "2", "if", "xmax", "is", "None", ":", "xmax", "...
Create a dataframe to hold bin information
[ "Create", "a", "dataframe", "to", "hold", "bin", "information" ]
566e579af705367e584fb27a74e6c5199624ca89
https://github.com/has2k1/plotnine/blob/566e579af705367e584fb27a74e6c5199624ca89/plotnine/stats/binning.py#L185-L209
233,345
has2k1/plotnine
plotnine/stats/binning.py
fuzzybreaks
def fuzzybreaks(scale, breaks=None, boundary=None, binwidth=None, bins=30, right=True): """ Compute fuzzy breaks For a continuous scale, fuzzybreaks "preserve" the range of the scale. The fuzzing is close to numerical roundoff and is visually imperceptible. Parameters ---------- scale : scale Scale breaks : array_like Sequence of break points. If provided and the scale is not discrete, they are returned. boundary : float First break. If `None` a suitable on is computed using the range of the scale and the binwidth. binwidth : float Separation between the breaks bins : int Number of bins right : bool If `True` the right edges of the bins are part of the bin. If `False` then the left edges of the bins are part of the bin. Returns ------- out : array_like """ # Bins for categorical data should take the width # of one level, and should show up centered over # their tick marks. All other parameters are ignored. if isinstance(scale, scale_discrete): breaks = scale.get_breaks() return -0.5 + np.arange(1, len(breaks)+2) else: if breaks is not None: breaks = scale.transform(breaks) if breaks is not None: return breaks recompute_bins = binwidth is not None srange = scale.limits if binwidth is None or np.isnan(binwidth): binwidth = (srange[1]-srange[0]) / bins if boundary is None or np.isnan(boundary): boundary = round_any(srange[0], binwidth, np.floor) if recompute_bins: bins = np.int(np.ceil((srange[1]-boundary)/binwidth)) # To minimise precision errors, we do not pass the boundary and # binwidth into np.arange as params. The resulting breaks # can then be adjusted with finer(epsilon based rather than # some arbitrary small number) precision. breaks = np.arange(boundary, srange[1]+binwidth, binwidth) return _adjust_breaks(breaks, right)
python
def fuzzybreaks(scale, breaks=None, boundary=None, binwidth=None, bins=30, right=True): # Bins for categorical data should take the width # of one level, and should show up centered over # their tick marks. All other parameters are ignored. if isinstance(scale, scale_discrete): breaks = scale.get_breaks() return -0.5 + np.arange(1, len(breaks)+2) else: if breaks is not None: breaks = scale.transform(breaks) if breaks is not None: return breaks recompute_bins = binwidth is not None srange = scale.limits if binwidth is None or np.isnan(binwidth): binwidth = (srange[1]-srange[0]) / bins if boundary is None or np.isnan(boundary): boundary = round_any(srange[0], binwidth, np.floor) if recompute_bins: bins = np.int(np.ceil((srange[1]-boundary)/binwidth)) # To minimise precision errors, we do not pass the boundary and # binwidth into np.arange as params. The resulting breaks # can then be adjusted with finer(epsilon based rather than # some arbitrary small number) precision. breaks = np.arange(boundary, srange[1]+binwidth, binwidth) return _adjust_breaks(breaks, right)
[ "def", "fuzzybreaks", "(", "scale", ",", "breaks", "=", "None", ",", "boundary", "=", "None", ",", "binwidth", "=", "None", ",", "bins", "=", "30", ",", "right", "=", "True", ")", ":", "# Bins for categorical data should take the width", "# of one level, and sho...
Compute fuzzy breaks For a continuous scale, fuzzybreaks "preserve" the range of the scale. The fuzzing is close to numerical roundoff and is visually imperceptible. Parameters ---------- scale : scale Scale breaks : array_like Sequence of break points. If provided and the scale is not discrete, they are returned. boundary : float First break. If `None` a suitable on is computed using the range of the scale and the binwidth. binwidth : float Separation between the breaks bins : int Number of bins right : bool If `True` the right edges of the bins are part of the bin. If `False` then the left edges of the bins are part of the bin. Returns ------- out : array_like
[ "Compute", "fuzzy", "breaks" ]
566e579af705367e584fb27a74e6c5199624ca89
https://github.com/has2k1/plotnine/blob/566e579af705367e584fb27a74e6c5199624ca89/plotnine/stats/binning.py#L212-L274
233,346
has2k1/plotnine
plotnine/guides/guides.py
guides.build
def build(self, plot): """ Build the guides Parameters ---------- plot : ggplot ggplot object being drawn Returns ------- box : matplotlib.offsetbox.Offsetbox | None A box that contains all the guides for the plot. If there are no guides, **None** is returned. """ get_property = plot.theme.themeables.property # by default, guide boxes are vertically aligned with suppress(KeyError): self.box_direction = get_property('legend_box') if self.box_direction is None: self.box_direction = 'vertical' with suppress(KeyError): self.position = get_property('legend_position') if self.position == 'none': return # justification of legend boxes with suppress(KeyError): self.box_align = get_property('legend_box_just') if self.box_align is None: if self.position in {'left', 'right'}: tmp = 'left' else: tmp = 'center' self.box_align = tmp with suppress(KeyError): self.box_margin = get_property('legend_box_margin') if self.box_margin is None: self.box_margin = 10 with suppress(KeyError): self.spacing = get_property('legend_spacing') if self.spacing is None: self.spacing = 10 gdefs = self.train(plot) if not gdefs: return gdefs = self.merge(gdefs) gdefs = self.create_geoms(gdefs, plot) if not gdefs: return gboxes = self.draw(gdefs, plot.theme) bigbox = self.assemble(gboxes, gdefs, plot.theme) return bigbox
python
def build(self, plot): get_property = plot.theme.themeables.property # by default, guide boxes are vertically aligned with suppress(KeyError): self.box_direction = get_property('legend_box') if self.box_direction is None: self.box_direction = 'vertical' with suppress(KeyError): self.position = get_property('legend_position') if self.position == 'none': return # justification of legend boxes with suppress(KeyError): self.box_align = get_property('legend_box_just') if self.box_align is None: if self.position in {'left', 'right'}: tmp = 'left' else: tmp = 'center' self.box_align = tmp with suppress(KeyError): self.box_margin = get_property('legend_box_margin') if self.box_margin is None: self.box_margin = 10 with suppress(KeyError): self.spacing = get_property('legend_spacing') if self.spacing is None: self.spacing = 10 gdefs = self.train(plot) if not gdefs: return gdefs = self.merge(gdefs) gdefs = self.create_geoms(gdefs, plot) if not gdefs: return gboxes = self.draw(gdefs, plot.theme) bigbox = self.assemble(gboxes, gdefs, plot.theme) return bigbox
[ "def", "build", "(", "self", ",", "plot", ")", ":", "get_property", "=", "plot", ".", "theme", ".", "themeables", ".", "property", "# by default, guide boxes are vertically aligned", "with", "suppress", "(", "KeyError", ")", ":", "self", ".", "box_direction", "=...
Build the guides Parameters ---------- plot : ggplot ggplot object being drawn Returns ------- box : matplotlib.offsetbox.Offsetbox | None A box that contains all the guides for the plot. If there are no guides, **None** is returned.
[ "Build", "the", "guides" ]
566e579af705367e584fb27a74e6c5199624ca89
https://github.com/has2k1/plotnine/blob/566e579af705367e584fb27a74e6c5199624ca89/plotnine/guides/guides.py#L85-L146
233,347
has2k1/plotnine
plotnine/guides/guides.py
guides.train
def train(self, plot): """ Compute all the required guides Parameters ---------- plot : ggplot ggplot object Returns ------- gdefs : list Guides for the plots """ gdefs = [] for scale in plot.scales: for output in scale.aesthetics: # The guide for aesthetic 'xxx' is stored # in plot.guides['xxx']. The priority for # the guides depends on how they are created # 1. ... + guides(xxx=guide_blah()) # 2. ... + scale_xxx(guide=guide_blah()) # 3. default(either guide_legend or guide_colorbar # depending on the scale type) # output = scale.aesthetics[0] guide = self.get(output, scale.guide) if guide is None or guide is False: continue # check the validity of guide. # if guide is character, then find the guide object guide = self.validate(guide) # check the consistency of the guide and scale. if (guide.available_aes != 'any' and scale.aesthetics[0] not in guide.available_aes): raise PlotnineError( "{} cannot be used for {}".format( guide.__class__.__name__, scale.aesthetics)) # title if is_waive(guide.title): if scale.name: guide.title = scale.name else: try: guide.title = str(plot.labels[output]) except KeyError: warn("Cannot generate legend for the {!r} " "aesthetic. Make sure you have mapped a " "variable to it".format(output), PlotnineWarning) continue # each guide object trains scale within the object, # so Guides (i.e., the container of guides) # need not to know about them guide = guide.train(scale, output) if guide is not None: gdefs.append(guide) return gdefs
python
def train(self, plot): gdefs = [] for scale in plot.scales: for output in scale.aesthetics: # The guide for aesthetic 'xxx' is stored # in plot.guides['xxx']. The priority for # the guides depends on how they are created # 1. ... + guides(xxx=guide_blah()) # 2. ... + scale_xxx(guide=guide_blah()) # 3. default(either guide_legend or guide_colorbar # depending on the scale type) # output = scale.aesthetics[0] guide = self.get(output, scale.guide) if guide is None or guide is False: continue # check the validity of guide. # if guide is character, then find the guide object guide = self.validate(guide) # check the consistency of the guide and scale. if (guide.available_aes != 'any' and scale.aesthetics[0] not in guide.available_aes): raise PlotnineError( "{} cannot be used for {}".format( guide.__class__.__name__, scale.aesthetics)) # title if is_waive(guide.title): if scale.name: guide.title = scale.name else: try: guide.title = str(plot.labels[output]) except KeyError: warn("Cannot generate legend for the {!r} " "aesthetic. Make sure you have mapped a " "variable to it".format(output), PlotnineWarning) continue # each guide object trains scale within the object, # so Guides (i.e., the container of guides) # need not to know about them guide = guide.train(scale, output) if guide is not None: gdefs.append(guide) return gdefs
[ "def", "train", "(", "self", ",", "plot", ")", ":", "gdefs", "=", "[", "]", "for", "scale", "in", "plot", ".", "scales", ":", "for", "output", "in", "scale", ".", "aesthetics", ":", "# The guide for aesthetic 'xxx' is stored", "# in plot.guides['xxx']. The prior...
Compute all the required guides Parameters ---------- plot : ggplot ggplot object Returns ------- gdefs : list Guides for the plots
[ "Compute", "all", "the", "required", "guides" ]
566e579af705367e584fb27a74e6c5199624ca89
https://github.com/has2k1/plotnine/blob/566e579af705367e584fb27a74e6c5199624ca89/plotnine/guides/guides.py#L148-L211
233,348
has2k1/plotnine
plotnine/guides/guides.py
guides.validate
def validate(self, guide): """ Validate guide object """ if is_string(guide): guide = Registry['guide_{}'.format(guide)]() if not isinstance(guide, guide_class): raise PlotnineError( "Unknown guide: {}".format(guide)) return guide
python
def validate(self, guide): if is_string(guide): guide = Registry['guide_{}'.format(guide)]() if not isinstance(guide, guide_class): raise PlotnineError( "Unknown guide: {}".format(guide)) return guide
[ "def", "validate", "(", "self", ",", "guide", ")", ":", "if", "is_string", "(", "guide", ")", ":", "guide", "=", "Registry", "[", "'guide_{}'", ".", "format", "(", "guide", ")", "]", "(", ")", "if", "not", "isinstance", "(", "guide", ",", "guide_clas...
Validate guide object
[ "Validate", "guide", "object" ]
566e579af705367e584fb27a74e6c5199624ca89
https://github.com/has2k1/plotnine/blob/566e579af705367e584fb27a74e6c5199624ca89/plotnine/guides/guides.py#L213-L223
233,349
has2k1/plotnine
plotnine/guides/guides.py
guides.create_geoms
def create_geoms(self, gdefs, plot): """ Add geoms to the guide definitions """ new_gdefs = [] for gdef in gdefs: gdef = gdef.create_geoms(plot) if gdef: new_gdefs.append(gdef) return new_gdefs
python
def create_geoms(self, gdefs, plot): new_gdefs = [] for gdef in gdefs: gdef = gdef.create_geoms(plot) if gdef: new_gdefs.append(gdef) return new_gdefs
[ "def", "create_geoms", "(", "self", ",", "gdefs", ",", "plot", ")", ":", "new_gdefs", "=", "[", "]", "for", "gdef", "in", "gdefs", ":", "gdef", "=", "gdef", ".", "create_geoms", "(", "plot", ")", "if", "gdef", ":", "new_gdefs", ".", "append", "(", ...
Add geoms to the guide definitions
[ "Add", "geoms", "to", "the", "guide", "definitions" ]
566e579af705367e584fb27a74e6c5199624ca89
https://github.com/has2k1/plotnine/blob/566e579af705367e584fb27a74e6c5199624ca89/plotnine/guides/guides.py#L255-L265
233,350
has2k1/plotnine
plotnine/guides/guides.py
guides.draw
def draw(self, gdefs, theme): """ Draw out each guide definition Parameters ---------- gdefs : list of guide_legend|guide_colorbar guide definitions theme : theme Plot theme Returns ------- out : list of matplotlib.offsetbox.Offsetbox A drawing of each legend """ for g in gdefs: g.theme = theme g._set_defaults() return [g.draw() for g in gdefs]
python
def draw(self, gdefs, theme): for g in gdefs: g.theme = theme g._set_defaults() return [g.draw() for g in gdefs]
[ "def", "draw", "(", "self", ",", "gdefs", ",", "theme", ")", ":", "for", "g", "in", "gdefs", ":", "g", ".", "theme", "=", "theme", "g", ".", "_set_defaults", "(", ")", "return", "[", "g", ".", "draw", "(", ")", "for", "g", "in", "gdefs", "]" ]
Draw out each guide definition Parameters ---------- gdefs : list of guide_legend|guide_colorbar guide definitions theme : theme Plot theme Returns ------- out : list of matplotlib.offsetbox.Offsetbox A drawing of each legend
[ "Draw", "out", "each", "guide", "definition" ]
566e579af705367e584fb27a74e6c5199624ca89
https://github.com/has2k1/plotnine/blob/566e579af705367e584fb27a74e6c5199624ca89/plotnine/guides/guides.py#L267-L286
233,351
has2k1/plotnine
plotnine/guides/guides.py
guides.assemble
def assemble(self, gboxes, gdefs, theme): """ Put together all the guide boxes Parameters ---------- gboxes : list List of :class:`~matplotlib.offsetbox.Offsetbox`, where each item is a legend for a single aesthetic. gdefs : list of guide_legend|guide_colorbar guide definitions theme : theme Plot theme Returns ------- box : OffsetBox A box than can be placed onto a plot """ # place the guides according to the guide.order # 0 do not sort # 1-99 sort for gdef in gdefs: if gdef.order == 0: gdef.order = 100 elif not 0 <= gdef.order <= 99: raise PlotnineError( "'order' for a guide should be " "between 0 and 99") orders = [gdef.order for gdef in gdefs] idx = np.argsort(orders) gboxes = [gboxes[i] for i in idx] # direction when more than legend if self.box_direction == 'vertical': packer = VPacker elif self.box_direction == 'horizontal': packer = HPacker else: raise PlotnineError( "'legend_box' should be either " "'vertical' or 'horizontal'") box = packer(children=gboxes, align=self.box_align, pad=self.box_margin, sep=self.spacing) return box
python
def assemble(self, gboxes, gdefs, theme): # place the guides according to the guide.order # 0 do not sort # 1-99 sort for gdef in gdefs: if gdef.order == 0: gdef.order = 100 elif not 0 <= gdef.order <= 99: raise PlotnineError( "'order' for a guide should be " "between 0 and 99") orders = [gdef.order for gdef in gdefs] idx = np.argsort(orders) gboxes = [gboxes[i] for i in idx] # direction when more than legend if self.box_direction == 'vertical': packer = VPacker elif self.box_direction == 'horizontal': packer = HPacker else: raise PlotnineError( "'legend_box' should be either " "'vertical' or 'horizontal'") box = packer(children=gboxes, align=self.box_align, pad=self.box_margin, sep=self.spacing) return box
[ "def", "assemble", "(", "self", ",", "gboxes", ",", "gdefs", ",", "theme", ")", ":", "# place the guides according to the guide.order", "# 0 do not sort", "# 1-99 sort", "for", "gdef", "in", "gdefs", ":", "if", "gdef", ".", "order", "==", "0", ":", "gdef", "."...
Put together all the guide boxes Parameters ---------- gboxes : list List of :class:`~matplotlib.offsetbox.Offsetbox`, where each item is a legend for a single aesthetic. gdefs : list of guide_legend|guide_colorbar guide definitions theme : theme Plot theme Returns ------- box : OffsetBox A box than can be placed onto a plot
[ "Put", "together", "all", "the", "guide", "boxes" ]
566e579af705367e584fb27a74e6c5199624ca89
https://github.com/has2k1/plotnine/blob/566e579af705367e584fb27a74e6c5199624ca89/plotnine/guides/guides.py#L288-L333
233,352
has2k1/plotnine
doc/sphinxext/examples_and_gallery.py
add_entries_to_gallery
def add_entries_to_gallery(app, doctree, docname): """ Add entries to the gallery node Should happen when all the doctrees have been read and the gallery entries have been collected. i.e at doctree-resolved time. """ if docname != 'gallery': return if not has_gallery(app.builder.name): return # Find gallery node try: node = doctree.traverse(gallery)[0] except TypeError: return content = [] for entry in app.env.gallery_entries: raw_html_node = nodes.raw('', text=entry.html, format='html') content.append(raw_html_node) # Even when content is empty, we want the gallery node replaced node.replace_self(content)
python
def add_entries_to_gallery(app, doctree, docname): if docname != 'gallery': return if not has_gallery(app.builder.name): return # Find gallery node try: node = doctree.traverse(gallery)[0] except TypeError: return content = [] for entry in app.env.gallery_entries: raw_html_node = nodes.raw('', text=entry.html, format='html') content.append(raw_html_node) # Even when content is empty, we want the gallery node replaced node.replace_self(content)
[ "def", "add_entries_to_gallery", "(", "app", ",", "doctree", ",", "docname", ")", ":", "if", "docname", "!=", "'gallery'", ":", "return", "if", "not", "has_gallery", "(", "app", ".", "builder", ".", "name", ")", ":", "return", "# Find gallery node", "try", ...
Add entries to the gallery node Should happen when all the doctrees have been read and the gallery entries have been collected. i.e at doctree-resolved time.
[ "Add", "entries", "to", "the", "gallery", "node" ]
566e579af705367e584fb27a74e6c5199624ca89
https://github.com/has2k1/plotnine/blob/566e579af705367e584fb27a74e6c5199624ca89/doc/sphinxext/examples_and_gallery.py#L289-L315
233,353
has2k1/plotnine
doc/sphinxext/examples_and_gallery.py
GalleryEntry.html
def html(self): """ Return html for a the entry """ # No empty tooltips if self.description: tooltip = 'tooltip="{}"'.format(self.description) else: tooltip = '' return entry_html( title=self.title, thumbnail=self.thumbnail, link=self.html_link, tooltip=tooltip)
python
def html(self): # No empty tooltips if self.description: tooltip = 'tooltip="{}"'.format(self.description) else: tooltip = '' return entry_html( title=self.title, thumbnail=self.thumbnail, link=self.html_link, tooltip=tooltip)
[ "def", "html", "(", "self", ")", ":", "# No empty tooltips", "if", "self", ".", "description", ":", "tooltip", "=", "'tooltip=\"{}\"'", ".", "format", "(", "self", ".", "description", ")", "else", ":", "tooltip", "=", "''", "return", "entry_html", "(", "ti...
Return html for a the entry
[ "Return", "html", "for", "a", "the", "entry" ]
566e579af705367e584fb27a74e6c5199624ca89
https://github.com/has2k1/plotnine/blob/566e579af705367e584fb27a74e6c5199624ca89/doc/sphinxext/examples_and_gallery.py#L103-L117
233,354
has2k1/plotnine
plotnine/geoms/annotation_logticks.py
_geom_logticks._check_log_scale
def _check_log_scale(base, sides, scales, coord): """ Check the log transforms Parameters ---------- base : float or None Base of the logarithm in which the ticks will be calculated. If ``None``, the base of the log transform the scale will be used. sides : str (default: bl) Sides onto which to draw the marks. Any combination chosen from the characters ``btlr``, for *bottom*, *top*, *left* or *right* side marks. If ``coord_flip()`` is used, these are the sides *after* the flip. scales : SimpleNamespace ``x`` and ``y`` scales. coord : coord Coordinate (e.g. coord_cartesian) system of the geom. Returns ------- out : tuple The bases (base_x, base_y) to use when generating the ticks. """ def is_log(trans): return (trans.__class__.__name__.startswith('log') and hasattr(trans, 'base')) base_x, base_y = base, base x_is_log = is_log(scales.x.trans) y_is_log = is_log(scales.y.trans) if isinstance(coord, coord_flip): x_is_log, y_is_log = y_is_log, x_is_log if 't' in sides or 'b' in sides: if base_x is None: base_x = scales.x.trans.base if not x_is_log: warnings.warn( "annotation_logticks for x-axis which does not have " "a log scale. The logticks may not make sense.", PlotnineWarning) elif x_is_log and base_x != scales.x.trans.base: warnings.warn( "The x-axis is log transformed in base {} ," "but the annotation_logticks are computed in base {}" "".format(base_x, scales.x.trans.base), PlotnineWarning) if 'l' in sides or 'r' in sides: if base_y is None: base_y = scales.y.trans.base if not y_is_log: warnings.warn( "annotation_logticks for y-axis which does not have " "a log scale. The logticks may not make sense.", PlotnineWarning) elif y_is_log and base_y != scales.x.trans.base: warnings.warn( "The y-axis is log transformed in base {} ," "but the annotation_logticks are computed in base {}" "".format(base_y, scales.x.trans.base), PlotnineWarning) return base_x, base_y
python
def _check_log_scale(base, sides, scales, coord): def is_log(trans): return (trans.__class__.__name__.startswith('log') and hasattr(trans, 'base')) base_x, base_y = base, base x_is_log = is_log(scales.x.trans) y_is_log = is_log(scales.y.trans) if isinstance(coord, coord_flip): x_is_log, y_is_log = y_is_log, x_is_log if 't' in sides or 'b' in sides: if base_x is None: base_x = scales.x.trans.base if not x_is_log: warnings.warn( "annotation_logticks for x-axis which does not have " "a log scale. The logticks may not make sense.", PlotnineWarning) elif x_is_log and base_x != scales.x.trans.base: warnings.warn( "The x-axis is log transformed in base {} ," "but the annotation_logticks are computed in base {}" "".format(base_x, scales.x.trans.base), PlotnineWarning) if 'l' in sides or 'r' in sides: if base_y is None: base_y = scales.y.trans.base if not y_is_log: warnings.warn( "annotation_logticks for y-axis which does not have " "a log scale. The logticks may not make sense.", PlotnineWarning) elif y_is_log and base_y != scales.x.trans.base: warnings.warn( "The y-axis is log transformed in base {} ," "but the annotation_logticks are computed in base {}" "".format(base_y, scales.x.trans.base), PlotnineWarning) return base_x, base_y
[ "def", "_check_log_scale", "(", "base", ",", "sides", ",", "scales", ",", "coord", ")", ":", "def", "is_log", "(", "trans", ")", ":", "return", "(", "trans", ".", "__class__", ".", "__name__", ".", "startswith", "(", "'log'", ")", "and", "hasattr", "("...
Check the log transforms Parameters ---------- base : float or None Base of the logarithm in which the ticks will be calculated. If ``None``, the base of the log transform the scale will be used. sides : str (default: bl) Sides onto which to draw the marks. Any combination chosen from the characters ``btlr``, for *bottom*, *top*, *left* or *right* side marks. If ``coord_flip()`` is used, these are the sides *after* the flip. scales : SimpleNamespace ``x`` and ``y`` scales. coord : coord Coordinate (e.g. coord_cartesian) system of the geom. Returns ------- out : tuple The bases (base_x, base_y) to use when generating the ticks.
[ "Check", "the", "log", "transforms" ]
566e579af705367e584fb27a74e6c5199624ca89
https://github.com/has2k1/plotnine/blob/566e579af705367e584fb27a74e6c5199624ca89/plotnine/geoms/annotation_logticks.py#L25-L91
233,355
has2k1/plotnine
plotnine/geoms/annotation_logticks.py
_geom_logticks._calc_ticks
def _calc_ticks(value_range, base): """ Calculate tick marks within a range Parameters ---------- value_range: tuple Range for which to calculate ticks. Returns ------- out: tuple (major, middle, minor) tick locations """ def _minor(x, mid_idx): return np.hstack([x[1:mid_idx], x[mid_idx+1:-1]]) # * Calculate the low and high powers, # * Generate for all intervals in along the low-high power range # The intervals are in normal space # * Calculate evenly spaced breaks in normal space, then convert # them to log space. low = np.floor(value_range[0]) high = np.ceil(value_range[1]) arr = base ** np.arange(low, float(high+1)) n_ticks = base - 1 breaks = [log(np.linspace(b1, b2, n_ticks+1), base) for (b1, b2) in list(zip(arr, arr[1:]))] # Partition the breaks in the 3 groups major = np.array([x[0] for x in breaks] + [breaks[-1][-1]]) if n_ticks % 2: mid_idx = n_ticks // 2 middle = [x[mid_idx] for x in breaks] minor = np.hstack([_minor(x, mid_idx) for x in breaks]) else: middle = [] minor = np.hstack([x[1:-1] for x in breaks]) return major, middle, minor
python
def _calc_ticks(value_range, base): def _minor(x, mid_idx): return np.hstack([x[1:mid_idx], x[mid_idx+1:-1]]) # * Calculate the low and high powers, # * Generate for all intervals in along the low-high power range # The intervals are in normal space # * Calculate evenly spaced breaks in normal space, then convert # them to log space. low = np.floor(value_range[0]) high = np.ceil(value_range[1]) arr = base ** np.arange(low, float(high+1)) n_ticks = base - 1 breaks = [log(np.linspace(b1, b2, n_ticks+1), base) for (b1, b2) in list(zip(arr, arr[1:]))] # Partition the breaks in the 3 groups major = np.array([x[0] for x in breaks] + [breaks[-1][-1]]) if n_ticks % 2: mid_idx = n_ticks // 2 middle = [x[mid_idx] for x in breaks] minor = np.hstack([_minor(x, mid_idx) for x in breaks]) else: middle = [] minor = np.hstack([x[1:-1] for x in breaks]) return major, middle, minor
[ "def", "_calc_ticks", "(", "value_range", ",", "base", ")", ":", "def", "_minor", "(", "x", ",", "mid_idx", ")", ":", "return", "np", ".", "hstack", "(", "[", "x", "[", "1", ":", "mid_idx", "]", ",", "x", "[", "mid_idx", "+", "1", ":", "-", "1"...
Calculate tick marks within a range Parameters ---------- value_range: tuple Range for which to calculate ticks. Returns ------- out: tuple (major, middle, minor) tick locations
[ "Calculate", "tick", "marks", "within", "a", "range" ]
566e579af705367e584fb27a74e6c5199624ca89
https://github.com/has2k1/plotnine/blob/566e579af705367e584fb27a74e6c5199624ca89/plotnine/geoms/annotation_logticks.py#L94-L133
233,356
has2k1/plotnine
plotnine/options.py
get_option
def get_option(name): """ Get package option Parameters ---------- name : str Name of the option """ d = globals() if name in {'get_option', 'set_option'} or name not in d: from ..exceptions import PlotnineError raise PlotnineError("Unknown option {}".format(name)) return d[name]
python
def get_option(name): d = globals() if name in {'get_option', 'set_option'} or name not in d: from ..exceptions import PlotnineError raise PlotnineError("Unknown option {}".format(name)) return d[name]
[ "def", "get_option", "(", "name", ")", ":", "d", "=", "globals", "(", ")", "if", "name", "in", "{", "'get_option'", ",", "'set_option'", "}", "or", "name", "not", "in", "d", ":", "from", ".", ".", "exceptions", "import", "PlotnineError", "raise", "Plot...
Get package option Parameters ---------- name : str Name of the option
[ "Get", "package", "option" ]
566e579af705367e584fb27a74e6c5199624ca89
https://github.com/has2k1/plotnine/blob/566e579af705367e584fb27a74e6c5199624ca89/plotnine/options.py#L18-L33
233,357
has2k1/plotnine
plotnine/options.py
set_option
def set_option(name, value): """ Set package option Parameters ---------- name : str Name of the option value : object New value of the option Returns ------- old : object Old value of the option """ d = globals() if name in {'get_option', 'set_option'} or name not in d: from ..exceptions import PlotnineError raise PlotnineError("Unknown option {}".format(name)) old = d[name] d[name] = value return old
python
def set_option(name, value): d = globals() if name in {'get_option', 'set_option'} or name not in d: from ..exceptions import PlotnineError raise PlotnineError("Unknown option {}".format(name)) old = d[name] d[name] = value return old
[ "def", "set_option", "(", "name", ",", "value", ")", ":", "d", "=", "globals", "(", ")", "if", "name", "in", "{", "'get_option'", ",", "'set_option'", "}", "or", "name", "not", "in", "d", ":", "from", ".", ".", "exceptions", "import", "PlotnineError", ...
Set package option Parameters ---------- name : str Name of the option value : object New value of the option Returns ------- old : object Old value of the option
[ "Set", "package", "option" ]
566e579af705367e584fb27a74e6c5199624ca89
https://github.com/has2k1/plotnine/blob/566e579af705367e584fb27a74e6c5199624ca89/plotnine/options.py#L36-L60
233,358
has2k1/plotnine
plotnine/scales/limits.py
expand_limits
def expand_limits(**kwargs): """ Expand the limits any aesthetic using data Parameters ---------- kwargs : dict or dataframe Data to use in expanding the limits. The keys should be aesthetic names e.g. *x*, *y*, *colour*, ... """ def as_list(key): with suppress(KeyError): if isinstance(kwargs[key], (int, float, str)): kwargs[key] = [kwargs[key]] if isinstance(kwargs, dict): as_list('x') as_list('y') data = pd.DataFrame(kwargs) else: data = kwargs mapping = {} for ae in set(kwargs) & all_aesthetics: mapping[ae] = ae return geom_blank(mapping=mapping, data=data, inherit_aes=False)
python
def expand_limits(**kwargs): def as_list(key): with suppress(KeyError): if isinstance(kwargs[key], (int, float, str)): kwargs[key] = [kwargs[key]] if isinstance(kwargs, dict): as_list('x') as_list('y') data = pd.DataFrame(kwargs) else: data = kwargs mapping = {} for ae in set(kwargs) & all_aesthetics: mapping[ae] = ae return geom_blank(mapping=mapping, data=data, inherit_aes=False)
[ "def", "expand_limits", "(", "*", "*", "kwargs", ")", ":", "def", "as_list", "(", "key", ")", ":", "with", "suppress", "(", "KeyError", ")", ":", "if", "isinstance", "(", "kwargs", "[", "key", "]", ",", "(", "int", ",", "float", ",", "str", ")", ...
Expand the limits any aesthetic using data Parameters ---------- kwargs : dict or dataframe Data to use in expanding the limits. The keys should be aesthetic names e.g. *x*, *y*, *colour*, ...
[ "Expand", "the", "limits", "any", "aesthetic", "using", "data" ]
566e579af705367e584fb27a74e6c5199624ca89
https://github.com/has2k1/plotnine/blob/566e579af705367e584fb27a74e6c5199624ca89/plotnine/scales/limits.py#L188-L215
233,359
has2k1/plotnine
plotnine/scales/limits.py
_lim.get_scale
def get_scale(self, gg): """ Create a scale """ # This method does some introspection to save users from # scale mismatch error. This could happen when the # aesthetic is mapped to a categorical but the limits # are not provided in categorical form. We only handle # the case where the mapping uses an expression to # conver to categorical e.g `aes(color='factor(cyl)')`. # However if `'cyl'` column is a categorical and the # mapping is `aes(color='cyl')`, that will result in # an error. If later case proves common enough then we # could inspect the data and be clever based on that too!! ae = self.aesthetic series = self.limits_series ae_values = [] # Look through all the mappings for this aesthetic, # if we detect any factor stuff then we convert the # limits data to categorical so that the right scale # can be choosen. This should take care of the most # common use cases. for layer in gg.layers: with suppress(KeyError): value = layer.mapping[ae] if isinstance(value, str): ae_values.append(value) for value in ae_values: if ('factor(' in value or 'Categorical(' in value): series = pd.Categorical(self.limits_series) break return make_scale(self.aesthetic, series, limits=self.limits, trans=self.trans)
python
def get_scale(self, gg): # This method does some introspection to save users from # scale mismatch error. This could happen when the # aesthetic is mapped to a categorical but the limits # are not provided in categorical form. We only handle # the case where the mapping uses an expression to # conver to categorical e.g `aes(color='factor(cyl)')`. # However if `'cyl'` column is a categorical and the # mapping is `aes(color='cyl')`, that will result in # an error. If later case proves common enough then we # could inspect the data and be clever based on that too!! ae = self.aesthetic series = self.limits_series ae_values = [] # Look through all the mappings for this aesthetic, # if we detect any factor stuff then we convert the # limits data to categorical so that the right scale # can be choosen. This should take care of the most # common use cases. for layer in gg.layers: with suppress(KeyError): value = layer.mapping[ae] if isinstance(value, str): ae_values.append(value) for value in ae_values: if ('factor(' in value or 'Categorical(' in value): series = pd.Categorical(self.limits_series) break return make_scale(self.aesthetic, series, limits=self.limits, trans=self.trans)
[ "def", "get_scale", "(", "self", ",", "gg", ")", ":", "# This method does some introspection to save users from", "# scale mismatch error. This could happen when the", "# aesthetic is mapped to a categorical but the limits", "# are not provided in categorical form. We only handle", "# the ca...
Create a scale
[ "Create", "a", "scale" ]
566e579af705367e584fb27a74e6c5199624ca89
https://github.com/has2k1/plotnine/blob/566e579af705367e584fb27a74e6c5199624ca89/plotnine/scales/limits.py#L46-L83
233,360
has2k1/plotnine
plotnine/geoms/geom.py
geom.from_stat
def from_stat(stat): """ Return an instantiated geom object geoms should not override this method. Parameters ---------- stat : stat `stat` Returns ------- out : geom A geom object Raises ------ :class:`PlotnineError` if unable to create a `geom`. """ name = stat.params['geom'] if issubclass(type(name), geom): return name if isinstance(name, type) and issubclass(name, geom): klass = name elif is_string(name): if not name.startswith('geom_'): name = 'geom_{}'.format(name) klass = Registry[name] else: raise PlotnineError( 'Unknown geom of type {}'.format(type(name))) return klass(stat=stat, **stat._kwargs)
python
def from_stat(stat): name = stat.params['geom'] if issubclass(type(name), geom): return name if isinstance(name, type) and issubclass(name, geom): klass = name elif is_string(name): if not name.startswith('geom_'): name = 'geom_{}'.format(name) klass = Registry[name] else: raise PlotnineError( 'Unknown geom of type {}'.format(type(name))) return klass(stat=stat, **stat._kwargs)
[ "def", "from_stat", "(", "stat", ")", ":", "name", "=", "stat", ".", "params", "[", "'geom'", "]", "if", "issubclass", "(", "type", "(", "name", ")", ",", "geom", ")", ":", "return", "name", "if", "isinstance", "(", "name", ",", "type", ")", "and",...
Return an instantiated geom object geoms should not override this method. Parameters ---------- stat : stat `stat` Returns ------- out : geom A geom object Raises ------ :class:`PlotnineError` if unable to create a `geom`.
[ "Return", "an", "instantiated", "geom", "object" ]
566e579af705367e584fb27a74e6c5199624ca89
https://github.com/has2k1/plotnine/blob/566e579af705367e584fb27a74e6c5199624ca89/plotnine/geoms/geom.py#L50-L84
233,361
has2k1/plotnine
plotnine/geoms/geom.py
geom.aesthetics
def aesthetics(cls): """ Return all the aesthetics for this geom geoms should not override this method. """ main = cls.DEFAULT_AES.keys() | cls.REQUIRED_AES other = {'group'} # Need to recognize both spellings if 'color' in main: other.add('colour') if 'outlier_color' in main: other.add('outlier_colour') return main | other
python
def aesthetics(cls): main = cls.DEFAULT_AES.keys() | cls.REQUIRED_AES other = {'group'} # Need to recognize both spellings if 'color' in main: other.add('colour') if 'outlier_color' in main: other.add('outlier_colour') return main | other
[ "def", "aesthetics", "(", "cls", ")", ":", "main", "=", "cls", ".", "DEFAULT_AES", ".", "keys", "(", ")", "|", "cls", ".", "REQUIRED_AES", "other", "=", "{", "'group'", "}", "# Need to recognize both spellings", "if", "'color'", "in", "main", ":", "other",...
Return all the aesthetics for this geom geoms should not override this method.
[ "Return", "all", "the", "aesthetics", "for", "this", "geom" ]
566e579af705367e584fb27a74e6c5199624ca89
https://github.com/has2k1/plotnine/blob/566e579af705367e584fb27a74e6c5199624ca89/plotnine/geoms/geom.py#L87-L100
233,362
has2k1/plotnine
plotnine/geoms/geom.py
geom.draw_layer
def draw_layer(self, data, layout, coord, **params): """ Draw layer across all panels geoms should not override this method. Parameters ---------- data : DataFrame DataFrame specific for this layer layout : Lanel Layout object created when the plot is getting built coord : coord Type of coordinate axes params : dict Combined *geom* and *stat* parameters. Also includes the stacking order of the layer in the plot (*zorder*) """ for pid, pdata in data.groupby('PANEL'): if len(pdata) == 0: continue ploc = pid - 1 panel_params = layout.panel_params[ploc] ax = layout.axs[ploc] self.draw_panel(pdata, panel_params, coord, ax, **params)
python
def draw_layer(self, data, layout, coord, **params): for pid, pdata in data.groupby('PANEL'): if len(pdata) == 0: continue ploc = pid - 1 panel_params = layout.panel_params[ploc] ax = layout.axs[ploc] self.draw_panel(pdata, panel_params, coord, ax, **params)
[ "def", "draw_layer", "(", "self", ",", "data", ",", "layout", ",", "coord", ",", "*", "*", "params", ")", ":", "for", "pid", ",", "pdata", "in", "data", ".", "groupby", "(", "'PANEL'", ")", ":", "if", "len", "(", "pdata", ")", "==", "0", ":", "...
Draw layer across all panels geoms should not override this method. Parameters ---------- data : DataFrame DataFrame specific for this layer layout : Lanel Layout object created when the plot is getting built coord : coord Type of coordinate axes params : dict Combined *geom* and *stat* parameters. Also includes the stacking order of the layer in the plot (*zorder*)
[ "Draw", "layer", "across", "all", "panels" ]
566e579af705367e584fb27a74e6c5199624ca89
https://github.com/has2k1/plotnine/blob/566e579af705367e584fb27a74e6c5199624ca89/plotnine/geoms/geom.py#L195-L221
233,363
has2k1/plotnine
plotnine/geoms/geom.py
geom.draw_panel
def draw_panel(self, data, panel_params, coord, ax, **params): """ Plot all groups For effeciency, geoms that do not need to partition different groups before plotting should override this method and avoid the groupby. Parameters ---------- data : dataframe Data to be plotted by this geom. This is the dataframe created in the plot_build pipeline. panel_params : dict The scale information as may be required by the axes. At this point, that information is about ranges, ticks and labels. Keys of interest to the geom are:: 'x_range' # tuple 'y_range' # tuple coord : coord Coordinate (e.g. coord_cartesian) system of the geom. ax : axes Axes on which to plot. params : dict Combined parameters for the geom and stat. Also includes the 'zorder'. """ for _, gdata in data.groupby('group'): gdata.reset_index(inplace=True, drop=True) self.draw_group(gdata, panel_params, coord, ax, **params)
python
def draw_panel(self, data, panel_params, coord, ax, **params): for _, gdata in data.groupby('group'): gdata.reset_index(inplace=True, drop=True) self.draw_group(gdata, panel_params, coord, ax, **params)
[ "def", "draw_panel", "(", "self", ",", "data", ",", "panel_params", ",", "coord", ",", "ax", ",", "*", "*", "params", ")", ":", "for", "_", ",", "gdata", "in", "data", ".", "groupby", "(", "'group'", ")", ":", "gdata", ".", "reset_index", "(", "inp...
Plot all groups For effeciency, geoms that do not need to partition different groups before plotting should override this method and avoid the groupby. Parameters ---------- data : dataframe Data to be plotted by this geom. This is the dataframe created in the plot_build pipeline. panel_params : dict The scale information as may be required by the axes. At this point, that information is about ranges, ticks and labels. Keys of interest to the geom are:: 'x_range' # tuple 'y_range' # tuple coord : coord Coordinate (e.g. coord_cartesian) system of the geom. ax : axes Axes on which to plot. params : dict Combined parameters for the geom and stat. Also includes the 'zorder'.
[ "Plot", "all", "groups" ]
566e579af705367e584fb27a74e6c5199624ca89
https://github.com/has2k1/plotnine/blob/566e579af705367e584fb27a74e6c5199624ca89/plotnine/geoms/geom.py#L223-L256
233,364
has2k1/plotnine
plotnine/geoms/geom.py
geom._verify_arguments
def _verify_arguments(self, kwargs): """ Verify arguments passed to the geom """ geom_stat_args = kwargs.keys() | self._stat._kwargs.keys() unknown = (geom_stat_args - self.aesthetics() - # geom aesthetics self.DEFAULT_PARAMS.keys() - # geom parameters self._stat.aesthetics() - # stat aesthetics self._stat.DEFAULT_PARAMS.keys() - # stat parameters {'data', 'mapping', # layer parameters 'show_legend', 'inherit_aes'}) # layer parameters if unknown: msg = ("Parameters {}, are not understood by " "either the geom, stat or layer.") raise PlotnineError(msg.format(unknown))
python
def _verify_arguments(self, kwargs): geom_stat_args = kwargs.keys() | self._stat._kwargs.keys() unknown = (geom_stat_args - self.aesthetics() - # geom aesthetics self.DEFAULT_PARAMS.keys() - # geom parameters self._stat.aesthetics() - # stat aesthetics self._stat.DEFAULT_PARAMS.keys() - # stat parameters {'data', 'mapping', # layer parameters 'show_legend', 'inherit_aes'}) # layer parameters if unknown: msg = ("Parameters {}, are not understood by " "either the geom, stat or layer.") raise PlotnineError(msg.format(unknown))
[ "def", "_verify_arguments", "(", "self", ",", "kwargs", ")", ":", "geom_stat_args", "=", "kwargs", ".", "keys", "(", ")", "|", "self", ".", "_stat", ".", "_kwargs", ".", "keys", "(", ")", "unknown", "=", "(", "geom_stat_args", "-", "self", ".", "aesthe...
Verify arguments passed to the geom
[ "Verify", "arguments", "passed", "to", "the", "geom" ]
566e579af705367e584fb27a74e6c5199624ca89
https://github.com/has2k1/plotnine/blob/566e579af705367e584fb27a74e6c5199624ca89/plotnine/geoms/geom.py#L358-L373
233,365
has2k1/plotnine
plotnine/geoms/geom.py
geom.handle_na
def handle_na(self, data): """ Remove rows with NaN values geoms that infer extra information from missing values should override this method. For example :class:`~plotnine.geoms.geom_path`. Parameters ---------- data : dataframe Data Returns ------- out : dataframe Data without the NaNs. Notes ----- Shows a warning if the any rows are removed and the `na_rm` parameter is False. It only takes into account the columns of the required aesthetics. """ return remove_missing(data, self.params['na_rm'], list(self.REQUIRED_AES | self.NON_MISSING_AES), self.__class__.__name__)
python
def handle_na(self, data): return remove_missing(data, self.params['na_rm'], list(self.REQUIRED_AES | self.NON_MISSING_AES), self.__class__.__name__)
[ "def", "handle_na", "(", "self", ",", "data", ")", ":", "return", "remove_missing", "(", "data", ",", "self", ".", "params", "[", "'na_rm'", "]", ",", "list", "(", "self", ".", "REQUIRED_AES", "|", "self", ".", "NON_MISSING_AES", ")", ",", "self", ".",...
Remove rows with NaN values geoms that infer extra information from missing values should override this method. For example :class:`~plotnine.geoms.geom_path`. Parameters ---------- data : dataframe Data Returns ------- out : dataframe Data without the NaNs. Notes ----- Shows a warning if the any rows are removed and the `na_rm` parameter is False. It only takes into account the columns of the required aesthetics.
[ "Remove", "rows", "with", "NaN", "values" ]
566e579af705367e584fb27a74e6c5199624ca89
https://github.com/has2k1/plotnine/blob/566e579af705367e584fb27a74e6c5199624ca89/plotnine/geoms/geom.py#L375-L402
233,366
has2k1/plotnine
plotnine/positions/position_stack.py
position_stack.setup_params
def setup_params(self, data): """ Verify, modify & return a copy of the params. """ # Variable for which to do the stacking if 'ymax' in data: if any((data['ymin'] != 0) & (data['ymax'] != 0)): warn("Stacking not well defined when not " "anchored on the axis.", PlotnineWarning) var = 'ymax' elif 'y' in data: var = 'y' else: warn("Stacking requires either ymin & ymax or y " "aesthetics. Maybe you want position = 'identity'?", PlotnineWarning) var = None params = self.params.copy() params['var'] = var params['fill'] = self.fill return params
python
def setup_params(self, data): # Variable for which to do the stacking if 'ymax' in data: if any((data['ymin'] != 0) & (data['ymax'] != 0)): warn("Stacking not well defined when not " "anchored on the axis.", PlotnineWarning) var = 'ymax' elif 'y' in data: var = 'y' else: warn("Stacking requires either ymin & ymax or y " "aesthetics. Maybe you want position = 'identity'?", PlotnineWarning) var = None params = self.params.copy() params['var'] = var params['fill'] = self.fill return params
[ "def", "setup_params", "(", "self", ",", "data", ")", ":", "# Variable for which to do the stacking", "if", "'ymax'", "in", "data", ":", "if", "any", "(", "(", "data", "[", "'ymin'", "]", "!=", "0", ")", "&", "(", "data", "[", "'ymax'", "]", "!=", "0",...
Verify, modify & return a copy of the params.
[ "Verify", "modify", "&", "return", "a", "copy", "of", "the", "params", "." ]
566e579af705367e584fb27a74e6c5199624ca89
https://github.com/has2k1/plotnine/blob/566e579af705367e584fb27a74e6c5199624ca89/plotnine/positions/position_stack.py#L24-L45
233,367
has2k1/plotnine
plotnine/positions/position_stack.py
position_stack.strategy
def strategy(data, params): """ Stack overlapping intervals. Assumes that each set has the same horizontal position """ vjust = params['vjust'] y = data['y'].copy() y[np.isnan(y)] = 0 heights = np.append(0, y.cumsum()) if params['fill']: heights = heights / np.abs(heights[-1]) data['ymin'] = np.min([heights[:-1], heights[1:]], axis=0) data['ymax'] = np.max([heights[:-1], heights[1:]], axis=0) # less intuitive than (ymin + vjust(ymax-ymin)), but # this way avoids subtracting numbers of potentially # similar precision data['y'] = ((1-vjust)*data['ymin'] + vjust*data['ymax']) return data
python
def strategy(data, params): vjust = params['vjust'] y = data['y'].copy() y[np.isnan(y)] = 0 heights = np.append(0, y.cumsum()) if params['fill']: heights = heights / np.abs(heights[-1]) data['ymin'] = np.min([heights[:-1], heights[1:]], axis=0) data['ymax'] = np.max([heights[:-1], heights[1:]], axis=0) # less intuitive than (ymin + vjust(ymax-ymin)), but # this way avoids subtracting numbers of potentially # similar precision data['y'] = ((1-vjust)*data['ymin'] + vjust*data['ymax']) return data
[ "def", "strategy", "(", "data", ",", "params", ")", ":", "vjust", "=", "params", "[", "'vjust'", "]", "y", "=", "data", "[", "'y'", "]", ".", "copy", "(", ")", "y", "[", "np", ".", "isnan", "(", "y", ")", "]", "=", "0", "heights", "=", "np", ...
Stack overlapping intervals. Assumes that each set has the same horizontal position
[ "Stack", "overlapping", "intervals", "." ]
566e579af705367e584fb27a74e6c5199624ca89
https://github.com/has2k1/plotnine/blob/566e579af705367e584fb27a74e6c5199624ca89/plotnine/positions/position_stack.py#L83-L104
233,368
has2k1/plotnine
plotnine/stats/stat_bindot.py
densitybin
def densitybin(x, weight=None, binwidth=None, bins=None, rangee=None): """ Do density binning It does not collapse each bin with a count. Parameters ---------- x : array-like Numbers to bin weight : array-like Weights binwidth : numeric Size of the bins rangee : tuple Range of x Returns ------- data : DataFrame """ if all(pd.isnull(x)): return pd.DataFrame() if weight is None: weight = np.ones(len(x)) weight = np.asarray(weight) weight[np.isnan(weight)] = 0 if rangee is None: rangee = np.min(x), np.max(x) if bins is None: bins = 30 if binwidth is None: binwidth = np.ptp(rangee) / bins # Sort weight and x, by x order = np.argsort(x) weight = weight[order] x = x[order] cbin = 0 # Current bin ID binn = [None] * len(x) # The bin ID for each observation # End position of current bin (scan left to right) binend = -np.inf # Scan list and put dots in bins for i, value in enumerate(x): # If past end of bin, start a new bin at this point if value >= binend: binend = value + binwidth cbin = cbin + 1 binn[i] = cbin def func(series): return (series.min()+series.max())/2 results = pd.DataFrame({'x': x, 'bin': binn, 'binwidth': binwidth, 'weight': weight}) # This is a plyr::ddply results['bincenter'] = results.groupby('bin')['x'].transform(func) return results
python
def densitybin(x, weight=None, binwidth=None, bins=None, rangee=None): if all(pd.isnull(x)): return pd.DataFrame() if weight is None: weight = np.ones(len(x)) weight = np.asarray(weight) weight[np.isnan(weight)] = 0 if rangee is None: rangee = np.min(x), np.max(x) if bins is None: bins = 30 if binwidth is None: binwidth = np.ptp(rangee) / bins # Sort weight and x, by x order = np.argsort(x) weight = weight[order] x = x[order] cbin = 0 # Current bin ID binn = [None] * len(x) # The bin ID for each observation # End position of current bin (scan left to right) binend = -np.inf # Scan list and put dots in bins for i, value in enumerate(x): # If past end of bin, start a new bin at this point if value >= binend: binend = value + binwidth cbin = cbin + 1 binn[i] = cbin def func(series): return (series.min()+series.max())/2 results = pd.DataFrame({'x': x, 'bin': binn, 'binwidth': binwidth, 'weight': weight}) # This is a plyr::ddply results['bincenter'] = results.groupby('bin')['x'].transform(func) return results
[ "def", "densitybin", "(", "x", ",", "weight", "=", "None", ",", "binwidth", "=", "None", ",", "bins", "=", "None", ",", "rangee", "=", "None", ")", ":", "if", "all", "(", "pd", ".", "isnull", "(", "x", ")", ")", ":", "return", "pd", ".", "DataF...
Do density binning It does not collapse each bin with a count. Parameters ---------- x : array-like Numbers to bin weight : array-like Weights binwidth : numeric Size of the bins rangee : tuple Range of x Returns ------- data : DataFrame
[ "Do", "density", "binning" ]
566e579af705367e584fb27a74e6c5199624ca89
https://github.com/has2k1/plotnine/blob/566e579af705367e584fb27a74e6c5199624ca89/plotnine/stats/stat_bindot.py#L215-L278
233,369
has2k1/plotnine
plotnine/themes/theme.py
theme_get
def theme_get(): """ Return the default theme The default theme is the one set (using :func:`theme_set`) by the user. If none has been set, then :class:`theme_gray` is the default. """ from .theme_gray import theme_gray _theme = get_option('current_theme') if isinstance(_theme, type): _theme = _theme() return _theme or theme_gray()
python
def theme_get(): from .theme_gray import theme_gray _theme = get_option('current_theme') if isinstance(_theme, type): _theme = _theme() return _theme or theme_gray()
[ "def", "theme_get", "(", ")", ":", "from", ".", "theme_gray", "import", "theme_gray", "_theme", "=", "get_option", "(", "'current_theme'", ")", "if", "isinstance", "(", "_theme", ",", "type", ")", ":", "_theme", "=", "_theme", "(", ")", "return", "_theme",...
Return the default theme The default theme is the one set (using :func:`theme_set`) by the user. If none has been set, then :class:`theme_gray` is the default.
[ "Return", "the", "default", "theme" ]
566e579af705367e584fb27a74e6c5199624ca89
https://github.com/has2k1/plotnine/blob/566e579af705367e584fb27a74e6c5199624ca89/plotnine/themes/theme.py#L277-L289
233,370
has2k1/plotnine
plotnine/themes/theme.py
theme.apply
def apply(self, ax): """ Apply this theme, then apply additional modifications in order. Subclasses that override this method should make sure that the base class method is called. """ for th in self.themeables.values(): th.apply(ax)
python
def apply(self, ax): for th in self.themeables.values(): th.apply(ax)
[ "def", "apply", "(", "self", ",", "ax", ")", ":", "for", "th", "in", "self", ".", "themeables", ".", "values", "(", ")", ":", "th", ".", "apply", "(", "ax", ")" ]
Apply this theme, then apply additional modifications in order. Subclasses that override this method should make sure that the base class method is called.
[ "Apply", "this", "theme", "then", "apply", "additional", "modifications", "in", "order", "." ]
566e579af705367e584fb27a74e6c5199624ca89
https://github.com/has2k1/plotnine/blob/566e579af705367e584fb27a74e6c5199624ca89/plotnine/themes/theme.py#L114-L122
233,371
has2k1/plotnine
plotnine/themes/theme.py
theme.apply_rcparams
def apply_rcparams(self): """ Set the rcParams """ from matplotlib import rcParams for key, val in self.rcParams.items(): try: rcParams[key] = val except Exception as e: msg = ("""Setting "mpl.rcParams['{}']={}" """ "raised an Exception: {}") raise PlotnineError(msg.format(key, val, e))
python
def apply_rcparams(self): from matplotlib import rcParams for key, val in self.rcParams.items(): try: rcParams[key] = val except Exception as e: msg = ("""Setting "mpl.rcParams['{}']={}" """ "raised an Exception: {}") raise PlotnineError(msg.format(key, val, e))
[ "def", "apply_rcparams", "(", "self", ")", ":", "from", "matplotlib", "import", "rcParams", "for", "key", ",", "val", "in", "self", ".", "rcParams", ".", "items", "(", ")", ":", "try", ":", "rcParams", "[", "key", "]", "=", "val", "except", "Exception"...
Set the rcParams
[ "Set", "the", "rcParams" ]
566e579af705367e584fb27a74e6c5199624ca89
https://github.com/has2k1/plotnine/blob/566e579af705367e584fb27a74e6c5199624ca89/plotnine/themes/theme.py#L148-L159
233,372
has2k1/plotnine
plotnine/themes/theme.py
theme.rcParams
def rcParams(self): """ Return rcParams dict for this theme. Notes ----- Subclasses should not need to override this method method as long as self._rcParams is constructed properly. rcParams are used during plotting. Sometimes the same theme can be achieved by setting rcParams before plotting or a apply after plotting. The choice of how to implement it is is a matter of convenience in that case. There are certain things can only be themed after plotting. There may not be an rcParam to control the theme or the act of plotting may cause an entity to come into existence before it can be themed. """ try: rcParams = deepcopy(self._rcParams) except NotImplementedError: # deepcopy raises an error for objects that are drived from or # composed of matplotlib.transform.TransformNode. # Not desirable, but probably requires upstream fix. # In particular, XKCD uses matplotlib.patheffects.withStrok rcParams = copy(self._rcParams) for th in self.themeables.values(): rcParams.update(th.rcParams) return rcParams
python
def rcParams(self): try: rcParams = deepcopy(self._rcParams) except NotImplementedError: # deepcopy raises an error for objects that are drived from or # composed of matplotlib.transform.TransformNode. # Not desirable, but probably requires upstream fix. # In particular, XKCD uses matplotlib.patheffects.withStrok rcParams = copy(self._rcParams) for th in self.themeables.values(): rcParams.update(th.rcParams) return rcParams
[ "def", "rcParams", "(", "self", ")", ":", "try", ":", "rcParams", "=", "deepcopy", "(", "self", ".", "_rcParams", ")", "except", "NotImplementedError", ":", "# deepcopy raises an error for objects that are drived from or", "# composed of matplotlib.transform.TransformNode.", ...
Return rcParams dict for this theme. Notes ----- Subclasses should not need to override this method method as long as self._rcParams is constructed properly. rcParams are used during plotting. Sometimes the same theme can be achieved by setting rcParams before plotting or a apply after plotting. The choice of how to implement it is is a matter of convenience in that case. There are certain things can only be themed after plotting. There may not be an rcParam to control the theme or the act of plotting may cause an entity to come into existence before it can be themed.
[ "Return", "rcParams", "dict", "for", "this", "theme", "." ]
566e579af705367e584fb27a74e6c5199624ca89
https://github.com/has2k1/plotnine/blob/566e579af705367e584fb27a74e6c5199624ca89/plotnine/themes/theme.py#L162-L193
233,373
has2k1/plotnine
plotnine/themes/theme.py
theme.add_theme
def add_theme(self, other, inplace=False): """Add themes together. Subclasses should not override this method. This will be called when adding two instances of class 'theme' together. A complete theme will annihilate any previous themes. Partial themes can be added together and can be added to a complete theme. """ if other.complete: return other theme_copy = self if inplace else deepcopy(self) theme_copy.themeables.update(deepcopy(other.themeables)) return theme_copy
python
def add_theme(self, other, inplace=False): if other.complete: return other theme_copy = self if inplace else deepcopy(self) theme_copy.themeables.update(deepcopy(other.themeables)) return theme_copy
[ "def", "add_theme", "(", "self", ",", "other", ",", "inplace", "=", "False", ")", ":", "if", "other", ".", "complete", ":", "return", "other", "theme_copy", "=", "self", "if", "inplace", "else", "deepcopy", "(", "self", ")", "theme_copy", ".", "themeable...
Add themes together. Subclasses should not override this method. This will be called when adding two instances of class 'theme' together. A complete theme will annihilate any previous themes. Partial themes can be added together and can be added to a complete theme.
[ "Add", "themes", "together", "." ]
566e579af705367e584fb27a74e6c5199624ca89
https://github.com/has2k1/plotnine/blob/566e579af705367e584fb27a74e6c5199624ca89/plotnine/themes/theme.py#L195-L210
233,374
has2k1/plotnine
plotnine/animation.py
PlotnineAnimation._draw_plots
def _draw_plots(self, plots): """ Plot and return the figure and artists Parameters ---------- plots : iterable ggplot objects that make up the the frames of the animation Returns ------- figure : matplotlib.figure.Figure Matplotlib figure artists : list List of :class:`Matplotlib.artist.artist` """ # For keeping track of artists for each frame artist_offsets = { 'collections': [], 'patches': [], 'lines': [], 'texts': [], 'artists': [] } scale_limits = dict() def initialise_artist_offsets(n): """ Initilise artists_offsets arrays to zero Parameters ---------- n : int Number of axes to initialise artists for. The artists for each axes are tracked separately. """ for artist_type in artist_offsets: artist_offsets[artist_type] = [0] * n def get_frame_artists(plot): """ Parameters ---------- plot : ggplot Drawn ggplot object from which to extract artists. """ # The axes accumulate artists for all frames # For each frame we pickup the newly added artists # We use offsets to mark the end of the previous frame # e.g ax.collections[start:] frame_artists = [] for i, ax in enumerate(plot.axs): for name in artist_offsets: start = artist_offsets[name][i] new_artists = getattr(ax, name)[start:] frame_artists.extend(new_artists) artist_offsets[name][i] += len(new_artists) return frame_artists def set_scale_limits(plot): """ Set limits of all the scales in the animation Should be called before :func:`check_scale_limits`. Parameters ---------- plot : ggplot First ggplot object that has been drawn """ for sc in plot.scales: ae = sc.aesthetics[0] scale_limits[ae] = sc.limits def check_scale_limits(plot, frame_no): """ Check limits of the scales of a plot in the animation Raises a PlotnineError if any of the scales has limits that do not match those of the first plot/frame. Should be called after :func:`set_scale_limits`. 
Parameters ---------- plot : ggplot ggplot object that has been drawn frame_no : int Frame number """ if len(scale_limits) != len(plot.scales): raise PlotnineError( "All plots must have the same number of scales " "as the first plot of the animation." ) for sc in plot.scales: ae = sc.aesthetics[0] if ae not in scale_limits: raise PlotnineError( "The plot for frame {} does not have a scale " "for the {} aesthetic.".format(frame_no, ae) ) if sc.limits != scale_limits[ae]: raise PlotnineError( "The {} scale of plot for frame {} has different " "limits from those of the first frame." "".format(ae, frame_no) ) figure = None axs = None artists = [] # The first ggplot creates the figure, axes and the initial # frame of the animation. The rest of the ggplots draw # onto the figure and axes created by the first ggplot and # they create the subsequent frames. for frame_no, p in enumerate(plots): if figure is None: figure, plot = p.draw(return_ggplot=True) axs = plot.axs initialise_artist_offsets(len(axs)) set_scale_limits(plot) else: p = copy(p) plot = p._draw_using_figure(figure, axs) try: check_scale_limits(plot, frame_no) except PlotnineError as err: plt.close(figure) raise err artists.append(get_frame_artists(plot)) if figure is None: figure = plt.figure() return figure, artists
python
def _draw_plots(self, plots): # For keeping track of artists for each frame artist_offsets = { 'collections': [], 'patches': [], 'lines': [], 'texts': [], 'artists': [] } scale_limits = dict() def initialise_artist_offsets(n): """ Initilise artists_offsets arrays to zero Parameters ---------- n : int Number of axes to initialise artists for. The artists for each axes are tracked separately. """ for artist_type in artist_offsets: artist_offsets[artist_type] = [0] * n def get_frame_artists(plot): """ Parameters ---------- plot : ggplot Drawn ggplot object from which to extract artists. """ # The axes accumulate artists for all frames # For each frame we pickup the newly added artists # We use offsets to mark the end of the previous frame # e.g ax.collections[start:] frame_artists = [] for i, ax in enumerate(plot.axs): for name in artist_offsets: start = artist_offsets[name][i] new_artists = getattr(ax, name)[start:] frame_artists.extend(new_artists) artist_offsets[name][i] += len(new_artists) return frame_artists def set_scale_limits(plot): """ Set limits of all the scales in the animation Should be called before :func:`check_scale_limits`. Parameters ---------- plot : ggplot First ggplot object that has been drawn """ for sc in plot.scales: ae = sc.aesthetics[0] scale_limits[ae] = sc.limits def check_scale_limits(plot, frame_no): """ Check limits of the scales of a plot in the animation Raises a PlotnineError if any of the scales has limits that do not match those of the first plot/frame. Should be called after :func:`set_scale_limits`. Parameters ---------- plot : ggplot ggplot object that has been drawn frame_no : int Frame number """ if len(scale_limits) != len(plot.scales): raise PlotnineError( "All plots must have the same number of scales " "as the first plot of the animation." 
) for sc in plot.scales: ae = sc.aesthetics[0] if ae not in scale_limits: raise PlotnineError( "The plot for frame {} does not have a scale " "for the {} aesthetic.".format(frame_no, ae) ) if sc.limits != scale_limits[ae]: raise PlotnineError( "The {} scale of plot for frame {} has different " "limits from those of the first frame." "".format(ae, frame_no) ) figure = None axs = None artists = [] # The first ggplot creates the figure, axes and the initial # frame of the animation. The rest of the ggplots draw # onto the figure and axes created by the first ggplot and # they create the subsequent frames. for frame_no, p in enumerate(plots): if figure is None: figure, plot = p.draw(return_ggplot=True) axs = plot.axs initialise_artist_offsets(len(axs)) set_scale_limits(plot) else: p = copy(p) plot = p._draw_using_figure(figure, axs) try: check_scale_limits(plot, frame_no) except PlotnineError as err: plt.close(figure) raise err artists.append(get_frame_artists(plot)) if figure is None: figure = plt.figure() return figure, artists
[ "def", "_draw_plots", "(", "self", ",", "plots", ")", ":", "# For keeping track of artists for each frame", "artist_offsets", "=", "{", "'collections'", ":", "[", "]", ",", "'patches'", ":", "[", "]", ",", "'lines'", ":", "[", "]", ",", "'texts'", ":", "[", ...
Plot and return the figure and artists Parameters ---------- plots : iterable ggplot objects that make up the the frames of the animation Returns ------- figure : matplotlib.figure.Figure Matplotlib figure artists : list List of :class:`Matplotlib.artist.artist`
[ "Plot", "and", "return", "the", "figure", "and", "artists" ]
566e579af705367e584fb27a74e6c5199624ca89
https://github.com/has2k1/plotnine/blob/566e579af705367e584fb27a74e6c5199624ca89/plotnine/animation.py#L54-L194
233,375
has2k1/plotnine
plotnine/coords/coord_flip.py
flip_labels
def flip_labels(obj): """ Rename fields x to y and y to x Parameters ---------- obj : dict_like Object with labels to rename """ def sub(a, b): """ Substitute all keys that start with a to b """ for label in list(obj.keys()): if label.startswith(a): new_label = b+label[1:] obj[new_label] = obj.pop(label) sub('x', 'z') sub('y', 'x') sub('z', 'y') return obj
python
def flip_labels(obj): def sub(a, b): """ Substitute all keys that start with a to b """ for label in list(obj.keys()): if label.startswith(a): new_label = b+label[1:] obj[new_label] = obj.pop(label) sub('x', 'z') sub('y', 'x') sub('z', 'y') return obj
[ "def", "flip_labels", "(", "obj", ")", ":", "def", "sub", "(", "a", ",", "b", ")", ":", "\"\"\"\n Substitute all keys that start with a to b\n \"\"\"", "for", "label", "in", "list", "(", "obj", ".", "keys", "(", ")", ")", ":", "if", "label", "....
Rename fields x to y and y to x Parameters ---------- obj : dict_like Object with labels to rename
[ "Rename", "fields", "x", "to", "y", "and", "y", "to", "x" ]
566e579af705367e584fb27a74e6c5199624ca89
https://github.com/has2k1/plotnine/blob/566e579af705367e584fb27a74e6c5199624ca89/plotnine/coords/coord_flip.py#L58-L79
233,376
has2k1/plotnine
plotnine/stats/stat_summary.py
bootstrap_statistics
def bootstrap_statistics(series, statistic, n_samples=1000, confidence_interval=0.95, random_state=None): """ Default parameters taken from R's Hmisc smean.cl.boot """ if random_state is None: random_state = np.random alpha = 1 - confidence_interval size = (n_samples, len(series)) inds = random_state.randint(0, len(series), size=size) samples = series.values[inds] means = np.sort(statistic(samples, axis=1)) return pd.DataFrame({'ymin': means[int((alpha/2)*n_samples)], 'ymax': means[int((1-alpha/2)*n_samples)], 'y': [statistic(series)]})
python
def bootstrap_statistics(series, statistic, n_samples=1000, confidence_interval=0.95, random_state=None): if random_state is None: random_state = np.random alpha = 1 - confidence_interval size = (n_samples, len(series)) inds = random_state.randint(0, len(series), size=size) samples = series.values[inds] means = np.sort(statistic(samples, axis=1)) return pd.DataFrame({'ymin': means[int((alpha/2)*n_samples)], 'ymax': means[int((1-alpha/2)*n_samples)], 'y': [statistic(series)]})
[ "def", "bootstrap_statistics", "(", "series", ",", "statistic", ",", "n_samples", "=", "1000", ",", "confidence_interval", "=", "0.95", ",", "random_state", "=", "None", ")", ":", "if", "random_state", "is", "None", ":", "random_state", "=", "np", ".", "rand...
Default parameters taken from R's Hmisc smean.cl.boot
[ "Default", "parameters", "taken", "from", "R", "s", "Hmisc", "smean", ".", "cl", ".", "boot" ]
566e579af705367e584fb27a74e6c5199624ca89
https://github.com/has2k1/plotnine/blob/566e579af705367e584fb27a74e6c5199624ca89/plotnine/stats/stat_summary.py#L11-L27
233,377
has2k1/plotnine
plotnine/stats/stat_summary.py
mean_cl_boot
def mean_cl_boot(series, n_samples=1000, confidence_interval=0.95, random_state=None): """ Bootstrapped mean with confidence limits """ return bootstrap_statistics(series, np.mean, n_samples=n_samples, confidence_interval=confidence_interval, random_state=random_state)
python
def mean_cl_boot(series, n_samples=1000, confidence_interval=0.95, random_state=None): return bootstrap_statistics(series, np.mean, n_samples=n_samples, confidence_interval=confidence_interval, random_state=random_state)
[ "def", "mean_cl_boot", "(", "series", ",", "n_samples", "=", "1000", ",", "confidence_interval", "=", "0.95", ",", "random_state", "=", "None", ")", ":", "return", "bootstrap_statistics", "(", "series", ",", "np", ".", "mean", ",", "n_samples", "=", "n_sampl...
Bootstrapped mean with confidence limits
[ "Bootstrapped", "mean", "with", "confidence", "limits" ]
566e579af705367e584fb27a74e6c5199624ca89
https://github.com/has2k1/plotnine/blob/566e579af705367e584fb27a74e6c5199624ca89/plotnine/stats/stat_summary.py#L30-L38
233,378
has2k1/plotnine
plotnine/stats/stat_summary.py
mean_sdl
def mean_sdl(series, mult=2): """ mean plus or minus a constant times the standard deviation """ m = series.mean() s = series.std() return pd.DataFrame({'y': [m], 'ymin': m-mult*s, 'ymax': m+mult*s})
python
def mean_sdl(series, mult=2): m = series.mean() s = series.std() return pd.DataFrame({'y': [m], 'ymin': m-mult*s, 'ymax': m+mult*s})
[ "def", "mean_sdl", "(", "series", ",", "mult", "=", "2", ")", ":", "m", "=", "series", ".", "mean", "(", ")", "s", "=", "series", ".", "std", "(", ")", "return", "pd", ".", "DataFrame", "(", "{", "'y'", ":", "[", "m", "]", ",", "'ymin'", ":",...
mean plus or minus a constant times the standard deviation
[ "mean", "plus", "or", "minus", "a", "constant", "times", "the", "standard", "deviation" ]
566e579af705367e584fb27a74e6c5199624ca89
https://github.com/has2k1/plotnine/blob/566e579af705367e584fb27a74e6c5199624ca89/plotnine/stats/stat_summary.py#L54-L62
233,379
has2k1/plotnine
plotnine/stats/stat_summary.py
median_hilow
def median_hilow(series, confidence_interval=0.95): """ Median and a selected pair of outer quantiles having equal tail areas """ tail = (1 - confidence_interval) / 2 return pd.DataFrame({'y': [np.median(series)], 'ymin': np.percentile(series, 100 * tail), 'ymax': np.percentile(series, 100 * (1 - tail))})
python
def median_hilow(series, confidence_interval=0.95): tail = (1 - confidence_interval) / 2 return pd.DataFrame({'y': [np.median(series)], 'ymin': np.percentile(series, 100 * tail), 'ymax': np.percentile(series, 100 * (1 - tail))})
[ "def", "median_hilow", "(", "series", ",", "confidence_interval", "=", "0.95", ")", ":", "tail", "=", "(", "1", "-", "confidence_interval", ")", "/", "2", "return", "pd", ".", "DataFrame", "(", "{", "'y'", ":", "[", "np", ".", "median", "(", "series", ...
Median and a selected pair of outer quantiles having equal tail areas
[ "Median", "and", "a", "selected", "pair", "of", "outer", "quantiles", "having", "equal", "tail", "areas" ]
566e579af705367e584fb27a74e6c5199624ca89
https://github.com/has2k1/plotnine/blob/566e579af705367e584fb27a74e6c5199624ca89/plotnine/stats/stat_summary.py#L65-L72
233,380
has2k1/plotnine
plotnine/stats/stat_summary.py
mean_se
def mean_se(series, mult=1): """ Calculate mean and standard errors on either side """ m = np.mean(series) se = mult * np.sqrt(np.var(series) / len(series)) return pd.DataFrame({'y': [m], 'ymin': m-se, 'ymax': m+se})
python
def mean_se(series, mult=1): m = np.mean(series) se = mult * np.sqrt(np.var(series) / len(series)) return pd.DataFrame({'y': [m], 'ymin': m-se, 'ymax': m+se})
[ "def", "mean_se", "(", "series", ",", "mult", "=", "1", ")", ":", "m", "=", "np", ".", "mean", "(", "series", ")", "se", "=", "mult", "*", "np", ".", "sqrt", "(", "np", ".", "var", "(", "series", ")", "/", "len", "(", "series", ")", ")", "r...
Calculate mean and standard errors on either side
[ "Calculate", "mean", "and", "standard", "errors", "on", "either", "side" ]
566e579af705367e584fb27a74e6c5199624ca89
https://github.com/has2k1/plotnine/blob/566e579af705367e584fb27a74e6c5199624ca89/plotnine/stats/stat_summary.py#L75-L83
233,381
has2k1/plotnine
plotnine/facets/facet_null.py
facet_null.set_breaks_and_labels
def set_breaks_and_labels(self, ranges, layout_info, pidx): """ Add breaks and labels to the axes Parameters ---------- ranges : dict-like range information for the axes layout_info : dict-like facet layout information pidx : int Panel index """ ax = self.axs[pidx] facet.set_breaks_and_labels(self, ranges, layout_info, pidx) ax.xaxis.set_ticks_position('bottom') ax.yaxis.set_ticks_position('left')
python
def set_breaks_and_labels(self, ranges, layout_info, pidx): ax = self.axs[pidx] facet.set_breaks_and_labels(self, ranges, layout_info, pidx) ax.xaxis.set_ticks_position('bottom') ax.yaxis.set_ticks_position('left')
[ "def", "set_breaks_and_labels", "(", "self", ",", "ranges", ",", "layout_info", ",", "pidx", ")", ":", "ax", "=", "self", ".", "axs", "[", "pidx", "]", "facet", ".", "set_breaks_and_labels", "(", "self", ",", "ranges", ",", "layout_info", ",", "pidx", ")...
Add breaks and labels to the axes Parameters ---------- ranges : dict-like range information for the axes layout_info : dict-like facet layout information pidx : int Panel index
[ "Add", "breaks", "and", "labels", "to", "the", "axes" ]
566e579af705367e584fb27a74e6c5199624ca89
https://github.com/has2k1/plotnine/blob/566e579af705367e584fb27a74e6c5199624ca89/plotnine/facets/facet_null.py#L27-L43
233,382
has2k1/plotnine
plotnine/facets/facet_null.py
facet_null.spaceout_and_resize_panels
def spaceout_and_resize_panels(self): """ Adjust the space between the panels """ # Only deal with the aspect ratio figure = self.figure theme = self.theme try: aspect_ratio = theme.themeables.property('aspect_ratio') except KeyError: aspect_ratio = self.coordinates.aspect( self.layout.panel_params[0]) if aspect_ratio is None: return left = figure.subplotpars.left right = figure.subplotpars.right top = figure.subplotpars.top bottom = figure.subplotpars.bottom W, H = figure.get_size_inches() w = (right-left)*W h = w*aspect_ratio H = h / (top-bottom) figure.set_figheight(H)
python
def spaceout_and_resize_panels(self): # Only deal with the aspect ratio figure = self.figure theme = self.theme try: aspect_ratio = theme.themeables.property('aspect_ratio') except KeyError: aspect_ratio = self.coordinates.aspect( self.layout.panel_params[0]) if aspect_ratio is None: return left = figure.subplotpars.left right = figure.subplotpars.right top = figure.subplotpars.top bottom = figure.subplotpars.bottom W, H = figure.get_size_inches() w = (right-left)*W h = w*aspect_ratio H = h / (top-bottom) figure.set_figheight(H)
[ "def", "spaceout_and_resize_panels", "(", "self", ")", ":", "# Only deal with the aspect ratio", "figure", "=", "self", ".", "figure", "theme", "=", "self", ".", "theme", "try", ":", "aspect_ratio", "=", "theme", ".", "themeables", ".", "property", "(", "'aspect...
Adjust the space between the panels
[ "Adjust", "the", "space", "between", "the", "panels" ]
566e579af705367e584fb27a74e6c5199624ca89
https://github.com/has2k1/plotnine/blob/566e579af705367e584fb27a74e6c5199624ca89/plotnine/facets/facet_null.py#L45-L72
233,383
has2k1/plotnine
plotnine/geoms/geom_path.py
_draw_segments
def _draw_segments(data, ax, **params): """ Draw independent line segments between all the points """ color = to_rgba(data['color'], data['alpha']) # All we do is line-up all the points in a group # into segments, all in a single list. # Along the way the other parameters are put in # sequences accordingly indices = [] # for attributes of starting point of each segment segments = [] for _, df in data.groupby('group'): idx = df.index indices.extend(idx[:-1]) # One line from two points x = data['x'].iloc[idx] y = data['y'].iloc[idx] segments.append(make_line_segments(x, y, ispath=True)) segments = np.vstack(segments) if color is None: edgecolor = color else: edgecolor = [color[i] for i in indices] linewidth = data.loc[indices, 'size'] linestyle = data.loc[indices, 'linetype'] coll = mcoll.LineCollection(segments, edgecolor=edgecolor, linewidth=linewidth, linestyle=linestyle, zorder=params['zorder']) ax.add_collection(coll)
python
def _draw_segments(data, ax, **params): color = to_rgba(data['color'], data['alpha']) # All we do is line-up all the points in a group # into segments, all in a single list. # Along the way the other parameters are put in # sequences accordingly indices = [] # for attributes of starting point of each segment segments = [] for _, df in data.groupby('group'): idx = df.index indices.extend(idx[:-1]) # One line from two points x = data['x'].iloc[idx] y = data['y'].iloc[idx] segments.append(make_line_segments(x, y, ispath=True)) segments = np.vstack(segments) if color is None: edgecolor = color else: edgecolor = [color[i] for i in indices] linewidth = data.loc[indices, 'size'] linestyle = data.loc[indices, 'linetype'] coll = mcoll.LineCollection(segments, edgecolor=edgecolor, linewidth=linewidth, linestyle=linestyle, zorder=params['zorder']) ax.add_collection(coll)
[ "def", "_draw_segments", "(", "data", ",", "ax", ",", "*", "*", "params", ")", ":", "color", "=", "to_rgba", "(", "data", "[", "'color'", "]", ",", "data", "[", "'alpha'", "]", ")", "# All we do is line-up all the points in a group", "# into segments, all in a s...
Draw independent line segments between all the points
[ "Draw", "independent", "line", "segments", "between", "all", "the", "points" ]
566e579af705367e584fb27a74e6c5199624ca89
https://github.com/has2k1/plotnine/blob/566e579af705367e584fb27a74e6c5199624ca89/plotnine/geoms/geom_path.py#L341-L375
233,384
has2k1/plotnine
plotnine/geoms/geom_path.py
_draw_lines
def _draw_lines(data, ax, **params): """ Draw a path with the same characteristics from the first point to the last point """ color = to_rgba(data['color'].iloc[0], data['alpha'].iloc[0]) join_style = _get_joinstyle(data, params) lines = mlines.Line2D(data['x'], data['y'], color=color, linewidth=data['size'].iloc[0], linestyle=data['linetype'].iloc[0], zorder=params['zorder'], **join_style) ax.add_artist(lines)
python
def _draw_lines(data, ax, **params): color = to_rgba(data['color'].iloc[0], data['alpha'].iloc[0]) join_style = _get_joinstyle(data, params) lines = mlines.Line2D(data['x'], data['y'], color=color, linewidth=data['size'].iloc[0], linestyle=data['linetype'].iloc[0], zorder=params['zorder'], **join_style) ax.add_artist(lines)
[ "def", "_draw_lines", "(", "data", ",", "ax", ",", "*", "*", "params", ")", ":", "color", "=", "to_rgba", "(", "data", "[", "'color'", "]", ".", "iloc", "[", "0", "]", ",", "data", "[", "'alpha'", "]", ".", "iloc", "[", "0", "]", ")", "join_sty...
Draw a path with the same characteristics from the first point to the last point
[ "Draw", "a", "path", "with", "the", "same", "characteristics", "from", "the", "first", "point", "to", "the", "last", "point" ]
566e579af705367e584fb27a74e6c5199624ca89
https://github.com/has2k1/plotnine/blob/566e579af705367e584fb27a74e6c5199624ca89/plotnine/geoms/geom_path.py#L378-L392
233,385
has2k1/plotnine
plotnine/geoms/geom_path.py
arrow.get_paths
def get_paths(self, x1, y1, x2, y2, panel_params, coord, ax): """ Compute paths that create the arrow heads Parameters ---------- x1, y1, x2, y2 : array_like List of points that define the tails of the arrows. The arrow heads will be at x1, y1. If you need them at x2, y2 reverse the input. Returns ------- out : list of Path Paths that create arrow heads """ Path = mpath.Path # Create reusable lists of vertices and codes # arrowhead path has 3 vertices (Nones), # plus dummy vertex for the STOP code verts = [None, None, None, (0, 0)] # codes list remains the same after initialization codes = [Path.MOVETO, Path.LINETO, Path.LINETO, Path.STOP] # Slices into the vertices list slc = slice(0, 3) # We need the plot dimensions so that we can # compute scaling factors fig = ax.get_figure() width, height = fig.get_size_inches() ranges = coord.range(panel_params) width_ = np.ptp(ranges.x) height_ = np.ptp(ranges.y) # scaling factors to prevent skewed arrowheads lx = self.length * width_/width ly = self.length * height_/height # angle in radians a = self.angle * np.pi / 180 # direction of arrow head xdiff, ydiff = x2 - x1, y2 - y1 rotations = np.arctan2(ydiff/ly, xdiff/lx) # Arrow head vertices v1x = x1 + lx * np.cos(rotations + a) v1y = y1 + ly * np.sin(rotations + a) v2x = x1 + lx * np.cos(rotations - a) v2y = y1 + ly * np.sin(rotations - a) # create a path for each arrow head paths = [] for t in zip(v1x, v1y, x1, y1, v2x, v2y): verts[slc] = [t[:2], t[2:4], t[4:]] paths.append(Path(verts, codes)) return paths
python
def get_paths(self, x1, y1, x2, y2, panel_params, coord, ax): Path = mpath.Path # Create reusable lists of vertices and codes # arrowhead path has 3 vertices (Nones), # plus dummy vertex for the STOP code verts = [None, None, None, (0, 0)] # codes list remains the same after initialization codes = [Path.MOVETO, Path.LINETO, Path.LINETO, Path.STOP] # Slices into the vertices list slc = slice(0, 3) # We need the plot dimensions so that we can # compute scaling factors fig = ax.get_figure() width, height = fig.get_size_inches() ranges = coord.range(panel_params) width_ = np.ptp(ranges.x) height_ = np.ptp(ranges.y) # scaling factors to prevent skewed arrowheads lx = self.length * width_/width ly = self.length * height_/height # angle in radians a = self.angle * np.pi / 180 # direction of arrow head xdiff, ydiff = x2 - x1, y2 - y1 rotations = np.arctan2(ydiff/ly, xdiff/lx) # Arrow head vertices v1x = x1 + lx * np.cos(rotations + a) v1y = y1 + ly * np.sin(rotations + a) v2x = x1 + lx * np.cos(rotations - a) v2y = y1 + ly * np.sin(rotations - a) # create a path for each arrow head paths = [] for t in zip(v1x, v1y, x1, y1, v2x, v2y): verts[slc] = [t[:2], t[2:4], t[4:]] paths.append(Path(verts, codes)) return paths
[ "def", "get_paths", "(", "self", ",", "x1", ",", "y1", ",", "x2", ",", "y2", ",", "panel_params", ",", "coord", ",", "ax", ")", ":", "Path", "=", "mpath", ".", "Path", "# Create reusable lists of vertices and codes", "# arrowhead path has 3 vertices (Nones),", "...
Compute paths that create the arrow heads Parameters ---------- x1, y1, x2, y2 : array_like List of points that define the tails of the arrows. The arrow heads will be at x1, y1. If you need them at x2, y2 reverse the input. Returns ------- out : list of Path Paths that create arrow heads
[ "Compute", "paths", "that", "create", "the", "arrow", "heads" ]
566e579af705367e584fb27a74e6c5199624ca89
https://github.com/has2k1/plotnine/blob/566e579af705367e584fb27a74e6c5199624ca89/plotnine/geoms/geom_path.py#L276-L338
233,386
has2k1/plotnine
plotnine/facets/facet.py
combine_vars
def combine_vars(data, environment=None, vars=None, drop=True): """ Base layout function that generates all combinations of data needed for facetting The first data frame in the list should be the default data for the plot. Other data frames in the list are ones that are added to the layers. """ if not vars: return pd.DataFrame() # For each layer, compute the facet values values = [eval_facet_vars(df, vars, environment) for df in data if df is not None] # Form the base data frame which contains all combinations # of facetting variables that appear in the data has_all = [x.shape[1] == len(vars) for x in values] if not any(has_all): raise PlotnineError( "At least one layer must contain all variables " + "used for facetting") base = pd.concat([x for i, x in enumerate(values) if has_all[i]], axis=0) base = base.drop_duplicates() if not drop: base = unique_combs(base) # sorts according to order of factor levels base = base.sort_values(list(base.columns)) # Systematically add on missing combinations for i, value in enumerate(values): if has_all[i] or len(value.columns) == 0: continue old = base.loc[:, base.columns - value.columns] new = value.loc[:, base.columns & value.columns].drop_duplicates() if not drop: new = unique_combs(new) base = base.append(cross_join(old, new), ignore_index=True) if len(base) == 0: raise PlotnineError( "Faceting variables must have at least one value") base = base.reset_index(drop=True) return base
python
def combine_vars(data, environment=None, vars=None, drop=True): if not vars: return pd.DataFrame() # For each layer, compute the facet values values = [eval_facet_vars(df, vars, environment) for df in data if df is not None] # Form the base data frame which contains all combinations # of facetting variables that appear in the data has_all = [x.shape[1] == len(vars) for x in values] if not any(has_all): raise PlotnineError( "At least one layer must contain all variables " + "used for facetting") base = pd.concat([x for i, x in enumerate(values) if has_all[i]], axis=0) base = base.drop_duplicates() if not drop: base = unique_combs(base) # sorts according to order of factor levels base = base.sort_values(list(base.columns)) # Systematically add on missing combinations for i, value in enumerate(values): if has_all[i] or len(value.columns) == 0: continue old = base.loc[:, base.columns - value.columns] new = value.loc[:, base.columns & value.columns].drop_duplicates() if not drop: new = unique_combs(new) base = base.append(cross_join(old, new), ignore_index=True) if len(base) == 0: raise PlotnineError( "Faceting variables must have at least one value") base = base.reset_index(drop=True) return base
[ "def", "combine_vars", "(", "data", ",", "environment", "=", "None", ",", "vars", "=", "None", ",", "drop", "=", "True", ")", ":", "if", "not", "vars", ":", "return", "pd", ".", "DataFrame", "(", ")", "# For each layer, compute the facet values", "values", ...
Base layout function that generates all combinations of data needed for facetting The first data frame in the list should be the default data for the plot. Other data frames in the list are ones that are added to the layers.
[ "Base", "layout", "function", "that", "generates", "all", "combinations", "of", "data", "needed", "for", "facetting", "The", "first", "data", "frame", "in", "the", "list", "should", "be", "the", "default", "data", "for", "the", "plot", ".", "Other", "data", ...
566e579af705367e584fb27a74e6c5199624ca89
https://github.com/has2k1/plotnine/blob/566e579af705367e584fb27a74e6c5199624ca89/plotnine/facets/facet.py#L556-L603
233,387
has2k1/plotnine
plotnine/facets/facet.py
unique_combs
def unique_combs(df): """ Return data frame with all possible combinations of the values in the columns """ # List of unique values from every column lst = (x.unique() for x in (df[c] for c in df)) rows = list(itertools.product(*lst)) _df = pd.DataFrame(rows, columns=df.columns) # preserve the column dtypes for col in df: _df[col] = _df[col].astype(df[col].dtype, copy=False) return _df
python
def unique_combs(df): # List of unique values from every column lst = (x.unique() for x in (df[c] for c in df)) rows = list(itertools.product(*lst)) _df = pd.DataFrame(rows, columns=df.columns) # preserve the column dtypes for col in df: _df[col] = _df[col].astype(df[col].dtype, copy=False) return _df
[ "def", "unique_combs", "(", "df", ")", ":", "# List of unique values from every column", "lst", "=", "(", "x", ".", "unique", "(", ")", "for", "x", "in", "(", "df", "[", "c", "]", "for", "c", "in", "df", ")", ")", "rows", "=", "list", "(", "itertools...
Return data frame with all possible combinations of the values in the columns
[ "Return", "data", "frame", "with", "all", "possible", "combinations", "of", "the", "values", "in", "the", "columns" ]
566e579af705367e584fb27a74e6c5199624ca89
https://github.com/has2k1/plotnine/blob/566e579af705367e584fb27a74e6c5199624ca89/plotnine/facets/facet.py#L606-L619
233,388
has2k1/plotnine
plotnine/facets/facet.py
eval_facet_vars
def eval_facet_vars(data, vars, env): """ Evaluate facet variables Parameters ---------- data : DataFrame Factet dataframe vars : list Facet variables env : environment Plot environment Returns ------- facet_vals : DataFrame Facet values that correspond to the specified variables. """ # To allow expressions in facet formula def I(value): return value env = env.with_outer_namespace({'I': I}) facet_vals = pd.DataFrame(index=data.index) for name in vars: if name in data: # This is a limited solution. If a keyword is # part of an expression it will fail in the # else statement below res = data[name] elif str.isidentifier(name): # All other non-statements continue else: # Statements try: res = env.eval(name, inner_namespace=data) except NameError: continue facet_vals[name] = res return facet_vals
python
def eval_facet_vars(data, vars, env): # To allow expressions in facet formula def I(value): return value env = env.with_outer_namespace({'I': I}) facet_vals = pd.DataFrame(index=data.index) for name in vars: if name in data: # This is a limited solution. If a keyword is # part of an expression it will fail in the # else statement below res = data[name] elif str.isidentifier(name): # All other non-statements continue else: # Statements try: res = env.eval(name, inner_namespace=data) except NameError: continue facet_vals[name] = res return facet_vals
[ "def", "eval_facet_vars", "(", "data", ",", "vars", ",", "env", ")", ":", "# To allow expressions in facet formula", "def", "I", "(", "value", ")", ":", "return", "value", "env", "=", "env", ".", "with_outer_namespace", "(", "{", "'I'", ":", "I", "}", ")",...
Evaluate facet variables Parameters ---------- data : DataFrame Factet dataframe vars : list Facet variables env : environment Plot environment Returns ------- facet_vals : DataFrame Facet values that correspond to the specified variables.
[ "Evaluate", "facet", "variables" ]
566e579af705367e584fb27a74e6c5199624ca89
https://github.com/has2k1/plotnine/blob/566e579af705367e584fb27a74e6c5199624ca89/plotnine/facets/facet.py#L654-L697
233,389
has2k1/plotnine
plotnine/facets/facet.py
facet.map
def map(self, data, layout): """ Assign a data points to panels Parameters ---------- data : DataFrame Data for a layer layout : DataFrame As returned by self.compute_layout Returns ------- data : DataFrame Data with all points mapped to the panels on which they will be plotted. """ msg = "{} should implement this method." raise NotImplementedError( msg.format(self.__class.__name__))
python
def map(self, data, layout): msg = "{} should implement this method." raise NotImplementedError( msg.format(self.__class.__name__))
[ "def", "map", "(", "self", ",", "data", ",", "layout", ")", ":", "msg", "=", "\"{} should implement this method.\"", "raise", "NotImplementedError", "(", "msg", ".", "format", "(", "self", ".", "__class", ".", "__name__", ")", ")" ]
Assign a data points to panels Parameters ---------- data : DataFrame Data for a layer layout : DataFrame As returned by self.compute_layout Returns ------- data : DataFrame Data with all points mapped to the panels on which they will be plotted.
[ "Assign", "a", "data", "points", "to", "panels" ]
566e579af705367e584fb27a74e6c5199624ca89
https://github.com/has2k1/plotnine/blob/566e579af705367e584fb27a74e6c5199624ca89/plotnine/facets/facet.py#L160-L179
233,390
has2k1/plotnine
plotnine/facets/facet.py
facet.train_position_scales
def train_position_scales(self, layout, layers): """ Compute ranges for the x and y scales """ _layout = layout.layout panel_scales_x = layout.panel_scales_x panel_scales_y = layout.panel_scales_y # loop over each layer, training x and y scales in turn for layer in layers: data = layer.data match_id = match(data['PANEL'], _layout['PANEL']) if panel_scales_x: x_vars = list(set(panel_scales_x[0].aesthetics) & set(data.columns)) # the scale index for each data point SCALE_X = _layout['SCALE_X'].iloc[match_id].tolist() panel_scales_x.train(data, x_vars, SCALE_X) if panel_scales_y: y_vars = list(set(panel_scales_y[0].aesthetics) & set(data.columns)) # the scale index for each data point SCALE_Y = _layout['SCALE_Y'].iloc[match_id].tolist() panel_scales_y.train(data, y_vars, SCALE_Y) return self
python
def train_position_scales(self, layout, layers): _layout = layout.layout panel_scales_x = layout.panel_scales_x panel_scales_y = layout.panel_scales_y # loop over each layer, training x and y scales in turn for layer in layers: data = layer.data match_id = match(data['PANEL'], _layout['PANEL']) if panel_scales_x: x_vars = list(set(panel_scales_x[0].aesthetics) & set(data.columns)) # the scale index for each data point SCALE_X = _layout['SCALE_X'].iloc[match_id].tolist() panel_scales_x.train(data, x_vars, SCALE_X) if panel_scales_y: y_vars = list(set(panel_scales_y[0].aesthetics) & set(data.columns)) # the scale index for each data point SCALE_Y = _layout['SCALE_Y'].iloc[match_id].tolist() panel_scales_y.train(data, y_vars, SCALE_Y) return self
[ "def", "train_position_scales", "(", "self", ",", "layout", ",", "layers", ")", ":", "_layout", "=", "layout", ".", "layout", "panel_scales_x", "=", "layout", ".", "panel_scales_x", "panel_scales_y", "=", "layout", ".", "panel_scales_y", "# loop over each layer, tra...
Compute ranges for the x and y scales
[ "Compute", "ranges", "for", "the", "x", "and", "y", "scales" ]
566e579af705367e584fb27a74e6c5199624ca89
https://github.com/has2k1/plotnine/blob/566e579af705367e584fb27a74e6c5199624ca89/plotnine/facets/facet.py#L210-L236
233,391
has2k1/plotnine
plotnine/facets/facet.py
facet._create_subplots
def _create_subplots(self, fig, layout): """ Create suplots and return axs """ num_panels = len(layout) axsarr = np.empty((self.nrow, self.ncol), dtype=object) # Create axes i = 1 for row in range(self.nrow): for col in range(self.ncol): axsarr[row, col] = fig.add_subplot(self.nrow, self.ncol, i) i += 1 # Rearrange axes # They are ordered to match the positions in the layout table if self.dir == 'h': order = 'C' if not self.as_table: axsarr = axsarr[::-1] elif self.dir == 'v': order = 'F' if not self.as_table: axsarr = np.array([row[::-1] for row in axsarr]) axs = axsarr.ravel(order) # Delete unused axes for ax in axs[num_panels:]: fig.delaxes(ax) axs = axs[:num_panels] return axs
python
def _create_subplots(self, fig, layout): num_panels = len(layout) axsarr = np.empty((self.nrow, self.ncol), dtype=object) # Create axes i = 1 for row in range(self.nrow): for col in range(self.ncol): axsarr[row, col] = fig.add_subplot(self.nrow, self.ncol, i) i += 1 # Rearrange axes # They are ordered to match the positions in the layout table if self.dir == 'h': order = 'C' if not self.as_table: axsarr = axsarr[::-1] elif self.dir == 'v': order = 'F' if not self.as_table: axsarr = np.array([row[::-1] for row in axsarr]) axs = axsarr.ravel(order) # Delete unused axes for ax in axs[num_panels:]: fig.delaxes(ax) axs = axs[:num_panels] return axs
[ "def", "_create_subplots", "(", "self", ",", "fig", ",", "layout", ")", ":", "num_panels", "=", "len", "(", "layout", ")", "axsarr", "=", "np", ".", "empty", "(", "(", "self", ".", "nrow", ",", "self", ".", "ncol", ")", ",", "dtype", "=", "object",...
Create suplots and return axs
[ "Create", "suplots", "and", "return", "axs" ]
566e579af705367e584fb27a74e6c5199624ca89
https://github.com/has2k1/plotnine/blob/566e579af705367e584fb27a74e6c5199624ca89/plotnine/facets/facet.py#L319-L350
233,392
has2k1/plotnine
plotnine/facets/facet.py
facet.make_axes
def make_axes(self, figure, layout, coordinates): """ Create and return Matplotlib axes """ axs = self._create_subplots(figure, layout) # Used for labelling the x and y axes, the first and # last axes according to how MPL creates them. self.first_ax = figure.axes[0] self.last_ax = figure.axes[-1] self.figure = figure self.axs = axs return axs
python
def make_axes(self, figure, layout, coordinates): axs = self._create_subplots(figure, layout) # Used for labelling the x and y axes, the first and # last axes according to how MPL creates them. self.first_ax = figure.axes[0] self.last_ax = figure.axes[-1] self.figure = figure self.axs = axs return axs
[ "def", "make_axes", "(", "self", ",", "figure", ",", "layout", ",", "coordinates", ")", ":", "axs", "=", "self", ".", "_create_subplots", "(", "figure", ",", "layout", ")", "# Used for labelling the x and y axes, the first and", "# last axes according to how MPL creates...
Create and return Matplotlib axes
[ "Create", "and", "return", "Matplotlib", "axes" ]
566e579af705367e584fb27a74e6c5199624ca89
https://github.com/has2k1/plotnine/blob/566e579af705367e584fb27a74e6c5199624ca89/plotnine/facets/facet.py#L352-L364
233,393
has2k1/plotnine
plotnine/facets/facet.py
facet.strip_size
def strip_size(self, location='top', num_lines=None): """ Breadth of the strip background in inches Parameters ---------- location : str in ``['top', 'right']`` Location of the strip text num_lines : int Number of text lines """ dpi = 72 theme = self.theme get_property = theme.themeables.property if location == 'right': strip_name = 'strip_text_y' num_lines = num_lines or self.num_vars_y else: strip_name = 'strip_text_x' num_lines = num_lines or self.num_vars_x if not num_lines: return 0 # The facet labels are placed onto the figure using # transAxes dimensions. The line height and line # width are mapped to the same [0, 1] range # i.e (pts) * (inches / pts) * (1 / inches) try: fontsize = get_property(strip_name, 'size') except KeyError: fontsize = float(theme.rcParams.get('font.size', 10)) try: linespacing = get_property(strip_name, 'linespacing') except KeyError: linespacing = 1 # margins on either side of the strip text m1, m2 = self.inner_strip_margins(location) # Using figure.dpi value here does not workout well! breadth = (linespacing*fontsize) * num_lines / dpi breadth = breadth + (m1 + m2) / dpi return breadth
python
def strip_size(self, location='top', num_lines=None): dpi = 72 theme = self.theme get_property = theme.themeables.property if location == 'right': strip_name = 'strip_text_y' num_lines = num_lines or self.num_vars_y else: strip_name = 'strip_text_x' num_lines = num_lines or self.num_vars_x if not num_lines: return 0 # The facet labels are placed onto the figure using # transAxes dimensions. The line height and line # width are mapped to the same [0, 1] range # i.e (pts) * (inches / pts) * (1 / inches) try: fontsize = get_property(strip_name, 'size') except KeyError: fontsize = float(theme.rcParams.get('font.size', 10)) try: linespacing = get_property(strip_name, 'linespacing') except KeyError: linespacing = 1 # margins on either side of the strip text m1, m2 = self.inner_strip_margins(location) # Using figure.dpi value here does not workout well! breadth = (linespacing*fontsize) * num_lines / dpi breadth = breadth + (m1 + m2) / dpi return breadth
[ "def", "strip_size", "(", "self", ",", "location", "=", "'top'", ",", "num_lines", "=", "None", ")", ":", "dpi", "=", "72", "theme", "=", "self", ".", "theme", "get_property", "=", "theme", ".", "themeables", ".", "property", "if", "location", "==", "'...
Breadth of the strip background in inches Parameters ---------- location : str in ``['top', 'right']`` Location of the strip text num_lines : int Number of text lines
[ "Breadth", "of", "the", "strip", "background", "in", "inches" ]
566e579af705367e584fb27a74e6c5199624ca89
https://github.com/has2k1/plotnine/blob/566e579af705367e584fb27a74e6c5199624ca89/plotnine/facets/facet.py#L392-L436
233,394
has2k1/plotnine
plotnine/facets/facet.py
facet.strip_dimensions
def strip_dimensions(self, text_lines, location, pid): """ Calculate the dimension Returns ------- out : types.SimpleNamespace A structure with all the coordinates required to draw the strip text and the background box. """ dpi = 72 num_lines = len(text_lines) get_property = self.theme.themeables.property ax = self.axs[pid] bbox = ax.get_window_extent().transformed( self.figure.dpi_scale_trans.inverted()) ax_width, ax_height = bbox.width, bbox.height # in inches strip_size = self.strip_size(location, num_lines) m1, m2 = self.inner_strip_margins(location) m1, m2 = m1/dpi, m2/dpi margin = 0 # default if location == 'right': box_x = 1 box_y = 0 box_width = strip_size/ax_width box_height = 1 # y & height properties of the background slide and # shrink the strip vertically. The y margin slides # it horizontally. with suppress(KeyError): box_y = get_property('strip_background_y', 'y') with suppress(KeyError): box_height = get_property('strip_background_y', 'height') with suppress(KeyError): margin = get_property('strip_margin_y') x = 1 + (strip_size-m2+m1) / (2*ax_width) y = (2*box_y+box_height)/2 # margin adjustment hslide = 1 + margin*strip_size/ax_width x *= hslide box_x *= hslide else: box_x = 0 box_y = 1 box_width = 1 box_height = strip_size/ax_height # x & width properties of the background slide and # shrink the strip horizontally. The y margin slides # it vertically. with suppress(KeyError): box_x = get_property('strip_background_x', 'x') with suppress(KeyError): box_width = get_property('strip_background_x', 'width') with suppress(KeyError): margin = get_property('strip_margin_x') x = (2*box_x+box_width)/2 y = 1 + (strip_size-m1+m2)/(2*ax_height) # margin adjustment vslide = 1 + margin*strip_size/ax_height y *= vslide box_y *= vslide dimensions = types.SimpleNamespace( x=x, y=y, box_x=box_x, box_y=box_y, box_width=box_width, box_height=box_height) return dimensions
python
def strip_dimensions(self, text_lines, location, pid): dpi = 72 num_lines = len(text_lines) get_property = self.theme.themeables.property ax = self.axs[pid] bbox = ax.get_window_extent().transformed( self.figure.dpi_scale_trans.inverted()) ax_width, ax_height = bbox.width, bbox.height # in inches strip_size = self.strip_size(location, num_lines) m1, m2 = self.inner_strip_margins(location) m1, m2 = m1/dpi, m2/dpi margin = 0 # default if location == 'right': box_x = 1 box_y = 0 box_width = strip_size/ax_width box_height = 1 # y & height properties of the background slide and # shrink the strip vertically. The y margin slides # it horizontally. with suppress(KeyError): box_y = get_property('strip_background_y', 'y') with suppress(KeyError): box_height = get_property('strip_background_y', 'height') with suppress(KeyError): margin = get_property('strip_margin_y') x = 1 + (strip_size-m2+m1) / (2*ax_width) y = (2*box_y+box_height)/2 # margin adjustment hslide = 1 + margin*strip_size/ax_width x *= hslide box_x *= hslide else: box_x = 0 box_y = 1 box_width = 1 box_height = strip_size/ax_height # x & width properties of the background slide and # shrink the strip horizontally. The y margin slides # it vertically. with suppress(KeyError): box_x = get_property('strip_background_x', 'x') with suppress(KeyError): box_width = get_property('strip_background_x', 'width') with suppress(KeyError): margin = get_property('strip_margin_x') x = (2*box_x+box_width)/2 y = 1 + (strip_size-m1+m2)/(2*ax_height) # margin adjustment vslide = 1 + margin*strip_size/ax_height y *= vslide box_y *= vslide dimensions = types.SimpleNamespace( x=x, y=y, box_x=box_x, box_y=box_y, box_width=box_width, box_height=box_height) return dimensions
[ "def", "strip_dimensions", "(", "self", ",", "text_lines", ",", "location", ",", "pid", ")", ":", "dpi", "=", "72", "num_lines", "=", "len", "(", "text_lines", ")", "get_property", "=", "self", ".", "theme", ".", "themeables", ".", "property", "ax", "=",...
Calculate the dimension Returns ------- out : types.SimpleNamespace A structure with all the coordinates required to draw the strip text and the background box.
[ "Calculate", "the", "dimension" ]
566e579af705367e584fb27a74e6c5199624ca89
https://github.com/has2k1/plotnine/blob/566e579af705367e584fb27a74e6c5199624ca89/plotnine/facets/facet.py#L438-L505
233,395
has2k1/plotnine
plotnine/facets/facet.py
facet.draw_strip_text
def draw_strip_text(self, text_lines, location, pid): """ Create a background patch and put a label on it """ ax = self.axs[pid] themeable = self.figure._themeable dim = self.strip_dimensions(text_lines, location, pid) if location == 'right': rotation = -90 label = '\n'.join(reversed(text_lines)) else: rotation = 0 label = '\n'.join(text_lines) rect = mpatch.FancyBboxPatch((dim.box_x, dim.box_y), width=dim.box_width, height=dim.box_height, facecolor='lightgrey', edgecolor='None', transform=ax.transAxes, zorder=2.2, # > ax line & boundary boxstyle='square, pad=0', clip_on=False) text = mtext.Text(dim.x, dim.y, label, rotation=rotation, verticalalignment='center', horizontalalignment='center', transform=ax.transAxes, zorder=3.3, # > rect clip_on=False) ax.add_artist(rect) ax.add_artist(text) for key in ('strip_text_x', 'strip_text_y', 'strip_background_x', 'strip_background_y'): if key not in themeable: themeable[key] = [] if location == 'right': themeable['strip_background_y'].append(rect) themeable['strip_text_y'].append(text) else: themeable['strip_background_x'].append(rect) themeable['strip_text_x'].append(text)
python
def draw_strip_text(self, text_lines, location, pid): ax = self.axs[pid] themeable = self.figure._themeable dim = self.strip_dimensions(text_lines, location, pid) if location == 'right': rotation = -90 label = '\n'.join(reversed(text_lines)) else: rotation = 0 label = '\n'.join(text_lines) rect = mpatch.FancyBboxPatch((dim.box_x, dim.box_y), width=dim.box_width, height=dim.box_height, facecolor='lightgrey', edgecolor='None', transform=ax.transAxes, zorder=2.2, # > ax line & boundary boxstyle='square, pad=0', clip_on=False) text = mtext.Text(dim.x, dim.y, label, rotation=rotation, verticalalignment='center', horizontalalignment='center', transform=ax.transAxes, zorder=3.3, # > rect clip_on=False) ax.add_artist(rect) ax.add_artist(text) for key in ('strip_text_x', 'strip_text_y', 'strip_background_x', 'strip_background_y'): if key not in themeable: themeable[key] = [] if location == 'right': themeable['strip_background_y'].append(rect) themeable['strip_text_y'].append(text) else: themeable['strip_background_x'].append(rect) themeable['strip_text_x'].append(text)
[ "def", "draw_strip_text", "(", "self", ",", "text_lines", ",", "location", ",", "pid", ")", ":", "ax", "=", "self", ".", "axs", "[", "pid", "]", "themeable", "=", "self", ".", "figure", ".", "_themeable", "dim", "=", "self", ".", "strip_dimensions", "(...
Create a background patch and put a label on it
[ "Create", "a", "background", "patch", "and", "put", "a", "label", "on", "it" ]
566e579af705367e584fb27a74e6c5199624ca89
https://github.com/has2k1/plotnine/blob/566e579af705367e584fb27a74e6c5199624ca89/plotnine/facets/facet.py#L507-L553
233,396
has2k1/plotnine
plotnine/facets/facet.py
MyFixedFormatter.format_data
def format_data(self, value): """ Return a formatted string representation of a number. """ s = locale.format_string('%1.10e', (value,)) return self.fix_minus(s)
python
def format_data(self, value): s = locale.format_string('%1.10e', (value,)) return self.fix_minus(s)
[ "def", "format_data", "(", "self", ",", "value", ")", ":", "s", "=", "locale", ".", "format_string", "(", "'%1.10e'", ",", "(", "value", ",", ")", ")", "return", "self", ".", "fix_minus", "(", "s", ")" ]
Return a formatted string representation of a number.
[ "Return", "a", "formatted", "string", "representation", "of", "a", "number", "." ]
566e579af705367e584fb27a74e6c5199624ca89
https://github.com/has2k1/plotnine/blob/566e579af705367e584fb27a74e6c5199624ca89/plotnine/facets/facet.py#L701-L706
233,397
has2k1/plotnine
plotnine/guides/guide_colorbar.py
add_interpolated_colorbar
def add_interpolated_colorbar(da, colors, direction): """ Add 'rastered' colorbar to DrawingArea """ # Special case that arises due to not so useful # aesthetic mapping. if len(colors) == 1: colors = [colors[0], colors[0]] # Number of horizontal egdes(breaks) in the grid # No need to create more nbreak than colors, provided # no. of colors = no. of breaks = no. of cmap colors # the shading does a perfect interpolation nbreak = len(colors) if direction == 'vertical': mesh_width = 1 mesh_height = nbreak-1 linewidth = da.height/mesh_height # Construct rectangular meshgrid # The values(Z) at each vertex are just the # normalized (onto [0, 1]) vertical distance x = np.array([0, da.width]) y = np.arange(0, nbreak) * linewidth X, Y = np.meshgrid(x, y) Z = Y/y.max() else: mesh_width = nbreak-1 mesh_height = 1 linewidth = da.width/mesh_width x = np.arange(0, nbreak) * linewidth y = np.array([0, da.height]) X, Y = np.meshgrid(x, y) Z = X/x.max() # As a 2D coordinates array coordinates = np.zeros( ((mesh_width+1)*(mesh_height+1), 2), dtype=float) coordinates[:, 0] = X.ravel() coordinates[:, 1] = Y.ravel() cmap = ListedColormap(colors) coll = mcoll.QuadMesh(mesh_width, mesh_height, coordinates, antialiased=False, shading='gouraud', linewidth=0, cmap=cmap, array=Z.ravel()) da.add_artist(coll)
python
def add_interpolated_colorbar(da, colors, direction):
    # Add a 'rastered' (gouraud-interpolated) colorbar to *da*.
    # Degenerate palette: duplicate the lone colour so the mesh has
    # at least one segment.
    if len(colors) == 1:
        colors = [colors[0], colors[0]]

    nbreak = len(colors)  # one mesh break (edge row) per colour

    if direction == 'vertical':
        ncols, nrows = 1, nbreak - 1
        x = np.array([0, da.width])
        y = np.arange(0, nbreak) * (da.height / nrows)
    else:
        ncols, nrows = nbreak - 1, 1
        x = np.arange(0, nbreak) * (da.width / ncols)
        y = np.array([0, da.height])

    X, Y = np.meshgrid(x, y)
    # Vertex values: distance along the bar, normalised onto [0, 1];
    # matching break and colour counts makes the shading interpolate
    # the colormap exactly.
    Z = Y / y.max() if direction == 'vertical' else X / x.max()

    # Vertices as a flat (n, 2) coordinate array.
    coordinates = np.zeros(((ncols + 1) * (nrows + 1), 2), dtype=float)
    coordinates[:, 0] = X.ravel()
    coordinates[:, 1] = Y.ravel()

    coll = mcoll.QuadMesh(ncols, nrows, coordinates,
                          antialiased=False, shading='gouraud',
                          linewidth=0, cmap=ListedColormap(colors),
                          array=Z.ravel())
    da.add_artist(coll)
[ "def", "add_interpolated_colorbar", "(", "da", ",", "colors", ",", "direction", ")", ":", "# Special case that arises due to not so useful", "# aesthetic mapping.", "if", "len", "(", "colors", ")", "==", "1", ":", "colors", "=", "[", "colors", "[", "0", "]", ","...
Add 'rastered' colorbar to DrawingArea
[ "Add", "rastered", "colorbar", "to", "DrawingArea" ]
566e579af705367e584fb27a74e6c5199624ca89
https://github.com/has2k1/plotnine/blob/566e579af705367e584fb27a74e6c5199624ca89/plotnine/guides/guide_colorbar.py#L242-L292
233,398
has2k1/plotnine
plotnine/guides/guide_colorbar.py
add_segmented_colorbar
def add_segmented_colorbar(da, colors, direction):
    """
    Add 'non-rastered' colorbar to DrawingArea
    """
    n = len(colors)
    if direction == 'vertical':
        # Stack n rectangles bottom-to-top, each da.height/n tall.
        step = da.height / n
        left, right = 0, da.width
        verts = [((left, i * step),
                  (left, (i + 1) * step),
                  (right, (i + 1) * step),
                  (right, i * step))
                 for i in range(n)]
    else:
        # Lay n rectangles left-to-right, each da.width/n wide.
        step = da.width / n
        bottom, top = 0, da.height
        verts = [((i * step, bottom),
                  (i * step, top),
                  ((i + 1) * step, top),
                  ((i + 1) * step, bottom))
                 for i in range(n)]
    coll = mcoll.PolyCollection(verts,
                                facecolors=colors,
                                linewidth=0,
                                antialiased=False)
    da.add_artist(coll)
python
def add_segmented_colorbar(da, colors, direction):
    # Add a 'non-rastered' colorbar: one flat rectangle per colour.
    n = len(colors)
    verts = []
    if direction == 'vertical':
        size = da.height / n
        for i in range(n):
            lo, hi = i * size, (i + 1) * size
            verts.append(((0, lo), (0, hi),
                          (da.width, hi), (da.width, lo)))
    else:
        size = da.width / n
        for i in range(n):
            lo, hi = i * size, (i + 1) * size
            verts.append(((lo, 0), (lo, da.height),
                          (hi, da.height), (hi, 0)))
    coll = mcoll.PolyCollection(verts, facecolors=colors,
                                linewidth=0, antialiased=False)
    da.add_artist(coll)
[ "def", "add_segmented_colorbar", "(", "da", ",", "colors", ",", "direction", ")", ":", "nbreak", "=", "len", "(", "colors", ")", "if", "direction", "==", "'vertical'", ":", "linewidth", "=", "da", ".", "height", "/", "nbreak", "verts", "=", "[", "None", ...
Add 'non-rastered' colorbar to DrawingArea
[ "Add", "non", "-", "rastered", "colorbar", "to", "DrawingArea" ]
566e579af705367e584fb27a74e6c5199624ca89
https://github.com/has2k1/plotnine/blob/566e579af705367e584fb27a74e6c5199624ca89/plotnine/guides/guide_colorbar.py#L295-L321
233,399
has2k1/plotnine
plotnine/guides/guide_colorbar.py
create_labels
def create_labels(da, labels, locations, direction):
    """
    Return an OffsetBox with label texts
    """
    # Box extents are determined by the text artists themselves, so
    # two empty placeholder texts at the far ends guarantee that,
    # when center-packed, the labels line up with the ticks.
    fontsize = 9
    labels_box = MyAuxTransformBox(mtransforms.IdentityTransform())

    # Default geometry is for a vertical bar; flip for horizontal.
    xs, ys = [0] * len(labels), locations
    ha, va = 'left', 'center'
    x1, y1 = 0, 0
    x2, y2 = 0, da.height
    if direction == 'horizontal':
        xs, ys = ys, xs
        ha, va = 'center', 'top'
        x2, y2 = da.width, 0

    # Invisible anchors at both ends of the bar.
    for x, y in ((x1, y1), (x2, y2)):
        labels_box.add_artist(
            mtext.Text(x, y, '', horizontalalignment=ha,
                       verticalalignment=va))

    # The visible tick labels.
    legend_text = []
    for x, y, text in zip(xs, ys, labels):
        txt = mtext.Text(x, y, text, size=fontsize,
                         horizontalalignment=ha,
                         verticalalignment=va)
        labels_box.add_artist(txt)
        legend_text.append(txt)
    return labels_box, legend_text
python
def create_labels(da, labels, locations, direction):
    # Build an OffsetBox holding the tick-label texts.
    # The box is sized by its text children; two empty placeholder
    # texts at the extreme ends ensure that center-packing aligns the
    # real labels with the tick locations.
    fontsize = 9
    aux_transform = mtransforms.IdentityTransform()
    labels_box = MyAuxTransformBox(aux_transform)

    if direction == 'horizontal':
        xs, ys = locations, [0] * len(labels)
        ha, va = 'center', 'top'
        ends = ((0, 0), (da.width, 0))
    else:
        xs, ys = [0] * len(labels), locations
        ha, va = 'left', 'center'
        ends = ((0, 0), (0, da.height))

    # Empty anchor texts at both ends.
    for ex, ey in ends:
        labels_box.add_artist(
            mtext.Text(ex, ey, '', horizontalalignment=ha,
                       verticalalignment=va))

    legend_text = []
    for x, y, text in zip(xs, ys, labels):
        txt = mtext.Text(x, y, text, size=fontsize,
                         horizontalalignment=ha, verticalalignment=va)
        labels_box.add_artist(txt)
        legend_text.append(txt)
    return labels_box, legend_text
[ "def", "create_labels", "(", "da", ",", "labels", ",", "locations", ",", "direction", ")", ":", "# The box dimensions are determined by the size of", "# the text objects. We put two dummy children at", "# either end to gaurantee that when center packed", "# the labels in the labels_box...
Return an OffsetBox with label texts
[ "Return", "an", "OffsetBox", "with", "label", "texts" ]
566e579af705367e584fb27a74e6c5199624ca89
https://github.com/has2k1/plotnine/blob/566e579af705367e584fb27a74e6c5199624ca89/plotnine/guides/guide_colorbar.py#L344-L381