repository_name
stringlengths
5
67
func_path_in_repository
stringlengths
4
234
func_name
stringlengths
0
314
whole_func_string
stringlengths
52
3.87M
language
stringclasses
6 values
func_code_string
stringlengths
52
3.87M
func_code_tokens
listlengths
15
672k
func_documentation_string
stringlengths
1
47.2k
func_documentation_tokens
listlengths
1
3.92k
split_name
stringclasses
1 value
func_code_url
stringlengths
85
339
ianclegg/winrmlib
winrmlib/api/service.py
Service.invoke
def invoke(self, headers, body): """ Invokes the soap service """ xml = Service._create_request(headers, body) try: response = self.session.post(self.endpoint, verify=False, data=xml) logging.debug(response.content) except Exception as e: traceback.print_exc() raise WSManException(e) if response.status_code == 200: return Service._parse_response(response.content) if response.status_code == 401: raise WSManAuthenticationException('the remote host rejected authentication') raise WSManException('the remote host returned an unexpected http status code: %s' % response.status_code)
python
def invoke(self, headers, body): """ Invokes the soap service """ xml = Service._create_request(headers, body) try: response = self.session.post(self.endpoint, verify=False, data=xml) logging.debug(response.content) except Exception as e: traceback.print_exc() raise WSManException(e) if response.status_code == 200: return Service._parse_response(response.content) if response.status_code == 401: raise WSManAuthenticationException('the remote host rejected authentication') raise WSManException('the remote host returned an unexpected http status code: %s' % response.status_code)
[ "def", "invoke", "(", "self", ",", "headers", ",", "body", ")", ":", "xml", "=", "Service", ".", "_create_request", "(", "headers", ",", "body", ")", "try", ":", "response", "=", "self", ".", "session", ".", "post", "(", "self", ".", "endpoint", ",",...
Invokes the soap service
[ "Invokes", "the", "soap", "service" ]
train
https://github.com/ianclegg/winrmlib/blob/489b3ce5d0e6a9a7301ba5d345ba82fa824c1431/winrmlib/api/service.py#L55-L74
ianclegg/winrmlib
winrmlib/api/service.py
Service._determine_auth_mechanism
def _determine_auth_mechanism(username, password, delegation): """ if the username contains at '@' sign we will use kerberos if the username contains a '/ we will use ntlm either NTLM or Kerberos. In fact its basically always Negotiate. """ if re.match('(.*)@(.+)', username) is not None: if delegation is True: raise Exception('Kerberos is not yet supported, specify the username in <domain>\<username> form for NTLM') else: raise Exception('Kerberos is not yet supported, specify the username in <domain>>\<username> form for NTLM') # check for NT format 'domain\username' a blank domain or username is invalid legacy = re.match('(.*)\\\\(.*)', username) if legacy is not None: if not legacy.group(1): raise Exception('Please specify the Windows domain for user in <domain>\<username> format') if not legacy.group(2): raise Exception('Please specify the Username of the user in <domain>\<username> format') if delegation is True: return HttpCredSSPAuth(legacy.group(1), legacy.group(2), password) else: return HttpNtlmAuth(legacy.group(1), legacy.group(2), password) #return HttpCredSSPAuth("SERVER2012", "Administrator", password) # attempt NTLM (local account, not domain) - if username is '' then we try anonymous NTLM auth # as if anyone will configure that - uf! return HttpNtlmAuth('', username, password)
python
def _determine_auth_mechanism(username, password, delegation): """ if the username contains at '@' sign we will use kerberos if the username contains a '/ we will use ntlm either NTLM or Kerberos. In fact its basically always Negotiate. """ if re.match('(.*)@(.+)', username) is not None: if delegation is True: raise Exception('Kerberos is not yet supported, specify the username in <domain>\<username> form for NTLM') else: raise Exception('Kerberos is not yet supported, specify the username in <domain>>\<username> form for NTLM') # check for NT format 'domain\username' a blank domain or username is invalid legacy = re.match('(.*)\\\\(.*)', username) if legacy is not None: if not legacy.group(1): raise Exception('Please specify the Windows domain for user in <domain>\<username> format') if not legacy.group(2): raise Exception('Please specify the Username of the user in <domain>\<username> format') if delegation is True: return HttpCredSSPAuth(legacy.group(1), legacy.group(2), password) else: return HttpNtlmAuth(legacy.group(1), legacy.group(2), password) #return HttpCredSSPAuth("SERVER2012", "Administrator", password) # attempt NTLM (local account, not domain) - if username is '' then we try anonymous NTLM auth # as if anyone will configure that - uf! return HttpNtlmAuth('', username, password)
[ "def", "_determine_auth_mechanism", "(", "username", ",", "password", ",", "delegation", ")", ":", "if", "re", ".", "match", "(", "'(.*)@(.+)'", ",", "username", ")", "is", "not", "None", ":", "if", "delegation", "is", "True", ":", "raise", "Exception", "(...
if the username contains at '@' sign we will use kerberos if the username contains a '/ we will use ntlm either NTLM or Kerberos. In fact its basically always Negotiate.
[ "if", "the", "username", "contains", "at" ]
train
https://github.com/ianclegg/winrmlib/blob/489b3ce5d0e6a9a7301ba5d345ba82fa824c1431/winrmlib/api/service.py#L77-L104
ianclegg/winrmlib
winrmlib/api/service.py
Service._create_request
def _create_request(headers, body): """ Create the SOAP 1.2 Envelope An ordered dictionary is required to ensure the same order is reflected in the XML, otherwise the SOAP Body element would appear before the Header element. """ envelope = OrderedDict() for (namespace, alias) in Service.Namespaces.items(): envelope['@xmlns:' + alias] = namespace envelope['soap:Header'] = headers envelope['soap:Body'] = body return xmltodict.unparse({'soap:Envelope': envelope}, encoding='utf-8')
python
def _create_request(headers, body): """ Create the SOAP 1.2 Envelope An ordered dictionary is required to ensure the same order is reflected in the XML, otherwise the SOAP Body element would appear before the Header element. """ envelope = OrderedDict() for (namespace, alias) in Service.Namespaces.items(): envelope['@xmlns:' + alias] = namespace envelope['soap:Header'] = headers envelope['soap:Body'] = body return xmltodict.unparse({'soap:Envelope': envelope}, encoding='utf-8')
[ "def", "_create_request", "(", "headers", ",", "body", ")", ":", "envelope", "=", "OrderedDict", "(", ")", "for", "(", "namespace", ",", "alias", ")", "in", "Service", ".", "Namespaces", ".", "items", "(", ")", ":", "envelope", "[", "'@xmlns:'", "+", "...
Create the SOAP 1.2 Envelope An ordered dictionary is required to ensure the same order is reflected in the XML, otherwise the SOAP Body element would appear before the Header element.
[ "Create", "the", "SOAP", "1", ".", "2", "Envelope", "An", "ordered", "dictionary", "is", "required", "to", "ensure", "the", "same", "order", "is", "reflected", "in", "the", "XML", "otherwise", "the", "SOAP", "Body", "element", "would", "appear", "before", ...
train
https://github.com/ianclegg/winrmlib/blob/489b3ce5d0e6a9a7301ba5d345ba82fa824c1431/winrmlib/api/service.py#L107-L118
ianclegg/winrmlib
winrmlib/api/service.py
Service._parse_response
def _parse_response(xml): """ Attempt to parse the SOAP response and return a python object Raise a WSManException if a Fault is found """ try: soap_response = xmltodict.parse(xml, process_namespaces=True, namespaces=Service.Namespaces) except Exception: logging.debug('unable to parse the xml response: %s', xml) raise WSManException("the remote host returned an invalid soap response") # the delete response has an empty body body = soap_response['soap:Envelope']['soap:Body'] if body is not None and 'soap:Fault' in body: raise WSManOperationException(body['soap:Fault']['soap:Reason']['soap:Text']['#text']) return body
python
def _parse_response(xml): """ Attempt to parse the SOAP response and return a python object Raise a WSManException if a Fault is found """ try: soap_response = xmltodict.parse(xml, process_namespaces=True, namespaces=Service.Namespaces) except Exception: logging.debug('unable to parse the xml response: %s', xml) raise WSManException("the remote host returned an invalid soap response") # the delete response has an empty body body = soap_response['soap:Envelope']['soap:Body'] if body is not None and 'soap:Fault' in body: raise WSManOperationException(body['soap:Fault']['soap:Reason']['soap:Text']['#text']) return body
[ "def", "_parse_response", "(", "xml", ")", ":", "try", ":", "soap_response", "=", "xmltodict", ".", "parse", "(", "xml", ",", "process_namespaces", "=", "True", ",", "namespaces", "=", "Service", ".", "Namespaces", ")", "except", "Exception", ":", "logging",...
Attempt to parse the SOAP response and return a python object Raise a WSManException if a Fault is found
[ "Attempt", "to", "parse", "the", "SOAP", "response", "and", "return", "a", "python", "object", "Raise", "a", "WSManException", "if", "a", "Fault", "is", "found" ]
train
https://github.com/ianclegg/winrmlib/blob/489b3ce5d0e6a9a7301ba5d345ba82fa824c1431/winrmlib/api/service.py#L121-L136
arve0/fijibin
fijibin/__init__.py
fetch
def fetch(force=False): """Fetch and extract latest Life-Line version of Fiji is just ImageJ to *~/.bin*. Parameters ---------- force : bool Force overwrite of existing Fiji in *~/.bin*. """ try: # python 2 from urllib2 import urlopen, HTTPError, URLError except ImportError: # python 3 from urllib.request import urlopen, HTTPError, URLError if os.path.isdir(FIJI_ROOT) and not force: return elif not os.path.isdir(FIJI_ROOT): print('Fiji missing in %s' % FIJI_ROOT) if force: print('Deleting %s' % FIJI_ROOT) shutil.rmtree(FIJI_ROOT, ignore_errors=True) print('Downloading fiji from %s' % URL) try: req = urlopen(URL) try: size = int(req.info()['content-length']) except AttributeError: size = -1 chunk = 512*1024 fp = BytesIO() i = 0 while 1: data = req.read(chunk) if not data: break fp.write(data) if size > 0: percent = fp.tell() // (size/100) msg = 'Downloaded %d percent \r' % percent else: msg = 'Downloaded %d bytes\r' % fp.tell() sys.stdout.write(msg) except (HTTPError, URLError) as e: print('Error getting fiji: {}'.format(e)) sys.exit(1) try: print('\nExtracting zip') z = ZipFile(fp) z.extractall(BIN_FOLDER) # move to Fiji-VERSION.app to easily check if it exists (upon fijibin upgrade) os.rename(EXTRACT_FOLDER, FIJI_ROOT) except (BadZipFile, IOError) as e: print('Error extracting zip: {}'.format(e)) sys.exit(1) for path in BIN_NAMES.values(): st = os.stat(path) os.chmod(path, st.st_mode | stat.S_IEXEC)
python
def fetch(force=False): """Fetch and extract latest Life-Line version of Fiji is just ImageJ to *~/.bin*. Parameters ---------- force : bool Force overwrite of existing Fiji in *~/.bin*. """ try: # python 2 from urllib2 import urlopen, HTTPError, URLError except ImportError: # python 3 from urllib.request import urlopen, HTTPError, URLError if os.path.isdir(FIJI_ROOT) and not force: return elif not os.path.isdir(FIJI_ROOT): print('Fiji missing in %s' % FIJI_ROOT) if force: print('Deleting %s' % FIJI_ROOT) shutil.rmtree(FIJI_ROOT, ignore_errors=True) print('Downloading fiji from %s' % URL) try: req = urlopen(URL) try: size = int(req.info()['content-length']) except AttributeError: size = -1 chunk = 512*1024 fp = BytesIO() i = 0 while 1: data = req.read(chunk) if not data: break fp.write(data) if size > 0: percent = fp.tell() // (size/100) msg = 'Downloaded %d percent \r' % percent else: msg = 'Downloaded %d bytes\r' % fp.tell() sys.stdout.write(msg) except (HTTPError, URLError) as e: print('Error getting fiji: {}'.format(e)) sys.exit(1) try: print('\nExtracting zip') z = ZipFile(fp) z.extractall(BIN_FOLDER) # move to Fiji-VERSION.app to easily check if it exists (upon fijibin upgrade) os.rename(EXTRACT_FOLDER, FIJI_ROOT) except (BadZipFile, IOError) as e: print('Error extracting zip: {}'.format(e)) sys.exit(1) for path in BIN_NAMES.values(): st = os.stat(path) os.chmod(path, st.st_mode | stat.S_IEXEC)
[ "def", "fetch", "(", "force", "=", "False", ")", ":", "try", ":", "# python 2", "from", "urllib2", "import", "urlopen", ",", "HTTPError", ",", "URLError", "except", "ImportError", ":", "# python 3", "from", "urllib", ".", "request", "import", "urlopen", ",",...
Fetch and extract latest Life-Line version of Fiji is just ImageJ to *~/.bin*. Parameters ---------- force : bool Force overwrite of existing Fiji in *~/.bin*.
[ "Fetch", "and", "extract", "latest", "Life", "-", "Line", "version", "of", "Fiji", "is", "just", "ImageJ", "to", "*", "~", "/", ".", "bin", "*", "." ]
train
https://github.com/arve0/fijibin/blob/a3d2e983cb9ff2bcbb56a800084bc3b35cb9292f/fijibin/__init__.py#L56-L120
yourcelf/escapejson
escapejson/templatetags/escapejson.py
escapejson_filter
def escapejson_filter(value): """ Escape `value` to prevent </script> and unicode whitespace attacks. If `value` is not a string, JSON-encode it first. """ if isinstance(value, six.string_types): string = value else: string = json.dumps(value, cls=DjangoJSONEncoder) return mark_safe(escapejson(string))
python
def escapejson_filter(value): """ Escape `value` to prevent </script> and unicode whitespace attacks. If `value` is not a string, JSON-encode it first. """ if isinstance(value, six.string_types): string = value else: string = json.dumps(value, cls=DjangoJSONEncoder) return mark_safe(escapejson(string))
[ "def", "escapejson_filter", "(", "value", ")", ":", "if", "isinstance", "(", "value", ",", "six", ".", "string_types", ")", ":", "string", "=", "value", "else", ":", "string", "=", "json", ".", "dumps", "(", "value", ",", "cls", "=", "DjangoJSONEncoder",...
Escape `value` to prevent </script> and unicode whitespace attacks. If `value` is not a string, JSON-encode it first.
[ "Escape", "value", "to", "prevent", "<", "/", "script", ">", "and", "unicode", "whitespace", "attacks", ".", "If", "value", "is", "not", "a", "string", "JSON", "-", "encode", "it", "first", "." ]
train
https://github.com/yourcelf/escapejson/blob/5b39160ae619542cc16db7a443b752d64ff5c416/escapejson/templatetags/escapejson.py#L12-L21
nimeshkverma/mongo_joins
mongojoin/mongocollection.py
MongoCollection.get_mongo_cursor
def get_mongo_cursor(self, bulk=False): """ Returns Mongo cursor using the class variables :param bulk: bulk writer option :type bulk: boolean :return: mongo collection for which cursor will be created :rtype: mongo colection object """ try: if self.host: if self.port: client = MongoClient(self.host, self.port) else: client = MongoClient( self.host, MongoCollection.DEFAULT_PORT) else: client = MongoClient(self.mongo_uri) db = client[self.db_name] cursor = db[self.collection] if bulk: try: return cursor.initialize_unordered_bulk_op() except Exception as e: msg = "Mongo Bulk cursor could not be fetched, Error: {error}".format( error=str(e)) raise Exception(msg) return cursor except Exception as e: msg = "Mongo Connection could not be established for Mongo Uri: {mongo_uri}, Database: {db_name}, Collection {col}, Error: {error}".format( mongo_uri=self.mongo_uri, db_name=self.db_name, col=self.collection, error=str(e)) raise Exception(msg)
python
def get_mongo_cursor(self, bulk=False): """ Returns Mongo cursor using the class variables :param bulk: bulk writer option :type bulk: boolean :return: mongo collection for which cursor will be created :rtype: mongo colection object """ try: if self.host: if self.port: client = MongoClient(self.host, self.port) else: client = MongoClient( self.host, MongoCollection.DEFAULT_PORT) else: client = MongoClient(self.mongo_uri) db = client[self.db_name] cursor = db[self.collection] if bulk: try: return cursor.initialize_unordered_bulk_op() except Exception as e: msg = "Mongo Bulk cursor could not be fetched, Error: {error}".format( error=str(e)) raise Exception(msg) return cursor except Exception as e: msg = "Mongo Connection could not be established for Mongo Uri: {mongo_uri}, Database: {db_name}, Collection {col}, Error: {error}".format( mongo_uri=self.mongo_uri, db_name=self.db_name, col=self.collection, error=str(e)) raise Exception(msg)
[ "def", "get_mongo_cursor", "(", "self", ",", "bulk", "=", "False", ")", ":", "try", ":", "if", "self", ".", "host", ":", "if", "self", ".", "port", ":", "client", "=", "MongoClient", "(", "self", ".", "host", ",", "self", ".", "port", ")", "else", ...
Returns Mongo cursor using the class variables :param bulk: bulk writer option :type bulk: boolean :return: mongo collection for which cursor will be created :rtype: mongo colection object
[ "Returns", "Mongo", "cursor", "using", "the", "class", "variables" ]
train
https://github.com/nimeshkverma/mongo_joins/blob/64c416c3402d5906f707b73867fbc55e28d5ec37/mongojoin/mongocollection.py#L58-L95
nimeshkverma/mongo_joins
mongojoin/mongocollection.py
MongoCollection.bulk_cursor_execute
def bulk_cursor_execute(self, bulk_cursor): """ Executes the bulk_cursor :param bulk_cursor: Cursor to perform bulk operations :type bulk_cursor: pymongo bulk cursor object :returns: pymongo bulk cursor object (for bulk operations) """ try: result = bulk_cursor.execute() except BulkWriteError as bwe: msg = "bulk_cursor_execute: Exception in executing Bulk cursor to mongo with {error}".format( error=str(bwe)) raise Exception(msg) except Exception as e: msg = "Mongo Bulk cursor could not be fetched, Error: {error}".format( error=str(e)) raise Exception(msg)
python
def bulk_cursor_execute(self, bulk_cursor): """ Executes the bulk_cursor :param bulk_cursor: Cursor to perform bulk operations :type bulk_cursor: pymongo bulk cursor object :returns: pymongo bulk cursor object (for bulk operations) """ try: result = bulk_cursor.execute() except BulkWriteError as bwe: msg = "bulk_cursor_execute: Exception in executing Bulk cursor to mongo with {error}".format( error=str(bwe)) raise Exception(msg) except Exception as e: msg = "Mongo Bulk cursor could not be fetched, Error: {error}".format( error=str(e)) raise Exception(msg)
[ "def", "bulk_cursor_execute", "(", "self", ",", "bulk_cursor", ")", ":", "try", ":", "result", "=", "bulk_cursor", ".", "execute", "(", ")", "except", "BulkWriteError", "as", "bwe", ":", "msg", "=", "\"bulk_cursor_execute: Exception in executing Bulk cursor to mongo w...
Executes the bulk_cursor :param bulk_cursor: Cursor to perform bulk operations :type bulk_cursor: pymongo bulk cursor object :returns: pymongo bulk cursor object (for bulk operations)
[ "Executes", "the", "bulk_cursor" ]
train
https://github.com/nimeshkverma/mongo_joins/blob/64c416c3402d5906f707b73867fbc55e28d5ec37/mongojoin/mongocollection.py#L97-L115
tkf/rash
rash/utils/termdetection.py
detect_terminal
def detect_terminal(_environ=os.environ): """ Detect "terminal" you are using. First, this function checks if you are in tmux, byobu, or screen. If not it uses $COLORTERM [#]_ if defined and fallbacks to $TERM. .. [#] So, if you are in Gnome Terminal you have "gnome-terminal" instead of "xterm-color"". """ if _environ.get('TMUX'): return 'tmux' elif subdict_by_key_prefix(_environ, 'BYOBU'): return 'byobu' elif _environ.get('TERM').startswith('screen'): return _environ['TERM'] elif _environ.get('COLORTERM'): return _environ['COLORTERM'] else: return _environ.get('TERM')
python
def detect_terminal(_environ=os.environ): """ Detect "terminal" you are using. First, this function checks if you are in tmux, byobu, or screen. If not it uses $COLORTERM [#]_ if defined and fallbacks to $TERM. .. [#] So, if you are in Gnome Terminal you have "gnome-terminal" instead of "xterm-color"". """ if _environ.get('TMUX'): return 'tmux' elif subdict_by_key_prefix(_environ, 'BYOBU'): return 'byobu' elif _environ.get('TERM').startswith('screen'): return _environ['TERM'] elif _environ.get('COLORTERM'): return _environ['COLORTERM'] else: return _environ.get('TERM')
[ "def", "detect_terminal", "(", "_environ", "=", "os", ".", "environ", ")", ":", "if", "_environ", ".", "get", "(", "'TMUX'", ")", ":", "return", "'tmux'", "elif", "subdict_by_key_prefix", "(", "_environ", ",", "'BYOBU'", ")", ":", "return", "'byobu'", "eli...
Detect "terminal" you are using. First, this function checks if you are in tmux, byobu, or screen. If not it uses $COLORTERM [#]_ if defined and fallbacks to $TERM. .. [#] So, if you are in Gnome Terminal you have "gnome-terminal" instead of "xterm-color"".
[ "Detect", "terminal", "you", "are", "using", "." ]
train
https://github.com/tkf/rash/blob/585da418ec37dd138f1a4277718b6f507e9536a2/rash/utils/termdetection.py#L28-L48
Pytwitcher/pytwitcherapi
src/pytwitcherapi/chat/connection.py
ServerConnection3.get_waittime
def get_waittime(self): """Return the appropriate time to wait, if we sent too many messages :returns: the time to wait in seconds :rtype: :class:`float` :raises: None """ now = time.time() self.sentmessages.appendleft(now) if len(self.sentmessages) == self.sentmessages.maxlen: # check if the oldes message is older than # limited by self.limitinterval oldest = self.sentmessages[-1] waittime = self.limitinterval - (now - oldest) if waittime > 0: return waittime + 1 # add a little buffer return 0
python
def get_waittime(self): """Return the appropriate time to wait, if we sent too many messages :returns: the time to wait in seconds :rtype: :class:`float` :raises: None """ now = time.time() self.sentmessages.appendleft(now) if len(self.sentmessages) == self.sentmessages.maxlen: # check if the oldes message is older than # limited by self.limitinterval oldest = self.sentmessages[-1] waittime = self.limitinterval - (now - oldest) if waittime > 0: return waittime + 1 # add a little buffer return 0
[ "def", "get_waittime", "(", "self", ")", ":", "now", "=", "time", ".", "time", "(", ")", "self", ".", "sentmessages", ".", "appendleft", "(", "now", ")", "if", "len", "(", "self", ".", "sentmessages", ")", "==", "self", ".", "sentmessages", ".", "max...
Return the appropriate time to wait, if we sent too many messages :returns: the time to wait in seconds :rtype: :class:`float` :raises: None
[ "Return", "the", "appropriate", "time", "to", "wait", "if", "we", "sent", "too", "many", "messages" ]
train
https://github.com/Pytwitcher/pytwitcherapi/blob/d53ac5ad5ca113ecb7da542e8cdcbbf8c762b336/src/pytwitcherapi/chat/connection.py#L92-L108
Pytwitcher/pytwitcherapi
src/pytwitcherapi/chat/connection.py
ServerConnection3.send_raw
def send_raw(self, string): """Send raw string to the server. The string will be padded with appropriate CR LF. If too many messages are sent, this will call :func:`time.sleep` until it is allowed to send messages again. :param string: the raw string to send :type string: :class:`str` :returns: None :raises: :class:`irc.client.InvalidCharacters`, :class:`irc.client.MessageTooLong`, :class:`irc.client.ServerNotConnectedError` """ waittime = self.get_waittime() if waittime: log.debug('Sent too many messages. Waiting %s seconds', waittime) time.sleep(waittime) return super(ServerConnection3, self).send_raw(string)
python
def send_raw(self, string): """Send raw string to the server. The string will be padded with appropriate CR LF. If too many messages are sent, this will call :func:`time.sleep` until it is allowed to send messages again. :param string: the raw string to send :type string: :class:`str` :returns: None :raises: :class:`irc.client.InvalidCharacters`, :class:`irc.client.MessageTooLong`, :class:`irc.client.ServerNotConnectedError` """ waittime = self.get_waittime() if waittime: log.debug('Sent too many messages. Waiting %s seconds', waittime) time.sleep(waittime) return super(ServerConnection3, self).send_raw(string)
[ "def", "send_raw", "(", "self", ",", "string", ")", ":", "waittime", "=", "self", ".", "get_waittime", "(", ")", "if", "waittime", ":", "log", ".", "debug", "(", "'Sent too many messages. Waiting %s seconds'", ",", "waittime", ")", "time", ".", "sleep", "(",...
Send raw string to the server. The string will be padded with appropriate CR LF. If too many messages are sent, this will call :func:`time.sleep` until it is allowed to send messages again. :param string: the raw string to send :type string: :class:`str` :returns: None :raises: :class:`irc.client.InvalidCharacters`, :class:`irc.client.MessageTooLong`, :class:`irc.client.ServerNotConnectedError`
[ "Send", "raw", "string", "to", "the", "server", "." ]
train
https://github.com/Pytwitcher/pytwitcherapi/blob/d53ac5ad5ca113ecb7da542e8cdcbbf8c762b336/src/pytwitcherapi/chat/connection.py#L110-L129
Pytwitcher/pytwitcherapi
src/pytwitcherapi/chat/connection.py
ServerConnection3._process_line
def _process_line(self, line): """Process the given line and handle the events :param line: the raw message :type line: :class:`str` :returns: None :rtype: None :raises: None """ m = self._rfc_1459_command_regexp.match(line) prefix = m.group('prefix') tags = self._process_tags(m.group('tags')) source = self._process_prefix(prefix) command = self._process_command(m.group('command')) arguments = self._process_arguments(m.group('argument')) if not self.real_server_name: self.real_server_name = prefix # Translate numerics into more readable strings. command = irc.events.numeric.get(command, command) if command not in ["privmsg", "notice"]: return super(ServerConnection3, self)._process_line(line) event = Event3("all_raw_messages", self.get_server_name(), None, [line], tags=tags) self._handle_event(event) target, msg = arguments[0], arguments[1] messages = irc.ctcp.dequote(msg) command = self._resolve_command(command, target) for m in messages: self._handle_message(tags, source, command, target, m)
python
def _process_line(self, line): """Process the given line and handle the events :param line: the raw message :type line: :class:`str` :returns: None :rtype: None :raises: None """ m = self._rfc_1459_command_regexp.match(line) prefix = m.group('prefix') tags = self._process_tags(m.group('tags')) source = self._process_prefix(prefix) command = self._process_command(m.group('command')) arguments = self._process_arguments(m.group('argument')) if not self.real_server_name: self.real_server_name = prefix # Translate numerics into more readable strings. command = irc.events.numeric.get(command, command) if command not in ["privmsg", "notice"]: return super(ServerConnection3, self)._process_line(line) event = Event3("all_raw_messages", self.get_server_name(), None, [line], tags=tags) self._handle_event(event) target, msg = arguments[0], arguments[1] messages = irc.ctcp.dequote(msg) command = self._resolve_command(command, target) for m in messages: self._handle_message(tags, source, command, target, m)
[ "def", "_process_line", "(", "self", ",", "line", ")", ":", "m", "=", "self", ".", "_rfc_1459_command_regexp", ".", "match", "(", "line", ")", "prefix", "=", "m", ".", "group", "(", "'prefix'", ")", "tags", "=", "self", ".", "_process_tags", "(", "m", ...
Process the given line and handle the events :param line: the raw message :type line: :class:`str` :returns: None :rtype: None :raises: None
[ "Process", "the", "given", "line", "and", "handle", "the", "events" ]
train
https://github.com/Pytwitcher/pytwitcherapi/blob/d53ac5ad5ca113ecb7da542e8cdcbbf8c762b336/src/pytwitcherapi/chat/connection.py#L131-L162
Pytwitcher/pytwitcherapi
src/pytwitcherapi/chat/connection.py
ServerConnection3._resolve_command
def _resolve_command(self, command, target): """Get the correct event for the command Only for 'privmsg' and 'notice' commands. :param command: The command string :type command: :class:`str` :param target: either a user or a channel :type target: :class:`str` :returns: the correct event type :rtype: :class:`str` :raises: None """ if command == "privmsg": if irc.client.is_channel(target): command = "pubmsg" else: if irc.client.is_channel(target): command = "pubnotice" else: command = "privnotice" return command
python
def _resolve_command(self, command, target): """Get the correct event for the command Only for 'privmsg' and 'notice' commands. :param command: The command string :type command: :class:`str` :param target: either a user or a channel :type target: :class:`str` :returns: the correct event type :rtype: :class:`str` :raises: None """ if command == "privmsg": if irc.client.is_channel(target): command = "pubmsg" else: if irc.client.is_channel(target): command = "pubnotice" else: command = "privnotice" return command
[ "def", "_resolve_command", "(", "self", ",", "command", ",", "target", ")", ":", "if", "command", "==", "\"privmsg\"", ":", "if", "irc", ".", "client", ".", "is_channel", "(", "target", ")", ":", "command", "=", "\"pubmsg\"", "else", ":", "if", "irc", ...
Get the correct event for the command Only for 'privmsg' and 'notice' commands. :param command: The command string :type command: :class:`str` :param target: either a user or a channel :type target: :class:`str` :returns: the correct event type :rtype: :class:`str` :raises: None
[ "Get", "the", "correct", "event", "for", "the", "command" ]
train
https://github.com/Pytwitcher/pytwitcherapi/blob/d53ac5ad5ca113ecb7da542e8cdcbbf8c762b336/src/pytwitcherapi/chat/connection.py#L164-L185
Pytwitcher/pytwitcherapi
src/pytwitcherapi/chat/connection.py
ServerConnection3._handle_message
def _handle_message(self, tags, source, command, target, msg): """Construct the correct events and handle them :param tags: the tags of the message :type tags: :class:`list` of :class:`message.Tag` :param source: the sender of the message :type source: :class:`str` :param command: the event type :type command: :class:`str` :param target: the target of the message :type target: :class:`str` :param msg: the content :type msg: :class:`str` :returns: None :rtype: None :raises: None """ if isinstance(msg, tuple): if command in ["privmsg", "pubmsg"]: command = "ctcp" else: command = "ctcpreply" msg = list(msg) log.debug("tags: %s, command: %s, source: %s, target: %s, " "arguments: %s", tags, command, source, target, msg) event = Event3(command, source, target, msg, tags=tags) self._handle_event(event) if command == "ctcp" and msg[0] == "ACTION": event = Event3("action", source, target, msg[1:], tags=tags) self._handle_event(event) else: log.debug("tags: %s, command: %s, source: %s, target: %s, " "arguments: %s", tags, command, source, target, [msg]) event = Event3(command, source, target, [msg], tags=tags) self._handle_event(event)
python
def _handle_message(self, tags, source, command, target, msg): """Construct the correct events and handle them :param tags: the tags of the message :type tags: :class:`list` of :class:`message.Tag` :param source: the sender of the message :type source: :class:`str` :param command: the event type :type command: :class:`str` :param target: the target of the message :type target: :class:`str` :param msg: the content :type msg: :class:`str` :returns: None :rtype: None :raises: None """ if isinstance(msg, tuple): if command in ["privmsg", "pubmsg"]: command = "ctcp" else: command = "ctcpreply" msg = list(msg) log.debug("tags: %s, command: %s, source: %s, target: %s, " "arguments: %s", tags, command, source, target, msg) event = Event3(command, source, target, msg, tags=tags) self._handle_event(event) if command == "ctcp" and msg[0] == "ACTION": event = Event3("action", source, target, msg[1:], tags=tags) self._handle_event(event) else: log.debug("tags: %s, command: %s, source: %s, target: %s, " "arguments: %s", tags, command, source, target, [msg]) event = Event3(command, source, target, [msg], tags=tags) self._handle_event(event)
[ "def", "_handle_message", "(", "self", ",", "tags", ",", "source", ",", "command", ",", "target", ",", "msg", ")", ":", "if", "isinstance", "(", "msg", ",", "tuple", ")", ":", "if", "command", "in", "[", "\"privmsg\"", ",", "\"pubmsg\"", "]", ":", "c...
Construct the correct events and handle them :param tags: the tags of the message :type tags: :class:`list` of :class:`message.Tag` :param source: the sender of the message :type source: :class:`str` :param command: the event type :type command: :class:`str` :param target: the target of the message :type target: :class:`str` :param msg: the content :type msg: :class:`str` :returns: None :rtype: None :raises: None
[ "Construct", "the", "correct", "events", "and", "handle", "them" ]
train
https://github.com/Pytwitcher/pytwitcherapi/blob/d53ac5ad5ca113ecb7da542e8cdcbbf8c762b336/src/pytwitcherapi/chat/connection.py#L187-L222
Pytwitcher/pytwitcherapi
src/pytwitcherapi/chat/connection.py
ServerConnection3._process_tags
def _process_tags(self, tags): """Process the tags of the message :param tags: the tags string of a message :type tags: :class:`str` | None :returns: list of tags :rtype: :class:`list` of :class:`message.Tag` :raises: None """ if not tags: return [] return [message.Tag.from_str(x) for x in tags.split(';')]
python
def _process_tags(self, tags): """Process the tags of the message :param tags: the tags string of a message :type tags: :class:`str` | None :returns: list of tags :rtype: :class:`list` of :class:`message.Tag` :raises: None """ if not tags: return [] return [message.Tag.from_str(x) for x in tags.split(';')]
[ "def", "_process_tags", "(", "self", ",", "tags", ")", ":", "if", "not", "tags", ":", "return", "[", "]", "return", "[", "message", ".", "Tag", ".", "from_str", "(", "x", ")", "for", "x", "in", "tags", ".", "split", "(", "';'", ")", "]" ]
Process the tags of the message :param tags: the tags string of a message :type tags: :class:`str` | None :returns: list of tags :rtype: :class:`list` of :class:`message.Tag` :raises: None
[ "Process", "the", "tags", "of", "the", "message" ]
train
https://github.com/Pytwitcher/pytwitcherapi/blob/d53ac5ad5ca113ecb7da542e8cdcbbf8c762b336/src/pytwitcherapi/chat/connection.py#L224-L235
Pytwitcher/pytwitcherapi
src/pytwitcherapi/chat/connection.py
ServerConnection3._process_arguments
def _process_arguments(self, arguments): """Process the arguments :param arguments: arguments string of a message :type arguments: :class:`str` | None :returns: A list of arguments :rtype: :class:`list` of :class:`str` | None :raises: None """ if not arguments: return None a = arguments.split(" :", 1) arglist = a[0].split() if len(a) == 2: arglist.append(a[1]) return arglist
python
def _process_arguments(self, arguments): """Process the arguments :param arguments: arguments string of a message :type arguments: :class:`str` | None :returns: A list of arguments :rtype: :class:`list` of :class:`str` | None :raises: None """ if not arguments: return None a = arguments.split(" :", 1) arglist = a[0].split() if len(a) == 2: arglist.append(a[1]) return arglist
[ "def", "_process_arguments", "(", "self", ",", "arguments", ")", ":", "if", "not", "arguments", ":", "return", "None", "a", "=", "arguments", ".", "split", "(", "\" :\"", ",", "1", ")", "arglist", "=", "a", "[", "0", "]", ".", "split", "(", ")", "i...
Process the arguments :param arguments: arguments string of a message :type arguments: :class:`str` | None :returns: A list of arguments :rtype: :class:`list` of :class:`str` | None :raises: None
[ "Process", "the", "arguments" ]
train
https://github.com/Pytwitcher/pytwitcherapi/blob/d53ac5ad5ca113ecb7da542e8cdcbbf8c762b336/src/pytwitcherapi/chat/connection.py#L264-L279
nimeshkverma/mongo_joins
mongojoin/processdata.py
CollectionsProcessedData.build_mongo_doc
def build_mongo_doc(self, key_list): """ Creates the components of aggregation pipeline :param key_list: list of key which will be used to create the components of aggregation pipeline :type key_list: list :returns mongo_doc: dict """ mongo_doc = {} if isinstance(key_list, list) and key_list: for key in key_list: mongo_doc[key] = "$" + str(key) return mongo_doc
python
def build_mongo_doc(self, key_list): """ Creates the components of aggregation pipeline :param key_list: list of key which will be used to create the components of aggregation pipeline :type key_list: list :returns mongo_doc: dict """ mongo_doc = {} if isinstance(key_list, list) and key_list: for key in key_list: mongo_doc[key] = "$" + str(key) return mongo_doc
[ "def", "build_mongo_doc", "(", "self", ",", "key_list", ")", ":", "mongo_doc", "=", "{", "}", "if", "isinstance", "(", "key_list", ",", "list", ")", "and", "key_list", ":", "for", "key", "in", "key_list", ":", "mongo_doc", "[", "key", "]", "=", "\"$\""...
Creates the components of aggregation pipeline :param key_list: list of key which will be used to create the components of aggregation pipeline :type key_list: list :returns mongo_doc: dict
[ "Creates", "the", "components", "of", "aggregation", "pipeline", ":", "param", "key_list", ":", "list", "of", "key", "which", "will", "be", "used", "to", "create", "the", "components", "of", "aggregation", "pipeline", ":", "type", "key_list", ":", "list" ]
train
https://github.com/nimeshkverma/mongo_joins/blob/64c416c3402d5906f707b73867fbc55e28d5ec37/mongojoin/processdata.py#L24-L38
nimeshkverma/mongo_joins
mongojoin/processdata.py
CollectionsProcessedData.build_pipeline
def build_pipeline(self, collection): """ Creates aggregation pipeline for aggregation :param collection: Mongo collection for aggregation :type collection: MongoCollection :return pipeline: list of dicts """ pipeline = [] if isinstance(collection.where_dict, dict) and collection.where_dict: match_dict = { "$match": collection.where_dict } pipeline.append(match_dict) group_keys_dict = self.build_mongo_doc(self.join_keys) push_dict = self.build_mongo_doc(collection.select_keys) group_by_dict = { "$group": { "_id": group_keys_dict, "docs": { "$push": push_dict } } } pipeline.append(group_by_dict) return pipeline
python
def build_pipeline(self, collection): """ Creates aggregation pipeline for aggregation :param collection: Mongo collection for aggregation :type collection: MongoCollection :return pipeline: list of dicts """ pipeline = [] if isinstance(collection.where_dict, dict) and collection.where_dict: match_dict = { "$match": collection.where_dict } pipeline.append(match_dict) group_keys_dict = self.build_mongo_doc(self.join_keys) push_dict = self.build_mongo_doc(collection.select_keys) group_by_dict = { "$group": { "_id": group_keys_dict, "docs": { "$push": push_dict } } } pipeline.append(group_by_dict) return pipeline
[ "def", "build_pipeline", "(", "self", ",", "collection", ")", ":", "pipeline", "=", "[", "]", "if", "isinstance", "(", "collection", ".", "where_dict", ",", "dict", ")", "and", "collection", ".", "where_dict", ":", "match_dict", "=", "{", "\"$match\"", ":"...
Creates aggregation pipeline for aggregation :param collection: Mongo collection for aggregation :type collection: MongoCollection :return pipeline: list of dicts
[ "Creates", "aggregation", "pipeline", "for", "aggregation", ":", "param", "collection", ":", "Mongo", "collection", "for", "aggregation", ":", "type", "collection", ":", "MongoCollection" ]
train
https://github.com/nimeshkverma/mongo_joins/blob/64c416c3402d5906f707b73867fbc55e28d5ec37/mongojoin/processdata.py#L40-L71
nimeshkverma/mongo_joins
mongojoin/processdata.py
CollectionsProcessedData.fetch_and_process_data
def fetch_and_process_data(self, collection, pipeline): """ Fetches and Processess data from the input collection by aggregating using the pipeline :param collection: The collection object for which mongo connection has to be made :type collection: MongoCollection :param pipeline: The pipeline using which aggregation will be performed :type pipeline: list of dicts :return grouped_docs_dict: dict of property_id,metric_count """ collection_cursor = collection.get_mongo_cursor() grouped_docs = list(collection_cursor.aggregate(pipeline)) grouped_docs_dict = {} while grouped_docs: doc = grouped_docs.pop() keys_list = [] for group_by_key in self.join_keys: keys_list.append(doc["_id"].get(group_by_key, None)) grouped_docs_dict[tuple(keys_list)] = doc['docs'] return grouped_docs_dict
python
def fetch_and_process_data(self, collection, pipeline): """ Fetches and Processess data from the input collection by aggregating using the pipeline :param collection: The collection object for which mongo connection has to be made :type collection: MongoCollection :param pipeline: The pipeline using which aggregation will be performed :type pipeline: list of dicts :return grouped_docs_dict: dict of property_id,metric_count """ collection_cursor = collection.get_mongo_cursor() grouped_docs = list(collection_cursor.aggregate(pipeline)) grouped_docs_dict = {} while grouped_docs: doc = grouped_docs.pop() keys_list = [] for group_by_key in self.join_keys: keys_list.append(doc["_id"].get(group_by_key, None)) grouped_docs_dict[tuple(keys_list)] = doc['docs'] return grouped_docs_dict
[ "def", "fetch_and_process_data", "(", "self", ",", "collection", ",", "pipeline", ")", ":", "collection_cursor", "=", "collection", ".", "get_mongo_cursor", "(", ")", "grouped_docs", "=", "list", "(", "collection_cursor", ".", "aggregate", "(", "pipeline", ")", ...
Fetches and Processess data from the input collection by aggregating using the pipeline :param collection: The collection object for which mongo connection has to be made :type collection: MongoCollection :param pipeline: The pipeline using which aggregation will be performed :type pipeline: list of dicts :return grouped_docs_dict: dict of property_id,metric_count
[ "Fetches", "and", "Processess", "data", "from", "the", "input", "collection", "by", "aggregating", "using", "the", "pipeline" ]
train
https://github.com/nimeshkverma/mongo_joins/blob/64c416c3402d5906f707b73867fbc55e28d5ec37/mongojoin/processdata.py#L73-L97
nimeshkverma/mongo_joins
mongojoin/processdata.py
CollectionsProcessedData.get_collections_data
def get_collections_data(self): """ Driver function to fetch the data from the two collections """ collections = { 'left': self.left_collection, 'right': self.right_collection } for collection_type, collection in collections.iteritems(): pipeline = self.build_pipeline(collection) self.collections_data[collection_type] = self.fetch_and_process_data( collection, pipeline)
python
def get_collections_data(self): """ Driver function to fetch the data from the two collections """ collections = { 'left': self.left_collection, 'right': self.right_collection } for collection_type, collection in collections.iteritems(): pipeline = self.build_pipeline(collection) self.collections_data[collection_type] = self.fetch_and_process_data( collection, pipeline)
[ "def", "get_collections_data", "(", "self", ")", ":", "collections", "=", "{", "'left'", ":", "self", ".", "left_collection", ",", "'right'", ":", "self", ".", "right_collection", "}", "for", "collection_type", ",", "collection", "in", "collections", ".", "ite...
Driver function to fetch the data from the two collections
[ "Driver", "function", "to", "fetch", "the", "data", "from", "the", "two", "collections" ]
train
https://github.com/nimeshkverma/mongo_joins/blob/64c416c3402d5906f707b73867fbc55e28d5ec37/mongojoin/processdata.py#L99-L111
tkf/rash
rash/utils/timeutils.py
parse_duration
def parse_duration(string): """ Parse human readable duration. >>> parse_duration('1m') 60 >>> parse_duration('7 days') == 7 * 24 * 60 * 60 True """ if string.isdigit(): return int(string) try: return float(string) except ValueError: pass string = string.rstrip() for (suf, mult) in DURATION_SUFFIX_MAP.items(): if string.lower().endswith(suf): try: return parse_duration(string[:-len(suf)].strip()) * mult except TypeError: return
python
def parse_duration(string): """ Parse human readable duration. >>> parse_duration('1m') 60 >>> parse_duration('7 days') == 7 * 24 * 60 * 60 True """ if string.isdigit(): return int(string) try: return float(string) except ValueError: pass string = string.rstrip() for (suf, mult) in DURATION_SUFFIX_MAP.items(): if string.lower().endswith(suf): try: return parse_duration(string[:-len(suf)].strip()) * mult except TypeError: return
[ "def", "parse_duration", "(", "string", ")", ":", "if", "string", ".", "isdigit", "(", ")", ":", "return", "int", "(", "string", ")", "try", ":", "return", "float", "(", "string", ")", "except", "ValueError", ":", "pass", "string", "=", "string", ".", ...
Parse human readable duration. >>> parse_duration('1m') 60 >>> parse_duration('7 days') == 7 * 24 * 60 * 60 True
[ "Parse", "human", "readable", "duration", "." ]
train
https://github.com/tkf/rash/blob/585da418ec37dd138f1a4277718b6f507e9536a2/rash/utils/timeutils.py#L36-L58
ianclegg/winrmlib
winrmlib/client.py
WinRmClient.create_session
def create_session(): """ shell = CommandShell('http://192.168.145.132:5985/wsman', 'Administrator', 'Pa55w0rd') """ shell = CommandShell('http://192.168.137.238:5985/wsman', 'Administrator', 'Pa55w0rd') shell.open() command_id = shell.run('ipconfig', ['/all']) (stdout, stderr, exit_code) = shell.receive(command_id) sys.stdout.write(stdout.strip() + '\r\n') shell.close() return None
python
def create_session(): """ shell = CommandShell('http://192.168.145.132:5985/wsman', 'Administrator', 'Pa55w0rd') """ shell = CommandShell('http://192.168.137.238:5985/wsman', 'Administrator', 'Pa55w0rd') shell.open() command_id = shell.run('ipconfig', ['/all']) (stdout, stderr, exit_code) = shell.receive(command_id) sys.stdout.write(stdout.strip() + '\r\n') shell.close() return None
[ "def", "create_session", "(", ")", ":", "shell", "=", "CommandShell", "(", "'http://192.168.137.238:5985/wsman'", ",", "'Administrator'", ",", "'Pa55w0rd'", ")", "shell", ".", "open", "(", ")", "command_id", "=", "shell", ".", "run", "(", "'ipconfig'", ",", "[...
shell = CommandShell('http://192.168.145.132:5985/wsman', 'Administrator', 'Pa55w0rd')
[ "shell", "=", "CommandShell", "(", "http", ":", "//", "192", ".", "168", ".", "145", ".", "132", ":", "5985", "/", "wsman", "Administrator", "Pa55w0rd", ")" ]
train
https://github.com/ianclegg/winrmlib/blob/489b3ce5d0e6a9a7301ba5d345ba82fa824c1431/winrmlib/client.py#L53-L64
agusmakmun/djipsum
djipsum/faker.py
FakerModel.fake_chars_or_choice
def fake_chars_or_choice(self, field_name): """ Return fake chars or choice it if the `field_name` has choices. Then, returning random value from it. This specially for `CharField`. Usage: faker.fake_chars_or_choice('field_name') Example for field: TYPE_CHOICES = ( ('project', 'I wanna to talk about project'), ('feedback', 'I want to report a bugs or give feedback'), ('hello', 'I just want to say hello') ) type = models.CharField(max_length=200, choices=TYPE_CHOICES) """ return self.djipsum_fields().randomCharField( self.model_class(), field_name=field_name )
python
def fake_chars_or_choice(self, field_name): """ Return fake chars or choice it if the `field_name` has choices. Then, returning random value from it. This specially for `CharField`. Usage: faker.fake_chars_or_choice('field_name') Example for field: TYPE_CHOICES = ( ('project', 'I wanna to talk about project'), ('feedback', 'I want to report a bugs or give feedback'), ('hello', 'I just want to say hello') ) type = models.CharField(max_length=200, choices=TYPE_CHOICES) """ return self.djipsum_fields().randomCharField( self.model_class(), field_name=field_name )
[ "def", "fake_chars_or_choice", "(", "self", ",", "field_name", ")", ":", "return", "self", ".", "djipsum_fields", "(", ")", ".", "randomCharField", "(", "self", ".", "model_class", "(", ")", ",", "field_name", "=", "field_name", ")" ]
Return fake chars or choice it if the `field_name` has choices. Then, returning random value from it. This specially for `CharField`. Usage: faker.fake_chars_or_choice('field_name') Example for field: TYPE_CHOICES = ( ('project', 'I wanna to talk about project'), ('feedback', 'I want to report a bugs or give feedback'), ('hello', 'I just want to say hello') ) type = models.CharField(max_length=200, choices=TYPE_CHOICES)
[ "Return", "fake", "chars", "or", "choice", "it", "if", "the", "field_name", "has", "choices", ".", "Then", "returning", "random", "value", "from", "it", ".", "This", "specially", "for", "CharField", "." ]
train
https://github.com/agusmakmun/djipsum/blob/e7950556422b4039092db2083db7a83728230977/djipsum/faker.py#L90-L110
agusmakmun/djipsum
djipsum/faker.py
FakerModel.fake_decimal
def fake_decimal(self, field_name): """ Validate if the field has a `max_digits` and `decimal_places` And generating the unique decimal number. Usage: faker.fake_decimal('field_name') Example: 10.7, 13041.00, 200.000.000 """ return self.djipsum_fields().randomDecimalField( self.model_class(), field_name=field_name )
python
def fake_decimal(self, field_name): """ Validate if the field has a `max_digits` and `decimal_places` And generating the unique decimal number. Usage: faker.fake_decimal('field_name') Example: 10.7, 13041.00, 200.000.000 """ return self.djipsum_fields().randomDecimalField( self.model_class(), field_name=field_name )
[ "def", "fake_decimal", "(", "self", ",", "field_name", ")", ":", "return", "self", ".", "djipsum_fields", "(", ")", ".", "randomDecimalField", "(", "self", ".", "model_class", "(", ")", ",", "field_name", "=", "field_name", ")" ]
Validate if the field has a `max_digits` and `decimal_places` And generating the unique decimal number. Usage: faker.fake_decimal('field_name') Example: 10.7, 13041.00, 200.000.000
[ "Validate", "if", "the", "field", "has", "a", "max_digits", "and", "decimal_places", "And", "generating", "the", "unique", "decimal", "number", "." ]
train
https://github.com/agusmakmun/djipsum/blob/e7950556422b4039092db2083db7a83728230977/djipsum/faker.py#L122-L136
agusmakmun/djipsum
djipsum/faker.py
FakerModel.fake_fk
def fake_fk(self, field_name): """ Return related random object to set as ForeignKey. Example Output: <User: username> """ return self.djipsum_fields().getOrCreateForeignKey( model_class=self.model_class(), field_name=field_name )
python
def fake_fk(self, field_name): """ Return related random object to set as ForeignKey. Example Output: <User: username> """ return self.djipsum_fields().getOrCreateForeignKey( model_class=self.model_class(), field_name=field_name )
[ "def", "fake_fk", "(", "self", ",", "field_name", ")", ":", "return", "self", ".", "djipsum_fields", "(", ")", ".", "getOrCreateForeignKey", "(", "model_class", "=", "self", ".", "model_class", "(", ")", ",", "field_name", "=", "field_name", ")" ]
Return related random object to set as ForeignKey. Example Output: <User: username>
[ "Return", "related", "random", "object", "to", "set", "as", "ForeignKey", "." ]
train
https://github.com/agusmakmun/djipsum/blob/e7950556422b4039092db2083db7a83728230977/djipsum/faker.py#L290-L300
agusmakmun/djipsum
djipsum/faker.py
FakerModel.fake_m2m
def fake_m2m(self, obj, field_name): """ Return the random objects from m2m relationship. The ManyToManyField need specific object, so i handle it after created the object. """ instance_m2m = getattr(obj, field_name) objects_m2m = instance_m2m.model.objects.all() if objects_m2m.exists(): ids_m2m = [i.pk for i in objects_m2m] random_decission = random.sample( range(min(ids_m2m), max(ids_m2m)), max(ids_m2m) - 1 ) if len(random_decission) <= 2: random_decission = [ self.djipsum_fields().randomize(ids_m2m) ] related_objects = [ rel_obj for rel_obj in objects_m2m if rel_obj.pk in random_decission ] instance_m2m.add(*related_objects)
python
def fake_m2m(self, obj, field_name): """ Return the random objects from m2m relationship. The ManyToManyField need specific object, so i handle it after created the object. """ instance_m2m = getattr(obj, field_name) objects_m2m = instance_m2m.model.objects.all() if objects_m2m.exists(): ids_m2m = [i.pk for i in objects_m2m] random_decission = random.sample( range(min(ids_m2m), max(ids_m2m)), max(ids_m2m) - 1 ) if len(random_decission) <= 2: random_decission = [ self.djipsum_fields().randomize(ids_m2m) ] related_objects = [ rel_obj for rel_obj in objects_m2m if rel_obj.pk in random_decission ] instance_m2m.add(*related_objects)
[ "def", "fake_m2m", "(", "self", ",", "obj", ",", "field_name", ")", ":", "instance_m2m", "=", "getattr", "(", "obj", ",", "field_name", ")", "objects_m2m", "=", "instance_m2m", ".", "model", ".", "objects", ".", "all", "(", ")", "if", "objects_m2m", ".",...
Return the random objects from m2m relationship. The ManyToManyField need specific object, so i handle it after created the object.
[ "Return", "the", "random", "objects", "from", "m2m", "relationship", ".", "The", "ManyToManyField", "need", "specific", "object", "so", "i", "handle", "it", "after", "created", "the", "object", "." ]
train
https://github.com/agusmakmun/djipsum/blob/e7950556422b4039092db2083db7a83728230977/djipsum/faker.py#L302-L324
agusmakmun/djipsum
djipsum/faker.py
FakerModel.create
def create(self, fields): """ Create the object only once. So, you need loop to usage. :param `fields` is dictionary fields. """ try: # Cleaning the fields, and check if has `ForeignKey` type. cleaned_fields = {} for key, value in fields.items(): if type(value) is dict: try: if value['type'] == 'fk': fake_fk = self.fake_fk(value['field_name']) cleaned_fields.update({key: fake_fk}) except: pass else: cleaned_fields.update({key: value}) # Creating the object from dictionary fields. model_class = self.model_class() obj = model_class.objects.create(**cleaned_fields) # The `ManyToManyField` need specific object, # so i handle it after created the object. for key, value in fields.items(): if type(value) is dict: try: if value['type'] == 'm2m': self.fake_m2m(obj, value['field_name']) except: pass try: obj.save_m2m() except: obj.save() return obj except Exception as e: raise e
python
def create(self, fields): """ Create the object only once. So, you need loop to usage. :param `fields` is dictionary fields. """ try: # Cleaning the fields, and check if has `ForeignKey` type. cleaned_fields = {} for key, value in fields.items(): if type(value) is dict: try: if value['type'] == 'fk': fake_fk = self.fake_fk(value['field_name']) cleaned_fields.update({key: fake_fk}) except: pass else: cleaned_fields.update({key: value}) # Creating the object from dictionary fields. model_class = self.model_class() obj = model_class.objects.create(**cleaned_fields) # The `ManyToManyField` need specific object, # so i handle it after created the object. for key, value in fields.items(): if type(value) is dict: try: if value['type'] == 'm2m': self.fake_m2m(obj, value['field_name']) except: pass try: obj.save_m2m() except: obj.save() return obj except Exception as e: raise e
[ "def", "create", "(", "self", ",", "fields", ")", ":", "try", ":", "# Cleaning the fields, and check if has `ForeignKey` type.", "cleaned_fields", "=", "{", "}", "for", "key", ",", "value", "in", "fields", ".", "items", "(", ")", ":", "if", "type", "(", "val...
Create the object only once. So, you need loop to usage. :param `fields` is dictionary fields.
[ "Create", "the", "object", "only", "once", ".", "So", "you", "need", "loop", "to", "usage", "." ]
train
https://github.com/agusmakmun/djipsum/blob/e7950556422b4039092db2083db7a83728230977/djipsum/faker.py#L326-L366
alpha-xone/xone
xone/calendar.py
trading_dates
def trading_dates(start, end, calendar='US'): """ Trading dates for given exchange Args: start: start date end: end date calendar: exchange as string Returns: pd.DatetimeIndex: datetime index Examples: >>> bus_dates = ['2018-12-24', '2018-12-26', '2018-12-27'] >>> trd_dates = trading_dates(start='2018-12-23', end='2018-12-27') >>> assert len(trd_dates) == len(bus_dates) >>> assert pd.Series(trd_dates == pd.DatetimeIndex(bus_dates)).all() """ kw = dict(start=pd.Timestamp(start, tz='UTC').date(), end=pd.Timestamp(end, tz='UTC').date()) us_cal = getattr(sys.modules[__name__], f'{calendar}TradingCalendar')() return pd.bdate_range(**kw).drop(us_cal.holidays(**kw))
python
def trading_dates(start, end, calendar='US'): """ Trading dates for given exchange Args: start: start date end: end date calendar: exchange as string Returns: pd.DatetimeIndex: datetime index Examples: >>> bus_dates = ['2018-12-24', '2018-12-26', '2018-12-27'] >>> trd_dates = trading_dates(start='2018-12-23', end='2018-12-27') >>> assert len(trd_dates) == len(bus_dates) >>> assert pd.Series(trd_dates == pd.DatetimeIndex(bus_dates)).all() """ kw = dict(start=pd.Timestamp(start, tz='UTC').date(), end=pd.Timestamp(end, tz='UTC').date()) us_cal = getattr(sys.modules[__name__], f'{calendar}TradingCalendar')() return pd.bdate_range(**kw).drop(us_cal.holidays(**kw))
[ "def", "trading_dates", "(", "start", ",", "end", ",", "calendar", "=", "'US'", ")", ":", "kw", "=", "dict", "(", "start", "=", "pd", ".", "Timestamp", "(", "start", ",", "tz", "=", "'UTC'", ")", ".", "date", "(", ")", ",", "end", "=", "pd", "....
Trading dates for given exchange Args: start: start date end: end date calendar: exchange as string Returns: pd.DatetimeIndex: datetime index Examples: >>> bus_dates = ['2018-12-24', '2018-12-26', '2018-12-27'] >>> trd_dates = trading_dates(start='2018-12-23', end='2018-12-27') >>> assert len(trd_dates) == len(bus_dates) >>> assert pd.Series(trd_dates == pd.DatetimeIndex(bus_dates)).all()
[ "Trading", "dates", "for", "given", "exchange" ]
train
https://github.com/alpha-xone/xone/blob/68534a30f7f1760b220ba58040be3927f7dfbcf4/xone/calendar.py#L22-L42
alpha-xone/xone
xone/logs.py
get_logger
def get_logger( name_or_func, log_file='', level=logging.INFO, types='stream', **kwargs ): """ Generate logger Args: name_or_func: logger name or current running function log_file: logger file level: level of logs - debug, info, error types: file or stream, or both Returns: logger Examples: >>> get_logger(name_or_func='download_data', level='debug', types='stream') <Logger download_data (DEBUG)> >>> get_logger(name_or_func='preprocess', log_file='pre.log', types='file|stream') <Logger preprocess (INFO)> """ if isinstance(level, str): level = getattr(logging, level.upper()) log_name = name_or_func if isinstance(name_or_func, str) else utils.func_scope(name_or_func) logger = logging.getLogger(name=log_name) logger.setLevel(level=level) if not len(logger.handlers): formatter = logging.Formatter(fmt=kwargs.get('fmt', LOG_FMT)) if 'file' in types: file_handler = logging.FileHandler(log_file) file_handler.setFormatter(fmt=formatter) logger.addHandler(file_handler) if 'stream' in types: stream_handler = logging.StreamHandler() stream_handler.setFormatter(fmt=formatter) logger.addHandler(stream_handler) return logger
python
def get_logger( name_or_func, log_file='', level=logging.INFO, types='stream', **kwargs ): """ Generate logger Args: name_or_func: logger name or current running function log_file: logger file level: level of logs - debug, info, error types: file or stream, or both Returns: logger Examples: >>> get_logger(name_or_func='download_data', level='debug', types='stream') <Logger download_data (DEBUG)> >>> get_logger(name_or_func='preprocess', log_file='pre.log', types='file|stream') <Logger preprocess (INFO)> """ if isinstance(level, str): level = getattr(logging, level.upper()) log_name = name_or_func if isinstance(name_or_func, str) else utils.func_scope(name_or_func) logger = logging.getLogger(name=log_name) logger.setLevel(level=level) if not len(logger.handlers): formatter = logging.Formatter(fmt=kwargs.get('fmt', LOG_FMT)) if 'file' in types: file_handler = logging.FileHandler(log_file) file_handler.setFormatter(fmt=formatter) logger.addHandler(file_handler) if 'stream' in types: stream_handler = logging.StreamHandler() stream_handler.setFormatter(fmt=formatter) logger.addHandler(stream_handler) return logger
[ "def", "get_logger", "(", "name_or_func", ",", "log_file", "=", "''", ",", "level", "=", "logging", ".", "INFO", ",", "types", "=", "'stream'", ",", "*", "*", "kwargs", ")", ":", "if", "isinstance", "(", "level", ",", "str", ")", ":", "level", "=", ...
Generate logger Args: name_or_func: logger name or current running function log_file: logger file level: level of logs - debug, info, error types: file or stream, or both Returns: logger Examples: >>> get_logger(name_or_func='download_data', level='debug', types='stream') <Logger download_data (DEBUG)> >>> get_logger(name_or_func='preprocess', log_file='pre.log', types='file|stream') <Logger preprocess (INFO)>
[ "Generate", "logger" ]
train
https://github.com/alpha-xone/xone/blob/68534a30f7f1760b220ba58040be3927f7dfbcf4/xone/logs.py#L8-L47
mathiasertl/xmpp-backends
xmpp_backends/django/models.py
XmppBackendUser.set_password
def set_password(self, raw_password): """Calls :py:func:`~xmpp_backends.base.XmppBackendBase.set_password` for the user. If password is ``None``, calls :py:func:`~xmpp_backends.base.XmppBackendBase.set_unusable_password`. """ if raw_password is None: self.set_unusable_password() else: xmpp_backend.set_password(self.node, self.domain, raw_password)
python
def set_password(self, raw_password): """Calls :py:func:`~xmpp_backends.base.XmppBackendBase.set_password` for the user. If password is ``None``, calls :py:func:`~xmpp_backends.base.XmppBackendBase.set_unusable_password`. """ if raw_password is None: self.set_unusable_password() else: xmpp_backend.set_password(self.node, self.domain, raw_password)
[ "def", "set_password", "(", "self", ",", "raw_password", ")", ":", "if", "raw_password", "is", "None", ":", "self", ".", "set_unusable_password", "(", ")", "else", ":", "xmpp_backend", ".", "set_password", "(", "self", ".", "node", ",", "self", ".", "domai...
Calls :py:func:`~xmpp_backends.base.XmppBackendBase.set_password` for the user. If password is ``None``, calls :py:func:`~xmpp_backends.base.XmppBackendBase.set_unusable_password`.
[ "Calls", ":", "py", ":", "func", ":", "~xmpp_backends", ".", "base", ".", "XmppBackendBase", ".", "set_password", "for", "the", "user", "." ]
train
https://github.com/mathiasertl/xmpp-backends/blob/214ef0664dbf90fa300c2483b9b3416559e5d171/xmpp_backends/django/models.py#L39-L47
mathiasertl/xmpp-backends
xmpp_backends/django/models.py
XmppBackendUser.check_password
def check_password(self, raw_password): """Calls :py:func:`~xmpp_backends.base.XmppBackendBase.check_password` for the user.""" return xmpp_backend.check_password(self.node, self.domain, raw_password)
python
def check_password(self, raw_password): """Calls :py:func:`~xmpp_backends.base.XmppBackendBase.check_password` for the user.""" return xmpp_backend.check_password(self.node, self.domain, raw_password)
[ "def", "check_password", "(", "self", ",", "raw_password", ")", ":", "return", "xmpp_backend", ".", "check_password", "(", "self", ".", "node", ",", "self", ".", "domain", ",", "raw_password", ")" ]
Calls :py:func:`~xmpp_backends.base.XmppBackendBase.check_password` for the user.
[ "Calls", ":", "py", ":", "func", ":", "~xmpp_backends", ".", "base", ".", "XmppBackendBase", ".", "check_password", "for", "the", "user", "." ]
train
https://github.com/mathiasertl/xmpp-backends/blob/214ef0664dbf90fa300c2483b9b3416559e5d171/xmpp_backends/django/models.py#L49-L51
sangoma/pysensu
pysensu/api.py
SensuAPI.get_clients
def get_clients(self, limit=None, offset=None): """ Returns a list of clients. """ data = {} if limit: data['limit'] = limit if offset: data['offset'] = offset result = self._request('GET', '/clients', data=json.dumps(data)) return result.json()
python
def get_clients(self, limit=None, offset=None): """ Returns a list of clients. """ data = {} if limit: data['limit'] = limit if offset: data['offset'] = offset result = self._request('GET', '/clients', data=json.dumps(data)) return result.json()
[ "def", "get_clients", "(", "self", ",", "limit", "=", "None", ",", "offset", "=", "None", ")", ":", "data", "=", "{", "}", "if", "limit", ":", "data", "[", "'limit'", "]", "=", "limit", "if", "offset", ":", "data", "[", "'offset'", "]", "=", "off...
Returns a list of clients.
[ "Returns", "a", "list", "of", "clients", "." ]
train
https://github.com/sangoma/pysensu/blob/dc6799edbf2635247aec61fcf45b04ddec1beb49/pysensu/api.py#L76-L86
sangoma/pysensu
pysensu/api.py
SensuAPI.get_client_data
def get_client_data(self, client): """ Returns a client. """ data = self._request('GET', '/clients/{}'.format(client)) return data.json()
python
def get_client_data(self, client): """ Returns a client. """ data = self._request('GET', '/clients/{}'.format(client)) return data.json()
[ "def", "get_client_data", "(", "self", ",", "client", ")", ":", "data", "=", "self", ".", "_request", "(", "'GET'", ",", "'/clients/{}'", ".", "format", "(", "client", ")", ")", "return", "data", ".", "json", "(", ")" ]
Returns a client.
[ "Returns", "a", "client", "." ]
train
https://github.com/sangoma/pysensu/blob/dc6799edbf2635247aec61fcf45b04ddec1beb49/pysensu/api.py#L88-L93
sangoma/pysensu
pysensu/api.py
SensuAPI.get_client_history
def get_client_history(self, client): """ Returns the history for a client. """ data = self._request('GET', '/clients/{}/history'.format(client)) return data.json()
python
def get_client_history(self, client): """ Returns the history for a client. """ data = self._request('GET', '/clients/{}/history'.format(client)) return data.json()
[ "def", "get_client_history", "(", "self", ",", "client", ")", ":", "data", "=", "self", ".", "_request", "(", "'GET'", ",", "'/clients/{}/history'", ".", "format", "(", "client", ")", ")", "return", "data", ".", "json", "(", ")" ]
Returns the history for a client.
[ "Returns", "the", "history", "for", "a", "client", "." ]
train
https://github.com/sangoma/pysensu/blob/dc6799edbf2635247aec61fcf45b04ddec1beb49/pysensu/api.py#L95-L100
sangoma/pysensu
pysensu/api.py
SensuAPI.get_all_client_events
def get_all_client_events(self, client): """ Returns the list of current events for a given client. """ data = self._request('GET', '/events/{}'.format(client)) return data.json()
python
def get_all_client_events(self, client): """ Returns the list of current events for a given client. """ data = self._request('GET', '/events/{}'.format(client)) return data.json()
[ "def", "get_all_client_events", "(", "self", ",", "client", ")", ":", "data", "=", "self", ".", "_request", "(", "'GET'", ",", "'/events/{}'", ".", "format", "(", "client", ")", ")", "return", "data", ".", "json", "(", ")" ]
Returns the list of current events for a given client.
[ "Returns", "the", "list", "of", "current", "events", "for", "a", "given", "client", "." ]
train
https://github.com/sangoma/pysensu/blob/dc6799edbf2635247aec61fcf45b04ddec1beb49/pysensu/api.py#L119-L124
sangoma/pysensu
pysensu/api.py
SensuAPI.get_event
def get_event(self, client, check): """ Returns an event for a given client & check name. """ data = self._request('GET', '/events/{}/{}'.format(client, check)) return data.json()
python
def get_event(self, client, check): """ Returns an event for a given client & check name. """ data = self._request('GET', '/events/{}/{}'.format(client, check)) return data.json()
[ "def", "get_event", "(", "self", ",", "client", ",", "check", ")", ":", "data", "=", "self", ".", "_request", "(", "'GET'", ",", "'/events/{}/{}'", ".", "format", "(", "client", ",", "check", ")", ")", "return", "data", ".", "json", "(", ")" ]
Returns an event for a given client & check name.
[ "Returns", "an", "event", "for", "a", "given", "client", "&", "check", "name", "." ]
train
https://github.com/sangoma/pysensu/blob/dc6799edbf2635247aec61fcf45b04ddec1beb49/pysensu/api.py#L126-L131
sangoma/pysensu
pysensu/api.py
SensuAPI.delete_event
def delete_event(self, client, check): """ Resolves an event for a given check on a given client. (delayed action) """ self._request('DELETE', '/events/{}/{}'.format(client, check)) return True
python
def delete_event(self, client, check): """ Resolves an event for a given check on a given client. (delayed action) """ self._request('DELETE', '/events/{}/{}'.format(client, check)) return True
[ "def", "delete_event", "(", "self", ",", "client", ",", "check", ")", ":", "self", ".", "_request", "(", "'DELETE'", ",", "'/events/{}/{}'", ".", "format", "(", "client", ",", "check", ")", ")", "return", "True" ]
Resolves an event for a given check on a given client. (delayed action)
[ "Resolves", "an", "event", "for", "a", "given", "check", "on", "a", "given", "client", ".", "(", "delayed", "action", ")" ]
train
https://github.com/sangoma/pysensu/blob/dc6799edbf2635247aec61fcf45b04ddec1beb49/pysensu/api.py#L133-L138
sangoma/pysensu
pysensu/api.py
SensuAPI.post_event
def post_event(self, client, check): """ Resolves an event. (delayed action) """ self._request('POST', '/resolve', data=json.dumps({'client': client, 'check': check})) return True
python
def post_event(self, client, check): """ Resolves an event. (delayed action) """ self._request('POST', '/resolve', data=json.dumps({'client': client, 'check': check})) return True
[ "def", "post_event", "(", "self", ",", "client", ",", "check", ")", ":", "self", ".", "_request", "(", "'POST'", ",", "'/resolve'", ",", "data", "=", "json", ".", "dumps", "(", "{", "'client'", ":", "client", ",", "'check'", ":", "check", "}", ")", ...
Resolves an event. (delayed action)
[ "Resolves", "an", "event", ".", "(", "delayed", "action", ")" ]
train
https://github.com/sangoma/pysensu/blob/dc6799edbf2635247aec61fcf45b04ddec1beb49/pysensu/api.py#L140-L146
sangoma/pysensu
pysensu/api.py
SensuAPI.get_check
def get_check(self, check): """ Returns a check. """ data = self._request('GET', '/checks/{}'.format(check)) return data.json()
python
def get_check(self, check): """ Returns a check. """ data = self._request('GET', '/checks/{}'.format(check)) return data.json()
[ "def", "get_check", "(", "self", ",", "check", ")", ":", "data", "=", "self", ".", "_request", "(", "'GET'", ",", "'/checks/{}'", ".", "format", "(", "check", ")", ")", "return", "data", ".", "json", "(", ")" ]
Returns a check.
[ "Returns", "a", "check", "." ]
train
https://github.com/sangoma/pysensu/blob/dc6799edbf2635247aec61fcf45b04ddec1beb49/pysensu/api.py#L158-L163
sangoma/pysensu
pysensu/api.py
SensuAPI.post_check_request
def post_check_request(self, check, subscribers): """ Issues a check execution request. """ data = { 'check': check, 'subscribers': [subscribers] } self._request('POST', '/request', data=json.dumps(data)) return True
python
def post_check_request(self, check, subscribers): """ Issues a check execution request. """ data = { 'check': check, 'subscribers': [subscribers] } self._request('POST', '/request', data=json.dumps(data)) return True
[ "def", "post_check_request", "(", "self", ",", "check", ",", "subscribers", ")", ":", "data", "=", "{", "'check'", ":", "check", ",", "'subscribers'", ":", "[", "subscribers", "]", "}", "self", ".", "_request", "(", "'POST'", ",", "'/request'", ",", "dat...
Issues a check execution request.
[ "Issues", "a", "check", "execution", "request", "." ]
train
https://github.com/sangoma/pysensu/blob/dc6799edbf2635247aec61fcf45b04ddec1beb49/pysensu/api.py#L165-L174
sangoma/pysensu
pysensu/api.py
SensuAPI.post_silence_request
def post_silence_request(self, kwargs): """ Create a silence entry. """ self._request('POST', '/silenced', data=json.dumps(kwargs)) return True
python
def post_silence_request(self, kwargs): """ Create a silence entry. """ self._request('POST', '/silenced', data=json.dumps(kwargs)) return True
[ "def", "post_silence_request", "(", "self", ",", "kwargs", ")", ":", "self", ".", "_request", "(", "'POST'", ",", "'/silenced'", ",", "data", "=", "json", ".", "dumps", "(", "kwargs", ")", ")", "return", "True" ]
Create a silence entry.
[ "Create", "a", "silence", "entry", "." ]
train
https://github.com/sangoma/pysensu/blob/dc6799edbf2635247aec61fcf45b04ddec1beb49/pysensu/api.py#L191-L196
sangoma/pysensu
pysensu/api.py
SensuAPI.clear_silence
def clear_silence(self, kwargs): """ Clear a silence entry. """ self._request('POST', '/silenced/clear', data=json.dumps(kwargs)) return True
python
def clear_silence(self, kwargs): """ Clear a silence entry. """ self._request('POST', '/silenced/clear', data=json.dumps(kwargs)) return True
[ "def", "clear_silence", "(", "self", ",", "kwargs", ")", ":", "self", ".", "_request", "(", "'POST'", ",", "'/silenced/clear'", ",", "data", "=", "json", ".", "dumps", "(", "kwargs", ")", ")", "return", "True" ]
Clear a silence entry.
[ "Clear", "a", "silence", "entry", "." ]
train
https://github.com/sangoma/pysensu/blob/dc6799edbf2635247aec61fcf45b04ddec1beb49/pysensu/api.py#L198-L203
sangoma/pysensu
pysensu/api.py
SensuAPI.get_aggregate_check
def get_aggregate_check(self, check, age=None): """ Returns the list of aggregates for a given check """ data = {} if age: data['max_age'] = age result = self._request('GET', '/aggregates/{}'.format(check), data=json.dumps(data)) return result.json()
python
def get_aggregate_check(self, check, age=None): """ Returns the list of aggregates for a given check """ data = {} if age: data['max_age'] = age result = self._request('GET', '/aggregates/{}'.format(check), data=json.dumps(data)) return result.json()
[ "def", "get_aggregate_check", "(", "self", ",", "check", ",", "age", "=", "None", ")", ":", "data", "=", "{", "}", "if", "age", ":", "data", "[", "'max_age'", "]", "=", "age", "result", "=", "self", ".", "_request", "(", "'GET'", ",", "'/aggregates/{...
Returns the list of aggregates for a given check
[ "Returns", "the", "list", "of", "aggregates", "for", "a", "given", "check" ]
train
https://github.com/sangoma/pysensu/blob/dc6799edbf2635247aec61fcf45b04ddec1beb49/pysensu/api.py#L215-L225
sangoma/pysensu
pysensu/api.py
SensuAPI.get_health
def get_health(self, consumers=2, messages=100): """ Returns health information on transport & Redis connections. """ data = {'consumers': consumers, 'messages': messages} try: self._request('GET', '/health', data=json.dumps(data)) return True except SensuAPIException: return False
python
def get_health(self, consumers=2, messages=100): """ Returns health information on transport & Redis connections. """ data = {'consumers': consumers, 'messages': messages} try: self._request('GET', '/health', data=json.dumps(data)) return True except SensuAPIException: return False
[ "def", "get_health", "(", "self", ",", "consumers", "=", "2", ",", "messages", "=", "100", ")", ":", "data", "=", "{", "'consumers'", ":", "consumers", ",", "'messages'", ":", "messages", "}", "try", ":", "self", ".", "_request", "(", "'GET'", ",", "...
Returns health information on transport & Redis connections.
[ "Returns", "health", "information", "on", "transport", "&", "Redis", "connections", "." ]
train
https://github.com/sangoma/pysensu/blob/dc6799edbf2635247aec61fcf45b04ddec1beb49/pysensu/api.py#L244-L254
sangoma/pysensu
pysensu/api.py
SensuAPI.get_results
def get_results(self, client): """ Returns a result. """ data = self._request('GET', '/results/{}'.format(client)) return data.json()
python
def get_results(self, client): """ Returns a result. """ data = self._request('GET', '/results/{}'.format(client)) return data.json()
[ "def", "get_results", "(", "self", ",", "client", ")", ":", "data", "=", "self", ".", "_request", "(", "'GET'", ",", "'/results/{}'", ".", "format", "(", "client", ")", ")", "return", "data", ".", "json", "(", ")" ]
Returns a result.
[ "Returns", "a", "result", "." ]
train
https://github.com/sangoma/pysensu/blob/dc6799edbf2635247aec61fcf45b04ddec1beb49/pysensu/api.py#L266-L271
sangoma/pysensu
pysensu/api.py
SensuAPI.get_result
def get_result(self, client, check): """ Returns an event for a given client & result name. """ data = self._request('GET', '/results/{}/{}'.format(client, check)) return data.json()
python
def get_result(self, client, check): """ Returns an event for a given client & result name. """ data = self._request('GET', '/results/{}/{}'.format(client, check)) return data.json()
[ "def", "get_result", "(", "self", ",", "client", ",", "check", ")", ":", "data", "=", "self", ".", "_request", "(", "'GET'", ",", "'/results/{}/{}'", ".", "format", "(", "client", ",", "check", ")", ")", "return", "data", ".", "json", "(", ")" ]
Returns an event for a given client & result name.
[ "Returns", "an", "event", "for", "a", "given", "client", "&", "result", "name", "." ]
train
https://github.com/sangoma/pysensu/blob/dc6799edbf2635247aec61fcf45b04ddec1beb49/pysensu/api.py#L273-L278
sangoma/pysensu
pysensu/api.py
SensuAPI.delete_result
def delete_result(self, client, check): """ Deletes an check result data for a given check on a given client. """ self._request('DELETE', '/results/{}/{}'.format(client, check)) return True
python
def delete_result(self, client, check): """ Deletes an check result data for a given check on a given client. """ self._request('DELETE', '/results/{}/{}'.format(client, check)) return True
[ "def", "delete_result", "(", "self", ",", "client", ",", "check", ")", ":", "self", ".", "_request", "(", "'DELETE'", ",", "'/results/{}/{}'", ".", "format", "(", "client", ",", "check", ")", ")", "return", "True" ]
Deletes an check result data for a given check on a given client.
[ "Deletes", "an", "check", "result", "data", "for", "a", "given", "check", "on", "a", "given", "client", "." ]
train
https://github.com/sangoma/pysensu/blob/dc6799edbf2635247aec61fcf45b04ddec1beb49/pysensu/api.py#L280-L285
sangoma/pysensu
pysensu/api.py
SensuAPI.post_result_data
def post_result_data(self, client, check, output, status): """ Posts check result data. """ data = { 'source': client, 'name': check, 'output': output, 'status': status, } self._request('POST', '/results', data=json.dumps(data)) return True
python
def post_result_data(self, client, check, output, status): """ Posts check result data. """ data = { 'source': client, 'name': check, 'output': output, 'status': status, } self._request('POST', '/results', data=json.dumps(data)) return True
[ "def", "post_result_data", "(", "self", ",", "client", ",", "check", ",", "output", ",", "status", ")", ":", "data", "=", "{", "'source'", ":", "client", ",", "'name'", ":", "check", ",", "'output'", ":", "output", ",", "'status'", ":", "status", ",", ...
Posts check result data.
[ "Posts", "check", "result", "data", "." ]
train
https://github.com/sangoma/pysensu/blob/dc6799edbf2635247aec61fcf45b04ddec1beb49/pysensu/api.py#L287-L298
sangoma/pysensu
pysensu/api.py
SensuAPI.create_stash
def create_stash(self, payload, path=None): """ Create a stash. (JSON document) """ if path: self._request('POST', '/stashes/{}'.format(path), json=payload) else: self._request('POST', '/stashes', json=payload) return True
python
def create_stash(self, payload, path=None): """ Create a stash. (JSON document) """ if path: self._request('POST', '/stashes/{}'.format(path), json=payload) else: self._request('POST', '/stashes', json=payload) return True
[ "def", "create_stash", "(", "self", ",", "payload", ",", "path", "=", "None", ")", ":", "if", "path", ":", "self", ".", "_request", "(", "'POST'", ",", "'/stashes/{}'", ".", "format", "(", "path", ")", ",", "json", "=", "payload", ")", "else", ":", ...
Create a stash. (JSON document)
[ "Create", "a", "stash", ".", "(", "JSON", "document", ")" ]
train
https://github.com/sangoma/pysensu/blob/dc6799edbf2635247aec61fcf45b04ddec1beb49/pysensu/api.py#L310-L319
sangoma/pysensu
pysensu/api.py
SensuAPI.get_subscriptions
def get_subscriptions(self, nodes=[]): """ Returns all the channels where (optionally specified) nodes are subscribed """ if len(nodes) > 0: data = [node for node in self.get_clients() if node['name'] in nodes] else: data = self.get_clients() channels = [] for client in data: if 'subscriptions' in client: if isinstance(client['subscriptions'], list): for channel in client['subscriptions']: if channel not in channels: channels.append(channel) else: if client['subscriptions'] not in channels: channels.append(client['subscriptions']) return channels
python
def get_subscriptions(self, nodes=[]): """ Returns all the channels where (optionally specified) nodes are subscribed """ if len(nodes) > 0: data = [node for node in self.get_clients() if node['name'] in nodes] else: data = self.get_clients() channels = [] for client in data: if 'subscriptions' in client: if isinstance(client['subscriptions'], list): for channel in client['subscriptions']: if channel not in channels: channels.append(channel) else: if client['subscriptions'] not in channels: channels.append(client['subscriptions']) return channels
[ "def", "get_subscriptions", "(", "self", ",", "nodes", "=", "[", "]", ")", ":", "if", "len", "(", "nodes", ")", ">", "0", ":", "data", "=", "[", "node", "for", "node", "in", "self", ".", "get_clients", "(", ")", "if", "node", "[", "'name'", "]", ...
Returns all the channels where (optionally specified) nodes are subscribed
[ "Returns", "all", "the", "channels", "where", "(", "optionally", "specified", ")", "nodes", "are", "subscribed" ]
train
https://github.com/sangoma/pysensu/blob/dc6799edbf2635247aec61fcf45b04ddec1beb49/pysensu/api.py#L331-L349
sangoma/pysensu
pysensu/api.py
SensuAPI.get_subscriptions_channel
def get_subscriptions_channel(self, search_channel): """ Return all the nodes that are subscribed to the specified channel """ data = self.get_clients() clients = [] for client in data: if 'subscriptions' in client: if isinstance(client['subscriptions'], list): if search_channel in client['subscriptions']: clients.append(client['name']) else: if search_channel == client['subscriptions']: clients.append(client['name']) return clients
python
def get_subscriptions_channel(self, search_channel): """ Return all the nodes that are subscribed to the specified channel """ data = self.get_clients() clients = [] for client in data: if 'subscriptions' in client: if isinstance(client['subscriptions'], list): if search_channel in client['subscriptions']: clients.append(client['name']) else: if search_channel == client['subscriptions']: clients.append(client['name']) return clients
[ "def", "get_subscriptions_channel", "(", "self", ",", "search_channel", ")", ":", "data", "=", "self", ".", "get_clients", "(", ")", "clients", "=", "[", "]", "for", "client", "in", "data", ":", "if", "'subscriptions'", "in", "client", ":", "if", "isinstan...
Return all the nodes that are subscribed to the specified channel
[ "Return", "all", "the", "nodes", "that", "are", "subscribed", "to", "the", "specified", "channel" ]
train
https://github.com/sangoma/pysensu/blob/dc6799edbf2635247aec61fcf45b04ddec1beb49/pysensu/api.py#L351-L365
denfromufa/clrmagic
setup.py
clrmagic_build_ext.build_extension
def build_extension(self, ext): """ build clrmagic.dll using csc or mcs """ if sys.platform == "win32": _clr_compiler = "C:\\Windows\\Microsoft.NET\\Framework\\v4.0.30319\\csc.exe" else: _clr_compiler = "mcs" cmd = [ _clr_compiler, "/target:library", "clrmagic.cs" ] check_call(" ".join(cmd), shell=True)
python
def build_extension(self, ext): """ build clrmagic.dll using csc or mcs """ if sys.platform == "win32": _clr_compiler = "C:\\Windows\\Microsoft.NET\\Framework\\v4.0.30319\\csc.exe" else: _clr_compiler = "mcs" cmd = [ _clr_compiler, "/target:library", "clrmagic.cs" ] check_call(" ".join(cmd), shell=True)
[ "def", "build_extension", "(", "self", ",", "ext", ")", ":", "if", "sys", ".", "platform", "==", "\"win32\"", ":", "_clr_compiler", "=", "\"C:\\\\Windows\\\\Microsoft.NET\\\\Framework\\\\v4.0.30319\\\\csc.exe\"", "else", ":", "_clr_compiler", "=", "\"mcs\"", "cmd", "=...
build clrmagic.dll using csc or mcs
[ "build", "clrmagic", ".", "dll", "using", "csc", "or", "mcs" ]
train
https://github.com/denfromufa/clrmagic/blob/065215988f112419ca99abe140f13b03e3a14829/setup.py#L11-L24
pebble/libpebble2
libpebble2/events/mixin.py
EventSourceMixin.wait_for_event
def wait_for_event(self, event, timeout=10): """ Block waiting for the given event. Returns the event params. :param event: The event to handle. :return: The event params. :param timeout: The maximum time to wait before raising :exc:`.TimeoutError`. """ return self.__handler.wait_for_event(event, timeout=timeout)
python
def wait_for_event(self, event, timeout=10): """ Block waiting for the given event. Returns the event params. :param event: The event to handle. :return: The event params. :param timeout: The maximum time to wait before raising :exc:`.TimeoutError`. """ return self.__handler.wait_for_event(event, timeout=timeout)
[ "def", "wait_for_event", "(", "self", ",", "event", ",", "timeout", "=", "10", ")", ":", "return", "self", ".", "__handler", ".", "wait_for_event", "(", "event", ",", "timeout", "=", "timeout", ")" ]
Block waiting for the given event. Returns the event params. :param event: The event to handle. :return: The event params. :param timeout: The maximum time to wait before raising :exc:`.TimeoutError`.
[ "Block", "waiting", "for", "the", "given", "event", ".", "Returns", "the", "event", "params", "." ]
train
https://github.com/pebble/libpebble2/blob/23e2eb92cfc084e6f9e8c718711ac994ef606d18/libpebble2/events/mixin.py#L32-L40
avalente/appmetrics
appmetrics/histogram.py
search_greater
def search_greater(values, target): """ Return the first index for which target is greater or equal to the first item of the tuple found in values """ first = 0 last = len(values) while first < last: middle = (first + last) // 2 if values[middle][0] < target: first = middle + 1 else: last = middle return first
python
def search_greater(values, target): """ Return the first index for which target is greater or equal to the first item of the tuple found in values """ first = 0 last = len(values) while first < last: middle = (first + last) // 2 if values[middle][0] < target: first = middle + 1 else: last = middle return first
[ "def", "search_greater", "(", "values", ",", "target", ")", ":", "first", "=", "0", "last", "=", "len", "(", "values", ")", "while", "first", "<", "last", ":", "middle", "=", "(", "first", "+", "last", ")", "//", "2", "if", "values", "[", "middle",...
Return the first index for which target is greater or equal to the first item of the tuple found in values
[ "Return", "the", "first", "index", "for", "which", "target", "is", "greater", "or", "equal", "to", "the", "first", "item", "of", "the", "tuple", "found", "in", "values" ]
train
https://github.com/avalente/appmetrics/blob/366fc7e1ca897e49a2227cbfa43bfa02a47f1acc/appmetrics/histogram.py#L33-L48
avalente/appmetrics
appmetrics/histogram.py
ReservoirBase.add
def add(self, value): """ Add a value to the reservoir The value will be casted to a floating-point, so a TypeError or a ValueError may be raised. """ if not isinstance(value, float): value = float(value) return self._do_add(value)
python
def add(self, value): """ Add a value to the reservoir The value will be casted to a floating-point, so a TypeError or a ValueError may be raised. """ if not isinstance(value, float): value = float(value) return self._do_add(value)
[ "def", "add", "(", "self", ",", "value", ")", ":", "if", "not", "isinstance", "(", "value", ",", "float", ")", ":", "value", "=", "float", "(", "value", ")", "return", "self", ".", "_do_add", "(", "value", ")" ]
Add a value to the reservoir The value will be casted to a floating-point, so a TypeError or a ValueError may be raised.
[ "Add", "a", "value", "to", "the", "reservoir", "The", "value", "will", "be", "casted", "to", "a", "floating", "-", "point", "so", "a", "TypeError", "or", "a", "ValueError", "may", "be", "raised", "." ]
train
https://github.com/avalente/appmetrics/blob/366fc7e1ca897e49a2227cbfa43bfa02a47f1acc/appmetrics/histogram.py#L59-L69
avalente/appmetrics
appmetrics/histogram.py
ReservoirBase.same_kind
def same_kind(self, other): """ Return True if "other" is an object of the same type and it was instantiated with the same parameters """ return type(self) is type(other) and self._same_parameters(other)
python
def same_kind(self, other): """ Return True if "other" is an object of the same type and it was instantiated with the same parameters """ return type(self) is type(other) and self._same_parameters(other)
[ "def", "same_kind", "(", "self", ",", "other", ")", ":", "return", "type", "(", "self", ")", "is", "type", "(", "other", ")", "and", "self", ".", "_same_parameters", "(", "other", ")" ]
Return True if "other" is an object of the same type and it was instantiated with the same parameters
[ "Return", "True", "if", "other", "is", "an", "object", "of", "the", "same", "type", "and", "it", "was", "instantiated", "with", "the", "same", "parameters" ]
train
https://github.com/avalente/appmetrics/blob/366fc7e1ca897e49a2227cbfa43bfa02a47f1acc/appmetrics/histogram.py#L87-L93
avalente/appmetrics
appmetrics/histogram.py
ExponentialDecayingReservoir._lookup
def _lookup(self, timestamp): """ Return the index of the value associated with "timestamp" if any, else None. Since the timestamps are floating-point values, they are considered equal if their absolute difference is smaller than self.EPSILON """ idx = search_greater(self._values, timestamp) if (idx < len(self._values) and math.fabs(self._values[idx][0] - timestamp) < self.EPSILON): return idx return None
python
def _lookup(self, timestamp): """ Return the index of the value associated with "timestamp" if any, else None. Since the timestamps are floating-point values, they are considered equal if their absolute difference is smaller than self.EPSILON """ idx = search_greater(self._values, timestamp) if (idx < len(self._values) and math.fabs(self._values[idx][0] - timestamp) < self.EPSILON): return idx return None
[ "def", "_lookup", "(", "self", ",", "timestamp", ")", ":", "idx", "=", "search_greater", "(", "self", ".", "_values", ",", "timestamp", ")", "if", "(", "idx", "<", "len", "(", "self", ".", "_values", ")", "and", "math", ".", "fabs", "(", "self", "....
Return the index of the value associated with "timestamp" if any, else None. Since the timestamps are floating-point values, they are considered equal if their absolute difference is smaller than self.EPSILON
[ "Return", "the", "index", "of", "the", "value", "associated", "with", "timestamp", "if", "any", "else", "None", ".", "Since", "the", "timestamps", "are", "floating", "-", "point", "values", "they", "are", "considered", "equal", "if", "their", "absolute", "di...
train
https://github.com/avalente/appmetrics/blob/366fc7e1ca897e49a2227cbfa43bfa02a47f1acc/appmetrics/histogram.py#L258-L270
avalente/appmetrics
appmetrics/histogram.py
ExponentialDecayingReservoir._put
def _put(self, timestamp, value): """Replace the value associated with "timestamp" or add the new value""" idx = self._lookup(timestamp) if idx is not None: self._values[idx] = (timestamp, value) else: self._values.append((timestamp, value))
python
def _put(self, timestamp, value): """Replace the value associated with "timestamp" or add the new value""" idx = self._lookup(timestamp) if idx is not None: self._values[idx] = (timestamp, value) else: self._values.append((timestamp, value))
[ "def", "_put", "(", "self", ",", "timestamp", ",", "value", ")", ":", "idx", "=", "self", ".", "_lookup", "(", "timestamp", ")", "if", "idx", "is", "not", "None", ":", "self", ".", "_values", "[", "idx", "]", "=", "(", "timestamp", ",", "value", ...
Replace the value associated with "timestamp" or add the new value
[ "Replace", "the", "value", "associated", "with", "timestamp", "or", "add", "the", "new", "value" ]
train
https://github.com/avalente/appmetrics/blob/366fc7e1ca897e49a2227cbfa43bfa02a47f1acc/appmetrics/histogram.py#L272-L279
avalente/appmetrics
appmetrics/histogram.py
Histogram.get
def get(self): """Return the computed statistics over the gathered data""" values = self.reservoir.sorted_values def safe(f, *args): try: return f(values, *args) except exceptions.StatisticsError: return 0.0 plevels = [50, 75, 90, 95, 99, 99.9] percentiles = [safe(statistics.percentile, p) for p in plevels] try: histogram = statistics.get_histogram(values) except exceptions.StatisticsError: histogram = [(0, 0)] res = dict( kind="histogram", min=values[0] if values else 0, max=values[-1] if values else 0, arithmetic_mean=safe(statistics.mean), geometric_mean=safe(statistics.geometric_mean), harmonic_mean=safe(statistics.harmonic_mean), median=safe(statistics.median), variance=safe(statistics.variance), standard_deviation=safe(statistics.stdev), skewness=safe(statistics.skewness), kurtosis=safe(statistics.kurtosis), percentile=py3comp.zip(plevels, percentiles), histogram=histogram, n=len(values)) return res
python
def get(self): """Return the computed statistics over the gathered data""" values = self.reservoir.sorted_values def safe(f, *args): try: return f(values, *args) except exceptions.StatisticsError: return 0.0 plevels = [50, 75, 90, 95, 99, 99.9] percentiles = [safe(statistics.percentile, p) for p in plevels] try: histogram = statistics.get_histogram(values) except exceptions.StatisticsError: histogram = [(0, 0)] res = dict( kind="histogram", min=values[0] if values else 0, max=values[-1] if values else 0, arithmetic_mean=safe(statistics.mean), geometric_mean=safe(statistics.geometric_mean), harmonic_mean=safe(statistics.harmonic_mean), median=safe(statistics.median), variance=safe(statistics.variance), standard_deviation=safe(statistics.stdev), skewness=safe(statistics.skewness), kurtosis=safe(statistics.kurtosis), percentile=py3comp.zip(plevels, percentiles), histogram=histogram, n=len(values)) return res
[ "def", "get", "(", "self", ")", ":", "values", "=", "self", ".", "reservoir", ".", "sorted_values", "def", "safe", "(", "f", ",", "*", "args", ")", ":", "try", ":", "return", "f", "(", "values", ",", "*", "args", ")", "except", "exceptions", ".", ...
Return the computed statistics over the gathered data
[ "Return", "the", "computed", "statistics", "over", "the", "gathered", "data" ]
train
https://github.com/avalente/appmetrics/blob/366fc7e1ca897e49a2227cbfa43bfa02a47f1acc/appmetrics/histogram.py#L352-L386
pebble/libpebble2
libpebble2/services/appglances.py
AppGlances.reload_glance
def reload_glance(self, target_app, slices=None):
    """
    Reloads an app's glance. Blocks as long as necessary.

    :param target_app: The UUID of the app for which to reload its glance.
    :type target_app: ~uuid.UUID
    :param slices: The slices with which to reload the app's glance.
    :type slices: list[.AppGlanceSlice]
    """
    # Build the glance blob; a missing slice list means "no slices".
    blob = AppGlance(
        version=1,
        creation_time=time.time(),
        slices=(slices or []),
    )
    # Insert synchronously into the watch's glance blob database and
    # wait for the write to be acknowledged.
    insert = SyncWrapper(self._blobdb.insert, BlobDatabaseID.AppGlance,
                         target_app, blob.serialise())
    insert.wait()
[ "def", "reload_glance", "(", "self", ",", "target_app", ",", "slices", "=", "None", ")", ":", "glance", "=", "AppGlance", "(", "version", "=", "1", ",", "creation_time", "=", "time", ".", "time", "(", ")", ",", "slices", "=", "(", "slices", "or", "["...
Reloads an app's glance. Blocks as long as necessary. :param target_app: The UUID of the app for which to reload its glance. :type target_app: ~uuid.UUID :param slices: The slices with which to reload the app's glance. :type slices: list[.AppGlanceSlice]
[ "Reloads", "an", "app", "s", "glance", ".", "Blocks", "as", "long", "as", "necessary", "." ]
train
https://github.com/pebble/libpebble2/blob/23e2eb92cfc084e6f9e8c718711ac994ef606d18/libpebble2/services/appglances.py#L27-L41
INM-6/hybridLFPy
examples/Hagen_et_al_2016_cercor/plot_methods.py
network_sketch
def network_sketch(ax, highlight=None, labels=True, yscaling=1.):
    '''
    Draw a schematic sketch of the modelled network into *ax*: four cortical
    layers (L2/3, L4, L5, L6), each holding one excitatory (E) and one
    inhibitory (I) population box, a thalamic (TC) population below cortex,
    and arrows for the intracortical, cortico-cortical and thalamo-cortical
    connections.

    Parameters
    ----------
    ax : matplotlib.axes.AxesSubplot
        axes object the sketch is drawn into
    highlight : None or string
        if string, then only the label of this population is set
        and the box is highlighted
    labels : bool
        if True, draw population names, layer names and the
        excitatory/inhibitory arrow legend
    yscaling : float
        vertical scaling factor applied to layer heights and y positions

    Returns
    -------
    ax : matplotlib.axes.AxesSubplot
        the axes object the sketch was drawn into
    '''
    ## mapping from population name to (layer index, E/I index) into c_pop_pos
    name_to_id_mapping={'L6E':(0,0),
                        'L6I':(0,1),
                        'L5E':(1,0),
                        'L5I':(1,1),
                        'L4E':(2,0),
                        'L4I':(2,1),
                        'L23E':(3,0),
                        'L23I':(3,1)
                        }

    showgrid=False ## switch on/off grid (not referenced below)

    ## sketch parameters
    layer_x=0.1 ## x position of left boundary of cortex layers
    layer6_y=0.2*yscaling ## y position of lower boundary of layer 6
    layer_width=0.65 ## width of cortex layers
    layer_height=0.21*yscaling ## height of cortex layers
    layer_colors=['0.9','0.8','0.9','0.8'] ## layer colors
    c_pop_size=0.15 ## cortex population size
    c_pop_dist=0.17 ## distance between cortex populations
    t_pop_size=0.15 ## thalamus population size
    t_pop_y=0.0 ## y position of lower thalamus boundary
    axon_cell_sep=0.04 ## distance between axons and populations
    cc_input_y=0.6*yscaling ## y position of cortico-cortical synapses (relative to cortex population)
    tc_input_y=0.4*yscaling ## y position of thalamo-cortical synapses (relative to cortex population)
    exc_clr = 'k' if analysis_params.bw else analysis_params.colorE ## color of excitatory axons/synapses
    inh_clr = 'gray' if analysis_params.bw else analysis_params.colorI ## color of inhibitory axons/synapses
    lw_pop=0.5 ## linewidth for populations
    lw_axons=0.4 ## linewidth for axons
    arrow_size=0.013 ## arrow size
    conn_radius=0.005 ## radius of connector marker
    legend_length=0.07 ## length of legend arrows
    colors = phlp.get_colors(8)[::-1] ## colors of each population

    fontdict1={'fontsize': 6, ## population name
               'weight':'normal',
               'horizontalalignment':'center',
               'verticalalignment':'center'}
    fontdict2={'fontsize': 6, ## cortico-cortical input
               'weight':'normal',
               'horizontalalignment':'center',
               'verticalalignment':'center'}
    fontdict3={'fontsize': 6, ## legend
               'weight':'normal',
               'horizontalalignment':'left',
               'verticalalignment':'center'}

    ##########################################################################
    ## local drawing helpers
    def draw_box(ax,pos,lw=1.,ls='solid',eclr='k',fclr='w',zorder=0,
                 clip_on=False,
                 boxstyle=patches.BoxStyle("Round", pad=0.0),
                 padadjust=0.):
        '''Draws a rectangle. pos is [x, y, width, height]; padadjust
        shrinks the box to compensate for the rounded-corner padding.'''
        rect = patches.FancyBboxPatch((pos[0]+padadjust, pos[1]+padadjust),
                                      pos[2]-2*padadjust, pos[3]-2*padadjust,
                                      ec=eclr, fc=fclr, lw=lw, ls=ls,
                                      zorder=zorder, clip_on=clip_on,
                                      boxstyle=boxstyle)
        ax.add_patch(rect)

    def draw_circle(ax,xy,radius,lw=1.,ls='solid',eclr='k',fclr='w',zorder=0):
        '''Draws a circle at position xy.'''
        circ = plt.Circle((xy[0],xy[1]),radius=radius,
                          ec=eclr,fc=fclr,lw=lw,ls=ls,zorder=zorder)
        ax.add_patch(circ)

    def put_text(ax,xy,txt,clr,fontdict,zorder=10):
        '''Puts text to a specific position.'''
        ax.text(xy[0],xy[1],txt,fontdict=fontdict,color=clr,zorder=zorder)

    def draw_line(ax,path,lw=1.,ls='solid',lclr='k',zorder=0):
        '''Draws a path given as a list of [x, y] vertices.'''
        pth = Path(np.array(path))
        patch = patches.PathPatch(pth, fill=False,
                                  lw=lw,ls=ls,ec=lclr,fc=lclr,zorder=zorder)
        ax.add_patch(patch)

    def draw_arrow(ax,path,lw=1.0,ls='solid',lclr='k',arrow_size=0.025,zorder=0):
        '''Draws a path with an arrow head at the end.'''
        x=path[-2][0]
        y=path[-2][1]
        dx=path[-1][0]-path[-2][0]
        dy=path[-1][1]-path[-2][1]
        ## unit vector along the final segment
        D=np.array([dx,dy])
        D=D/np.sqrt(D[0]**2+D[1]**2)
        ## shorten the drawn path so it does not overdraw the arrow head
        path2=np.array(path).copy()
        path2[-1,:]=path2[-1,:]-arrow_size*D
        pth = Path(np.array(path2))
        patch = patches.PathPatch(pth, fill=False,
                                  lw=lw,ls=ls,ec=lclr,fc=lclr,zorder=zorder)
        ax.add_patch(patch)
        arr=patches.FancyArrow(
            x,y,dx,dy,
            length_includes_head=True,width=0.0,head_width=arrow_size,
            overhang=0.2,ec=lclr,fc=lclr,linewidth=0)
        ax.add_patch(arr)

    ##################################################
    ## populations
    ## cortex (drawn bottom-up: iteration i=0 is L6, i=3 is L2/3)
    layer_pos=[]
    c_pop_pos=[]
    for i in xrange(4):
        ## cortex layers
        layer_pos+=[[layer_x,layer6_y+i*layer_height*yscaling,layer_width,layer_height]] ## layer positions
        draw_box(ax,layer_pos[i],lw=0.,fclr=layer_colors[i],zorder=0)
        ## cortex populations
        l_margin=(layer_width-2.*c_pop_size-c_pop_dist)/2.
        b_margin=(layer_height-c_pop_size)/2.
        ## positions of cortex populations
        c_pop_pos+=[[
            [layer_pos[i][0] + l_margin,
             layer_pos[i][1] + b_margin, c_pop_size, c_pop_size], ## E
            [layer_pos[i][0] + l_margin + c_pop_size + c_pop_dist,
             layer_pos[i][1] + b_margin, c_pop_size, c_pop_size] ]] ## I
        draw_box(ax,c_pop_pos[i][0],lw=lw_pop,eclr='k',fclr=colors[i*2+1],zorder=2,
                 boxstyle=patches.BoxStyle("Round", pad=0.02),
                 padadjust=0.02) ## E
        draw_box(ax,c_pop_pos[i][1],lw=lw_pop,eclr='k',fclr=colors[i*2],zorder=2,
                 boxstyle=patches.BoxStyle("Round", pad=0.02),
                 padadjust=0.02) ## I

    ## thalamus
    c_center_x=layer_x+layer_width/2. ## x position of cortex center
    t_pos=[c_center_x-t_pop_size/2.,t_pop_y*yscaling,t_pop_size,t_pop_size] ## thalamus position
    draw_box(ax,t_pos,lw=lw_pop,eclr='k',fclr='k',zorder=2,
             boxstyle=patches.BoxStyle("Round", pad=0.02),
             padadjust=0.02) ## Th

    ##################################################
    ## intracortical axons
    axon_x_dist=(c_pop_dist-2.*axon_cell_sep)/7.
    assert(axon_x_dist>0.)
    axon_y_dist=c_pop_size/9.#*yscaling
    c_axon_x=[]
    c_axon_y=[]
    # x positions of vertical intracortical axons
    for i in xrange(4): # pre layer
        exc=c_pop_pos[i][0][0]+c_pop_size+axon_cell_sep+i*axon_x_dist ## E
        inh=exc+4.*axon_x_dist ## I
        c_axon_x+=[[exc,inh]]
    # y positions of horizontal intracortical axons
    for i in xrange(4): ## post layer
        c_axon_y+=[[]]
        for j in xrange(4): ## pre layer
            exc=c_pop_pos[i][0][1]+(j+1.)*axon_y_dist ## E
            inh=c_pop_pos[i][0][1]+c_pop_size-(j+1.)*axon_y_dist ## I
            c_axon_y[i]+=[[exc,inh]]
    ## vertical intracortical axons
    for i in xrange(4):
        draw_line(ax,[[c_axon_x[i][0],c_axon_y[0][i][0]],[c_axon_x[i][0],c_axon_y[-1][i][0]]],lw=lw_axons,ls='solid',lclr=exc_clr,zorder=1)
        draw_line(ax,[[c_axon_x[i][1],c_axon_y[0][i][1]],[c_axon_x[i][1],c_axon_y[-1][i][1]]],lw=lw_axons,ls='solid',lclr=inh_clr,zorder=0)
    ## horizontal intracortical axons (synapse arrows onto both target boxes)
    for i in xrange(4): ## post layer
        for j in xrange(4): ## pre layer
            path=[[c_axon_x[j][0],c_axon_y[i][j][0]],[c_pop_pos[i][0][0]+c_pop_size,c_axon_y[i][j][0]]]
            draw_arrow(ax,path,lw=lw_axons,ls='solid',lclr=exc_clr,arrow_size=arrow_size,zorder=1)
            path=[[c_axon_x[j][0],c_axon_y[i][j][0]],[c_pop_pos[i][1][0],c_axon_y[i][j][0]]]
            draw_arrow(ax,path,lw=lw_axons,ls='solid',lclr=exc_clr,arrow_size=arrow_size,zorder=1)
            path=[[c_axon_x[j][1],c_axon_y[i][j][1]],[c_pop_pos[i][1][0],c_axon_y[i][j][1]]]
            draw_arrow(ax,path,lw=lw_axons,ls='solid',lclr=inh_clr,arrow_size=arrow_size,zorder=0)
            path=[[c_axon_x[j][1],c_axon_y[i][j][1]],[c_pop_pos[i][0][0]+c_pop_size,c_axon_y[i][j][1]]]
            draw_arrow(ax,path,lw=lw_axons,ls='solid',lclr=inh_clr,arrow_size=arrow_size,zorder=0)
            ## connector markers
            draw_circle(ax,[c_axon_x[j][0],c_axon_y[i][j][0]],conn_radius,lw=0,fclr=exc_clr,zorder=0)
            draw_circle(ax,[c_axon_x[j][1],c_axon_y[i][j][1]],conn_radius,lw=0,fclr=inh_clr,zorder=0)
    ## cell outputs (short stub from each box down to its vertical axon)
    for i in xrange(4):
        path=[[c_pop_pos[i][0][0]+c_pop_size/2.,c_pop_pos[i][0][1]],
              [c_pop_pos[i][0][0]+c_pop_size/2.,c_pop_pos[i][0][1]-axon_y_dist],
              [c_axon_x[i][0],c_pop_pos[i][0][1]-axon_y_dist]]
        draw_line(ax,path,lw=lw_axons,ls='solid',lclr=exc_clr,zorder=1) ## excitatory
        draw_circle(ax,path[-1],conn_radius,lw=0,fclr=exc_clr,zorder=0) ## connector
        path=[[c_pop_pos[i][1][0]+c_pop_size/2.,c_pop_pos[i][1][1]],
              [c_pop_pos[i][1][0]+c_pop_size/2.,c_pop_pos[i][1][1]-axon_y_dist],
              [c_axon_x[-1-i][1],c_pop_pos[i][1][1]-axon_y_dist]]
        draw_line(ax,path,lw=lw_axons,ls='solid',lclr=inh_clr,zorder=1) ## inhibitory
        draw_circle(ax,path[-1],conn_radius,lw=0,fclr=inh_clr,zorder=0) ## connector
    ## remaining first segments for L6
    path=[[c_axon_x[0][0],c_pop_pos[0][0][1]-axon_y_dist],[c_axon_x[0][0],c_axon_y[0][0][0]]]
    draw_line(ax,path,lw=lw_axons,ls='solid',lclr=exc_clr,zorder=0)
    path=[[c_axon_x[-1][1],c_pop_pos[0][1][1]-axon_y_dist],[c_axon_x[-1][1],c_axon_y[0][0][1]]]
    draw_line(ax,path,lw=lw_axons,ls='solid',lclr=inh_clr,zorder=0)

    ##################################################
    ## cortico-cortical axons
    ## horizontal branch in L1
    path=[[0.,c_pop_pos[-1][0][1]+c_pop_size+axon_cell_sep],
          [c_pop_pos[-1][1][0]+c_pop_size+axon_cell_sep,c_pop_pos[-1][0][1]+c_pop_size+axon_cell_sep]]
    draw_line(ax,path,lw=lw_axons,ls='solid',lclr=exc_clr,zorder=1)
    ## vertical branches
    path=[[c_pop_pos[-1][0][0]-axon_cell_sep,c_pop_pos[-1][0][1]+c_pop_size+axon_cell_sep],
          [c_pop_pos[-1][0][0]-axon_cell_sep,c_pop_pos[0][0][1]+cc_input_y*c_pop_size]]
    draw_line(ax,path,lw=lw_axons,ls='solid',lclr=exc_clr,zorder=1) ## cc input to exc pop
    draw_circle(ax,path[0],conn_radius,lw=0,fclr=exc_clr,zorder=0) ## connector
    path=[[c_pop_pos[-1][1][0]+c_pop_size+axon_cell_sep,c_pop_pos[-1][0][1]+c_pop_size+axon_cell_sep],
          [c_pop_pos[-1][1][0]+c_pop_size+axon_cell_sep,c_pop_pos[0][0][1]+cc_input_y*c_pop_size]]
    draw_line(ax,path,lw=lw_axons,ls='solid',lclr=exc_clr,zorder=1) ## cc input to inh pop
    draw_circle(ax,path[0],conn_radius,lw=0,fclr=exc_clr,zorder=0) ## connector
    ## horizontal branches (arrows)
    for i in xrange(4):
        ## cc input to excitatory populations
        path=[[c_pop_pos[-1][0][0]-axon_cell_sep,c_pop_pos[i][0][1]+cc_input_y*c_pop_size],
              [c_pop_pos[-1][0][0],c_pop_pos[i][0][1]+cc_input_y*c_pop_size],]
        draw_arrow(ax,path,lw=lw_axons,ls='solid',lclr=exc_clr,arrow_size=arrow_size,zorder=0)
        draw_circle(ax,path[0],conn_radius,lw=0,fclr=exc_clr,zorder=0) ## connector
        ## cc input to inhibitory populations
        path=[[c_pop_pos[-1][1][0]+c_pop_size+axon_cell_sep,c_pop_pos[i][0][1]+cc_input_y*c_pop_size],
              [c_pop_pos[-1][1][0]+c_pop_size,c_pop_pos[i][0][1]+cc_input_y*c_pop_size]]
        draw_arrow(ax,path,lw=lw_axons,ls='solid',lclr=exc_clr,arrow_size=arrow_size,zorder=0)
        draw_circle(ax,path[0],conn_radius,lw=0,fclr=exc_clr,zorder=0) ## connector

    ##################################################
    ## thalamo-cortical axons
    path=[[t_pos[0]+t_pop_size/2.,t_pos[1]+t_pop_size],
          [t_pos[0]+t_pop_size/2.,t_pos[1]+t_pop_size+axon_y_dist]]
    draw_line(ax,path,lw=lw_axons,ls='solid',lclr=exc_clr,zorder=1) ## thalamic output
    draw_circle(ax,path[-1],conn_radius,lw=0,fclr=exc_clr,zorder=0) ## connector
    path=[[c_pop_pos[0][0][0]-(axon_cell_sep+axon_y_dist),t_pos[1]+t_pop_size+axon_y_dist],
          [c_pop_pos[0][1][0]+c_pop_size+(axon_cell_sep+axon_y_dist),t_pos[1]+t_pop_size+axon_y_dist]]
    draw_line(ax,path,lw=lw_axons,ls='solid',lclr=exc_clr,zorder=1) ## horizontal branch
    path=[[c_pop_pos[0][0][0]-(axon_cell_sep+axon_y_dist),t_pos[1]+t_pop_size+axon_y_dist],
          [c_pop_pos[0][0][0]-(axon_cell_sep+axon_y_dist),c_pop_pos[2][0][1]+tc_input_y*c_pop_size]]
    draw_line(ax,path,lw=lw_axons,ls='solid',lclr=exc_clr,zorder=1) ## left vertical branch
    path=[[c_pop_pos[0][1][0]+c_pop_size+(axon_cell_sep+axon_y_dist),t_pos[1]+t_pop_size+axon_y_dist],
          [c_pop_pos[0][1][0]+c_pop_size+(axon_cell_sep+axon_y_dist),c_pop_pos[2][0][1]+tc_input_y*c_pop_size]]
    draw_line(ax,path,lw=lw_axons,ls='solid',lclr=exc_clr,zorder=1) ## right vertical branch
    path=[[c_pop_pos[0][0][0]-(axon_cell_sep+axon_y_dist),c_pop_pos[2][0][1]+tc_input_y*c_pop_size],
          [c_pop_pos[0][0][0],c_pop_pos[2][0][1]+tc_input_y*c_pop_size],]
    draw_arrow(ax,path,lw=lw_axons,ls='solid',lclr=exc_clr,arrow_size=arrow_size,zorder=1) ## Th -> L4E synapses (arrows)
    draw_circle(ax,path[0],conn_radius,lw=0,fclr=exc_clr,zorder=0) ## connector
    path=[[c_pop_pos[0][0][0]-(axon_cell_sep+axon_y_dist),c_pop_pos[0][0][1]+tc_input_y*c_pop_size],
          [c_pop_pos[0][0][0],c_pop_pos[0][0][1]+tc_input_y*c_pop_size],]
    draw_arrow(ax,path,lw=lw_axons,ls='solid',lclr=exc_clr,arrow_size=arrow_size,zorder=1) ## Th -> L6E synapses (arrows)
    draw_circle(ax,path[0],conn_radius,lw=0,fclr=exc_clr,zorder=0) ## connector
    path=[[c_pop_pos[0][1][0]+c_pop_size+(axon_cell_sep+axon_y_dist),c_pop_pos[2][0][1]+tc_input_y*c_pop_size],
          [c_pop_pos[0][1][0]+c_pop_size,c_pop_pos[2][0][1]+tc_input_y*c_pop_size],]
    draw_arrow(ax,path,lw=lw_axons,ls='solid',lclr=exc_clr,arrow_size=arrow_size,zorder=1) ## Th -> L4I synapses (arrows)
    draw_circle(ax,path[0],conn_radius,lw=0,fclr=exc_clr,zorder=0) ## connector
    path=[[c_pop_pos[0][1][0]+c_pop_size+(axon_cell_sep+axon_y_dist),c_pop_pos[0][0][1]+tc_input_y*c_pop_size],
          [c_pop_pos[0][1][0]+c_pop_size,c_pop_pos[0][0][1]+tc_input_y*c_pop_size],]
    draw_arrow(ax,path,lw=lw_axons,ls='solid',lclr=exc_clr,arrow_size=arrow_size,zorder=1) ## Th -> L6I synapses (arrows)
    draw_circle(ax,path[0],conn_radius,lw=0,fclr=exc_clr,zorder=0) ## connector

    if labels:
        ##################################################
        ## legend
        legend_x=[t_pos[0]+t_pop_size+axon_cell_sep,t_pos[0]+t_pop_size+axon_cell_sep+legend_length]
        legend_y=[t_pos[1],(t_pos[1]+2*t_pop_size/3)]
        draw_arrow(ax,[[legend_x[0],legend_y[1]],[legend_x[1],legend_y[1]]],lw=lw_axons,ls='solid',lclr=exc_clr,arrow_size=arrow_size,zorder=1)
        draw_arrow(ax,[[legend_x[0],legend_y[0]],[legend_x[1],legend_y[0]]],lw=lw_axons,ls='solid',lclr=inh_clr,arrow_size=arrow_size,zorder=1)

        ##################################################
        ## population names
        put_text(ax,[t_pos[0]+t_pop_size/2.,(t_pos[1]+t_pop_size/2.)],r'TC','w',fontdict1)
        put_text(ax,[c_pop_pos[0][0][0]+c_pop_size/2.,c_pop_pos[0][0][1]+c_pop_size/2.],r'L6E','w' if analysis_params.bw else 'k',fontdict1)
        put_text(ax,[c_pop_pos[0][1][0]+c_pop_size/2.,c_pop_pos[0][1][1]+c_pop_size/2.],r'L6I','w' if analysis_params.bw else 'k',fontdict1)
        put_text(ax,[c_pop_pos[1][0][0]+c_pop_size/2.,c_pop_pos[1][0][1]+c_pop_size/2.],r'L5E','w' if analysis_params.bw else 'k',fontdict1)
        put_text(ax,[c_pop_pos[1][1][0]+c_pop_size/2.,c_pop_pos[1][1][1]+c_pop_size/2.],r'L5I','w' if analysis_params.bw else 'k',fontdict1)
        put_text(ax,[c_pop_pos[2][0][0]+c_pop_size/2.,c_pop_pos[2][0][1]+c_pop_size/2.],r'L4E','w' if analysis_params.bw else 'k',fontdict1)
        put_text(ax,[c_pop_pos[2][1][0]+c_pop_size/2.,c_pop_pos[2][1][1]+c_pop_size/2.],r'L4I','w' if analysis_params.bw else 'k',fontdict1)
        put_text(ax,[c_pop_pos[3][0][0]+c_pop_size/2.,c_pop_pos[3][0][1]+c_pop_size/2.],r'L23E','w' if analysis_params.bw else 'k',fontdict1)
        put_text(ax,[c_pop_pos[3][1][0]+c_pop_size/2.,c_pop_pos[3][1][1]+c_pop_size/2.],r'L23I','w' if analysis_params.bw else 'k',fontdict1)
        put_text(ax,[c_pop_pos[-1][0][0],
                     c_pop_pos[-1][0][1]+c_pop_size+1.7*axon_cell_sep + 0.01],
                 r'cortico-cortical input','k',fontdict2)
        put_text(ax,[legend_x[1]+axon_y_dist,legend_y[1]],r'excitatory','k',fontdict3)
        put_text(ax,[legend_x[1]+axon_y_dist,legend_y[0]],r'inhibitory','k',fontdict3)

        ##################################################
        ## layer names
        put_text(ax,[0.2*c_pop_pos[0][0][0],c_pop_pos[0][1][1]+c_pop_size/2.],r'L6','k',fontdict1)
        put_text(ax,[0.2*c_pop_pos[1][0][0],c_pop_pos[1][1][1]+c_pop_size/2.],r'L5','k',fontdict1)
        put_text(ax,[0.2*c_pop_pos[2][0][0],c_pop_pos[2][1][1]+c_pop_size/2.],r'L4','k',fontdict1)
        put_text(ax,[0.2*c_pop_pos[3][0][0],c_pop_pos[3][1][1]+c_pop_size/2.],r'L2/3','k',fontdict1)

    ## label only the requested population (useful when labels=False)
    if highlight is not None:
        ids = name_to_id_mapping[highlight]
        fontdict1['fontsize']=4
        put_text(ax,[c_pop_pos[ids[0]][ids[1]][0]+c_pop_size/2.,c_pop_pos[ids[0]][ids[1]][1]+c_pop_size/2.],highlight,'k',fontdict1)

    ## hide tick marks and fix the aspect ratio of the sketch
    ax.xaxis.set_ticks([])
    ax.yaxis.set_ticks([])
    ax.axis(ax.axis('equal'))

    return ax
[ "def", "network_sketch", "(", "ax", ",", "highlight", "=", "None", ",", "labels", "=", "True", ",", "yscaling", "=", "1.", ")", ":", "name_to_id_mapping", "=", "{", "'L6E'", ":", "(", "0", ",", "0", ")", ",", "'L6I'", ":", "(", "0", ",", "1", ")"...
highlight : None or string if string, then only the label of this population is set and the box is highlighted
[ "highlight", ":", "None", "or", "string", "if", "string", "then", "only", "the", "label", "of", "this", "population", "is", "set", "and", "the", "box", "is", "highlighted" ]
train
https://github.com/INM-6/hybridLFPy/blob/c38bdf38982c4624c2f70caeb50c40f1d5980abd/examples/Hagen_et_al_2016_cercor/plot_methods.py#L43-L382
INM-6/hybridLFPy
examples/Hagen_et_al_2016_cercor/plot_methods.py
plot_population
def plot_population(ax, params, aspect='tight', isometricangle=0, plot_somas = True, plot_morphos = False, num_unitsE = 1, num_unitsI=1, clip_dendrites=False, main_pops=True, Y = None, big=True, title='cell positions', rasterized=True): ''' Plot the geometry of the column model, optionally with somatic locations and optionally with reconstructed neurons kwargs: :: ax : matplotlib.axes.AxesSubplot aspect : str matplotlib.axis argument isometricangle : float pseudo-3d view angle plot_somas : bool plot soma locations plot_morphos : bool plot full morphologies num_unitsE : int number of excitatory morphos plotted per population num_unitsI : int number of inhibitory morphos plotted per population clip_dendrites : bool draw dendrites outside of axis mainpops : bool if True, plot only main pops, e.g. b23 and nb23 as L23I Y : None, or string if not None, plot only soma locations of Y big : bool if False: leave out labels and reduce marker size return: :: axis : list the plt.axis() corresponding to input aspect ''' name_to_id_mapping={'L6E':(3,0), 'L6I':(3,1), 'L5E':(2,0), 'L5I':(2,1), 'L4E':(1,0), 'L4I':(1,1), 'L23E':(0,0), 'L23I':(0,1) } # DRAW OUTLINE OF POPULATIONS ax.xaxis.set_ticks([]) ax.yaxis.set_ticks([]) #contact points if big: ax.plot(params.electrodeParams['x'], params.electrodeParams['z'], '.', marker='o', markersize=2, color='k', zorder=0) else: ax.plot(params.electrodeParams['x'], params.electrodeParams['z'], '.', marker='o', markersize=0.5, color='k', zorder=0) #outline of electrode x_0 = params.electrodeParams['r_z'][1, 1:-1] z_0 = params.electrodeParams['r_z'][0, 1:-1] x = np.r_[x_0[-1], x_0[::-1], -x_0[1:], -x_0[-1]] z = np.r_[100, z_0[::-1], z_0[1:], 100] ax.fill(x, z, fc='w', lw=0.1, ec='k', zorder=-0.1, clip_on=False) #outline of populations: #fetch the population radius from some population r = params.populationParams['p23']['radius'] theta0 = np.linspace(0, np.pi, 20) theta1 = np.linspace(np.pi, 2*np.pi, 20) zpos = np.r_[params.layerBoundaries[:, 0], 
params.layerBoundaries[-1, 1]] layers = ['L1', 'L2/3', 'L4', 'L5', 'L6'] for i, z in enumerate(params.layerBoundaries.mean(axis=1)): if big: ax.text(r, z, ' %s' % layers[i], va='center', ha='left') for i, zval in enumerate(zpos): if i == 0: ax.plot(r*np.cos(theta0), r*np.sin(theta0)*np.sin(isometricangle)+zval, color='k', zorder=-r, clip_on=False) ax.plot(r*np.cos(theta1), r*np.sin(theta1)*np.sin(isometricangle)+zval, color='k', zorder=r, clip_on=False) else: ax.plot(r*np.cos(theta0), r*np.sin(theta0)*np.sin(isometricangle)+zval, color='gray', zorder=-r, clip_on=False) ax.plot(r*np.cos(theta1), r*np.sin(theta1)*np.sin(isometricangle)+zval, color='k', zorder=r, clip_on=False) ax.plot([-r, -r], [zpos[0], zpos[-1]], 'k', zorder=0, clip_on=False) ax.plot([r, r], [zpos[0], zpos[-1]], 'k', zorder=0, clip_on=False) if big: #plot a horizontal radius scalebar ax.plot([0, r], [z_0.min()]*2, 'k', lw=1, zorder=0, clip_on=False) ax.text(r / 2., z_0.min()-100, '$r$ = %i $\mu$m' % int(r), ha='center') #plot a vertical depth scalebar ax.plot([-r]*2, [z_0.min()+50, z_0.min()-50], 'k', lw=1, zorder=0, clip_on=False) ax.text(-r, z_0.min(), r'100 $\mu$m', va='center', ha='right') ax.set_yticks([]) ax.set_yticklabels([]) #fake ticks: if big: for pos in zpos: ax.text(-r, pos, '$z$=%i-' % int(pos), ha='right', va='center') ax.set_title(title, va='bottom') axis = ax.axis(ax.axis(aspect)) def plot_pop_scatter(somapos, marker, colors, i): #scatter plot setting appropriate zorder for each datapoint by binning pitch = 100 for lower in np.arange(-600, 601, pitch): upper = lower + pitch inds = (somapos[:, 1] >= lower) & (somapos[:, 1] < upper) if np.any(inds): if big: ax.scatter(somapos[inds, 0], somapos[inds, 2] - somapos[inds, 1] * np.sin(isometricangle), s=10, facecolors=colors[i], edgecolors='gray', linewidths=0.1, zorder=lower, marker = marker, clip_on=False, rasterized=rasterized) else: ax.scatter(somapos[inds, 0], somapos[inds, 2] - somapos[inds, 1] * np.sin(isometricangle), s=3, 
facecolors=colors[i], edgecolors='gray', linewidths=0.1, zorder=lower, marker = marker, clip_on=False, rasterized=rasterized) # DRAW UNITS pop = zip(*params.mapping_Yy)[0] #plot a symbol in each location with a unit if plot_somas: if main_pops: colors = phlp.get_colors(np.unique(pop).size) #restructure E, I = zip(*params.y_in_Y) pops_ = [] if Y is None: for i in xrange(len(E)): pops_.append(E[i]) pops_.append(I[i]) else: ids = name_to_id_mapping[Y] if ids[1] == 0: pops_.append(E[ids[0]]) if ids[1] == 1: pops_.append(I[ids[0]]) for i, pops in enumerate(pops_): layer = np.unique(pop)[i] if layer.rfind('p') >= 0 or layer.rfind('E') >= 0: marker = '^' elif layer.rfind('b') >= 0 or layer.rfind('I') >= 0: marker = '*' elif layer.rfind('ss') >= 0: marker = 'o' else: raise Exception #get the somapos somapos = [] for j, lname in enumerate(pops): fname = glob.glob(os.path.join(params.populations_path, '%s*somapos.gdf' % lname))[0] if j == 0: somapos = np.loadtxt(fname).reshape((-1, 3)) else: somapos = np.r_['0, 2', somapos, np.loadtxt(fname).reshape((-1, 3))] somapos = somapos[::5, :] if Y is None: plot_pop_scatter(somapos, marker, colors, i) else: plot_pop_scatter(somapos, marker, colors, ids[0]*2+ids[1]) else: colors = phlp.get_colors(len(pop)) i = 0 for layer, _, _, _ in params.y_zip_list: #assign symbol if layer.rfind('p') >= 0 or layer.rfind('E') >= 0: marker = '^' elif layer.rfind('b') >= 0 or layer.rfind('I') >= 0: marker = '*' elif layer.rfind('ss') >= 0: marker = 'x' else: raise Exception #get the somapos fname = glob.glob(os.path.join(params.populations_path, '%s*somapos.gdf' % layer))[0] somapos = np.loadtxt(fname).reshape((-1, 3)) plot_pop_scatter(somapos, marker, colors, i) i += 1 #plot morphologies in their appropriate locations if plot_morphos: if main_pops: colors = phlp.get_colors(np.unique(pop).size) #restructure E, I = zip(*params.y_in_Y) pops_ = [] for i in xrange(len(E)): pops_.append(E[i]) pops_.append(I[i]) for i, pops in enumerate(pops_): layer = 
np.unique(pop)[i] #get the somapos and morphos somapos = [] for j, lname in enumerate(pops): fname = glob.glob(os.path.join(params.populations_path, '%s*somapos.gdf' % lname))[0] if j == 0: somapos = np.loadtxt(fname).reshape((-1, 3)) else: somapos = np.r_['0, 2', somapos, np.loadtxt(fname).reshape((-1, 3))] #add num_units morphologies per population with a random z-rotation if layer.rfind('p') >= 0 or layer.rfind('ss') >= 0 or layer.rfind('E') >= 0: num_units = num_unitsE else: num_units = num_unitsI if num_units > somapos.shape[0]: n = somapos.shape[0] else: n = num_units #find some morphos for this population: morphos = [] for fname in params.m_y: if fname.rfind(layer) >= 0: morphos.append(fname) #plot some units for j in range(n): cell = LFPy.Cell(morphology=os.path.join(params.PATH_m_y, np.random.permutation(morphos)[0]), nsegs_method='lambda_f', lambda_f=10, extracellular=False ) cell.set_pos(somapos[j, 0], somapos[j, 1], somapos[j, 2]) cell.set_rotation(z=np.random.rand()*np.pi*2) #set up a polycollection zips = [] for x, z in cell.get_idx_polygons(): zips.append(zip(x, z-somapos[j, 1] * np.sin(isometricangle))) polycol = PolyCollection(zips, edgecolors=colors[i], facecolors=colors[i], linewidths=(0.5), zorder=somapos[j, 1], clip_on=clip_dendrites, rasterized=rasterized) ax.add_collection(polycol) i += 1 else: colors = phlp.get_colors(len(pop)) i = 0 for layer, morpho, depth, size in params.y_zip_list: #get the somapos fname = glob.glob(os.path.join(params.populations_path, '%s*somapos.gdf' % layer))[0] somapos = np.loadtxt(fname).reshape((-1, 3)) #add num_units morphologies per population with a random z-rotation if layer.rfind('p') >= 0 or layer.rfind('ss') >= 0 or layer.rfind('E') >= 0: num_units = num_unitsE else: num_units = num_unitsI if num_units > somapos.shape[0]: n = somapos.shape[0] else: n = num_units #plot some units for j in range(n): cell = LFPy.Cell(morphology=os.path.join(params.PATH_m_y, morpho), nsegs_method='lambda_f', lambda_f=10, 
extracellular=False ) cell.set_pos(somapos[j, 0], somapos[j, 1], somapos[j, 2]) cell.set_rotation(z=np.random.rand()*np.pi*2) #set up a polycollection zips = [] for x, z in cell.get_idx_polygons(): zips.append(zip(x, z-somapos[j, 1] * np.sin(isometricangle))) polycol = PolyCollection(zips, edgecolors=colors[i], facecolors=colors[i], linewidths=(0.5), zorder=somapos[j, 1], clip_on=clip_dendrites, rasterized=rasterized) ax.add_collection(polycol) i += 1 return axis
python
def plot_population(ax, params, aspect='tight', isometricangle=0, plot_somas = True, plot_morphos = False, num_unitsE = 1, num_unitsI=1, clip_dendrites=False, main_pops=True, Y = None, big=True, title='cell positions', rasterized=True): ''' Plot the geometry of the column model, optionally with somatic locations and optionally with reconstructed neurons kwargs: :: ax : matplotlib.axes.AxesSubplot aspect : str matplotlib.axis argument isometricangle : float pseudo-3d view angle plot_somas : bool plot soma locations plot_morphos : bool plot full morphologies num_unitsE : int number of excitatory morphos plotted per population num_unitsI : int number of inhibitory morphos plotted per population clip_dendrites : bool draw dendrites outside of axis mainpops : bool if True, plot only main pops, e.g. b23 and nb23 as L23I Y : None, or string if not None, plot only soma locations of Y big : bool if False: leave out labels and reduce marker size return: :: axis : list the plt.axis() corresponding to input aspect ''' name_to_id_mapping={'L6E':(3,0), 'L6I':(3,1), 'L5E':(2,0), 'L5I':(2,1), 'L4E':(1,0), 'L4I':(1,1), 'L23E':(0,0), 'L23I':(0,1) } # DRAW OUTLINE OF POPULATIONS ax.xaxis.set_ticks([]) ax.yaxis.set_ticks([]) #contact points if big: ax.plot(params.electrodeParams['x'], params.electrodeParams['z'], '.', marker='o', markersize=2, color='k', zorder=0) else: ax.plot(params.electrodeParams['x'], params.electrodeParams['z'], '.', marker='o', markersize=0.5, color='k', zorder=0) #outline of electrode x_0 = params.electrodeParams['r_z'][1, 1:-1] z_0 = params.electrodeParams['r_z'][0, 1:-1] x = np.r_[x_0[-1], x_0[::-1], -x_0[1:], -x_0[-1]] z = np.r_[100, z_0[::-1], z_0[1:], 100] ax.fill(x, z, fc='w', lw=0.1, ec='k', zorder=-0.1, clip_on=False) #outline of populations: #fetch the population radius from some population r = params.populationParams['p23']['radius'] theta0 = np.linspace(0, np.pi, 20) theta1 = np.linspace(np.pi, 2*np.pi, 20) zpos = np.r_[params.layerBoundaries[:, 0], 
params.layerBoundaries[-1, 1]] layers = ['L1', 'L2/3', 'L4', 'L5', 'L6'] for i, z in enumerate(params.layerBoundaries.mean(axis=1)): if big: ax.text(r, z, ' %s' % layers[i], va='center', ha='left') for i, zval in enumerate(zpos): if i == 0: ax.plot(r*np.cos(theta0), r*np.sin(theta0)*np.sin(isometricangle)+zval, color='k', zorder=-r, clip_on=False) ax.plot(r*np.cos(theta1), r*np.sin(theta1)*np.sin(isometricangle)+zval, color='k', zorder=r, clip_on=False) else: ax.plot(r*np.cos(theta0), r*np.sin(theta0)*np.sin(isometricangle)+zval, color='gray', zorder=-r, clip_on=False) ax.plot(r*np.cos(theta1), r*np.sin(theta1)*np.sin(isometricangle)+zval, color='k', zorder=r, clip_on=False) ax.plot([-r, -r], [zpos[0], zpos[-1]], 'k', zorder=0, clip_on=False) ax.plot([r, r], [zpos[0], zpos[-1]], 'k', zorder=0, clip_on=False) if big: #plot a horizontal radius scalebar ax.plot([0, r], [z_0.min()]*2, 'k', lw=1, zorder=0, clip_on=False) ax.text(r / 2., z_0.min()-100, '$r$ = %i $\mu$m' % int(r), ha='center') #plot a vertical depth scalebar ax.plot([-r]*2, [z_0.min()+50, z_0.min()-50], 'k', lw=1, zorder=0, clip_on=False) ax.text(-r, z_0.min(), r'100 $\mu$m', va='center', ha='right') ax.set_yticks([]) ax.set_yticklabels([]) #fake ticks: if big: for pos in zpos: ax.text(-r, pos, '$z$=%i-' % int(pos), ha='right', va='center') ax.set_title(title, va='bottom') axis = ax.axis(ax.axis(aspect)) def plot_pop_scatter(somapos, marker, colors, i): #scatter plot setting appropriate zorder for each datapoint by binning pitch = 100 for lower in np.arange(-600, 601, pitch): upper = lower + pitch inds = (somapos[:, 1] >= lower) & (somapos[:, 1] < upper) if np.any(inds): if big: ax.scatter(somapos[inds, 0], somapos[inds, 2] - somapos[inds, 1] * np.sin(isometricangle), s=10, facecolors=colors[i], edgecolors='gray', linewidths=0.1, zorder=lower, marker = marker, clip_on=False, rasterized=rasterized) else: ax.scatter(somapos[inds, 0], somapos[inds, 2] - somapos[inds, 1] * np.sin(isometricangle), s=3, 
facecolors=colors[i], edgecolors='gray', linewidths=0.1, zorder=lower, marker = marker, clip_on=False, rasterized=rasterized) # DRAW UNITS pop = zip(*params.mapping_Yy)[0] #plot a symbol in each location with a unit if plot_somas: if main_pops: colors = phlp.get_colors(np.unique(pop).size) #restructure E, I = zip(*params.y_in_Y) pops_ = [] if Y is None: for i in xrange(len(E)): pops_.append(E[i]) pops_.append(I[i]) else: ids = name_to_id_mapping[Y] if ids[1] == 0: pops_.append(E[ids[0]]) if ids[1] == 1: pops_.append(I[ids[0]]) for i, pops in enumerate(pops_): layer = np.unique(pop)[i] if layer.rfind('p') >= 0 or layer.rfind('E') >= 0: marker = '^' elif layer.rfind('b') >= 0 or layer.rfind('I') >= 0: marker = '*' elif layer.rfind('ss') >= 0: marker = 'o' else: raise Exception #get the somapos somapos = [] for j, lname in enumerate(pops): fname = glob.glob(os.path.join(params.populations_path, '%s*somapos.gdf' % lname))[0] if j == 0: somapos = np.loadtxt(fname).reshape((-1, 3)) else: somapos = np.r_['0, 2', somapos, np.loadtxt(fname).reshape((-1, 3))] somapos = somapos[::5, :] if Y is None: plot_pop_scatter(somapos, marker, colors, i) else: plot_pop_scatter(somapos, marker, colors, ids[0]*2+ids[1]) else: colors = phlp.get_colors(len(pop)) i = 0 for layer, _, _, _ in params.y_zip_list: #assign symbol if layer.rfind('p') >= 0 or layer.rfind('E') >= 0: marker = '^' elif layer.rfind('b') >= 0 or layer.rfind('I') >= 0: marker = '*' elif layer.rfind('ss') >= 0: marker = 'x' else: raise Exception #get the somapos fname = glob.glob(os.path.join(params.populations_path, '%s*somapos.gdf' % layer))[0] somapos = np.loadtxt(fname).reshape((-1, 3)) plot_pop_scatter(somapos, marker, colors, i) i += 1 #plot morphologies in their appropriate locations if plot_morphos: if main_pops: colors = phlp.get_colors(np.unique(pop).size) #restructure E, I = zip(*params.y_in_Y) pops_ = [] for i in xrange(len(E)): pops_.append(E[i]) pops_.append(I[i]) for i, pops in enumerate(pops_): layer = 
np.unique(pop)[i] #get the somapos and morphos somapos = [] for j, lname in enumerate(pops): fname = glob.glob(os.path.join(params.populations_path, '%s*somapos.gdf' % lname))[0] if j == 0: somapos = np.loadtxt(fname).reshape((-1, 3)) else: somapos = np.r_['0, 2', somapos, np.loadtxt(fname).reshape((-1, 3))] #add num_units morphologies per population with a random z-rotation if layer.rfind('p') >= 0 or layer.rfind('ss') >= 0 or layer.rfind('E') >= 0: num_units = num_unitsE else: num_units = num_unitsI if num_units > somapos.shape[0]: n = somapos.shape[0] else: n = num_units #find some morphos for this population: morphos = [] for fname in params.m_y: if fname.rfind(layer) >= 0: morphos.append(fname) #plot some units for j in range(n): cell = LFPy.Cell(morphology=os.path.join(params.PATH_m_y, np.random.permutation(morphos)[0]), nsegs_method='lambda_f', lambda_f=10, extracellular=False ) cell.set_pos(somapos[j, 0], somapos[j, 1], somapos[j, 2]) cell.set_rotation(z=np.random.rand()*np.pi*2) #set up a polycollection zips = [] for x, z in cell.get_idx_polygons(): zips.append(zip(x, z-somapos[j, 1] * np.sin(isometricangle))) polycol = PolyCollection(zips, edgecolors=colors[i], facecolors=colors[i], linewidths=(0.5), zorder=somapos[j, 1], clip_on=clip_dendrites, rasterized=rasterized) ax.add_collection(polycol) i += 1 else: colors = phlp.get_colors(len(pop)) i = 0 for layer, morpho, depth, size in params.y_zip_list: #get the somapos fname = glob.glob(os.path.join(params.populations_path, '%s*somapos.gdf' % layer))[0] somapos = np.loadtxt(fname).reshape((-1, 3)) #add num_units morphologies per population with a random z-rotation if layer.rfind('p') >= 0 or layer.rfind('ss') >= 0 or layer.rfind('E') >= 0: num_units = num_unitsE else: num_units = num_unitsI if num_units > somapos.shape[0]: n = somapos.shape[0] else: n = num_units #plot some units for j in range(n): cell = LFPy.Cell(morphology=os.path.join(params.PATH_m_y, morpho), nsegs_method='lambda_f', lambda_f=10, 
extracellular=False ) cell.set_pos(somapos[j, 0], somapos[j, 1], somapos[j, 2]) cell.set_rotation(z=np.random.rand()*np.pi*2) #set up a polycollection zips = [] for x, z in cell.get_idx_polygons(): zips.append(zip(x, z-somapos[j, 1] * np.sin(isometricangle))) polycol = PolyCollection(zips, edgecolors=colors[i], facecolors=colors[i], linewidths=(0.5), zorder=somapos[j, 1], clip_on=clip_dendrites, rasterized=rasterized) ax.add_collection(polycol) i += 1 return axis
[ "def", "plot_population", "(", "ax", ",", "params", ",", "aspect", "=", "'tight'", ",", "isometricangle", "=", "0", ",", "plot_somas", "=", "True", ",", "plot_morphos", "=", "False", ",", "num_unitsE", "=", "1", ",", "num_unitsI", "=", "1", ",", "clip_de...
Plot the geometry of the column model, optionally with somatic locations and optionally with reconstructed neurons kwargs: :: ax : matplotlib.axes.AxesSubplot aspect : str matplotlib.axis argument isometricangle : float pseudo-3d view angle plot_somas : bool plot soma locations plot_morphos : bool plot full morphologies num_unitsE : int number of excitatory morphos plotted per population num_unitsI : int number of inhibitory morphos plotted per population clip_dendrites : bool draw dendrites outside of axis mainpops : bool if True, plot only main pops, e.g. b23 and nb23 as L23I Y : None, or string if not None, plot only soma locations of Y big : bool if False: leave out labels and reduce marker size return: :: axis : list the plt.axis() corresponding to input aspect
[ "Plot", "the", "geometry", "of", "the", "column", "model", "optionally", "with", "somatic", "locations", "and", "optionally", "with", "reconstructed", "neurons", "kwargs", ":", "::", "ax", ":", "matplotlib", ".", "axes", ".", "AxesSubplot", "aspect", ":", "str...
train
https://github.com/INM-6/hybridLFPy/blob/c38bdf38982c4624c2f70caeb50c40f1d5980abd/examples/Hagen_et_al_2016_cercor/plot_methods.py#L386-L735
INM-6/hybridLFPy
examples/Hagen_et_al_2016_cercor/plot_methods.py
plot_signal_sum
def plot_signal_sum(ax, params, fname='LFPsum.h5', unit='mV', scaling_factor=1., ylabels=True, scalebar=True, vlimround=None, T=[800, 1000], ylim=[-1500, 0], color='k', fancy=False, label='', transient=200, clip_on=False, rasterized=True, **kwargs): ''' on axes plot the summed LFP contributions args: :: ax : matplotlib.axes.AxesSubplot object fname : str/np.ndarray, path to h5 file or ndim=2 numpy.ndarray unit : str, scalebar unit scaling_factor : float, scaling factor (e.g. to scale 10% data set up) ylabels : bool, show labels on y-axis scalebar : bool, show scalebar in plot vlimround : None/float, override autoscaling of data and scalebar T : list, [tstart, tstop], which timeinterval ylim : list of floats, see plt.gca().set_ylim color : str/colorspec tuple, color of shown lines fancy : bool, label : str, line labels rasterized : bool, rasterize line plots if true kwargs : additional keyword arguments passed to ax.plot() returns: :: vlimround : float, scalebar scaling factor, i.e., to match up plots ''' if type(fname) == str and os.path.isfile(fname): f = h5py.File(fname) #load data data = f['data'].value tvec = np.arange(data.shape[1]) * 1000. 
/ f['srate'].value #for mean subtraction datameanaxis1 = f['data'].value[:, tvec >= transient].mean(axis=1) #close dataset f.close() elif type(fname) == np.ndarray and fname.ndim==2: data = fname tvec = np.arange(data.shape[1]) * params.dt_output datameanaxis1 = data[:, tvec >= transient].mean(axis=1) else: raise Exception, 'type(fname)={} not str or numpy.ndarray'.format(type(fname)) # slice slica = (tvec <= T[1]) & (tvec >= T[0]) data = data[:,slica] #subtract mean in each channel #dataT = data.T - data.mean(axis=1) dataT = data.T - datameanaxis1 data = dataT.T # normalize data = data*scaling_factor zvec = np.r_[params.electrodeParams['z']] zvec = np.r_[zvec, zvec[-1] + np.diff(zvec)[-1]] vlim = abs(data).max() if vlimround is None: vlimround = 2.**np.round(np.log2(vlim)) else: pass yticklabels=[] yticks = [] if fancy: colors=phlp.get_colors(data.shape[0]) else: colors = [color]*data.shape[0] for i, z in enumerate(params.electrodeParams['z']): if i == 0: ax.plot(tvec[slica], data[i] * 100 / vlimround + z, color=colors[i], rasterized=rasterized, label=label, clip_on=clip_on, **kwargs) else: ax.plot(tvec[slica], data[i] * 100 / vlimround + z, color=colors[i], rasterized=rasterized, clip_on=clip_on, **kwargs) yticklabels.append('ch. %i' % (i+1)) yticks.append(z) if scalebar: ax.plot([tvec[slica][-1], tvec[slica][-1]], [-1300, -1400], lw=2, color='k', clip_on=False) ax.text(tvec[slica][-1]+np.diff(T)*0.02, -1350, r'%g %s' % (vlimround, unit), color='k', rotation='vertical', va='center') ax.axis(ax.axis('tight')) ax.yaxis.set_ticks(yticks) if ylabels: ax.yaxis.set_ticklabels(yticklabels) else: ax.yaxis.set_ticklabels([]) for loc, spine in ax.spines.iteritems(): if loc in ['right', 'top']: spine.set_color('none') ax.xaxis.set_ticks_position('bottom') ax.yaxis.set_ticks_position('left') ax.set_xlabel(r'$t$ (ms)', labelpad=0.1) ax.set_ylim(ylim) return vlimround
python
def plot_signal_sum(ax, params, fname='LFPsum.h5', unit='mV', scaling_factor=1., ylabels=True, scalebar=True, vlimround=None, T=[800, 1000], ylim=[-1500, 0], color='k', fancy=False, label='', transient=200, clip_on=False, rasterized=True, **kwargs): ''' on axes plot the summed LFP contributions args: :: ax : matplotlib.axes.AxesSubplot object fname : str/np.ndarray, path to h5 file or ndim=2 numpy.ndarray unit : str, scalebar unit scaling_factor : float, scaling factor (e.g. to scale 10% data set up) ylabels : bool, show labels on y-axis scalebar : bool, show scalebar in plot vlimround : None/float, override autoscaling of data and scalebar T : list, [tstart, tstop], which timeinterval ylim : list of floats, see plt.gca().set_ylim color : str/colorspec tuple, color of shown lines fancy : bool, label : str, line labels rasterized : bool, rasterize line plots if true kwargs : additional keyword arguments passed to ax.plot() returns: :: vlimround : float, scalebar scaling factor, i.e., to match up plots ''' if type(fname) == str and os.path.isfile(fname): f = h5py.File(fname) #load data data = f['data'].value tvec = np.arange(data.shape[1]) * 1000. 
/ f['srate'].value #for mean subtraction datameanaxis1 = f['data'].value[:, tvec >= transient].mean(axis=1) #close dataset f.close() elif type(fname) == np.ndarray and fname.ndim==2: data = fname tvec = np.arange(data.shape[1]) * params.dt_output datameanaxis1 = data[:, tvec >= transient].mean(axis=1) else: raise Exception, 'type(fname)={} not str or numpy.ndarray'.format(type(fname)) # slice slica = (tvec <= T[1]) & (tvec >= T[0]) data = data[:,slica] #subtract mean in each channel #dataT = data.T - data.mean(axis=1) dataT = data.T - datameanaxis1 data = dataT.T # normalize data = data*scaling_factor zvec = np.r_[params.electrodeParams['z']] zvec = np.r_[zvec, zvec[-1] + np.diff(zvec)[-1]] vlim = abs(data).max() if vlimround is None: vlimround = 2.**np.round(np.log2(vlim)) else: pass yticklabels=[] yticks = [] if fancy: colors=phlp.get_colors(data.shape[0]) else: colors = [color]*data.shape[0] for i, z in enumerate(params.electrodeParams['z']): if i == 0: ax.plot(tvec[slica], data[i] * 100 / vlimround + z, color=colors[i], rasterized=rasterized, label=label, clip_on=clip_on, **kwargs) else: ax.plot(tvec[slica], data[i] * 100 / vlimround + z, color=colors[i], rasterized=rasterized, clip_on=clip_on, **kwargs) yticklabels.append('ch. %i' % (i+1)) yticks.append(z) if scalebar: ax.plot([tvec[slica][-1], tvec[slica][-1]], [-1300, -1400], lw=2, color='k', clip_on=False) ax.text(tvec[slica][-1]+np.diff(T)*0.02, -1350, r'%g %s' % (vlimround, unit), color='k', rotation='vertical', va='center') ax.axis(ax.axis('tight')) ax.yaxis.set_ticks(yticks) if ylabels: ax.yaxis.set_ticklabels(yticklabels) else: ax.yaxis.set_ticklabels([]) for loc, spine in ax.spines.iteritems(): if loc in ['right', 'top']: spine.set_color('none') ax.xaxis.set_ticks_position('bottom') ax.yaxis.set_ticks_position('left') ax.set_xlabel(r'$t$ (ms)', labelpad=0.1) ax.set_ylim(ylim) return vlimround
[ "def", "plot_signal_sum", "(", "ax", ",", "params", ",", "fname", "=", "'LFPsum.h5'", ",", "unit", "=", "'mV'", ",", "scaling_factor", "=", "1.", ",", "ylabels", "=", "True", ",", "scalebar", "=", "True", ",", "vlimround", "=", "None", ",", "T", "=", ...
on axes plot the summed LFP contributions args: :: ax : matplotlib.axes.AxesSubplot object fname : str/np.ndarray, path to h5 file or ndim=2 numpy.ndarray unit : str, scalebar unit scaling_factor : float, scaling factor (e.g. to scale 10% data set up) ylabels : bool, show labels on y-axis scalebar : bool, show scalebar in plot vlimround : None/float, override autoscaling of data and scalebar T : list, [tstart, tstop], which timeinterval ylim : list of floats, see plt.gca().set_ylim color : str/colorspec tuple, color of shown lines fancy : bool, label : str, line labels rasterized : bool, rasterize line plots if true kwargs : additional keyword arguments passed to ax.plot() returns: :: vlimround : float, scalebar scaling factor, i.e., to match up plots
[ "on", "axes", "plot", "the", "summed", "LFP", "contributions", "args", ":", "::", "ax", ":", "matplotlib", ".", "axes", ".", "AxesSubplot", "object", "fname", ":", "str", "/", "np", ".", "ndarray", "path", "to", "h5", "file", "or", "ndim", "=", "2", ...
train
https://github.com/INM-6/hybridLFPy/blob/c38bdf38982c4624c2f70caeb50c40f1d5980abd/examples/Hagen_et_al_2016_cercor/plot_methods.py#L738-L853
INM-6/hybridLFPy
examples/Hagen_et_al_2016_cercor/plot_methods.py
plotConnectivity
def plotConnectivity(ax): '''make an imshow of the intranetwork connectivity''' im = ax.pcolor(params.C_YX, cmap='hot') ax.axis(ax.axis('tight')) ax.invert_yaxis() ax.xaxis.set_ticks_position('top') ax.set_xticks(np.arange(9)+0.5) ax.set_yticks(np.arange(8)+0.5) ax.set_xticklabels(params.X, rotation=270) ax.set_yticklabels(params.Y, ) #ax.set_ylabel(r'to ($Y$)', ha='center') #ax.set_xlabel(r'from ($X$)', va='center') ax.xaxis.set_label_position('top') rect = np.array(ax.get_position().bounds) rect[0] += rect[2] + 0.01 rect[2] = 0.01 fig = plt.gcf() cax = fig.add_axes(rect) cbar = plt.colorbar(im, cax=cax) cbar.set_label('connectivity', ha='center')
python
def plotConnectivity(ax): '''make an imshow of the intranetwork connectivity''' im = ax.pcolor(params.C_YX, cmap='hot') ax.axis(ax.axis('tight')) ax.invert_yaxis() ax.xaxis.set_ticks_position('top') ax.set_xticks(np.arange(9)+0.5) ax.set_yticks(np.arange(8)+0.5) ax.set_xticklabels(params.X, rotation=270) ax.set_yticklabels(params.Y, ) #ax.set_ylabel(r'to ($Y$)', ha='center') #ax.set_xlabel(r'from ($X$)', va='center') ax.xaxis.set_label_position('top') rect = np.array(ax.get_position().bounds) rect[0] += rect[2] + 0.01 rect[2] = 0.01 fig = plt.gcf() cax = fig.add_axes(rect) cbar = plt.colorbar(im, cax=cax) cbar.set_label('connectivity', ha='center')
[ "def", "plotConnectivity", "(", "ax", ")", ":", "im", "=", "ax", ".", "pcolor", "(", "params", ".", "C_YX", ",", "cmap", "=", "'hot'", ")", "ax", ".", "axis", "(", "ax", ".", "axis", "(", "'tight'", ")", ")", "ax", ".", "invert_yaxis", "(", ")", ...
make an imshow of the intranetwork connectivity
[ "make", "an", "imshow", "of", "the", "intranetwork", "connectivity" ]
train
https://github.com/INM-6/hybridLFPy/blob/c38bdf38982c4624c2f70caeb50c40f1d5980abd/examples/Hagen_et_al_2016_cercor/plot_methods.py#L856-L877
INM-6/hybridLFPy
examples/Hagen_et_al_2016_cercor/plot_methods.py
getMeanInpCurrents
def getMeanInpCurrents(params, numunits=100, filepattern=os.path.join('simulation_output_default', 'population_input_spikes*')): '''return a dict with the per population mean and std synaptic current, averaging over numcells recorded units from each population in the network Returned currents are in unit of nA. ''' #convolution kernels x = np.arange(100) * params.dt kernel = np.exp(-x / params.model_params['tau_syn_ex']) #number of external inputs: K_bg = np.array(sum(params.K_bg, [])) #compensate for DC CC connections if we're using that iDC = K_bg * params.dc_amplitude * 1E-3 # unit ???? data = {} #loop over network-populations for i, Y in enumerate(params.Y): if i % SIZE == RANK: #file to open fname = glob.glob(filepattern+'*' + Y + '*')[0] print fname #read in read data and assess units, up to numunits rawdata = np.array(helpers.read_gdf(fname)) units = np.unique(rawdata[:, 0]) if numunits > units.size: numcells = units.size else: numcells = numunits units = units[:numcells] #churn through data and extract the input currents per cell for j, unit in enumerate(units): slc = rawdata[:, 0] == unit #just the spikes: if j == 0: dataslc = rawdata[slc, 2:] else: dataslc = np.r_['0,3', dataslc, rawdata[slc, 2:]] #fix the datatype, it may be object dataslc = dataslc.astype(float) #fill in data-structure data.update({ Y : { 'E' : np.convolve(dataslc[:, :, 0].mean(axis=0), kernel, 'same')*1E-3 + float(iDC[i]), 'I' : np.convolve(dataslc[:, :, 1].mean(axis=0), kernel, 'same')*1E-3, 'tvec' : rawdata[slc, 1], 'numunits' : numunits, } }) data = COMM.allgather(data) return {k: v for d in data for k, v in d.items()}
python
def getMeanInpCurrents(params, numunits=100, filepattern=os.path.join('simulation_output_default', 'population_input_spikes*')): '''return a dict with the per population mean and std synaptic current, averaging over numcells recorded units from each population in the network Returned currents are in unit of nA. ''' #convolution kernels x = np.arange(100) * params.dt kernel = np.exp(-x / params.model_params['tau_syn_ex']) #number of external inputs: K_bg = np.array(sum(params.K_bg, [])) #compensate for DC CC connections if we're using that iDC = K_bg * params.dc_amplitude * 1E-3 # unit ???? data = {} #loop over network-populations for i, Y in enumerate(params.Y): if i % SIZE == RANK: #file to open fname = glob.glob(filepattern+'*' + Y + '*')[0] print fname #read in read data and assess units, up to numunits rawdata = np.array(helpers.read_gdf(fname)) units = np.unique(rawdata[:, 0]) if numunits > units.size: numcells = units.size else: numcells = numunits units = units[:numcells] #churn through data and extract the input currents per cell for j, unit in enumerate(units): slc = rawdata[:, 0] == unit #just the spikes: if j == 0: dataslc = rawdata[slc, 2:] else: dataslc = np.r_['0,3', dataslc, rawdata[slc, 2:]] #fix the datatype, it may be object dataslc = dataslc.astype(float) #fill in data-structure data.update({ Y : { 'E' : np.convolve(dataslc[:, :, 0].mean(axis=0), kernel, 'same')*1E-3 + float(iDC[i]), 'I' : np.convolve(dataslc[:, :, 1].mean(axis=0), kernel, 'same')*1E-3, 'tvec' : rawdata[slc, 1], 'numunits' : numunits, } }) data = COMM.allgather(data) return {k: v for d in data for k, v in d.items()}
[ "def", "getMeanInpCurrents", "(", "params", ",", "numunits", "=", "100", ",", "filepattern", "=", "os", ".", "path", ".", "join", "(", "'simulation_output_default'", ",", "'population_input_spikes*'", ")", ")", ":", "#convolution kernels", "x", "=", "np", ".", ...
return a dict with the per population mean and std synaptic current, averaging over numcells recorded units from each population in the network Returned currents are in unit of nA.
[ "return", "a", "dict", "with", "the", "per", "population", "mean", "and", "std", "synaptic", "current", "averaging", "over", "numcells", "recorded", "units", "from", "each", "population", "in", "the", "network", "Returned", "currents", "are", "in", "unit", "of...
train
https://github.com/INM-6/hybridLFPy/blob/c38bdf38982c4624c2f70caeb50c40f1d5980abd/examples/Hagen_et_al_2016_cercor/plot_methods.py#L1035-L1100
INM-6/hybridLFPy
examples/Hagen_et_al_2016_cercor/plot_methods.py
getMeanVoltages
def getMeanVoltages(params, numunits=100, filepattern=os.path.join('simulation_output_default', 'voltages')): '''return a dict with the per population mean and std synaptic current, averaging over numcells recorded units from each population in the network Returned currents are in unit of nA. ''' data = {} #loop over network-populations for i, Y in enumerate(params.Y): if i % SIZE == RANK: #read in read data and assess units, up to numunits fname = glob.glob(filepattern+'*' + Y + '*')[0] print fname rawdata = np.array(helpers.read_gdf(fname)) units = np.unique(rawdata[:, 0]) if numunits > units.size: numcells = units.size else: numcells = numunits units = units[:numcells] #churn through data and extract the per cell voltages for j, unit in enumerate(units): slc = rawdata[:, 0] == unit #just the spikes: if j == 0: dataslc = rawdata[slc, 2:] else: dataslc = np.r_['0,3', dataslc, rawdata[slc, 2:]] #fix the datatype, it may be object dataslc = dataslc.astype(float) #fill in data-structure data.update({ Y : { 'data' : dataslc[:, :, 0].mean(axis=0), 'std' : dataslc[:, :, 0].std(axis=0), 'sample' : dataslc[0, :, 0], 'tvec' : rawdata[slc, 1], 'numunits' : numunits, } }) data = COMM.allgather(data) return {k: v for d in data for k, v in d.items()}
python
def getMeanVoltages(params, numunits=100, filepattern=os.path.join('simulation_output_default', 'voltages')): '''return a dict with the per population mean and std synaptic current, averaging over numcells recorded units from each population in the network Returned currents are in unit of nA. ''' data = {} #loop over network-populations for i, Y in enumerate(params.Y): if i % SIZE == RANK: #read in read data and assess units, up to numunits fname = glob.glob(filepattern+'*' + Y + '*')[0] print fname rawdata = np.array(helpers.read_gdf(fname)) units = np.unique(rawdata[:, 0]) if numunits > units.size: numcells = units.size else: numcells = numunits units = units[:numcells] #churn through data and extract the per cell voltages for j, unit in enumerate(units): slc = rawdata[:, 0] == unit #just the spikes: if j == 0: dataslc = rawdata[slc, 2:] else: dataslc = np.r_['0,3', dataslc, rawdata[slc, 2:]] #fix the datatype, it may be object dataslc = dataslc.astype(float) #fill in data-structure data.update({ Y : { 'data' : dataslc[:, :, 0].mean(axis=0), 'std' : dataslc[:, :, 0].std(axis=0), 'sample' : dataslc[0, :, 0], 'tvec' : rawdata[slc, 1], 'numunits' : numunits, } }) data = COMM.allgather(data) return {k: v for d in data for k, v in d.items()}
[ "def", "getMeanVoltages", "(", "params", ",", "numunits", "=", "100", ",", "filepattern", "=", "os", ".", "path", ".", "join", "(", "'simulation_output_default'", ",", "'voltages'", ")", ")", ":", "data", "=", "{", "}", "#loop over network-populations", "for",...
return a dict with the per population mean and std synaptic current, averaging over numcells recorded units from each population in the network Returned currents are in unit of nA.
[ "return", "a", "dict", "with", "the", "per", "population", "mean", "and", "std", "synaptic", "current", "averaging", "over", "numcells", "recorded", "units", "from", "each", "population", "in", "the", "network", "Returned", "currents", "are", "in", "unit", "of...
train
https://github.com/INM-6/hybridLFPy/blob/c38bdf38982c4624c2f70caeb50c40f1d5980abd/examples/Hagen_et_al_2016_cercor/plot_methods.py#L1103-L1155
INM-6/hybridLFPy
examples/Hagen_et_al_2016_cercor/plot_methods.py
plot_signal_sum_colorplot
def plot_signal_sum_colorplot(ax, params, fname='LFPsum.h5', unit='mV', N=1, ylabels = True, T=[800, 1000], ylim=[-1500, 0], fancy=False, colorbar=True, cmap='spectral_r', absmax=None, transient=200, rasterized=True): ''' on colorplot and as background plot the summed CSD contributions args: :: ax : matplotlib.axes.AxesSubplot object T : list, [tstart, tstop], which timeinterval ylims : list, set range of yaxis to scale with other plots fancy : bool, N : integer, set to number of LFP generators in order to get the normalized signal ''' f = h5py.File(fname) data = f['data'].value tvec = np.arange(data.shape[1]) * 1000. / f['srate'].value #for mean subtraction datameanaxis1 = f['data'].value[:, tvec >= transient].mean(axis=1) # slice slica = (tvec <= T[1]) & (tvec >= T[0]) data = data[:,slica] # subtract mean #dataT = data.T - data.mean(axis=1) dataT = data.T - datameanaxis1 data = dataT.T # normalize data = data/N zvec = params.electrodeParams['z'] if fancy: colors = phlp.get_colors(data.shape[0]) else: colors = ['k']*data.shape[0] if absmax == None: absmax=abs(np.array([data.max(), data.min()])).max() im = ax.pcolormesh(tvec[slica], np.r_[zvec, zvec[-1] + np.diff(zvec)[-1]] + 50, data, rasterized=rasterized, vmax=absmax, vmin=-absmax, cmap=cmap) ax.set_yticks(params.electrodeParams['z']) if ylabels: yticklabels = ['ch. %i' %(i+1) for i in np.arange(len(params.electrodeParams['z']))] ax.set_yticklabels(yticklabels) else: ax.set_yticklabels([]) if colorbar: #colorbar divider=make_axes_locatable(ax) cax=divider.append_axes("right", size="5%", pad=0.1) cbar=plt.colorbar(im, cax=cax) cbar.set_label(unit,labelpad=0.1) plt.axis('tight') ax.set_ylim(ylim) f.close() return im
python
def plot_signal_sum_colorplot(ax, params, fname='LFPsum.h5', unit='mV', N=1, ylabels = True, T=[800, 1000], ylim=[-1500, 0], fancy=False, colorbar=True, cmap='spectral_r', absmax=None, transient=200, rasterized=True): ''' on colorplot and as background plot the summed CSD contributions args: :: ax : matplotlib.axes.AxesSubplot object T : list, [tstart, tstop], which timeinterval ylims : list, set range of yaxis to scale with other plots fancy : bool, N : integer, set to number of LFP generators in order to get the normalized signal ''' f = h5py.File(fname) data = f['data'].value tvec = np.arange(data.shape[1]) * 1000. / f['srate'].value #for mean subtraction datameanaxis1 = f['data'].value[:, tvec >= transient].mean(axis=1) # slice slica = (tvec <= T[1]) & (tvec >= T[0]) data = data[:,slica] # subtract mean #dataT = data.T - data.mean(axis=1) dataT = data.T - datameanaxis1 data = dataT.T # normalize data = data/N zvec = params.electrodeParams['z'] if fancy: colors = phlp.get_colors(data.shape[0]) else: colors = ['k']*data.shape[0] if absmax == None: absmax=abs(np.array([data.max(), data.min()])).max() im = ax.pcolormesh(tvec[slica], np.r_[zvec, zvec[-1] + np.diff(zvec)[-1]] + 50, data, rasterized=rasterized, vmax=absmax, vmin=-absmax, cmap=cmap) ax.set_yticks(params.electrodeParams['z']) if ylabels: yticklabels = ['ch. %i' %(i+1) for i in np.arange(len(params.electrodeParams['z']))] ax.set_yticklabels(yticklabels) else: ax.set_yticklabels([]) if colorbar: #colorbar divider=make_axes_locatable(ax) cax=divider.append_axes("right", size="5%", pad=0.1) cbar=plt.colorbar(im, cax=cax) cbar.set_label(unit,labelpad=0.1) plt.axis('tight') ax.set_ylim(ylim) f.close() return im
[ "def", "plot_signal_sum_colorplot", "(", "ax", ",", "params", ",", "fname", "=", "'LFPsum.h5'", ",", "unit", "=", "'mV'", ",", "N", "=", "1", ",", "ylabels", "=", "True", ",", "T", "=", "[", "800", ",", "1000", "]", ",", "ylim", "=", "[", "-", "1...
on colorplot and as background plot the summed CSD contributions args: :: ax : matplotlib.axes.AxesSubplot object T : list, [tstart, tstop], which timeinterval ylims : list, set range of yaxis to scale with other plots fancy : bool, N : integer, set to number of LFP generators in order to get the normalized signal
[ "on", "colorplot", "and", "as", "background", "plot", "the", "summed", "CSD", "contributions", "args", ":", "::", "ax", ":", "matplotlib", ".", "axes", ".", "AxesSubplot", "object", "T", ":", "list", "[", "tstart", "tstop", "]", "which", "timeinterval", "y...
train
https://github.com/INM-6/hybridLFPy/blob/c38bdf38982c4624c2f70caeb50c40f1d5980abd/examples/Hagen_et_al_2016_cercor/plot_methods.py#L1158-L1222
INM-6/hybridLFPy
examples/Hagen_et_al_2016_cercor/plot_methods.py
calc_signal_power
def calc_signal_power(params, fname, transient=200, Df=None, mlab=True, NFFT=1000, noverlap=0, window=plt.mlab.window_hanning): ''' calculates power spectrum of sum signal for all channels ''' if type(fname) is str and os.path.isfile(fname): #open file f = h5py.File(fname) data = f['data'].value srate = f['srate'].value tvec = np.arange(data.shape[1]) * 1000. / srate f.close() elif type(fname) is np.ndarray: data = fname srate = 1000. tvec = np.arange(data.shape[1]) * 1000. / srate else: raise Exception, '{} not a file or array'.format(fname) # slice slica = (tvec >= transient) data = data[:,slica] # subtract mean dataT = data.T - data.mean(axis=1) data = dataT.T #extract PSD PSD=[] for i in np.arange(len(params.electrodeParams['z'])): if mlab: Pxx, freqs=plt.mlab.psd(data[i], NFFT=NFFT, Fs=srate, noverlap=noverlap, window=window) else: [freqs, Pxx] = helpers.powerspec([data[i,]], tbin= 1., Df=Df, pointProcess=False) mask = np.where(freqs >= 0.) freqs = freqs[mask] Pxx = Pxx.flatten() Pxx = Pxx[mask] Pxx = Pxx/tvec[tvec >= transient].size**2 PSD +=[Pxx.flatten()] PSD=np.array(PSD) return freqs, PSD
python
def calc_signal_power(params, fname, transient=200, Df=None, mlab=True, NFFT=1000, noverlap=0, window=plt.mlab.window_hanning): ''' calculates power spectrum of sum signal for all channels ''' if type(fname) is str and os.path.isfile(fname): #open file f = h5py.File(fname) data = f['data'].value srate = f['srate'].value tvec = np.arange(data.shape[1]) * 1000. / srate f.close() elif type(fname) is np.ndarray: data = fname srate = 1000. tvec = np.arange(data.shape[1]) * 1000. / srate else: raise Exception, '{} not a file or array'.format(fname) # slice slica = (tvec >= transient) data = data[:,slica] # subtract mean dataT = data.T - data.mean(axis=1) data = dataT.T #extract PSD PSD=[] for i in np.arange(len(params.electrodeParams['z'])): if mlab: Pxx, freqs=plt.mlab.psd(data[i], NFFT=NFFT, Fs=srate, noverlap=noverlap, window=window) else: [freqs, Pxx] = helpers.powerspec([data[i,]], tbin= 1., Df=Df, pointProcess=False) mask = np.where(freqs >= 0.) freqs = freqs[mask] Pxx = Pxx.flatten() Pxx = Pxx[mask] Pxx = Pxx/tvec[tvec >= transient].size**2 PSD +=[Pxx.flatten()] PSD=np.array(PSD) return freqs, PSD
[ "def", "calc_signal_power", "(", "params", ",", "fname", ",", "transient", "=", "200", ",", "Df", "=", "None", ",", "mlab", "=", "True", ",", "NFFT", "=", "1000", ",", "noverlap", "=", "0", ",", "window", "=", "plt", ".", "mlab", ".", "window_hanning...
calculates power spectrum of sum signal for all channels
[ "calculates", "power", "spectrum", "of", "sum", "signal", "for", "all", "channels" ]
train
https://github.com/INM-6/hybridLFPy/blob/c38bdf38982c4624c2f70caeb50c40f1d5980abd/examples/Hagen_et_al_2016_cercor/plot_methods.py#L1225-L1273
INM-6/hybridLFPy
examples/Hagen_et_al_2016_cercor/plot_methods.py
plot_signal_power_colorplot
def plot_signal_power_colorplot(ax, params, fname, transient=200, Df=None, mlab=True, NFFT=1000, window=plt.mlab.window_hanning, noverlap=0, cmap = plt.cm.get_cmap('jet', 21), vmin=None, vmax=None): ''' on axes plot the LFP power spectral density The whole signal duration is used. args: :: ax : matplotlib.axes.AxesSubplot object fancy : bool, ''' zvec = np.r_[params.electrodeParams['z']] zvec = np.r_[zvec, zvec[-1] + np.diff(zvec)[-1]] #labels yticklabels=[] yticks = [] for i, kk in enumerate(params.electrodeParams['z']): yticklabels.append('ch. %i' % (i+1)) yticks.append(kk) freqs, PSD = calc_signal_power(params, fname=fname, transient=transient,Df=Df, mlab=mlab, NFFT=NFFT, window=window, noverlap=noverlap) #plot only above 1 Hz inds = freqs >= 1 # frequencies greater than 4 Hz im = ax.pcolormesh(freqs[inds], zvec+50, PSD[:, inds], rasterized=True, norm=LogNorm(), vmin=vmin,vmax=vmax, cmap=cmap, ) ax.yaxis.set_ticks(yticks) ax.yaxis.set_ticklabels(yticklabels) ax.semilogx() ax.xaxis.set_ticks_position('bottom') ax.yaxis.set_ticks_position('left') ax.set_xlabel(r'$f$ (Hz)', labelpad=0.1) ax.axis(ax.axis('tight')) return im
python
def plot_signal_power_colorplot(ax, params, fname, transient=200, Df=None, mlab=True, NFFT=1000, window=plt.mlab.window_hanning, noverlap=0, cmap = plt.cm.get_cmap('jet', 21), vmin=None, vmax=None): ''' on axes plot the LFP power spectral density The whole signal duration is used. args: :: ax : matplotlib.axes.AxesSubplot object fancy : bool, ''' zvec = np.r_[params.electrodeParams['z']] zvec = np.r_[zvec, zvec[-1] + np.diff(zvec)[-1]] #labels yticklabels=[] yticks = [] for i, kk in enumerate(params.electrodeParams['z']): yticklabels.append('ch. %i' % (i+1)) yticks.append(kk) freqs, PSD = calc_signal_power(params, fname=fname, transient=transient,Df=Df, mlab=mlab, NFFT=NFFT, window=window, noverlap=noverlap) #plot only above 1 Hz inds = freqs >= 1 # frequencies greater than 4 Hz im = ax.pcolormesh(freqs[inds], zvec+50, PSD[:, inds], rasterized=True, norm=LogNorm(), vmin=vmin,vmax=vmax, cmap=cmap, ) ax.yaxis.set_ticks(yticks) ax.yaxis.set_ticklabels(yticklabels) ax.semilogx() ax.xaxis.set_ticks_position('bottom') ax.yaxis.set_ticks_position('left') ax.set_xlabel(r'$f$ (Hz)', labelpad=0.1) ax.axis(ax.axis('tight')) return im
[ "def", "plot_signal_power_colorplot", "(", "ax", ",", "params", ",", "fname", ",", "transient", "=", "200", ",", "Df", "=", "None", ",", "mlab", "=", "True", ",", "NFFT", "=", "1000", ",", "window", "=", "plt", ".", "mlab", ".", "window_hanning", ",", ...
on axes plot the LFP power spectral density The whole signal duration is used. args: :: ax : matplotlib.axes.AxesSubplot object fancy : bool,
[ "on", "axes", "plot", "the", "LFP", "power", "spectral", "density", "The", "whole", "signal", "duration", "is", "used", ".", "args", ":", "::", "ax", ":", "matplotlib", ".", "axes", ".", "AxesSubplot", "object", "fancy", ":", "bool" ]
train
https://github.com/INM-6/hybridLFPy/blob/c38bdf38982c4624c2f70caeb50c40f1d5980abd/examples/Hagen_et_al_2016_cercor/plot_methods.py#L1276-L1324
INM-6/hybridLFPy
examples/Hagen_et_al_2016_cercor/plot_methods.py
plotPowers
def plotPowers(ax, params, popkeys, dataset, linestyles, linewidths, transient=200, SCALING_POSTFIX='', markerstyles=None): '''plot power (variance) as function of depth for total and separate contributors Plot variance of sum signal ''' colors = phlp.get_colors(len(popkeys)) depth = params.electrodeParams['z'] zpos = np.r_[params.layerBoundaries[:, 0], params.layerBoundaries[-1, 1]] for i, layer in enumerate(popkeys): f = h5py.File(os.path.join(params.populations_path, '%s_population_%s' % (layer, dataset) + SCALING_POSTFIX + '.h5' )) ax.semilogx(f['data'].value[:, transient:].var(axis=1), depth, color=colors[i], ls=linestyles[i], lw=linewidths[i], marker=None if markerstyles is None else markerstyles[i], markersize=2.5, markerfacecolor=colors[i], markeredgecolor=colors[i], label=layer, clip_on=True ) f.close() f = h5py.File(os.path.join(params.savefolder, '%ssum' % dataset + SCALING_POSTFIX + '.h5' )) ax.plot(f['data'].value[:, transient:].var(axis=1), depth, 'k', label='SUM', lw=1.25, clip_on=False) f.close() ax.set_yticks(zpos) ax.set_yticklabels([]) #ax.set_xscale('log') try: # numticks arg only exists for latest matplotlib version ax.xaxis.set_major_locator(plt.LogLocator(base=10, subs=np.linspace(-10, 10, 2), numticks=6)) except: ax.xaxis.set_major_locator(plt.LogLocator(base=10, subs=np.linspace(-10, 10, 2))) ax.xaxis.set_minor_locator(plt.LogLocator(base=10, subs=[1.])) ax.axis('tight')
python
def plotPowers(ax, params, popkeys, dataset, linestyles, linewidths, transient=200, SCALING_POSTFIX='', markerstyles=None): '''plot power (variance) as function of depth for total and separate contributors Plot variance of sum signal ''' colors = phlp.get_colors(len(popkeys)) depth = params.electrodeParams['z'] zpos = np.r_[params.layerBoundaries[:, 0], params.layerBoundaries[-1, 1]] for i, layer in enumerate(popkeys): f = h5py.File(os.path.join(params.populations_path, '%s_population_%s' % (layer, dataset) + SCALING_POSTFIX + '.h5' )) ax.semilogx(f['data'].value[:, transient:].var(axis=1), depth, color=colors[i], ls=linestyles[i], lw=linewidths[i], marker=None if markerstyles is None else markerstyles[i], markersize=2.5, markerfacecolor=colors[i], markeredgecolor=colors[i], label=layer, clip_on=True ) f.close() f = h5py.File(os.path.join(params.savefolder, '%ssum' % dataset + SCALING_POSTFIX + '.h5' )) ax.plot(f['data'].value[:, transient:].var(axis=1), depth, 'k', label='SUM', lw=1.25, clip_on=False) f.close() ax.set_yticks(zpos) ax.set_yticklabels([]) #ax.set_xscale('log') try: # numticks arg only exists for latest matplotlib version ax.xaxis.set_major_locator(plt.LogLocator(base=10, subs=np.linspace(-10, 10, 2), numticks=6)) except: ax.xaxis.set_major_locator(plt.LogLocator(base=10, subs=np.linspace(-10, 10, 2))) ax.xaxis.set_minor_locator(plt.LogLocator(base=10, subs=[1.])) ax.axis('tight')
[ "def", "plotPowers", "(", "ax", ",", "params", ",", "popkeys", ",", "dataset", ",", "linestyles", ",", "linewidths", ",", "transient", "=", "200", ",", "SCALING_POSTFIX", "=", "''", ",", "markerstyles", "=", "None", ")", ":", "colors", "=", "phlp", ".", ...
plot power (variance) as function of depth for total and separate contributors Plot variance of sum signal
[ "plot", "power", "(", "variance", ")", "as", "function", "of", "depth", "for", "total", "and", "separate", "contributors" ]
train
https://github.com/INM-6/hybridLFPy/blob/c38bdf38982c4624c2f70caeb50c40f1d5980abd/examples/Hagen_et_al_2016_cercor/plot_methods.py#L1327-L1373
INM-6/hybridLFPy
examples/Hagen_et_al_2016_cercor/plot_methods.py
plotting_correlation
def plotting_correlation(params, x0, x1, ax, lag=20., scaling=None, normalize=True, color='k', unit=r'$cc=%.3f$' , title='firing_rate vs LFP', scalebar=True, **kwargs): ''' mls on axes plot the correlation between x0 and x1 args: :: x0 : first dataset x1 : second dataset - the LFP usually here ax : matplotlib.axes.AxesSubplot object title : text to be used as current axis object title normalize : if True, signals are z-scored before applying np.correlate unit : unit for scalebar ''' zvec = np.r_[params.electrodeParams['z']] zvec = np.r_[zvec, zvec[-1] + np.diff(zvec)[-1]] xcorr_all=np.zeros((params.electrodeParams['z'].size, x0.shape[-1])) if normalize: for i, z in enumerate(params.electrodeParams['z']): if x0.ndim == 1: x2 = x1[i, ] xcorr1 = np.correlate(helpers.normalize(x0), helpers.normalize(x2), 'same') / x0.size elif x0.ndim == 2: xcorr1 = np.correlate(helpers.normalize(x0[i, ]), helpers.normalize(x1[i, ]), 'same') / x0.shape[-1] xcorr_all[i,:]=xcorr1 else: for i, z in enumerate(params.electrodeParams['z']): if x0.ndim == 1: x2 = x1[i, ] xcorr1 = np.correlate(x0,x2, 'same') elif x0.ndim == 2: xcorr1 = np.correlate(x0[i, ],x1[i, ], 'same') xcorr_all[i,:]=xcorr1 # Find limits for the plot if scaling is None: vlim = abs(xcorr_all).max() vlimround = 2.**np.round(np.log2(vlim)) else: vlimround = scaling yticklabels=[] yticks = [] #temporal slicing lagvector = np.arange(-lag, lag+1).astype(int) inds = lagvector + x0.shape[-1] / 2 for i, z in enumerate(params.electrodeParams['z']): ax.plot(lagvector, xcorr_all[i,inds[::-1]] * 100. / vlimround + z, 'k', clip_on=True, rasterized=False, color=color, **kwargs) yticklabels.append('ch. 
%i' %(i+1)) yticks.append(z) phlp.remove_axis_junk(ax) ax.set_title(title, va='center') ax.set_xlabel(r'$\tau$ (ms)', labelpad=0.1) ax.set_xlim(-lag, lag) ax.set_ylim(z-100, 100) axis = ax.axis() ax.vlines(0, axis[2], axis[3], 'k' if analysis_params.bw else 'k', 'dotted', lw=0.25) ax.yaxis.set_ticks(yticks) ax.yaxis.set_ticklabels(yticklabels) ax.xaxis.set_ticks_position('bottom') ax.yaxis.set_ticks_position('left') ## Create a scaling bar if scalebar: ax.plot([lag, lag], [-1500, -1400], lw=2, color='k', clip_on=False) ax.text(lag*1.04, -1450, unit % vlimround, rotation='vertical', va='center') return xcorr_all[:, inds[::-1]], vlimround
python
def plotting_correlation(params, x0, x1, ax, lag=20., scaling=None, normalize=True, color='k', unit=r'$cc=%.3f$' , title='firing_rate vs LFP', scalebar=True, **kwargs): ''' mls on axes plot the correlation between x0 and x1 args: :: x0 : first dataset x1 : second dataset - the LFP usually here ax : matplotlib.axes.AxesSubplot object title : text to be used as current axis object title normalize : if True, signals are z-scored before applying np.correlate unit : unit for scalebar ''' zvec = np.r_[params.electrodeParams['z']] zvec = np.r_[zvec, zvec[-1] + np.diff(zvec)[-1]] xcorr_all=np.zeros((params.electrodeParams['z'].size, x0.shape[-1])) if normalize: for i, z in enumerate(params.electrodeParams['z']): if x0.ndim == 1: x2 = x1[i, ] xcorr1 = np.correlate(helpers.normalize(x0), helpers.normalize(x2), 'same') / x0.size elif x0.ndim == 2: xcorr1 = np.correlate(helpers.normalize(x0[i, ]), helpers.normalize(x1[i, ]), 'same') / x0.shape[-1] xcorr_all[i,:]=xcorr1 else: for i, z in enumerate(params.electrodeParams['z']): if x0.ndim == 1: x2 = x1[i, ] xcorr1 = np.correlate(x0,x2, 'same') elif x0.ndim == 2: xcorr1 = np.correlate(x0[i, ],x1[i, ], 'same') xcorr_all[i,:]=xcorr1 # Find limits for the plot if scaling is None: vlim = abs(xcorr_all).max() vlimround = 2.**np.round(np.log2(vlim)) else: vlimround = scaling yticklabels=[] yticks = [] #temporal slicing lagvector = np.arange(-lag, lag+1).astype(int) inds = lagvector + x0.shape[-1] / 2 for i, z in enumerate(params.electrodeParams['z']): ax.plot(lagvector, xcorr_all[i,inds[::-1]] * 100. / vlimround + z, 'k', clip_on=True, rasterized=False, color=color, **kwargs) yticklabels.append('ch. 
%i' %(i+1)) yticks.append(z) phlp.remove_axis_junk(ax) ax.set_title(title, va='center') ax.set_xlabel(r'$\tau$ (ms)', labelpad=0.1) ax.set_xlim(-lag, lag) ax.set_ylim(z-100, 100) axis = ax.axis() ax.vlines(0, axis[2], axis[3], 'k' if analysis_params.bw else 'k', 'dotted', lw=0.25) ax.yaxis.set_ticks(yticks) ax.yaxis.set_ticklabels(yticklabels) ax.xaxis.set_ticks_position('bottom') ax.yaxis.set_ticks_position('left') ## Create a scaling bar if scalebar: ax.plot([lag, lag], [-1500, -1400], lw=2, color='k', clip_on=False) ax.text(lag*1.04, -1450, unit % vlimround, rotation='vertical', va='center') return xcorr_all[:, inds[::-1]], vlimround
[ "def", "plotting_correlation", "(", "params", ",", "x0", ",", "x1", ",", "ax", ",", "lag", "=", "20.", ",", "scaling", "=", "None", ",", "normalize", "=", "True", ",", "color", "=", "'k'", ",", "unit", "=", "r'$cc=%.3f$'", ",", "title", "=", "'firing...
mls on axes plot the correlation between x0 and x1 args: :: x0 : first dataset x1 : second dataset - the LFP usually here ax : matplotlib.axes.AxesSubplot object title : text to be used as current axis object title normalize : if True, signals are z-scored before applying np.correlate unit : unit for scalebar
[ "mls", "on", "axes", "plot", "the", "correlation", "between", "x0", "and", "x1", "args", ":", "::", "x0", ":", "first", "dataset", "x1", ":", "second", "dataset", "-", "the", "LFP", "usually", "here", "ax", ":", "matplotlib", ".", "axes", ".", "AxesSub...
train
https://github.com/INM-6/hybridLFPy/blob/c38bdf38982c4624c2f70caeb50c40f1d5980abd/examples/Hagen_et_al_2016_cercor/plot_methods.py#L1376-L1462
Pytwitcher/pytwitcherapi
src/pytwitcherapi/oauth.py
RedirectHandler.do_GET
def do_GET(self, ): """Handle GET requests If the path is '/', a site which extracts the token will be generated. This will redirect the user to the '/sucess' page, which shows a success message. :returns: None :rtype: None :raises: None """ urld = {self.extract_site_url: 'extract_token_site.html', self.success_site_url: 'success_site.html'} site = urld.get(self.path) if not site: log.debug("Requesting false url on login server.") self.send_error(404) return log.debug('Requesting the login server. Responding with %s.', urld) self._set_headers() self._write_html(site)
python
def do_GET(self, ): """Handle GET requests If the path is '/', a site which extracts the token will be generated. This will redirect the user to the '/sucess' page, which shows a success message. :returns: None :rtype: None :raises: None """ urld = {self.extract_site_url: 'extract_token_site.html', self.success_site_url: 'success_site.html'} site = urld.get(self.path) if not site: log.debug("Requesting false url on login server.") self.send_error(404) return log.debug('Requesting the login server. Responding with %s.', urld) self._set_headers() self._write_html(site)
[ "def", "do_GET", "(", "self", ",", ")", ":", "urld", "=", "{", "self", ".", "extract_site_url", ":", "'extract_token_site.html'", ",", "self", ".", "success_site_url", ":", "'success_site.html'", "}", "site", "=", "urld", ".", "get", "(", "self", ".", "pat...
Handle GET requests If the path is '/', a site which extracts the token will be generated. This will redirect the user to the '/sucess' page, which shows a success message. :returns: None :rtype: None :raises: None
[ "Handle", "GET", "requests" ]
train
https://github.com/Pytwitcher/pytwitcherapi/blob/d53ac5ad5ca113ecb7da542e8cdcbbf8c762b336/src/pytwitcherapi/oauth.py#L50-L70
Pytwitcher/pytwitcherapi
src/pytwitcherapi/oauth.py
RedirectHandler._write_html
def _write_html(self, filename): """Read the html site with the given filename from the data directory and write it to :data:`RedirectHandler.wfile`. :param filename: the filename to read :type filename: :class:`str` :returns: None :rtype: None :raises: None """ datapath = os.path.join('html', filename) sitepath = pkg_resources.resource_filename('pytwitcherapi', datapath) with open(sitepath, 'r') as f: html = f.read() self.wfile.write(html.encode('utf-8'))
python
def _write_html(self, filename): """Read the html site with the given filename from the data directory and write it to :data:`RedirectHandler.wfile`. :param filename: the filename to read :type filename: :class:`str` :returns: None :rtype: None :raises: None """ datapath = os.path.join('html', filename) sitepath = pkg_resources.resource_filename('pytwitcherapi', datapath) with open(sitepath, 'r') as f: html = f.read() self.wfile.write(html.encode('utf-8'))
[ "def", "_write_html", "(", "self", ",", "filename", ")", ":", "datapath", "=", "os", ".", "path", ".", "join", "(", "'html'", ",", "filename", ")", "sitepath", "=", "pkg_resources", ".", "resource_filename", "(", "'pytwitcherapi'", ",", "datapath", ")", "w...
Read the html site with the given filename from the data directory and write it to :data:`RedirectHandler.wfile`. :param filename: the filename to read :type filename: :class:`str` :returns: None :rtype: None :raises: None
[ "Read", "the", "html", "site", "with", "the", "given", "filename", "from", "the", "data", "directory", "and", "write", "it", "to", ":", "data", ":", "RedirectHandler", ".", "wfile", "." ]
train
https://github.com/Pytwitcher/pytwitcherapi/blob/d53ac5ad5ca113ecb7da542e8cdcbbf8c762b336/src/pytwitcherapi/oauth.py#L72-L86
Pytwitcher/pytwitcherapi
src/pytwitcherapi/oauth.py
RedirectHandler.do_POST
def do_POST(self, ): """Handle POST requests When the user is redirected, this handler will respond with a website which will send a post request with the url fragment as parameters. This will get the parameters and store the original redirection url and fragments in :data:`LoginServer.tokenurl`. :returns: None :rtype: None :raises: None """ log.debug('POST') self._set_headers() # convert the parameters back to the original fragment # because we need to send the original uri to set_token # url fragments will not show up in self.path though. # thats why we make the hassle to send it as a post request. # Note: oauth does not allow for http connections # but twitch does, so we fake it ruri = constants.REDIRECT_URI.replace('http://', 'https://') self.server.set_token(ruri + self.path.replace('?', '#'))
python
def do_POST(self, ): """Handle POST requests When the user is redirected, this handler will respond with a website which will send a post request with the url fragment as parameters. This will get the parameters and store the original redirection url and fragments in :data:`LoginServer.tokenurl`. :returns: None :rtype: None :raises: None """ log.debug('POST') self._set_headers() # convert the parameters back to the original fragment # because we need to send the original uri to set_token # url fragments will not show up in self.path though. # thats why we make the hassle to send it as a post request. # Note: oauth does not allow for http connections # but twitch does, so we fake it ruri = constants.REDIRECT_URI.replace('http://', 'https://') self.server.set_token(ruri + self.path.replace('?', '#'))
[ "def", "do_POST", "(", "self", ",", ")", ":", "log", ".", "debug", "(", "'POST'", ")", "self", ".", "_set_headers", "(", ")", "# convert the parameters back to the original fragment", "# because we need to send the original uri to set_token", "# url fragments will not show up...
Handle POST requests When the user is redirected, this handler will respond with a website which will send a post request with the url fragment as parameters. This will get the parameters and store the original redirection url and fragments in :data:`LoginServer.tokenurl`. :returns: None :rtype: None :raises: None
[ "Handle", "POST", "requests" ]
train
https://github.com/Pytwitcher/pytwitcherapi/blob/d53ac5ad5ca113ecb7da542e8cdcbbf8c762b336/src/pytwitcherapi/oauth.py#L88-L109
Pytwitcher/pytwitcherapi
src/pytwitcherapi/oauth.py
LoginServer.set_token
def set_token(self, redirecturl): """Set the token on the session :param redirecturl: the original full redirect url :type redirecturl: :class:`str` :returns: None :rtype: None :raises: None """ log.debug('Setting the token on %s.' % self.session) self.session.token_from_fragment(redirecturl)
python
def set_token(self, redirecturl): """Set the token on the session :param redirecturl: the original full redirect url :type redirecturl: :class:`str` :returns: None :rtype: None :raises: None """ log.debug('Setting the token on %s.' % self.session) self.session.token_from_fragment(redirecturl)
[ "def", "set_token", "(", "self", ",", "redirecturl", ")", ":", "log", ".", "debug", "(", "'Setting the token on %s.'", "%", "self", ".", "session", ")", "self", ".", "session", ".", "token_from_fragment", "(", "redirecturl", ")" ]
Set the token on the session :param redirecturl: the original full redirect url :type redirecturl: :class:`str` :returns: None :rtype: None :raises: None
[ "Set", "the", "token", "on", "the", "session" ]
train
https://github.com/Pytwitcher/pytwitcherapi/blob/d53ac5ad5ca113ecb7da542e8cdcbbf8c762b336/src/pytwitcherapi/oauth.py#L132-L142
Pytwitcher/pytwitcherapi
src/pytwitcherapi/oauth.py
TwitchOAuthClient._add_bearer_token
def _add_bearer_token(self, *args, **kwargs): """Add a bearer token to the request uri, body or authorization header. This is overwritten to change the headers slightly. """ s = super(TwitchOAuthClient, self) uri, headers, body = s._add_bearer_token(*args, **kwargs) authheader = headers.get('Authorization') if authheader: headers['Authorization'] = authheader.replace('Bearer', 'OAuth') return uri, headers, body
python
def _add_bearer_token(self, *args, **kwargs): """Add a bearer token to the request uri, body or authorization header. This is overwritten to change the headers slightly. """ s = super(TwitchOAuthClient, self) uri, headers, body = s._add_bearer_token(*args, **kwargs) authheader = headers.get('Authorization') if authheader: headers['Authorization'] = authheader.replace('Bearer', 'OAuth') return uri, headers, body
[ "def", "_add_bearer_token", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "s", "=", "super", "(", "TwitchOAuthClient", ",", "self", ")", "uri", ",", "headers", ",", "body", "=", "s", ".", "_add_bearer_token", "(", "*", "args", ","...
Add a bearer token to the request uri, body or authorization header. This is overwritten to change the headers slightly.
[ "Add", "a", "bearer", "token", "to", "the", "request", "uri", "body", "or", "authorization", "header", "." ]
train
https://github.com/Pytwitcher/pytwitcherapi/blob/d53ac5ad5ca113ecb7da542e8cdcbbf8c762b336/src/pytwitcherapi/oauth.py#L160-L170
BlendedSiteGenerator/Blended
blended/__main__.py
install_template
def install_template(username, repo): """Installs a Blended template from GitHub""" print("Installing template from " + username + "/" + repo) dpath = os.path.join(cwd, "templates") getunzipped(username, repo, dpath)
python
def install_template(username, repo): """Installs a Blended template from GitHub""" print("Installing template from " + username + "/" + repo) dpath = os.path.join(cwd, "templates") getunzipped(username, repo, dpath)
[ "def", "install_template", "(", "username", ",", "repo", ")", ":", "print", "(", "\"Installing template from \"", "+", "username", "+", "\"/\"", "+", "repo", ")", "dpath", "=", "os", ".", "path", ".", "join", "(", "cwd", ",", "\"templates\"", ")", "getunzi...
Installs a Blended template from GitHub
[ "Installs", "a", "Blended", "template", "from", "GitHub" ]
train
https://github.com/BlendedSiteGenerator/Blended/blob/e5865a8633e461a22c86ef6ee98cdd7051c412ac/blended/__main__.py#L65-L70
BlendedSiteGenerator/Blended
blended/__main__.py
import_wp
def import_wp(filepath): """Imports A WordPress export and converts it to a Blended site""" print("\nBlended: Static Website Generator -\n") checkConfig() print("Importing from WordPress...") wp = parseXML(filepath) wname = wp.rss.channel.title.cdata wdesc = wp.rss.channel.description.cdata wlan = wp.rss.channel.language.cdata wurl = wp.rss.channel.link.cdata aname = wp.rss.channel.wp_author.wp_author_display_name.cdata.strip() createBlendedFolders() # Populate the configuration file createConfig(app_version=app_version, wname=wname, wdesc=wdesc, wlan=wlan, wurl=wurl, aname=aname) for item in wp.rss.channel.item: with open(os.path.join(cwd, "content", item.title.cdata.replace(" ", "_") + ".html"), 'w') as wfile: wfile.write(item.content_encoded.cdata.strip()) print("\nYour website has been imported from WordPress.")
python
def import_wp(filepath): """Imports A WordPress export and converts it to a Blended site""" print("\nBlended: Static Website Generator -\n") checkConfig() print("Importing from WordPress...") wp = parseXML(filepath) wname = wp.rss.channel.title.cdata wdesc = wp.rss.channel.description.cdata wlan = wp.rss.channel.language.cdata wurl = wp.rss.channel.link.cdata aname = wp.rss.channel.wp_author.wp_author_display_name.cdata.strip() createBlendedFolders() # Populate the configuration file createConfig(app_version=app_version, wname=wname, wdesc=wdesc, wlan=wlan, wurl=wurl, aname=aname) for item in wp.rss.channel.item: with open(os.path.join(cwd, "content", item.title.cdata.replace(" ", "_") + ".html"), 'w') as wfile: wfile.write(item.content_encoded.cdata.strip()) print("\nYour website has been imported from WordPress.")
[ "def", "import_wp", "(", "filepath", ")", ":", "print", "(", "\"\\nBlended: Static Website Generator -\\n\"", ")", "checkConfig", "(", ")", "print", "(", "\"Importing from WordPress...\"", ")", "wp", "=", "parseXML", "(", "filepath", ")", "wname", "=", "wp", ".", ...
Imports A WordPress export and converts it to a Blended site
[ "Imports", "A", "WordPress", "export", "and", "converts", "it", "to", "a", "Blended", "site" ]
train
https://github.com/BlendedSiteGenerator/Blended/blob/e5865a8633e461a22c86ef6ee98cdd7051c412ac/blended/__main__.py#L75-L101
BlendedSiteGenerator/Blended
blended/__main__.py
import_blogger
def import_blogger(filepath): """Imports A Blogger export and converts it to a Blended site""" print("\nBlended: Static Website Generator -\n") checkConfig() print("Importing from Blogger...") blogger = parseXML(filepath) wname = blogger.feed.title.cdata aname = blogger.feed.author.name.cdata.strip() createBlendedFolders() # Populate the configuration file createConfig(app_version=app_version, wname=wname, aname=aname) for entry in blogger.feed.entry: if "post" in entry.id.cdata: with open(os.path.join(cwd, "content", entry.title.cdata.replace(" ", "_") + ".html"), 'w') as wfile: wfile.write(entry.content.cdata.strip()) print("\nYour website has been imported from Blogger.")
python
def import_blogger(filepath): """Imports A Blogger export and converts it to a Blended site""" print("\nBlended: Static Website Generator -\n") checkConfig() print("Importing from Blogger...") blogger = parseXML(filepath) wname = blogger.feed.title.cdata aname = blogger.feed.author.name.cdata.strip() createBlendedFolders() # Populate the configuration file createConfig(app_version=app_version, wname=wname, aname=aname) for entry in blogger.feed.entry: if "post" in entry.id.cdata: with open(os.path.join(cwd, "content", entry.title.cdata.replace(" ", "_") + ".html"), 'w') as wfile: wfile.write(entry.content.cdata.strip()) print("\nYour website has been imported from Blogger.")
[ "def", "import_blogger", "(", "filepath", ")", ":", "print", "(", "\"\\nBlended: Static Website Generator -\\n\"", ")", "checkConfig", "(", ")", "print", "(", "\"Importing from Blogger...\"", ")", "blogger", "=", "parseXML", "(", "filepath", ")", "wname", "=", "blog...
Imports A Blogger export and converts it to a Blended site
[ "Imports", "A", "Blogger", "export", "and", "converts", "it", "to", "a", "Blended", "site" ]
train
https://github.com/BlendedSiteGenerator/Blended/blob/e5865a8633e461a22c86ef6ee98cdd7051c412ac/blended/__main__.py#L106-L129
BlendedSiteGenerator/Blended
blended/__main__.py
install_plugin
def install_plugin(username, repo): """Installs a Blended plugin from GitHub""" print("Installing plugin from " + username + "/" + repo) pip.main(['install', '-U', "git+git://github.com/" + username + "/" + repo + ".git"])
python
def install_plugin(username, repo): """Installs a Blended plugin from GitHub""" print("Installing plugin from " + username + "/" + repo) pip.main(['install', '-U', "git+git://github.com/" + username + "/" + repo + ".git"])
[ "def", "install_plugin", "(", "username", ",", "repo", ")", ":", "print", "(", "\"Installing plugin from \"", "+", "username", "+", "\"/\"", "+", "repo", ")", "pip", ".", "main", "(", "[", "'install'", ",", "'-U'", ",", "\"git+git://github.com/\"", "+", "use...
Installs a Blended plugin from GitHub
[ "Installs", "a", "Blended", "plugin", "from", "GitHub" ]
train
https://github.com/BlendedSiteGenerator/Blended/blob/e5865a8633e461a22c86ef6ee98cdd7051c412ac/blended/__main__.py#L137-L142
BlendedSiteGenerator/Blended
blended/__main__.py
init
def init(): """Initiates a new website""" print("Blended: Static Website Generator -\n") checkConfig() if (sys.version_info > (3, 0)): wname = input("Website Name: ") wdesc = input("Website Description: ") wlan = input("Website Language: ") wlic = input("Website License: ") aname = input("Author(s) Name(s): ") else: wname = raw_input("Website Name: ") wdesc = raw_input("Website Description: ") wlan = raw_input("Website Language: ") wlic = raw_input("Website License: ") aname = raw_input("Author(s) Name(s): ") createBlendedFolders() # Populate the configuration file createConfig(app_version=app_version, wname=wname, wdesc=wdesc, wlic=wlic, wlan=wlan, aname=aname) print("\nThe required files for your website have been generated.")
python
def init(): """Initiates a new website""" print("Blended: Static Website Generator -\n") checkConfig() if (sys.version_info > (3, 0)): wname = input("Website Name: ") wdesc = input("Website Description: ") wlan = input("Website Language: ") wlic = input("Website License: ") aname = input("Author(s) Name(s): ") else: wname = raw_input("Website Name: ") wdesc = raw_input("Website Description: ") wlan = raw_input("Website Language: ") wlic = raw_input("Website License: ") aname = raw_input("Author(s) Name(s): ") createBlendedFolders() # Populate the configuration file createConfig(app_version=app_version, wname=wname, wdesc=wdesc, wlic=wlic, wlan=wlan, aname=aname) print("\nThe required files for your website have been generated.")
[ "def", "init", "(", ")", ":", "print", "(", "\"Blended: Static Website Generator -\\n\"", ")", "checkConfig", "(", ")", "if", "(", "sys", ".", "version_info", ">", "(", "3", ",", "0", ")", ")", ":", "wname", "=", "input", "(", "\"Website Name: \"", ")", ...
Initiates a new website
[ "Initiates", "a", "new", "website" ]
train
https://github.com/BlendedSiteGenerator/Blended/blob/e5865a8633e461a22c86ef6ee98cdd7051c412ac/blended/__main__.py#L146-L172
BlendedSiteGenerator/Blended
blended/__main__.py
placeFiles
def placeFiles(ftp, path): """Upload the built files to FTP""" for name in os.listdir(path): if name != "config.py" and name != "config.pyc" and name != "templates" and name != "content": localpath = os.path.join(path, name) if os.path.isfile(localpath): print("STOR", name, localpath) ftp.storbinary('STOR ' + name, open(localpath, 'rb')) elif os.path.isdir(localpath): print("MKD", name) try: ftp.mkd(name) # ignore "directory already exists" except error_perm as e: if not e.args[0].startswith('550'): raise print("CWD", name) ftp.cwd(name) placeFiles(ftp, localpath) print("CWD", "..") ftp.cwd("..")
python
def placeFiles(ftp, path): """Upload the built files to FTP""" for name in os.listdir(path): if name != "config.py" and name != "config.pyc" and name != "templates" and name != "content": localpath = os.path.join(path, name) if os.path.isfile(localpath): print("STOR", name, localpath) ftp.storbinary('STOR ' + name, open(localpath, 'rb')) elif os.path.isdir(localpath): print("MKD", name) try: ftp.mkd(name) # ignore "directory already exists" except error_perm as e: if not e.args[0].startswith('550'): raise print("CWD", name) ftp.cwd(name) placeFiles(ftp, localpath) print("CWD", "..") ftp.cwd("..")
[ "def", "placeFiles", "(", "ftp", ",", "path", ")", ":", "for", "name", "in", "os", ".", "listdir", "(", "path", ")", ":", "if", "name", "!=", "\"config.py\"", "and", "name", "!=", "\"config.pyc\"", "and", "name", "!=", "\"templates\"", "and", "name", "...
Upload the built files to FTP
[ "Upload", "the", "built", "files", "to", "FTP" ]
train
https://github.com/BlendedSiteGenerator/Blended/blob/e5865a8633e461a22c86ef6ee98cdd7051c412ac/blended/__main__.py#L175-L198
BlendedSiteGenerator/Blended
blended/__main__.py
send_ftp
def send_ftp(outdir): """Upload the built website to FTP""" print("Uploading the files in the " + outdir + "/ directory!\n") # Make sure there is actually a configuration file config_file_dir = os.path.join(cwd, "config.py") if not os.path.exists(config_file_dir): sys.exit( "There dosen't seem to be a configuration file. Have you run the init command?") else: sys.path.insert(0, cwd) try: from config import ftp_server, ftp_username, ftp_password, ftp_port, ftp_upload_path except: sys.exit( "The FTP settings could not be found. Maybe your config file is too old. Re-run 'blended init' to fix it.") server = ftp_server username = ftp_username password = ftp_password port = ftp_port ftp = FTP() ftp.connect(server, port) ftp.login(username, password) filenameCV = os.path.join(cwd, outdir) try: ftp.cwd(ftp_upload_path) placeFiles(ftp, filenameCV) except: ftp.quit() sys.exit("Files not able to be uploaded! Are you sure the directory exists?") ftp.quit() print("\nFTP Done!")
python
def send_ftp(outdir): """Upload the built website to FTP""" print("Uploading the files in the " + outdir + "/ directory!\n") # Make sure there is actually a configuration file config_file_dir = os.path.join(cwd, "config.py") if not os.path.exists(config_file_dir): sys.exit( "There dosen't seem to be a configuration file. Have you run the init command?") else: sys.path.insert(0, cwd) try: from config import ftp_server, ftp_username, ftp_password, ftp_port, ftp_upload_path except: sys.exit( "The FTP settings could not be found. Maybe your config file is too old. Re-run 'blended init' to fix it.") server = ftp_server username = ftp_username password = ftp_password port = ftp_port ftp = FTP() ftp.connect(server, port) ftp.login(username, password) filenameCV = os.path.join(cwd, outdir) try: ftp.cwd(ftp_upload_path) placeFiles(ftp, filenameCV) except: ftp.quit() sys.exit("Files not able to be uploaded! Are you sure the directory exists?") ftp.quit() print("\nFTP Done!")
[ "def", "send_ftp", "(", "outdir", ")", ":", "print", "(", "\"Uploading the files in the \"", "+", "outdir", "+", "\"/ directory!\\n\"", ")", "# Make sure there is actually a configuration file", "config_file_dir", "=", "os", ".", "path", ".", "join", "(", "cwd", ",", ...
Upload the built website to FTP
[ "Upload", "the", "built", "website", "to", "FTP" ]
train
https://github.com/BlendedSiteGenerator/Blended/blob/e5865a8633e461a22c86ef6ee98cdd7051c412ac/blended/__main__.py#L203-L239
BlendedSiteGenerator/Blended
blended/__main__.py
clean_built
def clean_built(outdir): """Removes all built files""" print("Removing the built files!") # Remove the build folder build_dir = os.path.join(cwd, outdir) if os.path.exists(build_dir): shutil.rmtree(build_dir)
python
def clean_built(outdir): """Removes all built files""" print("Removing the built files!") # Remove the build folder build_dir = os.path.join(cwd, outdir) if os.path.exists(build_dir): shutil.rmtree(build_dir)
[ "def", "clean_built", "(", "outdir", ")", ":", "print", "(", "\"Removing the built files!\"", ")", "# Remove the build folder", "build_dir", "=", "os", ".", "path", ".", "join", "(", "cwd", ",", "outdir", ")", "if", "os", ".", "path", ".", "exists", "(", ...
Removes all built files
[ "Removes", "all", "built", "files" ]
train
https://github.com/BlendedSiteGenerator/Blended/blob/e5865a8633e461a22c86ef6ee98cdd7051c412ac/blended/__main__.py#L244-L251
BlendedSiteGenerator/Blended
blended/__main__.py
zip_built
def zip_built(outdir): """Packages the build folder into a zip""" print("Zipping the built files!") config_file_dir = os.path.join(cwd, "config.py") if not os.path.exists(config_file_dir): sys.exit( "There dosen't seem to be a configuration file. Have you run the init command?") else: sys.path.insert(0, cwd) try: from config import website_name except: sys.exit( "Some of the configuration values could not be found! Maybe your config.py is too old. Run 'blended init' to fix.") # Remove the build folder build_dir = os.path.join(cwd, outdir) zip_dir = os.path.join(cwd, website_name.replace(" ", "_") + "-build-" + str(datetime.now().date())) if os.path.exists(build_dir): shutil.make_archive(zip_dir, 'zip', build_dir) else: print("The " + outdir + "/ folder could not be found! Have you run 'blended build' yet?")
python
def zip_built(outdir): """Packages the build folder into a zip""" print("Zipping the built files!") config_file_dir = os.path.join(cwd, "config.py") if not os.path.exists(config_file_dir): sys.exit( "There dosen't seem to be a configuration file. Have you run the init command?") else: sys.path.insert(0, cwd) try: from config import website_name except: sys.exit( "Some of the configuration values could not be found! Maybe your config.py is too old. Run 'blended init' to fix.") # Remove the build folder build_dir = os.path.join(cwd, outdir) zip_dir = os.path.join(cwd, website_name.replace(" ", "_") + "-build-" + str(datetime.now().date())) if os.path.exists(build_dir): shutil.make_archive(zip_dir, 'zip', build_dir) else: print("The " + outdir + "/ folder could not be found! Have you run 'blended build' yet?")
[ "def", "zip_built", "(", "outdir", ")", ":", "print", "(", "\"Zipping the built files!\"", ")", "config_file_dir", "=", "os", ".", "path", ".", "join", "(", "cwd", ",", "\"config.py\"", ")", "if", "not", "os", ".", "path", ".", "exists", "(", "config_file_...
Packages the build folder into a zip
[ "Packages", "the", "build", "folder", "into", "a", "zip" ]
train
https://github.com/BlendedSiteGenerator/Blended/blob/e5865a8633e461a22c86ef6ee98cdd7051c412ac/blended/__main__.py#L256-L280
BlendedSiteGenerator/Blended
blended/__main__.py
purge
def purge(): """Removes all files generated by Blended""" print("Purging the Blended files!") # Remove the templates folder templ_dir = os.path.join(cwd, "templates") if os.path.exists(templ_dir): shutil.rmtree(templ_dir) # Remove the content folder cont_dir = os.path.join(cwd, "content") if os.path.exists(cont_dir): shutil.rmtree(cont_dir) # Remove the build folder build_dir = os.path.join(cwd, "build") if os.path.exists(build_dir): shutil.rmtree(build_dir) # Remove config.py config_file_dir = os.path.join(cwd, "config.py") if os.path.exists(config_file_dir): os.remove(config_file_dir) # Remove config.pyc config2_file_dir = os.path.join(cwd, "config.pyc") if os.path.exists(config2_file_dir): os.remove(config2_file_dir) # Remove config.py config3_file_dir = os.path.join(cwd, "config.py.oldbak") if os.path.exists(config3_file_dir): os.remove(config3_file_dir)
python
def purge(): """Removes all files generated by Blended""" print("Purging the Blended files!") # Remove the templates folder templ_dir = os.path.join(cwd, "templates") if os.path.exists(templ_dir): shutil.rmtree(templ_dir) # Remove the content folder cont_dir = os.path.join(cwd, "content") if os.path.exists(cont_dir): shutil.rmtree(cont_dir) # Remove the build folder build_dir = os.path.join(cwd, "build") if os.path.exists(build_dir): shutil.rmtree(build_dir) # Remove config.py config_file_dir = os.path.join(cwd, "config.py") if os.path.exists(config_file_dir): os.remove(config_file_dir) # Remove config.pyc config2_file_dir = os.path.join(cwd, "config.pyc") if os.path.exists(config2_file_dir): os.remove(config2_file_dir) # Remove config.py config3_file_dir = os.path.join(cwd, "config.py.oldbak") if os.path.exists(config3_file_dir): os.remove(config3_file_dir)
[ "def", "purge", "(", ")", ":", "print", "(", "\"Purging the Blended files!\"", ")", "# Remove the templates folder", "templ_dir", "=", "os", ".", "path", ".", "join", "(", "cwd", ",", "\"templates\"", ")", "if", "os", ".", "path", ".", "exists", "(", "templ_...
Removes all files generated by Blended
[ "Removes", "all", "files", "generated", "by", "Blended" ]
train
https://github.com/BlendedSiteGenerator/Blended/blob/e5865a8633e461a22c86ef6ee98cdd7051c412ac/blended/__main__.py#L284-L316
BlendedSiteGenerator/Blended
blended/__main__.py
convert_text
def convert_text(filename): """Convert the post/page content using the converters""" text_content = open(filename, "r") if ".md" in filename: text_cont1 = "\n" + markdown.markdown(text_content.read()) + "\n" elif ".docx" in filename: with open(os.path.join(cwd, "content", filename), "rb") as docx_file: result = mammoth.convert_to_html(docx_file) final_docx_html = result.value text_cont1 = "\n" + final_docx_html + "\n" elif ".tile" in filename: text_cont1 = "\n" + textile.textile(text_content.read()) + "\n" elif ".jade" in filename: text_cont1 = "\n" + pyjade.simple_convert(text_content.read()) + "\n" elif ".rst" in filename: text_cont1 = "\n" + \ publish_parts(text_content.read(), writer_name='html')[ 'html_body'] + "\n" elif ".html" in filename: text_cont1 = text_content.read() elif ".txt" in filename: text_cont1 = text_content.read() else: print(filename + " is not a valid file type!") text_cont1 = "NULL" return text_cont1 + "\n\n"
python
def convert_text(filename): """Convert the post/page content using the converters""" text_content = open(filename, "r") if ".md" in filename: text_cont1 = "\n" + markdown.markdown(text_content.read()) + "\n" elif ".docx" in filename: with open(os.path.join(cwd, "content", filename), "rb") as docx_file: result = mammoth.convert_to_html(docx_file) final_docx_html = result.value text_cont1 = "\n" + final_docx_html + "\n" elif ".tile" in filename: text_cont1 = "\n" + textile.textile(text_content.read()) + "\n" elif ".jade" in filename: text_cont1 = "\n" + pyjade.simple_convert(text_content.read()) + "\n" elif ".rst" in filename: text_cont1 = "\n" + \ publish_parts(text_content.read(), writer_name='html')[ 'html_body'] + "\n" elif ".html" in filename: text_cont1 = text_content.read() elif ".txt" in filename: text_cont1 = text_content.read() else: print(filename + " is not a valid file type!") text_cont1 = "NULL" return text_cont1 + "\n\n"
[ "def", "convert_text", "(", "filename", ")", ":", "text_content", "=", "open", "(", "filename", ",", "\"r\"", ")", "if", "\".md\"", "in", "filename", ":", "text_cont1", "=", "\"\\n\"", "+", "markdown", ".", "markdown", "(", "text_content", ".", "read", "("...
Convert the post/page content using the converters
[ "Convert", "the", "post", "/", "page", "content", "using", "the", "converters" ]
train
https://github.com/BlendedSiteGenerator/Blended/blob/e5865a8633e461a22c86ef6ee98cdd7051c412ac/blended/__main__.py#L319-L345
BlendedSiteGenerator/Blended
blended/__main__.py
build_files
def build_files(outdir): """Build the files!""" # Make sure there is actually a configuration file config_file_dir = os.path.join(cwd, "config.py") if not os.path.exists(config_file_dir): sys.exit( "There dosen't seem to be a configuration file. Have you run the init command?") else: sys.path.insert(0, cwd) try: from config import website_name, website_description, website_language, home_page_list except: sys.exit( "ERROR: Some of the crucial configuration values could not be found! Maybe your config.py is too old. Run 'blended init' to fix.") try: from config import website_description_long, website_license, website_url, author_name, author_bio, plugins, minify_css, minify_js, custom_variables except: website_description_long = "" website_license = "" website_url = "" author_name = "" author_bio = "" plugins = [] custom_variables = {} minify_css = False minify_js = False print("WARNING: Some of the optional configuration values could not be found! Maybe your config.py is too old. Run 'blended init' to fix.\n") # Create the build folder build_dir = os.path.join(cwd, outdir) if "." not in outdir and ".." not in outdir and "..." not in outdir and "...." not in outdir and "....." not in outdir: replace_folder(build_dir) # Make sure there is actually a header template file header_file_dir = os.path.join(cwd, "templates", "header.html") if not os.path.exists(header_file_dir): sys.exit( "There dosen't seem to be a header template file. You need one to generate.") # Make sure there is actually a footer template file footer_file_dir = os.path.join(cwd, "templates", "footer.html") if not os.path.exists(footer_file_dir): sys.exit( "There dosen't seem to be a footer template file. 
You need one to generate.") # Open the header and footer files for reading header_file = open(header_file_dir, "r") footer_file = open(footer_file_dir, "r") # Create the HTML page listing page_list_item_file = os.path.join(cwd, "templates", "page_list_item.html") if not os.path.exists(page_list_item_file): page_list = '<ul class="page-list">\n' for root, dirs, files in os.walk(os.path.join(cwd, "content")): for filename in files: top = os.path.dirname(os.path.join(root, filename)) top2 = top.replace(os.path.join(cwd, "content"), "", 1) if platform != "win32": subfolder = top2.replace("/", "", 1) else: subfolder = top2.replace("\\", "", 1) if subfolder == "": subfolder_link = "" else: subfolder_link = subfolder + "/" file_modified = time.ctime( os.path.getmtime(os.path.join(root, filename))) newFilename = get_html_filename(filename) newFilename2 = get_html_clear_filename(filename) page_list = page_list + '<li class="page-list-item"><a href="' + subfolder_link + newFilename + \ '">' + newFilename2 + '</a><span class="page-list-item-time"> - ' + \ str(file_modified) + '</span></li>\n' page_list = page_list + '</ul>' else: with open(page_list_item_file, 'r') as f: page_list_item = f.read() page_list = "" for root, dirs, files in os.walk(os.path.join(cwd, "content")): dirs[:] = [d for d in dirs if "_" not in d] for filename in files: p_content = convert_text(os.path.join(root, filename)) top = os.path.dirname(os.path.join(root, filename)) top2 = top.replace(os.path.join(cwd, "content"), "", 1) if platform != "win32": subfolder = top2.replace("/", "", 1) else: subfolder = top2.replace("\\", "", 1) if subfolder == "": subfolder_link = "" else: subfolder_link = subfolder + "/" file_modified = time.ctime( os.path.getmtime(os.path.join(root, filename))) file_modified_day = str(datetime.strptime( file_modified, "%a %b %d %H:%M:%S %Y"))[8:10] file_modified_year = str(datetime.strptime( file_modified, "%a %b %d %H:%M:%S %Y"))[:4] file_modified_month = str(datetime.strptime( 
file_modified, "%a %b %d %H:%M:%S %Y"))[5:7] month_name = calendar.month_name[int(file_modified_month)] newFilename = get_html_filename(filename) newFilename2 = get_html_clear_filename(filename) page_list = page_list + page_list_item.replace("{path}", subfolder_link + newFilename).replace("{name}", newFilename2).replace( "{date}", str(file_modified)).replace("{content}", p_content).replace("{content_short}", p_content[:250] + "...").replace("{day}", file_modified_day).replace("{month}", file_modified_month).replace("{month_name}", month_name).replace("{year}", file_modified_year) if home_page_list == "yes" or home_page_list: # Open the home page file (index.html) for writing home_working_file = open(os.path.join(cwd, outdir, "index.html"), "w") home_working_file.write(header_file.read()) # Make sure there is actually a home page template file home_templ_dir = os.path.join(cwd, "templates", "home_page.html") if os.path.exists(home_templ_dir): home_templ_file = open(home_templ_dir, "r") home_working_file.write(home_templ_file.read()) else: print("\nNo home page template file found. 
Writing page list to index.html") home_working_file.write(page_list) home_working_file.write(footer_file.read()) home_working_file.close() for root, dirs, files in os.walk(os.path.join(cwd, "content")): dirs[:] = [d for d in dirs if "_" not in d] for filename in files: if not filename.startswith("_"): header_file = open(header_file_dir, "r") footer_file = open(footer_file_dir, "r") newFilename = get_html_filename(filename) top = os.path.dirname(os.path.join(root, filename)) top2 = top.replace(os.path.join(cwd, "content"), "", 1) if platform != "win32": subfolder = top2.replace("/", "", 1) else: subfolder = top2.replace("\\", "", 1) if subfolder == "": currents_working_file = open( os.path.join(cwd, outdir, newFilename), "w") else: create_folder(os.path.join(cwd, outdir, subfolder)) currents_working_file = open(os.path.join( cwd, outdir, subfolder, newFilename), "w") # Write the header currents_working_file.write(header_file.read()) text_cont1 = convert_text(os.path.join(root, filename)) if "+++++" in text_cont1.splitlines()[1]: page_template_file = text_cont1.splitlines()[0] text_cont1 = text_cont1.replace( text_cont1.splitlines()[0], "") text_cont1 = text_cont1.replace( text_cont1.splitlines()[1], "") else: page_template_file = "content_page" # Write the text content into the content template and onto the # build file content_templ_dir = os.path.join( cwd, "templates", page_template_file + ".html") if os.path.exists(content_templ_dir): content_templ_file = open(content_templ_dir, "r") content_templ_file1 = content_templ_file.read() content_templ_file2 = content_templ_file1.replace( "{page_content}", text_cont1) currents_working_file.write(content_templ_file2) else: currents_working_file.write(text_cont1) # Write the footer to the build file currents_working_file.write("\n" + footer_file.read()) # Close the build file currents_working_file.close() # Find all the nav(something) templates in the `templates` folder and # Read their content to the dict navs = {} for 
file in os.listdir(os.path.join(cwd, "templates")): if "nav" in file: nav_cont = open(os.path.join(cwd, "templates", file), "r") navs[file.replace(".html", "")] = nav_cont.read() nav_cont.close() forbidden_dirs = set(["assets", "templates"]) blended_version_message = "Built with Blended v" + \ str(app_version) build_date = str(datetime.now().date()) build_time = str(datetime.now().time()) build_datetime = str(datetime.now()) # Replace global variables such as site name and language for root, dirs, files in os.walk(os.path.join(cwd, outdir)): dirs[:] = [d for d in dirs if d not in forbidden_dirs] for filename in files: if filename != "config.pyc" and filename != "config.py": newFilename = get_html_clear_filename(filename) page_file = filename.replace(".html", "") page_folder = os.path.basename(os.path.dirname(os.path.join( root, filename))).replace("-", "").replace("_", "").title() page_folder_orig = os.path.basename( os.path.dirname(os.path.join(root, filename))) top = os.path.dirname(os.path.join(root, filename)) top2 = top.replace(os.path.join(cwd, outdir), "", 1) if platform != "win32": subfolder = top2.replace("/", "", 1) else: subfolder = top2.replace("\\", "", 1) if subfolder == "": subfolder_folder = os.path.join(cwd, outdir, filename) else: subfolder_folder = os.path.join( cwd, outdir, subfolder, filename) file_modified = time.ctime( os.path.getmtime(os.path.join(root, filename))) file_modified_day = str(datetime.strptime( file_modified, "%a %b %d %H:%M:%S %Y"))[8:10] file_modified_year = str(datetime.strptime( file_modified, "%a %b %d %H:%M:%S %Y"))[:4] file_modified_month = str(datetime.strptime( file_modified, "%a %b %d %H:%M:%S %Y"))[5:7] month_name = calendar.month_name[int(file_modified_month)] # The Loop! 
for line in fileinput.input(subfolder_folder, inplace=1): for var in custom_variables: line = line.replace( "{" + var + "}", custom_variables[var]) if len(plugins) != 0: for i in range(len(plugins)): if sys.version_info[0] < 2: main = importlib.import_module(plugins[i]) elif sys.version_info[0] < 3: main = __import__(plugins[i]) content = main.main() line = line.replace( "{" + plugins[i] + "}", content) if "{nav" in line: navname = line.split("{")[1].split("}")[0] line = line.replace( "{" + navname + "}", navs[(line.split("{"))[1].split("}")[0]]) line = line.replace( "{website_description}", website_description) line = line.replace( "{website_description_long}", website_description_long) line = line.replace("{website_license}", website_license) line = line.replace("{website_language}", website_language) line = line.replace("{website_url}", website_url) line = line.replace("{author_name}", author_name) line = line.replace("{author_bio}", author_bio) line = line.replace("{random_number}", str(randint(0, 100000000))) line = line.replace("{build_date}", build_date) line = line.replace("{build_time}", build_time) line = line.replace("{build_datetime}", build_datetime) line = line.replace("{page_list}", page_list) line = line.replace("{page_name}", newFilename) line = line.replace("{page_filename}", page_file) line = line.replace("{page_file}", filename) line = line.replace("{" + filename + "_active}", "active") if page_folder != outdir.title(): line = line.replace("{page_folder}", page_folder) else: line = line.replace("{page_folder}", "") if page_folder_orig != outdir: line = line.replace( "{page_folder_orig}", page_folder_orig) else: line = line.replace("{page_folder_orig}", "") line = line.replace("{page_date}", str(file_modified)) line = line.replace("{page_day}", str(file_modified_day)) line = line.replace("{page_year}", str(file_modified_year)) line = line.replace( "{page_month}", str(file_modified_month)) line = line.replace( "{page_month_name}", str(month_name)) 
line = line.replace("{blended_version}", str(app_version)) line = line.replace( "{blended_version_message}", blended_version_message) line = line.replace("{website_name}", website_name) top = os.path.join(cwd, outdir) startinglevel = top.count(os.sep) relative_path = "" level = root.count(os.sep) - startinglevel for i in range(level): relative_path = relative_path + "../" line = line.replace("{relative_root}", relative_path) print(line.rstrip('\n')) fileinput.close() # Copy the asset folder to the build folder if os.path.exists(os.path.join(cwd, "templates", "assets")): if os.path.exists(os.path.join(cwd, outdir, "assets")): shutil.rmtree(os.path.join(cwd, outdir, "assets")) shutil.copytree(os.path.join(cwd, "templates", "assets"), os.path.join(cwd, outdir, "assets")) for root, dirs, files in os.walk(os.path.join(cwd, outdir, "assets")): for file in files: if not file.startswith("_"): if (file.endswith(".sass")) or (file.endswith(".scss")): sass_text = open(os.path.join(root, file)).read() text_file = open(os.path.join( root, file[:-4] + "css"), "w") if sass_text != "": text_file.write(sass.compile(string=sass_text)) else: print(file + " is empty! Not compiling Sass.") text_file.close() if file.endswith(".less"): less_text = open(os.path.join(root, file)).read() text_file = open(os.path.join( root, file[:-4] + "css"), "w") if less_text != "": text_file.write(lesscpy.compile(StringIO(less_text))) else: print(file + " is empty! Not compiling Less.") text_file.close() if file.endswith(".styl"): try: styl_text = open(os.path.join(root, file)).read() text_file = open(os.path.join( root, file[:-4] + "css"), "w") if styl_text != "": text_file.write(Stylus().compile(styl_text)) else: print(file + " is empty! Not compiling Styl.") text_file.close() except: print("Not able to build with Stylus! Is it installed?") try: subprocess.call["npm", "install", "-g", "stylus"] except: print("NPM (NodeJS) not working. 
Is it installed?") if file.endswith(".coffee"): coffee_text = open(os.path.join(root, file)).read() text_file = open(os.path.join(root, file[:-6] + "js"), "w") if coffee_text != "": text_file.write(coffeescript.compile(coffee_text)) else: print(file + " is empty! Not compiling CoffeeScript.") text_file.close() if minify_css: if file.endswith(".css"): css_text = open(os.path.join(root, file)).read() text_file = open(os.path.join(root, file), "w") if css_text != "": text_file.write(cssmin(css_text)) text_file.close() if minify_js: if file.endswith(".js"): js_text = open(os.path.join(root, file)).read() text_file = open(os.path.join(root, file), "w") if js_text != "": text_file.write(jsmin(js_text)) text_file.close()
python
def build_files(outdir): """Build the files!""" # Make sure there is actually a configuration file config_file_dir = os.path.join(cwd, "config.py") if not os.path.exists(config_file_dir): sys.exit( "There dosen't seem to be a configuration file. Have you run the init command?") else: sys.path.insert(0, cwd) try: from config import website_name, website_description, website_language, home_page_list except: sys.exit( "ERROR: Some of the crucial configuration values could not be found! Maybe your config.py is too old. Run 'blended init' to fix.") try: from config import website_description_long, website_license, website_url, author_name, author_bio, plugins, minify_css, minify_js, custom_variables except: website_description_long = "" website_license = "" website_url = "" author_name = "" author_bio = "" plugins = [] custom_variables = {} minify_css = False minify_js = False print("WARNING: Some of the optional configuration values could not be found! Maybe your config.py is too old. Run 'blended init' to fix.\n") # Create the build folder build_dir = os.path.join(cwd, outdir) if "." not in outdir and ".." not in outdir and "..." not in outdir and "...." not in outdir and "....." not in outdir: replace_folder(build_dir) # Make sure there is actually a header template file header_file_dir = os.path.join(cwd, "templates", "header.html") if not os.path.exists(header_file_dir): sys.exit( "There dosen't seem to be a header template file. You need one to generate.") # Make sure there is actually a footer template file footer_file_dir = os.path.join(cwd, "templates", "footer.html") if not os.path.exists(footer_file_dir): sys.exit( "There dosen't seem to be a footer template file. 
You need one to generate.") # Open the header and footer files for reading header_file = open(header_file_dir, "r") footer_file = open(footer_file_dir, "r") # Create the HTML page listing page_list_item_file = os.path.join(cwd, "templates", "page_list_item.html") if not os.path.exists(page_list_item_file): page_list = '<ul class="page-list">\n' for root, dirs, files in os.walk(os.path.join(cwd, "content")): for filename in files: top = os.path.dirname(os.path.join(root, filename)) top2 = top.replace(os.path.join(cwd, "content"), "", 1) if platform != "win32": subfolder = top2.replace("/", "", 1) else: subfolder = top2.replace("\\", "", 1) if subfolder == "": subfolder_link = "" else: subfolder_link = subfolder + "/" file_modified = time.ctime( os.path.getmtime(os.path.join(root, filename))) newFilename = get_html_filename(filename) newFilename2 = get_html_clear_filename(filename) page_list = page_list + '<li class="page-list-item"><a href="' + subfolder_link + newFilename + \ '">' + newFilename2 + '</a><span class="page-list-item-time"> - ' + \ str(file_modified) + '</span></li>\n' page_list = page_list + '</ul>' else: with open(page_list_item_file, 'r') as f: page_list_item = f.read() page_list = "" for root, dirs, files in os.walk(os.path.join(cwd, "content")): dirs[:] = [d for d in dirs if "_" not in d] for filename in files: p_content = convert_text(os.path.join(root, filename)) top = os.path.dirname(os.path.join(root, filename)) top2 = top.replace(os.path.join(cwd, "content"), "", 1) if platform != "win32": subfolder = top2.replace("/", "", 1) else: subfolder = top2.replace("\\", "", 1) if subfolder == "": subfolder_link = "" else: subfolder_link = subfolder + "/" file_modified = time.ctime( os.path.getmtime(os.path.join(root, filename))) file_modified_day = str(datetime.strptime( file_modified, "%a %b %d %H:%M:%S %Y"))[8:10] file_modified_year = str(datetime.strptime( file_modified, "%a %b %d %H:%M:%S %Y"))[:4] file_modified_month = str(datetime.strptime( 
file_modified, "%a %b %d %H:%M:%S %Y"))[5:7] month_name = calendar.month_name[int(file_modified_month)] newFilename = get_html_filename(filename) newFilename2 = get_html_clear_filename(filename) page_list = page_list + page_list_item.replace("{path}", subfolder_link + newFilename).replace("{name}", newFilename2).replace( "{date}", str(file_modified)).replace("{content}", p_content).replace("{content_short}", p_content[:250] + "...").replace("{day}", file_modified_day).replace("{month}", file_modified_month).replace("{month_name}", month_name).replace("{year}", file_modified_year) if home_page_list == "yes" or home_page_list: # Open the home page file (index.html) for writing home_working_file = open(os.path.join(cwd, outdir, "index.html"), "w") home_working_file.write(header_file.read()) # Make sure there is actually a home page template file home_templ_dir = os.path.join(cwd, "templates", "home_page.html") if os.path.exists(home_templ_dir): home_templ_file = open(home_templ_dir, "r") home_working_file.write(home_templ_file.read()) else: print("\nNo home page template file found. 
Writing page list to index.html") home_working_file.write(page_list) home_working_file.write(footer_file.read()) home_working_file.close() for root, dirs, files in os.walk(os.path.join(cwd, "content")): dirs[:] = [d for d in dirs if "_" not in d] for filename in files: if not filename.startswith("_"): header_file = open(header_file_dir, "r") footer_file = open(footer_file_dir, "r") newFilename = get_html_filename(filename) top = os.path.dirname(os.path.join(root, filename)) top2 = top.replace(os.path.join(cwd, "content"), "", 1) if platform != "win32": subfolder = top2.replace("/", "", 1) else: subfolder = top2.replace("\\", "", 1) if subfolder == "": currents_working_file = open( os.path.join(cwd, outdir, newFilename), "w") else: create_folder(os.path.join(cwd, outdir, subfolder)) currents_working_file = open(os.path.join( cwd, outdir, subfolder, newFilename), "w") # Write the header currents_working_file.write(header_file.read()) text_cont1 = convert_text(os.path.join(root, filename)) if "+++++" in text_cont1.splitlines()[1]: page_template_file = text_cont1.splitlines()[0] text_cont1 = text_cont1.replace( text_cont1.splitlines()[0], "") text_cont1 = text_cont1.replace( text_cont1.splitlines()[1], "") else: page_template_file = "content_page" # Write the text content into the content template and onto the # build file content_templ_dir = os.path.join( cwd, "templates", page_template_file + ".html") if os.path.exists(content_templ_dir): content_templ_file = open(content_templ_dir, "r") content_templ_file1 = content_templ_file.read() content_templ_file2 = content_templ_file1.replace( "{page_content}", text_cont1) currents_working_file.write(content_templ_file2) else: currents_working_file.write(text_cont1) # Write the footer to the build file currents_working_file.write("\n" + footer_file.read()) # Close the build file currents_working_file.close() # Find all the nav(something) templates in the `templates` folder and # Read their content to the dict navs = {} for 
file in os.listdir(os.path.join(cwd, "templates")): if "nav" in file: nav_cont = open(os.path.join(cwd, "templates", file), "r") navs[file.replace(".html", "")] = nav_cont.read() nav_cont.close() forbidden_dirs = set(["assets", "templates"]) blended_version_message = "Built with Blended v" + \ str(app_version) build_date = str(datetime.now().date()) build_time = str(datetime.now().time()) build_datetime = str(datetime.now()) # Replace global variables such as site name and language for root, dirs, files in os.walk(os.path.join(cwd, outdir)): dirs[:] = [d for d in dirs if d not in forbidden_dirs] for filename in files: if filename != "config.pyc" and filename != "config.py": newFilename = get_html_clear_filename(filename) page_file = filename.replace(".html", "") page_folder = os.path.basename(os.path.dirname(os.path.join( root, filename))).replace("-", "").replace("_", "").title() page_folder_orig = os.path.basename( os.path.dirname(os.path.join(root, filename))) top = os.path.dirname(os.path.join(root, filename)) top2 = top.replace(os.path.join(cwd, outdir), "", 1) if platform != "win32": subfolder = top2.replace("/", "", 1) else: subfolder = top2.replace("\\", "", 1) if subfolder == "": subfolder_folder = os.path.join(cwd, outdir, filename) else: subfolder_folder = os.path.join( cwd, outdir, subfolder, filename) file_modified = time.ctime( os.path.getmtime(os.path.join(root, filename))) file_modified_day = str(datetime.strptime( file_modified, "%a %b %d %H:%M:%S %Y"))[8:10] file_modified_year = str(datetime.strptime( file_modified, "%a %b %d %H:%M:%S %Y"))[:4] file_modified_month = str(datetime.strptime( file_modified, "%a %b %d %H:%M:%S %Y"))[5:7] month_name = calendar.month_name[int(file_modified_month)] # The Loop! 
for line in fileinput.input(subfolder_folder, inplace=1): for var in custom_variables: line = line.replace( "{" + var + "}", custom_variables[var]) if len(plugins) != 0: for i in range(len(plugins)): if sys.version_info[0] < 2: main = importlib.import_module(plugins[i]) elif sys.version_info[0] < 3: main = __import__(plugins[i]) content = main.main() line = line.replace( "{" + plugins[i] + "}", content) if "{nav" in line: navname = line.split("{")[1].split("}")[0] line = line.replace( "{" + navname + "}", navs[(line.split("{"))[1].split("}")[0]]) line = line.replace( "{website_description}", website_description) line = line.replace( "{website_description_long}", website_description_long) line = line.replace("{website_license}", website_license) line = line.replace("{website_language}", website_language) line = line.replace("{website_url}", website_url) line = line.replace("{author_name}", author_name) line = line.replace("{author_bio}", author_bio) line = line.replace("{random_number}", str(randint(0, 100000000))) line = line.replace("{build_date}", build_date) line = line.replace("{build_time}", build_time) line = line.replace("{build_datetime}", build_datetime) line = line.replace("{page_list}", page_list) line = line.replace("{page_name}", newFilename) line = line.replace("{page_filename}", page_file) line = line.replace("{page_file}", filename) line = line.replace("{" + filename + "_active}", "active") if page_folder != outdir.title(): line = line.replace("{page_folder}", page_folder) else: line = line.replace("{page_folder}", "") if page_folder_orig != outdir: line = line.replace( "{page_folder_orig}", page_folder_orig) else: line = line.replace("{page_folder_orig}", "") line = line.replace("{page_date}", str(file_modified)) line = line.replace("{page_day}", str(file_modified_day)) line = line.replace("{page_year}", str(file_modified_year)) line = line.replace( "{page_month}", str(file_modified_month)) line = line.replace( "{page_month_name}", str(month_name)) 
line = line.replace("{blended_version}", str(app_version)) line = line.replace( "{blended_version_message}", blended_version_message) line = line.replace("{website_name}", website_name) top = os.path.join(cwd, outdir) startinglevel = top.count(os.sep) relative_path = "" level = root.count(os.sep) - startinglevel for i in range(level): relative_path = relative_path + "../" line = line.replace("{relative_root}", relative_path) print(line.rstrip('\n')) fileinput.close() # Copy the asset folder to the build folder if os.path.exists(os.path.join(cwd, "templates", "assets")): if os.path.exists(os.path.join(cwd, outdir, "assets")): shutil.rmtree(os.path.join(cwd, outdir, "assets")) shutil.copytree(os.path.join(cwd, "templates", "assets"), os.path.join(cwd, outdir, "assets")) for root, dirs, files in os.walk(os.path.join(cwd, outdir, "assets")): for file in files: if not file.startswith("_"): if (file.endswith(".sass")) or (file.endswith(".scss")): sass_text = open(os.path.join(root, file)).read() text_file = open(os.path.join( root, file[:-4] + "css"), "w") if sass_text != "": text_file.write(sass.compile(string=sass_text)) else: print(file + " is empty! Not compiling Sass.") text_file.close() if file.endswith(".less"): less_text = open(os.path.join(root, file)).read() text_file = open(os.path.join( root, file[:-4] + "css"), "w") if less_text != "": text_file.write(lesscpy.compile(StringIO(less_text))) else: print(file + " is empty! Not compiling Less.") text_file.close() if file.endswith(".styl"): try: styl_text = open(os.path.join(root, file)).read() text_file = open(os.path.join( root, file[:-4] + "css"), "w") if styl_text != "": text_file.write(Stylus().compile(styl_text)) else: print(file + " is empty! Not compiling Styl.") text_file.close() except: print("Not able to build with Stylus! Is it installed?") try: subprocess.call["npm", "install", "-g", "stylus"] except: print("NPM (NodeJS) not working. 
Is it installed?") if file.endswith(".coffee"): coffee_text = open(os.path.join(root, file)).read() text_file = open(os.path.join(root, file[:-6] + "js"), "w") if coffee_text != "": text_file.write(coffeescript.compile(coffee_text)) else: print(file + " is empty! Not compiling CoffeeScript.") text_file.close() if minify_css: if file.endswith(".css"): css_text = open(os.path.join(root, file)).read() text_file = open(os.path.join(root, file), "w") if css_text != "": text_file.write(cssmin(css_text)) text_file.close() if minify_js: if file.endswith(".js"): js_text = open(os.path.join(root, file)).read() text_file = open(os.path.join(root, file), "w") if js_text != "": text_file.write(jsmin(js_text)) text_file.close()
[ "def", "build_files", "(", "outdir", ")", ":", "# Make sure there is actually a configuration file", "config_file_dir", "=", "os", ".", "path", ".", "join", "(", "cwd", ",", "\"config.py\"", ")", "if", "not", "os", ".", "path", ".", "exists", "(", "config_file_d...
Build the files!
[ "Build", "the", "files!" ]
train
https://github.com/BlendedSiteGenerator/Blended/blob/e5865a8633e461a22c86ef6ee98cdd7051c412ac/blended/__main__.py#L348-L713
BlendedSiteGenerator/Blended
blended/__main__.py
build
def build(outdir): """Blends the generated files and outputs a HTML website""" print("Building your Blended files into a website!") reload(sys) sys.setdefaultencoding('utf8') build_files(outdir) print("The files are built! You can find them in the " + outdir + "/ directory. Run the view command to see what you have created in a web browser.")
python
def build(outdir): """Blends the generated files and outputs a HTML website""" print("Building your Blended files into a website!") reload(sys) sys.setdefaultencoding('utf8') build_files(outdir) print("The files are built! You can find them in the " + outdir + "/ directory. Run the view command to see what you have created in a web browser.")
[ "def", "build", "(", "outdir", ")", ":", "print", "(", "\"Building your Blended files into a website!\"", ")", "reload", "(", "sys", ")", "sys", ".", "setdefaultencoding", "(", "'utf8'", ")", "build_files", "(", "outdir", ")", "print", "(", "\"The files are built!...
Blends the generated files and outputs a HTML website
[ "Blends", "the", "generated", "files", "and", "outputs", "a", "HTML", "website" ]
train
https://github.com/BlendedSiteGenerator/Blended/blob/e5865a8633e461a22c86ef6ee98cdd7051c412ac/blended/__main__.py#L718-L729
BlendedSiteGenerator/Blended
blended/__main__.py
interactive
def interactive(outdir): """Blends the generated files and outputs a HTML website on file change""" print("Building your Blended files into a website!") global outdir_type outdir_type = outdir reload(sys) sys.setdefaultencoding('utf8') build_files(outdir) print("Watching the content and templates directories for changes, press CTRL+C to stop...\n") w = Watcher() w.run()
python
def interactive(outdir): """Blends the generated files and outputs a HTML website on file change""" print("Building your Blended files into a website!") global outdir_type outdir_type = outdir reload(sys) sys.setdefaultencoding('utf8') build_files(outdir) print("Watching the content and templates directories for changes, press CTRL+C to stop...\n") w = Watcher() w.run()
[ "def", "interactive", "(", "outdir", ")", ":", "print", "(", "\"Building your Blended files into a website!\"", ")", "global", "outdir_type", "outdir_type", "=", "outdir", "reload", "(", "sys", ")", "sys", ".", "setdefaultencoding", "(", "'utf8'", ")", "build_files"...
Blends the generated files and outputs a HTML website on file change
[ "Blends", "the", "generated", "files", "and", "outputs", "a", "HTML", "website", "on", "file", "change" ]
train
https://github.com/BlendedSiteGenerator/Blended/blob/e5865a8633e461a22c86ef6ee98cdd7051c412ac/blended/__main__.py#L792-L808
BlendedSiteGenerator/Blended
blended/__main__.py
view
def view(outdir): """Opens the built index.html file in a web browser""" index_path = os.path.realpath(os.path.join(cwd, outdir, "index.html")) if os.path.exists(index_path): webbrowser.open('file://' + index_path) else: print("The index.html file could not be found in the " + outdir + "/ folder! Have you deleted it or have you built with home_page_list set to 'no' in config.py?")
python
def view(outdir): """Opens the built index.html file in a web browser""" index_path = os.path.realpath(os.path.join(cwd, outdir, "index.html")) if os.path.exists(index_path): webbrowser.open('file://' + index_path) else: print("The index.html file could not be found in the " + outdir + "/ folder! Have you deleted it or have you built with home_page_list set to 'no' in config.py?")
[ "def", "view", "(", "outdir", ")", ":", "index_path", "=", "os", ".", "path", ".", "realpath", "(", "os", ".", "path", ".", "join", "(", "cwd", ",", "outdir", ",", "\"index.html\"", ")", ")", "if", "os", ".", "path", ".", "exists", "(", "index_path...
Opens the built index.html file in a web browser
[ "Opens", "the", "built", "index", ".", "html", "file", "in", "a", "web", "browser" ]
train
https://github.com/BlendedSiteGenerator/Blended/blob/e5865a8633e461a22c86ef6ee98cdd7051c412ac/blended/__main__.py#L813-L821
BlendedSiteGenerator/Blended
blended/__main__.py
Watcher.run
def run(self): """Run the builder on changes""" event_handler = Handler() threads = [] paths = [os.path.join(cwd, "content"), os.path.join(cwd, "templates")] for i in paths: targetPath = str(i) self.observer.schedule(event_handler, targetPath, recursive=True) threads.append(self.observer) self.observer.start() try: while True: time.sleep(5) except: self.observer.stop() print("\nObserver stopped.") self.observer.join()
python
def run(self): """Run the builder on changes""" event_handler = Handler() threads = [] paths = [os.path.join(cwd, "content"), os.path.join(cwd, "templates")] for i in paths: targetPath = str(i) self.observer.schedule(event_handler, targetPath, recursive=True) threads.append(self.observer) self.observer.start() try: while True: time.sleep(5) except: self.observer.stop() print("\nObserver stopped.") self.observer.join()
[ "def", "run", "(", "self", ")", ":", "event_handler", "=", "Handler", "(", ")", "threads", "=", "[", "]", "paths", "=", "[", "os", ".", "path", ".", "join", "(", "cwd", ",", "\"content\"", ")", ",", "os", ".", "path", ".", "join", "(", "cwd", "...
Run the builder on changes
[ "Run", "the", "builder", "on", "changes" ]
train
https://github.com/BlendedSiteGenerator/Blended/blob/e5865a8633e461a22c86ef6ee98cdd7051c412ac/blended/__main__.py#L742-L762
darxtrix/lehar
lehar/main.py
_sanitize_numbers
def _sanitize_numbers(uncleaned_numbers): """ Convert strings to integers if possible """ cleaned_numbers = [] for x in uncleaned_numbers: try: cleaned_numbers.append(int(x)) except ValueError: cleaned_numbers.append(x) return cleaned_numbers
python
def _sanitize_numbers(uncleaned_numbers): """ Convert strings to integers if possible """ cleaned_numbers = [] for x in uncleaned_numbers: try: cleaned_numbers.append(int(x)) except ValueError: cleaned_numbers.append(x) return cleaned_numbers
[ "def", "_sanitize_numbers", "(", "uncleaned_numbers", ")", ":", "cleaned_numbers", "=", "[", "]", "for", "x", "in", "uncleaned_numbers", ":", "try", ":", "cleaned_numbers", ".", "append", "(", "int", "(", "x", ")", ")", "except", "ValueError", ":", "cleaned_...
Convert strings to integers if possible
[ "Convert", "strings", "to", "integers", "if", "possible" ]
train
https://github.com/darxtrix/lehar/blob/8a2fbeb2b38068dcb609d5dda37b24bd2bc6b343/lehar/main.py#L38-L48
darxtrix/lehar
lehar/main.py
_handle_negatives
def _handle_negatives(numbers): """ Add the minimum negative number to all the numbers in the such that all the elements become >= 0 """ min_number = min(filter(lambda x : type(x)==int,numbers)) if min_number < 0: return [x+abs(min_number) if type(x)==int else x for x in numbers] else: return numbers
python
def _handle_negatives(numbers): """ Add the minimum negative number to all the numbers in the such that all the elements become >= 0 """ min_number = min(filter(lambda x : type(x)==int,numbers)) if min_number < 0: return [x+abs(min_number) if type(x)==int else x for x in numbers] else: return numbers
[ "def", "_handle_negatives", "(", "numbers", ")", ":", "min_number", "=", "min", "(", "filter", "(", "lambda", "x", ":", "type", "(", "x", ")", "==", "int", ",", "numbers", ")", ")", "if", "min_number", "<", "0", ":", "return", "[", "x", "+", "abs",...
Add the minimum negative number to all the numbers in the such that all the elements become >= 0
[ "Add", "the", "minimum", "negative", "number", "to", "all", "the", "numbers", "in", "the", "such", "that", "all", "the", "elements", "become", ">", "=", "0" ]
train
https://github.com/darxtrix/lehar/blob/8a2fbeb2b38068dcb609d5dda37b24bd2bc6b343/lehar/main.py#L51-L60
darxtrix/lehar
lehar/main.py
_draw_tickgram
def _draw_tickgram(numbers): """ Takes a list of integers and generate the equivalent list of ticks corresponding to each of the number """ max_number = max(filter(lambda x : type(x)==int,numbers)) # If the maxium number is 0, then all the numbers should be 0 # coz we have called handle_negatives prior to this function if max_number == 0 : return upticks[0]*len(numbers) else: normalized_numbers = [ float(x)/max_number if type(x)==int else x for x in numbers ] upticks_indexes = [ int(math.ceil(x*len(upticks))) if type(x)==float else x for x in normalized_numbers ] return ''.join([ ' ' if type(x)==str else upticks[x-1] if x != 0 else upticks[0] for x in upticks_indexes ])
python
def _draw_tickgram(numbers): """ Takes a list of integers and generate the equivalent list of ticks corresponding to each of the number """ max_number = max(filter(lambda x : type(x)==int,numbers)) # If the maxium number is 0, then all the numbers should be 0 # coz we have called handle_negatives prior to this function if max_number == 0 : return upticks[0]*len(numbers) else: normalized_numbers = [ float(x)/max_number if type(x)==int else x for x in numbers ] upticks_indexes = [ int(math.ceil(x*len(upticks))) if type(x)==float else x for x in normalized_numbers ] return ''.join([ ' ' if type(x)==str else upticks[x-1] if x != 0 else upticks[0] for x in upticks_indexes ])
[ "def", "_draw_tickgram", "(", "numbers", ")", ":", "max_number", "=", "max", "(", "filter", "(", "lambda", "x", ":", "type", "(", "x", ")", "==", "int", ",", "numbers", ")", ")", "# If the maxium number is 0, then all the numbers should be 0", "# coz we have calle...
Takes a list of integers and generate the equivalent list of ticks corresponding to each of the number
[ "Takes", "a", "list", "of", "integers", "and", "generate", "the", "equivalent", "list", "of", "ticks", "corresponding", "to", "each", "of", "the", "number" ]
train
https://github.com/darxtrix/lehar/blob/8a2fbeb2b38068dcb609d5dda37b24bd2bc6b343/lehar/main.py#L63-L76
INM-6/hybridLFPy
examples/Hagen_et_al_2016_cercor/figure_06.py
plot_multi_scale_output_b
def plot_multi_scale_output_b(fig, X='L5E'): '''docstring me''' show_ax_labels = True show_insets = False show_images = False T=[800, 1000] T_inset=[900, 920] left = 0.075 bottom = 0.05 top = 0.475 right = 0.95 axwidth = 0.16 numcols = 4 insetwidth = axwidth/2 insetheight = 0.5 lefts = np.linspace(left, right-axwidth, numcols) lefts += axwidth/2 #lower row of panels #fig = plt.figure() #fig.subplots_adjust(left=0.12, right=0.9, bottom=0.36, top=0.9, wspace=0.2, hspace=0.3) ############################################################################ # E part, soma locations ############################################################################ ax4 = fig.add_axes([lefts[0], bottom, axwidth, top-bottom], frameon=False) plt.locator_params(nbins=4) ax4.xaxis.set_ticks([]) ax4.yaxis.set_ticks([]) if show_ax_labels: phlp.annotate_subplot(ax4, ncols=4, nrows=1, letter='E') plot_population(ax4, params, isometricangle=np.pi/24, rasterized=False) ############################################################################ # F part, CSD ############################################################################ ax5 = fig.add_axes([lefts[1], bottom, axwidth, top-bottom]) plt.locator_params(nbins=4) phlp.remove_axis_junk(ax5) if show_ax_labels: phlp.annotate_subplot(ax5, ncols=4, nrows=1, letter='F') plot_signal_sum(ax5, params, fname=os.path.join(params.savefolder, 'CSDsum.h5'), unit='$\mu$A mm$^{-3}$', T=T, ylim=[ax4.axis()[2], ax4.axis()[3]], rasterized=False) ax5.set_title('CSD', va='center') # Inset if show_insets: ax6 = fig.add_axes([lefts[1]+axwidth-insetwidth, top-insetheight, insetwidth, insetheight]) plt.locator_params(nbins=4) phlp.remove_axis_junk(ax6) plot_signal_sum_colorplot(ax6, params, os.path.join(params.savefolder, 'CSDsum.h5'), unit=r'$\mu$Amm$^{-3}$', T=T_inset, ylim=[ax4.axis()[2], ax4.axis()[3]], fancy=False,colorbar=False,cmap='bwr_r') ax6.set_xticks(T_inset) ax6.set_yticklabels([]) #show traces superimposed on color image if show_images: 
plot_signal_sum_colorplot(ax5, params, os.path.join(params.savefolder, 'CSDsum.h5'), unit=r'$\mu$Amm$^{-3}$', T=T, ylim=[ax4.axis()[2], ax4.axis()[3]], fancy=False,colorbar=False,cmap='jet_r') ############################################################################ # G part, LFP ############################################################################ ax7 = fig.add_axes([lefts[2], bottom, axwidth, top-bottom]) plt.locator_params(nbins=4) if show_ax_labels: phlp.annotate_subplot(ax7, ncols=4, nrows=1, letter='G') phlp.remove_axis_junk(ax7) plot_signal_sum(ax7, params, fname=os.path.join(params.savefolder, 'LFPsum.h5'), unit='mV', T=T, ylim=[ax4.axis()[2], ax4.axis()[3]], rasterized=False) ax7.set_title('LFP',va='center') # Inset if show_insets: ax8 = fig.add_axes([lefts[2]+axwidth-insetwidth, top-insetheight, insetwidth, insetheight]) plt.locator_params(nbins=4) phlp.remove_axis_junk(ax8) plot_signal_sum_colorplot(ax8, params, os.path.join(params.savefolder, 'LFPsum.h5'), unit='mV', T=T_inset, ylim=[ax4.axis()[2], ax4.axis()[3]], fancy=False,colorbar=False,cmap='bwr_r') ax8.set_xticks(T_inset) ax8.set_yticklabels([]) #show traces superimposed on color image if show_images: plot_signal_sum_colorplot(ax7, params, os.path.join(params.savefolder, 'LFPsum.h5'), unit='mV', T=T, ylim=[ax4.axis()[2], ax4.axis()[3]], fancy=False,colorbar=False,cmap='bwr_r')
python
def plot_multi_scale_output_b(fig, X='L5E'): '''docstring me''' show_ax_labels = True show_insets = False show_images = False T=[800, 1000] T_inset=[900, 920] left = 0.075 bottom = 0.05 top = 0.475 right = 0.95 axwidth = 0.16 numcols = 4 insetwidth = axwidth/2 insetheight = 0.5 lefts = np.linspace(left, right-axwidth, numcols) lefts += axwidth/2 #lower row of panels #fig = plt.figure() #fig.subplots_adjust(left=0.12, right=0.9, bottom=0.36, top=0.9, wspace=0.2, hspace=0.3) ############################################################################ # E part, soma locations ############################################################################ ax4 = fig.add_axes([lefts[0], bottom, axwidth, top-bottom], frameon=False) plt.locator_params(nbins=4) ax4.xaxis.set_ticks([]) ax4.yaxis.set_ticks([]) if show_ax_labels: phlp.annotate_subplot(ax4, ncols=4, nrows=1, letter='E') plot_population(ax4, params, isometricangle=np.pi/24, rasterized=False) ############################################################################ # F part, CSD ############################################################################ ax5 = fig.add_axes([lefts[1], bottom, axwidth, top-bottom]) plt.locator_params(nbins=4) phlp.remove_axis_junk(ax5) if show_ax_labels: phlp.annotate_subplot(ax5, ncols=4, nrows=1, letter='F') plot_signal_sum(ax5, params, fname=os.path.join(params.savefolder, 'CSDsum.h5'), unit='$\mu$A mm$^{-3}$', T=T, ylim=[ax4.axis()[2], ax4.axis()[3]], rasterized=False) ax5.set_title('CSD', va='center') # Inset if show_insets: ax6 = fig.add_axes([lefts[1]+axwidth-insetwidth, top-insetheight, insetwidth, insetheight]) plt.locator_params(nbins=4) phlp.remove_axis_junk(ax6) plot_signal_sum_colorplot(ax6, params, os.path.join(params.savefolder, 'CSDsum.h5'), unit=r'$\mu$Amm$^{-3}$', T=T_inset, ylim=[ax4.axis()[2], ax4.axis()[3]], fancy=False,colorbar=False,cmap='bwr_r') ax6.set_xticks(T_inset) ax6.set_yticklabels([]) #show traces superimposed on color image if show_images: 
plot_signal_sum_colorplot(ax5, params, os.path.join(params.savefolder, 'CSDsum.h5'), unit=r'$\mu$Amm$^{-3}$', T=T, ylim=[ax4.axis()[2], ax4.axis()[3]], fancy=False,colorbar=False,cmap='jet_r') ############################################################################ # G part, LFP ############################################################################ ax7 = fig.add_axes([lefts[2], bottom, axwidth, top-bottom]) plt.locator_params(nbins=4) if show_ax_labels: phlp.annotate_subplot(ax7, ncols=4, nrows=1, letter='G') phlp.remove_axis_junk(ax7) plot_signal_sum(ax7, params, fname=os.path.join(params.savefolder, 'LFPsum.h5'), unit='mV', T=T, ylim=[ax4.axis()[2], ax4.axis()[3]], rasterized=False) ax7.set_title('LFP',va='center') # Inset if show_insets: ax8 = fig.add_axes([lefts[2]+axwidth-insetwidth, top-insetheight, insetwidth, insetheight]) plt.locator_params(nbins=4) phlp.remove_axis_junk(ax8) plot_signal_sum_colorplot(ax8, params, os.path.join(params.savefolder, 'LFPsum.h5'), unit='mV', T=T_inset, ylim=[ax4.axis()[2], ax4.axis()[3]], fancy=False,colorbar=False,cmap='bwr_r') ax8.set_xticks(T_inset) ax8.set_yticklabels([]) #show traces superimposed on color image if show_images: plot_signal_sum_colorplot(ax7, params, os.path.join(params.savefolder, 'LFPsum.h5'), unit='mV', T=T, ylim=[ax4.axis()[2], ax4.axis()[3]], fancy=False,colorbar=False,cmap='bwr_r')
[ "def", "plot_multi_scale_output_b", "(", "fig", ",", "X", "=", "'L5E'", ")", ":", "show_ax_labels", "=", "True", "show_insets", "=", "False", "show_images", "=", "False", "T", "=", "[", "800", ",", "1000", "]", "T_inset", "=", "[", "900", ",", "920", "...
docstring me
[ "docstring", "me" ]
train
https://github.com/INM-6/hybridLFPy/blob/c38bdf38982c4624c2f70caeb50c40f1d5980abd/examples/Hagen_et_al_2016_cercor/figure_06.py#L284-L394
INM-6/hybridLFPy
examples/Hagen_et_al_2016_cercor/nest_output_processing.py
get_raw_gids
def get_raw_gids(model_params): ''' Reads text file containing gids of neuron populations as created within the NEST simulation. These gids are not continuous as in the simulation devices get created in between. ''' gidfile = open(os.path.join(model_params.raw_nest_output_path, model_params.GID_filename), 'r') gids = [] for l in gidfile : a = l.split() gids.append([int(a[0]),int(a[1])]) return gids
python
def get_raw_gids(model_params): ''' Reads text file containing gids of neuron populations as created within the NEST simulation. These gids are not continuous as in the simulation devices get created in between. ''' gidfile = open(os.path.join(model_params.raw_nest_output_path, model_params.GID_filename), 'r') gids = [] for l in gidfile : a = l.split() gids.append([int(a[0]),int(a[1])]) return gids
[ "def", "get_raw_gids", "(", "model_params", ")", ":", "gidfile", "=", "open", "(", "os", ".", "path", ".", "join", "(", "model_params", ".", "raw_nest_output_path", ",", "model_params", ".", "GID_filename", ")", ",", "'r'", ")", "gids", "=", "[", "]", "f...
Reads text file containing gids of neuron populations as created within the NEST simulation. These gids are not continuous as in the simulation devices get created in between.
[ "Reads", "text", "file", "containing", "gids", "of", "neuron", "populations", "as", "created", "within", "the", "NEST", "simulation", ".", "These", "gids", "are", "not", "continuous", "as", "in", "the", "simulation", "devices", "get", "created", "in", "between...
train
https://github.com/INM-6/hybridLFPy/blob/c38bdf38982c4624c2f70caeb50c40f1d5980abd/examples/Hagen_et_al_2016_cercor/nest_output_processing.py#L25-L37
BlendedSiteGenerator/Blended
blended/functions.py
replace_folder
def replace_folder(path): """If the specified folder exists, it is deleted and recreated""" if os.path.exists(path): shutil.rmtree(path) os.makedirs(path) else: os.makedirs(path)
python
def replace_folder(path): """If the specified folder exists, it is deleted and recreated""" if os.path.exists(path): shutil.rmtree(path) os.makedirs(path) else: os.makedirs(path)
[ "def", "replace_folder", "(", "path", ")", ":", "if", "os", ".", "path", ".", "exists", "(", "path", ")", ":", "shutil", ".", "rmtree", "(", "path", ")", "os", ".", "makedirs", "(", "path", ")", "else", ":", "os", ".", "makedirs", "(", "path", ")...
If the specified folder exists, it is deleted and recreated
[ "If", "the", "specified", "folder", "exists", "it", "is", "deleted", "and", "recreated" ]
train
https://github.com/BlendedSiteGenerator/Blended/blob/e5865a8633e461a22c86ef6ee98cdd7051c412ac/blended/functions.py#L24-L30
BlendedSiteGenerator/Blended
blended/functions.py
get_html_filename
def get_html_filename(filename): """Converts the filename to a .html extension""" if ".html" in filename: newFilename = filename elif ".md" in filename: newFilename = filename.replace(".md", ".html") elif ".tile" in filename: newFilename = filename.replace(".tile", ".html") elif ".jade" in filename: newFilename = filename.replace(".jade", ".html") elif ".txt" in filename: newFilename = filename.replace(".txt", ".html") elif ".rst" in filename: newFilename = filename.replace(".rst", ".html") elif ".docx" in filename: newFilename = filename.replace(".docx", ".html") else: print(filename + " is not a valid file type!") return newFilename
python
def get_html_filename(filename): """Converts the filename to a .html extension""" if ".html" in filename: newFilename = filename elif ".md" in filename: newFilename = filename.replace(".md", ".html") elif ".tile" in filename: newFilename = filename.replace(".tile", ".html") elif ".jade" in filename: newFilename = filename.replace(".jade", ".html") elif ".txt" in filename: newFilename = filename.replace(".txt", ".html") elif ".rst" in filename: newFilename = filename.replace(".rst", ".html") elif ".docx" in filename: newFilename = filename.replace(".docx", ".html") else: print(filename + " is not a valid file type!") return newFilename
[ "def", "get_html_filename", "(", "filename", ")", ":", "if", "\".html\"", "in", "filename", ":", "newFilename", "=", "filename", "elif", "\".md\"", "in", "filename", ":", "newFilename", "=", "filename", ".", "replace", "(", "\".md\"", ",", "\".html\"", ")", ...
Converts the filename to a .html extension
[ "Converts", "the", "filename", "to", "a", ".", "html", "extension" ]
train
https://github.com/BlendedSiteGenerator/Blended/blob/e5865a8633e461a22c86ef6ee98cdd7051c412ac/blended/functions.py#L33-L52
BlendedSiteGenerator/Blended
blended/functions.py
get_html_clear_filename
def get_html_clear_filename(filename): """Clears the file extension from the filename and makes it nice looking""" newFilename = filename.replace(".html", "") newFilename = newFilename.replace(".md", "") newFilename = newFilename.replace(".txt", "") newFilename = newFilename.replace(".tile", "") newFilename = newFilename.replace(".jade", "") newFilename = newFilename.replace(".rst", "") newFilename = newFilename.replace(".docx", "") newFilename = newFilename.replace("index", "home") newFilename = newFilename.replace("-", " ") newFilename = newFilename.replace("_", " ") newFilename = newFilename.title() return newFilename
python
def get_html_clear_filename(filename): """Clears the file extension from the filename and makes it nice looking""" newFilename = filename.replace(".html", "") newFilename = newFilename.replace(".md", "") newFilename = newFilename.replace(".txt", "") newFilename = newFilename.replace(".tile", "") newFilename = newFilename.replace(".jade", "") newFilename = newFilename.replace(".rst", "") newFilename = newFilename.replace(".docx", "") newFilename = newFilename.replace("index", "home") newFilename = newFilename.replace("-", " ") newFilename = newFilename.replace("_", " ") newFilename = newFilename.title() return newFilename
[ "def", "get_html_clear_filename", "(", "filename", ")", ":", "newFilename", "=", "filename", ".", "replace", "(", "\".html\"", ",", "\"\"", ")", "newFilename", "=", "newFilename", ".", "replace", "(", "\".md\"", ",", "\"\"", ")", "newFilename", "=", "newFilena...
Clears the file extension from the filename and makes it nice looking
[ "Clears", "the", "file", "extension", "from", "the", "filename", "and", "makes", "it", "nice", "looking" ]
train
https://github.com/BlendedSiteGenerator/Blended/blob/e5865a8633e461a22c86ef6ee98cdd7051c412ac/blended/functions.py#L55-L69