id
int32
0
252k
repo
stringlengths
7
55
path
stringlengths
4
127
func_name
stringlengths
1
88
original_string
stringlengths
75
19.8k
language
stringclasses
1 value
code
stringlengths
51
19.8k
code_tokens
list
docstring
stringlengths
3
17.3k
docstring_tokens
list
sha
stringlengths
40
40
url
stringlengths
87
242
17,600
Legobot/Legobot
Legobot/Connectors/Slack.py
RtmBot.get_userid_from_botid
def get_userid_from_botid(self, botid): '''Perform a lookup of bots.info to resolve a botid to a userid Args: botid (string): Slack botid to lookup. Returns: string: userid value ''' botinfo = self.slack_client.api_call('bots.info', bot=botid) if botinfo['ok'] is True: return botinfo['bot'].get('user_id') else: return botid
python
def get_userid_from_botid(self, botid): '''Perform a lookup of bots.info to resolve a botid to a userid Args: botid (string): Slack botid to lookup. Returns: string: userid value ''' botinfo = self.slack_client.api_call('bots.info', bot=botid) if botinfo['ok'] is True: return botinfo['bot'].get('user_id') else: return botid
[ "def", "get_userid_from_botid", "(", "self", ",", "botid", ")", ":", "botinfo", "=", "self", ".", "slack_client", ".", "api_call", "(", "'bots.info'", ",", "bot", "=", "botid", ")", "if", "botinfo", "[", "'ok'", "]", "is", "True", ":", "return", "botinfo...
Perform a lookup of bots.info to resolve a botid to a userid Args: botid (string): Slack botid to lookup. Returns: string: userid value
[ "Perform", "a", "lookup", "of", "bots", ".", "info", "to", "resolve", "a", "botid", "to", "a", "userid" ]
d13da172960a149681cb5151ce34b2f3a58ad32b
https://github.com/Legobot/Legobot/blob/d13da172960a149681cb5151ce34b2f3a58ad32b/Legobot/Connectors/Slack.py#L291-L303
17,601
Legobot/Legobot
Legobot/Connectors/Slack.py
RtmBot._parse_metadata
def _parse_metadata(self, message): '''Parse incoming messages to build metadata dict Lots of 'if' statements. It sucks, I know. Args: message (dict): JSON dump of message sent from Slack Returns: Legobot.Metadata ''' # Try to handle all the fields of events we care about. metadata = Metadata(source=self.actor_urn).__dict__ metadata['thread_ts'] = message.get('thread_ts') if 'presence' in message: metadata['presence'] = message['presence'] if 'text' in message: metadata['text'] = message['text'] elif 'previous_message' in message: # Try to handle slack links if 'text' in message['previous_message']: metadata['text'] = message['previous_message']['text'] else: metadata['text'] = None else: metadata['text'] = None if 'user' in message: metadata['source_user'] = message['user'] elif 'bot_id' in message: metadata['source_user'] = self.get_userid_from_botid( message['bot_id']) elif 'message' in message and 'user' in message['message']: metadata['source_user'] = message['message']['user'] else: metadata['source_user'] = None metadata['user_id'] = metadata['source_user'] metadata['display_name'] = self.get_username(metadata['source_user']) if 'channel' in message: metadata['source_channel'] = message['channel'] # Slack starts DM channel IDs with "D" if message['channel'].startswith('D'): metadata['is_private_message'] = True else: metadata['is_private_message'] = False metadata['source_connector'] = 'slack' return metadata
python
def _parse_metadata(self, message): '''Parse incoming messages to build metadata dict Lots of 'if' statements. It sucks, I know. Args: message (dict): JSON dump of message sent from Slack Returns: Legobot.Metadata ''' # Try to handle all the fields of events we care about. metadata = Metadata(source=self.actor_urn).__dict__ metadata['thread_ts'] = message.get('thread_ts') if 'presence' in message: metadata['presence'] = message['presence'] if 'text' in message: metadata['text'] = message['text'] elif 'previous_message' in message: # Try to handle slack links if 'text' in message['previous_message']: metadata['text'] = message['previous_message']['text'] else: metadata['text'] = None else: metadata['text'] = None if 'user' in message: metadata['source_user'] = message['user'] elif 'bot_id' in message: metadata['source_user'] = self.get_userid_from_botid( message['bot_id']) elif 'message' in message and 'user' in message['message']: metadata['source_user'] = message['message']['user'] else: metadata['source_user'] = None metadata['user_id'] = metadata['source_user'] metadata['display_name'] = self.get_username(metadata['source_user']) if 'channel' in message: metadata['source_channel'] = message['channel'] # Slack starts DM channel IDs with "D" if message['channel'].startswith('D'): metadata['is_private_message'] = True else: metadata['is_private_message'] = False metadata['source_connector'] = 'slack' return metadata
[ "def", "_parse_metadata", "(", "self", ",", "message", ")", ":", "# Try to handle all the fields of events we care about.", "metadata", "=", "Metadata", "(", "source", "=", "self", ".", "actor_urn", ")", ".", "__dict__", "metadata", "[", "'thread_ts'", "]", "=", "...
Parse incoming messages to build metadata dict Lots of 'if' statements. It sucks, I know. Args: message (dict): JSON dump of message sent from Slack Returns: Legobot.Metadata
[ "Parse", "incoming", "messages", "to", "build", "metadata", "dict", "Lots", "of", "if", "statements", ".", "It", "sucks", "I", "know", "." ]
d13da172960a149681cb5151ce34b2f3a58ad32b
https://github.com/Legobot/Legobot/blob/d13da172960a149681cb5151ce34b2f3a58ad32b/Legobot/Connectors/Slack.py#L305-L356
17,602
Legobot/Legobot
Legobot/Connectors/Slack.py
Slack.build_attachment
def build_attachment(self, text, target, attachment, thread): '''Builds a slack attachment. Args: message (Legobot.Message): message w/ metadata to send. Returns: attachment (dict): attachment data. ''' attachment = { 'as_user': True, 'text': text, 'channel': target, 'attachments': [ { 'fallback': text, 'image_url': attachment } ] } if thread: attachment['thread_ts'] = thread return attachment
python
def build_attachment(self, text, target, attachment, thread): '''Builds a slack attachment. Args: message (Legobot.Message): message w/ metadata to send. Returns: attachment (dict): attachment data. ''' attachment = { 'as_user': True, 'text': text, 'channel': target, 'attachments': [ { 'fallback': text, 'image_url': attachment } ] } if thread: attachment['thread_ts'] = thread return attachment
[ "def", "build_attachment", "(", "self", ",", "text", ",", "target", ",", "attachment", ",", "thread", ")", ":", "attachment", "=", "{", "'as_user'", ":", "True", ",", "'text'", ":", "text", ",", "'channel'", ":", "target", ",", "'attachments'", ":", "[",...
Builds a slack attachment. Args: message (Legobot.Message): message w/ metadata to send. Returns: attachment (dict): attachment data.
[ "Builds", "a", "slack", "attachment", "." ]
d13da172960a149681cb5151ce34b2f3a58ad32b
https://github.com/Legobot/Legobot/blob/d13da172960a149681cb5151ce34b2f3a58ad32b/Legobot/Connectors/Slack.py#L403-L426
17,603
cuihantao/andes
andes/filters/__init__.py
guess
def guess(system): """ input format guess function. First guess by extension, then test by lines """ files = system.files maybe = [] if files.input_format: maybe.append(files.input_format) # first, guess by extension for key, val in input_formats.items(): if type(val) == list: for item in val: if files.ext.strip('.').lower() == item: maybe.append(key) else: if files.ext.strip('.').lower() == val: maybe.append(key) # second, guess by lines true_format = '' fid = open(files.case, 'r') for item in maybe: try: parser = importlib.import_module('.' + item, __name__) testlines = getattr(parser, 'testlines') if testlines(fid): true_format = item break except ImportError: logger.debug( 'Parser for {:s} format is not found. ' 'Format guess will continue.'. format(item)) fid.close() if true_format: logger.debug('Input format guessed as {:s}.'.format(true_format)) else: logger.error('Unable to determine case format.') files.input_format = true_format # guess addfile format if files.addfile: _, add_ext = os.path.splitext(files.addfile) for key, val in input_formats.items(): if type(val) == list: if add_ext[1:] in val: files.add_format = key else: if add_ext[1:] == val: files.add_format = key return true_format
python
def guess(system): files = system.files maybe = [] if files.input_format: maybe.append(files.input_format) # first, guess by extension for key, val in input_formats.items(): if type(val) == list: for item in val: if files.ext.strip('.').lower() == item: maybe.append(key) else: if files.ext.strip('.').lower() == val: maybe.append(key) # second, guess by lines true_format = '' fid = open(files.case, 'r') for item in maybe: try: parser = importlib.import_module('.' + item, __name__) testlines = getattr(parser, 'testlines') if testlines(fid): true_format = item break except ImportError: logger.debug( 'Parser for {:s} format is not found. ' 'Format guess will continue.'. format(item)) fid.close() if true_format: logger.debug('Input format guessed as {:s}.'.format(true_format)) else: logger.error('Unable to determine case format.') files.input_format = true_format # guess addfile format if files.addfile: _, add_ext = os.path.splitext(files.addfile) for key, val in input_formats.items(): if type(val) == list: if add_ext[1:] in val: files.add_format = key else: if add_ext[1:] == val: files.add_format = key return true_format
[ "def", "guess", "(", "system", ")", ":", "files", "=", "system", ".", "files", "maybe", "=", "[", "]", "if", "files", ".", "input_format", ":", "maybe", ".", "append", "(", "files", ".", "input_format", ")", "# first, guess by extension", "for", "key", "...
input format guess function. First guess by extension, then test by lines
[ "input", "format", "guess", "function", ".", "First", "guess", "by", "extension", "then", "test", "by", "lines" ]
7067898d4f26ce7534e968b8486c4aa8fe3a511a
https://github.com/cuihantao/andes/blob/7067898d4f26ce7534e968b8486c4aa8fe3a511a/andes/filters/__init__.py#L32-L85
17,604
cuihantao/andes
andes/filters/__init__.py
parse
def parse(system): """ Parse input file with the given format in system.files.input_format """ t, _ = elapsed() input_format = system.files.input_format add_format = system.files.add_format # exit when no input format is given if not input_format: logger.error( 'No input format found. Specify or guess a format before parsing.') return False # exit if the format parser could not be imported try: parser = importlib.import_module('.' + input_format, __name__) dmparser = importlib.import_module('.' + 'dome', __name__) if add_format: addparser = importlib.import_module('.' + add_format, __name__) except ImportError: logger.error( 'Parser for {:s} format not found. Program will exit.'.format( input_format)) return False # try parsing the base case file logger.info('Parsing input file <{:s}>'.format(system.files.fullname)) if not parser.read(system.files.case, system): logger.error( 'Error parsing case file {:s} with {:s} format parser.'.format( system.files.fullname, input_format)) return False # Try parsing the addfile if system.files.addfile: if not system.files.add_format: logger.error('Unknown addfile format.') return logger.info('Parsing additional file {:s}.'.format( system.files.addfile)) if not addparser.readadd(system.files.addfile, system): logger.error( 'Error parsing addfile {:s} with {:s} format parser.'.format( system.files.addfile, input_format)) return False # Try parsing the dynfile with dm filter if system.files.dynfile: logger.info('Parsing input file {:s}.'.format( system.files.dynfile)) if not dmparser.read(system.files.dynfile, system): logger.error( 'Error parsing dynfile {:s} with dm format parser.'.format( system.files.dynfile)) return False _, s = elapsed(t) logger.debug('Case file {:s} parsed in {:s}.'.format( system.files.fullname, s)) return True
python
def parse(system): t, _ = elapsed() input_format = system.files.input_format add_format = system.files.add_format # exit when no input format is given if not input_format: logger.error( 'No input format found. Specify or guess a format before parsing.') return False # exit if the format parser could not be imported try: parser = importlib.import_module('.' + input_format, __name__) dmparser = importlib.import_module('.' + 'dome', __name__) if add_format: addparser = importlib.import_module('.' + add_format, __name__) except ImportError: logger.error( 'Parser for {:s} format not found. Program will exit.'.format( input_format)) return False # try parsing the base case file logger.info('Parsing input file <{:s}>'.format(system.files.fullname)) if not parser.read(system.files.case, system): logger.error( 'Error parsing case file {:s} with {:s} format parser.'.format( system.files.fullname, input_format)) return False # Try parsing the addfile if system.files.addfile: if not system.files.add_format: logger.error('Unknown addfile format.') return logger.info('Parsing additional file {:s}.'.format( system.files.addfile)) if not addparser.readadd(system.files.addfile, system): logger.error( 'Error parsing addfile {:s} with {:s} format parser.'.format( system.files.addfile, input_format)) return False # Try parsing the dynfile with dm filter if system.files.dynfile: logger.info('Parsing input file {:s}.'.format( system.files.dynfile)) if not dmparser.read(system.files.dynfile, system): logger.error( 'Error parsing dynfile {:s} with dm format parser.'.format( system.files.dynfile)) return False _, s = elapsed(t) logger.debug('Case file {:s} parsed in {:s}.'.format( system.files.fullname, s)) return True
[ "def", "parse", "(", "system", ")", ":", "t", ",", "_", "=", "elapsed", "(", ")", "input_format", "=", "system", ".", "files", ".", "input_format", "add_format", "=", "system", ".", "files", ".", "add_format", "# exit when no input format is given", "if", "n...
Parse input file with the given format in system.files.input_format
[ "Parse", "input", "file", "with", "the", "given", "format", "in", "system", ".", "files", ".", "input_format" ]
7067898d4f26ce7534e968b8486c4aa8fe3a511a
https://github.com/cuihantao/andes/blob/7067898d4f26ce7534e968b8486c4aa8fe3a511a/andes/filters/__init__.py#L88-L151
17,605
cuihantao/andes
andes/routines/eig.py
EIG.calc_state_matrix
def calc_state_matrix(self): """ Return state matrix and store to ``self.As`` Returns ------- matrix state matrix """ system = self.system Gyx = matrix(system.dae.Gx) self.solver.linsolve(system.dae.Gy, Gyx) self.As = matrix(system.dae.Fx - system.dae.Fy * Gyx) # ------------------------------------------------------ # TODO: use scipy eigs # self.As = sparse(self.As) # I = np.array(self.As.I).reshape((-1,)) # J = np.array(self.As.J).reshape((-1,)) # V = np.array(self.As.V).reshape((-1,)) # self.As = csr_matrix((V, (I, J)), shape=self.As.size) # ------------------------------------------------------ return self.As
python
def calc_state_matrix(self): system = self.system Gyx = matrix(system.dae.Gx) self.solver.linsolve(system.dae.Gy, Gyx) self.As = matrix(system.dae.Fx - system.dae.Fy * Gyx) # ------------------------------------------------------ # TODO: use scipy eigs # self.As = sparse(self.As) # I = np.array(self.As.I).reshape((-1,)) # J = np.array(self.As.J).reshape((-1,)) # V = np.array(self.As.V).reshape((-1,)) # self.As = csr_matrix((V, (I, J)), shape=self.As.size) # ------------------------------------------------------ return self.As
[ "def", "calc_state_matrix", "(", "self", ")", ":", "system", "=", "self", ".", "system", "Gyx", "=", "matrix", "(", "system", ".", "dae", ".", "Gx", ")", "self", ".", "solver", ".", "linsolve", "(", "system", ".", "dae", ".", "Gy", ",", "Gyx", ")",...
Return state matrix and store to ``self.As`` Returns ------- matrix state matrix
[ "Return", "state", "matrix", "and", "store", "to", "self", ".", "As" ]
7067898d4f26ce7534e968b8486c4aa8fe3a511a
https://github.com/cuihantao/andes/blob/7067898d4f26ce7534e968b8486c4aa8fe3a511a/andes/routines/eig.py#L39-L63
17,606
cuihantao/andes
andes/routines/eig.py
EIG.calc_eigvals
def calc_eigvals(self): """ Solve eigenvalues of the state matrix ``self.As`` Returns ------- None """ self.eigs = numpy.linalg.eigvals(self.As) # TODO: use scipy.sparse.linalg.eigs(self.As) return self.eigs
python
def calc_eigvals(self): self.eigs = numpy.linalg.eigvals(self.As) # TODO: use scipy.sparse.linalg.eigs(self.As) return self.eigs
[ "def", "calc_eigvals", "(", "self", ")", ":", "self", ".", "eigs", "=", "numpy", ".", "linalg", ".", "eigvals", "(", "self", ".", "As", ")", "# TODO: use scipy.sparse.linalg.eigs(self.As)", "return", "self", ".", "eigs" ]
Solve eigenvalues of the state matrix ``self.As`` Returns ------- None
[ "Solve", "eigenvalues", "of", "the", "state", "matrix", "self", ".", "As" ]
7067898d4f26ce7534e968b8486c4aa8fe3a511a
https://github.com/cuihantao/andes/blob/7067898d4f26ce7534e968b8486c4aa8fe3a511a/andes/routines/eig.py#L65-L76
17,607
cuihantao/andes
andes/routines/eig.py
EIG.calc_part_factor
def calc_part_factor(self): """ Compute participation factor of states in eigenvalues Returns ------- """ mu, N = numpy.linalg.eig(self.As) # TODO: use scipy.sparse.linalg.eigs(self.As) N = matrix(N) n = len(mu) idx = range(n) W = matrix(spmatrix(1.0, idx, idx, (n, n), N.typecode)) gesv(N, W) partfact = mul(abs(W.T), abs(N)) b = matrix(1.0, (1, n)) WN = b * partfact partfact = partfact.T for item in idx: mu_real = mu[item].real mu_imag = mu[item].imag mu[item] = complex(round(mu_real, 4), round(mu_imag, 4)) partfact[item, :] /= WN[item] # participation factor self.mu = matrix(mu) self.part_fact = matrix(partfact) return self.mu, self.part_fact
python
def calc_part_factor(self): mu, N = numpy.linalg.eig(self.As) # TODO: use scipy.sparse.linalg.eigs(self.As) N = matrix(N) n = len(mu) idx = range(n) W = matrix(spmatrix(1.0, idx, idx, (n, n), N.typecode)) gesv(N, W) partfact = mul(abs(W.T), abs(N)) b = matrix(1.0, (1, n)) WN = b * partfact partfact = partfact.T for item in idx: mu_real = mu[item].real mu_imag = mu[item].imag mu[item] = complex(round(mu_real, 4), round(mu_imag, 4)) partfact[item, :] /= WN[item] # participation factor self.mu = matrix(mu) self.part_fact = matrix(partfact) return self.mu, self.part_fact
[ "def", "calc_part_factor", "(", "self", ")", ":", "mu", ",", "N", "=", "numpy", ".", "linalg", ".", "eig", "(", "self", ".", "As", ")", "# TODO: use scipy.sparse.linalg.eigs(self.As)", "N", "=", "matrix", "(", "N", ")", "n", "=", "len", "(", "mu", ")",...
Compute participation factor of states in eigenvalues Returns -------
[ "Compute", "participation", "factor", "of", "states", "in", "eigenvalues" ]
7067898d4f26ce7534e968b8486c4aa8fe3a511a
https://github.com/cuihantao/andes/blob/7067898d4f26ce7534e968b8486c4aa8fe3a511a/andes/routines/eig.py#L78-L112
17,608
cuihantao/andes
andes/models/breaker.py
Breaker.get_times
def get_times(self): """Return all the action times and times-1e-6 in a list""" if not self.n: return [] self.times = list(mul(self.u1, self.t1)) + \ list(mul(self.u2, self.t2)) + \ list(mul(self.u3, self.t3)) + \ list(mul(self.u4, self.t4)) self.times = matrix(list(set(self.times))) self.times = list(self.times) + list(self.times - 1e-6) return self.times
python
def get_times(self): if not self.n: return [] self.times = list(mul(self.u1, self.t1)) + \ list(mul(self.u2, self.t2)) + \ list(mul(self.u3, self.t3)) + \ list(mul(self.u4, self.t4)) self.times = matrix(list(set(self.times))) self.times = list(self.times) + list(self.times - 1e-6) return self.times
[ "def", "get_times", "(", "self", ")", ":", "if", "not", "self", ".", "n", ":", "return", "[", "]", "self", ".", "times", "=", "list", "(", "mul", "(", "self", ".", "u1", ",", "self", ".", "t1", ")", ")", "+", "list", "(", "mul", "(", "self", ...
Return all the action times and times-1e-6 in a list
[ "Return", "all", "the", "action", "times", "and", "times", "-", "1e", "-", "6", "in", "a", "list" ]
7067898d4f26ce7534e968b8486c4aa8fe3a511a
https://github.com/cuihantao/andes/blob/7067898d4f26ce7534e968b8486c4aa8fe3a511a/andes/models/breaker.py#L75-L86
17,609
Legobot/Legobot
Legobot/Connectors/Discord.py
Heartbeat.send
def send(self, ws, seq): """ Sends heartbeat message to Discord Attributes: ws: Websocket connection to discord seq: Sequence number of heartbeat """ payload = {'op': 1, 'd': seq} payload = json.dumps(payload) logger.debug("Sending heartbeat with payload {}".format(payload)) ws.send(payload) return
python
def send(self, ws, seq): payload = {'op': 1, 'd': seq} payload = json.dumps(payload) logger.debug("Sending heartbeat with payload {}".format(payload)) ws.send(payload) return
[ "def", "send", "(", "self", ",", "ws", ",", "seq", ")", ":", "payload", "=", "{", "'op'", ":", "1", ",", "'d'", ":", "seq", "}", "payload", "=", "json", ".", "dumps", "(", "payload", ")", "logger", ".", "debug", "(", "\"Sending heartbeat with payload...
Sends heartbeat message to Discord Attributes: ws: Websocket connection to discord seq: Sequence number of heartbeat
[ "Sends", "heartbeat", "message", "to", "Discord" ]
d13da172960a149681cb5151ce34b2f3a58ad32b
https://github.com/Legobot/Legobot/blob/d13da172960a149681cb5151ce34b2f3a58ad32b/Legobot/Connectors/Discord.py#L37-L50
17,610
Legobot/Legobot
Legobot/Connectors/Discord.py
DiscoBot.create_message
def create_message(self, channel_id, text): """ Sends a message to a Discord channel or user via REST API Args: channel_id (string): ID of destingation Discord channel text (string): Content of message """ baseurl = self.rest_baseurl + \ '/channels/{}/messages'.format(channel_id) requests.post(baseurl, headers=self.headers, data=json.dumps({'content': text}))
python
def create_message(self, channel_id, text): baseurl = self.rest_baseurl + \ '/channels/{}/messages'.format(channel_id) requests.post(baseurl, headers=self.headers, data=json.dumps({'content': text}))
[ "def", "create_message", "(", "self", ",", "channel_id", ",", "text", ")", ":", "baseurl", "=", "self", ".", "rest_baseurl", "+", "'/channels/{}/messages'", ".", "format", "(", "channel_id", ")", "requests", ".", "post", "(", "baseurl", ",", "headers", "=", ...
Sends a message to a Discord channel or user via REST API Args: channel_id (string): ID of destingation Discord channel text (string): Content of message
[ "Sends", "a", "message", "to", "a", "Discord", "channel", "or", "user", "via", "REST", "API" ]
d13da172960a149681cb5151ce34b2f3a58ad32b
https://github.com/Legobot/Legobot/blob/d13da172960a149681cb5151ce34b2f3a58ad32b/Legobot/Connectors/Discord.py#L109-L122
17,611
Legobot/Legobot
Legobot/Connectors/Discord.py
DiscoBot.identify
def identify(self, token): """ Identifies to the websocket endpoint Args: token (string): Discord bot token """ payload = { 'op': 2, 'd': { 'token': self.token, 'properties': { '$os': sys.platform, '$browser': 'legobot', '$device': 'legobot' }, 'compress': False, 'large_threshold': 250 } } payload['d']['synced_guilds'] = [] logger.info("Identifying with the following message: \ {}".format(payload)) self.ws.send(json.dumps(payload)) return
python
def identify(self, token): payload = { 'op': 2, 'd': { 'token': self.token, 'properties': { '$os': sys.platform, '$browser': 'legobot', '$device': 'legobot' }, 'compress': False, 'large_threshold': 250 } } payload['d']['synced_guilds'] = [] logger.info("Identifying with the following message: \ {}".format(payload)) self.ws.send(json.dumps(payload)) return
[ "def", "identify", "(", "self", ",", "token", ")", ":", "payload", "=", "{", "'op'", ":", "2", ",", "'d'", ":", "{", "'token'", ":", "self", ".", "token", ",", "'properties'", ":", "{", "'$os'", ":", "sys", ".", "platform", ",", "'$browser'", ":", ...
Identifies to the websocket endpoint Args: token (string): Discord bot token
[ "Identifies", "to", "the", "websocket", "endpoint" ]
d13da172960a149681cb5151ce34b2f3a58ad32b
https://github.com/Legobot/Legobot/blob/d13da172960a149681cb5151ce34b2f3a58ad32b/Legobot/Connectors/Discord.py#L124-L149
17,612
Legobot/Legobot
Legobot/Connectors/Discord.py
DiscoBot.on_hello
def on_hello(self, message): """ Runs on a hello event from websocket connection Args: message (dict): Full message from Discord websocket connection" """ logger.info("Got a hello") self.identify(self.token) self.heartbeat_thread = Heartbeat(self.ws, message['d']['heartbeat_interval']) self.heartbeat_thread.start() return
python
def on_hello(self, message): logger.info("Got a hello") self.identify(self.token) self.heartbeat_thread = Heartbeat(self.ws, message['d']['heartbeat_interval']) self.heartbeat_thread.start() return
[ "def", "on_hello", "(", "self", ",", "message", ")", ":", "logger", ".", "info", "(", "\"Got a hello\"", ")", "self", ".", "identify", "(", "self", ".", "token", ")", "self", ".", "heartbeat_thread", "=", "Heartbeat", "(", "self", ".", "ws", ",", "mess...
Runs on a hello event from websocket connection Args: message (dict): Full message from Discord websocket connection"
[ "Runs", "on", "a", "hello", "event", "from", "websocket", "connection" ]
d13da172960a149681cb5151ce34b2f3a58ad32b
https://github.com/Legobot/Legobot/blob/d13da172960a149681cb5151ce34b2f3a58ad32b/Legobot/Connectors/Discord.py#L151-L164
17,613
Legobot/Legobot
Legobot/Connectors/Discord.py
DiscoBot.on_heartbeat
def on_heartbeat(self, message): """ Runs on a heartbeat event from websocket connection Args: message (dict): Full message from Discord websocket connection" """ logger.info("Got a heartbeat") logger.info("Heartbeat message: {}".format(message)) self.heartbeat_thread.update_sequence(message['d']) return
python
def on_heartbeat(self, message): logger.info("Got a heartbeat") logger.info("Heartbeat message: {}".format(message)) self.heartbeat_thread.update_sequence(message['d']) return
[ "def", "on_heartbeat", "(", "self", ",", "message", ")", ":", "logger", ".", "info", "(", "\"Got a heartbeat\"", ")", "logger", ".", "info", "(", "\"Heartbeat message: {}\"", ".", "format", "(", "message", ")", ")", "self", ".", "heartbeat_thread", ".", "upd...
Runs on a heartbeat event from websocket connection Args: message (dict): Full message from Discord websocket connection"
[ "Runs", "on", "a", "heartbeat", "event", "from", "websocket", "connection" ]
d13da172960a149681cb5151ce34b2f3a58ad32b
https://github.com/Legobot/Legobot/blob/d13da172960a149681cb5151ce34b2f3a58ad32b/Legobot/Connectors/Discord.py#L166-L177
17,614
Legobot/Legobot
Legobot/Connectors/Discord.py
DiscoBot.on_message
def on_message(self, message): """ Runs on a create_message event from websocket connection Args: message (dict): Full message from Discord websocket connection" """ if 'content' in message['d']: metadata = self._parse_metadata(message) message = Message(text=message['d']['content'], metadata=metadata).__dict__ logger.debug(message) self.baseplate.tell(message)
python
def on_message(self, message): if 'content' in message['d']: metadata = self._parse_metadata(message) message = Message(text=message['d']['content'], metadata=metadata).__dict__ logger.debug(message) self.baseplate.tell(message)
[ "def", "on_message", "(", "self", ",", "message", ")", ":", "if", "'content'", "in", "message", "[", "'d'", "]", ":", "metadata", "=", "self", ".", "_parse_metadata", "(", "message", ")", "message", "=", "Message", "(", "text", "=", "message", "[", "'d...
Runs on a create_message event from websocket connection Args: message (dict): Full message from Discord websocket connection"
[ "Runs", "on", "a", "create_message", "event", "from", "websocket", "connection" ]
d13da172960a149681cb5151ce34b2f3a58ad32b
https://github.com/Legobot/Legobot/blob/d13da172960a149681cb5151ce34b2f3a58ad32b/Legobot/Connectors/Discord.py#L179-L192
17,615
Legobot/Legobot
Legobot/Connectors/Discord.py
DiscoBot._parse_metadata
def _parse_metadata(self, message): """ Sets metadata in Legobot message Args: message (dict): Full message from Discord websocket connection" Returns: Legobot.Metadata """ metadata = Metadata(source=self.actor_urn).__dict__ if 'author' in message['d']: metadata['source_user'] = message['d']['author']['username'] else: metadata['source_user'] = None if 'channel_id' in message['d']: metadata['source_channel'] = message['d']['channel_id'] else: metadata['source_channel'] = None metadata['user_id'] = metadata['source_user'] metadata['display_name'] = metadata['source_user'] metadata['source_connector'] = 'discord' return metadata
python
def _parse_metadata(self, message): metadata = Metadata(source=self.actor_urn).__dict__ if 'author' in message['d']: metadata['source_user'] = message['d']['author']['username'] else: metadata['source_user'] = None if 'channel_id' in message['d']: metadata['source_channel'] = message['d']['channel_id'] else: metadata['source_channel'] = None metadata['user_id'] = metadata['source_user'] metadata['display_name'] = metadata['source_user'] metadata['source_connector'] = 'discord' return metadata
[ "def", "_parse_metadata", "(", "self", ",", "message", ")", ":", "metadata", "=", "Metadata", "(", "source", "=", "self", ".", "actor_urn", ")", ".", "__dict__", "if", "'author'", "in", "message", "[", "'d'", "]", ":", "metadata", "[", "'source_user'", "...
Sets metadata in Legobot message Args: message (dict): Full message from Discord websocket connection" Returns: Legobot.Metadata
[ "Sets", "metadata", "in", "Legobot", "message" ]
d13da172960a149681cb5151ce34b2f3a58ad32b
https://github.com/Legobot/Legobot/blob/d13da172960a149681cb5151ce34b2f3a58ad32b/Legobot/Connectors/Discord.py#L194-L219
17,616
Legobot/Legobot
Legobot/Connectors/Discord.py
DiscoBot.handle
def handle(self, message): """ Dispatches messages to appropriate handler based on opcode Args: message (dict): Full message from Discord websocket connection """ opcode = message['op'] if opcode == 10: self.on_hello(message) elif opcode == 11: self.on_heartbeat(message) elif opcode == 0: self.on_message(message) else: logger.debug("Not a message we handle: OPCODE {}".format(opcode)) return
python
def handle(self, message): opcode = message['op'] if opcode == 10: self.on_hello(message) elif opcode == 11: self.on_heartbeat(message) elif opcode == 0: self.on_message(message) else: logger.debug("Not a message we handle: OPCODE {}".format(opcode)) return
[ "def", "handle", "(", "self", ",", "message", ")", ":", "opcode", "=", "message", "[", "'op'", "]", "if", "opcode", "==", "10", ":", "self", ".", "on_hello", "(", "message", ")", "elif", "opcode", "==", "11", ":", "self", ".", "on_heartbeat", "(", ...
Dispatches messages to appropriate handler based on opcode Args: message (dict): Full message from Discord websocket connection
[ "Dispatches", "messages", "to", "appropriate", "handler", "based", "on", "opcode" ]
d13da172960a149681cb5151ce34b2f3a58ad32b
https://github.com/Legobot/Legobot/blob/d13da172960a149681cb5151ce34b2f3a58ad32b/Legobot/Connectors/Discord.py#L227-L244
17,617
cuihantao/andes
andes/variables/dae.py
DAE.resize
def resize(self): """Resize dae and and extend for init1 variables """ yext = self.m - len(self.y) xext = self.n - len(self.x) if yext > 0: yzeros = zeros(yext, 1) yones = ones(yext, 1) self.y = matrix([self.y, yzeros], (self.m, 1), 'd') self.g = matrix([self.g, yzeros], (self.m, 1), 'd') self.uy = matrix([self.uy, yones], (self.m, 1), 'd') self.zymin = matrix([self.zymin, yones], (self.m, 1), 'd') self.zymax = matrix([self.zymax, yones], (self.m, 1), 'd') if xext > 0: xzeros = zeros(xext, 1) xones = ones(xext, 1) self.x = matrix([self.x, xzeros], (self.n, 1), 'd') self.f = matrix([self.f, xzeros], (self.n, 1), 'd') self.ux = matrix([self.ux, xones], (self.n, 1), 'd') self.zxmin = matrix([self.zxmin, xones], (self.n, 1), 'd') self.zxmax = matrix([self.zxmax, xones], (self.n, 1), 'd')
python
def resize(self): yext = self.m - len(self.y) xext = self.n - len(self.x) if yext > 0: yzeros = zeros(yext, 1) yones = ones(yext, 1) self.y = matrix([self.y, yzeros], (self.m, 1), 'd') self.g = matrix([self.g, yzeros], (self.m, 1), 'd') self.uy = matrix([self.uy, yones], (self.m, 1), 'd') self.zymin = matrix([self.zymin, yones], (self.m, 1), 'd') self.zymax = matrix([self.zymax, yones], (self.m, 1), 'd') if xext > 0: xzeros = zeros(xext, 1) xones = ones(xext, 1) self.x = matrix([self.x, xzeros], (self.n, 1), 'd') self.f = matrix([self.f, xzeros], (self.n, 1), 'd') self.ux = matrix([self.ux, xones], (self.n, 1), 'd') self.zxmin = matrix([self.zxmin, xones], (self.n, 1), 'd') self.zxmax = matrix([self.zxmax, xones], (self.n, 1), 'd')
[ "def", "resize", "(", "self", ")", ":", "yext", "=", "self", ".", "m", "-", "len", "(", "self", ".", "y", ")", "xext", "=", "self", ".", "n", "-", "len", "(", "self", ".", "x", ")", "if", "yext", ">", "0", ":", "yzeros", "=", "zeros", "(", ...
Resize dae and and extend for init1 variables
[ "Resize", "dae", "and", "and", "extend", "for", "init1", "variables" ]
7067898d4f26ce7534e968b8486c4aa8fe3a511a
https://github.com/cuihantao/andes/blob/7067898d4f26ce7534e968b8486c4aa8fe3a511a/andes/variables/dae.py#L356-L376
17,618
cuihantao/andes
andes/variables/dae.py
DAE.hard_limit
def hard_limit(self, yidx, ymin, ymax, min_set=None, max_set=None): """Set hard limits for algebraic variables and reset the equation mismatches :param yidx: algebraic variable indices :param ymin: lower limit to check for :param ymax: upper limit to check for :param min_set: optional lower limit to set (``ymin`` as default) :param max_set: optional upper limit to set (``ymax`` as default) :type yidx: list, matrix :type ymin: matrix, int, float, list :type ymax: matrix, int, float, list :type min_set: matrix :type max_set: matrix :return: None """ yidx = matrix(yidx) yval = self.y[yidx] ny = len(yidx) if isinstance(ymin, (int, float, list)): ymin = matrix(ymin, (ny, 1), 'd') if isinstance(ymax, (int, float, list)): ymax = matrix(ymax, (ny, 1), 'd') if not min_set: min_set = ymin elif isinstance(min_set, (int, float, list)): min_set = matrix(min_set, (ny, 1), 'd') if not max_set: max_set = ymax elif isinstance(max_set, (int, float, list)): max_set = matrix(max_set, (ny, 1), 'd') above = ageb(yval, ymax) below = aleb(yval, ymin) above_idx = index(above, 1.0) below_idx = index(below, 1.0) above_yidx = yidx[above_idx] below_yidx = yidx[below_idx] idx = list(above_idx) + list(below_idx) if len(above_yidx) > 0: self.y[above_yidx] = max_set[above_idx] self.zymax[above_yidx] = 0 if len(below_yidx) > 0: self.y[below_yidx] = min_set[below_idx] self.zymin[below_yidx] = 0 if len(idx): self.g[yidx[idx]] = 0 self.ac_reset = True
python
def hard_limit(self, yidx, ymin, ymax, min_set=None, max_set=None): yidx = matrix(yidx) yval = self.y[yidx] ny = len(yidx) if isinstance(ymin, (int, float, list)): ymin = matrix(ymin, (ny, 1), 'd') if isinstance(ymax, (int, float, list)): ymax = matrix(ymax, (ny, 1), 'd') if not min_set: min_set = ymin elif isinstance(min_set, (int, float, list)): min_set = matrix(min_set, (ny, 1), 'd') if not max_set: max_set = ymax elif isinstance(max_set, (int, float, list)): max_set = matrix(max_set, (ny, 1), 'd') above = ageb(yval, ymax) below = aleb(yval, ymin) above_idx = index(above, 1.0) below_idx = index(below, 1.0) above_yidx = yidx[above_idx] below_yidx = yidx[below_idx] idx = list(above_idx) + list(below_idx) if len(above_yidx) > 0: self.y[above_yidx] = max_set[above_idx] self.zymax[above_yidx] = 0 if len(below_yidx) > 0: self.y[below_yidx] = min_set[below_idx] self.zymin[below_yidx] = 0 if len(idx): self.g[yidx[idx]] = 0 self.ac_reset = True
[ "def", "hard_limit", "(", "self", ",", "yidx", ",", "ymin", ",", "ymax", ",", "min_set", "=", "None", ",", "max_set", "=", "None", ")", ":", "yidx", "=", "matrix", "(", "yidx", ")", "yval", "=", "self", ".", "y", "[", "yidx", "]", "ny", "=", "l...
Set hard limits for algebraic variables and reset the equation mismatches :param yidx: algebraic variable indices :param ymin: lower limit to check for :param ymax: upper limit to check for :param min_set: optional lower limit to set (``ymin`` as default) :param max_set: optional upper limit to set (``ymax`` as default) :type yidx: list, matrix :type ymin: matrix, int, float, list :type ymax: matrix, int, float, list :type min_set: matrix :type max_set: matrix :return: None
[ "Set", "hard", "limits", "for", "algebraic", "variables", "and", "reset", "the", "equation", "mismatches" ]
7067898d4f26ce7534e968b8486c4aa8fe3a511a
https://github.com/cuihantao/andes/blob/7067898d4f26ce7534e968b8486c4aa8fe3a511a/andes/variables/dae.py#L378-L435
17,619
cuihantao/andes
andes/variables/dae.py
DAE.hard_limit_remote
def hard_limit_remote(self, yidx, ridx, rtype='y', rmin=None, rmax=None, min_yset=0, max_yset=0): """Limit the output of yidx if the remote y is not within the limits This function needs to be modernized. """ ny = len(yidx) assert ny == len( ridx), "Length of output vars and remote vars does not match" assert rtype in ('x', 'y'), "ridx must be either y (algeb) or x (state)" if isinstance(min_yset, (int, float)): min_yset = matrix(min_yset, (ny, 1), 'd') if isinstance(max_yset, (int, float)): max_yset = matrix(max_yset, (ny, 1), 'd') above_idx, below_idx = list(), list() yidx = matrix(yidx) if rmax: # find the over-limit remote idx above = ageb(self.__dict__[rtype][ridx], rmax) above_idx = index(above, 1.0) # reset the y values based on the remote limit violations self.y[yidx[above_idx]] = max_yset[above_idx] self.zymax[yidx[above_idx]] = 0 if rmin: below = aleb(self.__dict__[rtype][ridx], rmin) below_idx = index(below, 1.0) self.y[yidx[below_idx]] = min_yset[below_idx] self.zymin[yidx[below_idx]] = 0 idx = above_idx + below_idx self.g[yidx[idx]] = 0 if len(idx) > 0: self.factorize = True
python
def hard_limit_remote(self, yidx, ridx, rtype='y', rmin=None, rmax=None, min_yset=0, max_yset=0): ny = len(yidx) assert ny == len( ridx), "Length of output vars and remote vars does not match" assert rtype in ('x', 'y'), "ridx must be either y (algeb) or x (state)" if isinstance(min_yset, (int, float)): min_yset = matrix(min_yset, (ny, 1), 'd') if isinstance(max_yset, (int, float)): max_yset = matrix(max_yset, (ny, 1), 'd') above_idx, below_idx = list(), list() yidx = matrix(yidx) if rmax: # find the over-limit remote idx above = ageb(self.__dict__[rtype][ridx], rmax) above_idx = index(above, 1.0) # reset the y values based on the remote limit violations self.y[yidx[above_idx]] = max_yset[above_idx] self.zymax[yidx[above_idx]] = 0 if rmin: below = aleb(self.__dict__[rtype][ridx], rmin) below_idx = index(below, 1.0) self.y[yidx[below_idx]] = min_yset[below_idx] self.zymin[yidx[below_idx]] = 0 idx = above_idx + below_idx self.g[yidx[idx]] = 0 if len(idx) > 0: self.factorize = True
[ "def", "hard_limit_remote", "(", "self", ",", "yidx", ",", "ridx", ",", "rtype", "=", "'y'", ",", "rmin", "=", "None", ",", "rmax", "=", "None", ",", "min_yset", "=", "0", ",", "max_yset", "=", "0", ")", ":", "ny", "=", "len", "(", "yidx", ")", ...
Limit the output of yidx if the remote y is not within the limits This function needs to be modernized.
[ "Limit", "the", "output", "of", "yidx", "if", "the", "remote", "y", "is", "not", "within", "the", "limits" ]
7067898d4f26ce7534e968b8486c4aa8fe3a511a
https://github.com/cuihantao/andes/blob/7067898d4f26ce7534e968b8486c4aa8fe3a511a/andes/variables/dae.py#L437-L481
17,620
cuihantao/andes
andes/variables/dae.py
DAE.anti_windup
def anti_windup(self, xidx, xmin, xmax): """ Anti-windup limiter for state variables. Resets the limited variables and differential equations. :param xidx: state variable indices :param xmin: lower limit :param xmax: upper limit :type xidx: matrix, list :type xmin: matrix, float, int, list :type xmax: matrix, float, int, list """ xidx = matrix(xidx) xval = self.x[xidx] fval = self.f[xidx] if isinstance(xmin, (float, int, list)): xmin = matrix(xmin, xidx.size, 'd') if isinstance(xmax, (float, int, list)): xmax = matrix(xmax, xidx.size, 'd') x_above = ageb(xval, xmax) f_above = ageb(fval, 0.0) x_below = aleb(xval, xmin) f_below = aleb(fval, 0.0) above = aandb(x_above, f_above) above_idx = index(above, 1.0) if len(above_idx) > 0: above_xidx = xidx[above_idx] self.x[above_xidx] = xmax[above_idx] self.zxmax[above_xidx] = 0 below = aandb(x_below, f_below) below_idx = index(below, 1.0) if len(below_idx) > 0: below_xidx = xidx[below_idx] self.x[below_xidx] = xmin[below_idx] self.zxmin[below_xidx] = 0 idx = list(above_idx) + list(below_idx) if len(idx) > 0: self.f[xidx[idx]] = 0 self.ac_reset = True
python
def anti_windup(self, xidx, xmin, xmax): xidx = matrix(xidx) xval = self.x[xidx] fval = self.f[xidx] if isinstance(xmin, (float, int, list)): xmin = matrix(xmin, xidx.size, 'd') if isinstance(xmax, (float, int, list)): xmax = matrix(xmax, xidx.size, 'd') x_above = ageb(xval, xmax) f_above = ageb(fval, 0.0) x_below = aleb(xval, xmin) f_below = aleb(fval, 0.0) above = aandb(x_above, f_above) above_idx = index(above, 1.0) if len(above_idx) > 0: above_xidx = xidx[above_idx] self.x[above_xidx] = xmax[above_idx] self.zxmax[above_xidx] = 0 below = aandb(x_below, f_below) below_idx = index(below, 1.0) if len(below_idx) > 0: below_xidx = xidx[below_idx] self.x[below_xidx] = xmin[below_idx] self.zxmin[below_xidx] = 0 idx = list(above_idx) + list(below_idx) if len(idx) > 0: self.f[xidx[idx]] = 0 self.ac_reset = True
[ "def", "anti_windup", "(", "self", ",", "xidx", ",", "xmin", ",", "xmax", ")", ":", "xidx", "=", "matrix", "(", "xidx", ")", "xval", "=", "self", ".", "x", "[", "xidx", "]", "fval", "=", "self", ".", "f", "[", "xidx", "]", "if", "isinstance", "...
Anti-windup limiter for state variables. Resets the limited variables and differential equations. :param xidx: state variable indices :param xmin: lower limit :param xmax: upper limit :type xidx: matrix, list :type xmin: matrix, float, int, list :type xmax: matrix, float, int, list
[ "Anti", "-", "windup", "limiter", "for", "state", "variables", "." ]
7067898d4f26ce7534e968b8486c4aa8fe3a511a
https://github.com/cuihantao/andes/blob/7067898d4f26ce7534e968b8486c4aa8fe3a511a/andes/variables/dae.py#L483-L531
17,621
cuihantao/andes
andes/variables/dae.py
DAE.reset_Ac
def reset_Ac(self): """ Reset ``dae.Ac`` sparse matrix for disabled equations due to hard_limit and anti_windup limiters. :return: None """ if self.ac_reset is False: return mn = self.m + self.n x = index(aandb(self.zxmin, self.zxmax), 0.) y = [i + self.n for i in index(aandb(self.zymin, self.zymax), 0.)] xy = list(x) + y eye = spdiag([1.0] * mn) H = spmatrix(1.0, xy, xy, (mn, mn), 'd') # Modifying ``eye`` is more efficient than ``eye = eye - H``. # CVXOPT modifies eye in place because all the accessed elements exist. for idx in xy: eye[idx, idx] = 0 if len(xy) > 0: self.Ac = eye * (self.Ac * eye) - H self.q[x] = 0 self.ac_reset = False self.factorize = True
python
def reset_Ac(self): if self.ac_reset is False: return mn = self.m + self.n x = index(aandb(self.zxmin, self.zxmax), 0.) y = [i + self.n for i in index(aandb(self.zymin, self.zymax), 0.)] xy = list(x) + y eye = spdiag([1.0] * mn) H = spmatrix(1.0, xy, xy, (mn, mn), 'd') # Modifying ``eye`` is more efficient than ``eye = eye - H``. # CVXOPT modifies eye in place because all the accessed elements exist. for idx in xy: eye[idx, idx] = 0 if len(xy) > 0: self.Ac = eye * (self.Ac * eye) - H self.q[x] = 0 self.ac_reset = False self.factorize = True
[ "def", "reset_Ac", "(", "self", ")", ":", "if", "self", ".", "ac_reset", "is", "False", ":", "return", "mn", "=", "self", ".", "m", "+", "self", ".", "n", "x", "=", "index", "(", "aandb", "(", "self", ".", "zxmin", ",", "self", ".", "zxmax", ")...
Reset ``dae.Ac`` sparse matrix for disabled equations due to hard_limit and anti_windup limiters. :return: None
[ "Reset", "dae", ".", "Ac", "sparse", "matrix", "for", "disabled", "equations", "due", "to", "hard_limit", "and", "anti_windup", "limiters", "." ]
7067898d4f26ce7534e968b8486c4aa8fe3a511a
https://github.com/cuihantao/andes/blob/7067898d4f26ce7534e968b8486c4aa8fe3a511a/andes/variables/dae.py#L533-L563
17,622
cuihantao/andes
andes/variables/dae.py
DAE.get_size
def get_size(self, m): """ Return the 2-D size of a Jacobian matrix in tuple """ nrow, ncol = 0, 0 if m[0] == 'F': nrow = self.n elif m[0] == 'G': nrow = self.m if m[1] == 'x': ncol = self.n elif m[1] == 'y': ncol = self.m return nrow, ncol
python
def get_size(self, m): nrow, ncol = 0, 0 if m[0] == 'F': nrow = self.n elif m[0] == 'G': nrow = self.m if m[1] == 'x': ncol = self.n elif m[1] == 'y': ncol = self.m return nrow, ncol
[ "def", "get_size", "(", "self", ",", "m", ")", ":", "nrow", ",", "ncol", "=", "0", ",", "0", "if", "m", "[", "0", "]", "==", "'F'", ":", "nrow", "=", "self", ".", "n", "elif", "m", "[", "0", "]", "==", "'G'", ":", "nrow", "=", "self", "."...
Return the 2-D size of a Jacobian matrix in tuple
[ "Return", "the", "2", "-", "D", "size", "of", "a", "Jacobian", "matrix", "in", "tuple" ]
7067898d4f26ce7534e968b8486c4aa8fe3a511a
https://github.com/cuihantao/andes/blob/7067898d4f26ce7534e968b8486c4aa8fe3a511a/andes/variables/dae.py#L565-L580
17,623
cuihantao/andes
andes/variables/dae.py
DAE.temp_to_spmatrix
def temp_to_spmatrix(self, ty): """ Convert Jacobian tuples to matrices :param ty: name of the matrices to convert in ``('jac0','jac')`` :return: None """ assert ty in ('jac0', 'jac') jac0s = ['Fx0', 'Fy0', 'Gx0', 'Gy0'] jacs = ['Fx', 'Fy', 'Gx', 'Gy'] if ty == 'jac0': todo = jac0s elif ty == 'jac': todo = jacs for m in todo: self.__dict__[m] = spmatrix(self._temp[m]['V'], self._temp[m]['I'], self._temp[m]['J'], self.get_size(m), 'd') if ty == 'jac': self.__dict__[m] += self.__dict__[m + '0'] self.apply_set(ty)
python
def temp_to_spmatrix(self, ty): assert ty in ('jac0', 'jac') jac0s = ['Fx0', 'Fy0', 'Gx0', 'Gy0'] jacs = ['Fx', 'Fy', 'Gx', 'Gy'] if ty == 'jac0': todo = jac0s elif ty == 'jac': todo = jacs for m in todo: self.__dict__[m] = spmatrix(self._temp[m]['V'], self._temp[m]['I'], self._temp[m]['J'], self.get_size(m), 'd') if ty == 'jac': self.__dict__[m] += self.__dict__[m + '0'] self.apply_set(ty)
[ "def", "temp_to_spmatrix", "(", "self", ",", "ty", ")", ":", "assert", "ty", "in", "(", "'jac0'", ",", "'jac'", ")", "jac0s", "=", "[", "'Fx0'", ",", "'Fy0'", ",", "'Gx0'", ",", "'Gy0'", "]", "jacs", "=", "[", "'Fx'", ",", "'Fy'", ",", "'Gx'", ",...
Convert Jacobian tuples to matrices :param ty: name of the matrices to convert in ``('jac0','jac')`` :return: None
[ "Convert", "Jacobian", "tuples", "to", "matrices" ]
7067898d4f26ce7534e968b8486c4aa8fe3a511a
https://github.com/cuihantao/andes/blob/7067898d4f26ce7534e968b8486c4aa8fe3a511a/andes/variables/dae.py#L597-L622
17,624
cuihantao/andes
andes/variables/dae.py
DAE.apply_set
def apply_set(self, ty): """ Apply Jacobian set values to matrices :param ty: Jacobian type in ``('jac0', 'jac')`` :return: """ assert ty in ('jac0', 'jac') if ty == 'jac0': todo = ['Fx0', 'Fy0', 'Gx0', 'Gy0'] else: todo = ['Fx', 'Fy', 'Gx', 'Gy'] for m in todo: for idx in range(len(self._set[m]['I'])): i = self._set[m]['I'][idx] j = self._set[m]['J'][idx] v = self._set[m]['V'][idx] self.__dict__[m][i, j] = v
python
def apply_set(self, ty): assert ty in ('jac0', 'jac') if ty == 'jac0': todo = ['Fx0', 'Fy0', 'Gx0', 'Gy0'] else: todo = ['Fx', 'Fy', 'Gx', 'Gy'] for m in todo: for idx in range(len(self._set[m]['I'])): i = self._set[m]['I'][idx] j = self._set[m]['J'][idx] v = self._set[m]['V'][idx] self.__dict__[m][i, j] = v
[ "def", "apply_set", "(", "self", ",", "ty", ")", ":", "assert", "ty", "in", "(", "'jac0'", ",", "'jac'", ")", "if", "ty", "==", "'jac0'", ":", "todo", "=", "[", "'Fx0'", ",", "'Fy0'", ",", "'Gx0'", ",", "'Gy0'", "]", "else", ":", "todo", "=", "...
Apply Jacobian set values to matrices :param ty: Jacobian type in ``('jac0', 'jac')`` :return:
[ "Apply", "Jacobian", "set", "values", "to", "matrices" ]
7067898d4f26ce7534e968b8486c4aa8fe3a511a
https://github.com/cuihantao/andes/blob/7067898d4f26ce7534e968b8486c4aa8fe3a511a/andes/variables/dae.py#L646-L665
17,625
cuihantao/andes
andes/variables/dae.py
DAE.show
def show(self, eq, value=None): """Show equation or variable array along with the names""" if eq in ['f', 'x']: key = 'unamex' elif eq in ['g', 'y']: key = 'unamey' if value: value = list(value) else: value = list(self.__dict__[eq]) out = '' for name, val, idx in zip(self.system.varname.__dict__[key], value, range(len(value))): out += '{:20s} [{:>12.4f}] {:g}\n'.format(name, val, idx) return out
python
def show(self, eq, value=None): if eq in ['f', 'x']: key = 'unamex' elif eq in ['g', 'y']: key = 'unamey' if value: value = list(value) else: value = list(self.__dict__[eq]) out = '' for name, val, idx in zip(self.system.varname.__dict__[key], value, range(len(value))): out += '{:20s} [{:>12.4f}] {:g}\n'.format(name, val, idx) return out
[ "def", "show", "(", "self", ",", "eq", ",", "value", "=", "None", ")", ":", "if", "eq", "in", "[", "'f'", ",", "'x'", "]", ":", "key", "=", "'unamex'", "elif", "eq", "in", "[", "'g'", ",", "'y'", "]", ":", "key", "=", "'unamey'", "if", "value...
Show equation or variable array along with the names
[ "Show", "equation", "or", "variable", "array", "along", "with", "the", "names" ]
7067898d4f26ce7534e968b8486c4aa8fe3a511a
https://github.com/cuihantao/andes/blob/7067898d4f26ce7534e968b8486c4aa8fe3a511a/andes/variables/dae.py#L667-L683
17,626
cuihantao/andes
andes/variables/dae.py
DAE.find_val
def find_val(self, eq, val): """Return the name of the equation having the given value""" if eq not in ('f', 'g', 'q'): return elif eq in ('f', 'q'): key = 'unamex' elif eq == 'g': key = 'unamey' idx = 0 for m, n in zip(self.system.varname.__dict__[key], self.__dict__[eq]): if n == val: return m, idx idx += 1 return
python
def find_val(self, eq, val): if eq not in ('f', 'g', 'q'): return elif eq in ('f', 'q'): key = 'unamex' elif eq == 'g': key = 'unamey' idx = 0 for m, n in zip(self.system.varname.__dict__[key], self.__dict__[eq]): if n == val: return m, idx idx += 1 return
[ "def", "find_val", "(", "self", ",", "eq", ",", "val", ")", ":", "if", "eq", "not", "in", "(", "'f'", ",", "'g'", ",", "'q'", ")", ":", "return", "elif", "eq", "in", "(", "'f'", ",", "'q'", ")", ":", "key", "=", "'unamex'", "elif", "eq", "=="...
Return the name of the equation having the given value
[ "Return", "the", "name", "of", "the", "equation", "having", "the", "given", "value" ]
7067898d4f26ce7534e968b8486c4aa8fe3a511a
https://github.com/cuihantao/andes/blob/7067898d4f26ce7534e968b8486c4aa8fe3a511a/andes/variables/dae.py#L685-L698
17,627
cuihantao/andes
andes/variables/dae.py
DAE.reset_small
def reset_small(self, eq): """Reset numbers smaller than 1e-12 in f and g equations""" assert eq in ('f', 'g') for idx, var in enumerate(self.__dict__[eq]): if abs(var) <= 1e-12: self.__dict__[eq][idx] = 0
python
def reset_small(self, eq): assert eq in ('f', 'g') for idx, var in enumerate(self.__dict__[eq]): if abs(var) <= 1e-12: self.__dict__[eq][idx] = 0
[ "def", "reset_small", "(", "self", ",", "eq", ")", ":", "assert", "eq", "in", "(", "'f'", ",", "'g'", ")", "for", "idx", ",", "var", "in", "enumerate", "(", "self", ".", "__dict__", "[", "eq", "]", ")", ":", "if", "abs", "(", "var", ")", "<=", ...
Reset numbers smaller than 1e-12 in f and g equations
[ "Reset", "numbers", "smaller", "than", "1e", "-", "12", "in", "f", "and", "g", "equations" ]
7067898d4f26ce7534e968b8486c4aa8fe3a511a
https://github.com/cuihantao/andes/blob/7067898d4f26ce7534e968b8486c4aa8fe3a511a/andes/variables/dae.py#L700-L705
17,628
cuihantao/andes
andes/variables/dae.py
DAE.check_diag
def check_diag(self, jac, name): """ Check matrix ``jac`` for diagonal elements that equals 0 """ system = self.system pos = [] names = [] pairs = '' size = jac.size diag = jac[0:size[0] ** 2:size[0] + 1] for idx in range(size[0]): if abs(diag[idx]) <= 1e-8: pos.append(idx) for idx in pos: names.append(system.varname.__dict__[name][idx]) if len(names) > 0: for i, j in zip(pos, names): pairs += '{0}: {1}\n'.format(i, j) logger.debug('Jacobian diagonal check:') logger.debug(pairs)
python
def check_diag(self, jac, name): system = self.system pos = [] names = [] pairs = '' size = jac.size diag = jac[0:size[0] ** 2:size[0] + 1] for idx in range(size[0]): if abs(diag[idx]) <= 1e-8: pos.append(idx) for idx in pos: names.append(system.varname.__dict__[name][idx]) if len(names) > 0: for i, j in zip(pos, names): pairs += '{0}: {1}\n'.format(i, j) logger.debug('Jacobian diagonal check:') logger.debug(pairs)
[ "def", "check_diag", "(", "self", ",", "jac", ",", "name", ")", ":", "system", "=", "self", ".", "system", "pos", "=", "[", "]", "names", "=", "[", "]", "pairs", "=", "''", "size", "=", "jac", ".", "size", "diag", "=", "jac", "[", "0", ":", "...
Check matrix ``jac`` for diagonal elements that equals 0
[ "Check", "matrix", "jac", "for", "diagonal", "elements", "that", "equals", "0" ]
7067898d4f26ce7534e968b8486c4aa8fe3a511a
https://github.com/cuihantao/andes/blob/7067898d4f26ce7534e968b8486c4aa8fe3a511a/andes/variables/dae.py#L713-L735
17,629
DiamondLightSource/python-workflows
workflows/logging/__init__.py
get_exception_source
def get_exception_source(): """Returns full file path, file name, line number, function name, and line contents causing the last exception.""" _, _, tb = sys.exc_info() while tb.tb_next: tb = tb.tb_next f = tb.tb_frame lineno = tb.tb_lineno co = f.f_code filefullpath = co.co_filename filename = os.path.basename(filefullpath) name = co.co_name linecache.checkcache(filefullpath) line = linecache.getline(filefullpath, lineno, f.f_globals) if line: line = line.strip() else: line = None return filefullpath, filename, lineno, name, line
python
def get_exception_source(): _, _, tb = sys.exc_info() while tb.tb_next: tb = tb.tb_next f = tb.tb_frame lineno = tb.tb_lineno co = f.f_code filefullpath = co.co_filename filename = os.path.basename(filefullpath) name = co.co_name linecache.checkcache(filefullpath) line = linecache.getline(filefullpath, lineno, f.f_globals) if line: line = line.strip() else: line = None return filefullpath, filename, lineno, name, line
[ "def", "get_exception_source", "(", ")", ":", "_", ",", "_", ",", "tb", "=", "sys", ".", "exc_info", "(", ")", "while", "tb", ".", "tb_next", ":", "tb", "=", "tb", ".", "tb_next", "f", "=", "tb", ".", "tb_frame", "lineno", "=", "tb", ".", "tb_lin...
Returns full file path, file name, line number, function name, and line contents causing the last exception.
[ "Returns", "full", "file", "path", "file", "name", "line", "number", "function", "name", "and", "line", "contents", "causing", "the", "last", "exception", "." ]
7ef47b457655b96f4d2ef7ee9863cf1b6d20e023
https://github.com/DiamondLightSource/python-workflows/blob/7ef47b457655b96f4d2ef7ee9863cf1b6d20e023/workflows/logging/__init__.py#L9-L27
17,630
DiamondLightSource/python-workflows
workflows/logging/__init__.py
CallbackHandler.prepare
def prepare(self, record): # Function taken from Python 3.6 QueueHandler """ Prepares a record for queuing. The object returned by this method is enqueued. The base implementation formats the record to merge the message and arguments, and removes unpickleable items from the record in-place. You might want to override this method if you want to convert the record to a dict or JSON string, or send a modified copy of the record while leaving the original intact. """ # The format operation gets traceback text into record.exc_text # (if there's exception data), and also puts the message into # record.message. We can then use this to replace the original # msg + args, as these might be unpickleable. We also zap the # exc_info attribute, as it's no longer needed and, if not None, # will typically not be pickleable. self.format(record) record.msg = record.message record.args = None record.exc_info = None return record
python
def prepare(self, record): # Function taken from Python 3.6 QueueHandler # The format operation gets traceback text into record.exc_text # (if there's exception data), and also puts the message into # record.message. We can then use this to replace the original # msg + args, as these might be unpickleable. We also zap the # exc_info attribute, as it's no longer needed and, if not None, # will typically not be pickleable. self.format(record) record.msg = record.message record.args = None record.exc_info = None return record
[ "def", "prepare", "(", "self", ",", "record", ")", ":", "# Function taken from Python 3.6 QueueHandler", "# The format operation gets traceback text into record.exc_text", "# (if there's exception data), and also puts the message into", "# record.message. We can then use this to replace the or...
Prepares a record for queuing. The object returned by this method is enqueued. The base implementation formats the record to merge the message and arguments, and removes unpickleable items from the record in-place. You might want to override this method if you want to convert the record to a dict or JSON string, or send a modified copy of the record while leaving the original intact.
[ "Prepares", "a", "record", "for", "queuing", ".", "The", "object", "returned", "by", "this", "method", "is", "enqueued", ".", "The", "base", "implementation", "formats", "the", "record", "to", "merge", "the", "message", "and", "arguments", "and", "removes", ...
7ef47b457655b96f4d2ef7ee9863cf1b6d20e023
https://github.com/DiamondLightSource/python-workflows/blob/7ef47b457655b96f4d2ef7ee9863cf1b6d20e023/workflows/logging/__init__.py#L38-L60
17,631
DiamondLightSource/python-workflows
workflows/logging/__init__.py
CallbackHandler.emit
def emit(self, record): """Send a LogRecord to the callback function, after preparing it for serialization.""" try: self._callback(self.prepare(record)) except Exception: self.handleError(record)
python
def emit(self, record): try: self._callback(self.prepare(record)) except Exception: self.handleError(record)
[ "def", "emit", "(", "self", ",", "record", ")", ":", "try", ":", "self", ".", "_callback", "(", "self", ".", "prepare", "(", "record", ")", ")", "except", "Exception", ":", "self", ".", "handleError", "(", "record", ")" ]
Send a LogRecord to the callback function, after preparing it for serialization.
[ "Send", "a", "LogRecord", "to", "the", "callback", "function", "after", "preparing", "it", "for", "serialization", "." ]
7ef47b457655b96f4d2ef7ee9863cf1b6d20e023
https://github.com/DiamondLightSource/python-workflows/blob/7ef47b457655b96f4d2ef7ee9863cf1b6d20e023/workflows/logging/__init__.py#L62-L68
17,632
DiamondLightSource/python-workflows
workflows/frontend/__init__.py
Frontend.run
def run(self): """The main loop of the frontend. Here incoming messages from the service are processed and forwarded to the corresponding callback methods.""" self.log.debug("Entered main loop") while not self.shutdown: # If no service is running slow down the main loop if not self._pipe_service: time.sleep(0.3) self.update_status() # While a service is running, check for incoming messages from that service if self._pipe_service and self._pipe_service.poll(1): try: message = self._pipe_service.recv() if isinstance(message, dict) and "band" in message: # only dictionaries with 'band' entry are valid messages try: handler = getattr(self, "parse_band_" + message["band"]) except AttributeError: handler = None self.log.warning("Unknown band %s", str(message["band"])) if handler: # try: handler(message) # except Exception: # print('Uh oh. What to do.') else: self.log.warning("Invalid message received %s", str(message)) except EOFError: # Service has gone away error_message = False if self._service_status == CommonService.SERVICE_STATUS_END: self.log.info("Service terminated") elif self._service_status == CommonService.SERVICE_STATUS_ERROR: error_message = "Service terminated with error code" elif self._service_status in ( CommonService.SERVICE_STATUS_NONE, CommonService.SERVICE_STATUS_NEW, CommonService.SERVICE_STATUS_STARTING, ): error_message = ( "Service may have died unexpectedly in " + "initialization (last known status: %s)" % CommonService.human_readable_state.get( self._service_status, self._service_status ) ) else: error_message = ( "Service may have died unexpectedly" " (last known status: %s)" % CommonService.human_readable_state.get( self._service_status, self._service_status ) ) if error_message: self.log.error(error_message) self._terminate_service() if self.restart_service: self.exponential_backoff() else: self.shutdown = True if error_message: raise workflows.Error(error_message) with self.__lock: if ( self._service is None and self.restart_service and 
self._service_factory ): self.update_status(status_code=CommonService.SERVICE_STATUS_NEW) self.switch_service() # Check that the transport is alive if not self._transport.is_connected(): self._terminate_service() raise workflows.Error("Lost transport layer connection") self.log.debug("Left main loop") self.update_status(status_code=CommonService.SERVICE_STATUS_TEARDOWN) self._terminate_service() self.log.debug("Terminating.")
python
def run(self): self.log.debug("Entered main loop") while not self.shutdown: # If no service is running slow down the main loop if not self._pipe_service: time.sleep(0.3) self.update_status() # While a service is running, check for incoming messages from that service if self._pipe_service and self._pipe_service.poll(1): try: message = self._pipe_service.recv() if isinstance(message, dict) and "band" in message: # only dictionaries with 'band' entry are valid messages try: handler = getattr(self, "parse_band_" + message["band"]) except AttributeError: handler = None self.log.warning("Unknown band %s", str(message["band"])) if handler: # try: handler(message) # except Exception: # print('Uh oh. What to do.') else: self.log.warning("Invalid message received %s", str(message)) except EOFError: # Service has gone away error_message = False if self._service_status == CommonService.SERVICE_STATUS_END: self.log.info("Service terminated") elif self._service_status == CommonService.SERVICE_STATUS_ERROR: error_message = "Service terminated with error code" elif self._service_status in ( CommonService.SERVICE_STATUS_NONE, CommonService.SERVICE_STATUS_NEW, CommonService.SERVICE_STATUS_STARTING, ): error_message = ( "Service may have died unexpectedly in " + "initialization (last known status: %s)" % CommonService.human_readable_state.get( self._service_status, self._service_status ) ) else: error_message = ( "Service may have died unexpectedly" " (last known status: %s)" % CommonService.human_readable_state.get( self._service_status, self._service_status ) ) if error_message: self.log.error(error_message) self._terminate_service() if self.restart_service: self.exponential_backoff() else: self.shutdown = True if error_message: raise workflows.Error(error_message) with self.__lock: if ( self._service is None and self.restart_service and self._service_factory ): self.update_status(status_code=CommonService.SERVICE_STATUS_NEW) self.switch_service() # Check that the transport is 
alive if not self._transport.is_connected(): self._terminate_service() raise workflows.Error("Lost transport layer connection") self.log.debug("Left main loop") self.update_status(status_code=CommonService.SERVICE_STATUS_TEARDOWN) self._terminate_service() self.log.debug("Terminating.")
[ "def", "run", "(", "self", ")", ":", "self", ".", "log", ".", "debug", "(", "\"Entered main loop\"", ")", "while", "not", "self", ".", "shutdown", ":", "# If no service is running slow down the main loop", "if", "not", "self", ".", "_pipe_service", ":", "time", ...
The main loop of the frontend. Here incoming messages from the service are processed and forwarded to the corresponding callback methods.
[ "The", "main", "loop", "of", "the", "frontend", ".", "Here", "incoming", "messages", "from", "the", "service", "are", "processed", "and", "forwarded", "to", "the", "corresponding", "callback", "methods", "." ]
7ef47b457655b96f4d2ef7ee9863cf1b6d20e023
https://github.com/DiamondLightSource/python-workflows/blob/7ef47b457655b96f4d2ef7ee9863cf1b6d20e023/workflows/frontend/__init__.py#L172-L252
17,633
DiamondLightSource/python-workflows
workflows/frontend/__init__.py
Frontend.send_command
def send_command(self, command): """Send command to service via the command queue.""" if self._pipe_commands: self._pipe_commands.send(command) else: if self.shutdown: # Stop delivering messages in shutdown. self.log.info( "During shutdown no command queue pipe found for command\n%s", str(command), ) else: self.log.error( "No command queue pipe found for command\n%s", str(command) )
python
def send_command(self, command): if self._pipe_commands: self._pipe_commands.send(command) else: if self.shutdown: # Stop delivering messages in shutdown. self.log.info( "During shutdown no command queue pipe found for command\n%s", str(command), ) else: self.log.error( "No command queue pipe found for command\n%s", str(command) )
[ "def", "send_command", "(", "self", ",", "command", ")", ":", "if", "self", ".", "_pipe_commands", ":", "self", ".", "_pipe_commands", ".", "send", "(", "command", ")", "else", ":", "if", "self", ".", "shutdown", ":", "# Stop delivering messages in shutdown.",...
Send command to service via the command queue.
[ "Send", "command", "to", "service", "via", "the", "command", "queue", "." ]
7ef47b457655b96f4d2ef7ee9863cf1b6d20e023
https://github.com/DiamondLightSource/python-workflows/blob/7ef47b457655b96f4d2ef7ee9863cf1b6d20e023/workflows/frontend/__init__.py#L254-L268
17,634
DiamondLightSource/python-workflows
workflows/frontend/__init__.py
Frontend.process_transport_command
def process_transport_command(self, header, message): """Parse a command coming in through the transport command subscription""" if not isinstance(message, dict): return relevant = False if "host" in message: # Filter by host if message["host"] != self.__hostid: return relevant = True if "service" in message: # Filter by service if message["service"] != self._service_class_name: return relevant = True if not relevant: # Ignore message unless at least one filter matches return if message.get("command"): self.log.info( "Received command '%s' via transport layer", message["command"] ) if message["command"] == "shutdown": self.shutdown = True else: self.log.warning("Received invalid transport command message")
python
def process_transport_command(self, header, message): if not isinstance(message, dict): return relevant = False if "host" in message: # Filter by host if message["host"] != self.__hostid: return relevant = True if "service" in message: # Filter by service if message["service"] != self._service_class_name: return relevant = True if not relevant: # Ignore message unless at least one filter matches return if message.get("command"): self.log.info( "Received command '%s' via transport layer", message["command"] ) if message["command"] == "shutdown": self.shutdown = True else: self.log.warning("Received invalid transport command message")
[ "def", "process_transport_command", "(", "self", ",", "header", ",", "message", ")", ":", "if", "not", "isinstance", "(", "message", ",", "dict", ")", ":", "return", "relevant", "=", "False", "if", "\"host\"", "in", "message", ":", "# Filter by host", "if", ...
Parse a command coming in through the transport command subscription
[ "Parse", "a", "command", "coming", "in", "through", "the", "transport", "command", "subscription" ]
7ef47b457655b96f4d2ef7ee9863cf1b6d20e023
https://github.com/DiamondLightSource/python-workflows/blob/7ef47b457655b96f4d2ef7ee9863cf1b6d20e023/workflows/frontend/__init__.py#L270-L294
17,635
DiamondLightSource/python-workflows
workflows/frontend/__init__.py
Frontend.parse_band_log
def parse_band_log(self, message): """Process incoming logging messages from the service.""" if "payload" in message and hasattr(message["payload"], "name"): record = message["payload"] for k in dir(record): if k.startswith("workflows_exc_"): setattr(record, k[14:], getattr(record, k)) delattr(record, k) for k, v in self.get_status().items(): setattr(record, "workflows_" + k, v) logging.getLogger(record.name).handle(record) else: self.log.warning( "Received broken record on log band\n" + "Message: %s\nRecord: %s", str(message), str( hasattr(message.get("payload"), "__dict__") and message["payload"].__dict__ ), )
python
def parse_band_log(self, message): if "payload" in message and hasattr(message["payload"], "name"): record = message["payload"] for k in dir(record): if k.startswith("workflows_exc_"): setattr(record, k[14:], getattr(record, k)) delattr(record, k) for k, v in self.get_status().items(): setattr(record, "workflows_" + k, v) logging.getLogger(record.name).handle(record) else: self.log.warning( "Received broken record on log band\n" + "Message: %s\nRecord: %s", str(message), str( hasattr(message.get("payload"), "__dict__") and message["payload"].__dict__ ), )
[ "def", "parse_band_log", "(", "self", ",", "message", ")", ":", "if", "\"payload\"", "in", "message", "and", "hasattr", "(", "message", "[", "\"payload\"", "]", ",", "\"name\"", ")", ":", "record", "=", "message", "[", "\"payload\"", "]", "for", "k", "in...
Process incoming logging messages from the service.
[ "Process", "incoming", "logging", "messages", "from", "the", "service", "." ]
7ef47b457655b96f4d2ef7ee9863cf1b6d20e023
https://github.com/DiamondLightSource/python-workflows/blob/7ef47b457655b96f4d2ef7ee9863cf1b6d20e023/workflows/frontend/__init__.py#L296-L315
17,636
DiamondLightSource/python-workflows
workflows/frontend/__init__.py
Frontend.parse_band_request_termination
def parse_band_request_termination(self, message): """Service declares it should be terminated.""" self.log.debug("Service requests termination") self._terminate_service() if not self.restart_service: self.shutdown = True
python
def parse_band_request_termination(self, message): self.log.debug("Service requests termination") self._terminate_service() if not self.restart_service: self.shutdown = True
[ "def", "parse_band_request_termination", "(", "self", ",", "message", ")", ":", "self", ".", "log", ".", "debug", "(", "\"Service requests termination\"", ")", "self", ".", "_terminate_service", "(", ")", "if", "not", "self", ".", "restart_service", ":", "self",...
Service declares it should be terminated.
[ "Service", "declares", "it", "should", "be", "terminated", "." ]
7ef47b457655b96f4d2ef7ee9863cf1b6d20e023
https://github.com/DiamondLightSource/python-workflows/blob/7ef47b457655b96f4d2ef7ee9863cf1b6d20e023/workflows/frontend/__init__.py#L317-L322
17,637
DiamondLightSource/python-workflows
workflows/frontend/__init__.py
Frontend.parse_band_set_name
def parse_band_set_name(self, message): """Process incoming message indicating service name change.""" if message.get("name"): self._service_name = message["name"] else: self.log.warning( "Received broken record on set_name band\nMessage: %s", str(message) )
python
def parse_band_set_name(self, message): if message.get("name"): self._service_name = message["name"] else: self.log.warning( "Received broken record on set_name band\nMessage: %s", str(message) )
[ "def", "parse_band_set_name", "(", "self", ",", "message", ")", ":", "if", "message", ".", "get", "(", "\"name\"", ")", ":", "self", ".", "_service_name", "=", "message", "[", "\"name\"", "]", "else", ":", "self", ".", "log", ".", "warning", "(", "\"Re...
Process incoming message indicating service name change.
[ "Process", "incoming", "message", "indicating", "service", "name", "change", "." ]
7ef47b457655b96f4d2ef7ee9863cf1b6d20e023
https://github.com/DiamondLightSource/python-workflows/blob/7ef47b457655b96f4d2ef7ee9863cf1b6d20e023/workflows/frontend/__init__.py#L324-L331
17,638
DiamondLightSource/python-workflows
workflows/frontend/__init__.py
Frontend.parse_band_status_update
def parse_band_status_update(self, message): """Process incoming status updates from the service.""" self.log.debug("Status update: " + str(message)) self.update_status(status_code=message["statuscode"])
python
def parse_band_status_update(self, message): self.log.debug("Status update: " + str(message)) self.update_status(status_code=message["statuscode"])
[ "def", "parse_band_status_update", "(", "self", ",", "message", ")", ":", "self", ".", "log", ".", "debug", "(", "\"Status update: \"", "+", "str", "(", "message", ")", ")", "self", ".", "update_status", "(", "status_code", "=", "message", "[", "\"statuscode...
Process incoming status updates from the service.
[ "Process", "incoming", "status", "updates", "from", "the", "service", "." ]
7ef47b457655b96f4d2ef7ee9863cf1b6d20e023
https://github.com/DiamondLightSource/python-workflows/blob/7ef47b457655b96f4d2ef7ee9863cf1b6d20e023/workflows/frontend/__init__.py#L333-L336
17,639
DiamondLightSource/python-workflows
workflows/frontend/__init__.py
Frontend.get_status
def get_status(self): """Returns a dictionary containing all relevant status information to be broadcast across the network.""" return { "host": self.__hostid, "status": self._service_status_announced, "statustext": CommonService.human_readable_state.get( self._service_status_announced ), "service": self._service_name, "serviceclass": self._service_class_name, "utilization": self._utilization.report(), "workflows": workflows.version(), }
python
def get_status(self): return { "host": self.__hostid, "status": self._service_status_announced, "statustext": CommonService.human_readable_state.get( self._service_status_announced ), "service": self._service_name, "serviceclass": self._service_class_name, "utilization": self._utilization.report(), "workflows": workflows.version(), }
[ "def", "get_status", "(", "self", ")", ":", "return", "{", "\"host\"", ":", "self", ".", "__hostid", ",", "\"status\"", ":", "self", ".", "_service_status_announced", ",", "\"statustext\"", ":", "CommonService", ".", "human_readable_state", ".", "get", "(", "s...
Returns a dictionary containing all relevant status information to be broadcast across the network.
[ "Returns", "a", "dictionary", "containing", "all", "relevant", "status", "information", "to", "be", "broadcast", "across", "the", "network", "." ]
7ef47b457655b96f4d2ef7ee9863cf1b6d20e023
https://github.com/DiamondLightSource/python-workflows/blob/7ef47b457655b96f4d2ef7ee9863cf1b6d20e023/workflows/frontend/__init__.py#L342-L355
17,640
DiamondLightSource/python-workflows
workflows/frontend/__init__.py
Frontend.exponential_backoff
def exponential_backoff(self): """A function that keeps waiting longer and longer the more rapidly it is called. It can be used to increasingly slow down service starts when they keep failing.""" last_service_switch = self._service_starttime if not last_service_switch: return time_since_last_switch = time.time() - last_service_switch if not self._service_rapidstarts: self._service_rapidstarts = 0 minimum_wait = 0.1 * (2 ** self._service_rapidstarts) minimum_wait = min(5, minimum_wait) if time_since_last_switch > 10: self._service_rapidstarts = 0 return self._service_rapidstarts += 1 self.log.debug("Slowing down service starts (%.1f seconds)", minimum_wait) time.sleep(minimum_wait)
python
def exponential_backoff(self): last_service_switch = self._service_starttime if not last_service_switch: return time_since_last_switch = time.time() - last_service_switch if not self._service_rapidstarts: self._service_rapidstarts = 0 minimum_wait = 0.1 * (2 ** self._service_rapidstarts) minimum_wait = min(5, minimum_wait) if time_since_last_switch > 10: self._service_rapidstarts = 0 return self._service_rapidstarts += 1 self.log.debug("Slowing down service starts (%.1f seconds)", minimum_wait) time.sleep(minimum_wait)
[ "def", "exponential_backoff", "(", "self", ")", ":", "last_service_switch", "=", "self", ".", "_service_starttime", "if", "not", "last_service_switch", ":", "return", "time_since_last_switch", "=", "time", ".", "time", "(", ")", "-", "last_service_switch", "if", "...
A function that keeps waiting longer and longer the more rapidly it is called. It can be used to increasingly slow down service starts when they keep failing.
[ "A", "function", "that", "keeps", "waiting", "longer", "and", "longer", "the", "more", "rapidly", "it", "is", "called", ".", "It", "can", "be", "used", "to", "increasingly", "slow", "down", "service", "starts", "when", "they", "keep", "failing", "." ]
7ef47b457655b96f4d2ef7ee9863cf1b6d20e023
https://github.com/DiamondLightSource/python-workflows/blob/7ef47b457655b96f4d2ef7ee9863cf1b6d20e023/workflows/frontend/__init__.py#L357-L375
17,641
DiamondLightSource/python-workflows
workflows/frontend/__init__.py
Frontend._terminate_service
def _terminate_service(self): """Force termination of running service. Disconnect queues, end queue feeder threads. Wait for service process to clear, drop all references.""" with self.__lock: if self._service: self._service.terminate() if self._pipe_commands: self._pipe_commands.close() if self._pipe_service: self._pipe_service.close() self._pipe_commands = None self._pipe_service = None self._service_class_name = None self._service_name = None if self._service_status != CommonService.SERVICE_STATUS_TEARDOWN: self.update_status(status_code=CommonService.SERVICE_STATUS_END) if self._service: self._service.join() # must wait for process to be actually destroyed self._service = None
python
def _terminate_service(self): with self.__lock: if self._service: self._service.terminate() if self._pipe_commands: self._pipe_commands.close() if self._pipe_service: self._pipe_service.close() self._pipe_commands = None self._pipe_service = None self._service_class_name = None self._service_name = None if self._service_status != CommonService.SERVICE_STATUS_TEARDOWN: self.update_status(status_code=CommonService.SERVICE_STATUS_END) if self._service: self._service.join() # must wait for process to be actually destroyed self._service = None
[ "def", "_terminate_service", "(", "self", ")", ":", "with", "self", ".", "__lock", ":", "if", "self", ".", "_service", ":", "self", ".", "_service", ".", "terminate", "(", ")", "if", "self", ".", "_pipe_commands", ":", "self", ".", "_pipe_commands", ".",...
Force termination of running service. Disconnect queues, end queue feeder threads. Wait for service process to clear, drop all references.
[ "Force", "termination", "of", "running", "service", ".", "Disconnect", "queues", "end", "queue", "feeder", "threads", ".", "Wait", "for", "service", "process", "to", "clear", "drop", "all", "references", "." ]
7ef47b457655b96f4d2ef7ee9863cf1b6d20e023
https://github.com/DiamondLightSource/python-workflows/blob/7ef47b457655b96f4d2ef7ee9863cf1b6d20e023/workflows/frontend/__init__.py#L430-L449
17,642
cuihantao/andes
andes/utils/solver.py
Solver.symbolic
def symbolic(self, A): """ Return the symbolic factorization of sparse matrix ``A`` Parameters ---------- sparselib Library name in ``umfpack`` and ``klu`` A Sparse matrix Returns symbolic factorization ------- """ if self.sparselib == 'umfpack': return umfpack.symbolic(A) elif self.sparselib == 'klu': return klu.symbolic(A)
python
def symbolic(self, A): if self.sparselib == 'umfpack': return umfpack.symbolic(A) elif self.sparselib == 'klu': return klu.symbolic(A)
[ "def", "symbolic", "(", "self", ",", "A", ")", ":", "if", "self", ".", "sparselib", "==", "'umfpack'", ":", "return", "umfpack", ".", "symbolic", "(", "A", ")", "elif", "self", ".", "sparselib", "==", "'klu'", ":", "return", "klu", ".", "symbolic", "...
Return the symbolic factorization of sparse matrix ``A`` Parameters ---------- sparselib Library name in ``umfpack`` and ``klu`` A Sparse matrix Returns symbolic factorization -------
[ "Return", "the", "symbolic", "factorization", "of", "sparse", "matrix", "A" ]
7067898d4f26ce7534e968b8486c4aa8fe3a511a
https://github.com/cuihantao/andes/blob/7067898d4f26ce7534e968b8486c4aa8fe3a511a/andes/utils/solver.py#L17-L38
17,643
cuihantao/andes
andes/utils/solver.py
Solver.numeric
def numeric(self, A, F): """ Return the numeric factorization of sparse matrix ``A`` using symbolic factorization ``F`` Parameters ---------- A Sparse matrix F Symbolic factorization Returns ------- N Numeric factorization of ``A`` """ if self.sparselib == 'umfpack': return umfpack.numeric(A, F) elif self.sparselib == 'klu': return klu.numeric(A, F)
python
def numeric(self, A, F): if self.sparselib == 'umfpack': return umfpack.numeric(A, F) elif self.sparselib == 'klu': return klu.numeric(A, F)
[ "def", "numeric", "(", "self", ",", "A", ",", "F", ")", ":", "if", "self", ".", "sparselib", "==", "'umfpack'", ":", "return", "umfpack", ".", "numeric", "(", "A", ",", "F", ")", "elif", "self", ".", "sparselib", "==", "'klu'", ":", "return", "klu"...
Return the numeric factorization of sparse matrix ``A`` using symbolic factorization ``F`` Parameters ---------- A Sparse matrix F Symbolic factorization Returns ------- N Numeric factorization of ``A``
[ "Return", "the", "numeric", "factorization", "of", "sparse", "matrix", "A", "using", "symbolic", "factorization", "F" ]
7067898d4f26ce7534e968b8486c4aa8fe3a511a
https://github.com/cuihantao/andes/blob/7067898d4f26ce7534e968b8486c4aa8fe3a511a/andes/utils/solver.py#L40-L60
17,644
cuihantao/andes
andes/utils/solver.py
Solver.solve
def solve(self, A, F, N, b): """ Solve linear system ``Ax = b`` using numeric factorization ``N`` and symbolic factorization ``F``. Store the solution in ``b``. Parameters ---------- A Sparse matrix F Symbolic factorization N Numeric factorization b RHS of the equation Returns ------- None """ if self.sparselib == 'umfpack': umfpack.solve(A, N, b) elif self.sparselib == 'klu': klu.solve(A, F, N, b)
python
def solve(self, A, F, N, b): if self.sparselib == 'umfpack': umfpack.solve(A, N, b) elif self.sparselib == 'klu': klu.solve(A, F, N, b)
[ "def", "solve", "(", "self", ",", "A", ",", "F", ",", "N", ",", "b", ")", ":", "if", "self", ".", "sparselib", "==", "'umfpack'", ":", "umfpack", ".", "solve", "(", "A", ",", "N", ",", "b", ")", "elif", "self", ".", "sparselib", "==", "'klu'", ...
Solve linear system ``Ax = b`` using numeric factorization ``N`` and symbolic factorization ``F``. Store the solution in ``b``. Parameters ---------- A Sparse matrix F Symbolic factorization N Numeric factorization b RHS of the equation Returns ------- None
[ "Solve", "linear", "system", "Ax", "=", "b", "using", "numeric", "factorization", "N", "and", "symbolic", "factorization", "F", ".", "Store", "the", "solution", "in", "b", "." ]
7067898d4f26ce7534e968b8486c4aa8fe3a511a
https://github.com/cuihantao/andes/blob/7067898d4f26ce7534e968b8486c4aa8fe3a511a/andes/utils/solver.py#L62-L86
17,645
cuihantao/andes
andes/utils/solver.py
Solver.linsolve
def linsolve(self, A, b): """ Solve linear equation set ``Ax = b`` and store the solutions in ``b``. Parameters ---------- A Sparse matrix b RHS of the equation Returns ------- None """ if self.sparselib == 'umfpack': return umfpack.linsolve(A, b) elif self.sparselib == 'klu': return klu.linsolve(A, b)
python
def linsolve(self, A, b): if self.sparselib == 'umfpack': return umfpack.linsolve(A, b) elif self.sparselib == 'klu': return klu.linsolve(A, b)
[ "def", "linsolve", "(", "self", ",", "A", ",", "b", ")", ":", "if", "self", ".", "sparselib", "==", "'umfpack'", ":", "return", "umfpack", ".", "linsolve", "(", "A", ",", "b", ")", "elif", "self", ".", "sparselib", "==", "'klu'", ":", "return", "kl...
Solve linear equation set ``Ax = b`` and store the solutions in ``b``. Parameters ---------- A Sparse matrix b RHS of the equation Returns ------- None
[ "Solve", "linear", "equation", "set", "Ax", "=", "b", "and", "store", "the", "solutions", "in", "b", "." ]
7067898d4f26ce7534e968b8486c4aa8fe3a511a
https://github.com/cuihantao/andes/blob/7067898d4f26ce7534e968b8486c4aa8fe3a511a/andes/utils/solver.py#L88-L108
17,646
cuihantao/andes
andes/models/bus.py
Bus._varname_inj
def _varname_inj(self): """Customize varname for bus injections""" # Bus Pi if not self.n: return m = self.system.dae.m xy_idx = range(m, self.n + m) self.system.varname.append( listname='unamey', xy_idx=xy_idx, var_name='P', element_name=self.name) self.system.varname.append( listname='fnamey', xy_idx=xy_idx, var_name='P', element_name=self.name) # Bus Qi xy_idx = range(m + self.n, m + 2 * self.n) self.system.varname.append( listname='unamey', xy_idx=xy_idx, var_name='Q', element_name=self.name) self.system.varname.append( listname='fnamey', xy_idx=xy_idx, var_name='Q', element_name=self.name)
python
def _varname_inj(self): # Bus Pi if not self.n: return m = self.system.dae.m xy_idx = range(m, self.n + m) self.system.varname.append( listname='unamey', xy_idx=xy_idx, var_name='P', element_name=self.name) self.system.varname.append( listname='fnamey', xy_idx=xy_idx, var_name='P', element_name=self.name) # Bus Qi xy_idx = range(m + self.n, m + 2 * self.n) self.system.varname.append( listname='unamey', xy_idx=xy_idx, var_name='Q', element_name=self.name) self.system.varname.append( listname='fnamey', xy_idx=xy_idx, var_name='Q', element_name=self.name)
[ "def", "_varname_inj", "(", "self", ")", ":", "# Bus Pi", "if", "not", "self", ".", "n", ":", "return", "m", "=", "self", ".", "system", ".", "dae", ".", "m", "xy_idx", "=", "range", "(", "m", ",", "self", ".", "n", "+", "m", ")", "self", ".", ...
Customize varname for bus injections
[ "Customize", "varname", "for", "bus", "injections" ]
7067898d4f26ce7534e968b8486c4aa8fe3a511a
https://github.com/cuihantao/andes/blob/7067898d4f26ce7534e968b8486c4aa8fe3a511a/andes/models/bus.py#L57-L86
17,647
cuihantao/andes
andes/models/bus.py
Bus.init0
def init0(self, dae): """Set bus Va and Vm initial values""" if not self.system.pflow.config.flatstart: dae.y[self.a] = self.angle + 1e-10 * uniform(self.n) dae.y[self.v] = self.voltage else: dae.y[self.a] = matrix(0.0, (self.n, 1), 'd') + 1e-10 * uniform(self.n) dae.y[self.v] = matrix(1.0, (self.n, 1), 'd')
python
def init0(self, dae): if not self.system.pflow.config.flatstart: dae.y[self.a] = self.angle + 1e-10 * uniform(self.n) dae.y[self.v] = self.voltage else: dae.y[self.a] = matrix(0.0, (self.n, 1), 'd') + 1e-10 * uniform(self.n) dae.y[self.v] = matrix(1.0, (self.n, 1), 'd')
[ "def", "init0", "(", "self", ",", "dae", ")", ":", "if", "not", "self", ".", "system", ".", "pflow", ".", "config", ".", "flatstart", ":", "dae", ".", "y", "[", "self", ".", "a", "]", "=", "self", ".", "angle", "+", "1e-10", "*", "uniform", "("...
Set bus Va and Vm initial values
[ "Set", "bus", "Va", "and", "Vm", "initial", "values" ]
7067898d4f26ce7534e968b8486c4aa8fe3a511a
https://github.com/cuihantao/andes/blob/7067898d4f26ce7534e968b8486c4aa8fe3a511a/andes/models/bus.py#L88-L96
17,648
DiamondLightSource/python-workflows
workflows/transport/__init__.py
get_known_transports
def get_known_transports(): """Return a dictionary of all known transport mechanisms.""" if not hasattr(get_known_transports, "cache"): setattr( get_known_transports, "cache", { e.name: e.load() for e in pkg_resources.iter_entry_points("workflows.transport") }, ) return get_known_transports.cache.copy()
python
def get_known_transports(): if not hasattr(get_known_transports, "cache"): setattr( get_known_transports, "cache", { e.name: e.load() for e in pkg_resources.iter_entry_points("workflows.transport") }, ) return get_known_transports.cache.copy()
[ "def", "get_known_transports", "(", ")", ":", "if", "not", "hasattr", "(", "get_known_transports", ",", "\"cache\"", ")", ":", "setattr", "(", "get_known_transports", ",", "\"cache\"", ",", "{", "e", ".", "name", ":", "e", ".", "load", "(", ")", "for", "...
Return a dictionary of all known transport mechanisms.
[ "Return", "a", "dictionary", "of", "all", "known", "transport", "mechanisms", "." ]
7ef47b457655b96f4d2ef7ee9863cf1b6d20e023
https://github.com/DiamondLightSource/python-workflows/blob/7ef47b457655b96f4d2ef7ee9863cf1b6d20e023/workflows/transport/__init__.py#L21-L32
17,649
cuihantao/andes
andes/models/wind.py
WindBase.windspeed
def windspeed(self, t): """Return the wind speed list at time `t`""" ws = [0] * self.n for i in range(self.n): q = ceil(t / self.dt[i]) q_prev = 0 if q == 0 else q - 1 r = t % self.dt[i] r = 0 if abs(r) < 1e-6 else r if r == 0: ws[i] = self.speed[i][q] else: t1 = self.time[i][q_prev] s1 = self.speed[i][q_prev] s2 = self.speed[i][q] ws[i] = s1 + (t - t1) * (s2 - s1) / self.dt[i] return matrix(ws)
python
def windspeed(self, t): ws = [0] * self.n for i in range(self.n): q = ceil(t / self.dt[i]) q_prev = 0 if q == 0 else q - 1 r = t % self.dt[i] r = 0 if abs(r) < 1e-6 else r if r == 0: ws[i] = self.speed[i][q] else: t1 = self.time[i][q_prev] s1 = self.speed[i][q_prev] s2 = self.speed[i][q] ws[i] = s1 + (t - t1) * (s2 - s1) / self.dt[i] return matrix(ws)
[ "def", "windspeed", "(", "self", ",", "t", ")", ":", "ws", "=", "[", "0", "]", "*", "self", ".", "n", "for", "i", "in", "range", "(", "self", ".", "n", ")", ":", "q", "=", "ceil", "(", "t", "/", "self", ".", "dt", "[", "i", "]", ")", "q...
Return the wind speed list at time `t`
[ "Return", "the", "wind", "speed", "list", "at", "time", "t" ]
7067898d4f26ce7534e968b8486c4aa8fe3a511a
https://github.com/cuihantao/andes/blob/7067898d4f26ce7534e968b8486c4aa8fe3a511a/andes/models/wind.py#L82-L101
17,650
cuihantao/andes
andes/system.py
PowerSystem.to_sysbase
def to_sysbase(self): """ Convert model parameters to system base. This function calls the ``data_to_sys_base`` function of the loaded models. Returns ------- None """ if self.config.base: for item in self.devman.devices: self.__dict__[item].data_to_sys_base()
python
def to_sysbase(self): if self.config.base: for item in self.devman.devices: self.__dict__[item].data_to_sys_base()
[ "def", "to_sysbase", "(", "self", ")", ":", "if", "self", ".", "config", ".", "base", ":", "for", "item", "in", "self", ".", "devman", ".", "devices", ":", "self", ".", "__dict__", "[", "item", "]", ".", "data_to_sys_base", "(", ")" ]
Convert model parameters to system base. This function calls the ``data_to_sys_base`` function of the loaded models. Returns ------- None
[ "Convert", "model", "parameters", "to", "system", "base", ".", "This", "function", "calls", "the", "data_to_sys_base", "function", "of", "the", "loaded", "models", "." ]
7067898d4f26ce7534e968b8486c4aa8fe3a511a
https://github.com/cuihantao/andes/blob/7067898d4f26ce7534e968b8486c4aa8fe3a511a/andes/system.py#L213-L224
17,651
cuihantao/andes
andes/system.py
PowerSystem.to_elembase
def to_elembase(self): """ Convert parameters back to element base. This function calls the ```data_to_elem_base``` function. Returns ------- None """ if self.config.base: for item in self.devman.devices: self.__dict__[item].data_to_elem_base()
python
def to_elembase(self): if self.config.base: for item in self.devman.devices: self.__dict__[item].data_to_elem_base()
[ "def", "to_elembase", "(", "self", ")", ":", "if", "self", ".", "config", ".", "base", ":", "for", "item", "in", "self", ".", "devman", ".", "devices", ":", "self", ".", "__dict__", "[", "item", "]", ".", "data_to_elem_base", "(", ")" ]
Convert parameters back to element base. This function calls the ```data_to_elem_base``` function. Returns ------- None
[ "Convert", "parameters", "back", "to", "element", "base", ".", "This", "function", "calls", "the", "data_to_elem_base", "function", "." ]
7067898d4f26ce7534e968b8486c4aa8fe3a511a
https://github.com/cuihantao/andes/blob/7067898d4f26ce7534e968b8486c4aa8fe3a511a/andes/system.py#L226-L237
17,652
cuihantao/andes
andes/system.py
PowerSystem.group_add
def group_add(self, name='Ungrouped'): """ Dynamically add a group instance to the system if not exist. Parameters ---------- name : str, optional ('Ungrouped' as default) Name of the group Returns ------- None """ if not hasattr(self, name): self.__dict__[name] = Group(self, name) self.loaded_groups.append(name)
python
def group_add(self, name='Ungrouped'): if not hasattr(self, name): self.__dict__[name] = Group(self, name) self.loaded_groups.append(name)
[ "def", "group_add", "(", "self", ",", "name", "=", "'Ungrouped'", ")", ":", "if", "not", "hasattr", "(", "self", ",", "name", ")", ":", "self", ".", "__dict__", "[", "name", "]", "=", "Group", "(", "self", ",", "name", ")", "self", ".", "loaded_gro...
Dynamically add a group instance to the system if not exist. Parameters ---------- name : str, optional ('Ungrouped' as default) Name of the group Returns ------- None
[ "Dynamically", "add", "a", "group", "instance", "to", "the", "system", "if", "not", "exist", "." ]
7067898d4f26ce7534e968b8486c4aa8fe3a511a
https://github.com/cuihantao/andes/blob/7067898d4f26ce7534e968b8486c4aa8fe3a511a/andes/system.py#L239-L254
17,653
cuihantao/andes
andes/system.py
PowerSystem.model_import
def model_import(self): """ Import and instantiate the non-JIT models and the JIT models. Models defined in ``jits`` and ``non_jits`` in ``models/__init__.py`` will be imported and instantiated accordingly. Returns ------- None """ # non-JIT models for file, pair in non_jits.items(): for cls, name in pair.items(): themodel = importlib.import_module('andes.models.' + file) theclass = getattr(themodel, cls) self.__dict__[name] = theclass(self, name) group = self.__dict__[name]._group self.group_add(group) self.__dict__[group].register_model(name) self.devman.register_device(name) # import JIT models for file, pair in jits.items(): for cls, name in pair.items(): self.__dict__[name] = JIT(self, file, cls, name)
python
def model_import(self): # non-JIT models for file, pair in non_jits.items(): for cls, name in pair.items(): themodel = importlib.import_module('andes.models.' + file) theclass = getattr(themodel, cls) self.__dict__[name] = theclass(self, name) group = self.__dict__[name]._group self.group_add(group) self.__dict__[group].register_model(name) self.devman.register_device(name) # import JIT models for file, pair in jits.items(): for cls, name in pair.items(): self.__dict__[name] = JIT(self, file, cls, name)
[ "def", "model_import", "(", "self", ")", ":", "# non-JIT models", "for", "file", ",", "pair", "in", "non_jits", ".", "items", "(", ")", ":", "for", "cls", ",", "name", "in", "pair", ".", "items", "(", ")", ":", "themodel", "=", "importlib", ".", "imp...
Import and instantiate the non-JIT models and the JIT models. Models defined in ``jits`` and ``non_jits`` in ``models/__init__.py`` will be imported and instantiated accordingly. Returns ------- None
[ "Import", "and", "instantiate", "the", "non", "-", "JIT", "models", "and", "the", "JIT", "models", "." ]
7067898d4f26ce7534e968b8486c4aa8fe3a511a
https://github.com/cuihantao/andes/blob/7067898d4f26ce7534e968b8486c4aa8fe3a511a/andes/system.py#L256-L283
17,654
cuihantao/andes
andes/system.py
PowerSystem.model_setup
def model_setup(self): """ Call the ``setup`` function of the loaded models. This function is to be called after parsing all the data files during the system set up. Returns ------- None """ for device in self.devman.devices: if self.__dict__[device].n: try: self.__dict__[device].setup() except Exception as e: raise e
python
def model_setup(self): for device in self.devman.devices: if self.__dict__[device].n: try: self.__dict__[device].setup() except Exception as e: raise e
[ "def", "model_setup", "(", "self", ")", ":", "for", "device", "in", "self", ".", "devman", ".", "devices", ":", "if", "self", ".", "__dict__", "[", "device", "]", ".", "n", ":", "try", ":", "self", ".", "__dict__", "[", "device", "]", ".", "setup",...
Call the ``setup`` function of the loaded models. This function is to be called after parsing all the data files during the system set up. Returns ------- None
[ "Call", "the", "setup", "function", "of", "the", "loaded", "models", ".", "This", "function", "is", "to", "be", "called", "after", "parsing", "all", "the", "data", "files", "during", "the", "system", "set", "up", "." ]
7067898d4f26ce7534e968b8486c4aa8fe3a511a
https://github.com/cuihantao/andes/blob/7067898d4f26ce7534e968b8486c4aa8fe3a511a/andes/system.py#L306-L320
17,655
cuihantao/andes
andes/system.py
PowerSystem.xy_addr0
def xy_addr0(self): """ Assign indicies and variable names for variables used in power flow For each loaded model with the ``pflow`` flag as ``True``, the following functions are called sequentially: * ``_addr()`` * ``_intf_network()`` * ``_intf_ctrl()`` After resizing the ``varname`` instance, variable names from models are stored by calling ``_varname()`` Returns ------- None """ for device, pflow in zip(self.devman.devices, self.call.pflow): if pflow: self.__dict__[device]._addr() self.__dict__[device]._intf_network() self.__dict__[device]._intf_ctrl() self.varname.resize() for device, pflow in zip(self.devman.devices, self.call.pflow): if pflow: self.__dict__[device]._varname()
python
def xy_addr0(self): for device, pflow in zip(self.devman.devices, self.call.pflow): if pflow: self.__dict__[device]._addr() self.__dict__[device]._intf_network() self.__dict__[device]._intf_ctrl() self.varname.resize() for device, pflow in zip(self.devman.devices, self.call.pflow): if pflow: self.__dict__[device]._varname()
[ "def", "xy_addr0", "(", "self", ")", ":", "for", "device", ",", "pflow", "in", "zip", "(", "self", ".", "devman", ".", "devices", ",", "self", ".", "call", ".", "pflow", ")", ":", "if", "pflow", ":", "self", ".", "__dict__", "[", "device", "]", "...
Assign indicies and variable names for variables used in power flow For each loaded model with the ``pflow`` flag as ``True``, the following functions are called sequentially: * ``_addr()`` * ``_intf_network()`` * ``_intf_ctrl()`` After resizing the ``varname`` instance, variable names from models are stored by calling ``_varname()`` Returns ------- None
[ "Assign", "indicies", "and", "variable", "names", "for", "variables", "used", "in", "power", "flow" ]
7067898d4f26ce7534e968b8486c4aa8fe3a511a
https://github.com/cuihantao/andes/blob/7067898d4f26ce7534e968b8486c4aa8fe3a511a/andes/system.py#L322-L350
17,656
cuihantao/andes
andes/system.py
PowerSystem.rmgen
def rmgen(self, idx): """ Remove the static generators if their dynamic models exist Parameters ---------- idx : list A list of static generator idx Returns ------- None """ stagens = [] for device, stagen in zip(self.devman.devices, self.call.stagen): if stagen: stagens.append(device) for gen in idx: for stagen in stagens: if gen in self.__dict__[stagen].uid.keys(): self.__dict__[stagen].disable_gen(gen)
python
def rmgen(self, idx): stagens = [] for device, stagen in zip(self.devman.devices, self.call.stagen): if stagen: stagens.append(device) for gen in idx: for stagen in stagens: if gen in self.__dict__[stagen].uid.keys(): self.__dict__[stagen].disable_gen(gen)
[ "def", "rmgen", "(", "self", ",", "idx", ")", ":", "stagens", "=", "[", "]", "for", "device", ",", "stagen", "in", "zip", "(", "self", ".", "devman", ".", "devices", ",", "self", ".", "call", ".", "stagen", ")", ":", "if", "stagen", ":", "stagens...
Remove the static generators if their dynamic models exist Parameters ---------- idx : list A list of static generator idx Returns ------- None
[ "Remove", "the", "static", "generators", "if", "their", "dynamic", "models", "exist" ]
7067898d4f26ce7534e968b8486c4aa8fe3a511a
https://github.com/cuihantao/andes/blob/7067898d4f26ce7534e968b8486c4aa8fe3a511a/andes/system.py#L370-L389
17,657
cuihantao/andes
andes/system.py
PowerSystem.check_event
def check_event(self, sim_time): """ Check for event occurrance for``Event`` group models at ``sim_time`` Parameters ---------- sim_time : float The current simulation time Returns ------- list A list of model names who report (an) event(s) at ``sim_time`` """ ret = [] for model in self.__dict__['Event'].all_models: if self.__dict__[model].is_time(sim_time): ret.append(model) if self.Breaker.is_time(sim_time): ret.append('Breaker') return ret
python
def check_event(self, sim_time): ret = [] for model in self.__dict__['Event'].all_models: if self.__dict__[model].is_time(sim_time): ret.append(model) if self.Breaker.is_time(sim_time): ret.append('Breaker') return ret
[ "def", "check_event", "(", "self", ",", "sim_time", ")", ":", "ret", "=", "[", "]", "for", "model", "in", "self", ".", "__dict__", "[", "'Event'", "]", ".", "all_models", ":", "if", "self", ".", "__dict__", "[", "model", "]", ".", "is_time", "(", "...
Check for event occurrance for``Event`` group models at ``sim_time`` Parameters ---------- sim_time : float The current simulation time Returns ------- list A list of model names who report (an) event(s) at ``sim_time``
[ "Check", "for", "event", "occurrance", "for", "Event", "group", "models", "at", "sim_time" ]
7067898d4f26ce7534e968b8486c4aa8fe3a511a
https://github.com/cuihantao/andes/blob/7067898d4f26ce7534e968b8486c4aa8fe3a511a/andes/system.py#L391-L413
17,658
cuihantao/andes
andes/system.py
PowerSystem.get_event_times
def get_event_times(self): """ Return event times of Fault, Breaker and other timed events Returns ------- list A sorted list of event times """ times = [] times.extend(self.Breaker.get_times()) for model in self.__dict__['Event'].all_models: times.extend(self.__dict__[model].get_times()) if times: times = sorted(list(set(times))) return times
python
def get_event_times(self): times = [] times.extend(self.Breaker.get_times()) for model in self.__dict__['Event'].all_models: times.extend(self.__dict__[model].get_times()) if times: times = sorted(list(set(times))) return times
[ "def", "get_event_times", "(", "self", ")", ":", "times", "=", "[", "]", "times", ".", "extend", "(", "self", ".", "Breaker", ".", "get_times", "(", ")", ")", "for", "model", "in", "self", ".", "__dict__", "[", "'Event'", "]", ".", "all_models", ":",...
Return event times of Fault, Breaker and other timed events Returns ------- list A sorted list of event times
[ "Return", "event", "times", "of", "Fault", "Breaker", "and", "other", "timed", "events" ]
7067898d4f26ce7534e968b8486c4aa8fe3a511a
https://github.com/cuihantao/andes/blob/7067898d4f26ce7534e968b8486c4aa8fe3a511a/andes/system.py#L415-L434
17,659
cuihantao/andes
andes/system.py
PowerSystem.load_config
def load_config(self, conf_path): """ Load config from an ``andes.conf`` file. This function creates a ``configparser.ConfigParser`` object to read the specified conf file and calls the ``load_config`` function of the config instances of the system and the routines. Parameters ---------- conf_path : None or str Path to the Andes config file. If ``None``, the function body will not run. Returns ------- None """ if conf_path is None: return conf = configparser.ConfigParser() conf.read(conf_path) self.config.load_config(conf) for r in routines.__all__: self.__dict__[r.lower()].config.load_config(conf) logger.debug('Loaded config file from {}.'.format(conf_path))
python
def load_config(self, conf_path): if conf_path is None: return conf = configparser.ConfigParser() conf.read(conf_path) self.config.load_config(conf) for r in routines.__all__: self.__dict__[r.lower()].config.load_config(conf) logger.debug('Loaded config file from {}.'.format(conf_path))
[ "def", "load_config", "(", "self", ",", "conf_path", ")", ":", "if", "conf_path", "is", "None", ":", "return", "conf", "=", "configparser", ".", "ConfigParser", "(", ")", "conf", ".", "read", "(", "conf_path", ")", "self", ".", "config", ".", "load_confi...
Load config from an ``andes.conf`` file. This function creates a ``configparser.ConfigParser`` object to read the specified conf file and calls the ``load_config`` function of the config instances of the system and the routines. Parameters ---------- conf_path : None or str Path to the Andes config file. If ``None``, the function body will not run. Returns ------- None
[ "Load", "config", "from", "an", "andes", ".", "conf", "file", "." ]
7067898d4f26ce7534e968b8486c4aa8fe3a511a
https://github.com/cuihantao/andes/blob/7067898d4f26ce7534e968b8486c4aa8fe3a511a/andes/system.py#L436-L464
17,660
cuihantao/andes
andes/system.py
PowerSystem.dump_config
def dump_config(self, file_path): """ Dump system and routine configurations to an rc-formatted file. Parameters ---------- file_path : str path to the configuration file. The user will be prompted if the file already exists. Returns ------- None """ if os.path.isfile(file_path): logger.debug('File {} alreay exist. Overwrite? [y/N]'.format(file_path)) choice = input('File {} alreay exist. Overwrite? [y/N]'.format(file_path)).lower() if len(choice) == 0 or choice[0] != 'y': logger.info('File not overwritten.') return conf = self.config.dump_conf() for r in routines.__all__: conf = self.__dict__[r.lower()].config.dump_conf(conf) with open(file_path, 'w') as f: conf.write(f) logger.info('Config written to {}'.format(file_path))
python
def dump_config(self, file_path): if os.path.isfile(file_path): logger.debug('File {} alreay exist. Overwrite? [y/N]'.format(file_path)) choice = input('File {} alreay exist. Overwrite? [y/N]'.format(file_path)).lower() if len(choice) == 0 or choice[0] != 'y': logger.info('File not overwritten.') return conf = self.config.dump_conf() for r in routines.__all__: conf = self.__dict__[r.lower()].config.dump_conf(conf) with open(file_path, 'w') as f: conf.write(f) logger.info('Config written to {}'.format(file_path))
[ "def", "dump_config", "(", "self", ",", "file_path", ")", ":", "if", "os", ".", "path", ".", "isfile", "(", "file_path", ")", ":", "logger", ".", "debug", "(", "'File {} alreay exist. Overwrite? [y/N]'", ".", "format", "(", "file_path", ")", ")", "choice", ...
Dump system and routine configurations to an rc-formatted file. Parameters ---------- file_path : str path to the configuration file. The user will be prompted if the file already exists. Returns ------- None
[ "Dump", "system", "and", "routine", "configurations", "to", "an", "rc", "-", "formatted", "file", "." ]
7067898d4f26ce7534e968b8486c4aa8fe3a511a
https://github.com/cuihantao/andes/blob/7067898d4f26ce7534e968b8486c4aa8fe3a511a/andes/system.py#L466-L494
17,661
cuihantao/andes
andes/system.py
PowerSystem.check_islands
def check_islands(self, show_info=False): """ Check the connectivity for the ac system Parameters ---------- show_info : bool Show information when the system has islands. To be used when initializing power flow. Returns ------- None """ if not hasattr(self, 'Line'): logger.error('<Line> device not found.') return self.Line.connectivity(self.Bus) if show_info is True: if len(self.Bus.islanded_buses) == 0 and len( self.Bus.island_sets) == 0: logger.debug('System is interconnected.') else: logger.info( 'System contains {:d} islands and {:d} islanded buses.'. format( len(self.Bus.island_sets), len(self.Bus.islanded_buses))) nosw_island = [] # no slack bus island msw_island = [] # multiple slack bus island for idx, island in enumerate(self.Bus.island_sets): nosw = 1 for item in self.SW.bus: if self.Bus.uid[item] in island: nosw -= 1 if nosw == 1: nosw_island.append(idx) elif nosw < 0: msw_island.append(idx) if nosw_island: logger.warning( 'Slack bus is not defined for {:g} island(s).'.format( len(nosw_island))) if msw_island: logger.warning( 'Multiple slack buses are defined for {:g} island(s).'. format(len(nosw_island))) if (not nosw_island) and (not msw_island): logger.debug( 'Each island has a slack bus correctly defined.')
python
def check_islands(self, show_info=False): if not hasattr(self, 'Line'): logger.error('<Line> device not found.') return self.Line.connectivity(self.Bus) if show_info is True: if len(self.Bus.islanded_buses) == 0 and len( self.Bus.island_sets) == 0: logger.debug('System is interconnected.') else: logger.info( 'System contains {:d} islands and {:d} islanded buses.'. format( len(self.Bus.island_sets), len(self.Bus.islanded_buses))) nosw_island = [] # no slack bus island msw_island = [] # multiple slack bus island for idx, island in enumerate(self.Bus.island_sets): nosw = 1 for item in self.SW.bus: if self.Bus.uid[item] in island: nosw -= 1 if nosw == 1: nosw_island.append(idx) elif nosw < 0: msw_island.append(idx) if nosw_island: logger.warning( 'Slack bus is not defined for {:g} island(s).'.format( len(nosw_island))) if msw_island: logger.warning( 'Multiple slack buses are defined for {:g} island(s).'. format(len(nosw_island))) if (not nosw_island) and (not msw_island): logger.debug( 'Each island has a slack bus correctly defined.')
[ "def", "check_islands", "(", "self", ",", "show_info", "=", "False", ")", ":", "if", "not", "hasattr", "(", "self", ",", "'Line'", ")", ":", "logger", ".", "error", "(", "'<Line> device not found.'", ")", "return", "self", ".", "Line", ".", "connectivity",...
Check the connectivity for the ac system Parameters ---------- show_info : bool Show information when the system has islands. To be used when initializing power flow. Returns ------- None
[ "Check", "the", "connectivity", "for", "the", "ac", "system" ]
7067898d4f26ce7534e968b8486c4aa8fe3a511a
https://github.com/cuihantao/andes/blob/7067898d4f26ce7534e968b8486c4aa8fe3a511a/andes/system.py#L496-L550
17,662
cuihantao/andes
andes/system.py
PowerSystem.get_busdata
def get_busdata(self, sort_names=False): """ get ac bus data from solved power flow """ if self.pflow.solved is False: logger.error('Power flow not solved when getting bus data.') return tuple([False] * 8) idx = self.Bus.idx names = self.Bus.name Vm = [self.dae.y[x] for x in self.Bus.v] if self.pflow.config.usedegree: Va = [self.dae.y[x] * rad2deg for x in self.Bus.a] else: Va = [self.dae.y[x] for x in self.Bus.a] Pg = [self.Bus.Pg[x] for x in range(self.Bus.n)] Qg = [self.Bus.Qg[x] for x in range(self.Bus.n)] Pl = [self.Bus.Pl[x] for x in range(self.Bus.n)] Ql = [self.Bus.Ql[x] for x in range(self.Bus.n)] if sort_names: ret = (list(x) for x in zip(*sorted( zip(idx, names, Vm, Va, Pg, Qg, Pl, Ql), key=itemgetter(0)))) else: ret = idx, names, Vm, Va, Pg, Qg, Pl, Ql return ret
python
def get_busdata(self, sort_names=False): if self.pflow.solved is False: logger.error('Power flow not solved when getting bus data.') return tuple([False] * 8) idx = self.Bus.idx names = self.Bus.name Vm = [self.dae.y[x] for x in self.Bus.v] if self.pflow.config.usedegree: Va = [self.dae.y[x] * rad2deg for x in self.Bus.a] else: Va = [self.dae.y[x] for x in self.Bus.a] Pg = [self.Bus.Pg[x] for x in range(self.Bus.n)] Qg = [self.Bus.Qg[x] for x in range(self.Bus.n)] Pl = [self.Bus.Pl[x] for x in range(self.Bus.n)] Ql = [self.Bus.Ql[x] for x in range(self.Bus.n)] if sort_names: ret = (list(x) for x in zip(*sorted( zip(idx, names, Vm, Va, Pg, Qg, Pl, Ql), key=itemgetter(0)))) else: ret = idx, names, Vm, Va, Pg, Qg, Pl, Ql return ret
[ "def", "get_busdata", "(", "self", ",", "sort_names", "=", "False", ")", ":", "if", "self", ".", "pflow", ".", "solved", "is", "False", ":", "logger", ".", "error", "(", "'Power flow not solved when getting bus data.'", ")", "return", "tuple", "(", "[", "Fal...
get ac bus data from solved power flow
[ "get", "ac", "bus", "data", "from", "solved", "power", "flow" ]
7067898d4f26ce7534e968b8486c4aa8fe3a511a
https://github.com/cuihantao/andes/blob/7067898d4f26ce7534e968b8486c4aa8fe3a511a/andes/system.py#L552-L578
17,663
cuihantao/andes
andes/system.py
PowerSystem.get_nodedata
def get_nodedata(self, sort_names=False): """ get dc node data from solved power flow """ if not self.Node.n: return if not self.pflow.solved: logger.error('Power flow not solved when getting bus data.') return tuple([False] * 7) idx = self.Node.idx names = self.Node.name V = [self.dae.y[x] for x in self.Node.v] if sort_names: ret = (list(x) for x in zip(*sorted(zip(idx, names, V), key=itemgetter(0)))) else: ret = idx, names, V return ret
python
def get_nodedata(self, sort_names=False): if not self.Node.n: return if not self.pflow.solved: logger.error('Power flow not solved when getting bus data.') return tuple([False] * 7) idx = self.Node.idx names = self.Node.name V = [self.dae.y[x] for x in self.Node.v] if sort_names: ret = (list(x) for x in zip(*sorted(zip(idx, names, V), key=itemgetter(0)))) else: ret = idx, names, V return ret
[ "def", "get_nodedata", "(", "self", ",", "sort_names", "=", "False", ")", ":", "if", "not", "self", ".", "Node", ".", "n", ":", "return", "if", "not", "self", ".", "pflow", ".", "solved", ":", "logger", ".", "error", "(", "'Power flow not solved when get...
get dc node data from solved power flow
[ "get", "dc", "node", "data", "from", "solved", "power", "flow" ]
7067898d4f26ce7534e968b8486c4aa8fe3a511a
https://github.com/cuihantao/andes/blob/7067898d4f26ce7534e968b8486c4aa8fe3a511a/andes/system.py#L580-L599
17,664
cuihantao/andes
andes/system.py
PowerSystem.get_linedata
def get_linedata(self, sort_names=False): """ get line data from solved power flow """ if not self.pflow.solved: logger.error('Power flow not solved when getting line data.') return tuple([False] * 7) idx = self.Line.idx fr = self.Line.bus1 to = self.Line.bus2 Sloss = self.Line.S1 + self.Line.S2 Pfr = list(self.Line.S1.real()) Qfr = list(self.Line.S1.imag()) Pto = list(self.Line.S2.real()) Qto = list(self.Line.S2.imag()) Ploss = list(Sloss.real()) Qloss = list(Sloss.imag()) if sort_names: ret = (list(x) for x in zip(*sorted( zip(idx, fr, to, Pfr, Qfr, Pto, Qto, Ploss, Qloss), key=itemgetter(0)))) else: ret = idx, fr, to, Pfr, Qfr, Pto, Qto, Ploss, Qloss return ret
python
def get_linedata(self, sort_names=False): if not self.pflow.solved: logger.error('Power flow not solved when getting line data.') return tuple([False] * 7) idx = self.Line.idx fr = self.Line.bus1 to = self.Line.bus2 Sloss = self.Line.S1 + self.Line.S2 Pfr = list(self.Line.S1.real()) Qfr = list(self.Line.S1.imag()) Pto = list(self.Line.S2.real()) Qto = list(self.Line.S2.imag()) Ploss = list(Sloss.real()) Qloss = list(Sloss.imag()) if sort_names: ret = (list(x) for x in zip(*sorted( zip(idx, fr, to, Pfr, Qfr, Pto, Qto, Ploss, Qloss), key=itemgetter(0)))) else: ret = idx, fr, to, Pfr, Qfr, Pto, Qto, Ploss, Qloss return ret
[ "def", "get_linedata", "(", "self", ",", "sort_names", "=", "False", ")", ":", "if", "not", "self", ".", "pflow", ".", "solved", ":", "logger", ".", "error", "(", "'Power flow not solved when getting line data.'", ")", "return", "tuple", "(", "[", "False", "...
get line data from solved power flow
[ "get", "line", "data", "from", "solved", "power", "flow" ]
7067898d4f26ce7534e968b8486c4aa8fe3a511a
https://github.com/cuihantao/andes/blob/7067898d4f26ce7534e968b8486c4aa8fe3a511a/andes/system.py#L601-L629
17,665
cuihantao/andes
andes/system.py
Group.register_model
def register_model(self, model): """ Register ``model`` to this group :param model: model name :return: None """ assert isinstance(model, str) if model not in self.all_models: self.all_models.append(model)
python
def register_model(self, model): assert isinstance(model, str) if model not in self.all_models: self.all_models.append(model)
[ "def", "register_model", "(", "self", ",", "model", ")", ":", "assert", "isinstance", "(", "model", ",", "str", ")", "if", "model", "not", "in", "self", ".", "all_models", ":", "self", ".", "all_models", ".", "append", "(", "model", ")" ]
Register ``model`` to this group :param model: model name :return: None
[ "Register", "model", "to", "this", "group" ]
7067898d4f26ce7534e968b8486c4aa8fe3a511a
https://github.com/cuihantao/andes/blob/7067898d4f26ce7534e968b8486c4aa8fe3a511a/andes/system.py#L651-L661
17,666
cuihantao/andes
andes/system.py
Group.register_element
def register_element(self, model, idx): """ Register element with index ``idx`` to ``model`` :param model: model name :param idx: element idx :return: final element idx """ if idx is None: idx = model + '_' + str(len(self._idx_model)) self._idx_model[idx] = model self._idx.append(idx) return idx
python
def register_element(self, model, idx): if idx is None: idx = model + '_' + str(len(self._idx_model)) self._idx_model[idx] = model self._idx.append(idx) return idx
[ "def", "register_element", "(", "self", ",", "model", ",", "idx", ")", ":", "if", "idx", "is", "None", ":", "idx", "=", "model", "+", "'_'", "+", "str", "(", "len", "(", "self", ".", "_idx_model", ")", ")", "self", ".", "_idx_model", "[", "idx", ...
Register element with index ``idx`` to ``model`` :param model: model name :param idx: element idx :return: final element idx
[ "Register", "element", "with", "index", "idx", "to", "model" ]
7067898d4f26ce7534e968b8486c4aa8fe3a511a
https://github.com/cuihantao/andes/blob/7067898d4f26ce7534e968b8486c4aa8fe3a511a/andes/system.py#L663-L678
17,667
cuihantao/andes
andes/system.py
Group.get_field
def get_field(self, field, idx): """ Return the field ``field`` of elements ``idx`` in the group :param field: field name :param idx: element idx :return: values of the requested field """ ret = [] scalar = False # TODO: ensure idx is unique in this Group if isinstance(idx, (int, float, str)): scalar = True idx = [idx] models = [self._idx_model[i] for i in idx] for i, m in zip(idx, models): ret.append(self.system.__dict__[m].get_field(field, idx=i)) if scalar is True: return ret[0] else: return ret
python
def get_field(self, field, idx): ret = [] scalar = False # TODO: ensure idx is unique in this Group if isinstance(idx, (int, float, str)): scalar = True idx = [idx] models = [self._idx_model[i] for i in idx] for i, m in zip(idx, models): ret.append(self.system.__dict__[m].get_field(field, idx=i)) if scalar is True: return ret[0] else: return ret
[ "def", "get_field", "(", "self", ",", "field", ",", "idx", ")", ":", "ret", "=", "[", "]", "scalar", "=", "False", "# TODO: ensure idx is unique in this Group", "if", "isinstance", "(", "idx", ",", "(", "int", ",", "float", ",", "str", ")", ")", ":", "...
Return the field ``field`` of elements ``idx`` in the group :param field: field name :param idx: element idx :return: values of the requested field
[ "Return", "the", "field", "field", "of", "elements", "idx", "in", "the", "group" ]
7067898d4f26ce7534e968b8486c4aa8fe3a511a
https://github.com/cuihantao/andes/blob/7067898d4f26ce7534e968b8486c4aa8fe3a511a/andes/system.py#L680-L705
17,668
cuihantao/andes
andes/system.py
Group.set_field
def set_field(self, field, idx, value): """ Set the field ``field`` of elements ``idx`` to ``value``. This function does not if the field is valid for all models. :param field: field name :param idx: element idx :param value: value of fields to set :return: None """ if isinstance(idx, (int, float, str)): idx = [idx] if isinstance(value, (int, float)): value = [value] models = [self._idx_model[i] for i in idx] for i, m, v in zip(idx, models, value): assert hasattr(self.system.__dict__[m], field) uid = self.system.__dict__[m].get_uid(idx) self.system.__dict__[m].__dict__[field][uid] = v
python
def set_field(self, field, idx, value): if isinstance(idx, (int, float, str)): idx = [idx] if isinstance(value, (int, float)): value = [value] models = [self._idx_model[i] for i in idx] for i, m, v in zip(idx, models, value): assert hasattr(self.system.__dict__[m], field) uid = self.system.__dict__[m].get_uid(idx) self.system.__dict__[m].__dict__[field][uid] = v
[ "def", "set_field", "(", "self", ",", "field", ",", "idx", ",", "value", ")", ":", "if", "isinstance", "(", "idx", ",", "(", "int", ",", "float", ",", "str", ")", ")", ":", "idx", "=", "[", "idx", "]", "if", "isinstance", "(", "value", ",", "("...
Set the field ``field`` of elements ``idx`` to ``value``. This function does not if the field is valid for all models. :param field: field name :param idx: element idx :param value: value of fields to set :return: None
[ "Set", "the", "field", "field", "of", "elements", "idx", "to", "value", "." ]
7067898d4f26ce7534e968b8486c4aa8fe3a511a
https://github.com/cuihantao/andes/blob/7067898d4f26ce7534e968b8486c4aa8fe3a511a/andes/system.py#L707-L730
17,669
webstack/webstack-django-sorting
webstack_django_sorting/util.py
get_sort_field
def get_sort_field(request): """ Retrieve field used for sorting a queryset :param request: HTTP request :return: the sorted field name, prefixed with "-" if ordering is descending """ sort_direction = request.GET.get("dir") field_name = (request.GET.get("sort") or "") if sort_direction else "" sort_sign = "-" if sort_direction == "desc" else "" result_field = "{sign}{field}".format(sign=sort_sign, field=field_name) return result_field
python
def get_sort_field(request): sort_direction = request.GET.get("dir") field_name = (request.GET.get("sort") or "") if sort_direction else "" sort_sign = "-" if sort_direction == "desc" else "" result_field = "{sign}{field}".format(sign=sort_sign, field=field_name) return result_field
[ "def", "get_sort_field", "(", "request", ")", ":", "sort_direction", "=", "request", ".", "GET", ".", "get", "(", "\"dir\"", ")", "field_name", "=", "(", "request", ".", "GET", ".", "get", "(", "\"sort\"", ")", "or", "\"\"", ")", "if", "sort_direction", ...
Retrieve field used for sorting a queryset :param request: HTTP request :return: the sorted field name, prefixed with "-" if ordering is descending
[ "Retrieve", "field", "used", "for", "sorting", "a", "queryset" ]
e78bfb890da1da6eb3cc12a7151390317ac4be99
https://github.com/webstack/webstack-django-sorting/blob/e78bfb890da1da6eb3cc12a7151390317ac4be99/webstack_django_sorting/util.py#L4-L15
17,670
iliana/python-simplemediawiki
simplemediawiki.py
MediaWiki.normalize_api_url
def normalize_api_url(self): """ Checks that the API URL used to initialize this object actually returns JSON. If it doesn't, make some educated guesses and try to find the correct URL. :returns: a valid API URL or ``None`` """ def tester(self, api_url): """ Attempts to fetch general information about the MediaWiki instance in order to test whether *api_url* will return JSON. """ data = self._fetch_http(api_url, {'action': 'query', 'meta': 'siteinfo'}) try: data_json = json.loads(data) return (data, data_json) except ValueError: return (data, None) data, data_json = tester(self, self._api_url) if data_json: return self._api_url else: # if there's an index.php in the URL, we might find the API if 'index.php' in self._api_url: test_api_url = self._api_url.split('index.php')[0] + 'api.php' test_data, test_data_json = tester(self, test_api_url) if test_data_json: self._api_url = test_api_url return self._api_url return None
python
def normalize_api_url(self): def tester(self, api_url): """ Attempts to fetch general information about the MediaWiki instance in order to test whether *api_url* will return JSON. """ data = self._fetch_http(api_url, {'action': 'query', 'meta': 'siteinfo'}) try: data_json = json.loads(data) return (data, data_json) except ValueError: return (data, None) data, data_json = tester(self, self._api_url) if data_json: return self._api_url else: # if there's an index.php in the URL, we might find the API if 'index.php' in self._api_url: test_api_url = self._api_url.split('index.php')[0] + 'api.php' test_data, test_data_json = tester(self, test_api_url) if test_data_json: self._api_url = test_api_url return self._api_url return None
[ "def", "normalize_api_url", "(", "self", ")", ":", "def", "tester", "(", "self", ",", "api_url", ")", ":", "\"\"\"\n Attempts to fetch general information about the MediaWiki instance\n in order to test whether *api_url* will return JSON.\n \"\"\"", "da...
Checks that the API URL used to initialize this object actually returns JSON. If it doesn't, make some educated guesses and try to find the correct URL. :returns: a valid API URL or ``None``
[ "Checks", "that", "the", "API", "URL", "used", "to", "initialize", "this", "object", "actually", "returns", "JSON", ".", "If", "it", "doesn", "t", "make", "some", "educated", "guesses", "and", "try", "to", "find", "the", "correct", "URL", "." ]
e531dabcb6541cc95770ce3de418cabc6d2424a1
https://github.com/iliana/python-simplemediawiki/blob/e531dabcb6541cc95770ce3de418cabc6d2424a1/simplemediawiki.py#L185-L217
17,671
LettError/ufoProcessor
Lib/ufoProcessor/__init__.py
build
def build( documentPath, outputUFOFormatVersion=3, roundGeometry=True, verbose=True, # not supported logPath=None, # not supported progressFunc=None, # not supported processRules=True, logger=None, useVarlib=False, ): """ Simple builder for UFO designspaces. """ import os, glob if os.path.isdir(documentPath): # process all *.designspace documents in this folder todo = glob.glob(os.path.join(documentPath, "*.designspace")) else: # process the todo = [documentPath] results = [] for path in todo: document = DesignSpaceProcessor(ufoVersion=outputUFOFormatVersion) document.useVarlib = useVarlib document.roundGeometry = roundGeometry document.read(path) try: r = document.generateUFO(processRules=processRules) results.append(r) except: if logger: logger.exception("ufoProcessor error") #results += document.generateUFO(processRules=processRules) reader = None return results
python
def build( documentPath, outputUFOFormatVersion=3, roundGeometry=True, verbose=True, # not supported logPath=None, # not supported progressFunc=None, # not supported processRules=True, logger=None, useVarlib=False, ): import os, glob if os.path.isdir(documentPath): # process all *.designspace documents in this folder todo = glob.glob(os.path.join(documentPath, "*.designspace")) else: # process the todo = [documentPath] results = [] for path in todo: document = DesignSpaceProcessor(ufoVersion=outputUFOFormatVersion) document.useVarlib = useVarlib document.roundGeometry = roundGeometry document.read(path) try: r = document.generateUFO(processRules=processRules) results.append(r) except: if logger: logger.exception("ufoProcessor error") #results += document.generateUFO(processRules=processRules) reader = None return results
[ "def", "build", "(", "documentPath", ",", "outputUFOFormatVersion", "=", "3", ",", "roundGeometry", "=", "True", ",", "verbose", "=", "True", ",", "# not supported", "logPath", "=", "None", ",", "# not supported", "progressFunc", "=", "None", ",", "# not support...
Simple builder for UFO designspaces.
[ "Simple", "builder", "for", "UFO", "designspaces", "." ]
7c63e1c8aba2f2ef9b12edb6560aa6c58024a89a
https://github.com/LettError/ufoProcessor/blob/7c63e1c8aba2f2ef9b12edb6560aa6c58024a89a/Lib/ufoProcessor/__init__.py#L89-L124
17,672
LettError/ufoProcessor
Lib/ufoProcessor/__init__.py
DesignSpaceProcessor.getInfoMutator
def getInfoMutator(self): """ Returns a info mutator """ if self._infoMutator: return self._infoMutator infoItems = [] for sourceDescriptor in self.sources: if sourceDescriptor.layerName is not None: continue loc = Location(sourceDescriptor.location) sourceFont = self.fonts[sourceDescriptor.name] if sourceFont is None: continue if hasattr(sourceFont.info, "toMathInfo"): infoItems.append((loc, sourceFont.info.toMathInfo())) else: infoItems.append((loc, self.mathInfoClass(sourceFont.info))) bias, self._infoMutator = self.getVariationModel(infoItems, axes=self.serializedAxes, bias=self.newDefaultLocation()) return self._infoMutator
python
def getInfoMutator(self): if self._infoMutator: return self._infoMutator infoItems = [] for sourceDescriptor in self.sources: if sourceDescriptor.layerName is not None: continue loc = Location(sourceDescriptor.location) sourceFont = self.fonts[sourceDescriptor.name] if sourceFont is None: continue if hasattr(sourceFont.info, "toMathInfo"): infoItems.append((loc, sourceFont.info.toMathInfo())) else: infoItems.append((loc, self.mathInfoClass(sourceFont.info))) bias, self._infoMutator = self.getVariationModel(infoItems, axes=self.serializedAxes, bias=self.newDefaultLocation()) return self._infoMutator
[ "def", "getInfoMutator", "(", "self", ")", ":", "if", "self", ".", "_infoMutator", ":", "return", "self", ".", "_infoMutator", "infoItems", "=", "[", "]", "for", "sourceDescriptor", "in", "self", ".", "sources", ":", "if", "sourceDescriptor", ".", "layerName...
Returns a info mutator
[ "Returns", "a", "info", "mutator" ]
7c63e1c8aba2f2ef9b12edb6560aa6c58024a89a
https://github.com/LettError/ufoProcessor/blob/7c63e1c8aba2f2ef9b12edb6560aa6c58024a89a/Lib/ufoProcessor/__init__.py#L346-L363
17,673
LettError/ufoProcessor
Lib/ufoProcessor/__init__.py
DesignSpaceProcessor.collectMastersForGlyph
def collectMastersForGlyph(self, glyphName, decomposeComponents=False): """ Return a glyph mutator.defaultLoc decomposeComponents = True causes the source glyphs to be decomposed first before building the mutator. That gives you instances that do not depend on a complete font. If you're calculating previews for instance. XXX check glyphs in layers """ items = [] empties = [] foundEmpty = False for sourceDescriptor in self.sources: if not os.path.exists(sourceDescriptor.path): #kthxbai p = "\tMissing UFO at %s" % sourceDescriptor.path if p not in self.problems: self.problems.append(p) continue if glyphName in sourceDescriptor.mutedGlyphNames: continue thisIsDefault = self.default == sourceDescriptor ignoreMaster, filteredLocation = self.filterThisLocation(sourceDescriptor.location, self.mutedAxisNames) if ignoreMaster: continue f = self.fonts.get(sourceDescriptor.name) if f is None: continue loc = Location(sourceDescriptor.location) sourceLayer = f if not glyphName in f: # log this> continue layerName = getDefaultLayerName(f) sourceGlyphObject = None # handle source layers if sourceDescriptor.layerName is not None: # start looking for a layer # Do not bother for mutatorMath designspaces layerName = sourceDescriptor.layerName sourceLayer = getLayer(f, sourceDescriptor.layerName) if sourceLayer is None: continue if glyphName not in sourceLayer: # start looking for a glyph # this might be a support in a sparse layer # so we're skipping! continue # still have to check if the sourcelayer glyph is empty if not glyphName in sourceLayer: continue else: sourceGlyphObject = sourceLayer[glyphName] if checkGlyphIsEmpty(sourceGlyphObject, allowWhiteSpace=True): foundEmpty = True #sourceGlyphObject = None #continue if decomposeComponents: # what about decomposing glyphs in a partial font? 
temp = self.glyphClass() p = temp.getPointPen() dpp = DecomposePointPen(sourceLayer, p) sourceGlyphObject.drawPoints(dpp) temp.width = sourceGlyphObject.width temp.name = sourceGlyphObject.name processThis = temp else: processThis = sourceGlyphObject sourceInfo = dict(source=f.path, glyphName=glyphName, layerName=layerName, location=filteredLocation, # sourceDescriptor.location, sourceName=sourceDescriptor.name, ) if hasattr(processThis, "toMathGlyph"): processThis = processThis.toMathGlyph() else: processThis = self.mathGlyphClass(processThis) items.append((loc, processThis, sourceInfo)) empties.append((thisIsDefault, foundEmpty)) # check the empties: # if the default glyph is empty, then all must be empty # if the default glyph is not empty then none can be empty checkedItems = [] emptiesAllowed = False # first check if the default is empty. # remember that the sources can be in any order for i, p in enumerate(empties): isDefault, isEmpty = p if isDefault and isEmpty: emptiesAllowed = True # now we know what to look for if not emptiesAllowed: for i, p in enumerate(empties): isDefault, isEmpty = p if not isEmpty: checkedItems.append(items[i]) else: for i, p in enumerate(empties): isDefault, isEmpty = p if isEmpty: checkedItems.append(items[i]) return checkedItems
python
def collectMastersForGlyph(self, glyphName, decomposeComponents=False): items = [] empties = [] foundEmpty = False for sourceDescriptor in self.sources: if not os.path.exists(sourceDescriptor.path): #kthxbai p = "\tMissing UFO at %s" % sourceDescriptor.path if p not in self.problems: self.problems.append(p) continue if glyphName in sourceDescriptor.mutedGlyphNames: continue thisIsDefault = self.default == sourceDescriptor ignoreMaster, filteredLocation = self.filterThisLocation(sourceDescriptor.location, self.mutedAxisNames) if ignoreMaster: continue f = self.fonts.get(sourceDescriptor.name) if f is None: continue loc = Location(sourceDescriptor.location) sourceLayer = f if not glyphName in f: # log this> continue layerName = getDefaultLayerName(f) sourceGlyphObject = None # handle source layers if sourceDescriptor.layerName is not None: # start looking for a layer # Do not bother for mutatorMath designspaces layerName = sourceDescriptor.layerName sourceLayer = getLayer(f, sourceDescriptor.layerName) if sourceLayer is None: continue if glyphName not in sourceLayer: # start looking for a glyph # this might be a support in a sparse layer # so we're skipping! continue # still have to check if the sourcelayer glyph is empty if not glyphName in sourceLayer: continue else: sourceGlyphObject = sourceLayer[glyphName] if checkGlyphIsEmpty(sourceGlyphObject, allowWhiteSpace=True): foundEmpty = True #sourceGlyphObject = None #continue if decomposeComponents: # what about decomposing glyphs in a partial font? 
temp = self.glyphClass() p = temp.getPointPen() dpp = DecomposePointPen(sourceLayer, p) sourceGlyphObject.drawPoints(dpp) temp.width = sourceGlyphObject.width temp.name = sourceGlyphObject.name processThis = temp else: processThis = sourceGlyphObject sourceInfo = dict(source=f.path, glyphName=glyphName, layerName=layerName, location=filteredLocation, # sourceDescriptor.location, sourceName=sourceDescriptor.name, ) if hasattr(processThis, "toMathGlyph"): processThis = processThis.toMathGlyph() else: processThis = self.mathGlyphClass(processThis) items.append((loc, processThis, sourceInfo)) empties.append((thisIsDefault, foundEmpty)) # check the empties: # if the default glyph is empty, then all must be empty # if the default glyph is not empty then none can be empty checkedItems = [] emptiesAllowed = False # first check if the default is empty. # remember that the sources can be in any order for i, p in enumerate(empties): isDefault, isEmpty = p if isDefault and isEmpty: emptiesAllowed = True # now we know what to look for if not emptiesAllowed: for i, p in enumerate(empties): isDefault, isEmpty = p if not isEmpty: checkedItems.append(items[i]) else: for i, p in enumerate(empties): isDefault, isEmpty = p if isEmpty: checkedItems.append(items[i]) return checkedItems
[ "def", "collectMastersForGlyph", "(", "self", ",", "glyphName", ",", "decomposeComponents", "=", "False", ")", ":", "items", "=", "[", "]", "empties", "=", "[", "]", "foundEmpty", "=", "False", "for", "sourceDescriptor", "in", "self", ".", "sources", ":", ...
Return a glyph mutator.defaultLoc decomposeComponents = True causes the source glyphs to be decomposed first before building the mutator. That gives you instances that do not depend on a complete font. If you're calculating previews for instance. XXX check glyphs in layers
[ "Return", "a", "glyph", "mutator", ".", "defaultLoc", "decomposeComponents", "=", "True", "causes", "the", "source", "glyphs", "to", "be", "decomposed", "first", "before", "building", "the", "mutator", ".", "That", "gives", "you", "instances", "that", "do", "n...
7c63e1c8aba2f2ef9b12edb6560aa6c58024a89a
https://github.com/LettError/ufoProcessor/blob/7c63e1c8aba2f2ef9b12edb6560aa6c58024a89a/Lib/ufoProcessor/__init__.py#L454-L553
17,674
LettError/ufoProcessor
Lib/ufoProcessor/emptyPen.py
checkGlyphIsEmpty
def checkGlyphIsEmpty(glyph, allowWhiteSpace=True): """ This will establish if the glyph is completely empty by drawing the glyph with an EmptyPen. Additionally, the unicode of the glyph is checked against a list of known unicode whitespace characters. This makes it possible to filter out glyphs that have a valid reason to be empty and those that can be ignored. """ whiteSpace = [ 0x9, # horizontal tab 0xa, # line feed 0xb, # vertical tab 0xc, # form feed 0xd, # carriage return 0x20, # space 0x85, # next line 0xa0, # nobreak space 0x1680, # ogham space mark 0x180e, # mongolian vowel separator 0x2000, # en quad 0x2001, # em quad 0x2003, # en space 0x2004, # three per em space 0x2005, # four per em space 0x2006, # six per em space 0x2007, # figure space 0x2008, # punctuation space 0x2009, # thin space 0x200a, # hair space 0x2028, # line separator 0x2029, # paragraph separator 0x202f, # narrow no break space 0x205f, # medium mathematical space 0x3000, # ideographic space ] emptyPen = EmptyPen() glyph.drawPoints(emptyPen) if emptyPen.isEmpty(): # we're empty? if glyph.unicode in whiteSpace and allowWhiteSpace: # are we allowed to be? return False return True return False
python
def checkGlyphIsEmpty(glyph, allowWhiteSpace=True): whiteSpace = [ 0x9, # horizontal tab 0xa, # line feed 0xb, # vertical tab 0xc, # form feed 0xd, # carriage return 0x20, # space 0x85, # next line 0xa0, # nobreak space 0x1680, # ogham space mark 0x180e, # mongolian vowel separator 0x2000, # en quad 0x2001, # em quad 0x2003, # en space 0x2004, # three per em space 0x2005, # four per em space 0x2006, # six per em space 0x2007, # figure space 0x2008, # punctuation space 0x2009, # thin space 0x200a, # hair space 0x2028, # line separator 0x2029, # paragraph separator 0x202f, # narrow no break space 0x205f, # medium mathematical space 0x3000, # ideographic space ] emptyPen = EmptyPen() glyph.drawPoints(emptyPen) if emptyPen.isEmpty(): # we're empty? if glyph.unicode in whiteSpace and allowWhiteSpace: # are we allowed to be? return False return True return False
[ "def", "checkGlyphIsEmpty", "(", "glyph", ",", "allowWhiteSpace", "=", "True", ")", ":", "whiteSpace", "=", "[", "0x9", ",", "# horizontal tab", "0xa", ",", "# line feed", "0xb", ",", "# vertical tab", "0xc", ",", "# form feed", "0xd", ",", "# carriage return", ...
This will establish if the glyph is completely empty by drawing the glyph with an EmptyPen. Additionally, the unicode of the glyph is checked against a list of known unicode whitespace characters. This makes it possible to filter out glyphs that have a valid reason to be empty and those that can be ignored.
[ "This", "will", "establish", "if", "the", "glyph", "is", "completely", "empty", "by", "drawing", "the", "glyph", "with", "an", "EmptyPen", ".", "Additionally", "the", "unicode", "of", "the", "glyph", "is", "checked", "against", "a", "list", "of", "known", ...
7c63e1c8aba2f2ef9b12edb6560aa6c58024a89a
https://github.com/LettError/ufoProcessor/blob/7c63e1c8aba2f2ef9b12edb6560aa6c58024a89a/Lib/ufoProcessor/emptyPen.py#L34-L75
17,675
signetlabdei/sem
sem/runner.py
SimulationRunner.configure_and_build
def configure_and_build(self, show_progress=True, optimized=True, skip_configuration=False): """ Configure and build the ns-3 code. Args: show_progress (bool): whether or not to display a progress bar during compilation. optimized (bool): whether to use an optimized build. If False, use a standard ./waf configure. skip_configuration (bool): whether to skip the configuration step, and only perform compilation. """ # Only configure if necessary if not skip_configuration: configuration_command = ['python', 'waf', 'configure', '--enable-examples', '--disable-gtk', '--disable-python'] if optimized: configuration_command += ['--build-profile=optimized', '--out=build/optimized'] # Check whether path points to a valid installation subprocess.call(configuration_command, cwd=self.path, stdout=subprocess.PIPE, stderr=subprocess.PIPE) # Build ns-3 build_process = subprocess.Popen(['python', 'waf', 'build'], cwd=self.path, stdout=subprocess.PIPE, stderr=subprocess.PIPE) # Show a progress bar if show_progress: line_iterator = self.get_build_output(build_process) pbar = None try: [initial, total] = next(line_iterator) pbar = tqdm(line_iterator, initial=initial, total=total, unit='file', desc='Building ns-3', smoothing=0) for current, total in pbar: pbar.n = current except (StopIteration): if pbar is not None: pbar.n = pbar.total else: # Wait for the build to finish anyway build_process.communicate()
python
def configure_and_build(self, show_progress=True, optimized=True, skip_configuration=False): # Only configure if necessary if not skip_configuration: configuration_command = ['python', 'waf', 'configure', '--enable-examples', '--disable-gtk', '--disable-python'] if optimized: configuration_command += ['--build-profile=optimized', '--out=build/optimized'] # Check whether path points to a valid installation subprocess.call(configuration_command, cwd=self.path, stdout=subprocess.PIPE, stderr=subprocess.PIPE) # Build ns-3 build_process = subprocess.Popen(['python', 'waf', 'build'], cwd=self.path, stdout=subprocess.PIPE, stderr=subprocess.PIPE) # Show a progress bar if show_progress: line_iterator = self.get_build_output(build_process) pbar = None try: [initial, total] = next(line_iterator) pbar = tqdm(line_iterator, initial=initial, total=total, unit='file', desc='Building ns-3', smoothing=0) for current, total in pbar: pbar.n = current except (StopIteration): if pbar is not None: pbar.n = pbar.total else: # Wait for the build to finish anyway build_process.communicate()
[ "def", "configure_and_build", "(", "self", ",", "show_progress", "=", "True", ",", "optimized", "=", "True", ",", "skip_configuration", "=", "False", ")", ":", "# Only configure if necessary", "if", "not", "skip_configuration", ":", "configuration_command", "=", "["...
Configure and build the ns-3 code. Args: show_progress (bool): whether or not to display a progress bar during compilation. optimized (bool): whether to use an optimized build. If False, use a standard ./waf configure. skip_configuration (bool): whether to skip the configuration step, and only perform compilation.
[ "Configure", "and", "build", "the", "ns", "-", "3", "code", "." ]
5077dd7a6d15644a18790bb6fde320e905f0fef0
https://github.com/signetlabdei/sem/blob/5077dd7a6d15644a18790bb6fde320e905f0fef0/sem/runner.py#L108-L154
17,676
signetlabdei/sem
sem/runner.py
SimulationRunner.get_build_output
def get_build_output(self, process): """ Parse the output of the ns-3 build process to extract the information that is needed to draw the progress bar. Args: process: the subprocess instance to listen to. """ while True: output = process.stdout.readline() if output == b'' and process.poll() is not None: if process.returncode > 0: raise Exception("Compilation ended with an error" ".\nSTDERR\n%s\nSTDOUT\n%s" % (process.stderr.read(), process.stdout.read())) return if output: # Parse the output to get current and total tasks # This assumes the progress displayed by waf is in the form # [current/total] matches = re.search(r'\[\s*(\d+?)/(\d+)\].*', output.strip().decode('utf-8')) if matches is not None: yield [int(matches.group(1)), int(matches.group(2))]
python
def get_build_output(self, process): while True: output = process.stdout.readline() if output == b'' and process.poll() is not None: if process.returncode > 0: raise Exception("Compilation ended with an error" ".\nSTDERR\n%s\nSTDOUT\n%s" % (process.stderr.read(), process.stdout.read())) return if output: # Parse the output to get current and total tasks # This assumes the progress displayed by waf is in the form # [current/total] matches = re.search(r'\[\s*(\d+?)/(\d+)\].*', output.strip().decode('utf-8')) if matches is not None: yield [int(matches.group(1)), int(matches.group(2))]
[ "def", "get_build_output", "(", "self", ",", "process", ")", ":", "while", "True", ":", "output", "=", "process", ".", "stdout", ".", "readline", "(", ")", "if", "output", "==", "b''", "and", "process", ".", "poll", "(", ")", "is", "not", "None", ":"...
Parse the output of the ns-3 build process to extract the information that is needed to draw the progress bar. Args: process: the subprocess instance to listen to.
[ "Parse", "the", "output", "of", "the", "ns", "-", "3", "build", "process", "to", "extract", "the", "information", "that", "is", "needed", "to", "draw", "the", "progress", "bar", "." ]
5077dd7a6d15644a18790bb6fde320e905f0fef0
https://github.com/signetlabdei/sem/blob/5077dd7a6d15644a18790bb6fde320e905f0fef0/sem/runner.py#L156-L181
17,677
signetlabdei/sem
sem/runner.py
SimulationRunner.run_simulations
def run_simulations(self, parameter_list, data_folder): """ Run several simulations using a certain combination of parameters. Yields results as simulations are completed. Args: parameter_list (list): list of parameter combinations to simulate. data_folder (str): folder in which to save subfolders containing simulation output. """ for idx, parameter in enumerate(parameter_list): current_result = { 'params': {}, 'meta': {} } current_result['params'].update(parameter) command = [self.script_executable] + ['--%s=%s' % (param, value) for param, value in parameter.items()] # Run from dedicated temporary folder current_result['meta']['id'] = str(uuid.uuid4()) temp_dir = os.path.join(data_folder, current_result['meta']['id']) os.makedirs(temp_dir) start = time.time() # Time execution stdout_file_path = os.path.join(temp_dir, 'stdout') stderr_file_path = os.path.join(temp_dir, 'stderr') with open(stdout_file_path, 'w') as stdout_file, open( stderr_file_path, 'w') as stderr_file: return_code = subprocess.call(command, cwd=temp_dir, env=self.environment, stdout=stdout_file, stderr=stderr_file) end = time.time() # Time execution if return_code > 0: complete_command = [self.script] complete_command.extend(command[1:]) complete_command = "python waf --run \"%s\"" % ( ' '.join(complete_command)) with open(stdout_file_path, 'r') as stdout_file, open( stderr_file_path, 'r') as stderr_file: raise Exception(('Simulation exited with an error.\n' 'Params: %s\n' '\nStderr: %s\n' 'Stdout: %s\n' 'Use this command to reproduce:\n' '%s' % (parameter, stderr_file.read(), stdout_file.read(), complete_command))) current_result['meta']['elapsed_time'] = end-start yield current_result
python
def run_simulations(self, parameter_list, data_folder): for idx, parameter in enumerate(parameter_list): current_result = { 'params': {}, 'meta': {} } current_result['params'].update(parameter) command = [self.script_executable] + ['--%s=%s' % (param, value) for param, value in parameter.items()] # Run from dedicated temporary folder current_result['meta']['id'] = str(uuid.uuid4()) temp_dir = os.path.join(data_folder, current_result['meta']['id']) os.makedirs(temp_dir) start = time.time() # Time execution stdout_file_path = os.path.join(temp_dir, 'stdout') stderr_file_path = os.path.join(temp_dir, 'stderr') with open(stdout_file_path, 'w') as stdout_file, open( stderr_file_path, 'w') as stderr_file: return_code = subprocess.call(command, cwd=temp_dir, env=self.environment, stdout=stdout_file, stderr=stderr_file) end = time.time() # Time execution if return_code > 0: complete_command = [self.script] complete_command.extend(command[1:]) complete_command = "python waf --run \"%s\"" % ( ' '.join(complete_command)) with open(stdout_file_path, 'r') as stdout_file, open( stderr_file_path, 'r') as stderr_file: raise Exception(('Simulation exited with an error.\n' 'Params: %s\n' '\nStderr: %s\n' 'Stdout: %s\n' 'Use this command to reproduce:\n' '%s' % (parameter, stderr_file.read(), stdout_file.read(), complete_command))) current_result['meta']['elapsed_time'] = end-start yield current_result
[ "def", "run_simulations", "(", "self", ",", "parameter_list", ",", "data_folder", ")", ":", "for", "idx", ",", "parameter", "in", "enumerate", "(", "parameter_list", ")", ":", "current_result", "=", "{", "'params'", ":", "{", "}", ",", "'meta'", ":", "{", ...
Run several simulations using a certain combination of parameters. Yields results as simulations are completed. Args: parameter_list (list): list of parameter combinations to simulate. data_folder (str): folder in which to save subfolders containing simulation output.
[ "Run", "several", "simulations", "using", "a", "certain", "combination", "of", "parameters", "." ]
5077dd7a6d15644a18790bb6fde320e905f0fef0
https://github.com/signetlabdei/sem/blob/5077dd7a6d15644a18790bb6fde320e905f0fef0/sem/runner.py#L221-L280
17,678
signetlabdei/sem
sem/utils.py
list_param_combinations
def list_param_combinations(param_ranges): """ Create a list of all parameter combinations from a dictionary specifying desired parameter values as lists. Example: >>> param_ranges = {'a': [1], 'b': [2, 3]} >>> list_param_combinations(param_ranges) [{'a': 1, 'b': 2}, {'a': 1, 'b': 3}] Additionally, this function is robust in case values are not lists: >>> param_ranges = {'a': 1, 'b': [2, 3]} >>> list_param_combinations(param_ranges) [{'a': 1, 'b': 2}, {'a': 1, 'b': 3}] """ # Convert non-list values to single-element lists # This is required to make sure product work. for key in param_ranges: if not isinstance(param_ranges[key], list): param_ranges[key] = [param_ranges[key]] return [dict(zip(param_ranges, v)) for v in product(*param_ranges.values())]
python
def list_param_combinations(param_ranges): # Convert non-list values to single-element lists # This is required to make sure product work. for key in param_ranges: if not isinstance(param_ranges[key], list): param_ranges[key] = [param_ranges[key]] return [dict(zip(param_ranges, v)) for v in product(*param_ranges.values())]
[ "def", "list_param_combinations", "(", "param_ranges", ")", ":", "# Convert non-list values to single-element lists", "# This is required to make sure product work.", "for", "key", "in", "param_ranges", ":", "if", "not", "isinstance", "(", "param_ranges", "[", "key", "]", "...
Create a list of all parameter combinations from a dictionary specifying desired parameter values as lists. Example: >>> param_ranges = {'a': [1], 'b': [2, 3]} >>> list_param_combinations(param_ranges) [{'a': 1, 'b': 2}, {'a': 1, 'b': 3}] Additionally, this function is robust in case values are not lists: >>> param_ranges = {'a': 1, 'b': [2, 3]} >>> list_param_combinations(param_ranges) [{'a': 1, 'b': 2}, {'a': 1, 'b': 3}]
[ "Create", "a", "list", "of", "all", "parameter", "combinations", "from", "a", "dictionary", "specifying", "desired", "parameter", "values", "as", "lists", "." ]
5077dd7a6d15644a18790bb6fde320e905f0fef0
https://github.com/signetlabdei/sem/blob/5077dd7a6d15644a18790bb6fde320e905f0fef0/sem/utils.py#L13-L38
17,679
signetlabdei/sem
sem/utils.py
get_command_from_result
def get_command_from_result(script, result, debug=False): """ Return the command that is needed to obtain a certain result. Args: params (dict): Dictionary containing parameter: value pairs. debug (bool): Whether the command should include the debugging template. """ if not debug: command = "python waf --run \"" + script + " " + " ".join( ['--%s=%s' % (param, value) for param, value in result['params'].items()]) + "\"" else: command = "python waf --run " + script + " --command-template=\"" +\ "gdb --args %s " + " ".join(['--%s=%s' % (param, value) for param, value in result['params'].items()]) + "\"" return command
python
def get_command_from_result(script, result, debug=False): if not debug: command = "python waf --run \"" + script + " " + " ".join( ['--%s=%s' % (param, value) for param, value in result['params'].items()]) + "\"" else: command = "python waf --run " + script + " --command-template=\"" +\ "gdb --args %s " + " ".join(['--%s=%s' % (param, value) for param, value in result['params'].items()]) + "\"" return command
[ "def", "get_command_from_result", "(", "script", ",", "result", ",", "debug", "=", "False", ")", ":", "if", "not", "debug", ":", "command", "=", "\"python waf --run \\\"\"", "+", "script", "+", "\" \"", "+", "\" \"", ".", "join", "(", "[", "'--%s=%s'", "%"...
Return the command that is needed to obtain a certain result. Args: params (dict): Dictionary containing parameter: value pairs. debug (bool): Whether the command should include the debugging template.
[ "Return", "the", "command", "that", "is", "needed", "to", "obtain", "a", "certain", "result", "." ]
5077dd7a6d15644a18790bb6fde320e905f0fef0
https://github.com/signetlabdei/sem/blob/5077dd7a6d15644a18790bb6fde320e905f0fef0/sem/utils.py#L41-L59
17,680
signetlabdei/sem
sem/utils.py
automatic_parser
def automatic_parser(result, dtypes={}, converters={}): """ Try and automatically convert strings formatted as tables into nested list structures. Under the hood, this function essentially applies the genfromtxt function to all files in the output, and passes it the additional kwargs. Args: result (dict): the result to parse. dtypes (dict): a dictionary containing the dtype specification to perform parsing for each available filename. See the numpy genfromtxt documentation for more details on how to format these. """ np.seterr(all='raise') parsed = {} for filename, contents in result['output'].items(): if dtypes.get(filename) is None: dtypes[filename] = None if converters.get(filename) is None: converters[filename] = None with warnings.catch_warnings(): warnings.simplefilter("ignore") parsed[filename] = np.genfromtxt(io.StringIO(contents), dtype=dtypes[filename], converters=converters[filename] ).tolist() return parsed
python
def automatic_parser(result, dtypes={}, converters={}): np.seterr(all='raise') parsed = {} for filename, contents in result['output'].items(): if dtypes.get(filename) is None: dtypes[filename] = None if converters.get(filename) is None: converters[filename] = None with warnings.catch_warnings(): warnings.simplefilter("ignore") parsed[filename] = np.genfromtxt(io.StringIO(contents), dtype=dtypes[filename], converters=converters[filename] ).tolist() return parsed
[ "def", "automatic_parser", "(", "result", ",", "dtypes", "=", "{", "}", ",", "converters", "=", "{", "}", ")", ":", "np", ".", "seterr", "(", "all", "=", "'raise'", ")", "parsed", "=", "{", "}", "for", "filename", ",", "contents", "in", "result", "...
Try and automatically convert strings formatted as tables into nested list structures. Under the hood, this function essentially applies the genfromtxt function to all files in the output, and passes it the additional kwargs. Args: result (dict): the result to parse. dtypes (dict): a dictionary containing the dtype specification to perform parsing for each available filename. See the numpy genfromtxt documentation for more details on how to format these.
[ "Try", "and", "automatically", "convert", "strings", "formatted", "as", "tables", "into", "nested", "list", "structures", "." ]
5077dd7a6d15644a18790bb6fde320e905f0fef0
https://github.com/signetlabdei/sem/blob/5077dd7a6d15644a18790bb6fde320e905f0fef0/sem/utils.py#L69-L99
17,681
signetlabdei/sem
sem/manager.py
CampaignManager.new
def new(cls, ns_path, script, campaign_dir, runner_type='Auto', overwrite=False, optimized=True, check_repo=True): """ Create a new campaign from an ns-3 installation and a campaign directory. This method will create a DatabaseManager, which will install a database in the specified campaign_dir. If a database is already available at the ns_path described in the specified campaign_dir and its configuration matches config, this instance is used instead. If the overwrite argument is set to True instead, the specified directory is wiped and a new campaign is created in its place. Furthermore, this method will initialize a SimulationRunner, of type specified by the runner_type parameter, which will be locked on the ns-3 installation at ns_path and set up to run the desired script. Finally, note that creation of a campaign requires a git repository to be initialized at the specified ns_path. This will allow SEM to save the commit at which the simulations are run, enforce reproducibility and avoid mixing results coming from different versions of ns-3 and its libraries. Args: ns_path (str): path to the ns-3 installation to employ in this campaign. script (str): ns-3 script that will be executed to run simulations. campaign_dir (str): path to the directory in which to save the simulation campaign database. runner_type (str): implementation of the SimulationRunner to use. Value can be: SimulationRunner (for running sequential simulations locally), ParallelRunner (for running parallel simulations locally), GridRunner (for running simulations using a DRMAA-compatible parallel task scheduler). Use Auto to automatically pick the best runner. overwrite (bool): whether to overwrite already existing campaign_dir folders. This deletes the directory if and only if it only contains files that were detected to be created by sem. optimized (bool): whether to configure the runner to employ an optimized ns-3 build. 
""" # Convert paths to be absolute ns_path = os.path.abspath(ns_path) campaign_dir = os.path.abspath(campaign_dir) # Verify if the specified campaign is already available if Path(campaign_dir).exists() and not overwrite: # Try loading manager = CampaignManager.load(campaign_dir, ns_path, runner_type=runner_type, optimized=optimized, check_repo=check_repo) if manager.db.get_script() == script: return manager else: del manager # Initialize runner runner = CampaignManager.create_runner(ns_path, script, runner_type=runner_type, optimized=optimized) # Get list of parameters to save in the DB params = runner.get_available_parameters() # Get current commit commit = "" if check_repo: from git import Repo, exc commit = Repo(ns_path).head.commit.hexsha # Create a database manager from the configuration db = DatabaseManager.new(script=script, params=params, commit=commit, campaign_dir=campaign_dir, overwrite=overwrite) return cls(db, runner, check_repo)
python
def new(cls, ns_path, script, campaign_dir, runner_type='Auto', overwrite=False, optimized=True, check_repo=True): # Convert paths to be absolute ns_path = os.path.abspath(ns_path) campaign_dir = os.path.abspath(campaign_dir) # Verify if the specified campaign is already available if Path(campaign_dir).exists() and not overwrite: # Try loading manager = CampaignManager.load(campaign_dir, ns_path, runner_type=runner_type, optimized=optimized, check_repo=check_repo) if manager.db.get_script() == script: return manager else: del manager # Initialize runner runner = CampaignManager.create_runner(ns_path, script, runner_type=runner_type, optimized=optimized) # Get list of parameters to save in the DB params = runner.get_available_parameters() # Get current commit commit = "" if check_repo: from git import Repo, exc commit = Repo(ns_path).head.commit.hexsha # Create a database manager from the configuration db = DatabaseManager.new(script=script, params=params, commit=commit, campaign_dir=campaign_dir, overwrite=overwrite) return cls(db, runner, check_repo)
[ "def", "new", "(", "cls", ",", "ns_path", ",", "script", ",", "campaign_dir", ",", "runner_type", "=", "'Auto'", ",", "overwrite", "=", "False", ",", "optimized", "=", "True", ",", "check_repo", "=", "True", ")", ":", "# Convert paths to be absolute", "ns_pa...
Create a new campaign from an ns-3 installation and a campaign directory. This method will create a DatabaseManager, which will install a database in the specified campaign_dir. If a database is already available at the ns_path described in the specified campaign_dir and its configuration matches config, this instance is used instead. If the overwrite argument is set to True instead, the specified directory is wiped and a new campaign is created in its place. Furthermore, this method will initialize a SimulationRunner, of type specified by the runner_type parameter, which will be locked on the ns-3 installation at ns_path and set up to run the desired script. Finally, note that creation of a campaign requires a git repository to be initialized at the specified ns_path. This will allow SEM to save the commit at which the simulations are run, enforce reproducibility and avoid mixing results coming from different versions of ns-3 and its libraries. Args: ns_path (str): path to the ns-3 installation to employ in this campaign. script (str): ns-3 script that will be executed to run simulations. campaign_dir (str): path to the directory in which to save the simulation campaign database. runner_type (str): implementation of the SimulationRunner to use. Value can be: SimulationRunner (for running sequential simulations locally), ParallelRunner (for running parallel simulations locally), GridRunner (for running simulations using a DRMAA-compatible parallel task scheduler). Use Auto to automatically pick the best runner. overwrite (bool): whether to overwrite already existing campaign_dir folders. This deletes the directory if and only if it only contains files that were detected to be created by sem. optimized (bool): whether to configure the runner to employ an optimized ns-3 build.
[ "Create", "a", "new", "campaign", "from", "an", "ns", "-", "3", "installation", "and", "a", "campaign", "directory", "." ]
5077dd7a6d15644a18790bb6fde320e905f0fef0
https://github.com/signetlabdei/sem/blob/5077dd7a6d15644a18790bb6fde320e905f0fef0/sem/manager.py#L57-L136
17,682
signetlabdei/sem
sem/manager.py
CampaignManager.load
def load(cls, campaign_dir, ns_path=None, runner_type='Auto', optimized=True, check_repo=True): """ Load an existing simulation campaign. Note that specifying an ns-3 installation is not compulsory when using this method: existing results will be available, but in order to run additional simulations it will be necessary to specify a SimulationRunner object, and assign it to the CampaignManager. Args: campaign_dir (str): path to the directory in which to save the simulation campaign database. ns_path (str): path to the ns-3 installation to employ in this campaign. runner_type (str): implementation of the SimulationRunner to use. Value can be: SimulationRunner (for running sequential simulations locally), ParallelRunner (for running parallel simulations locally), GridRunner (for running simulations using a DRMAA-compatible parallel task scheduler). optimized (bool): whether to configure the runner to employ an optimized ns-3 build. """ # Convert paths to be absolute if ns_path is not None: ns_path = os.path.abspath(ns_path) campaign_dir = os.path.abspath(campaign_dir) # Read the existing configuration into the new DatabaseManager db = DatabaseManager.load(campaign_dir) script = db.get_script() runner = None if ns_path is not None: runner = CampaignManager.create_runner(ns_path, script, runner_type, optimized) return cls(db, runner, check_repo)
python
def load(cls, campaign_dir, ns_path=None, runner_type='Auto', optimized=True, check_repo=True): # Convert paths to be absolute if ns_path is not None: ns_path = os.path.abspath(ns_path) campaign_dir = os.path.abspath(campaign_dir) # Read the existing configuration into the new DatabaseManager db = DatabaseManager.load(campaign_dir) script = db.get_script() runner = None if ns_path is not None: runner = CampaignManager.create_runner(ns_path, script, runner_type, optimized) return cls(db, runner, check_repo)
[ "def", "load", "(", "cls", ",", "campaign_dir", ",", "ns_path", "=", "None", ",", "runner_type", "=", "'Auto'", ",", "optimized", "=", "True", ",", "check_repo", "=", "True", ")", ":", "# Convert paths to be absolute", "if", "ns_path", "is", "not", "None", ...
Load an existing simulation campaign. Note that specifying an ns-3 installation is not compulsory when using this method: existing results will be available, but in order to run additional simulations it will be necessary to specify a SimulationRunner object, and assign it to the CampaignManager. Args: campaign_dir (str): path to the directory in which to save the simulation campaign database. ns_path (str): path to the ns-3 installation to employ in this campaign. runner_type (str): implementation of the SimulationRunner to use. Value can be: SimulationRunner (for running sequential simulations locally), ParallelRunner (for running parallel simulations locally), GridRunner (for running simulations using a DRMAA-compatible parallel task scheduler). optimized (bool): whether to configure the runner to employ an optimized ns-3 build.
[ "Load", "an", "existing", "simulation", "campaign", "." ]
5077dd7a6d15644a18790bb6fde320e905f0fef0
https://github.com/signetlabdei/sem/blob/5077dd7a6d15644a18790bb6fde320e905f0fef0/sem/manager.py#L139-L176
17,683
signetlabdei/sem
sem/manager.py
CampaignManager.create_runner
def create_runner(ns_path, script, runner_type='Auto', optimized=True): """ Create a SimulationRunner from a string containing the desired class implementation, and return it. Args: ns_path (str): path to the ns-3 installation to employ in this SimulationRunner. script (str): ns-3 script that will be executed to run simulations. runner_type (str): implementation of the SimulationRunner to use. Value can be: SimulationRunner (for running sequential simulations locally), ParallelRunner (for running parallel simulations locally), GridRunner (for running simulations using a DRMAA-compatible parallel task scheduler). If Auto, automatically pick the best available runner (GridRunner if DRMAA is available, ParallelRunner otherwise). optimized (bool): whether to configure the runner to employ an optimized ns-3 build. """ # locals() contains a dictionary pairing class names with class # objects: we can create the object using the desired class starting # from its name. if runner_type == 'Auto' and DRMAA_AVAILABLE: runner_type = 'GridRunner' elif runner_type == 'Auto': runner_type = 'ParallelRunner' return locals().get(runner_type, globals().get(runner_type))( ns_path, script, optimized=optimized)
python
def create_runner(ns_path, script, runner_type='Auto', optimized=True): # locals() contains a dictionary pairing class names with class # objects: we can create the object using the desired class starting # from its name. if runner_type == 'Auto' and DRMAA_AVAILABLE: runner_type = 'GridRunner' elif runner_type == 'Auto': runner_type = 'ParallelRunner' return locals().get(runner_type, globals().get(runner_type))( ns_path, script, optimized=optimized)
[ "def", "create_runner", "(", "ns_path", ",", "script", ",", "runner_type", "=", "'Auto'", ",", "optimized", "=", "True", ")", ":", "# locals() contains a dictionary pairing class names with class", "# objects: we can create the object using the desired class starting", "# from it...
Create a SimulationRunner from a string containing the desired class implementation, and return it. Args: ns_path (str): path to the ns-3 installation to employ in this SimulationRunner. script (str): ns-3 script that will be executed to run simulations. runner_type (str): implementation of the SimulationRunner to use. Value can be: SimulationRunner (for running sequential simulations locally), ParallelRunner (for running parallel simulations locally), GridRunner (for running simulations using a DRMAA-compatible parallel task scheduler). If Auto, automatically pick the best available runner (GridRunner if DRMAA is available, ParallelRunner otherwise). optimized (bool): whether to configure the runner to employ an optimized ns-3 build.
[ "Create", "a", "SimulationRunner", "from", "a", "string", "containing", "the", "desired", "class", "implementation", "and", "return", "it", "." ]
5077dd7a6d15644a18790bb6fde320e905f0fef0
https://github.com/signetlabdei/sem/blob/5077dd7a6d15644a18790bb6fde320e905f0fef0/sem/manager.py#L178-L208
17,684
signetlabdei/sem
sem/manager.py
CampaignManager.run_simulations
def run_simulations(self, param_list, show_progress=True):
    """
    Run several simulations specified by a list of parameter
    combinations.

    Note: this function does not verify whether we already have the
    required simulations in the database - it just runs all the
    parameter combinations that are specified in the list.

    Args:
        param_list (list): list of parameter combinations to execute.
            Items of this list are dictionaries, with one key for each
            parameter, and a value specifying the parameter value
            (which can be either a string or a number).
        show_progress (bool): whether or not to show a progress bar
            with percentage and expected remaining time.

    Raises:
        RuntimeError: if no runner is associated to this
            CampaignManager (e.g. the campaign was loaded without an
            ns-3 installation).
        ValueError: if a parameter combination does not exactly match
            the parameters supported by the campaign script.
    """
    # A runner may be missing when the campaign was loaded (rather than
    # created) without specifying an ns-3 installation.
    # RuntimeError is a subclass of Exception, so existing callers
    # catching Exception keep working.
    if self.runner is None:
        raise RuntimeError("No runner was ever specified"
                           " for this CampaignManager.")

    # Nothing to do for an empty request
    if not param_list:
        return

    # Check all parameter combinations fully specify the desired
    # simulation. 'RngRun' is always available in every script, besides
    # the script-specific parameters.
    desired_params = self.db.get_params()
    available = ['RngRun'] + desired_params
    for p in param_list:
        passed = list(p.keys())
        if set(passed) != set(available):
            raise ValueError("Specified parameter combination does not "
                             "match the supported parameters:\n"
                             "Passed: %s\nSupported: %s"
                             % (sorted(passed), sorted(available)))

    # Check that the current repo commit corresponds to the one
    # specified in the campaign
    if self.check_repo:
        self.check_repo_ok()

    # Build ns-3 before running any simulations.
    # At this point, we can assume the project was already configured.
    self.runner.configure_and_build(skip_configuration=True)

    # Shuffle simulations: mixing up long and short runs gives better
    # time estimates. Note this shuffles the caller's list in place.
    shuffle(param_list)

    # Offload simulation execution to self.runner. This only creates a
    # generator: no computation is performed on this line.
    results = self.runner.run_simulations(param_list,
                                          self.db.get_data_dir())

    # Wrap the result generator in the progress bar generator.
    if show_progress:
        result_generator = tqdm(results, total=len(param_list),
                                unit='simulation',
                                desc='Running simulations')
    else:
        result_generator = results

    # Insert results as they are finalized by the SimulationRunner, so
    # they are kept even if execution is terminated abruptly by crashes
    # or by a KeyboardInterrupt.
    for result in result_generator:
        self.db.insert_result(result)
python
def run_simulations(self, param_list, show_progress=True):
    """
    Run several simulations specified by a list of parameter
    combinations.

    Note: this function does not verify whether we already have the
    required simulations in the database - it just runs all the
    parameter combinations that are specified in the list.

    Args:
        param_list (list): list of parameter combinations to execute.
            Items of this list are dictionaries, with one key for each
            parameter, and a value specifying the parameter value
            (which can be either a string or a number).
        show_progress (bool): whether or not to show a progress bar
            with percentage and expected remaining time.

    Raises:
        RuntimeError: if no runner is associated to this
            CampaignManager (e.g. the campaign was loaded without an
            ns-3 installation).
        ValueError: if a parameter combination does not exactly match
            the parameters supported by the campaign script.
    """
    # A runner may be missing when the campaign was loaded (rather than
    # created) without specifying an ns-3 installation.
    # RuntimeError is a subclass of Exception, so existing callers
    # catching Exception keep working.
    if self.runner is None:
        raise RuntimeError("No runner was ever specified"
                           " for this CampaignManager.")

    # Nothing to do for an empty request
    if not param_list:
        return

    # Check all parameter combinations fully specify the desired
    # simulation. 'RngRun' is always available in every script, besides
    # the script-specific parameters.
    desired_params = self.db.get_params()
    available = ['RngRun'] + desired_params
    for p in param_list:
        passed = list(p.keys())
        if set(passed) != set(available):
            raise ValueError("Specified parameter combination does not "
                             "match the supported parameters:\n"
                             "Passed: %s\nSupported: %s"
                             % (sorted(passed), sorted(available)))

    # Check that the current repo commit corresponds to the one
    # specified in the campaign
    if self.check_repo:
        self.check_repo_ok()

    # Build ns-3 before running any simulations.
    # At this point, we can assume the project was already configured.
    self.runner.configure_and_build(skip_configuration=True)

    # Shuffle simulations: mixing up long and short runs gives better
    # time estimates. Note this shuffles the caller's list in place.
    shuffle(param_list)

    # Offload simulation execution to self.runner. This only creates a
    # generator: no computation is performed on this line.
    results = self.runner.run_simulations(param_list,
                                          self.db.get_data_dir())

    # Wrap the result generator in the progress bar generator.
    if show_progress:
        result_generator = tqdm(results, total=len(param_list),
                                unit='simulation',
                                desc='Running simulations')
    else:
        result_generator = results

    # Insert results as they are finalized by the SimulationRunner, so
    # they are kept even if execution is terminated abruptly by crashes
    # or by a KeyboardInterrupt.
    for result in result_generator:
        self.db.insert_result(result)
[ "def", "run_simulations", "(", "self", ",", "param_list", ",", "show_progress", "=", "True", ")", ":", "# Make sure we have a runner to run simulations with.", "# This can happen in case the simulation campaign is loaded and not", "# created from scratch.", "if", "self", ".", "ru...
Run several simulations specified by a list of parameter combinations. Note: this function does not verify whether we already have the required simulations in the database - it just runs all the parameter combinations that are specified in the list. Args: param_list (list): list of parameter combinations to execute. Items of this list are dictionaries, with one key for each parameter, and a value specifying the parameter value (which can be either a string or a number). show_progress (bool): whether or not to show a progress bar with percentage and expected remaining time.
[ "Run", "several", "simulations", "specified", "by", "a", "list", "of", "parameter", "combinations", "." ]
5077dd7a6d15644a18790bb6fde320e905f0fef0
https://github.com/signetlabdei/sem/blob/5077dd7a6d15644a18790bb6fde320e905f0fef0/sem/manager.py#L214-L288
17,685
signetlabdei/sem
sem/manager.py
CampaignManager.get_missing_simulations
def get_missing_simulations(self, param_list, runs=None): """ Return a list of the simulations among the required ones that are not available in the database. Args: param_list (list): a list of dictionaries containing all the parameters combinations. runs (int): an integer representing how many repetitions are wanted for each parameter combination, None if the dictionaries in param_list already feature the desired RngRun value. """ params_to_simulate = [] if runs is not None: # Get next available runs from the database next_runs = self.db.get_next_rngruns() available_params = [r['params'] for r in self.db.get_results()] for param_comb in param_list: # Count how many param combinations we found, and remove them # from the list of available_params for faster searching in the # future needed_runs = runs for i, p in enumerate(available_params): if param_comb == {k: p[k] for k in p.keys() if k != "RngRun"}: needed_runs -= 1 new_param_combs = [] for needed_run in range(needed_runs): # Here it's important that we make copies of the # dictionaries, so that if we modify one we don't modify # the others. This is necessary because after this step, # typically, we will add the RngRun key which must be # different for each copy. new_param = deepcopy(param_comb) new_param['RngRun'] = next(next_runs) new_param_combs += [new_param] params_to_simulate += new_param_combs else: for param_comb in param_list: if not self.db.get_results(param_comb): params_to_simulate += [param_comb] return params_to_simulate
python
def get_missing_simulations(self, param_list, runs=None): params_to_simulate = [] if runs is not None: # Get next available runs from the database next_runs = self.db.get_next_rngruns() available_params = [r['params'] for r in self.db.get_results()] for param_comb in param_list: # Count how many param combinations we found, and remove them # from the list of available_params for faster searching in the # future needed_runs = runs for i, p in enumerate(available_params): if param_comb == {k: p[k] for k in p.keys() if k != "RngRun"}: needed_runs -= 1 new_param_combs = [] for needed_run in range(needed_runs): # Here it's important that we make copies of the # dictionaries, so that if we modify one we don't modify # the others. This is necessary because after this step, # typically, we will add the RngRun key which must be # different for each copy. new_param = deepcopy(param_comb) new_param['RngRun'] = next(next_runs) new_param_combs += [new_param] params_to_simulate += new_param_combs else: for param_comb in param_list: if not self.db.get_results(param_comb): params_to_simulate += [param_comb] return params_to_simulate
[ "def", "get_missing_simulations", "(", "self", ",", "param_list", ",", "runs", "=", "None", ")", ":", "params_to_simulate", "=", "[", "]", "if", "runs", "is", "not", "None", ":", "# Get next available runs from the database", "next_runs", "=", "self", ".", "db",...
Return a list of the simulations among the required ones that are not available in the database. Args: param_list (list): a list of dictionaries containing all the parameters combinations. runs (int): an integer representing how many repetitions are wanted for each parameter combination, None if the dictionaries in param_list already feature the desired RngRun value.
[ "Return", "a", "list", "of", "the", "simulations", "among", "the", "required", "ones", "that", "are", "not", "available", "in", "the", "database", "." ]
5077dd7a6d15644a18790bb6fde320e905f0fef0
https://github.com/signetlabdei/sem/blob/5077dd7a6d15644a18790bb6fde320e905f0fef0/sem/manager.py#L290-L332
17,686
signetlabdei/sem
sem/manager.py
CampaignManager.run_missing_simulations
def run_missing_simulations(self, param_list, runs=None): """ Run the simulations from the parameter list that are not yet available in the database. This function also makes sure that we have at least runs replications for each parameter combination. Additionally, param_list can either be a list containing the desired parameter combinations or a dictionary containing multiple values for each parameter, to be expanded into a list. Args: param_list (list, dict): either a list of parameter combinations or a dictionary to be expanded into a list through the list_param_combinations function. runs (int): the number of runs to perform for each parameter combination. This parameter is only allowed if the param_list specification doesn't feature an 'RngRun' key already. """ # If we are passed a dictionary, we need to expand this if isinstance(param_list, dict): param_list = list_param_combinations(param_list) # If we are passed a list already, just run the missing simulations self.run_simulations( self.get_missing_simulations(param_list, runs))
python
def run_missing_simulations(self, param_list, runs=None): # If we are passed a dictionary, we need to expand this if isinstance(param_list, dict): param_list = list_param_combinations(param_list) # If we are passed a list already, just run the missing simulations self.run_simulations( self.get_missing_simulations(param_list, runs))
[ "def", "run_missing_simulations", "(", "self", ",", "param_list", ",", "runs", "=", "None", ")", ":", "# If we are passed a dictionary, we need to expand this", "if", "isinstance", "(", "param_list", ",", "dict", ")", ":", "param_list", "=", "list_param_combinations", ...
Run the simulations from the parameter list that are not yet available in the database. This function also makes sure that we have at least runs replications for each parameter combination. Additionally, param_list can either be a list containing the desired parameter combinations or a dictionary containing multiple values for each parameter, to be expanded into a list. Args: param_list (list, dict): either a list of parameter combinations or a dictionary to be expanded into a list through the list_param_combinations function. runs (int): the number of runs to perform for each parameter combination. This parameter is only allowed if the param_list specification doesn't feature an 'RngRun' key already.
[ "Run", "the", "simulations", "from", "the", "parameter", "list", "that", "are", "not", "yet", "available", "in", "the", "database", "." ]
5077dd7a6d15644a18790bb6fde320e905f0fef0
https://github.com/signetlabdei/sem/blob/5077dd7a6d15644a18790bb6fde320e905f0fef0/sem/manager.py#L334-L360
17,687
signetlabdei/sem
sem/manager.py
CampaignManager.get_results_as_numpy_array
def get_results_as_numpy_array(self, parameter_space, result_parsing_function, runs): """ Return the results relative to the desired parameter space in the form of a numpy array. Args: parameter_space (dict): dictionary containing parameter/list-of-values pairs. result_parsing_function (function): user-defined function, taking a result dictionary as argument, that can be used to parse the result files and return a list of values. runs (int): number of runs to gather for each parameter combination. """ return np.array(self.get_space(self.db.get_complete_results(), {}, parameter_space, runs, result_parsing_function))
python
def get_results_as_numpy_array(self, parameter_space, result_parsing_function, runs): return np.array(self.get_space(self.db.get_complete_results(), {}, parameter_space, runs, result_parsing_function))
[ "def", "get_results_as_numpy_array", "(", "self", ",", "parameter_space", ",", "result_parsing_function", ",", "runs", ")", ":", "return", "np", ".", "array", "(", "self", ".", "get_space", "(", "self", ".", "db", ".", "get_complete_results", "(", ")", ",", ...
Return the results relative to the desired parameter space in the form of a numpy array. Args: parameter_space (dict): dictionary containing parameter/list-of-values pairs. result_parsing_function (function): user-defined function, taking a result dictionary as argument, that can be used to parse the result files and return a list of values. runs (int): number of runs to gather for each parameter combination.
[ "Return", "the", "results", "relative", "to", "the", "desired", "parameter", "space", "in", "the", "form", "of", "a", "numpy", "array", "." ]
5077dd7a6d15644a18790bb6fde320e905f0fef0
https://github.com/signetlabdei/sem/blob/5077dd7a6d15644a18790bb6fde320e905f0fef0/sem/manager.py#L366-L383
17,688
signetlabdei/sem
sem/manager.py
CampaignManager.save_to_mat_file
def save_to_mat_file(self, parameter_space, result_parsing_function, filename, runs): """ Return the results relative to the desired parameter space in the form of a .mat file. Args: parameter_space (dict): dictionary containing parameter/list-of-values pairs. result_parsing_function (function): user-defined function, taking a result dictionary as argument, that can be used to parse the result files and return a list of values. filename (path): name of output .mat file. runs (int): number of runs to gather for each parameter combination. """ # Make sure all values are lists for key in parameter_space: if not isinstance(parameter_space[key], list): parameter_space[key] = [parameter_space[key]] # Add a dimension label for each non-singular dimension dimension_labels = [{key: str(parameter_space[key])} for key in parameter_space.keys() if len(parameter_space[key]) > 1] + [{'runs': range(runs)}] # Create a list of the parameter names return savemat( filename, {'results': self.get_results_as_numpy_array(parameter_space, result_parsing_function, runs=runs), 'dimension_labels': dimension_labels})
python
def save_to_mat_file(self, parameter_space, result_parsing_function, filename, runs): # Make sure all values are lists for key in parameter_space: if not isinstance(parameter_space[key], list): parameter_space[key] = [parameter_space[key]] # Add a dimension label for each non-singular dimension dimension_labels = [{key: str(parameter_space[key])} for key in parameter_space.keys() if len(parameter_space[key]) > 1] + [{'runs': range(runs)}] # Create a list of the parameter names return savemat( filename, {'results': self.get_results_as_numpy_array(parameter_space, result_parsing_function, runs=runs), 'dimension_labels': dimension_labels})
[ "def", "save_to_mat_file", "(", "self", ",", "parameter_space", ",", "result_parsing_function", ",", "filename", ",", "runs", ")", ":", "# Make sure all values are lists", "for", "key", "in", "parameter_space", ":", "if", "not", "isinstance", "(", "parameter_space", ...
Return the results relative to the desired parameter space in the form of a .mat file. Args: parameter_space (dict): dictionary containing parameter/list-of-values pairs. result_parsing_function (function): user-defined function, taking a result dictionary as argument, that can be used to parse the result files and return a list of values. filename (path): name of output .mat file. runs (int): number of runs to gather for each parameter combination.
[ "Return", "the", "results", "relative", "to", "the", "desired", "parameter", "space", "in", "the", "form", "of", "a", ".", "mat", "file", "." ]
5077dd7a6d15644a18790bb6fde320e905f0fef0
https://github.com/signetlabdei/sem/blob/5077dd7a6d15644a18790bb6fde320e905f0fef0/sem/manager.py#L385-L421
17,689
signetlabdei/sem
sem/manager.py
CampaignManager.save_to_npy_file
def save_to_npy_file(self, parameter_space, result_parsing_function, filename, runs): """ Save results to a numpy array file format. """ np.save(filename, self.get_results_as_numpy_array( parameter_space, result_parsing_function, runs=runs))
python
def save_to_npy_file(self, parameter_space, result_parsing_function, filename, runs): np.save(filename, self.get_results_as_numpy_array( parameter_space, result_parsing_function, runs=runs))
[ "def", "save_to_npy_file", "(", "self", ",", "parameter_space", ",", "result_parsing_function", ",", "filename", ",", "runs", ")", ":", "np", ".", "save", "(", "filename", ",", "self", ".", "get_results_as_numpy_array", "(", "parameter_space", ",", "result_parsing...
Save results to a numpy array file format.
[ "Save", "results", "to", "a", "numpy", "array", "file", "format", "." ]
5077dd7a6d15644a18790bb6fde320e905f0fef0
https://github.com/signetlabdei/sem/blob/5077dd7a6d15644a18790bb6fde320e905f0fef0/sem/manager.py#L423-L430
17,690
signetlabdei/sem
sem/manager.py
CampaignManager.save_to_folders
def save_to_folders(self, parameter_space, folder_name, runs): """ Save results to a folder structure. """ self.space_to_folders(self.db.get_results(), {}, parameter_space, runs, folder_name)
python
def save_to_folders(self, parameter_space, folder_name, runs): self.space_to_folders(self.db.get_results(), {}, parameter_space, runs, folder_name)
[ "def", "save_to_folders", "(", "self", ",", "parameter_space", ",", "folder_name", ",", "runs", ")", ":", "self", ".", "space_to_folders", "(", "self", ".", "db", ".", "get_results", "(", ")", ",", "{", "}", ",", "parameter_space", ",", "runs", ",", "fol...
Save results to a folder structure.
[ "Save", "results", "to", "a", "folder", "structure", "." ]
5077dd7a6d15644a18790bb6fde320e905f0fef0
https://github.com/signetlabdei/sem/blob/5077dd7a6d15644a18790bb6fde320e905f0fef0/sem/manager.py#L432-L437
17,691
signetlabdei/sem
sem/manager.py
CampaignManager.space_to_folders
def space_to_folders(self, current_result_list, current_query, param_space, runs, current_directory): """ Convert a parameter space specification to a directory tree with a nested structure. """ # Base case: we iterate over the runs and copy files in the final # directory. if not param_space: for run, r in enumerate(current_result_list[:runs]): files = self.db.get_result_files(r) new_dir = os.path.join(current_directory, "run=%s" % run) os.makedirs(new_dir, exist_ok=True) for filename, filepath in files.items(): shutil.copyfile(filepath, os.path.join(new_dir, filename)) return [key, value] = list(param_space.items())[0] # Iterate over dictionary values for v in value: next_query = deepcopy(current_query) temp_query = deepcopy(current_query) # For each list, recur 'fixing' that dimension. next_query[key] = v # Update query # Create folder folder_name = ("%s=%s" % (key, v)).replace('/', '_') new_dir = os.path.join(current_directory, folder_name) os.makedirs(new_dir, exist_ok=True) next_param_space = deepcopy(param_space) del(next_param_space[key]) temp_query[key] = v temp_result_list = [r for r in current_result_list if self.satisfies_query(r, temp_query)] self.space_to_folders(temp_result_list, next_query, next_param_space, runs, new_dir)
python
def space_to_folders(self, current_result_list, current_query, param_space, runs, current_directory): # Base case: we iterate over the runs and copy files in the final # directory. if not param_space: for run, r in enumerate(current_result_list[:runs]): files = self.db.get_result_files(r) new_dir = os.path.join(current_directory, "run=%s" % run) os.makedirs(new_dir, exist_ok=True) for filename, filepath in files.items(): shutil.copyfile(filepath, os.path.join(new_dir, filename)) return [key, value] = list(param_space.items())[0] # Iterate over dictionary values for v in value: next_query = deepcopy(current_query) temp_query = deepcopy(current_query) # For each list, recur 'fixing' that dimension. next_query[key] = v # Update query # Create folder folder_name = ("%s=%s" % (key, v)).replace('/', '_') new_dir = os.path.join(current_directory, folder_name) os.makedirs(new_dir, exist_ok=True) next_param_space = deepcopy(param_space) del(next_param_space[key]) temp_query[key] = v temp_result_list = [r for r in current_result_list if self.satisfies_query(r, temp_query)] self.space_to_folders(temp_result_list, next_query, next_param_space, runs, new_dir)
[ "def", "space_to_folders", "(", "self", ",", "current_result_list", ",", "current_query", ",", "param_space", ",", "runs", ",", "current_directory", ")", ":", "# Base case: we iterate over the runs and copy files in the final", "# directory.", "if", "not", "param_space", ":...
Convert a parameter space specification to a directory tree with a nested structure.
[ "Convert", "a", "parameter", "space", "specification", "to", "a", "directory", "tree", "with", "a", "nested", "structure", "." ]
5077dd7a6d15644a18790bb6fde320e905f0fef0
https://github.com/signetlabdei/sem/blob/5077dd7a6d15644a18790bb6fde320e905f0fef0/sem/manager.py#L439-L478
17,692
signetlabdei/sem
sem/manager.py
CampaignManager.get_results_as_xarray
def get_results_as_xarray(self, parameter_space, result_parsing_function, output_labels, runs): """ Return the results relative to the desired parameter space in the form of an xarray data structure. Args: parameter_space (dict): The space of parameters to export. result_parsing_function (function): user-defined function, taking a result dictionary as argument, that can be used to parse the result files and return a list of values. output_labels (list): a list of labels to apply to the results dimensions, output by the result_parsing_function. runs (int): the number of runs to export for each parameter combination. """ np_array = np.array( self.get_space( self.db.get_complete_results(), {}, collections.OrderedDict([(k, v) for k, v in parameter_space.items()]), runs, result_parsing_function)) # Create a parameter space only containing the variable parameters clean_parameter_space = collections.OrderedDict( [(k, v) for k, v in parameter_space.items()]) clean_parameter_space['runs'] = range(runs) if isinstance(output_labels, list): clean_parameter_space['metrics'] = output_labels xr_array = xr.DataArray(np_array, coords=clean_parameter_space, dims=list(clean_parameter_space.keys())) return xr_array
python
def get_results_as_xarray(self, parameter_space, result_parsing_function, output_labels, runs): np_array = np.array( self.get_space( self.db.get_complete_results(), {}, collections.OrderedDict([(k, v) for k, v in parameter_space.items()]), runs, result_parsing_function)) # Create a parameter space only containing the variable parameters clean_parameter_space = collections.OrderedDict( [(k, v) for k, v in parameter_space.items()]) clean_parameter_space['runs'] = range(runs) if isinstance(output_labels, list): clean_parameter_space['metrics'] = output_labels xr_array = xr.DataArray(np_array, coords=clean_parameter_space, dims=list(clean_parameter_space.keys())) return xr_array
[ "def", "get_results_as_xarray", "(", "self", ",", "parameter_space", ",", "result_parsing_function", ",", "output_labels", ",", "runs", ")", ":", "np_array", "=", "np", ".", "array", "(", "self", ".", "get_space", "(", "self", ".", "db", ".", "get_complete_res...
Return the results relative to the desired parameter space in the form of an xarray data structure. Args: parameter_space (dict): The space of parameters to export. result_parsing_function (function): user-defined function, taking a result dictionary as argument, that can be used to parse the result files and return a list of values. output_labels (list): a list of labels to apply to the results dimensions, output by the result_parsing_function. runs (int): the number of runs to export for each parameter combination.
[ "Return", "the", "results", "relative", "to", "the", "desired", "parameter", "space", "in", "the", "form", "of", "an", "xarray", "data", "structure", "." ]
5077dd7a6d15644a18790bb6fde320e905f0fef0
https://github.com/signetlabdei/sem/blob/5077dd7a6d15644a18790bb6fde320e905f0fef0/sem/manager.py#L480-L516
17,693
signetlabdei/sem
sem/cli.py
run
def run(ns_3_path, results_dir, script, no_optimization, parameters, max_processes): """ Run multiple simulations. """ sem.parallelrunner.MAX_PARALLEL_PROCESSES = max_processes # Create a campaign campaign = sem.CampaignManager.new(ns_3_path, script, results_dir, overwrite=False, optimized=not no_optimization) # Print campaign info click.echo(campaign) # Run the simulations [params, defaults] = zip(*get_params_and_defaults(campaign.db.get_params(), campaign.db)) # Check whether we need to read parameters from the command line if not parameters: # Substitute non-None defaults with their string representation # This will be then converted back to a Python data structure in # query_parameters string_defaults = list() for idx, d in enumerate(defaults): if d is not None: string_defaults.append(str(d)) else: string_defaults.append(d) script_params = query_parameters(params, defaults=string_defaults) else: script_params = import_parameters_from_file(parameters) # Finally, run the simulations campaign.run_missing_simulations(script_params, runs=click.prompt("Total runs", type=int))
python
def run(ns_3_path, results_dir, script, no_optimization, parameters, max_processes): sem.parallelrunner.MAX_PARALLEL_PROCESSES = max_processes # Create a campaign campaign = sem.CampaignManager.new(ns_3_path, script, results_dir, overwrite=False, optimized=not no_optimization) # Print campaign info click.echo(campaign) # Run the simulations [params, defaults] = zip(*get_params_and_defaults(campaign.db.get_params(), campaign.db)) # Check whether we need to read parameters from the command line if not parameters: # Substitute non-None defaults with their string representation # This will be then converted back to a Python data structure in # query_parameters string_defaults = list() for idx, d in enumerate(defaults): if d is not None: string_defaults.append(str(d)) else: string_defaults.append(d) script_params = query_parameters(params, defaults=string_defaults) else: script_params = import_parameters_from_file(parameters) # Finally, run the simulations campaign.run_missing_simulations(script_params, runs=click.prompt("Total runs", type=int))
[ "def", "run", "(", "ns_3_path", ",", "results_dir", ",", "script", ",", "no_optimization", ",", "parameters", ",", "max_processes", ")", ":", "sem", ".", "parallelrunner", ".", "MAX_PARALLEL_PROCESSES", "=", "max_processes", "# Create a campaign", "campaign", "=", ...
Run multiple simulations.
[ "Run", "multiple", "simulations", "." ]
5077dd7a6d15644a18790bb6fde320e905f0fef0
https://github.com/signetlabdei/sem/blob/5077dd7a6d15644a18790bb6fde320e905f0fef0/sem/cli.py#L55-L94
17,694
signetlabdei/sem
sem/cli.py
view
def view(results_dir, result_id, hide_simulation_output, parameters, no_pager): """ View results of simulations. """ campaign = sem.CampaignManager.load(results_dir) # Pick the most appropriate function based on the level of detail we want if hide_simulation_output: get_results_function = campaign.db.get_results else: get_results_function = campaign.db.get_complete_results # If a result id was specified, just query for that result if result_id: output = '\n\n\n'.join([pprint.pformat(item) for item in get_results_function(result_id=result_id)]) else: [params, defaults] = zip(*get_params_and_defaults( campaign.db.get_params(), campaign.db)) if not parameters: # Convert to string string_defaults = list() for idx, d in enumerate(defaults): string_defaults.append(str(d)) script_params = query_parameters(params, string_defaults) else: script_params = import_parameters_from_file(parameters) # Perform the search output = '\n\n\n'.join([pprint.pformat(item) for item in get_results_function(script_params)]) # Print the results if no_pager: click.echo(output) else: click.echo_via_pager(output)
python
def view(results_dir, result_id, hide_simulation_output, parameters, no_pager): campaign = sem.CampaignManager.load(results_dir) # Pick the most appropriate function based on the level of detail we want if hide_simulation_output: get_results_function = campaign.db.get_results else: get_results_function = campaign.db.get_complete_results # If a result id was specified, just query for that result if result_id: output = '\n\n\n'.join([pprint.pformat(item) for item in get_results_function(result_id=result_id)]) else: [params, defaults] = zip(*get_params_and_defaults( campaign.db.get_params(), campaign.db)) if not parameters: # Convert to string string_defaults = list() for idx, d in enumerate(defaults): string_defaults.append(str(d)) script_params = query_parameters(params, string_defaults) else: script_params = import_parameters_from_file(parameters) # Perform the search output = '\n\n\n'.join([pprint.pformat(item) for item in get_results_function(script_params)]) # Print the results if no_pager: click.echo(output) else: click.echo_via_pager(output)
[ "def", "view", "(", "results_dir", ",", "result_id", ",", "hide_simulation_output", ",", "parameters", ",", "no_pager", ")", ":", "campaign", "=", "sem", ".", "CampaignManager", ".", "load", "(", "results_dir", ")", "# Pick the most appropriate function based on the l...
View results of simulations.
[ "View", "results", "of", "simulations", "." ]
5077dd7a6d15644a18790bb6fde320e905f0fef0
https://github.com/signetlabdei/sem/blob/5077dd7a6d15644a18790bb6fde320e905f0fef0/sem/cli.py#L128-L168
17,695
signetlabdei/sem
sem/cli.py
command
def command(results_dir, result_id): """ Print the command that needs to be used to reproduce a result. """ campaign = sem.CampaignManager.load(results_dir) result = campaign.db.get_results(result_id=result_id)[0] click.echo("Simulation command:") click.echo(sem.utils.get_command_from_result(campaign.db.get_script(), result)) click.echo("Debug command:") click.echo(sem.utils.get_command_from_result(campaign.db.get_script(), result, debug=True))
python
def command(results_dir, result_id): campaign = sem.CampaignManager.load(results_dir) result = campaign.db.get_results(result_id=result_id)[0] click.echo("Simulation command:") click.echo(sem.utils.get_command_from_result(campaign.db.get_script(), result)) click.echo("Debug command:") click.echo(sem.utils.get_command_from_result(campaign.db.get_script(), result, debug=True))
[ "def", "command", "(", "results_dir", ",", "result_id", ")", ":", "campaign", "=", "sem", ".", "CampaignManager", ".", "load", "(", "results_dir", ")", "result", "=", "campaign", ".", "db", ".", "get_results", "(", "result_id", "=", "result_id", ")", "[", ...
Print the command that needs to be used to reproduce a result.
[ "Print", "the", "command", "that", "needs", "to", "be", "used", "to", "reproduce", "a", "result", "." ]
5077dd7a6d15644a18790bb6fde320e905f0fef0
https://github.com/signetlabdei/sem/blob/5077dd7a6d15644a18790bb6fde320e905f0fef0/sem/cli.py#L181-L195
17,696
signetlabdei/sem
sem/cli.py
export
def export(results_dir, filename, do_not_try_parsing, parameters): """ Export results to file. An extension in filename is required to deduce the file type. If no extension is specified, a directory tree export will be used. Note that this command automatically tries to parse the simulation output. Supported extensions: .mat (Matlab file), .npy (Numpy file), no extension (Directory tree) """ # Get the extension _, extension = os.path.splitext(filename) campaign = sem.CampaignManager.load(results_dir) [params, defaults] = zip(*get_params_and_defaults(campaign.db.get_params(), campaign.db)) if do_not_try_parsing: parsing_function = None else: parsing_function = sem.utils.automatic_parser if not parameters: # Convert to string string_defaults = list() for idx, d in enumerate(defaults): string_defaults.append(str(d)) parameter_query = query_parameters(params, string_defaults) else: # Import specified parameter file parameter_query = import_parameters_from_file(parameters) if extension == ".mat": campaign.save_to_mat_file(parameter_query, parsing_function, filename, runs=click.prompt("Runs", type=int)) elif extension == ".npy": campaign.save_to_npy_file(parameter_query, parsing_function, filename, runs=click.prompt("Runs", type=int)) elif extension == "": campaign.save_to_folders(parameter_query, filename, runs=click.prompt("Runs", type=int)) else: # Unrecognized format raise ValueError("Format not recognized")
python
def export(results_dir, filename, do_not_try_parsing, parameters): # Get the extension _, extension = os.path.splitext(filename) campaign = sem.CampaignManager.load(results_dir) [params, defaults] = zip(*get_params_and_defaults(campaign.db.get_params(), campaign.db)) if do_not_try_parsing: parsing_function = None else: parsing_function = sem.utils.automatic_parser if not parameters: # Convert to string string_defaults = list() for idx, d in enumerate(defaults): string_defaults.append(str(d)) parameter_query = query_parameters(params, string_defaults) else: # Import specified parameter file parameter_query = import_parameters_from_file(parameters) if extension == ".mat": campaign.save_to_mat_file(parameter_query, parsing_function, filename, runs=click.prompt("Runs", type=int)) elif extension == ".npy": campaign.save_to_npy_file(parameter_query, parsing_function, filename, runs=click.prompt("Runs", type=int)) elif extension == "": campaign.save_to_folders(parameter_query, filename, runs=click.prompt("Runs", type=int)) else: # Unrecognized format raise ValueError("Format not recognized")
[ "def", "export", "(", "results_dir", ",", "filename", ",", "do_not_try_parsing", ",", "parameters", ")", ":", "# Get the extension", "_", ",", "extension", "=", "os", ".", "path", ".", "splitext", "(", "filename", ")", "campaign", "=", "sem", ".", "CampaignM...
Export results to file. An extension in filename is required to deduce the file type. If no extension is specified, a directory tree export will be used. Note that this command automatically tries to parse the simulation output. Supported extensions: .mat (Matlab file), .npy (Numpy file), no extension (Directory tree)
[ "Export", "results", "to", "file", "." ]
5077dd7a6d15644a18790bb6fde320e905f0fef0
https://github.com/signetlabdei/sem/blob/5077dd7a6d15644a18790bb6fde320e905f0fef0/sem/cli.py#L220-L268
17,697
signetlabdei/sem
sem/cli.py
merge
def merge(move, output_dir, sources): """ Merge multiple results folder into one, by copying the results over to a new folder. For a faster operation (which on the other hand destroys the campaign data if interrupted), the move option can be used to directly move results to the new folder. """ # Get paths for all campaign JSONS jsons = [] for s in sources: filename = "%s.json" % os.path.split(s)[1] jsons += [os.path.join(s, filename)] # Check that the configuration for all campaigns is the same reference_config = TinyDB(jsons[0]).table('config') for j in jsons[1:]: for i, j in zip(reference_config.all(), TinyDB(j).table('config').all()): assert i == j # Create folders for new results directory filename = "%s.json" % os.path.split(output_dir)[1] output_json = os.path.join(output_dir, filename) output_data = os.path.join(output_dir, 'data') os.makedirs(output_data) # Create new database db = TinyDB(output_json) db.table('config').insert_multiple(reference_config.all()) # Import results from all databases to the new JSON file for s in sources: filename = "%s.json" % os.path.split(s)[1] current_db = TinyDB(os.path.join(s, filename)) db.table('results').insert_multiple(current_db.table('results').all()) # Copy or move results to new data folder for s in sources: for r in glob.glob(os.path.join(s, 'data/*')): basename = os.path.basename(r) if move: shutil.move(r, os.path.join(output_data, basename)) else: shutil.copytree(r, os.path.join(output_data, basename)) if move: for s in sources: shutil.rmtree(os.path.join(s, 'data/*')) shutil.rmtree(os.path.join(s, "%s.json" % os.path.split(s)[1])) shutil.rmtree(s)
python
def merge(move, output_dir, sources): # Get paths for all campaign JSONS jsons = [] for s in sources: filename = "%s.json" % os.path.split(s)[1] jsons += [os.path.join(s, filename)] # Check that the configuration for all campaigns is the same reference_config = TinyDB(jsons[0]).table('config') for j in jsons[1:]: for i, j in zip(reference_config.all(), TinyDB(j).table('config').all()): assert i == j # Create folders for new results directory filename = "%s.json" % os.path.split(output_dir)[1] output_json = os.path.join(output_dir, filename) output_data = os.path.join(output_dir, 'data') os.makedirs(output_data) # Create new database db = TinyDB(output_json) db.table('config').insert_multiple(reference_config.all()) # Import results from all databases to the new JSON file for s in sources: filename = "%s.json" % os.path.split(s)[1] current_db = TinyDB(os.path.join(s, filename)) db.table('results').insert_multiple(current_db.table('results').all()) # Copy or move results to new data folder for s in sources: for r in glob.glob(os.path.join(s, 'data/*')): basename = os.path.basename(r) if move: shutil.move(r, os.path.join(output_data, basename)) else: shutil.copytree(r, os.path.join(output_data, basename)) if move: for s in sources: shutil.rmtree(os.path.join(s, 'data/*')) shutil.rmtree(os.path.join(s, "%s.json" % os.path.split(s)[1])) shutil.rmtree(s)
[ "def", "merge", "(", "move", ",", "output_dir", ",", "sources", ")", ":", "# Get paths for all campaign JSONS", "jsons", "=", "[", "]", "for", "s", "in", "sources", ":", "filename", "=", "\"%s.json\"", "%", "os", ".", "path", ".", "split", "(", "s", ")",...
Merge multiple results folder into one, by copying the results over to a new folder. For a faster operation (which on the other hand destroys the campaign data if interrupted), the move option can be used to directly move results to the new folder.
[ "Merge", "multiple", "results", "folder", "into", "one", "by", "copying", "the", "results", "over", "to", "a", "new", "folder", "." ]
5077dd7a6d15644a18790bb6fde320e905f0fef0
https://github.com/signetlabdei/sem/blob/5077dd7a6d15644a18790bb6fde320e905f0fef0/sem/cli.py#L286-L335
17,698
signetlabdei/sem
sem/cli.py
query_parameters
def query_parameters(param_list, defaults=None): """ Asks the user for parameters. If available, proposes some defaults. Args: param_list (list): List of parameters to ask the user for values. defaults (list): A list of proposed defaults. It must be a list of the same length as param_list. A value of None in one element of the list means that no default will be proposed for the corresponding parameter. """ script_params = collections.OrderedDict([k, []] for k in param_list) for param, default in zip(list(script_params.keys()), defaults): user_input = click.prompt("%s" % param, default=default) script_params[param] = ast.literal_eval(user_input) return script_params
python
def query_parameters(param_list, defaults=None): script_params = collections.OrderedDict([k, []] for k in param_list) for param, default in zip(list(script_params.keys()), defaults): user_input = click.prompt("%s" % param, default=default) script_params[param] = ast.literal_eval(user_input) return script_params
[ "def", "query_parameters", "(", "param_list", ",", "defaults", "=", "None", ")", ":", "script_params", "=", "collections", ".", "OrderedDict", "(", "[", "k", ",", "[", "]", "]", "for", "k", "in", "param_list", ")", "for", "param", ",", "default", "in", ...
Asks the user for parameters. If available, proposes some defaults. Args: param_list (list): List of parameters to ask the user for values. defaults (list): A list of proposed defaults. It must be a list of the same length as param_list. A value of None in one element of the list means that no default will be proposed for the corresponding parameter.
[ "Asks", "the", "user", "for", "parameters", ".", "If", "available", "proposes", "some", "defaults", "." ]
5077dd7a6d15644a18790bb6fde320e905f0fef0
https://github.com/signetlabdei/sem/blob/5077dd7a6d15644a18790bb6fde320e905f0fef0/sem/cli.py#L349-L367
17,699
signetlabdei/sem
sem/cli.py
import_parameters_from_file
def import_parameters_from_file(parameters_file): """ Try importing a parameter dictionary from file. We expect values in parameters_file to be defined as follows: param1: value1 param2: [value2, value3] """ params = {} with open(parameters_file, 'r') as f: matches = re.findall('(.*): (.*)', f.read()) for m in matches: params[m[0]] = ast.literal_eval(m[1]) return params
python
def import_parameters_from_file(parameters_file): params = {} with open(parameters_file, 'r') as f: matches = re.findall('(.*): (.*)', f.read()) for m in matches: params[m[0]] = ast.literal_eval(m[1]) return params
[ "def", "import_parameters_from_file", "(", "parameters_file", ")", ":", "params", "=", "{", "}", "with", "open", "(", "parameters_file", ",", "'r'", ")", "as", "f", ":", "matches", "=", "re", ".", "findall", "(", "'(.*): (.*)'", ",", "f", ".", "read", "(...
Try importing a parameter dictionary from file. We expect values in parameters_file to be defined as follows: param1: value1 param2: [value2, value3]
[ "Try", "importing", "a", "parameter", "dictionary", "from", "file", "." ]
5077dd7a6d15644a18790bb6fde320e905f0fef0
https://github.com/signetlabdei/sem/blob/5077dd7a6d15644a18790bb6fde320e905f0fef0/sem/cli.py#L370-L386