_id
stringlengths
2
7
title
stringlengths
1
88
partition
stringclasses
3 values
text
stringlengths
31
13.1k
language
stringclasses
1 value
meta_information
dict
q17400
CommonService.initialize_logging
train
def initialize_logging(self):
    """Reset the logging for the service process.

    All logged messages are forwarded to the frontend. If any filtering
    is desired, then this must take place on the service side.
    """
    # Reset logging to pass logrecords into the queue to the frontend only.
    # Existing handlers may be broken as they were copied into a new process,
    # so should be discarded. Wrap in list() so the logger dict is not
    # mutated while we iterate; None addresses the root logger.
    for loggername in [None] + list(logging.Logger.manager.loggerDict.keys()):
        logger = logging.getLogger(loggername)
        while logger.handlers:
            logger.removeHandler(logger.handlers[0])

    # Re-enable logging to console
    root_logger = logging.getLogger()

    # By default pass all warning (and higher) level messages to the frontend.
    # logging.WARNING is the documented constant; WARN is an undocumented alias.
    root_logger.setLevel(logging.WARNING)
python
{ "resource": "" }
q17401
CommonService.start
train
def start(self, **kwargs): """Start listening to command queue, process commands in main loop, set status, etc... This function is most likely called by the frontend in a separate process.""" # Keep a copy of keyword arguments for use in subclasses self.start_kwargs.update(kwargs) try: self.initialize_logging() self.__update_service_status(self.SERVICE_STATUS_STARTING) self.start_transport() self.initializing() self._register("command", self.__process_command) if self.__pipe_commands is None: # can only listen to commands if command queue is defined self.__shutdown = True else: # start listening to command queue in separate thread self.__start_command_queue_listener() while not self.__shutdown: # main loop self.__update_service_status(self.SERVICE_STATUS_IDLE) if self._idle_time is None: task = self.__queue.get() else: try: task = self.__queue.get(True, self._idle_time) except queue.Empty: self.__update_service_status(self.SERVICE_STATUS_TIMER) if self._idle_callback: self._idle_callback() continue self.__update_service_status(self.SERVICE_STATUS_PROCESSING) if task[0] == Priority.COMMAND: message = task[2] if message and "band" in message: processor = self.__callback_register.get(message["band"]) if processor is None: self.log.warning(
python
{ "resource": "" }
q17402
CommonService.process_uncaught_exception
train
def process_uncaught_exception(self, e): """This is called to handle otherwise uncaught exceptions from the service. The service will terminate either way, but here we can do things such as gathering useful environment information and logging for posterity.""" # Add information about the actual exception to the log message # This includes the file, line and piece of code causing the exception. # exc_info=True adds the full stack trace to the log message. exc_file_fullpath, exc_file, exc_lineno, exc_func, exc_line = ( workflows.logging.get_exception_source() ) added_information = { "workflows_exc_lineno": exc_lineno,
python
{ "resource": "" }
q17403
IRCBot.connect
train
def connect(self, *args, **kwargs): """ Connect to a server. This overrides the function in SimpleIRCClient to provide SSL functionality. :param args: :param kwargs: :return: """ if self.use_ssl: factory = irc.connection.Factory(wrapper=ssl.wrap_socket) else: factory = irc.connection.Factory() self.connection.connect(server=self.server, port=self.port,
python
{ "resource": "" }
q17404
IRCBot.set_metadata
train
def set_metadata(self, e): """ This function sets the metadata that is common between pub and priv """ metadata = Metadata(source=self.actor_urn).__dict__ metadata['source_connector'] = 'irc'
python
{ "resource": "" }
q17405
IRCBot.on_pubmsg
train
def on_pubmsg(self, c, e): """ This function runs when the bot receives a public message. """ text = e.arguments[0] metadata = self.set_metadata(e)
python
{ "resource": "" }
q17406
IRCBot.on_welcome
train
def on_welcome(self, c, e): """ This function runs when the bot successfully connects to the IRC server """ self.backoff = 1 # Assume we had a good connection. Reset backoff. if self.nickserv: if Utilities.isNotEmpty(self.nickserv_pass): self.identify(c, e, self.nickserv_pass) time.sleep(3) # Make sure Nickserv really sees us else: logger.error('If nickserv is enabled, you must supply'
python
{ "resource": "" }
q17407
IRCBot.run
train
def run(self): """ Run the bot in a thread. Implementing the IRC listener as a thread allows it to listen without blocking IRCLego's ability to listen as
python
{ "resource": "" }
q17408
System.check
train
def check(self): """ Check config data consistency Returns ------- """ if self.sparselib not in self.sparselib_alt: logger.warning("Invalid sparse library
python
{ "resource": "" }
q17409
Fault.apply
train
def apply(self, actual_time):
    """Check time and apply faults"""
    # Guard: nothing to do if this simulation time was already processed.
    if self.time == actual_time:
        return
    self.time = actual_time

    for idx in range(self.n):
        if self.tf[idx] != self.time:
            continue
        logger.info(
            ' <Fault> Applying fault on Bus <{}> at t={}.'.format(
                self.bus[idx], self.tf[idx]))
        self.u[idx] = 1
python
{ "resource": "" }
q17410
SampleConsumer.consume_message
train
def consume_message(self, header, message): """Consume a message""" logmessage = { "time": (time.time() % 1000) * 1000, "header": "", "message": message, } if header: logmessage["header"] = ( json.dumps(header, indent=2) + "\n" + "----------------" + "\n" ) if isinstance(message, dict): logmessage["message"] = ( json.dumps(message, indent=2) + "\n" + "----------------" + "\n"
python
{ "resource": "" }
q17411
Lego.on_receive
train
def on_receive(self, message): """ Handle being informed of a message. This function is called whenever a Lego receives a message, as specified in the pykka documentation. Legos should not override this function. :param message: :return: """ if self.log_file is not None and message['should_log']: message_copy = Message(message['text'], Metadata(None).__dict__,
python
{ "resource": "" }
q17412
Lego.cleanup
train
def cleanup(self): """ Clean up finished children. :return: None """ self.lock.acquire() logger.debug('Acquired lock in cleanup for ' + str(self))
python
{ "resource": "" }
q17413
Lego.add_child
train
def add_child(self, child_type, *args, **kwargs): """ Initialize and keep track of a child. :param child_type: a class inheriting from Lego to initialize \ an instance of :param args: arguments for initializing the child :param kwargs: keyword arguments for initializing the child :return: """ try: baseplate = kwargs['baseplate'] except: if self.baseplate is None:
python
{ "resource": "" }
q17414
Lego.reply
train
def reply(self, message, text, opts=None): """ Reply to the sender of the provided message with a message \ containing the provided text. :param message: the message to reply to :param text: the text to reply with :param opts: A dictionary of additional values to add to metadata :return: None """ metadata = Metadata(source=self.actor_urn, dest=message['metadata']['source']).__dict__ metadata['opts'] = opts message = Message(text=text, metadata=metadata,
python
{ "resource": "" }
q17415
Lego.build_reply_opts
train
def build_reply_opts(self, message): """ Convenience method for constructing default options for a reply message. :param message: the message to reply to :return: opts """ try: source = message['metadata']['source_channel'] thread = message['metadata'].get('thread_ts') opts = {'target': source, 'thread': thread}
python
{ "resource": "" }
q17416
alter
train
def alter(data, system): """Alter data in dm format devices""" device = data[0] action = data[1] if data[2] == '*': data[2] = '.*' regex = re.compile(data[2]) prop = data[3] value = float(data[4]) if action == 'MUL': for item in range(system.__dict__[device].n): if regex.search(system.__dict__[device].name[item]): system.__dict__[device].__dict__[prop][item] *= value elif action == 'REP': for item in range(system.__dict__[device].n): if regex.search(system.__dict__[device].name[item]): system.__dict__[device].__dict__[prop][item] = value elif action
python
{ "resource": "" }
q17417
write
train
def write(file, system): """ Write data in system to a dm file """ # TODO: Check for bugs!!! out = list() out.append('# DOME format version 1.0') ppl = 7 # parameter per line retval = True dev_list = sorted(system.devman.devices) for dev in dev_list: model = system.__dict__[dev] if not model.n: continue out.append('') header = dev + ', ' space = ' ' * (len(dev) + 2) keys = list(model._data.keys()) keys.extend(['name', 'idx']) keys = sorted(keys) # remove non-existent keys for key in keys: if key not in model.__dict__.keys(): keys.pop(key) nline = int(ceil(len(keys) / ppl)) nelement = model.n vals = [''] * len(keys) # for each element, read values for elem in range(nelement): for idx, key in enumerate(keys): if model._flags['sysbase'] and key in model._store.keys(): val = model._store[key][elem] else: val = model.__dict__[key][elem] if isinstance(val, float): val = round(val, 5) elif isinstance(val, str): val = '"{}"'.format(val) elif isinstance(val, list): val = list(val) val = '; '.join(str(i) for i in val) val = '[{}]'.format(val) elif
python
{ "resource": "" }
q17418
StompTransport.add_command_line_options
train
def add_command_line_options(cls, parser): """function to inject command line parameters""" if "add_argument" in
python
{ "resource": "" }
q17419
StompTransport.add_command_line_options_argparse
train
def add_command_line_options_argparse(cls, argparser): """function to inject command line parameters into a Python ArgumentParser.""" import argparse class SetParameter(argparse.Action): """callback object for ArgumentParser""" def __call__(self, parser, namespace, value, option_string=None): cls.config[option_string] = value if option_string == "--stomp-conf": cls.load_configuration_file(value) argparser.add_argument( "--stomp-host", metavar="HOST", default=cls.defaults.get("--stomp-host"), help="Stomp broker address, default '%(default)s'", type=str, action=SetParameter, ) argparser.add_argument( "--stomp-port", metavar="PORT", default=cls.defaults.get("--stomp-port"), help="Stomp broker port, default '%(default)s'", type=int, action=SetParameter, ) argparser.add_argument( "--stomp-user", metavar="USER", default=cls.defaults.get("--stomp-user"), help="Stomp user, default '%(default)s'", type=str,
python
{ "resource": "" }
q17420
StompTransport.add_command_line_options_optparse
train
def add_command_line_options_optparse(cls, optparser): """function to inject command line parameters into a Python OptionParser.""" def set_parameter(option, opt, value, parser): """callback function for OptionParser""" cls.config[opt] = value if opt == "--stomp-conf": cls.load_configuration_file(value) optparser.add_option( "--stomp-host", metavar="HOST", default=cls.defaults.get("--stomp-host"), help="Stomp broker address, default '%default'", type="string", nargs=1, action="callback", callback=set_parameter, ) optparser.add_option( "--stomp-port", metavar="PORT", default=cls.defaults.get("--stomp-port"), help="Stomp broker port, default '%default'", type="int", nargs=1,
python
{ "resource": "" }
q17421
StompTransport.is_connected
train
def is_connected(self): """Return connection status"""
python
{ "resource": "" }
q17422
StompTransport.disconnect
train
def disconnect(self): """Gracefully close connection to stomp server.""" if self._connected:
python
{ "resource": "" }
q17423
StompTransport.broadcast_status
train
def broadcast_status(self, status): """Broadcast transient status information to all listeners""" self._broadcast( "transient.status",
python
{ "resource": "" }
q17424
DevMan.register_device
train
def register_device(self, dev_name):
    """register a device to the device list"""
    # Early return keeps the device list free of duplicates.
    if dev_name in self.devices:
        return
    self.devices.append(dev_name)
python
{ "resource": "" }
q17425
DevMan.register_element
train
def register_element(self, dev_name, idx=None): """ Register a device element to the group list Parameters ---------- dev_name : str model name idx : str element idx Returns ------- str assigned idx """ if dev_name not in self.devices: logger.error( 'Device {} missing. call add_device before adding elements'. format(dev_name))
python
{ "resource": "" }
q17426
DevMan.sort_device
train
def sort_device(self): """ Sort device to follow the order of initialization :return: None """ self.devices.sort() # idx: the indices of order-sensitive models # names: an ordered list of order-sensitive models idx = [] names = [] for dev in order: # if ``dev`` in ``order`` is a model file name: # initialize the models in alphabet order
python
{ "resource": "" }
q17427
TDS._calc_time_step_first
train
def _calc_time_step_first(self): """ Compute the first time step and save to ``self.h`` Returns ------- None """ system = self.system config = self.config if not system.dae.n: freq = 1.0 elif system.dae.n == 1: B = matrix(system.dae.Gx) self.solver.linsolve(system.dae.Gy, B) As = system.dae.Fx - system.dae.Fy * B freq = abs(As[0, 0]) else: freq = 20.0 if freq > system.freq:
python
{ "resource": "" }
q17428
TDS.calc_time_step
train
def calc_time_step(self): """ Set the time step during time domain simulations Parameters ---------- convergence: bool truth value of the convergence of the last step niter: int current iteration count t: float current simulation time Returns ------- float computed time step size """ system = self.system config = self.config convergence = self.convergence niter = self.niter t = self.t if t == 0: self._calc_time_step_first() return if convergence: if niter >= 15: config.deltat = max(config.deltat * 0.5, config.deltatmin) elif niter <= 6: config.deltat = min(config.deltat * 1.1, config.deltatmax) else: config.deltat = max(config.deltat * 0.95, config.deltatmin) # adjust fixed time step if niter is high if config.fixt: config.deltat = min(config.tstep, config.deltat) else: config.deltat *= 0.9
python
{ "resource": "" }
q17429
TDS.init
train
def init(self): """ Initialize time domain simulation Returns ------- None """ system = self.system config = self.config dae = self.system.dae if system.pflow.solved is False: return t, s = elapsed() # Assign indices for post-powerflow device variables system.xy_addr1() # Assign variable names for bus injections and line flows if enabled system.varname.resize_for_flows() system.varname.bus_line_names() # Reshape dae to retain power flow solutions system.dae.init1() # Initialize post-powerflow device variables for device, init1 in zip(system.devman.devices, system.call.init1): if init1:
python
{ "resource": "" }
q17430
TDS.restore_values
train
def restore_values(self): """ Restore x, y, and f values if not converged Returns ------- None """ if self.convergence is True: return dae = self.system.dae system = self.system inc_g = self.inc[dae.n:dae.m + dae.n] max_g_err_sign = 1 if abs(max(inc_g)) > abs(min(inc_g)) else -1 if max_g_err_sign == 1: max_g_err_idx = list(inc_g).index(max(inc_g)) else: max_g_err_idx = list(inc_g).index(min(inc_g)) logger.debug(
python
{ "resource": "" }
q17431
TDS.implicit_step
train
def implicit_step(self): """ Integrate one step using trapezoidal method. Sets convergence and niter flags. Returns ------- None """ config = self.config system = self.system dae = self.system.dae # constant short names In = spdiag([1] * dae.n) h = self.h while self.err > config.tol and self.niter < config.maxit: if self.t - self.t_jac >= 5: dae.rebuild = True self.t_jac = self.t elif self.niter > 4: dae.rebuild = True elif dae.factorize: dae.rebuild = True # rebuild Jacobian if dae.rebuild: exec(system.call.int) dae.rebuild = False else: exec(system.call.int_fg) # complete Jacobian matrix dae.Ac if config.method == 'euler': dae.Ac = sparse( [[In - h * dae.Fx, dae.Gx], [-h * dae.Fy, dae.Gy]], 'd') dae.q = dae.x - self.x0 - h * dae.f elif config.method == 'trapezoidal': dae.Ac = sparse([[In - h * 0.5 * dae.Fx, dae.Gx], [-h * 0.5 * dae.Fy, dae.Gy]], 'd') dae.q = dae.x - self.x0 - h * 0.5 * (dae.f + self.f0) # windup limiters dae.reset_Ac() if dae.factorize: self.F = self.solver.symbolic(dae.Ac) dae.factorize = False self.inc = -matrix([dae.q, dae.g]) try: N = self.solver.numeric(dae.Ac, self.F) self.solver.solve(dae.Ac, self.F, N, self.inc)
python
{ "resource": "" }
q17432
TDS.event_actions
train
def event_actions(self): """ Take actions for timed events Returns ------- None """ system = self.system dae = system.dae if self.switch: system.Breaker.apply(self.t)
python
{ "resource": "" }
q17433
TDS.load_pert
train
def load_pert(self): """ Load perturbation files to ``self.callpert`` Returns ------- None """ system = self.system if system.files.pert: try: sys.path.append(system.files.path) module = importlib.import_module(system.files.pert[:-3])
python
{ "resource": "" }
q17434
TDS.run_step0
train
def run_step0(self): """ For the 0th step, store the data and stream data Returns ------- None """ dae = self.system.dae system = self.system
python
{ "resource": "" }
q17435
TDS.streaming_step
train
def streaming_step(self): """ Sync, handle and streaming for each integration step Returns ------- None """ system = self.system if system.config.dime_enable:
python
{ "resource": "" }
q17436
TDS.streaming_init
train
def streaming_init(self): """ Send out initialization variables and process init from modules Returns ------- None """ system = self.system config = self.config if system.config.dime_enable: config.compute_flows = True
python
{ "resource": "" }
q17437
TDS.compute_flows
train
def compute_flows(self): """ If enabled, compute the line flows after each step Returns ------- None """ system = self.system config = self.config dae = system.dae if config.compute_flows: # compute and append series injections on buses exec(system.call.bus_injection) bus_inj = dae.g[:2 * system.Bus.n] exec(system.call.seriesflow)
python
{ "resource": "" }
q17438
TDS.dump_results
train
def dump_results(self, success): """ Dump simulation results to ``dat`` and ``lst`` files Returns ------- None """ system = self.system t, _ = elapsed() if success and (not system.files.no_output):
python
{ "resource": "" }
q17439
de_blank
train
def de_blank(val): """Remove blank elements in `val` and return `ret`""" ret = list(val) if type(val) == list: for idx, item in enumerate(val): if item.strip() == '':
python
{ "resource": "" }
q17440
stringfy
train
def stringfy(expr, sym_const=None, sym_states=None, sym_algebs=None): """Convert the right-hand-side of an equation into CVXOPT matrix operations""" if not sym_const: sym_const = [] if not sym_states: sym_states = [] if not sym_algebs: sym_algebs = [] expr_str = [] if type(expr) in (int, float): return expr if expr.is_Atom: if expr in sym_const: expr_str = 'self.{}'.format(expr) elif expr in sym_states: expr_str = 'dae.x[self.{}]'.format(expr) elif expr in sym_algebs: expr_str = 'dae.y[self.{}]'.format(expr) elif expr.is_Number: if expr.is_Integer: expr_str = str(int(expr)) else: expr_str = str(float(expr)) # if expr.is_negative: # expr_str = '{}'.format(expr) # else: # expr_str = str(expr) else: raise AttributeError('Unknown free symbol <{}>'.format(expr)) else: nargs = len(expr.args) arg_str = [] for arg in expr.args: arg_str.append(stringfy(arg, sym_const, sym_states, sym_algebs)) if expr.is_Add: expr_str = '' for idx, item in enumerate(arg_str): if idx == 0: if len(item) > 1 and item[1] == ' ': item = item[0] + item[2:] if idx > 0: if item[0] == '-': item = ' ' + item else: item = ' + ' + item expr_str += item elif expr.is_Mul: if nargs
python
{ "resource": "" }
q17441
readadd
train
def readadd(file, system): """read DYR file""" dyr = {} data = [] end = 0 retval = True sep = ',' fid = open(file, 'r') for line in fid.readlines(): if line.find('/') >= 0: line = line.split('/')[0] end = 1 if line.find(',') >= 0: # mixed comma and space splitter not allowed line = [to_number(item.strip()) for item in line.split(sep)] else: line = [to_number(item.strip()) for item in line.split()] if not line: end = 0 continue data.extend(line) if end == 1: field = data[1] if field not in dyr.keys(): dyr[field] = [] dyr[field].append(data) end = 0 data = [] fid.close() # elem_add device elements to system supported = [ 'GENROU',
python
{ "resource": "" }
q17442
Recipe._sanitize
train
def _sanitize(recipe): """Clean up a recipe that may have been stored as serialized json string. Convert any numerical pointers that are stored as strings to integers.""" recipe = recipe.copy() for k in list(recipe): if k not in ("start", "error") and int(k) and k != int(k): recipe[int(k)] = recipe[k] del recipe[k] for k in list(recipe): if "output" in recipe[k] and not isinstance(
python
{ "resource": "" }
q17443
parse_string
train
def parse_string(data, unquote=default_unquote): """Decode URL-encoded strings to UTF-8 containing the escaped chars. """ if data is None: return None # We'll soon need to unquote to recover our UTF-8 data. # In Python 2, unquote crashes on chars beyond ASCII. So encode functions # had better not include anything beyond ASCII in data. # In Python 3, unquote crashes on bytes objects, requiring conversion to # str objects (unicode) using decode(). # But in Python 2, the same decode causes unquote to butcher the data. # So in that case, just leave the bytes. if isinstance(data, bytes): if sys.version_info > (3, 0, 0): # pragma: no cover
python
{ "resource": "" }
q17444
parse_value
train
def parse_value(value, allow_spaces=True, unquote=default_unquote): "Process a cookie value" if value is None: return None value = strip_spaces_and_quotes(value)
python
{ "resource": "" }
q17445
valid_name
train
def valid_name(name): "Validate a cookie name string" if isinstance(name, bytes): name = name.decode('ascii') if not Definitions.COOKIE_NAME_RE.match(name): return False # This module doesn't
python
{ "resource": "" }
q17446
valid_value
train
def valid_value(value, quote=default_cookie_quote, unquote=default_unquote): """Validate a cookie value string. This is generic across quote/unquote functions because it directly verifies the encoding round-trip using the specified quote/unquote functions. So if you use different quote/unquote functions, use something like this as a replacement for valid_value:: my_valid_value = lambda value: valid_value(value, quote=my_quote, unquote=my_unquote) """ if value is None: return False # Put the value through a round trip with the given quote and unquote # functions, so we will know whether data will get lost or not in the event # that we don't complain. encoded = encode_cookie_value(value, quote=quote) decoded = parse_string(encoded, unquote=unquote) # If the original
python
{ "resource": "" }
q17447
valid_date
train
def valid_date(date): "Validate an expires datetime object" # We want something that acts like a datetime. In particular, # strings indicate a failure to parse down to an object and ints are # nonstandard and ambiguous at best. if not hasattr(date, 'tzinfo'): return False # Relevant RFCs define UTC as 'close enough' to GMT, and
python
{ "resource": "" }
q17448
valid_domain
train
def valid_domain(domain): "Validate a cookie domain ASCII string" # Using encoding on domain would confuse browsers into not sending cookies. # Generate UnicodeDecodeError up front if it
python
{ "resource": "" }
q17449
valid_path
train
def valid_path(value): "Validate a cookie path ASCII string" # Generate UnicodeDecodeError if path can't store as ASCII. value.encode("ascii") # Cookies without leading slash will likely be ignored, raise ASAP.
python
{ "resource": "" }
q17450
valid_max_age
train
def valid_max_age(number): "Validate a cookie Max-Age" if isinstance(number, basestring): try:
python
{ "resource": "" }
q17451
encode_cookie_value
train
def encode_cookie_value(data, quote=default_cookie_quote): """URL-encode strings to make them safe for a cookie value. By default this uses urllib quoting, as used in many other cookie implementations and in other Python code, instead of an ad hoc escaping mechanism which includes backslashes (these also being illegal chars in RFC 6265). """ if data is None: return None # encode() to ASCII bytes so quote won't crash on non-ASCII. # but doing that to bytes objects is nonsense. # On Python 2 encode crashes if s is bytes containing non-ASCII. # On Python 3 encode crashes on all byte objects.
python
{ "resource": "" }
q17452
Cookie.from_dict
train
def from_dict(cls, cookie_dict, ignore_bad_attributes=True): """Construct an instance from a dict of strings to parse. The main difference between this and Cookie(name, value, **kwargs) is that the values in the argument to this method are parsed. If ignore_bad_attributes=True (default), values which did not parse are set to '' in order to avoid passing bad data. """ name = cookie_dict.get('name', None) if not name: raise InvalidCookieError("Cookie must have name") raw_value = cookie_dict.get('value', '') # Absence or failure of parser here is fatal; errors in present name # and value should be found by Cookie.__init__. value = cls.attribute_parsers['value'](raw_value) cookie = cls(name, value) # Parse values from serialized formats into objects parsed = {} for key, value in cookie_dict.items(): # Don't want to pass name/value to _set_attributes if key in ('name', 'value'): continue parser = cls.attribute_parsers.get(key) if not parser: # Don't let totally unknown attributes pass silently
python
{ "resource": "" }
q17453
Cookie.from_string
train
def from_string(cls, line, ignore_bad_cookies=False, ignore_bad_attributes=True): "Construct a Cookie object from a line of Set-Cookie header data." cookie_dict = parse_one_response(
python
{ "resource": "" }
q17454
Cookie.validate
train
def validate(self, name, value): """Validate a cookie attribute with an appropriate validator. The value comes in already parsed (for example, an expires value should be a datetime). Called automatically when an attribute value is set.
python
{ "resource": "" }
q17455
Cookie.attributes
train
def attributes(self): """Export this cookie's attributes as a dict of encoded values. This is an important part of the code for rendering attributes, e.g. render_response(). """ dictionary = {} # Only look for attributes registered in attribute_names. for python_attr_name, cookie_attr_name in self.attribute_names.items(): value = getattr(self, python_attr_name) renderer = self.attribute_renderers.get(python_attr_name, None) if renderer: value = renderer(value)
python
{ "resource": "" }
q17456
Cookies.add
train
def add(self, *args, **kwargs): """Add Cookie objects by their names, or create new ones under specified names. Any unnamed arguments are interpreted as existing cookies, and are added under the value in their .name attribute. With keyword arguments, the key is interpreted as the cookie name and the value as the UNENCODED value stored in the cookie. """ # Only the first one is accessible through the main interface, # others accessible through get_all (all_cookies). for cookie in args: self.all_cookies.append(cookie)
python
{ "resource": "" }
q17457
Cookies.parse_request
train
def parse_request(self, header_data, ignore_bad_cookies=False): """Parse 'Cookie' header data into Cookie objects, and add them to this Cookies object. :arg header_data: string containing only 'Cookie:' request headers or header values (as in CGI/WSGI HTTP_COOKIE); if more than one, they must be separated by CRLF (\\r\\n). :arg ignore_bad_cookies: if set, will log each syntactically invalid cookie (at the granularity of semicolon-delimited blocks) rather than raising an exception at the first bad cookie. :returns: a Cookies instance containing Cookie objects parsed from header_data. .. note:: If you want to parse 'Set-Cookie:' response headers, please use parse_response instead. parse_request will happily turn 'expires=frob' into a separate cookie without complaining, according to the grammar. """ cookies_dict = _parse_request( header_data, ignore_bad_cookies=ignore_bad_cookies) cookie_objects = [] for name, values in cookies_dict.items(): for value in values: # Use from_dict to check name and parse value
python
{ "resource": "" }
q17458
Cookies.parse_response
train
def parse_response(self, header_data, ignore_bad_cookies=False, ignore_bad_attributes=True): """Parse 'Set-Cookie' header data into Cookie objects, and add them to this Cookies object. :arg header_data: string containing only 'Set-Cookie:' request headers or their corresponding header values; if more than one, they must be separated by CRLF (\\r\\n). :arg ignore_bad_cookies: if set, will log each syntactically invalid cookie rather than raising an exception at the first bad cookie. (This includes cookies which have noncompliant characters in the attribute section). :arg ignore_bad_attributes: defaults to True, which means to log but not raise an error when a particular attribute is unrecognized. (This does not necessarily mean that the attribute is invalid, although that would often be the case.) if unset, then an error will be raised at the first semicolon-delimited block which has an unknown attribute. :returns: a Cookies instance containing Cookie objects parsed from header_data, each with recognized attributes populated. .. note:: If you want to parse 'Cookie:' headers (i.e., data like what's sent with an HTTP request, which has only name=value pairs and no attributes), then please use parse_request instead. Such lines often
python
{ "resource": "" }
q17459
Cookies.from_request
train
def from_request(cls, header_data, ignore_bad_cookies=False): "Construct a Cookies object from request header data." cookies = cls()
python
{ "resource": "" }
q17460
Cookies.from_response
train
def from_response(cls, header_data, ignore_bad_cookies=False, ignore_bad_attributes=True): "Construct a Cookies object from response header data." cookies = cls() cookies.parse_response( header_data,
python
{ "resource": "" }
q17461
not0
train
def not0(a): """Return u if u!= 0, return 1 if u
python
{ "resource": "" }
q17462
index
train
def index(m, val): """ Return the indices of all the ``val`` in ``m`` """ mm = np.array(m) idx_tuple =
python
{ "resource": "" }
q17463
to_number
train
def to_number(s): """ Convert a string to a number. If not successful, return the string without blanks """ ret = s # try converting to float try: ret = float(s) except ValueError: ret = ret.strip('\'').strip() # try converting to uid try: ret =
python
{ "resource": "" }
q17464
get_config_load_path
train
def get_config_load_path(conf_path=None): """ Return config file load path Priority: 1. conf_path 2. current directory 3. home directory Parameters ---------- conf_path Returns ------- """ if conf_path is None: # test ./andes.conf if os.path.isfile('andes.conf'): conf_path = 'andes.conf' #
python
{ "resource": "" }
q17465
get_log_dir
train
def get_log_dir(): """ Get a directory for logging On Linux or macOS, '/tmp/andes' is the default. On Windows, '%APPDATA%/andes' is the default. Returns ------- str Path to the logging directory """ PATH = '' if platform.system() in ('Linux', 'Darwin'): PATH = tempfile.mkdtemp(prefix='andes-')
python
{ "resource": "" }
q17466
VarOut.show
train
def show(self): """ The representation of an Varout object :return: the full result matrix (for use with PyCharm viewer) :rtype: np.array """ out = []
python
{ "resource": "" }
q17467
VarOut.concat_t_vars
train
def concat_t_vars(self): """ Concatenate ``self.t`` with ``self.vars`` and output a single matrix for data dump :return matrix: concatenated matrix with ``self.t`` as the 0-th column """ logger.warning('This function is deprecated and replaced by `concat_t_vars_np`.') out = np.array([]) if len(self.t) == 0:
python
{ "resource": "" }
q17468
VarOut.concat_t_vars_np
train
def concat_t_vars_np(self, vars_idx=None): """ Concatenate `self.np_t` with `self.np_vars` and return a single matrix. The first column corresponds to time, and the rest of the matrix is the variables. Returns
python
{ "resource": "" }
q17469
VarOut.get_xy
train
def get_xy(self, yidx, xidx=0): """ Return stored data for the given indices for plot :param yidx: the indices of the y-axis variables(1-indexing) :param xidx: the index of the x-axis variables
python
{ "resource": "" }
q17470
VarOut.dump_np_vars
train
def dump_np_vars(self, store_format='csv', delimiter=','): """ Dump the TDS simulation data to files by calling subroutines `write_lst` and `write_np_dat`. Parameters ----------- store_format : str dump format in `('csv', 'txt', 'hdf5')` delimiter : str delimiter for the `csv` and `txt` format Returns ------- bool: success flag """
python
{ "resource": "" }
q17471
VarOut.dump
train
def dump(self): """ Dump the TDS results to the output `dat` file :return: succeed flag """ logger.warn('This function is deprecated and replaced by `dump_np_vars`.') ret = False if self.system.files.no_output:
python
{ "resource": "" }
q17472
VarOut.write_np_dat
train
def write_np_dat(self, store_format='csv', delimiter=',', fmt='%.12g'): """ Write TDS data stored in `self.np_vars` to the output file Parameters ---------- store_format : str dump format in ('csv', 'txt', 'hdf5') delimiter : str delimiter for the `csv` and `txt` format fmt : str output formatting template Returns ------- bool : success flag """ ret = False system = self.system # compute the total number of columns, excluding time if not system.Recorder.n: n_vars = system.dae.m + system.dae.n # post-computed power flows include: # bus - (Pi, Qi) # line - (Pij, Pji, Qij, Qji, Iij_Real, Iij_Imag, Iji_real, Iji_Imag) if system.tds.config.compute_flows: n_vars += 2 * system.Bus.n + 8 * system.Line.n + 2 * system.Area.n_combination idx = list(range(n_vars)) else: n_vars = len(system.Recorder.varout_idx) idx = system.Recorder.varout_idx
python
{ "resource": "" }
q17473
VarOut.write_lst
train
def write_lst(self): """ Dump the variable name lst file :return: succeed flag """ ret = False out = '' system = self.system dae = self.system.dae varname = self.system.varname template = '{:>6g}, {:>25s}, {:>35s}\n' # header line out += template.format(0, 'Time [s]', '$Time\\ [s]$') # include line flow variables in algebraic variables nflows = 0 if self.system.tds.config.compute_flows: nflows = 2 * self.system.Bus.n + \ 8 * self.system.Line.n + \ 2 * self.system.Area.n_combination # output variable indices if system.Recorder.n == 0: state_idx = list(range(dae.n)) algeb_idx = list(range(dae.n, dae.n + dae.m + nflows)) idx = state_idx + algeb_idx else: idx
python
{ "resource": "" }
q17474
VarOut.vars_to_array
train
def vars_to_array(self): """ Convert `self.vars` to a numpy array Returns ------- numpy.array """ logger.warn('This function is deprecated. You can inspect `self.np_vars` directly as NumPy arrays
python
{ "resource": "" }
q17475
preamble
train
def preamble(): """ Log the Andes command-line preamble at the `logging.INFO` level Returns ------- None """ from . import __version__ as version logger.info('ANDES {ver} (Build {b}, Python {p} on {os})' .format(ver=version[:5], b=version[-8:], p=platform.python_version(), os=platform.system())) try:
python
{ "resource": "" }
q17476
edit_conf
train
def edit_conf(edit_config=False, load_config=None, **kwargs): """ Edit the Andes config file which occurs first in the search path. Parameters ---------- edit_config : bool If ``True``, try to open up an editor and edit the config file. Otherwise returns. load_config : None or str, optional Path to the config file, which will be placed to the first in the search order. kwargs : dict Other keyword arguments. Returns ------- bool ``True`` is a config file is found and an editor is opened. ``False`` if ``edit_config`` is False. """ ret = False # no `edit-config` supplied if edit_config == '': return ret conf_path = misc.get_config_load_path(load_config) if conf_path is not None: logger.info('Editing config file {}'.format(conf_path)) if edit_config is None: # use the following default editors if platform.system() == 'Linux': editor = os.environ.get('EDITOR', 'gedit')
python
{ "resource": "" }
q17477
remove_output
train
def remove_output(clean=False, **kwargs): """ Remove the outputs generated by Andes, including power flow reports ``_out.txt``, time-domain list ``_out.lst`` and data ``_out.dat``, eigenvalue analysis report ``_eig.txt``. Parameters ---------- clean : bool If ``True``, execute the function body. Returns otherwise. kwargs : dict Other keyword arguments Returns ------- bool ``True`` is the function body executes with success. ``False``
python
{ "resource": "" }
q17478
search
train
def search(search, **kwargs): """ Search for models whose names matches the given pattern. Print the results to stdout. .. deprecated :: 1.0.0 `search` will be moved to ``andeshelp`` in future versions. Parameters ---------- search : str Partial or full name of the model to search for kwargs : dict Other keyword arguments. Returns ------- list The list of model names that match the given pattern. """ from .models import all_models out = [] if not search: return out keys = sorted(list(all_models.keys())) for key in keys:
python
{ "resource": "" }
q17479
save_config
train
def save_config(save_config='', **kwargs): """ Save the Andes config to a file at the path specified by ``save_config``. The save action will not run if `save_config = ''`. Parameters ---------- save_config : None or str, optional, ('' by default) Path to the file to save the config file. If the path is an emtpy string, the save action will not run. Save to `~/.andes/andes.conf` if ``None``. kwargs : dict, optional Other keyword arguments Returns ------- bool ``True`` is the save action is run. ``False`` otherwise. """ ret = False cf_path =
python
{ "resource": "" }
q17480
Call.setup
train
def setup(self): """ setup the call list after case file is parsed and jit models are loaded """ self.devices = self.system.devman.devices self.ndevice = len(self.devices) self.gcalls = [''] * self.ndevice self.fcalls = [''] * self.ndevice self.gycalls = [''] *
python
{ "resource": "" }
q17481
Call.build_vec
train
def build_vec(self): """build call validity vector for each device""" for item in all_calls: self.__dict__[item] = [] for dev in self.devices:
python
{ "resource": "" }
q17482
Call.build_strings
train
def build_strings(self): """build call string for each device""" for idx, dev in enumerate(self.devices): header = 'system.' + dev self.gcalls[idx] = header + '.gcall(system.dae)\n'
python
{ "resource": "" }
q17483
Call._compile_pfgen
train
def _compile_pfgen(self): """Post power flow computation for PV and SW""" string = '"""\n' string += 'system.dae.init_g()\n' for gcall, pflow, shunt, series, stagen, call in zip( self.gcall, self.pflow, self.shunt, self.series, self.stagen, self.gcalls): if gcall and
python
{ "resource": "" }
q17484
Call._compile_bus_injection
train
def _compile_bus_injection(self): """Impose injections on buses""" string = '"""\n' for device, series in zip(self.devices, self.series): if series: string += 'system.' + device + '.gcall(system.dae)\n' string += '\n'
python
{ "resource": "" }
q17485
Call._compile_seriesflow
train
def _compile_seriesflow(self): """Post power flow computation of series device flow""" string = '"""\n' for device, pflow, series in zip(self.devices, self.pflow, self.series): if pflow and series:
python
{ "resource": "" }
q17486
Call._compile_int_f
train
def _compile_int_f(self): """Time Domain Simulation - update differential equations""" string = '"""\n' string += 'system.dae.init_f()\n' # evaluate differential equations f for fcall, call in zip(self.fcall, self.fcalls): if fcall:
python
{ "resource": "" }
q17487
Call._compile_int_g
train
def _compile_int_g(self): """Time Domain Simulation - update algebraic equations and Jacobian""" string = '"""\n' # evaluate the algebraic equations g string += 'system.dae.init_g()\n' for gcall, call in zip(self.gcall, self.gcalls): if gcall: string += call string += '\n' string += 'system.dae.reset_small_g()\n' # handle islands string += self.gisland # rebuild constant Jacobian elements if needed string += 'if system.dae.factorize:\n' string += ' system.dae.init_jac0()\n' for jac0, call in zip(self.jac0, self.jac0s): if jac0:
python
{ "resource": "" }
q17488
ModelBase._init
train
def _init(self): """ Convert model metadata to class attributes. This function is called automatically after ``define()`` in new versions. :return: None """ assert self._name assert self._group # self.n = 0 self.u = [] self.name = [] self.idx = [] self.uid = {} if not self._unamey: self._unamey = self._algebs else: assert len(self._unamey) == len(self._algebs)
python
{ "resource": "" }
q17489
ModelBase.param_define
train
def param_define(self, param, default, unit='', descr='', tomatrix=True, nonzero=False, mandatory=False, power=False, voltage=False, current=False, z=False, y=False, r=False, g=False, dccurrent=False, dcvoltage=False, time=False, event_time=False, **kwargs): """ Define a parameter in the model :param tomatrix: convert this parameter list to matrix :param param: parameter name :param default: parameter default value :param unit: parameter unit :param descr: description :param nonzero: is non-zero :param mandatory: is mandatory :param power: is a power value in the `self.Sn` base :param voltage: is a voltage value in the `self.Vn` base :param current: is a current value in the device base :param z: is an impedance value in the device base :param y: is an admittance value in the device base :param r: is a dc resistance value in the device base :param g: is a dc conductance value in the device base :param dccurrent: is a dc current value in the device base :param dcvoltage: is a dc votlage value in the device base :param time: is a time value in the device base :param event_time: is a variable for timed event :type param: str :type tomatrix: bool :type default: str, float :type unit: str :type descr: str :type nonzero: bool :type mandatory: bool :type power: bool :type voltage: bool :type current: bool :type z: bool :type y: bool :type r: bool :type g: bool :type dccurrent: bool :type dcvoltage: bool :type time: bool
python
{ "resource": "" }
q17490
ModelBase.var_define
train
def var_define(self, variable, ty, fname, descr='', uname=''): """ Define a variable in the model :param fname: LaTex formatted variable name string :param uname: unformatted variable name string, `variable` as default :param variable: variable name :param ty: type code in ``('x', 'y')`` :param descr: variable description :type variable: str
python
{ "resource": "" }
q17491
ModelBase.service_define
train
def service_define(self, service, ty): """ Add a service variable of type ``ty`` to this model :param str service: variable name :param type ty: variable type :return: None """ assert service
python
{ "resource": "" }
q17492
ModelBase.get_uid
train
def get_uid(self, idx): """ Return the `uid` of the elements with the given `idx` :param list, matrix idx: external indices :type idx: list, matrix :return: a matrix of uid """ assert idx is not None
python
{ "resource": "" }
q17493
ModelBase.get_field
train
def get_field(self, field, idx=None, astype=None): """ Return `self.field` for the elements labeled by `idx` :param astype: type cast of the return value :param field: field name of this model :param idx: element indices, will be the whole list if not specified :return: field values """ assert astype in (None, list, matrix) ret = None if idx is None: idx = self.idx # ===================disable warning ============================== # if field in self._service: # logger.warning( # 'Reading service variable <{model}.{field}> could be unsafe.' # .format(field=field, model=self._name) # )
python
{ "resource": "" }
q17494
ModelBase._alloc
train
def _alloc(self): """ Allocate empty memory for dae variable indices. Called in device setup phase. :return: None """ nzeros = [0] * self.n for var in self._states:
python
{ "resource": "" }
q17495
ModelBase.data_to_dict
train
def data_to_dict(self, sysbase=False): """ Return the loaded model parameters as one dictionary. Each key of the dictionary is a parameter name, and the value is a list of all the parameter values. :param sysbase: use system base quantities :type sysbase: bool """ assert isinstance(sysbase, bool) ret = {} for key in self.data_keys: if
python
{ "resource": "" }
q17496
ModelBase.data_to_list
train
def data_to_list(self, sysbase=False): """ Return the loaded model data as a list of dictionaries. Each dictionary contains the full parameters of an element. :param sysbase: use system base quantities :type sysbase: bool """ ret = list() # for each element for i in range(self.n): # read the parameter values and put in the temp dict ``e`` e = {}
python
{ "resource": "" }
q17497
ModelBase.data_from_dict
train
def data_from_dict(self, data): """ Populate model parameters from a dictionary of parameters Parameters ---------- data : dict List of parameter dictionaries Returns ------- None """ nvars = [] for key, val in data.items(): self.__dict__[key].extend(val) # assure the same parameter matrix size if len(nvars) > 1 and len(val) != nvars[-1]: raise IndexError(
python
{ "resource": "" }
q17498
ModelBase.var_to_df
train
def var_to_df(self): """ Return the current var_to_df of variables :return: pandas.DataFrame """ ret = {} self._check_pd() if self._flags['address'] is False: return pd.DataFrame.from_dict(ret)
python
{ "resource": "" }
q17499
ModelBase.param_remove
train
def param_remove(self, param: 'str') -> None: """ Remove a param from this model :param param: name of the parameter to be removed :type param: str """ for attr in self._param_attr_dicts: if param in self.__dict__[attr]:
python
{ "resource": "" }