docstring
stringlengths 52
499
| function
stringlengths 67
35.2k
| __index_level_0__
int64 52.6k
1.16M
|
|---|---|---|
Adds and starts a behaviour to the agent.
If template is not None it is used to match
new messages and deliver them to the behaviour.
Args:
behaviour (spade.behaviour.CyclicBehaviour): the behaviour to be started
template (spade.template.Template, optional): the template to match messages with (Default value = None)
|
def add_behaviour(self, behaviour, template=None):
    """Register a behaviour with this agent and start it if the agent is alive.

    Args:
        behaviour (spade.behaviour.CyclicBehaviour): the behaviour to register.
        template (spade.template.Template, optional): template used to match
            incoming messages for this behaviour (Default value = None)
    """
    behaviour.set_agent(self)
    # FSM behaviours also need every inner state linked to the agent.
    if issubclass(type(behaviour), FSMBehaviour):
        for state in behaviour.get_states().values():
            state.set_agent(self)
    behaviour.set_template(template)
    self.behaviours.append(behaviour)
    if self.is_alive():
        behaviour.start()
| 410,339
|
Removes a behaviour from the agent.
The behaviour is first killed.
Args:
behaviour (spade.behaviour.CyclicBehaviour): the behaviour instance to be removed
|
def remove_behaviour(self, behaviour):
    """Kill a behaviour and detach it from this agent.

    Args:
        behaviour (spade.behaviour.CyclicBehaviour): the behaviour instance to remove.

    Raises:
        ValueError: if the behaviour was never registered with this agent.
    """
    if not self.has_behaviour(behaviour):
        raise ValueError("This behaviour is not registered")
    pos = self.behaviours.index(behaviour)
    self.behaviours[pos].kill()
    self.behaviours.pop(pos)
| 410,340
|
Callback run when an XMPP Message is received.
This callback delivers the message to every behaviour
that is waiting for it. First, the aioxmpp.Message is
converted to spade.message.Message
Args:
msg (aioxmpp.Message): the message just received.
Returns:
list(asyncio.Future): a list of futures of the append of the message at each matched behaviour.
|
def _message_received(self, msg):
    """Callback invoked when an XMPP message arrives.

    The aioxmpp stanza is converted to a spade Message and handed to
    ``dispatch`` so every matching behaviour receives it.

    Args:
        msg (aioxmpp.Message): the message just received.

    Returns:
        list(asyncio.Future): futures for the enqueue on each matched behaviour.
    """
    return self.dispatch(Message.from_node(msg))
| 410,342
|
Dispatch the message to every behaviour that is waiting for
it using their templates match.
Args:
msg (spade.message.Message): the message to dispatch.
Returns:
list(asyncio.Future): a list of futures of the append of the message at each matched behaviour.
|
def dispatch(self, msg):
    """Deliver a message to every behaviour whose template matches it.

    Args:
        msg (spade.message.Message): the message to dispatch.

    Returns:
        list(asyncio.Future): futures for the enqueue on each matched behaviour.
    """
    logger.debug(f"Got message: {msg}")
    futures = []
    receivers = [b for b in self.behaviours if b.match(msg)]
    for behaviour in receivers:
        futures.append(self.submit(behaviour.enqueue(msg)))
        logger.debug(f"Message enqueued to behaviour: {behaviour}")
        self.traces.append(msg, category=str(behaviour))
    if not receivers:
        # Nothing wanted this message; still record it in the traces.
        logger.warning(f"No behaviour matched for message: {msg}")
        self.traces.append(msg)
    return futures
| 410,343
|
Creates a new spade.message.Message from an aioxmpp.stanza.Message
Args:
node (aioxmpp.stanza.Message): an aioxmpp Message
Returns:
spade.message.Message: a new spade Message
|
def from_node(cls, node):
    """Create a new spade.message.Message from an aioxmpp.stanza.Message.

    Args:
        node (aioxmpp.stanza.Message): an aioxmpp Message

    Returns:
        spade.message.Message: a new spade Message

    Raises:
        AttributeError: if node is not an aioxmpp stanza.
    """
    if not isinstance(node, aioxmpp.stanza.Message):
        raise AttributeError("node must be a aioxmpp.stanza.Message instance")
    msg = cls()
    msg._to = node.to
    msg._sender = node.from_
    # node.body maps language codes to text; the None key is the
    # language-less entry and is preferred when present.
    if None in node.body:
        msg.body = node.body[None]
    else:
        # Otherwise take the first language variant available.
        for key in node.body.keys():
            msg.body = node.body[key]
            break
    # SPADE metadata travels in an XEP-0004 data form; the special
    # "_thread_node" field is mapped onto the thread attribute.
    for data in node.xep0004_data:
        if data.title == SPADE_X_METADATA:
            for field in data.fields:
                if field.var != "_thread_node":
                    msg.set_metadata(field.var, field.values[0])
                else:
                    msg.thread = field.values[0]
    return msg
| 410,345
|
Set jid of the receiver.
Args:
jid (str): the jid of the receiver.
|
def to(self, jid: str):
    """Set the JID of the receiver.

    Args:
        jid (str): the jid of the receiver, or None to clear it.

    Raises:
        TypeError: if jid is neither a string nor None.
    """
    if jid is None:
        self._to = None
    elif isinstance(jid, str):
        self._to = aioxmpp.JID.fromstr(jid)
    else:
        raise TypeError("'to' MUST be a string")
| 410,346
|
Set jid of the sender
Args:
jid (str): jid of the sender
|
def sender(self, jid: str):
    """Set the JID of the sender.

    Args:
        jid (str): jid of the sender, or None to clear it.

    Raises:
        TypeError: if jid is neither a string nor None.
    """
    if jid is None:
        self._sender = None
    elif isinstance(jid, str):
        self._sender = aioxmpp.JID.fromstr(jid)
    else:
        raise TypeError("'sender' MUST be a string")
| 410,347
|
Set body of the message
Args:
body (str): The body of the message
|
def body(self, body: str):
    """Set the body of the message.

    Args:
        body (str): the body text, or None to clear it.

    Raises:
        TypeError: if body is neither a string nor None.
    """
    if not (body is None or isinstance(body, str)):
        raise TypeError("'body' MUST be a string")
    self._body = body
| 410,348
|
Set thread id of the message
Args:
value (str): the thread id
|
def thread(self, value: str):
    """Set the thread id of the message.

    Args:
        value (str): the thread id, or None to clear it.

    Raises:
        TypeError: if value is neither a string nor None.
    """
    if not (value is None or isinstance(value, str)):
        raise TypeError("'thread' MUST be a string")
    self._thread = value
| 410,349
|
Add a new metadata to the message
Args:
key (str): name of the metadata
value (str): value of the metadata
|
def set_metadata(self, key: str, value: str):
    """Attach a metadata entry to the message.

    Args:
        key (str): name of the metadata entry.
        value (str): value of the metadata entry.

    Raises:
        TypeError: if key or value is not a string.
    """
    if not (isinstance(key, str) and isinstance(value, str)):
        raise TypeError("'key' and 'value' of metadata MUST be strings")
    self.metadata[key] = value
| 410,350
|
Get the value of a metadata. Returns None if metadata does not exist.
Args:
key (str): name of the metadata
Returns:
str: the value of the metadata (or None)
|
def get_metadata(self, key) -> str:
    """Look up a metadata value; None when the key is absent.

    Args:
        key (str): name of the metadata entry.

    Returns:
        str: the value of the metadata, or None.
    """
    return self.metadata.get(key)
| 410,351
|
Returns whether a message matches with this message or not.
The message can be a Message object or a Template object.
Args:
message (spade.message.Message): the message to match to
Returns:
bool: whether the message matches or not
|
def match(self, message) -> bool:
    """Check whether a message matches this message/template.

    Only the fields that are set on this object are compared; every
    metadata entry must agree exactly.

    Args:
        message (spade.message.Message): the message to match against.

    Returns:
        bool: whether the message matches or not.
    """
    for attr in ("to", "sender", "body", "thread"):
        expected = getattr(self, attr)
        if expected and getattr(message, attr) != expected:
            return False
    if any(message.get_metadata(key) != value
           for key, value in self.metadata.items()):
        return False
    logger.debug(f"message matched {self} == {message}")
    return True
| 410,352
|
Listens to http requests and sends them to the webapp.
Args:
runner: AppRunner to process the http requests
hostname: host name to listen from.
port: port to listen from.
agent: agent that owns the web app.
|
async def start_server_in_loop(runner, hostname, port, agent):
    """Set up the aiohttp runner and start serving the agent's web app.

    Args:
        runner: AppRunner to process the http requests
        hostname: host name to listen from.
        port: port to listen from.
        agent: agent that owns the web app; its ``web.server`` is set here.
    """
    await runner.setup()
    agent.web.server = aioweb.TCPSite(runner, hostname, port)
    await agent.web.server.start()
    logger.info(f"Serving on http://{hostname}:{port}/")
| 410,360
|
Starts the web interface.
Args:
hostname (str, optional): host name to listen from. (Default value = None)
port (int, optional): port to listen from. (Default value = None)
templates_path (str, optional): path to look for templates. (Default value = None)
|
def start(self, hostname=None, port=None, templates_path=None):
    """Start the web interface.

    Args:
        hostname (str, optional): host name to listen from. (Default value = None,
            which resolves to "localhost")
        port (int, optional): port to listen from. (Default value = None; a free
            port is picked when none is configured)
        templates_path (str, optional): path to look for templates. (Default value = None)

    Returns:
        asyncio.Future: future wrapping the server startup coroutine.
    """
    self.hostname = hostname if hostname else "localhost"
    if port:
        self.port = port
    elif not self.port:
        # No port configured anywhere: pick an unused one on this host.
        self.port = unused_port(self.hostname)
    if templates_path:
        # User-supplied templates take priority over the default loaders.
        self.loaders.insert(0, jinja2.FileSystemLoader(templates_path))
        self._set_loaders()
    self.setup_routes()
    self.runner = aioweb.AppRunner(self.app)
    return self.agent.submit(start_server_in_loop(self.runner, self.hostname, self.port, self.agent))
| 410,362
|
Setup a route of type GET
Args:
path (str): URL to listen to
controller (coroutine): the coroutine to handle the request
template (str): the template to render the response or None if it is a JSON response
raw (bool): indicates if post-processing (jinja, json, etc) is needed or not
|
def add_get(self, path, controller, template, raw=False):
    """Register a GET route on the web app.

    Args:
        path (str): URL to listen to.
        controller (coroutine): the coroutine that handles the request.
        template (str): template used to render the response, or None for JSON.
        raw (bool): when True the controller is registered as-is, with no
            jinja/json post-processing.
    """
    handler = controller if raw else self._prepare_controller(controller, template)
    self.app.router.add_get(path, handler)
| 410,365
|
Wraps the controller wether to render a jinja template or to return a json response (if template is None)
Args:
controller (coroutine): the coroutine to be wrapped
template (str): the name of the template or None
Returns:
coroutine: a wrapped coroutine of the controller
|
def _prepare_controller(self, controller, template):
    """Wrap a controller to render a jinja template, or to return a JSON
    response when no template is given.

    Args:
        controller (coroutine): the coroutine to be wrapped.
        template (str): the template name, or None for JSON output.

    Returns:
        coroutine: the wrapped controller.
    """
    if not template:
        return self._parse_json_response(controller)
    return aiohttp_jinja2.template(template_name=template)(controller)
| 410,366
|
Links behaviour with its owner agent
Args:
agent (spade.agent.Agent): the agent who owns the behaviour
|
def set_agent(self, agent):
    """Link this behaviour with its owner agent.

    Args:
        agent (spade.agent.Agent): the agent who owns the behaviour.
    """
    self.agent = agent
    # The explicit ``loop`` argument to asyncio.Queue was deprecated in
    # Python 3.8 and removed in 3.10; the queue now binds to the running
    # loop lazily, which is the loop the behaviour executes on.
    self.queue = asyncio.Queue()
    self.presence = agent.presence
    self.web = agent.web
| 410,379
|
Matches a message with the behaviour's template
Args:
message(spade.message.Message): the message to match with
Returns:
bool: whether the message matches or not
|
def match(self, message: Message) -> bool:
    """Match a message against this behaviour's template.

    Args:
        message (spade.message.Message): the message to match with.

    Returns:
        bool: whether the message matches; True when no template is set.
    """
    if not self.template:
        return True
    return self.template.match(message)
| 410,380
|
Stores a knowledge item in the agent knowledge base.
Args:
name (str): name of the item
value (Any): value of the item
|
def set(self, name: str, value: Any) -> None:
    """Store a knowledge item in the owner agent's knowledge base.

    Args:
        name (str): name of the item.
        value (Any): value of the item.
    """
    self.agent.set(name, value)
| 410,381
|
Stops the behaviour
Args:
exit_code (object, optional): the exit code of the behaviour (Default value = None)
|
def kill(self, exit_code: Any = None):
    """Stop the behaviour by raising its force-kill flag.

    Args:
        exit_code (object, optional): the exit code of the behaviour (Default value = None)
    """
    self._force_kill.set()
    if exit_code is not None:
        self._exit_code = exit_code
    logger.info("Killing behavior {0} with exit code: {1}".format(self, exit_code))
| 410,384
|
Sends a message.
Args:
msg (spade.message.Message): the message to be sent.
|
async def send(self, msg: Message):
    """Send a message through the agent's container.

    Args:
        msg (spade.message.Message): the message to be sent.
    """
    if not msg.sender:
        # Outgoing messages must carry a sender; default to this agent's JID.
        msg.sender = str(self.agent.jid)
        logger.debug(f"Adding agent's jid as sender to message: {msg}")
    await self.agent.container.send(msg, self)
    msg.sent = True
    self.agent.traces.append(msg, category=str(self))
| 410,389
|
Receives a message for this behaviour.
If timeout is not None it returns the message or "None"
after timeout is done.
Args:
timeout (float): number of seconds until return
Returns:
spade.message.Message: a Message or None
|
async def receive(self, timeout: float = None) -> Union[Message, None]:
    """Wait for a message directed to this behaviour.

    Args:
        timeout (float): seconds to wait; a falsy value polls the queue
            without blocking.

    Returns:
        spade.message.Message: the next message, or None if none arrived
        within the timeout.
    """
    if not timeout:
        # Non-blocking poll of the queue.
        try:
            return self.queue.get_nowait()
        except asyncio.QueueEmpty:
            return None
    try:
        return await asyncio.wait_for(self.queue.get(), timeout=timeout)
    except asyncio.TimeoutError:
        return None
| 410,391
|
Creates a periodic behaviour.
Args:
period (float): interval of the behaviour in seconds
start_at (datetime.datetime): whether to start the behaviour with an offset
|
def __init__(self, period, start_at=None):
    """Create a periodic behaviour.

    Args:
        period (float): interval between runs, in seconds.
        start_at (datetime.datetime): optional offset for the first
            activation; defaults to now.
    """
    super().__init__()
    self._period = None
    self.period = period
    self._next_activation = start_at if start_at else now()
| 410,394
|
Set the period.
Args:
value (float): seconds
|
def period(self, value: float):
    """Set the behaviour's period.

    Args:
        value (float): interval in seconds; must be >= 0.

    Raises:
        ValueError: if value is negative.
    """
    if value < 0:
        raise ValueError("Period must be greater or equal than zero.")
    self._period = timedelta(seconds=value)
| 410,395
|
Creates a timeout behaviour, which is run at start_at
Args:
start_at (datetime.datetime): when to start the behaviour
|
def __init__(self, start_at):
    """Create a timeout behaviour, which is run once at start_at.

    Args:
        start_at (datetime.datetime): when to start the behaviour
    """
    super().__init__()
    self._timeout = start_at
    self._timeout_triggered = False  # flipped once the behaviour has fired
| 410,397
|
Adds a new state to the FSM.
Args:
name (str): the name of the state, which is used as its identifier.
state (spade.behaviour.State): The state class
initial (bool, optional): whether the state is the initial state or not. (Only one initial state is allowed) (Default value = False)
|
def add_state(self, name: str, state: State, initial: bool = False):
    """Add a new state to the FSM.

    Args:
        name (str): the name of the state, used as its identifier.
        state (spade.behaviour.State): the state instance.
        initial (bool, optional): whether this is the initial state;
            only one initial state is allowed. (Default value = False)

    Raises:
        AttributeError: if state is not a State subclass instance.
    """
    if not issubclass(state.__class__, State):
        raise AttributeError("state must be subclass of spade.behaviour.State")
    self._states[name] = state
    if initial:
        self.current_state = name
| 410,400
|
Adds a transition from one state to another.
Args:
source (str): the name of the state from where the transition starts
dest (str): the name of the state where the transition ends
|
def add_transition(self, source: str, dest: str):
    """Register a transition from one state to another.

    Args:
        source (str): name of the state where the transition starts.
        dest (str): name of the state where the transition ends.
    """
    self._transitions[source] += [dest]
| 410,401
|
Checks if a transition is registered in the FSM
Args:
source (str): the source state name
dest (str): the destination state name
Returns:
bool: whether the transition is valid or not
|
def is_valid_transition(self, source: str, dest: str) -> bool:
    """Validate that a transition is registered in the FSM.

    Args:
        source (str): the source state name.
        dest (str): the destination state name.

    Returns:
        bool: True when the transition is valid.

    Raises:
        NotValidState: if either state is unknown.
        NotValidTransition: if the transition was never registered.
    """
    if source not in self._states or dest not in self._states:
        raise NotValidState
    if dest not in self._transitions[source]:
        raise NotValidTransition
    return True
| 410,402
|
Create a new choice.
Args:
title: Text shown in the selection list.
value: Value returned, when the choice is selected.
disabled: If set, the choice can not be selected by the user. The
provided text is used to explain, why the selection is
disabled.
checked: Preselect this choice when displaying the options.
shortcut_key: Key shortcut used to select this item.
|
def __init__(self,
             title: Text,
             value: Optional[Any] = None,
             disabled: Optional[Text] = None,
             checked: bool = False,
             shortcut_key: Optional[Text] = None) -> None:
    """Create a new choice for a selection list.

    Args:
        title: text shown in the selection list.
        value: value returned when the choice is selected; defaults to
            the title itself.
        disabled: if set, the choice cannot be selected; the text explains
            why it is disabled.
        checked: preselect this choice when displaying the options.
        shortcut_key: key shortcut used to select this item.
    """
    self.title = title
    self.value = title if value is None else value
    self.disabled = disabled
    self.checked = checked
    # Normalise the shortcut to a string so single characters and ints behave alike.
    self.shortcut_key = str(shortcut_key) if shortcut_key is not None else None
| 410,470
|
Create a separator in a list.
Args:
line: Text to be displayed in the list, by default uses `---`.
|
def __init__(self, line: Optional[Text] = None):
    """Create a separator entry for a list.

    Args:
        line: text displayed in the list; defaults to the class's
            default separator (normally ``---``).
    """
    self.line = line if line else self.default_separator
    super(Separator, self).__init__(self.line, None, "-")
| 410,472
|
Avro file reader.
Args:
file_path_or_buffer: Input file path or file-like object.
schema: Avro schema.
**kwargs: Keyword argument to pandas.DataFrame.from_records.
Returns:
Class of pd.DataFrame.
|
def read_avro(file_path_or_buffer, schema=None, **kwargs):
    """Read an Avro file into a pandas DataFrame.

    Args:
        file_path_or_buffer: input file path or file-like object.
        schema: optional Avro schema.
        **kwargs: keyword arguments forwarded to pandas.DataFrame.from_records.

    Returns:
        pd.DataFrame built from the Avro records.
    """
    if isinstance(file_path_or_buffer, six.string_types):
        # Given a path: open it ourselves and make sure it gets closed.
        with open(file_path_or_buffer, 'rb') as f:
            return __file_to_dataframe(f, schema, **kwargs)
    # Given a file-like object: the caller owns its lifetime.
    return __file_to_dataframe(file_path_or_buffer, schema, **kwargs)
| 410,674
|
Query the search index for sets similar to the query set.
Args:
s (Iterable): the query set.
Returns (list): a list of tuples `(index, similarity)` where the index
is the index of the matching sets in the original list of sets.
|
def query(self, s):
    """Query the search index for sets similar to the query set.

    Args:
        s (Iterable): the query set.

    Returns (list): a list of tuples `(index, similarity)` where the index
        is the index of the matching sets in the original list of sets.
    """
    # Map tokens to their frequency-order ranks (unknown tokens dropped);
    # sorting makes the prefix filter applicable.
    s1 = np.sort([self.order[token] for token in s if token in self.order])
    logging.debug("{} original tokens and {} tokens after applying "
            "frequency order.".format(len(s), len(s1)))
    prefix = self._get_prefix(s1)
    # Candidates share at least one prefix token with the query and pass
    # the positional filter at the configured similarity threshold.
    candidates = set([i for p1, token in enumerate(prefix)
                      for i, p2 in self.index[token]
                      if self.position_filter_func(s1, self.sets[i], p1, p2,
                          self.similarity_threshold)])
    logging.debug("{} candidates found.".format(len(candidates)))
    results = deque([])
    # Verify every candidate with the exact similarity function.
    for i in candidates:
        s2 = self.sets[i]
        sim = self.similarity_func(s1, s2)
        if sim < self.similarity_threshold:
            continue
        results.append((i, sim))
    logging.debug("{} verified sets found.".format(len(results)))
    return list(results)
| 410,770
|
Give random samples for a binary quadratic model.
Variable assignments are chosen by coin flip.
Args:
bqm (:obj:`.BinaryQuadraticModel`):
Binary quadratic model to be sampled from.
num_reads (int, optional, default=10):
Number of reads.
Returns:
:obj:`.SampleSet`
|
def sample(self, bqm, num_reads=10):
    """Give random samples for a binary quadratic model.

    Variable assignments are chosen by coin flip.

    Args:
        bqm (:obj:`.BinaryQuadraticModel`):
            Binary quadratic model to be sampled from.
        num_reads (int, optional, default=10):
            Number of reads.

    Returns:
        :obj:`.SampleSet`
    """
    values = tuple(bqm.vartype.value)
    samples = []
    energies = []
    for _ in range(num_reads):
        assignment = {v: choice(values) for v in bqm.linear}
        samples.append(assignment)
        energies.append(bqm.energy(assignment))
    return SampleSet.from_samples(samples, bqm.vartype, energies)
| 410,774
|
Sample from the problem provided by bqm and truncate output.
Args:
bqm (:obj:`dimod.BinaryQuadraticModel`):
Binary quadratic model to be sampled from.
**kwargs:
Parameters for the sampling method, specified by the child
sampler.
Returns:
:obj:`dimod.SampleSet`
|
def sample(self, bqm, **kwargs):
    """Sample via the child sampler and truncate the returned sample set.

    Args:
        bqm (:obj:`dimod.BinaryQuadraticModel`):
            Binary quadratic model to be sampled from.
        **kwargs:
            Parameters for the sampling method, passed through to the
            child sampler.

    Returns:
        :obj:`dimod.SampleSet`
    """
    sampleset = self.child.sample(bqm, **kwargs)
    if self._aggregate:
        sampleset = sampleset.aggregate()
    return sampleset.truncate(**self._truncate_kwargs)
| 410,787
|
Sample from a binary quadratic model.
Args:
bqm (:obj:`~dimod.BinaryQuadraticModel`):
Binary quadratic model to be sampled from.
Returns:
:obj:`~dimod.SampleSet`
|
def sample(self, bqm):
    """Exhaustively enumerate every state of a binary quadratic model.

    Args:
        bqm (:obj:`~dimod.BinaryQuadraticModel`):
            Binary quadratic model to be sampled from.

    Returns:
        :obj:`~dimod.SampleSet`
    """
    # Work in the binary (0/1) representation; convert back at the end.
    M = bqm.binary.to_numpy_matrix()
    off = bqm.binary.offset
    if M.shape == (0, 0):
        # Empty model: nothing to enumerate.
        return SampleSet.from_samples([], bqm.vartype, energy=[])
    sample = np.zeros((len(bqm),), dtype=bool)
    # now we iterate, flipping one bit at a time until we have
    # traversed all samples. This is a Gray code.
    # https://en.wikipedia.org/wiki/Gray_code
    def iter_samples():
        sample = np.zeros((len(bqm)), dtype=bool)
        energy = 0.0
        yield sample.copy(), energy + off
        for i in range(1, 1 << len(bqm)):
            v = _ffs(i)  # lowest set bit of i selects which bit to flip
            # flip the bit in the sample
            sample[v] = not sample[v]
            # for now just calculate the energy, but there is a more clever way by calculating
            # the energy delta for the single bit flip, don't have time, pull requests
            # appreciated!
            energy = sample.dot(M).dot(sample.transpose())
            yield sample.copy(), float(energy) + off
    samples, energies = zip(*iter_samples())
    response = SampleSet.from_samples(np.array(samples, dtype='int8'), Vartype.BINARY, energies)
    # make sure the response matches the given vartype, in-place.
    response.change_vartype(bqm.vartype, inplace=True)
    return response
| 410,788
|
Cast various inputs to a valid vartype object.
Args:
vartype (:class:`.Vartype`/str/set):
Variable type. Accepted input values:
* :class:`.Vartype.SPIN`, ``'SPIN'``, ``{-1, 1}``
* :class:`.Vartype.BINARY`, ``'BINARY'``, ``{0, 1}``
Returns:
:class:`.Vartype`: Either :class:`.Vartype.SPIN` or
:class:`.Vartype.BINARY`.
See also:
:func:`~dimod.decorators.vartype_argument`
|
def as_vartype(vartype):
    """Cast various inputs to a valid Vartype object.

    Args:
        vartype (:class:`.Vartype`/str/set):
            Variable type. Accepted input values:
            * :class:`.Vartype.SPIN`, ``'SPIN'``, ``{-1, 1}``
            * :class:`.Vartype.BINARY`, ``'BINARY'``, ``{0, 1}``

    Returns:
        :class:`.Vartype`: Either :class:`.Vartype.SPIN` or
        :class:`.Vartype.BINARY`.

    Raises:
        TypeError: if the input cannot be interpreted as a vartype.

    See also:
        :func:`~dimod.decorators.vartype_argument`
    """
    if isinstance(vartype, Vartype):
        return vartype
    try:
        if isinstance(vartype, str):
            # Name lookup, e.g. 'SPIN'.
            return Vartype[vartype]
        if isinstance(vartype, frozenset):
            # Value lookup from an already-frozen set.
            return Vartype(vartype)
        # Any other iterable of values, e.g. {0, 1}.
        return Vartype(frozenset(vartype))
    except (ValueError, KeyError):
        raise TypeError(("expected input vartype to be one of: "
                         "Vartype.SPIN, 'SPIN', {-1, 1}, "
                         "Vartype.BINARY, 'BINARY', or {0, 1}."))
| 410,806
|
The energy of the given sample.
Args:
sample_like (samples_like):
A raw sample. `sample_like` is an extension of
NumPy's array_like structure. See :func:`.as_samples`.
dtype (:class:`numpy.dtype`, optional):
The data type of the returned energies. Defaults to float.
Returns:
The energy.
|
def energy(self, sample_like, dtype=float):
    """The energy of the given sample.

    Args:
        sample_like (samples_like):
            A raw sample. `sample_like` is an extension of
            NumPy's array_like structure. See :func:`.as_samples`.
        dtype (:class:`numpy.dtype`, optional):
            The data type of the returned energy. Defaults to float.

    Returns:
        The energy.
    """
    # The old default ``np.float`` was deprecated in NumPy 1.20 and removed
    # in 1.24 (evaluating the signature raised AttributeError); the builtin
    # float is the documented equivalent.
    energy, = self.energies(sample_like, dtype=dtype)
    return energy
| 410,810
|
The energies of the given samples.
Args:
samples_like (samples_like):
A collection of raw samples. `samples_like` is an extension of
NumPy's array_like structure. See :func:`.as_samples`.
dtype (:class:`numpy.dtype`, optional):
The data type of the returned energies. Defaults to float.
Returns:
:obj:`numpy.ndarray`: The energies.
|
def energies(self, samples_like, dtype=float):
    """The energies of the given samples.

    Args:
        samples_like (samples_like):
            A collection of raw samples. `samples_like` is an extension of
            NumPy's array_like structure. See :func:`.as_samples`.
        dtype (:class:`numpy.dtype`, optional):
            The data type of the returned energies. Defaults to float.
            (The old default ``np.float`` was removed in NumPy 1.24 and
            made the signature itself raise; builtin float is equivalent.)

    Returns:
        :obj:`numpy.ndarray`: The energies.
    """
    samples, labels = as_samples(samples_like)
    # Map each variable label to its column index in the samples array.
    if labels:
        idx, label = zip(*enumerate(labels))
        labeldict = dict(zip(label, idx))
    else:
        labeldict = {}
    num_samples = samples.shape[0]
    energies = np.zeros(num_samples, dtype=dtype)
    for term, bias in self.items():
        if len(term) == 0:
            # The empty term is a constant offset.
            energies += bias
        else:
            # Each term contributes the product of its variables times its bias.
            energies += np.prod([samples[:, labeldict[v]] for v in term], axis=0) * bias
    return energies
| 410,811
|
Multiply the polynomial by the given scalar.
Args:
scalar (number):
Value to multiply the polynomial by.
ignored_terms (iterable, optional):
Biases associated with these terms are not scaled.
|
def scale(self, scalar, ignored_terms=None):
    """Multiply the polynomial's biases by the given scalar.

    Args:
        scalar (number):
            Value to multiply the polynomial by.
        ignored_terms (iterable, optional):
            Biases associated with these terms are not scaled.
    """
    if ignored_terms is None:
        skip = set()
    else:
        skip = {asfrozenset(term) for term in ignored_terms}
    for term in self:
        if term in skip:
            continue
        self[term] *= scalar
| 410,814
|
Construct a binary polynomial from a higher-order Ising problem.
Args:
h (dict):
The linear biases.
J (dict):
The higher-order biases.
offset (optional, default=0.0):
Constant offset applied to the model.
Returns:
:obj:`.BinaryPolynomial`
Examples:
>>> poly = dimod.BinaryPolynomial.from_hising({'a': 2}, {'ab': -1}, 0)
|
def from_hising(cls, h, J, offset=None):
    """Construct a binary polynomial from a higher-order Ising problem.

    Args:
        h (dict):
            The linear biases.
        J (dict):
            The higher-order biases.
        offset (optional, default=0.0):
            Constant offset applied to the model.

    Returns:
        :obj:`.BinaryPolynomial`

    Examples:
        >>> poly = dimod.BinaryPolynomial.from_hising({'a': 2}, {'ab': -1}, 0)
    """
    # Linear biases become 1-tuples; higher-order terms come straight from J.
    terms = {(variable,): bias for variable, bias in h.items()}
    terms.update(J)
    if offset is not None:
        terms[frozenset([])] = offset
    return cls(terms, Vartype.SPIN)
| 410,815
|
Construct a binary polynomial from a higher-order unconstrained
binary optimization (HUBO) problem.
Args:
H (dict):
Coefficients of a higher-order unconstrained binary optimization
(HUBO) model.
Returns:
:obj:`.BinaryPolynomial`
Examples:
>>> poly = dimod.BinaryPolynomial.from_hubo({('a', 'b', 'c'): -1})
|
def from_hubo(cls, H, offset=None):
    """Construct a binary polynomial from a higher-order unconstrained
    binary optimization (HUBO) problem.

    Args:
        H (dict):
            Coefficients of a higher-order unconstrained binary
            optimization (HUBO) model.
        offset (optional): constant energy offset.

    Returns:
        :obj:`.BinaryPolynomial`

    Examples:
        >>> poly = dimod.BinaryPolynomial.from_hubo({('a', 'b', 'c'): -1})
    """
    poly = cls(H, Vartype.BINARY)
    if offset is None:
        return poly
    # Fold the offset into the constant (empty) term.
    poly[()] = poly.get((), 0) + offset
    return poly
| 410,817
|
Return a binary polynomial over `{0, 1}` variables.
Args:
copy (optional, default=False):
If True, the returned polynomial is always a copy. Otherwise,
if the polynomial is binary-valued already it returns itself.
Returns:
:obj:`.BinaryPolynomial`
|
def to_binary(self, copy=False):
    """Return a binary polynomial over `{0, 1}` variables.

    Args:
        copy (optional, default=False):
            If True, the returned polynomial is always a copy. Otherwise,
            if the polynomial is binary-valued already it returns itself.

    Returns:
        :obj:`.BinaryPolynomial`
    """
    if self.vartype is Vartype.BINARY:
        if copy:
            return self.copy()
        else:
            return self
    new = BinaryPolynomial({}, Vartype.BINARY)
    # s = 2x - 1
    # Substituting s = 2x - 1 expands each spin term over every subset of
    # its variables; the sign alternates with the number of dropped variables.
    for term, bias in self.items():
        for t in map(frozenset, powerset(term)):
            newbias = bias * 2**len(t) * (-1)**(len(term) - len(t))
            if t in new:
                new[t] += newbias
            else:
                new[t] = newbias
    return new
| 410,819
|
Determines a vertex coloring.
Args:
adj (dict): The edge structure of the graph to be colored.
`adj` should be of the form {node: neighbors, ...} where
neighbors is a set.
Returns:
dict: the coloring {node: color, ...}
dict: the colors {color: [node, ...], ...}
Note:
This is a greedy heuristic: the resulting coloring is not
necessarily minimal.
|
def greedy_coloring(adj):
    """Greedily assign a color to every vertex of a graph.

    Args:
        adj (dict): edge structure of the graph, as {node: neighbors, ...}
            where neighbors is a set.

    Returns:
        dict: the coloring {node: color, ...}
        dict: the colors {color: {node, ...}, ...}

    Note:
        This is a greedy heuristic: the resulting coloring is not
        necessarily minimal.
    """
    coloring = {}
    colors = {}
    # Every uncolored node can initially take any of len(adj) colors.
    candidates = {node: set(range(len(adj))) for node in adj}
    while candidates:
        # Color the most constrained node first (fewest remaining colors).
        node = min(candidates, key=lambda v: len(candidates[v]))
        color = min(candidates[node])
        coloring[node] = color
        colors.setdefault(color, set()).add(node)
        # Neighbors may no longer use this color.
        for neighbor in adj[node]:
            if neighbor in candidates:
                candidates[neighbor].discard(color)
        del candidates[node]
    return coloring, colors
| 410,828
|
Fix the value of the variables and remove it from a binary quadratic model.
Args:
fixed (dict):
A dictionary of variable assignments.
Examples:
>>> bqm = dimod.BinaryQuadraticModel({'a': -.5, 'b': 0., 'c': 5}, {('a', 'b'): -1}, 0.0, dimod.SPIN)
>>> bqm.fix_variables({'a': -1, 'b': +1})
|
def fix_variables(self, fixed):
    """Fix the value of several variables, removing each from the model.

    Args:
        fixed (dict):
            A dictionary of variable assignments.

    Examples:
        >>> bqm = dimod.BinaryQuadraticModel({'a': -.5, 'b': 0., 'c': 5}, {('a', 'b'): -1}, 0.0, dimod.SPIN)
        >>> bqm.fix_variables({'a': -1, 'b': +1})
    """
    for variable, value in fixed.items():
        self.fix_variable(variable, value)
| 410,870
|
Determine the energies of the given samples.
Args:
samples_like (samples_like):
A collection of raw samples. `samples_like` is an extension of NumPy's array_like
structure. See :func:`.as_samples`.
dtype (:class:`numpy.dtype`):
The data type of the returned energies.
Returns:
:obj:`numpy.ndarray`: The energies.
|
def energies(self, samples_like, dtype=float):
    """Determine the energies of the given samples.

    Args:
        samples_like (samples_like):
            A collection of raw samples. `samples_like` is an extension of NumPy's array_like
            structure. See :func:`.as_samples`.
        dtype (:class:`numpy.dtype`):
            The data type of the returned energies. (The old default
            ``np.float`` was removed in NumPy 1.24; builtin float is the
            documented equivalent.)

    Returns:
        :obj:`numpy.ndarray`: The energies.
    """
    samples, labels = as_samples(samples_like)
    # Fast path: labels are already 0..n-1 in order, so no reindexing is needed.
    if all(v == idx for idx, v in enumerate(labels)):
        ldata, (irow, icol, qdata), offset = self.to_numpy_vectors(dtype=dtype)
    else:
        ldata, (irow, icol, qdata), offset = self.to_numpy_vectors(variable_order=labels, dtype=dtype)
    # E = linear + quadratic + offset, vectorized across all samples.
    energies = samples.dot(ldata) + (samples[:, irow]*samples[:, icol]).dot(qdata) + offset
    return np.asarray(energies, dtype=dtype)
| 410,880
|
Sample from the provided binary quadratic model.
Args:
bqm (:obj:`dimod.BinaryQuadraticModel`):
Binary quadratic model to be sampled from.
fixed_variables (dict):
A dictionary of variable assignments.
**parameters:
Parameters for the sampling method, specified by the child sampler.
Returns:
:obj:`dimod.SampleSet`
|
def sample(self, bqm, fixed_variables=None, **parameters):
    """Fix the given variables, sample the reduced problem on the child
    sampler, and re-attach the fixed variables to the result.

    Args:
        bqm (:obj:`dimod.BinaryQuadraticModel`):
            Binary quadratic model to be sampled from.
        fixed_variables (dict):
            A dictionary of variable assignments.
        **parameters:
            Parameters for the sampling method, specified by the child sampler.

    Returns:
        :obj:`dimod.SampleSet`
    """
    fixed_variables = fixed_variables or {}
    # Solve the reduced problem on the child sampler.
    reduced = bqm.copy()
    reduced.fix_variables(fixed_variables)
    sampleset = self.child.sample(reduced, **parameters)
    if len(sampleset):
        return sampleset.append_variables(fixed_variables)
    if fixed_variables:
        return type(sampleset).from_samples_bqm(fixed_variables, bqm=bqm)
    # No fixed variables and the sampleset is empty.
    return sampleset
| 410,898
|
Create a bqm with a gap of 2 that represents the product of two variables.
Note that spin-product requires an auxiliary variable.
Args:
variables (list):
multiplier, multiplicand, product, aux
Returns:
:obj:`.BinaryQuadraticModel`
|
def _spin_product(variables):
    """Build a gap-2 BQM enforcing product = multiplier * multiplicand over
    spin variables; spin products need one auxiliary variable.

    Args:
        variables (list):
            multiplier, multiplicand, product, aux

    Returns:
        :obj:`.BinaryQuadraticModel`
    """
    multiplier, multiplicand, product, aux = variables
    linear = {multiplier: -.5,
              multiplicand: -.5,
              product: -.5,
              aux: -1.}
    quadratic = {(multiplier, multiplicand): .5,
                 (multiplier, product): .5,
                 (multiplier, aux): 1.,
                 (multiplicand, product): .5,
                 (multiplicand, aux): 1.,
                 (product, aux): 1.}
    return BinaryQuadraticModel(linear, quadratic, 2., Vartype.SPIN)
| 410,917
|
Create a bqm with a gap of 2 that represents the product of two variables.
Args:
variables (list):
multiplier, multiplicand, product
Returns:
:obj:`.BinaryQuadraticModel`
|
def _binary_product(variables):
    """Build a gap-2 BQM enforcing product = multiplier * multiplicand over
    binary variables.

    Args:
        variables (list):
            multiplier, multiplicand, product

    Returns:
        :obj:`.BinaryQuadraticModel`
    """
    multiplier, multiplicand, product = variables
    linear = {multiplier: 0.0,
              multiplicand: 0.0,
              product: 3.0}
    quadratic = {(multiplier, multiplicand): 1.0,
                 (multiplier, product): -2.0,
                 (multiplicand, product): -2.0}
    return BinaryQuadraticModel(linear, quadratic, 0.0, Vartype.BINARY)
| 410,918
|
Calculates energy of a sample from a higher order polynomial.
Args:
sample (samples_like):
A raw sample. `samples_like` is an extension of NumPy's
array_like structure. See :func:`.as_samples`.
poly (dict):
Polynomial as a dict of form {term: bias, ...}, where `term` is a
tuple of variables and `bias` the associated bias.
Returns:
float: The energy of the sample.
|
def poly_energy(sample_like, poly):
    """Calculate the energy of a sample under a higher-order polynomial.

    Deprecated in favour of ``BinaryPolynomial.energy``.

    Args:
        sample_like (samples_like):
            A raw sample. `samples_like` is an extension of NumPy's
            array_like structure. See :func:`.as_samples`.
        poly (dict):
            Polynomial as a dict of form {term: bias, ...}, where `term` is a
            tuple of variables and `bias` the associated bias.

    Returns:
        float: The energy of the sample.
    """
    warnings.warn("poly_energy is deprecated and will be removed in dimod 0.9.0."
                  "In the future, use BinaryPolynomial.energy",
                  DeprecationWarning)
    # dev note the vartype is not used in the energy calculation and this will
    # be deprecated in the future
    return BinaryPolynomial(poly, 'SPIN').energy(sample_like)
| 410,921
|
Add a column to the table.
Args:
header (str):
Column header
f (function(datum)->str):
Makes the row string from the datum. Str returned by f should
have the same width as header.
|
def append(self, header, f, _left=False):
    """Add a column to the table.

    Args:
        header (str):
            Column header.
        f (function(datum)->str):
            Renders the row string for a datum; the returned string should
            have the same width as the header.
        _left (bool): when True the column is prepended instead of appended.
    """
    self.items_length += len(header)
    target = self.deque.appendleft if _left else self.deque.append
    target((header, f))
| 410,928
|
Sample from the binary polynomial and truncate output.
Args:
poly (obj:`.BinaryPolynomial`): A binary polynomial.
**kwargs:
Parameters for the sampling method, specified by the child
sampler.
Returns:
:obj:`dimod.SampleSet`
|
def sample_poly(self, poly, **kwargs):
    """Sample from the binary polynomial via the child sampler and truncate
    the output.

    Args:
        poly (obj:`.BinaryPolynomial`): A binary polynomial.
        **kwargs:
            Parameters for the sampling method, specified by the child
            sampler.

    Returns:
        :obj:`dimod.SampleSet`
    """
    sampleset = self.child.sample_poly(poly, **kwargs)
    if self._aggregate:
        sampleset = sampleset.aggregate()
    return sampleset.truncate(**self._truncate_kwargs)
| 410,974
|
Split an image into a specified number of tiles.
Args:
filename (str): The filename of the image to split.
number_tiles (int): The number of tiles required.
Kwargs:
save (bool): Whether or not to save tiles to disk.
Returns:
Tuple of :class:`Tile` instances.
|
def slice(filename, number_tiles=None, col=None, row=None, save=True):
    """Split an image into tiles.

    Either ``number_tiles`` or both ``col`` and ``row`` must be given.

    Args:
        filename (str): The filename of the image to split.
        number_tiles (int): The number of tiles required.
        col (int): explicit number of columns (used when number_tiles is None).
        row (int): explicit number of rows (used when number_tiles is None).

    Kwargs:
        save (bool): Whether or not to save tiles to disk.

    Returns:
        Tuple of :class:`Tile` instances.
    """
    im = Image.open(filename)
    im_w, im_h = im.size
    if number_tiles is not None:
        validate_image(im, number_tiles)
        columns, rows = calc_columns_rows(number_tiles)
    else:
        # BUG FIX: this branch used to compute `(columns * rows) - number_tiles`
        # with number_tiles == None, raising a TypeError; the unused `extras`
        # computation has been removed entirely.
        validate_image_col_row(im, col, row)
        columns, rows = col, row
    tile_w, tile_h = int(floor(im_w / columns)), int(floor(im_h / rows))
    tiles = []
    number = 1
    for pos_y in range(0, im_h - rows, tile_h):  # -rows for rounding error.
        for pos_x in range(0, im_w - columns, tile_w):  # as above.
            area = (pos_x, pos_y, pos_x + tile_w, pos_y + tile_h)
            image = im.crop(area)
            position = (int(floor(pos_x / tile_w)) + 1,
                        int(floor(pos_y / tile_h)) + 1)
            coords = (pos_x, pos_y)
            tiles.append(Tile(image, number, position, coords))
            number += 1
    if save:
        save_tiles(tiles,
                   prefix=get_basename(filename),
                   directory=os.path.dirname(filename))
    return tuple(tiles)
| 412,710
|
Write image files to disk. Create specified folder(s) if they
don't exist. Return list of :class:`Tile` instance.
Args:
tiles (list): List, tuple or set of :class:`Tile` objects to save.
prefix (str): Filename prefix of saved tiles.
Kwargs:
directory (str): Directory to save tiles. Created if non-existent.
Returns:
Tuple of :class:`Tile` instances.
|
def save_tiles(tiles, prefix='', directory=None, format='png'):
    """Write tile image files to disk.

    Args:
        tiles (list): List, tuple or set of :class:`Tile` objects to save.
        prefix (str): Filename prefix of saved tiles.

    Kwargs:
        directory (str): Directory to save tiles in; defaults to the current
            working directory at call time. (The old ``directory=os.getcwd()``
            default was evaluated once at import time, freezing whatever the
            working directory happened to be then.)
        format (str): image format to write.

    Returns:
        Tuple of :class:`Tile` instances.
    """
    if directory is None:
        directory = os.getcwd()
    # Causes problems in CLI script.
    # if not os.path.exists(directory):
    #     os.makedirs(directory)
    for tile in tiles:
        tile.save(filename=tile.generate_filename(prefix=prefix,
                                                  directory=directory,
                                                  format=format),
                  format=format)
    return tuple(tiles)
| 412,711
|
Set the current user's presence on the network. Supports :attr:`.Status.Online`, :attr:`.Status.Busy` or
:attr:`.Status.Hidden` (shown as :attr:`.Status.Offline` to others).
Args:
status (.Status): new availability to display to contacts
|
def setPresence(self, status=SkypeUtils.Status.Online):
    """Publish the current user's availability on the network.

    Supports :attr:`.Status.Online`, :attr:`.Status.Busy` or
    :attr:`.Status.Hidden` (shown as :attr:`.Status.Offline` to others).

    Args:
        status (.Status): new availability to display to contacts.
    """
    url = "{0}/users/ME/presenceDocs/messagingService".format(self.conn.msgsHost)
    self.conn("PUT", url, auth=SkypeConnection.Auth.RegToken,
              json={"status": status.label})
| 412,721
|
Update the activity message for the current user.
Args:
mood (str): new mood message
|
def setMood(self, mood):
    """Update the activity message for the current user.

    Args:
        mood (str): new mood message; a falsy value clears it.
    """
    url = "{0}/users/{1}/profile/partial".format(SkypeConnection.API_USER, self.userId)
    self.conn("POST", url, auth=SkypeConnection.Auth.SkypeToken,
              json={"payload": {"mood": mood or ""}})
    # Mirror the change on the cached user object.
    self.user.mood = SkypeUser.Mood(plain=mood) if mood else None
| 412,722
|
Update the profile picture for the current user.
Args:
image (file): a file-like object to read the image from
|
def setAvatar(self, image):
    """Update the profile picture for the current user.

    Args:
        image (file): a file-like object to read the image from.
    """
    url = "{0}/users/{1}/profile/avatar".format(SkypeConnection.API_USER, self.userId)
    self.conn("PUT", url, auth=SkypeConnection.Auth.SkypeToken, data=image.read())
| 412,723
|
Retrieve various metadata associated with a URL, as seen by Skype.
Args:
url (str): address to ping for info
Returns:
dict: metadata for the website queried
|
def getUrlMeta(self, url):
    """Retrieve various metadata associated with a URL, as seen by Skype.

    Args:
        url (str): address to ping for info.

    Returns:
        dict: metadata for the website queried.
    """
    resp = self.conn("GET", SkypeConnection.API_URL, params={"url": url},
                     auth=SkypeConnection.Auth.Authorize)
    return resp.json()
| 412,724
|
Attempt translation of a string. Supports automatic language detection if ``fromLang`` is not specified.
Args:
text (str): input text to be translated
toLang (str): country code of output language
fromLang (str): country code of input language
|
def __call__(self, text, toLang, fromLang=None):
    """Attempt translation of a string.

    Supports automatic language detection if ``fromLang`` is not specified.

    Args:
        text (str): input text to be translated.
        toLang (str): country code of output language.
        fromLang (str): country code of input language.

    Returns:
        dict: translation response from the API.
    """
    url = "{0}/skype/translate".format(SkypeConnection.API_TRANSLATE)
    params = {"from": fromLang or "", "to": toLang, "text": text}
    return self.skype.conn("GET", url, params=params,
                           auth=SkypeConnection.Auth.SkypeToken).json()
| 412,736
|
Retrieve all details for a specific contact, including fields such as birthday and mood.
Args:
id (str): user identifier to lookup
Returns:
SkypeContact: resulting contact object
|
def contact(self, id):
    """Retrieve all details for a specific contact, including fields such
    as birthday and mood.

    Args:
        id (str): user identifier to lookup

    Returns:
        SkypeContact: resulting contact object, or None if the user is not
        a contact of the current account.
    """
    try:
        json = self.skype.conn("POST", "{0}/users/batch/profiles".format(SkypeConnection.API_USER),
                               json={"usernames": [id]}, auth=SkypeConnection.Auth.SkypeToken).json()
        contact = SkypeContact.fromRaw(self.skype, json[0])
        # Track newly-seen contact ids on the container.
        if contact.id not in self.contactIds:
            self.contactIds.append(contact.id)
        return self.merge(contact)
    except SkypeApiException as e:
        # e.args[1] is the HTTP response when present; a 403 means the
        # profile is not visible to us.
        if len(e.args) >= 2 and getattr(e.args[1], "status_code", None) == 403:
            # Not a contact, so no permission to retrieve information.
            return None
        raise
| 412,740
|
Retrieve public information about a user.
Args:
id (str): user identifier to lookup
Returns:
SkypeUser: resulting user object
|
def user(self, id):
    """Retrieve public information about a user.

    Args:
        id (str): user identifier to lookup.

    Returns:
        SkypeUser: resulting user object, or None when nothing is found.
    """
    url = "{0}/batch/profiles".format(SkypeConnection.API_PROFILE)
    json = self.skype.conn("POST", url, auth=SkypeConnection.Auth.SkypeToken,
                           json={"usernames": [id]}).json()
    if not json or "status" in json[0]:
        return None
    return self.merge(SkypeUser.fromRaw(self.skype, json[0]))
| 412,741
|
Retrieve a single bot.
Args:
id (str): UUID or username of the bot
Returns:
SkypeBotUser: resulting bot user object
|
def bot(self, id):
    """Retrieve a single bot.

    Args:
        id (str): UUID or username of the bot.

    Returns:
        SkypeBotUser: resulting bot user object, or None when not found.
    """
    resp = self.skype.conn("GET", "{0}/agents".format(SkypeConnection.API_BOT),
                           params={"agentId": id},
                           auth=SkypeConnection.Auth.SkypeToken).json()
    agents = resp.get("agentDescriptions", [])
    if not agents:
        return None
    return self.merge(SkypeBotUser.fromRaw(self.skype, agents[0]))
| 412,743
|
Search the Skype Directory for a user.
Args:
query (str): name to search for
Returns:
SkypeUser list: collection of possible results
|
def search(self, query):
    """Search the Skype Directory for a user.

    Args:
        query (str): name to search for.

    Returns:
        SkypeUser list: collection of possible results.
    """
    resp = self.skype.conn("GET", SkypeConnection.API_DIRECTORY,
                           auth=SkypeConnection.Auth.SkypeToken,
                           params={"searchstring": query, "requestId": "0"}).json()
    return [SkypeUser.fromRaw(self.skype, entry.get("nodeProfileData", {}))
            for entry in resp.get("results", [])]
| 412,744
|
Instantiate a plain instance of this class, and store a reference to the Skype object for later API calls.
Normally this method won't be called or implemented directly.
Implementers should make use of :meth:`fromRaw` and the :meth:`initAttrs` decorator instead.
Args:
skype (Skype): parent Skype instance
raw (dict): raw object, as provided by the API
|
def __init__(self, skype=None, raw=None):
    """Store a reference to the parent Skype instance and the raw API payload.

    Normally :meth:`fromRaw` is used instead of calling this directly.

    Args:
        skype (Skype): parent Skype instance
        raw (dict): raw object, as provided by the API
    """
    self.raw = raw
    self.skype = skype
| 412,746
|
Create a new instance based on the raw properties of an API response.
This can be overridden to automatically create subclass instances based on the raw content.
Args:
skype (Skype): parent Skype instance
raw (dict): raw object, as provided by the API
Returns:
SkypeObj: the new class instance
|
def fromRaw(cls, skype=None, raw=None):
    """Create a new instance based on the raw properties of an API response.

    Args:
        skype (Skype): parent Skype instance
        raw (dict): raw object, as provided by the API

    Returns:
        SkypeObj: the new class instance
    """
    # Fix: avoid a shared mutable default argument -- substitute a fresh
    # dict per call instead of `raw={}` in the signature.
    raw = {} if raw is None else raw
    return cls(skype, raw, **cls.rawToFields(raw))
| 412,747
|
Copy properties from other into self, skipping ``None`` values. Also merges the raw data.
Args:
other (SkypeObj): second object to copy fields from
|
def merge(self, other):
    """Copy properties from other into self, skipping ``None`` values.
    Also merges the raw data.

    Args:
        other (SkypeObj): second object to copy fields from
    """
    for attr in self.attrs:
        # Idiom fix: `x is not None` instead of `not x is None`, and a
        # single getattr instead of two.
        value = getattr(other, attr, None)
        if value is not None:
            setattr(self, attr, value)
    if other.raw:
        if not self.raw:
            self.raw = {}
        self.raw.update(other.raw)
| 412,748
|
Create a new container object. The :attr:`synced` state and internal :attr:`cache` are initialised here.
Args:
skype (Skype): parent Skype instance
|
def __init__(self, skype=None):
    """Create a new container object with an empty cache.

    Args:
        skype (Skype): parent Skype instance
    """
    self.skype = skype
    self.cache = {}
    # Nothing has been fetched from the API yet.
    self.synced = False
| 412,751
|
Add a given object to the cache, or update an existing entry to include more fields.
Args:
obj (SkypeObj): object to add to the cache
|
def merge(self, obj):
    """Add an object to the cache, or fold its fields into an existing entry.

    Args:
        obj (SkypeObj): object to add to the cache

    Returns:
        SkypeObj: the cached object after merging
    """
    known = obj.id in self.cache
    if known:
        # Existing entry absorbs any new fields from the incoming object.
        self.cache[obj.id].merge(obj)
    else:
        self.cache[obj.id] = obj
    return self.cache[obj.id]
| 412,754
|
Create a new enumeration. The parent enum creates an instance for each item.
Args:
label (str): enum name
names (list): item labels
path (list): qualified parent name, for :func:`repr` output
|
def __init__(self, label, names=(), path=None):
    """Create a new enumeration; one child instance is created per item.

    Args:
        label (str): enum name
        names (list): item labels
        path (list): qualified parent name, for :func:`repr` output
    """
    self.label = label
    self.names = names
    self.path = path
    for name in names:
        # Each item becomes an attribute holding a leaf instance (no names
        # of its own), with the qualified path extended for repr() output.
        setattr(self, name, self.__class__(name, path="{0}.{1}".format(path, label) if path else label))
| 412,755
|
Method decorator: if a given status code is received, re-authenticate and try again.
Args:
codes (int list): status codes to respond to
regToken (bool): whether to try retrieving a new token on error
Returns:
method: decorator function, ready to apply to other methods
|
def handle(*codes, **kwargs):
    """Method decorator: if a given status code is received, re-authenticate
    and try the request again.

    Args:
        codes (int list): status codes to respond to
        regToken (bool): whether to try retrieving a new token on error
        subscribe (str): endpoint name to re-subscribe on error, if set

    Returns:
        method: decorator function, ready to apply to other methods
    """
    regToken = kwargs.get("regToken", False)
    subscribe = kwargs.get("subscribe")
    def decorator(fn):
        @functools.wraps(fn)
        def wrapper(self, *args, **kwargs):
            try:
                return fn(self, *args, **kwargs)
            except SkypeApiException as e:
                if isinstance(e.args[1], requests.Response) and e.args[1].status_code in codes:
                    # Works both when applied to SkypeConnection methods and
                    # to methods of objects holding a connection in .conn.
                    conn = self if isinstance(self, SkypeConnection) else self.conn
                    if regToken:
                        conn.getRegToken()
                    if subscribe:
                        conn.endpoints[subscribe].subscribe()
                    # Retry exactly once after refreshing credentials.
                    return fn(self, *args, **kwargs)
                raise
        return wrapper
    return decorator
| 412,761
|
Replace the stub :meth:`getSkypeToken` method with one that connects via the Microsoft account flow using the
given credentials. Avoids storing the account password in an accessible way.
Args:
user (str): username or email address of the connecting account
pwd (str): password of the connecting account
|
def setUserPwd(self, user, pwd):
    """Replace the stub :meth:`getSkypeToken` with one that connects via the
    Microsoft account flow using the given credentials.

    The password is held in a closure rather than stored as an attribute,
    avoiding keeping it in an accessible place.

    Args:
        user (str): username or email address of the connecting account
        pwd (str): password of the connecting account
    """
    def getSkypeToken(conn):
        conn.liveLogin(user, pwd)
    # Bind the closure as an instance method, overriding the stub.
    self.getSkypeToken = MethodType(getSkypeToken, self)
| 412,767
|
Ensure the authentication token for the given auth method is still valid.
Args:
auth (Auth): authentication type to check
Raises:
.SkypeAuthException: if Skype auth is required, and the current token has expired and can't be renewed
|
def verifyToken(self, auth):
    """Ensure the authentication token for the given auth method is still
    valid, renewing it if necessary.

    Args:
        auth (Auth): authentication type to check

    Raises:
        .SkypeAuthException: if Skype auth is required, and the current
            token has expired and can't be renewed
    """
    if auth in (self.Auth.SkypeToken, self.Auth.Authorize):
        if "skype" not in self.tokenExpiry or datetime.now() >= self.tokenExpiry["skype"]:
            renew = getattr(self, "getSkypeToken", None)
            if renew is None:
                # No credentials available to obtain a fresh token.
                raise SkypeAuthException("Skype token expired, and no password specified")
            renew()
    elif auth == self.Auth.RegToken:
        if "reg" not in self.tokenExpiry or datetime.now() >= self.tokenExpiry["reg"]:
            self.getRegToken()
| 412,770
|
Query a username or email address to see if a corresponding Microsoft account exists.
Args:
user (str): username or email address of an account
Returns:
bool: whether the account exists
|
def checkUser(self, user):
    """Query a username or email address to see if a corresponding Microsoft
    account exists.

    Args:
        user (str): username or email address of an account

    Returns:
        bool: whether the account exists
    """
    url = "{0}/GetCredentialType.srf".format(SkypeConnection.API_MSACC)
    resp = self.conn("POST", url, json={"username": user}).json()
    # A falsy "IfExistsResult" corresponds to an existing account.
    return not resp.get("IfExistsResult")
| 412,778
|
Take an existing Skype token and refresh it, to extend the expiry time without other credentials.
Args:
token (str): existing Skype token
Returns:
(str, datetime.datetime) tuple: Skype token, and associated expiry if known
Raises:
.SkypeAuthException: if the login request is rejected
.SkypeApiException: if the login form can't be processed
|
def auth(self, token):
    """Take an existing Skype token and refresh it, to extend the expiry
    time without other credentials.

    Args:
        token (str): existing Skype token

    Returns:
        (str, datetime.datetime) tuple: Skype token, and associated expiry
            if known

    Raises:
        .SkypeAuthException: if the login request is rejected
        .SkypeApiException: if the login form can't be processed
    """
    return self.getToken(self.sendToken(token))
| 412,784
|
Create a new instance based on a newly-created endpoint identifier.
Args:
conn (SkypeConnection): parent connection instance
id (str): endpoint identifier as generated by the API
|
def __init__(self, conn, id):
    """Wrap a newly-created endpoint identifier.

    Args:
        conn (SkypeConnection): parent connection instance
        id (str): endpoint identifier as generated by the API
    """
    super(SkypeEndpoint, self).__init__()
    self.id = id
    self.conn = conn
    # No presence subscription exists until subscribe() is called.
    self.subscribed = False
| 412,788
|
Configure this endpoint to allow setting presence.
Args:
name (str): display name for this endpoint
|
def config(self, name="skype"):
self.conn("PUT", "{0}/users/ME/endpoints/{1}/presenceDocs/messagingService"
.format(self.conn.msgsHost, self.id),
auth=SkypeConnection.Auth.RegToken,
json={"id": "messagingService",
"type": "EndpointPresenceDoc",
"selfLink": "uri",
"privateInfo": {"epname": name},
"publicInfo": {"capabilities": "",
"type": 1,
"skypeNameVersion": "skype.com",
"nodeInfo": "xx",
"version": "908/1.30.0.128"}})
| 412,789
|
Send a keep-alive request for the endpoint.
Args:
timeout (int): maximum amount of time for the endpoint to stay active
|
def ping(self, timeout=12):
    """Send a keep-alive request for the endpoint.

    Args:
        timeout (int): maximum amount of time for the endpoint to stay active
    """
    url = "{0}/users/ME/endpoints/{1}/active".format(self.conn.msgsHost, self.id)
    self.conn("POST", url, auth=SkypeConnection.Auth.RegToken,
              json={"timeout": timeout})
| 412,790
|
Get a single conversation by identifier.
Args:
id (str): single or group chat identifier
|
def chat(self, id):
    """Get a single conversation by identifier.

    Args:
        id (str): single or group chat identifier
    """
    params = {"view": "msnp24Equivalent"}
    raw = self.skype.conn("GET", "{0}/users/ME/conversations/{1}".format(self.skype.conn.msgsHost, id),
                          auth=SkypeConnection.Auth.RegToken, params=params).json()
    if "threadProperties" not in raw:
        return self.merge(SkypeSingleChat.fromRaw(self.skype, raw))
    # Group chats carry extra state on a separate thread resource.
    info = self.skype.conn("GET", "{0}/threads/{1}".format(self.skype.conn.msgsHost, raw.get("id")),
                           auth=SkypeConnection.Auth.RegToken, params=params).json()
    raw.update(info)
    return self.merge(SkypeGroupChat.fromRaw(self.skype, raw))
| 412,794
|
Create a new group chat with the given users.
The current user is automatically added to the conversation as an admin. Any other admin identifiers must also
be present in the member list.
Args:
members (str list): user identifiers to initially join the conversation
admins (str list): user identifiers to gain admin privileges
|
def create(self, members=(), admins=()):
    """Create a new group chat with the given users.

    The current user is automatically added to the conversation as an admin.
    Any other admin identifiers must also be present in the member list.

    Args:
        members (str list): user identifiers to initially join the conversation
        admins (str list): user identifiers to gain admin privileges
    """
    roster = [{"id": "8:{0}".format(self.skype.userId), "role": "Admin"}]
    roster += [{"id": "8:{0}".format(uid),
                "role": "Admin" if uid in admins else "User"}
               for uid in members if uid != self.skype.userId]
    resp = self.skype.conn("POST", "{0}/threads".format(self.skype.conn.msgsHost),
                           auth=SkypeConnection.Auth.RegToken, json={"members": roster})
    # The new conversation's identifier is the last path segment of the
    # Location header.
    return self.chat(resp.headers["Location"].rsplit("/", 1)[1])
| 412,795
|
Resolve a ``join.skype.com`` URL and returns various identifiers for the group conversation.
Args:
url (str): public join URL, or identifier from it
Returns:
dict: related conversation's identifiers -- keys: ``id``, ``long``, ``blob``
|
def urlToIds(url):
    """Resolve a ``join.skype.com`` URL and return various identifiers for
    the group conversation.

    Args:
        url (str): public join URL, or identifier from it

    Returns:
        dict: related conversation's identifiers -- keys: ``id``, ``long``,
            ``blob``
    """
    shortId = url.rsplit("/", 1)[-1]
    resp = SkypeConnection.externalCall("POST", "https://join.skype.com/api/v2/conversation/",
                                        json={"shortId": shortId, "type": "wl"}).json()
    return {"id": resp.get("Resource"),
            "long": resp.get("Id"),
            "blob": resp.get("ChatBlob")}
| 412,796
|
Extract the username from a contact URL.
Matches addresses containing ``users/<user>`` or ``users/ME/contacts/<user>``.
Args:
url (str): Skype API URL
Returns:
str: extracted identifier
|
def userToId(url):
    """Extract the username from a contact URL.

    Matches addresses containing ``users/<user>`` or
    ``users/ME/contacts/<user>``.

    Args:
        url (str): Skype API URL

    Returns:
        str: extracted identifier, or None when the URL doesn't match
    """
    found = re.search(r"users(/ME/contacts)?/[0-9]+:([^/]+)", url)
    if found:
        return found.group(2)
    return None
| 412,797
|
Extract the conversation ID from a conversation URL.
Matches addresses containing ``conversations/<chat>``.
Args:
url (str): Skype API URL
Returns:
str: extracted identifier
|
def chatToId(url):
    """Extract the conversation ID from a conversation URL.

    Matches addresses containing ``conversations/<chat>``.

    Args:
        url (str): Skype API URL

    Returns:
        str: extracted identifier, or None when the URL doesn't match
    """
    found = re.search(r"conversations/([0-9]+:[^/]+)", url)
    return None if found is None else found.group(1)
| 412,798
|
Class decorator: automatically generate an ``__init__`` method that expects args from cls.attrs and stores them.
Args:
cls (class): class to decorate
Returns:
class: same, but modified, class
|
def initAttrs(cls):
    """Class decorator: automatically generate an ``__init__`` method that
    expects args from cls.attrs and stores them.

    Args:
        cls (class): class to decorate

    Returns:
        class: same, but modified, class
    """
    def __init__(self, skype=None, raw=None, *args, **kwargs):
        super(cls, self).__init__(skype, raw)
        # Merge positional args into kwargs based on cls.attrs.
        # (enumerate instead of range(len(args)); extra positionals still
        # raise IndexError, matching the original behaviour.)
        for i, value in enumerate(args):
            kwargs[cls.attrs[i]] = value
        # Disallow any unknown kwargs.
        unknown = set(kwargs) - set(cls.attrs)
        if unknown:
            unknownDesc = "an unexpected keyword argument" if len(unknown) == 1 else "unexpected keyword arguments"
            unknownList = ", ".join("'{0}'".format(k) for k in sorted(unknown))
            raise TypeError("__init__() got {0} {1}".format(unknownDesc, unknownList))
        # Set each attribute from kwargs, or use the class default if absent.
        for k in cls.attrs:
            setattr(self, k, kwargs.get(k, cls.defaults.get(k)))
    # Add the generated init method to the class.
    setattr(cls, "__init__", __init__)
    return cls
| 412,799
|
Class decorator: override __bool__ to set truthiness based on any attr being present.
Args:
cls (class): class to decorate
Returns:
class: same, but modified, class
|
def truthyAttrs(cls):
    """Class decorator: override ``__bool__`` so an instance is truthy when
    any attribute in ``cls.attrs`` holds a truthy value.

    Args:
        cls (class): class to decorate

    Returns:
        class: same, but modified, class
    """
    def asBool(self):
        for attr in self.attrs:
            if getattr(self, attr):
                return True
        return False
    # __nonzero__ covers Python 2 truthiness.
    cls.__bool__ = cls.__nonzero__ = asBool
    return cls
| 412,801
|
Method decorator: calculate the value on first access, produce the cached value thereafter.
If the function takes arguments, the cache is a dictionary using all arguments as the key.
Args:
fn (method): function to decorate
Returns:
method: wrapper function with caching
|
def cacheResult(fn):
    """Method decorator: compute the value on first access, then serve the
    cached value on subsequent calls.

    If the function takes arguments, the cache is keyed on all of them;
    calls with unhashable arguments bypass the cache entirely.

    Args:
        fn (method): function to decorate

    Returns:
        method: wrapper function with caching
    """
    store = {}
    @functools.wraps(fn)
    def cached(*args, **kwargs):
        # Imperfect key generation: positional vs keyword spellings of the
        # same call produce different keys.
        key = args + tuple(kwargs.items())
        try:
            hit = key in store
        except TypeError:
            # Key is unhashable, so caching is impossible -- call through.
            return fn(*args, **kwargs)
        if not hit:
            store[key] = fn(*args, **kwargs)
        return store[key]
    # Make the cache accessible externally.
    cached.cache = store
    return cached
| 412,802
|
Write code object as a byte-compiled file
Arguments:
codeobject: code object
     filename: bytecode file to write
timestamp: timestamp to put in file
     magic: Python bytecode magic
|
def dump_compile(codeobject, filename, timestamp, magic):
    """Write a code object out as a byte-compiled file.

    The file is written to a temporary path and renamed into place so
    readers never see a partially-written file. The magic number is only
    filled in after the body is flushed, so an interrupted write leaves an
    obviously-invalid four-zero-byte header.

    Args:
        codeobject: code object to serialise
        filename: path of the bytecode file to write
        timestamp: source timestamp to embed in the header
        magic: Python bytecode magic number for the target interpreter
    """
    # Atomically write the pyc/pyo file. Issue #13146.
    # id() is used to generate a pseudo-random filename.
    path_tmp = '%s.%s' % (filename, id(filename))
    fc = None
    try:
        fc = open(path_tmp, 'wb')
        # Placeholder magic; overwritten with the real value below once
        # marshalling has succeeded.
        if PYTHON3:
            fc.write(bytes([0, 0, 0, 0]))
        else:
            fc.write('\0\0\0\0')
        wr_long(fc, timestamp)
        marshal.dump(codeobject, fc)
        fc.flush()
        # Seek back and stamp the real magic number into the header.
        fc.seek(0, 0)
        fc.write(magic)
        fc.close()
        os.rename(path_tmp, filename)
    except OSError:
        # Best-effort cleanup of the partial temp file, then re-raise.
        try:
            os.unlink(path_tmp)
        except OSError:
            pass
        raise
    finally:
        if fc: fc.close()
| 413,197
|
Parse a URL query string and return the components as a dictionary.
 Based on the cgi.parse_qs method. This is a utility function provided
with urlparse so that users need not use cgi module for
parsing the url query string.
Arguments:
:type url: str
:param url: URL with query string to be parsed
|
def _urlparse_qs(url):
    """Parse a URL query string and return the components as a dictionary.

    Based on the cgi.parse_qs method; provided here so users need not pull
    in the cgi module just to parse a URL query string.

    Arguments:
        :type url: str
        :param url: URL with query string to be parsed
    """
    # Component 4 of the urlparse 6-tuple is the query string.
    querystring = urlparse(url)[4]
    # Pairs may be delimited by either '&' or ';'.
    pairs = []
    for chunk in querystring.split('&'):
        pairs.extend(chunk.split(';'))
    result = OrderedDefaultDict(list)
    for pair in pairs:
        name, sep, value = pair.partition('=')
        # Skip fragments with no '=' as well as pairs with an empty value.
        if sep and value:
            key = _unquote(name.replace('+', ' '))
            result[key].append(_unquote(value.replace('+', ' ')))
    return result
| 413,503
|
BIP143 hashSequence implementation
Args:
sighash_type (int): SIGHASH_SINGLE or SIGHASH_ALL
anyone_can_pay (bool): true if ANYONECANPAY should be set
Returns:
(bytes): the hashSequence, a 32 byte hash
|
def _hash_sequence(self, sighash_type, anyone_can_pay):
    """BIP143 hashSequence implementation.

    Args:
        sighash_type (int): SIGHASH_SINGLE or SIGHASH_ALL
        anyone_can_pay (bool): true if ANYONECANPAY should be set

    Returns:
        (bytes): the hashSequence, a 32 byte hash
    """
    if anyone_can_pay or sighash_type == shared.SIGHASH_SINGLE:
        # ANYONECANPAY or SINGLE: hashSequence is a uint256 of zeros.
        return b'\x00' * 32
    # Otherwise: double SHA256 of the concatenated nSequence of all inputs.
    seq_data = ByteData()
    for txin in self.tx_ins:
        seq_data += txin.sequence
    return utils.hash256(seq_data.to_bytes())
| 414,030
|
 Checks if the script code passed in to the sighash function is already
length-prepended
This will break if there's a redeem script that's just a pushdata
That won't happen in practice
Args:
script (bytes): the spend script
Returns:
(bytes): the length-prepended script (if necessary)
|
def _adjusted_script_code(self, script):
    """Length-prepend a spend script for sighash calculation, unless it
    already appears to carry its own length prefix.

    NOTE(review): the check inspects only the first byte, so a redeem
    script that is itself a bare pushdata would be misdetected -- the
    original author judged this won't happen in practice.

    Args:
        script (bytes): the spend script

    Returns:
        (bytes): the length-prepended script (if necessary)
    """
    if script[0] == len(script) - 1:
        return script
    prefixed = ByteData()
    prefixed += VarInt(len(script))
    prefixed += script
    return prefixed
| 414,031
|
BIP143 hashOutputs implementation
Args:
index (int): index of input being signed
sighash_type (int): SIGHASH_SINGLE or SIGHASH_ALL
Returns:
(bytes): the hashOutputs, a 32 byte hash
|
def _hash_outputs(self, index, sighash_type):
    """BIP143 hashOutputs implementation.

    Args:
        index (int): index of input being signed
        sighash_type (int): SIGHASH_SINGLE or SIGHASH_ALL

    Returns:
        (bytes): the hashOutputs, a 32 byte hash
    """
    if sighash_type == shared.SIGHASH_ALL:
        # ALL commits to every output (amount paired with scriptPubKey).
        serialized = ByteData()
        for txo in self.tx_outs:
            serialized += txo.to_bytes()
        return utils.hash256(serialized.to_bytes())
    if sighash_type == shared.SIGHASH_SINGLE and index < len(self.tx_outs):
        # SINGLE commits only to the output at the same index as the input.
        return utils.hash256(self.tx_outs[index].to_bytes())
    # SINGLE with index >= len(tx_outs) would trigger the upstream
    # consensus bug; deliberately unsupported here.
    raise NotImplementedError(
        'I refuse to implement the SIGHASH_SINGLE bug.')
| 414,032
|
Convert int to signed little endian (l.e.) hex for scripts
Args:
number (int): int value to convert to bytes in l.e. format
Returns:
(str): the hex-encoded signed LE number
|
def i2le_script(number):
    """Convert an int to signed little-endian hex for use in scripts.

    Args:
        number (int): int value to convert to bytes in LE format

    Returns:
        (str): the hex-encoded signed LE number, or None if the value does
            not fit within the probed widths
    """
    if number == 0:
        return '00'
    # Probe increasing byte widths for the minimal signed representation;
    # int.to_bytes raises OverflowError while the value doesn't fit.
    # Fix: catch only OverflowError instead of a blanket Exception, which
    # could hide unrelated bugs.
    for length in range(1, 80):
        try:
            return number.to_bytes(
                length=length,
                byteorder='little',
                signed=True).hex()
        except OverflowError:
            continue
    # Matches the original fall-through behaviour for oversized values.
    return None
| 414,122
|
Construct a fully-signed segwit transaction
Args:
tx_ins list(TxIn instances): list of transaction inputs
tx_outs list(TxOut instances): list of transaction outputs
         tx_witnesses list(TxWitness instances): list of transaction witnesses
**kwargs:
version (int): transaction version number
locktime (hex): transaction locktime
Returns:
(Tx instance): signed transaction with witnesses
|
def witness_tx(tx_ins, tx_outs, tx_witnesses, **kwargs):
    """Construct a fully-signed segwit transaction.

    Args:
        tx_ins (list(TxIn)): list of transaction inputs
        tx_outs (list(TxOut)): list of transaction outputs
        tx_witnesses (list(TxWitness)): list of transaction witnesses
        **kwargs:
            version (int): transaction version number
            lock_time (hex): transaction locktime

    Returns:
        (Tx instance): signed transaction with witnesses
    """
    # Parse legacy scripts AND witness scripts for OP_CLTV
    # NOTE(review): the guard checks `tx_in is not None` but still reads
    # tx_in.redeem_script -- it looks like the intent was to skip inputs
    # whose redeem_script is None; confirm against callers before changing.
    deser = [script_ser.deserialize(tx_in.redeem_script) for tx_in in tx_ins
             if tx_in is not None]
    for w in tx_witnesses:
        try:
            deser.append(script_ser.deserialize(w.stack[-1].item))
        except (NotImplementedError, ValueError):
            # Witness items that aren't parseable scripts are ignored.
            pass
    version = max([guess_version(d) for d in deser])
    if 'lock_time' in kwargs:
        lock_time = kwargs['lock_time']
    else:
        # Derive the locktime from the highest requirement in any script.
        lock_time = max([guess_locktime(d) for d in deser])
    return tb.make_tx(
        version=version,
        tx_ins=tx_ins,
        tx_outs=tx_outs,
        lock_time=lock_time,
        tx_witnesses=tx_witnesses)
| 414,156
|
Prepares a set of setup, test, and teardown code to be
run in the console.
PARAMETERS:
code -- list; processed lines of code. Elements in the list are
either strings (input) or CodeAnswer objects (output)
setup -- str; raw setup code
teardown -- str; raw teardown code
|
def load(self, code, setup='', teardown=''):
    """Prepare a set of setup, test, and teardown code to be run in the
    console.

    Delegates parsing to the parent implementation, then resets the
    interpreter frame to a copy of the original so state from earlier runs
    doesn't leak into this one.

    PARAMETERS:
    code     -- list; processed lines of code. Elements in the list are
                either strings (input) or CodeAnswer objects (output)
    setup    -- str; raw setup code
    teardown -- str; raw teardown code
    """
    super().load(code, setup, teardown)
    self._frame = self._original_frame.copy()
| 415,122
|
Unlocks the CodeCase.
PARAMETERS:
unique_id_prefix -- string; a prefix of a unique identifier for this
Case, for purposes of analytics.
case_id -- string; an identifier for this Case, for purposes of
analytics.
interact -- function; handles user interaction during the unlocking
phase.
|
def unlock(self, unique_id_prefix, case_id, interact):
    """Unlocks the CodeCase by prompting the user for each locked output.

    PARAMETERS:
    unique_id_prefix -- string; a prefix of a unique identifier for this
                        Case, for purposes of analytics.
    case_id          -- string; an identifier for this Case, for purposes
                        of analytics.
    interact         -- function; handles user interaction during the
                        unlocking phase.
    """
    print(self.setup.strip())
    prompt_num = 0
    current_prompt = []
    try:
        for line in self.lines:
            if isinstance(line, str) and line:
                # Echo input lines and accumulate them as context for the
                # next prompt.
                print(line)
                current_prompt.append(line)
            elif isinstance(line, CodeAnswer):
                prompt_num += 1
                if not line.locked:
                    # Already unlocked: just display the known output.
                    print('\n'.join(line.output))
                    continue
                unique_id = self._construct_unique_id(unique_id_prefix, self.lines)
                # Ask the user for this prompt's answer; interact() returns
                # the verified output.
                line.output = interact(unique_id,
                                       case_id + ' > Prompt {}'.format(prompt_num),
                                       '\n'.join(current_prompt),
                                       line.output, line.choices)
                line.locked = False
                # Start a fresh context for the next prompt.
                current_prompt = []
        self.locked = False
    finally:
        # Persist any progress even if unlocking was interrupted.
        self._sync_code()
| 415,182
|
Splits the given string of code based on the provided PS1 and PS2
symbols.
PARAMETERS:
code -- str; lines of interpretable code, using PS1 and PS2 prompts
PS1 -- str; first-level prompt symbol
PS2 -- str; second-level prompt symbol
RETURN:
list; a processed sequence of lines corresponding to the input code.
|
def split_code(cls, code, PS1, PS2):
    """Split the given string of code based on the provided PS1 and PS2
    prompt symbols.

    PARAMETERS:
    code -- str; lines of interpretable code, using PS1 and PS2 prompts
    PS1  -- str; first-level prompt symbol
    PS2  -- str; second-level prompt symbol

    RETURN:
    list; a processed sequence of lines corresponding to the input code.
    """
    result = []
    for raw_line in textwrap.dedent(code).splitlines():
        if raw_line and not raw_line.startswith(PS1) and not raw_line.startswith(PS2):
            # An unprompted, non-empty line is console output: fold it into
            # the latest CodeAnswer, creating one if the last entry was input.
            assert len(result) > 0, 'code improperly formatted: {}'.format(code)
            if not isinstance(result[-1], CodeAnswer):
                result.append(CodeAnswer())
            result[-1].update(raw_line)
        else:
            # Prompted input lines (and blanks) are kept as plain strings.
            result.append(raw_line)
    return result
| 415,183
|
Prepares a set of setup, test, and teardown code to be
run in the console.
PARAMETERS:
code -- list; processed lines of code. Elements in the list are
either strings (input) or CodeAnswer objects (output)
setup -- str; raw setup code
teardown -- str; raw teardown code
|
def load(self, code, setup='', teardown=''):
    """Prepare a set of setup, test, and teardown code to be run in the
    console.

    PARAMETERS:
    code     -- list; processed lines of code. Elements in the list are
                either strings (input) or CodeAnswer objects (output)
    setup    -- str; raw setup code
    teardown -- str; raw teardown code
    """
    self._code = code
    # Raw setup/teardown strings are dedented and stored line-by-line.
    self._setup = textwrap.dedent(setup).splitlines()
    self._teardown = textwrap.dedent(teardown).splitlines()
| 415,187
|
Formats a Python-object into a string in a JSON like way, but
uses triple quotes for multiline strings.
PARAMETERS:
json -- Python object that is serializable into json.
indentation -- str; represents one level of indentation
NOTES:
All multiline strings are treated as raw strings.
RETURNS:
str; the formatted json-like string.
|
def prettyjson(json, indentation='  '):
    """Formats a Python-object into a string in a JSON like way, but
    uses triple quotes for multiline strings.

    PARAMETERS:
    json        -- Python object that is serializable into json.
    indentation -- str; represents one level of indentation

    NOTES:
    All multiline strings are treated as raw strings.

    RETURNS:
    str; the formatted json-like string.
    """
    if isinstance(json, int) or isinstance(json, float):
        return str(json)
    elif isinstance(json, str):
        if '\n' in json:
            # Bug fix: this branch previously returned the literal string
            # 'r', discarding the content entirely. Emit a raw
            # triple-quoted block, as the docstring promises.
            return 'r"""\n' + json + '\n"""'
        return repr(json)
    elif isinstance(json, list):
        lst = [indent(prettyjson(el, indentation), indentation) for el in json]
        return '[\n' + ',\n'.join(lst) + '\n]'
    elif isinstance(json, dict):
        pairs = []
        for k, v in sorted(json.items()):
            k = prettyjson(k, indentation)
            v = prettyjson(v, indentation)
            pairs.append(indent(k + ': ' + v, indentation))
        return '{\n' + ',\n'.join(pairs) + '\n}'
    else:
        raise exceptions.SerializeException('Invalid json type: {}'.format(json))
| 415,201
|
Returns the number of seconds since the UNIX epoch for the given
datetime (dt).
PARAMETERS:
dt -- datetime
|
def unix_time(self, dt):
    """Returns the number of seconds since the UNIX epoch for the given
    naive UTC datetime (dt).

    PARAMETERS:
    dt -- datetime
    """
    epoch_start = datetime.utcfromtimestamp(0)
    return int((dt - epoch_start).total_seconds())
| 415,246
|
Dumps all tests, as determined by their .dump() method.
PARAMETERS:
tests -- dict; file -> Test. Each Test object has a .dump method
that takes a filename and serializes the test object.
|
def dump_tests(self):
    """Dumps all tests, as determined by their .dump() method.

    Serialization failures are logged as warnings rather than raised, so
    one bad test doesn't prevent the rest from being dumped.
    """
    log.info('Dumping tests')
    for test in self.test_map.values():
        try:
            test.dump()
        except ex.SerializeException as e:
            log.warning('Unable to dump {}: {}'.format(test.name, str(e)))
        else:
            # Only log success when dump() completed without error.
            log.info('Dumped {}'.format(test.name))
| 415,302
|
Constructor.
PARAMETERS:
args -- Namespace; parsed command line arguments by argparse.
assignment -- dict; general information about the assignment.
|
def __init__(self, args, assignment):
    """Constructor.

    PARAMETERS:
    args       -- Namespace; parsed command line arguments by argparse.
    assignment -- dict; general information about the assignment.
    """
    self.assignment = assignment
    self.args = args
| 415,317
|
Constructor for a List field.
PARAMETERS:
type -- type; if type is None, the List can be heterogeneous.
Otherwise, the List must be homogeneous with elements
of the specified type.
|
def __init__(self, type=None, **kargs):
    """Constructor for a List field.

    PARAMETERS:
    type -- type; if type is None, the List can be heterogeneous.
            Otherwise, the List must be homogeneous with elements
            of the specified type.
    """
    super().__init__(**kargs)
    # None means "any element type allowed".
    self._type = type
| 415,321
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.