_id stringlengths 2 7 | title stringlengths 1 88 | partition stringclasses 3
values | text stringlengths 75 19.8k | language stringclasses 1
value | meta_information dict |
|---|---|---|---|---|---|
def join_lines(string, strip=Strip.BOTH):
    """Concatenate the lines of *string* into a single string.

    ``strip`` is a ``Strip`` flag choosing which side(s) of each line
    to strip of whitespace before joining.
    """
    def _clean(line):
        # Strip each requested side independently.
        if strip & Strip.RIGHT:
            line = line.rstrip()
        if strip & Strip.LEFT:
            line = line.lstrip()
        return line

    return ''.join(_clean(line) for line in string.splitlines())
async def json_or_text(response):
    """Return the response body decoded as JSON when the Content-Type
    declares JSON, otherwise as plain text."""
    body = await response.text()
    content_type = response.headers['Content-Type']
    if content_type == 'application/json; charset=utf-8':
        return json.loads(body)
    return body
async def limited(until):
    """Log the message shown when we are ratelimited.

    :param until:
        Unix timestamp at which the ratelimit quota resets.
    """
    duration = int(round(until - time.time()))
    mins = duration / 60
    fmt = 'We have exhausted a ratelimit quota. Retrying in %.2f seconds (%.3f minutes).'
    # Logger.warn() is a deprecated alias; use warning() instead.
    log.warning(fmt, duration, mins)
q270803 | HTTPClient.request | test | async def request(self, method, url, **kwargs):
"""Handles requests to the API"""
rate_limiter = RateLimiter(max_calls=59, period=60, callback=limited)
# handles ratelimits. max_calls is set to 59 because current implementation will retry in 60s after 60 calls is reached. DBL has a 1h block so obviously this doesn't work well, as it will get a 429 when 60 is reached.
async with rate_limiter: # this works but doesn't 'save' over restart. need a better implementation.
# Fail fast before hitting the network if no API token is configured.
if not self.token:
raise UnauthorizedDetected('UnauthorizedDetected (status code: 401): No TOKEN provided')
headers = {
'User-Agent': self.user_agent,
'Content-Type': 'application/json'
}
# A 'json' kwarg is serialized into the request body ('data').
if 'json' in kwargs:
kwargs['data'] = to_json(kwargs.pop('json'))
kwargs['headers'] = headers
headers['Authorization'] = self.token
# Up to 5 attempts: 429 responses sleep and retry, other errors raise.
for tries in range(5):
async with self.session.request(method, url, **kwargs) as resp:
log.debug('%s %s with %s has returned %s', method,
url, kwargs.get('data'), resp.status)
data = await json_or_text(resp)
# 2xx: success — return the decoded payload.
if 300 > resp.status >= 200:
return data
if resp.status == 429: # we are being ratelimited
fmt = 'We are being rate limited. Retrying in %.2f seconds (%.3f minutes).'
# sleep a bit
retry_after = json.loads(resp.headers.get('Retry-After'))
mins = retry_after / 60
log.warning(fmt, retry_after, mins)
# check if it's a global rate limit (True as only 1 ratelimit atm - /api/bots)
is_global = True # is_global = data.get('global', False)
if is_global:
self._global_over.clear()
await asyncio.sleep(retry_after, loop=self.loop)
log.debug('Done sleeping for the rate limit. Retrying...')
# release the global lock now that the
# global rate limit has passed
if is_global:
self._global_over.set()
log.debug('Global rate limit is now over.')
continue
# Map well-known error statuses to specific exception types.
if resp.status == 400:
raise HTTPException(resp, data)
elif resp.status == 401:
raise Unauthorized(resp, data)
elif resp.status == 403:
raise Forbidden(resp, data)
elif resp.status == 404:
raise NotFound(resp, data)
else:
raise HTTPException(resp, data)
# We've run out of retries, raise.
raise HTTPException(resp, data) | python | {
"resource": ""
} |
async def get_bot_info(self, bot_id):
    """Fetch the API record for the bot with the given ID."""
    url = '{}/bots/{}'.format(self.BASE, bot_id)
    info = await self.request('GET', url)
    # Parse the ISO-8601 timestamp into a datetime object.
    info['date'] = datetime.strptime(info['date'], '%Y-%m-%dT%H:%M:%S.%fZ')
    # Normalise empty-string fields to None.
    for key, value in info.items():
        if value == '':
            info[key] = None
    return info
q270805 | HTTPClient.get_bots | test | async def get_bots(self, limit, offset):
'''Gets an object of bots on DBL'''
# NOTE(review): limits above 500 are reset to 50 rather than clamped
# to 500 — confirm 50 (not 500) is the intended fallback.
if limit > 500:
limit = 50
return await self.request('GET', '{}/bots?limit={}&offset={}'.format(self.BASE, limit, offset)) | python | {
"resource": ""
} |
q270806 | Port.read | test | def read(self):
"""Read incoming message."""
# Messages are framed: a fixed-size length header of `self.packet`
# bytes, followed by an encoded Erlang-term payload of that length.
packet = self.packet
with self.__read_lock:
buffer = self.__buffer
# Read until we have the full length header...
while len(buffer) < packet:
buffer += self._read_data()
# ...then the total frame size is header value + header size.
length = self.__unpack(buffer[:packet])[0] + packet
while len(buffer) < length:
buffer += self._read_data()
# Decode the payload; leftover bytes are kept for the next read.
term, self.__buffer = decode(buffer[packet:])
return term | python | {
"resource": ""
} |
q270807 | Port.write | test | def write(self, message):
"""Write outgoing message."""
# Encode the term and prepend the fixed-size length header.
data = encode(message, compressed=self.compressed)
length = len(data)
data = self.__pack(length) + data
with self.__write_lock:
# os.write may perform partial writes; loop until all bytes are sent.
while data:
try:
n = os.write(self.out_d, data)
except OSError as why:
# A broken/invalid pipe means the peer has gone away.
if why.errno in (errno.EPIPE, errno.EINVAL):
raise EOFError()
raise
if not n:
raise EOFError()
data = data[n:]
# Total bytes written: payload plus length header.
return length + self.packet | python | {
"resource": ""
} |
def close(self):
    """Close both port file descriptors."""
    for descriptor in (self.in_d, self.out_d):
        os.close(descriptor)
q270809 | decode | test | def decode(string):
"""Decode Erlang external term."""
if not string:
raise IncompleteData(string)
# Byte 131 (0x83) is the external term format version magic.
if string[0] != 131:
raise ValueError("unknown protocol version: %r" % string[0])
if string[1:2] == b'P':
# compressed term
# Layout: 131, 'P', 4-byte uncompressed size, zlib-deflated payload.
if len(string) < 16:
raise IncompleteData(string)
d = decompressobj()
term_string = d.decompress(string[6:]) + d.flush()
uncompressed_size, = _int4_unpack(string[2:6])
# The declared uncompressed size must match what we inflated.
if len(term_string) != uncompressed_size:
raise ValueError(
"invalid compressed tag, "
"%d bytes but got %d" % (uncompressed_size, len(term_string)))
# tail data returned by decode_term() can be simple ignored
term, _tail = decode_term(term_string)
return term, d.unused_data
return decode_term(string[1:]) | python | {
"resource": ""
} |
def encode(term, compressed=False):
    """Encode an Erlang external term, optionally zlib-compressing it.

    ``compressed`` may be ``False``/``0`` (no compression), ``True``
    (default level 6) or an explicit zlib level 0-9.
    """
    payload = encode_term(term)
    if not compressed:
        # False and 0 both mean "do not attempt compression".
        return b"\x83" + payload
    level = 6 if compressed is True else compressed
    if level < 0 or level > 9:
        raise ValueError("invalid compression level: %r" % (level,))
    deflated = compress(payload, level)
    # Only emit the compressed form when it is actually smaller
    # (5 bytes are needed for the 'P' tag + size header).
    if len(deflated) + 5 <= len(payload):
        return b"\x83P" + _int4_pack(len(payload)) + deflated
    return b"\x83" + payload
q270811 | NetworkingThread.addSourceAddr | test | def addSourceAddr(self, addr):
"""None means 'system default'"""
try:
# Join the multicast group on this interface address.
self._multiInSocket.setsockopt(socket.IPPROTO_IP, socket.IP_ADD_MEMBERSHIP, self._makeMreq(addr))
except socket.error: # if 1 interface has more than 1 address, exception is raised for the second
pass
# Create a per-address outgoing multicast socket and watch it for input.
sock = self._createMulticastOutSocket(addr, self._observer.ttl)
self._multiOutUniInSockets[addr] = sock
self._poll.register(sock, select.POLLIN) | python | {
"resource": ""
} |
q270812 | NetworkingThread._sendPendingMessages | test | def _sendPendingMessages(self):
"""Method sleeps, if nothing to do"""
# Empty queue: back off briefly to avoid busy-waiting.
if len(self._queue) == 0:
time.sleep(0.1)
return
msg = self._queue.pop(0)
if msg.canSend():
self._sendMsg(msg)
msg.refresh()
# Re-queue messages that still have repeats left to send.
if not (msg.isFinished()):
self._queue.append(msg)
else:
# Not ready to send yet: push it back and yield briefly.
self._queue.append(msg)
time.sleep(0.01) | python | {
"resource": ""
} |
def setRemoteServiceHelloCallback(self, cb, types=None, scopes=None):
    """Register *cb* to be invoked when a new service appears online
    and sends a Hello message.

    ``types`` and ``scopes`` are optional filters: when given, the
    callback fires only for Hello messages matching them.  Pass
    ``None`` as *cb* to disable the callback.
    """
    self._remoteServiceHelloCallback = cb
    self._remoteServiceHelloCallbackTypesFilter = types
    self._remoteServiceHelloCallbackScopesFilter = scopes
def stop(self):
    """Clean up registered services and stop the discovery server."""
    # Forget remote services, announce Bye for local ones, then
    # shut the worker threads down.
    self.clearRemoteServices()
    self.clearLocalServices()
    self._stopThreads()
    self._serverStarted = False
def clearLocalServices(self):
    """Announce Bye for every local service, then forget them all."""
    # Iterate over a snapshot so _sendBye may safely touch the mapping.
    for svc in list(self._localServices.values()):
        self._sendBye(svc)
    self._localServices.clear()
def searchServices(self, types=None, scopes=None, timeout=3):
    """Probe for services matching TYPES and SCOPES, collecting
    responses for TIMEOUT seconds."""
    if not self._serverStarted:
        raise Exception("Server not started")
    self._sendProbe(types, scopes)
    # Give remote endpoints time to answer the probe.
    time.sleep(timeout)
    discovered = list(self._remoteServices.values())
    return self._filterServices(discovered, types, scopes)
def createSOAPMessage(env):
    """Construct a raw SOAP XML string from a prepared SoapEnvelope."""
    # Dispatch on the envelope's action; unknown actions yield None.
    builders = {
        ACTION_PROBE: createProbeMessage,
        ACTION_PROBE_MATCH: createProbeMatchMessage,
        ACTION_RESOLVE: createResolveMessage,
        ACTION_RESOLVE_MATCH: createResolveMatchMessage,
        ACTION_HELLO: createHelloMessage,
        ACTION_BYE: createByeMessage,
    }
    builder = builders.get(env.getAction())
    return builder(env) if builder is not None else None
q270818 | discover | test | def discover(scope, loglevel, capture):
"Discover systems using WS-Discovery"
if loglevel:
# NOTE(review): getattr(logging, ...) needs an upper-case level name
# (e.g. "DEBUG"); confirm the CLI upper-cases the input before here.
level = getattr(logging, loglevel, None)
if not level:
print("Invalid log level '%s'" % loglevel)
return
logger.setLevel(level)
run(scope=scope, capture=capture) | python | {
"resource": ""
} |
q270819 | _ClusterTaggableManager.get_tagged_item_manager | test | def get_tagged_item_manager(self):
"""Return the manager that handles the relation from this instance to the tagged_item class.
If content_object on the tagged_item class is defined as a ParentalKey, this will be a
DeferringRelatedManager which allows writing related objects without committing them
to the database.
"""
# Resolve the reverse accessor name of the through-model's
# 'content_object' FK, then fetch that manager off the instance.
rel_name = self.through._meta.get_field('content_object').remote_field.get_accessor_name()
return getattr(self.instance, rel_name) | python | {
"resource": ""
} |
def get_all_child_relations(model):
    """Return the child-relation fields of *model* (reverse sides of
    ParentalKeys), including those attached to ancestor models."""
    relations = []
    for field in model._meta.get_fields():
        if isinstance(field.remote_field, ParentalKey):
            relations.append(field)
    return relations
def get_all_child_m2m_relations(model):
    """Return the ParentalManyToManyFields declared on *model*,
    including those attached to ancestor models."""
    m2m_fields = []
    for field in model._meta.get_fields():
        if isinstance(field, ParentalManyToManyField):
            m2m_fields.append(field)
    return m2m_fields
q270822 | ClusterableModel.save | test | def save(self, **kwargs):
"""
Save the model and commit all child relations.
"""
child_relation_names = [rel.get_accessor_name() for rel in get_all_child_relations(self)]
child_m2m_field_names = [field.name for field in get_all_child_m2m_relations(self)]
update_fields = kwargs.pop('update_fields', None)
if update_fields is None:
# No update_fields given: save all fields and commit every child
# relation and parental M2M field.
real_update_fields = None
relations_to_commit = child_relation_names
m2m_fields_to_commit = child_m2m_field_names
else:
# Partition update_fields into child relations, parental M2M
# fields, and ordinary fields passed through to Django's save().
real_update_fields = []
relations_to_commit = []
m2m_fields_to_commit = []
for field in update_fields:
if field in child_relation_names:
relations_to_commit.append(field)
elif field in child_m2m_field_names:
m2m_fields_to_commit.append(field)
else:
real_update_fields.append(field)
super(ClusterableModel, self).save(update_fields=real_update_fields, **kwargs)
# Persist deferred child objects now that the parent has been saved.
for relation in relations_to_commit:
getattr(self, relation).commit()
for field in m2m_fields_to_commit:
getattr(self, field).commit() | python | {
"resource": ""
} |
q270823 | ClusterableModel.from_serializable_data | test | def from_serializable_data(cls, data, check_fks=True, strict_fks=False):
"""
Build an instance of this model from the JSON-like structure passed in,
recursing into related objects as required.
If check_fks is true, it will check whether referenced foreign keys still
exist in the database.
- dangling foreign keys on related objects are dealt with by either nullifying the key or
dropping the related object, according to the 'on_delete' setting.
- dangling foreign keys on the base object will be nullified, unless strict_fks is true,
in which case any dangling foreign keys with on_delete=CASCADE will cause None to be
returned for the entire object.
"""
obj = model_from_serializable_data(cls, data, check_fks=check_fks, strict_fks=strict_fks)
if obj is None:
return None
child_relations = get_all_child_relations(cls)
for rel in child_relations:
rel_name = rel.get_accessor_name()
try:
child_data_list = data[rel_name]
except KeyError:
# Relations absent from the data are left untouched.
continue
related_model = rel.related_model
# Recurse when the child model supports it; fall back to the
# generic deserializer otherwise.
if hasattr(related_model, 'from_serializable_data'):
children = [
related_model.from_serializable_data(child_data, check_fks=check_fks, strict_fks=True)
for child_data in child_data_list
]
else:
children = [
model_from_serializable_data(related_model, child_data, check_fks=check_fks, strict_fks=True)
for child_data in child_data_list
]
# Drop children that failed their FK checks (strict mode yields None).
children = filter(lambda child: child is not None, children)
setattr(obj, rel_name, children)
return obj | python | {
"resource": ""
} |
q270824 | BaseChildFormSet.validate_unique | test | def validate_unique(self):
'''This clean method will check for unique_together condition'''
# Collect unique_checks and to run from all the forms.
all_unique_checks = set()
all_date_checks = set()
forms_to_delete = self.deleted_forms
# Deleted forms are excluded from uniqueness checking entirely.
valid_forms = [form for form in self.forms if form.is_valid() and form not in forms_to_delete]
for form in valid_forms:
unique_checks, date_checks = form.instance._get_unique_checks()
all_unique_checks.update(unique_checks)
all_date_checks.update(date_checks)
errors = []
# Do each of the unique checks (unique and unique_together)
for uclass, unique_check in all_unique_checks:
seen_data = set()
for form in valid_forms:
# Get the data for the set of fields that must be unique among the forms.
row_data = (
field if field in self.unique_fields else form.cleaned_data[field]
for field in unique_check if field in form.cleaned_data
)
# Reduce Model instances to their primary key values
row_data = tuple(d._get_pk_val() if hasattr(d, '_get_pk_val') else d
for d in row_data)
# Rows containing None are not comparable, so skip them.
if row_data and None not in row_data:
# if we've already seen it then we have a uniqueness failure
if row_data in seen_data:
# poke error messages into the right places and mark
# the form as invalid
errors.append(self.get_unique_error_message(unique_check))
form._errors[NON_FIELD_ERRORS] = self.error_class([self.get_form_error()])
# remove the data from the cleaned_data dict since it was invalid
for field in unique_check:
if field in form.cleaned_data:
del form.cleaned_data[field]
# mark the data as seen
seen_data.add(row_data)
if errors:
raise ValidationError(errors) | python | {
"resource": ""
} |
def has_changed(self):
    """Return True if data differs from initial."""
    # A change in any nested child form must mark the parent as
    # changed, otherwise the parent would never be saved when only
    # child forms were edited.
    nested_changed = any(
        form.has_changed()
        for formset in (self.formsets or {}).values()
        for form in formset.forms
    )
    return nested_changed or bool(self.changed_data)
def with_valid_checksum(self):
    # type: () -> Address
    """
    Return a copy of this address with a valid checksum appended.
    """
    checksummed_trytes = self.address + self._generate_checksum()
    # Carry the ancillary attributes over to the new instance.
    return Address(
        trytes=checksummed_trytes,
        balance=self.balance,
        key_index=self.key_index,
        security_level=self.security_level,
    )
q270827 | Address._generate_checksum | test | def _generate_checksum(self):
# type: () -> AddressChecksum
"""
Generates the correct checksum for this address.
"""
checksum_trits = [] # type: MutableSequence[int]
# Hash the address trits with a Kerl sponge.
sponge = Kerl()
sponge.absorb(self.address.as_trits())
sponge.squeeze(checksum_trits)
# The checksum is the trailing trits of the squeezed hash.
checksum_length = AddressChecksum.LEN * TRITS_PER_TRYTE
return AddressChecksum.from_trits(checksum_trits[-checksum_length:]) | python | {
"resource": ""
} |
q270828 | IotaCommandLineApp.parse_argv | test | def parse_argv(self, argv=None):
# type: (Optional[tuple]) -> dict
"""
Parses arguments for the command.
:param argv:
Arguments to pass to the argument parser.
If ``None``, defaults to ``sys.argv[1:]``.
"""
arguments = vars(self.create_argument_parser().parse_args(argv))
seed = None
if self.requires_seed:
# Load the seed from the given file, or prompt for it on stdin.
seed_filepath = arguments.pop('seed_file')
seed = (
self.seed_from_filepath(seed_filepath)
if seed_filepath
else self.prompt_for_seed()
)
# Replace the raw uri/testnet arguments with a ready-to-use client.
arguments['api'] = Iota(
adapter=arguments.pop('uri'),
seed=seed,
testnet=arguments.pop('testnet'),
)
return arguments | python | {
"resource": ""
} |
q270829 | IotaCommandLineApp.create_argument_parser | test | def create_argument_parser(self):
# type: () -> ArgumentParser
"""
Returns the argument parser that will be used to interpret
arguments and options from argv.
"""
parser = ArgumentParser(
description=self.__doc__,
epilog='PyOTA v{version}'.format(version=__version__),
)
parser.add_argument(
'--uri',
type=text_type,
default='http://localhost:14265/',
help=(
'URI of the node to connect to '
'(defaults to http://localhost:14265/).'
),
)
# Only commands that need a seed expose the --seed-file option.
if self.requires_seed:
parser.add_argument(
'--seed-file',
type=text_type,
dest='seed_file',
help=(
'Path to a file containing your seed in cleartext. '
'If not provided, you will be prompted to enter '
'your seed via stdin.'
),
)
parser.add_argument(
'--testnet',
action='store_true',
default=False,
help='If set, use testnet settings (e.g., for PoW).',
)
return parser | python | {
"resource": ""
} |
q270830 | IotaCommandLineApp.prompt_for_seed | test | def prompt_for_seed():
# type: () -> Seed
"""
Prompts the user to enter their seed via stdin.
"""
seed = secure_input(
'Enter seed and press return (typing will not be shown).\n'
'If no seed is specified, a random one will be used instead.\n'
)
# Seeds are byte strings; encode text input as ASCII.
if isinstance(seed, text_type):
seed = seed.encode('ascii')
# Empty input falls back to a randomly generated seed.
return Seed(seed) if seed else Seed.random() | python | {
"resource": ""
} |
q270831 | validate_signature_fragments | test | def validate_signature_fragments(
fragments,
hash_,
public_key,
sponge_type=Kerl,
):
# type: (Sequence[TryteString], Hash, TryteString, type) -> bool
"""
Returns whether a sequence of signature fragments is valid.
:param fragments:
Sequence of signature fragments (usually
:py:class:`iota.transaction.Fragment` instances).
:param hash_:
Hash used to generate the signature fragments (usually a
:py:class:`iota.transaction.BundleHash` instance).
:param public_key:
The public key value used to verify the signature digest (usually a
:py:class:`iota.types.Address` instance).
:param sponge_type:
The class used to create the cryptographic sponge (i.e., Curl or Kerl).
"""
# Rebuild the key digest for each fragment, collecting them all
# into one concatenated checksum.
checksum = [0] * (HASH_LENGTH * len(fragments))
normalized_hash = normalize(hash_)
for i, fragment in enumerate(fragments):
outer_sponge = sponge_type()
# If there are more than 3 iterations, loop back around to the
# start.
normalized_chunk = normalized_hash[i % len(normalized_hash)]
buffer = []
for j, hash_trytes in enumerate(fragment.iter_chunks(Hash.LEN)):
buffer = hash_trytes.as_trits() # type: List[int]
inner_sponge = sponge_type()
# Note the sign flip compared to
# :py;class:`SignatureFragmentGenerator`.
for _ in range(13 + normalized_chunk[j]):
inner_sponge.reset()
inner_sponge.absorb(buffer)
inner_sponge.squeeze(buffer)
outer_sponge.absorb(buffer)
outer_sponge.squeeze(buffer)
checksum[i * HASH_LENGTH:(i + 1) * HASH_LENGTH] = buffer
# The public key is the hash of all of the digests together; the
# signature is valid iff the reconstruction matches the given key.
actual_public_key = [0] * HASH_LENGTH
addy_sponge = sponge_type()
addy_sponge.absorb(checksum)
addy_sponge.squeeze(actual_public_key)
return actual_public_key == public_key.as_trits() | python | {
"resource": ""
} |
def get_key(self, index, iterations):
    # type: (int, int) -> PrivateKey
    """Generate and return a single private key.

    :param index:
        The key index.
    :param iterations:
        Number of transform iterations (security level, >= 1).
        Higher values are slower but more brute-force resistant.
    """
    keys = self.get_keys(
        start=index,
        count=1,
        step=1,
        iterations=iterations,
    )
    return keys[0]
def get_key_for(self, address):
    """Generate the private key associated with *address*.

    The result is only meaningful if *address* was generated from the
    same seed; otherwise the wrong key is produced.
    """
    return self.get_key(
        index=address.key_index,
        iterations=address.security_level,
    )
q270834 | KeyGenerator.create_iterator | test | def create_iterator(self, start=0, step=1, security_level=1):
# type: (int, int, int) -> KeyIterator
"""
Creates a generator that can be used to progressively generate
new keys.
:param start:
Starting index.
Warning: This method may take awhile to reset if ``start``
is a large number!
:param step:
Number of indexes to advance after each key.
This value can be negative; the generator will exit if it
reaches an index < 0.
Warning: The generator may take awhile to advance between
iterations if ``step`` is a large number!
:param security_level:
Number of _transform iterations to apply to each key.
Must be >= 1.
Increasing this value makes key generation slower, but more
resistant to brute-forcing.
"""
# KeyIterator derives keys lazily from the seed as it is advanced.
return KeyIterator(self.seed, start, step, security_level) | python | {
"resource": ""
} |
q270835 | KeyIterator._create_sponge | test | def _create_sponge(self, index):
# type: (int) -> Kerl
"""
Prepares the hash sponge for the generator.
"""
# Absorb seed + index (trit-wise addition) to derive the subseed.
seed = self.seed_as_trits[:]
sponge = Kerl()
sponge.absorb(add_trits(seed, trits_from_int(index)))
# Squeeze all of the trits out of the sponge and re-absorb them.
# Note that the sponge transforms several times per operation,
# so this sequence is not as redundant as it looks at first
# glance.
sponge.squeeze(seed)
sponge.reset()
sponge.absorb(seed)
return sponge | python | {
"resource": ""
} |
q270836 | Curl.absorb | test | def absorb(self, trits, offset=0, length=None):
# type: (Sequence[int], Optional[int], Optional[int]) -> None
"""
Absorb trits into the sponge.
:param trits:
Sequence of trits to absorb.
:param offset:
Starting offset in ``trits``.
:param length:
Number of trits to absorb. Defaults to ``len(trits)``.
"""
# Zero-pad the input out to a whole number of hashes.
# NOTE(review): ``+=`` extends the caller's list in place when a
# list is passed — confirm callers expect this side effect.
pad = ((len(trits) % HASH_LENGTH) or HASH_LENGTH)
trits += [0] * (HASH_LENGTH - pad)
if length is None:
length = len(trits)
if length < 1:
raise with_context(
exc=ValueError('Invalid length passed to ``absorb``.'),
context={
'trits': trits,
'offset': offset,
'length': length,
},
)
# Copy trits from ``trits`` into internal state, one hash at a
# time, transforming internal state in between hashes.
while offset < length:
start = offset
stop = min(start + HASH_LENGTH, length)
# Copy the next hash worth of trits to internal state.
#
# Note that we always copy the trits to the start of the
# state. ``self._state`` is 3 hashes long, but only the
# first hash is "public"; the other 2 are only accessible to
# :py:meth:`_transform`.
self._state[0:stop - start] = trits[start:stop]
# Transform.
self._transform()
# Move on to the next hash.
offset += HASH_LENGTH | python | {
"resource": ""
} |
q270837 | Curl.squeeze | test | def squeeze(self, trits, offset=0, length=HASH_LENGTH):
# type: (MutableSequence[int], Optional[int], Optional[int]) -> None
"""
Squeeze trits from the sponge.
:param trits:
Sequence that the squeezed trits will be copied to.
Note: this object will be modified!
:param offset:
Starting offset in ``trits``.
:param length:
Number of trits to squeeze, default to ``HASH_LENGTH``
"""
# Squeeze is kind of like the opposite of absorb; it copies
# trits from internal state to the ``trits`` parameter, one hash
# at a time, and transforming internal state in between hashes.
#
# However, only the first hash of the state is "public", so we
# can simplify the implementation somewhat.
# Ensure length can be mod by HASH_LENGTH
# (error message contains a stray-backtick typo; cosmetic only)
if length % HASH_LENGTH != 0:
raise with_context(
exc=ValueError('Invalid length passed to ``squeeze`.'),
context={
'trits': trits,
'offset': offset,
'length': length,
})
# Ensure that ``trits`` can hold at least one hash worth of
# trits.
trits.extend([0] * max(0, length - len(trits)))
# Check trits with offset can handle hash length
if len(trits) - offset < HASH_LENGTH:
raise with_context(
exc=ValueError('Invalid offset passed to ``squeeze``.'),
context={
'trits': trits,
'offset': offset,
'length': length
},
)
while length >= HASH_LENGTH:
# Copy exactly one hash.
trits[offset:offset + HASH_LENGTH] = self._state[0:HASH_LENGTH]
# One hash worth of trits copied; now transform.
self._transform()
offset += HASH_LENGTH
length -= HASH_LENGTH | python | {
"resource": ""
} |
q270838 | Curl._transform | test | def _transform(self):
# type: () -> None
"""
Transforms internal state.
"""
# Copy some values locally so we can avoid global lookups in the
# inner loop.
#
# References:
#
# - https://wiki.python.org/moin/PythonSpeed/PerformanceTips#Local_Variables
state_length = STATE_LENGTH
truth_table = TRUTH_TABLE
# Operate on a copy of ``self._state`` to eliminate dot lookups
# in the inner loop.
#
# References:
#
# - https://wiki.python.org/moin/PythonSpeed/PerformanceTips#Avoiding_dots...
# - http://stackoverflow.com/a/2612990/
prev_state = self._state[:]
new_state = prev_state[:]
# Note: This code looks significantly different from the C
# implementation because it has been optimized to limit the
# number of list item lookups (these are relatively slow in
# Python).
index = 0
for _ in range(NUMBER_OF_ROUNDS):
prev_trit = prev_state[index]
for pos in range(state_length):
# Step through the state with a fixed stride, wrapping
# around (presumably stride 364 modulo the state length
# — confirm against the reference C implementation).
index += (364 if index < 365 else -365)
new_trit = prev_state[index]
new_state[pos] = truth_table[prev_trit + (3 * new_trit) + 4]
prev_trit = new_trit
prev_state = new_state
new_state = new_state[:]
self._state = new_state | python | {
"resource": ""
} |
def get_digests(
    self,
    index=0,
    count=1,
    security_level=AddressGenerator.DEFAULT_SECURITY_LEVEL,
):
    # type: (int, int, int) -> dict
    """Generate one or more key digests from the seed.

    Digests are safe to share; they are the building blocks of
    multisig addresses.

    :param index:
        Starting key index.
    :param count:
        Number of digests to generate.
    :param security_level:
        Number of iterations used when generating addresses (1-3);
        larger values are slower but produce more secure signatures.
    :return:
        ``{'digests': List[Digest]}`` — always a list, even when only
        one digest was generated.
    """
    command = commands.GetDigestsCommand(self.adapter)
    return command(
        seed=self.seed,
        index=index,
        count=count,
        securityLevel=security_level,
    )
def get_private_keys(
    self,
    index=0,
    count=1,
    security_level=AddressGenerator.DEFAULT_SECURITY_LEVEL,
):
    # type: (int, int, int) -> dict
    """Generate one or more private keys from the seed.

    Private keys should normally not be shared, though a few
    scenarios (e.g., M-of-N transactions) require it.

    :param index:
        Starting key index.
    :param count:
        Number of keys to generate.
    :param security_level:
        Number of iterations used when generating keys (1-3); larger
        values are slower but produce more secure signatures.
    :return:
        ``{'keys': List[PrivateKey]}`` — always a list, even when only
        one key was generated.

    References:
        - :py:class:`iota.crypto.signing.KeyGenerator`
        - https://github.com/iotaledger/wiki/blob/master/multisigs.md#how-m-of-n-works
    """
    command = commands.GetPrivateKeysCommand(self.adapter)
    return command(
        seed=self.seed,
        index=index,
        count=count,
        securityLevel=security_level,
    )
def prepare_multisig_transfer(
    self,
    transfers,  # type: Iterable[ProposedTransaction]
    multisig_input,  # type: MultisigAddress
    change_address=None,  # type: Optional[Address]
):
    # type: (...) -> dict
    """Prepare (but do not sign) a bundle that spends IOTAs from a
    multisig address.

    .. note::
        Use this only for multisig inputs.  For ordinary inputs or
        0-value transfers, use :py:meth:`iota.api.Iota.prepare_transfer`.

    :param transfers:
        Transactions to prepare.  At least one must spend IOTAs
        (nonzero ``value``).
    :param multisig_input:
        Multisig address used as the bundle's single input.  To spend
        from several multisig addresses, build a
        :py:class:`iota.multisig.transaction.ProposedMultisigBundle`
        manually.
    :param change_address:
        Receives any unspent remainder.

        .. important::
            No change address is generated automatically — multisig
            bundles involve multiple parties, so silently deriving one
            from the invoking seed would be unsafe.  If there is an
            unspent remainder and ``change_address`` is empty, an
            exception is raised.  Always verify every transaction
            (including the change transaction) before signing!

    :return:
        ``{'trytes': List[TransactionTrytes]}`` — the finalized bundle
        with unsigned input transactions.  Sign the inputs with
        :py:meth:`iota.crypto.types.PrivateKey.sign_input_at` (keys in
        the correct order), then attach and broadcast via
        :py:meth:`iota.api.Iota.send_trytes`.
    """
    command = commands.PrepareMultisigTransferCommand(self.adapter)
    return command(
        changeAddress=change_address,
        multisigInput=multisig_input,
        transfers=transfers,
    )
q270842 | add_trits | test | def add_trits(left, right):
# type: (Sequence[int], Sequence[int]) -> List[int]
"""
Adds two sequences of trits together.
The result is a list of trits equal in length to the longer of the
two sequences.
.. note::
Overflow is possible.
For example, ``add_trits([1], [1])`` returns ``[-1]``.
"""
target_len = max(len(left), len(right))
res = [0] * target_len
left += [0] * (target_len - len(left))
right += [0] * (target_len - len(right))
carry = 0
for i in range(len(res)):
res[i], carry = _full_add_trits(left[i], right[i], carry)
return res | python | {
"resource": ""
} |
q270843 | trits_from_int | test | def trits_from_int(n, pad=1):
# type: (int, Optional[int]) -> List[int]
"""
Returns a trit representation of an integer value.
:param n:
Integer value to convert.
:param pad:
Ensure the result has at least this many trits.
References:
- https://dev.to/buntine/the-balanced-ternary-machines-of-soviet-russia
- https://en.wikipedia.org/wiki/Balanced_ternary
- https://rosettacode.org/wiki/Balanced_ternary#Python
"""
if n == 0:
trits = []
else:
quotient, remainder = divmod(n, 3)
if remainder == 2:
# Lend 1 to the next place so we can make this trit
# negative.
quotient += 1
remainder = -1
trits = [remainder] + trits_from_int(quotient, pad=0)
if pad:
trits += [0] * max(0, pad - len(trits))
return trits | python | {
"resource": ""
} |
q270844 | _add_trits | test | def _add_trits(left, right):
# type: (int, int) -> int
"""
Adds two individual trits together.
The result is always a single trit.
"""
res = left + right
return res if -2 < res < 2 else (res < 0) - (res > 0) | python | {
"resource": ""
} |
q270845 | _full_add_trits | test | def _full_add_trits(left, right, carry):
# type: (int, int, int) -> Tuple[int, int]
"""
Adds two trits together, with support for a carry trit.
"""
sum_both = _add_trits(left, right)
cons_left = _cons_trits(left, right)
cons_right = _cons_trits(sum_both, carry)
return _add_trits(sum_both, carry), _any_trits(cons_left, cons_right) | python | {
"resource": ""
} |
q270846 | output_seed | test | def output_seed(seed):
# type: (Seed) -> None
"""
Outputs the user's seed to stdout, along with lots of warnings
about security.
"""
print(
'WARNING: Anyone who has your seed can spend your IOTAs! '
'Clear the screen after recording your seed!'
)
compat.input('')
print('Your seed is:')
print('')
print(binary_type(seed).decode('ascii'))
print('')
print(
'Clear the screen to prevent shoulder surfing, '
'and press return to continue.'
)
print('https://en.wikipedia.org/wiki/Shoulder_surfing_(computer_security)')
compat.input('') | python | {
"resource": ""
} |
q270847 | StrictIota.find_transactions | test | def find_transactions(
self,
bundles=None, # type: Optional[Iterable[BundleHash]]
addresses=None, # type: Optional[Iterable[Address]]
tags=None, # type: Optional[Iterable[Tag]]
approvees=None, # type: Optional[Iterable[TransactionHash]]
):
# type: (...) -> dict
"""
Find the transactions which match the specified input and
return.
All input values are lists, for which a list of return values
(transaction hashes), in the same order, is returned for all
individual elements.
Using multiple of these input fields returns the intersection of
the values.
:param bundles:
List of bundle IDs.
:param addresses:
List of addresses.
:param tags:
List of tags.
:param approvees:
List of approvee transaction IDs.
References:
- https://iota.readme.io/docs/findtransactions
"""
return core.FindTransactionsCommand(self.adapter)(
bundles=bundles,
addresses=addresses,
tags=tags,
approvees=approvees,
) | python | {
"resource": ""
} |
q270848 | Iota.get_inputs | test | def get_inputs(
self,
start=0,
stop=None,
threshold=None,
security_level=None,
):
# type: (int, Optional[int], Optional[int], Optional[int]) -> dict
"""
Gets all possible inputs of a seed and returns them, along with
the total balance.
This is either done deterministically (by generating all
addresses until :py:meth:`find_transactions` returns an empty
result), or by providing a key range to search.
:param start:
Starting key index.
Defaults to 0.
:param stop:
Stop before this index.
Note that this parameter behaves like the ``stop`` attribute
in a :py:class:`slice` object; the stop index is *not*
included in the result.
If ``None`` (default), then this method will not stop until
it finds an unused address.
:param threshold:
If set, determines the minimum threshold for a successful
result:
- As soon as this threshold is reached, iteration will stop.
- If the command runs out of addresses before the threshold
is reached, an exception is raised.
.. note::
This method does not attempt to "optimize" the result
(e.g., smallest number of inputs, get as close to
``threshold`` as possible, etc.); it simply accumulates
inputs in order until the threshold is met.
If ``threshold`` is 0, the first address in the key range
with a non-zero balance will be returned (if it exists).
If ``threshold`` is ``None`` (default), this method will
return **all** inputs in the specified key range.
:param security_level:
Number of iterations to use when generating new addresses
(see :py:meth:`get_new_addresses`).
This value must be between 1 and 3, inclusive.
If not set, defaults to
:py:attr:`AddressGenerator.DEFAULT_SECURITY_LEVEL`.
:return:
Dict with the following structure::
{
'inputs': List[Address],
Addresses with nonzero balances that can be used
as inputs.
'totalBalance': int,
Aggregate balance from all matching addresses.
}
Note that each Address in the result has its ``balance``
attribute set.
Example:
.. code-block:: python
response = iota.get_inputs(...)
input0 = response['inputs'][0] # type: Address
input0.balance # 42
:raise:
- :py:class:`iota.adapter.BadApiResponse` if ``threshold``
is not met. Not applicable if ``threshold`` is ``None``.
References:
- https://github.com/iotaledger/wiki/blob/master/api-proposal.md#getinputs
"""
return extended.GetInputsCommand(self.adapter)(
seed=self.seed,
start=start,
stop=stop,
threshold=threshold,
securityLevel=security_level
) | python | {
"resource": ""
} |
q270849 | Iota.get_new_addresses | test | def get_new_addresses(
self,
index=0,
count=1,
security_level=AddressGenerator.DEFAULT_SECURITY_LEVEL,
checksum=False,
):
# type: (int, Optional[int], int, bool) -> dict
"""
Generates one or more new addresses from the seed.
:param index:
The key index of the first new address to generate (must be
>= 1).
:param count:
Number of addresses to generate (must be >= 1).
.. tip::
This is more efficient than calling ``get_new_address``
inside a loop.
If ``None``, this method will progressively generate
addresses and scan the Tangle until it finds one that has no
transactions referencing it.
:param security_level:
Number of iterations to use when generating new addresses.
Larger values take longer, but the resulting signatures are
more secure.
This value must be between 1 and 3, inclusive.
:param checksum:
Specify whether to return the address with the checksum.
Defaults to ``False``.
:return:
Dict with the following structure::
{
'addresses': List[Address],
Always a list, even if only one address was
generated.
}
References:
- https://github.com/iotaledger/wiki/blob/master/api-proposal.md#getnewaddress
"""
return extended.GetNewAddressesCommand(self.adapter)(
count=count,
index=index,
securityLevel=security_level,
checksum=checksum,
seed=self.seed,
) | python | {
"resource": ""
} |
q270850 | Iota.get_transfers | test | def get_transfers(self, start=0, stop=None, inclusion_states=False):
# type: (int, Optional[int], bool) -> dict
"""
Returns all transfers associated with the seed.
:param start:
Starting key index.
:param stop:
Stop before this index.
Note that this parameter behaves like the ``stop`` attribute
in a :py:class:`slice` object; the stop index is *not*
included in the result.
If ``None`` (default), then this method will check every
address until it finds one without any transfers.
:param inclusion_states:
Whether to also fetch the inclusion states of the transfers.
This requires an additional API call to the node, so it is
disabled by default.
:return:
Dict with the following structure::
{
'bundles': List[Bundle],
Matching bundles, sorted by tail transaction
timestamp.
This value is always a list, even if only one
bundle was found.
}
References:
- https://github.com/iotaledger/wiki/blob/master/api-proposal.md#gettransfers
"""
return extended.GetTransfersCommand(self.adapter)(
seed=self.seed,
start=start,
stop=stop,
inclusionStates=inclusion_states,
) | python | {
"resource": ""
} |
q270851 | Iota.promote_transaction | test | def promote_transaction(
self,
transaction,
depth=3,
min_weight_magnitude=None,
):
# type: (TransactionHash, int, Optional[int]) -> dict
"""
Promotes a transaction by adding spam on top of it.
:return:
Dict with the following structure::
{
'bundle': Bundle,
The newly-published bundle.
}
"""
if min_weight_magnitude is None:
min_weight_magnitude = self.default_min_weight_magnitude
return extended.PromoteTransactionCommand(self.adapter)(
transaction=transaction,
depth=depth,
minWeightMagnitude=min_weight_magnitude,
) | python | {
"resource": ""
} |
q270852 | Iota.replay_bundle | test | def replay_bundle(
self,
transaction,
depth=3,
min_weight_magnitude=None,
):
# type: (TransactionHash, int, Optional[int]) -> dict
"""
Takes a tail transaction hash as input, gets the bundle
associated with the transaction and then replays the bundle by
attaching it to the Tangle.
:param transaction:
Transaction hash. Must be a tail.
:param depth:
Depth at which to attach the bundle.
Defaults to 3.
:param min_weight_magnitude:
Min weight magnitude, used by the node to calibrate Proof of
Work.
If not provided, a default value will be used.
:return:
Dict with the following structure::
{
'trytes': List[TransactionTrytes],
Raw trytes that were published to the Tangle.
}
References:
- https://github.com/iotaledger/wiki/blob/master/api-proposal.md#replaytransfer
"""
if min_weight_magnitude is None:
min_weight_magnitude = self.default_min_weight_magnitude
return extended.ReplayBundleCommand(self.adapter)(
transaction=transaction,
depth=depth,
minWeightMagnitude=min_weight_magnitude,
) | python | {
"resource": ""
} |
q270853 | Iota.send_transfer | test | def send_transfer(
self,
transfers, # type: Iterable[ProposedTransaction]
depth=3, # type: int
inputs=None, # type: Optional[Iterable[Address]]
change_address=None, # type: Optional[Address]
min_weight_magnitude=None, # type: Optional[int]
security_level=None, # type: Optional[int]
):
# type: (...) -> dict
"""
Prepares a set of transfers and creates the bundle, then
attaches the bundle to the Tangle, and broadcasts and stores the
transactions.
:param transfers:
Transfers to include in the bundle.
:param depth:
Depth at which to attach the bundle.
Defaults to 3.
:param inputs:
List of inputs used to fund the transfer.
Not needed for zero-value transfers.
:param change_address:
If inputs are provided, any unspent amount will be sent to
this address.
If not specified, a change address will be generated
automatically.
:param min_weight_magnitude:
Min weight magnitude, used by the node to calibrate Proof of
Work.
If not provided, a default value will be used.
:param security_level:
Number of iterations to use when generating new addresses
(see :py:meth:`get_new_addresses`).
This value must be between 1 and 3, inclusive.
If not set, defaults to
:py:attr:`AddressGenerator.DEFAULT_SECURITY_LEVEL`.
:return:
Dict with the following structure::
{
'bundle': Bundle,
The newly-published bundle.
}
References:
- https://github.com/iotaledger/wiki/blob/master/api-proposal.md#sendtransfer
"""
if min_weight_magnitude is None:
min_weight_magnitude = self.default_min_weight_magnitude
return extended.SendTransferCommand(self.adapter)(
seed=self.seed,
depth=depth,
transfers=transfers,
inputs=inputs,
changeAddress=change_address,
minWeightMagnitude=min_weight_magnitude,
securityLevel=security_level,
) | python | {
"resource": ""
} |
q270854 | Iota.send_trytes | test | def send_trytes(self, trytes, depth=3, min_weight_magnitude=None):
# type: (Iterable[TransactionTrytes], int, Optional[int]) -> dict
"""
Attaches transaction trytes to the Tangle, then broadcasts and
stores them.
:param trytes:
Transaction encoded as a tryte sequence.
:param depth:
Depth at which to attach the bundle.
Defaults to 3.
:param min_weight_magnitude:
Min weight magnitude, used by the node to calibrate Proof of
Work.
If not provided, a default value will be used.
:return:
Dict with the following structure::
{
'trytes': List[TransactionTrytes],
Raw trytes that were published to the Tangle.
}
References:
- https://github.com/iotaledger/wiki/blob/master/api-proposal.md#sendtrytes
"""
if min_weight_magnitude is None:
min_weight_magnitude = self.default_min_weight_magnitude
return extended.SendTrytesCommand(self.adapter)(
trytes=trytes,
depth=depth,
minWeightMagnitude=min_weight_magnitude,
) | python | {
"resource": ""
} |
q270855 | resolve_adapter | test | def resolve_adapter(uri):
# type: (AdapterSpec) -> BaseAdapter
"""
Given a URI, returns a properly-configured adapter instance.
"""
if isinstance(uri, BaseAdapter):
return uri
parsed = compat.urllib_parse.urlsplit(uri) # type: SplitResult
if not parsed.scheme:
raise with_context(
exc=InvalidUri(
'URI must begin with "<protocol>://" (e.g., "udp://").',
),
context={
'parsed': parsed,
'uri': uri,
},
)
try:
adapter_type = adapter_registry[parsed.scheme]
except KeyError:
raise with_context(
exc=InvalidUri('Unrecognized protocol {protocol!r}.'.format(
protocol=parsed.scheme,
)),
context={
'parsed': parsed,
'uri': uri,
},
)
return adapter_type.configure(parsed) | python | {
"resource": ""
} |
q270856 | BaseAdapter.send_request | test | def send_request(self, payload, **kwargs):
# type: (dict, dict) -> dict
"""
Sends an API request to the node.
:param payload:
JSON payload.
:param kwargs:
Additional keyword arguments for the adapter.
:return:
Decoded response from the node.
:raise:
- :py:class:`BadApiResponse` if a non-success response was
received.
"""
raise NotImplementedError(
'Not implemented in {cls}.'.format(cls=type(self).__name__),
) | python | {
"resource": ""
} |
q270857 | BaseAdapter._log | test | def _log(self, level, message, context=None):
# type: (int, Text, Optional[dict]) -> None
"""
Sends a message to the instance's logger, if configured.
"""
if self._logger:
self._logger.log(level, message, extra={'context': context or {}}) | python | {
"resource": ""
} |
q270858 | HttpAdapter._send_http_request | test | def _send_http_request(self, url, payload, method='post', **kwargs):
# type: (Text, Optional[Text], Text, dict) -> Response
"""
Sends the actual HTTP request.
Split into its own method so that it can be mocked during unit
tests.
"""
kwargs.setdefault(
'timeout',
self.timeout if self.timeout else get_default_timeout(),
)
if self.authentication:
kwargs.setdefault('auth', auth.HTTPBasicAuth(*self.authentication))
self._log(
level=DEBUG,
message='Sending {method} to {url}: {payload!r}'.format(
method=method,
payload=payload,
url=url,
),
context={
'request_method': method,
'request_kwargs': kwargs,
'request_payload': payload,
'request_url': url,
},
)
response = request(method=method, url=url, data=payload, **kwargs)
self._log(
level=DEBUG,
message='Receiving {method} from {url}: {response!r}'.format(
method=method,
response=response.content,
url=url,
),
context={
'request_method': method,
'request_kwargs': kwargs,
'request_payload': payload,
'request_url': url,
'response_headers': response.headers,
'response_content': response.content,
},
)
return response | python | {
"resource": ""
} |
q270859 | HttpAdapter._interpret_response | test | def _interpret_response(self, response, payload, expected_status):
# type: (Response, dict, Container[int]) -> dict
"""
Interprets the HTTP response from the node.
:param response:
The response object received from
:py:meth:`_send_http_request`.
:param payload:
The request payload that was sent (used for debugging).
:param expected_status:
The response should match one of these status codes to be
considered valid.
"""
raw_content = response.text
if not raw_content:
raise with_context(
exc=BadApiResponse(
'Empty {status} response from node.'.format(
status=response.status_code,
),
),
context={
'request': payload,
},
)
try:
decoded = json.loads(raw_content) # type: dict
# :bc: py2k doesn't have JSONDecodeError
except ValueError:
raise with_context(
exc=BadApiResponse(
'Non-JSON {status} response from node: '
'{raw_content}'.format(
status=response.status_code,
raw_content=raw_content,
)
),
context={
'request': payload,
'raw_response': raw_content,
},
)
if not isinstance(decoded, dict):
raise with_context(
exc=BadApiResponse(
'Malformed {status} response from node: {decoded!r}'.format(
status=response.status_code,
decoded=decoded,
),
),
context={
'request': payload,
'response': decoded,
},
)
if response.status_code in expected_status:
return decoded
error = None
try:
if response.status_code == codes['bad_request']:
error = decoded['error']
elif response.status_code == codes['internal_server_error']:
error = decoded['exception']
except KeyError:
pass
raise with_context(
exc=BadApiResponse(
'{status} response from node: {error}'.format(
error=error or decoded,
status=response.status_code,
),
),
context={
'request': payload,
'response': decoded,
},
) | python | {
"resource": ""
} |
q270860 | MockAdapter.seed_response | test | def seed_response(self, command, response):
# type: (Text, dict) -> MockAdapter
"""
Sets the response that the adapter will return for the specified
command.
You can seed multiple responses per command; the adapter will
put them into a FIFO queue. When a request comes in, the
adapter will pop the corresponding response off of the queue.
Example:
.. code-block:: python
adapter.seed_response('sayHello', {'message': 'Hi!'})
adapter.seed_response('sayHello', {'message': 'Hello!'})
adapter.send_request({'command': 'sayHello'})
# {'message': 'Hi!'}
adapter.send_request({'command': 'sayHello'})
# {'message': 'Hello!'}
"""
if command not in self.responses:
self.responses[command] = deque()
self.responses[command].append(response)
return self | python | {
"resource": ""
} |
q270861 | MultisigAddressBuilder.add_digest | test | def add_digest(self, digest):
# type: (Digest) -> None
"""
Absorbs a digest into the sponge.
.. important::
Keep track of the order that digests are added!
To spend inputs from a multisig address, you must provide
the private keys in the same order!
References:
- https://github.com/iotaledger/wiki/blob/master/multisigs.md#spending-inputs
"""
if self._address:
raise ValueError('Cannot add digests once an address is extracted.')
self._sponge.absorb(digest.as_trits())
self._digests.append(digest) | python | {
"resource": ""
} |
q270862 | MultisigAddressBuilder.get_address | test | def get_address(self):
# type: () -> MultisigAddress
"""
Returns the new multisig address.
Note that you can continue to add digests after extracting an
address; the next address will use *all* of the digests that
have been added so far.
"""
if not self._digests:
raise ValueError(
'Must call ``add_digest`` at least once '
'before calling ``get_address``.',
)
if not self._address:
address_trits = [0] * HASH_LENGTH
self._sponge.squeeze(address_trits)
self._address = MultisigAddress.from_trits(
address_trits,
digests=self._digests[:],
)
return self._address | python | {
"resource": ""
} |
q270863 | AddressGenerator.create_iterator | test | def create_iterator(self, start=0, step=1):
# type: (int, int) -> Generator[Address, None, None]
"""
Creates an iterator that can be used to progressively generate new
addresses.
:param start:
Starting index.
Warning: This method may take awhile to reset if ``start``
is a large number!
:param step:
Number of indexes to advance after each address.
Warning: The generator may take awhile to advance between
iterations if ``step`` is a large number!
"""
key_iterator = (
KeyGenerator(self.seed).create_iterator(
start,
step,
self.security_level,
)
)
while True:
yield self._generate_address(key_iterator) | python | {
"resource": ""
} |
q270864 | AddressGenerator.address_from_digest | test | def address_from_digest(digest):
# type: (Digest) -> Address
"""
Generates an address from a private key digest.
"""
address_trits = [0] * (Address.LEN * TRITS_PER_TRYTE) # type: List[int]
sponge = Kerl()
sponge.absorb(digest.as_trits())
sponge.squeeze(address_trits)
return Address.from_trits(
trits=address_trits,
key_index=digest.key_index,
security_level=digest.security_level,
) | python | {
"resource": ""
} |
q270865 | AddressGenerator._generate_address | test | def _generate_address(self, key_iterator):
# type: (KeyIterator) -> Address
"""
Generates a new address.
Used in the event of a cache miss.
"""
if self.checksum:
return (
self.address_from_digest(
digest=self._get_digest(key_iterator),
).with_valid_checksum()
)
else:
return self.address_from_digest(self._get_digest(key_iterator)) | python | {
"resource": ""
} |
q270866 | find_transaction_objects | test | def find_transaction_objects(adapter, **kwargs):
# type: (BaseAdapter, **Iterable) -> List[Transaction]
"""
Finds transactions matching the specified criteria, fetches the
corresponding trytes and converts them into Transaction objects.
"""
ft_response = FindTransactionsCommand(adapter)(**kwargs)
hashes = ft_response['hashes']
if hashes:
gt_response = GetTrytesCommand(adapter)(hashes=hashes)
return list(map(
Transaction.from_tryte_string,
gt_response.get('trytes') or [],
)) # type: List[Transaction]
return [] | python | {
"resource": ""
} |
q270867 | iter_used_addresses | test | def iter_used_addresses(
adapter, # type: BaseAdapter
seed, # type: Seed
start, # type: int
security_level=None, # type: Optional[int]
):
# type: (...) -> Generator[Tuple[Address, List[TransactionHash]], None, None]
"""
Scans the Tangle for used addresses.
This is basically the opposite of invoking ``getNewAddresses`` with
``stop=None``.
"""
if security_level is None:
security_level = AddressGenerator.DEFAULT_SECURITY_LEVEL
ft_command = FindTransactionsCommand(adapter)
for addy in AddressGenerator(seed, security_level).create_iterator(start):
ft_response = ft_command(addresses=[addy])
if ft_response['hashes']:
yield addy, ft_response['hashes']
else:
break
# Reset the command so that we can call it again.
ft_command.reset() | python | {
"resource": ""
} |
q270868 | get_bundles_from_transaction_hashes | test | def get_bundles_from_transaction_hashes(
adapter,
transaction_hashes,
inclusion_states,
):
# type: (BaseAdapter, Iterable[TransactionHash], bool) -> List[Bundle]
"""
Given a set of transaction hashes, returns the corresponding bundles,
sorted by tail transaction timestamp.
"""
transaction_hashes = list(transaction_hashes)
if not transaction_hashes:
return []
my_bundles = [] # type: List[Bundle]
# Sort transactions into tail and non-tail.
tail_transaction_hashes = set()
non_tail_bundle_hashes = set()
gt_response = GetTrytesCommand(adapter)(hashes=transaction_hashes)
all_transactions = list(map(
Transaction.from_tryte_string,
gt_response['trytes'],
)) # type: List[Transaction]
for txn in all_transactions:
if txn.is_tail:
tail_transaction_hashes.add(txn.hash)
else:
# Capture the bundle ID instead of the transaction hash so
# that we can query the node to find the tail transaction
# for that bundle.
non_tail_bundle_hashes.add(txn.bundle_hash)
if non_tail_bundle_hashes:
for txn in find_transaction_objects(
adapter=adapter,
bundles=list(non_tail_bundle_hashes),
):
if txn.is_tail:
if txn.hash not in tail_transaction_hashes:
all_transactions.append(txn)
tail_transaction_hashes.add(txn.hash)
# Filter out all non-tail transactions.
tail_transactions = [
txn
for txn in all_transactions
if txn.hash in tail_transaction_hashes
]
# Attach inclusion states, if requested.
if inclusion_states:
gli_response = GetLatestInclusionCommand(adapter)(
hashes=list(tail_transaction_hashes),
)
for txn in tail_transactions:
txn.is_confirmed = gli_response['states'].get(txn.hash)
# Find the bundles for each transaction.
for txn in tail_transactions:
gb_response = GetBundlesCommand(adapter)(transaction=txn.hash)
txn_bundles = gb_response['bundles'] # type: List[Bundle]
if inclusion_states:
for bundle in txn_bundles:
bundle.is_confirmed = txn.is_confirmed
my_bundles.extend(txn_bundles)
return list(sorted(
my_bundles,
key=lambda bundle_: bundle_.tail_transaction.timestamp,
)) | python | {
"resource": ""
} |
q270869 | check_trytes_codec | test | def check_trytes_codec(encoding):
"""
Determines which codec to use for the specified encoding.
References:
- https://docs.python.org/3/library/codecs.html#codecs.register
"""
if encoding == AsciiTrytesCodec.name:
return AsciiTrytesCodec.get_codec_info()
elif encoding == AsciiTrytesCodec.compat_name:
warn(
'"{old_codec}" codec will be removed in PyOTA v2.1. '
'Use "{new_codec}" instead.'.format(
new_codec=AsciiTrytesCodec.name,
old_codec=AsciiTrytesCodec.compat_name,
),
DeprecationWarning,
)
return AsciiTrytesCodec.get_codec_info()
return None | python | {
"resource": ""
} |
q270870 | AsciiTrytesCodec.get_codec_info | test | def get_codec_info(cls):
"""
Returns information used by the codecs library to configure the
codec for use.
"""
codec = cls()
codec_info = {
'encode': codec.encode,
'decode': codec.decode,
}
# In Python 2, all codecs are made equal.
# In Python 3, some codecs are more equal than others.
if PY3:
codec_info['_is_text_encoding'] = False
return CodecInfo(**codec_info) | python | {
"resource": ""
} |
q270871 | AsciiTrytesCodec.encode | test | def encode(self, input, errors='strict'):
"""
Encodes a byte string into trytes.
"""
if isinstance(input, memoryview):
input = input.tobytes()
if not isinstance(input, (binary_type, bytearray)):
raise with_context(
exc=TypeError(
"Can't encode {type}; byte string expected.".format(
type=type(input).__name__,
)),
context={
'input': input,
},
)
# :bc: In Python 2, iterating over a byte string yields
# characters instead of integers.
if not isinstance(input, bytearray):
input = bytearray(input)
trytes = bytearray()
for c in input:
second, first = divmod(c, len(self.alphabet))
trytes.append(self.alphabet[first])
trytes.append(self.alphabet[second])
return binary_type(trytes), len(input) | python | {
"resource": ""
} |
q270872 | AsciiTrytesCodec.decode | test | def decode(self, input, errors='strict'):
"""
Decodes a tryte string into bytes.
"""
if isinstance(input, memoryview):
input = input.tobytes()
if not isinstance(input, (binary_type, bytearray)):
raise with_context(
exc=TypeError(
"Can't decode {type}; byte string expected.".format(
type=type(input).__name__,
)),
context={
'input': input,
},
)
# :bc: In Python 2, iterating over a byte string yields
# characters instead of integers.
if not isinstance(input, bytearray):
input = bytearray(input)
bytes_ = bytearray()
for i in range(0, len(input), 2):
try:
first, second = input[i:i + 2]
except ValueError:
if errors == 'strict':
raise with_context(
exc=TrytesDecodeError(
"'{name}' codec can't decode value; "
"tryte sequence has odd length.".format(
name=self.name,
),
),
context={
'input': input,
},
)
elif errors == 'replace':
bytes_ += b'?'
continue
try:
bytes_.append(
self.index[first]
+ (self.index[second] * len(self.index))
)
except ValueError:
# This combination of trytes yields a value > 255 when
# decoded.
# Naturally, we can't represent this using ASCII.
if errors == 'strict':
raise with_context(
exc=TrytesDecodeError(
"'{name}' codec can't decode trytes {pair} "
"at position {i}-{j}: "
"ordinal not in range(255)".format(
name=self.name,
pair=chr(first) + chr(second),
i=i,
j=i + 1,
),
),
context={
'input': input,
}
)
elif errors == 'replace':
bytes_ += b'?'
return binary_type(bytes_), len(input) | python | {
"resource": ""
} |
q270873 | GetNewAddressesCommand._find_addresses | test | def _find_addresses(self, seed, index, count, security_level, checksum):
# type: (Seed, int, Optional[int], int, bool) -> List[Address]
"""
Find addresses matching the command parameters.
"""
generator = AddressGenerator(seed, security_level, checksum)
if count is None:
# Connect to Tangle and find the first address without any
# transactions.
for addy in generator.create_iterator(start=index):
# We use addy.address here because FindTransactions does
# not work on an address with a checksum
response = FindTransactionsCommand(self.adapter)(
addresses=[addy.address],
)
if not response.get('hashes'):
return [addy]
return generator.get_addresses(start=index, count=count) | python | {
"resource": ""
} |
q270874 | RoutingWrapper.add_route | test | def add_route(self, command, adapter):
# type: (Text, AdapterSpec) -> RoutingWrapper
"""
Adds a route to the wrapper.
:param command:
The name of the command to route (e.g., "attachToTangle").
:param adapter:
The adapter object or URI to route requests to.
"""
if not isinstance(adapter, BaseAdapter):
try:
adapter = self.adapter_aliases[adapter]
except KeyError:
self.adapter_aliases[adapter] = adapter = resolve_adapter(
adapter
)
self.routes[command] = adapter
return self | python | {
"resource": ""
} |
q270875 | Transaction.from_tryte_string | test | def from_tryte_string(cls, trytes, hash_=None):
# type: (TrytesCompatible, Optional[TransactionHash]) -> Transaction
"""
Creates a Transaction object from a sequence of trytes.
:param trytes:
Raw trytes. Should be exactly 2673 trytes long.
:param hash_:
The transaction hash, if available.
If not provided, it will be computed from the transaction
trytes.
"""
tryte_string = TransactionTrytes(trytes)
if not hash_:
hash_trits = [0] * HASH_LENGTH # type: MutableSequence[int]
sponge = Curl()
sponge.absorb(tryte_string.as_trits())
sponge.squeeze(hash_trits)
hash_ = TransactionHash.from_trits(hash_trits)
return cls(
hash_=hash_,
signature_message_fragment=Fragment(tryte_string[0:2187]),
address=Address(tryte_string[2187:2268]),
value=int_from_trits(tryte_string[2268:2295].as_trits()),
legacy_tag=Tag(tryte_string[2295:2322]),
timestamp=int_from_trits(tryte_string[2322:2331].as_trits()),
current_index=int_from_trits(tryte_string[2331:2340].as_trits()),
last_index=int_from_trits(tryte_string[2340:2349].as_trits()),
bundle_hash=BundleHash(tryte_string[2349:2430]),
trunk_transaction_hash=TransactionHash(tryte_string[2430:2511]),
branch_transaction_hash=TransactionHash(tryte_string[2511:2592]),
tag=Tag(tryte_string[2592:2619]),
attachment_timestamp=int_from_trits(
tryte_string[2619:2628].as_trits()),
attachment_timestamp_lower_bound=int_from_trits(
tryte_string[2628:2637].as_trits()),
attachment_timestamp_upper_bound=int_from_trits(
tryte_string[2637:2646].as_trits()),
nonce=Nonce(tryte_string[2646:2673]),
) | python | {
"resource": ""
} |
q270876 | Transaction.as_json_compatible | test | def as_json_compatible(self):
# type: () -> dict
"""
Returns a JSON-compatible representation of the object.
References:
- :py:class:`iota.json.JsonEncoder`.
"""
return {
'hash_': self.hash,
'signature_message_fragment': self.signature_message_fragment,
'address': self.address,
'value': self.value,
'legacy_tag': self.legacy_tag,
'timestamp': self.timestamp,
'current_index': self.current_index,
'last_index': self.last_index,
'bundle_hash': self.bundle_hash,
'trunk_transaction_hash': self.trunk_transaction_hash,
'branch_transaction_hash': self.branch_transaction_hash,
'tag': self.tag,
'attachment_timestamp': self.attachment_timestamp,
'attachment_timestamp_lower_bound':
self.attachment_timestamp_lower_bound,
'attachment_timestamp_upper_bound':
self.attachment_timestamp_upper_bound,
'nonce': self.nonce,
} | python | {
"resource": ""
} |
q270877 | Transaction.get_signature_validation_trytes | test | def get_signature_validation_trytes(self):
# type: () -> TryteString
"""
Returns the values needed to validate the transaction's
``signature_message_fragment`` value.
"""
return (
self.address.address
+ self.value_as_trytes
+ self.legacy_tag
+ self.timestamp_as_trytes
+ self.current_index_as_trytes
+ self.last_index_as_trytes
) | python | {
"resource": ""
} |
q270878 | Bundle.is_confirmed | test | def is_confirmed(self, new_is_confirmed):
# type: (bool) -> None
"""
Sets the ``is_confirmed`` for the bundle.
"""
self._is_confirmed = new_is_confirmed
for txn in self:
txn.is_confirmed = new_is_confirmed | python | {
"resource": ""
} |
q270879 | Bundle.get_messages | test | def get_messages(self, errors='drop'):
# type: (Text) -> List[Text]
"""
Attempts to decipher encoded messages from the transactions in
the bundle.
:param errors:
How to handle trytes that can't be converted, or bytes that
can't be decoded using UTF-8:
'drop'
Drop the trytes from the result.
'strict'
Raise an exception.
'replace'
Replace with a placeholder character.
'ignore'
Omit the invalid tryte/byte sequence.
"""
decode_errors = 'strict' if errors == 'drop' else errors
messages = []
for group in self.group_transactions():
# Ignore inputs.
if group[0].value < 0:
continue
message_trytes = TryteString(b'')
for txn in group:
message_trytes += txn.signature_message_fragment
if message_trytes:
try:
messages.append(message_trytes.decode(decode_errors))
except (TrytesDecodeError, UnicodeDecodeError):
if errors != 'drop':
raise
return messages | python | {
"resource": ""
} |
q270880 | Bundle.as_tryte_strings | test | def as_tryte_strings(self, head_to_tail=False):
# type: (bool) -> List[TransactionTrytes]
"""
Returns TryteString representations of the transactions in this
bundle.
:param head_to_tail:
Determines the order of the transactions:
- ``True``: head txn first, tail txn last.
- ``False`` (default): tail txn first, head txn last.
Note that the order is reversed by default, as this is the
way bundles are typically broadcast to the Tangle.
"""
transactions = self if head_to_tail else reversed(self)
return [t.as_tryte_string() for t in transactions] | python | {
"resource": ""
} |
q270881 | Bundle.group_transactions | test | def group_transactions(self):
# type: () -> List[List[Transaction]]
"""
Groups transactions in the bundle by address.
"""
groups = []
if self:
last_txn = self.tail_transaction
current_group = [last_txn]
for current_txn in self.transactions[1:]:
# Transactions are grouped by address, so as long as the
# address stays consistent from one transaction to
# another, we are still in the same group.
if current_txn.address == last_txn.address:
current_group.append(current_txn)
else:
groups.append(current_group)
current_group = [current_txn]
last_txn = current_txn
if current_group:
groups.append(current_group)
return groups | python | {
"resource": ""
} |
q270882 | discover_commands | test | def discover_commands(package, recursively=True):
# type: (Union[ModuleType, Text], bool) -> Dict[Text, 'CommandMeta']
"""
Automatically discover commands in the specified package.
:param package:
Package path or reference.
:param recursively:
If True, will descend recursively into sub-packages.
:return:
All commands discovered in the specified package, indexed by
command name (note: not class name).
"""
# http://stackoverflow.com/a/25562415/
if isinstance(package, string_types):
package = import_module(package) # type: ModuleType
commands = {}
for _, name, is_package in walk_packages(package.__path__, package.__name__ + '.'):
# Loading the module is good enough; the CommandMeta metaclass will
# ensure that any commands in the module get registered.
# Prefix in name module move to function "walk_packages" for fix
# conflict with names importing packages
# Bug https://github.com/iotaledger/iota.lib.py/issues/63
sub_package = import_module(name)
# Index any command classes that we find.
for (_, obj) in get_members(sub_package):
if is_class(obj) and isinstance(obj, CommandMeta):
command_name = getattr(obj, 'command')
if command_name:
commands[command_name] = obj
if recursively and is_package:
commands.update(discover_commands(sub_package))
return commands | python | {
"resource": ""
} |
q270883 | BaseCommand._execute | test | def _execute(self, request):
# type: (dict) -> dict
"""
Sends the request object to the adapter and returns the response.
The command name will be automatically injected into the request
before it is sent (note: this will modify the request object).
"""
request['command'] = self.command
return self.adapter.send_request(request) | python | {
"resource": ""
} |
q270884 | FilterCommand._apply_filter | test | def _apply_filter(value, filter_, failure_message):
# type: (dict, Optional[f.BaseFilter], Text) -> dict
"""
Applies a filter to a value. If the value does not pass the
filter, an exception will be raised with lots of contextual info
attached to it.
"""
if filter_:
runner = f.FilterRunner(filter_, value)
if runner.is_valid():
return runner.cleaned_data
else:
raise with_context(
exc = ValueError(
'{message} ({error_codes}) '
'(`exc.context["filter_errors"]` '
'contains more information).'.format(
message = failure_message,
error_codes = runner.error_codes,
),
),
context = {
'filter_errors': runner.get_errors(with_context=True),
},
)
return value | python | {
"resource": ""
} |
q270885 | SandboxAdapter.get_jobs_url | test | def get_jobs_url(self, job_id):
# type: (Text) -> Text
"""
Returns the URL to check job status.
:param job_id:
The ID of the job to check.
"""
return compat.urllib_parse.urlunsplit((
self.uri.scheme,
self.uri.netloc,
self.uri.path.rstrip('/') + '/jobs/' + job_id,
self.uri.query,
self.uri.fragment,
)) | python | {
"resource": ""
} |
q270886 | BundleValidator.errors | test | def errors(self):
# type: () -> List[Text]
"""
Returns all errors found with the bundle.
"""
try:
self._errors.extend(self._validator) # type: List[Text]
except StopIteration:
pass
return self._errors | python | {
"resource": ""
} |
q270887 | BundleValidator.is_valid | test | def is_valid(self):
# type: () -> bool
"""
Returns whether the bundle is valid.
"""
if not self._errors:
try:
# We only have to check for a single error to determine
# if the bundle is valid or not.
self._errors.append(next(self._validator))
except StopIteration:
pass
return not self._errors | python | {
"resource": ""
} |
q270888 | BundleValidator._create_validator | test | def _create_validator(self):
# type: () -> Generator[Text, None, None]
"""
Creates a generator that does all the work.
"""
# Group transactions by address to make it easier to iterate
# over inputs.
grouped_transactions = self.bundle.group_transactions()
# Define a few expected values.
bundle_hash = self.bundle.hash
last_index = len(self.bundle) - 1
# Track a few others as we go along.
balance = 0
# Check indices and balance first.
# Note that we use a counter to keep track of the current index,
# since at this point we can't trust that the transactions have
# correct ``current_index`` values.
counter = 0
for group in grouped_transactions:
for txn in group:
balance += txn.value
if txn.bundle_hash != bundle_hash:
yield 'Transaction {i} has invalid bundle hash.'.format(
i=counter,
)
if txn.current_index != counter:
yield (
'Transaction {i} has invalid current index value '
'(expected {i}, actual {actual}).'.format(
actual=txn.current_index,
i=counter,
)
)
if txn.last_index != last_index:
yield (
'Transaction {i} has invalid last index value '
'(expected {expected}, actual {actual}).'.format(
actual=txn.last_index,
expected=last_index,
i=counter,
)
)
counter += 1
# Bundle must be balanced (spends must match inputs).
if balance != 0:
yield (
'Bundle has invalid balance '
'(expected 0, actual {actual}).'.format(
actual=balance,
)
)
# Signature validation is only meaningful if the transactions
# are otherwise valid.
if not self._errors:
signature_validation_queue = [] # type: List[List[Transaction]]
for group in grouped_transactions:
# Signature validation only applies to inputs.
if group[0].value >= 0:
continue
validate_group_signature = True
for j, txn in enumerate(group):
if (j > 0) and (txn.value != 0):
# Input is malformed; signature fragments after
# the first should have zero value.
yield (
'Transaction {i} has invalid value '
'(expected 0, actual {actual}).'.format(
actual=txn.value,
# If we get to this point, we know that
# the ``current_index`` value for each
# transaction can be trusted.
i=txn.current_index,
)
)
# We won't be able to validate the signature,
# but continue anyway, so that we can check that
# the other transactions in the group have the
# correct ``value``.
validate_group_signature = False
continue
# After collecting the signature fragment from each
# transaction in the group, queue them up to run through
# the validator.
#
# We have to perform signature validation separately so
# that we can try different algorithms (for
# backwards-compatibility).
#
# References:
#
# - https://github.com/iotaledger/kerl#kerl-integration-in-iota
if validate_group_signature:
signature_validation_queue.append(group)
# Once we've finished checking the attributes from each
# transaction in the bundle, go back and validate
# signatures.
if signature_validation_queue:
# ``yield from`` is an option here, but for
# compatibility with Python 2 clients, we will do it the
# old-fashioned way.
for error in self._get_bundle_signature_errors(
signature_validation_queue
):
yield error | python | {
"resource": ""
} |
q270889 | BundleValidator._get_bundle_signature_errors | test | def _get_bundle_signature_errors(self, groups):
# type: (List[List[Transaction]]) -> List[Text]
"""
Validates the signature fragments in the bundle.
:return:
List of error messages.
If empty, signature fragments are valid.
"""
# Start with the currently-supported hash algo.
current_pos = None
current_errors = []
for current_pos, group in enumerate(groups):
error = self._get_group_signature_error(group, SUPPORTED_SPONGE)
if error:
current_errors.append(error)
# Pause and retry with the legacy algo.
break
# If validation failed, then go back and try with the legacy
# algo (only applies if we are currently transitioning to a new
# algo).
if current_errors and LEGACY_SPONGE:
for group in groups:
# noinspection PyTypeChecker
if self._get_group_signature_error(group, LEGACY_SPONGE):
# Legacy algo doesn't work, either; no point in
# continuing.
break
else:
# If we get here, then we were able to validate the
# signature fragments successfully using the legacy
# algorithm.
return []
# If we get here, then validation also failed when using the
# legacy algorithm.
# At this point, we know that the bundle is invalid, but we will
# continue validating with the supported algorithm anyway, so
# that we can return an error message for every invalid input.
current_errors.extend(filter(None, (
self._get_group_signature_error(group, SUPPORTED_SPONGE)
for group in groups[current_pos + 1:]
)))
return current_errors | python | {
"resource": ""
} |
q270890 | BundleValidator._get_group_signature_error | test | def _get_group_signature_error(group, sponge_type):
# type: (List[Transaction], type) -> Optional[Text]
"""
Validates the signature fragments for a group of transactions
using the specified sponge type.
Note: this method assumes that the transactions in the group
have already passed basic validation (see
:py:meth:`_create_validator`).
:return:
- ``None``: Indicates that the signature fragments are valid.
- ``Text``: Error message indicating the fragments are invalid.
"""
validate_group_signature = validate_signature_fragments(
fragments=[txn.signature_message_fragment for txn in group],
hash_=group[0].bundle_hash,
public_key=group[0].address,
sponge_type=sponge_type,
)
if validate_group_signature:
return None
return (
'Transaction {i} has invalid signature '
'(using {fragments} fragments).'.format(
fragments=len(group),
i=group[0].current_index,
)
) | python | {
"resource": ""
} |
q270891 | GetBundlesCommand._traverse_bundle | test | def _traverse_bundle(self, txn_hash, target_bundle_hash=None):
# type: (TransactionHash, Optional[BundleHash]) -> List[Transaction]
"""
Recursively traverse the Tangle, collecting transactions until
we hit a new bundle.
This method is (usually) faster than ``findTransactions``, and
it ensures we don't collect transactions from replayed bundles.
"""
trytes = (
GetTrytesCommand(self.adapter)(hashes=[txn_hash])['trytes']
) # type: List[TryteString]
if not trytes:
raise with_context(
exc=BadApiResponse(
'Bundle transactions not visible '
'(``exc.context`` has more info).',
),
context={
'transaction_hash': txn_hash,
'target_bundle_hash': target_bundle_hash,
},
)
transaction = Transaction.from_tryte_string(trytes[0])
if (not target_bundle_hash) and transaction.current_index:
raise with_context(
exc=BadApiResponse(
'``_traverse_bundle`` started with a non-tail transaction '
'(``exc.context`` has more info).',
),
context={
'transaction_object': transaction,
'target_bundle_hash': target_bundle_hash,
},
)
if target_bundle_hash:
if target_bundle_hash != transaction.bundle_hash:
# We've hit a different bundle; we can stop now.
return []
else:
target_bundle_hash = transaction.bundle_hash
if transaction.current_index == transaction.last_index == 0:
# Bundle only has one transaction.
return [transaction]
# Recursively follow the trunk transaction, to fetch the next
# transaction in the bundle.
return [transaction] + self._traverse_bundle(
txn_hash=transaction.trunk_transaction_hash,
target_bundle_hash=target_bundle_hash
) | python | {
"resource": ""
} |
q270892 | IotaReplCommandLineApp._start_repl | test | def _start_repl(api):
# type: (Iota) -> None
"""
Starts the REPL.
"""
banner = (
'IOTA API client for {uri} ({testnet}) '
'initialized as variable `api`.\n'
'Type `help(api)` for list of API commands.'.format(
testnet='testnet' if api.testnet else 'mainnet',
uri=api.adapter.get_uri(),
)
)
scope_vars = {'api': api}
try:
# noinspection PyUnresolvedReferences
import IPython
except ImportError:
# IPython not available; use regular Python REPL.
from code import InteractiveConsole
InteractiveConsole(locals=scope_vars).interact(banner, '')
else:
print(banner)
IPython.start_ipython(argv=[], user_ns=scope_vars) | python | {
"resource": ""
} |
q270893 | Seed.random | test | def random(cls, length=Hash.LEN):
"""
Generates a random seed using a CSPRNG.
:param length:
Length of seed, in trytes.
For maximum security, this should always be set to 81, but
you can change it if you're 110% sure you know what you're
doing.
See https://iota.stackexchange.com/q/249 for more info.
"""
return super(Seed, cls).random(length) | python | {
"resource": ""
} |
q270894 | PrivateKey.get_digest | test | def get_digest(self):
# type: () -> Digest
"""
Generates the digest used to do the actual signing.
Signing keys can have variable length and tend to be quite long,
which makes them not-well-suited for use in crypto algorithms.
The digest is essentially the result of running the signing key
through a PBKDF, yielding a constant-length hash that can be
used for crypto.
"""
hashes_per_fragment = FRAGMENT_LENGTH // Hash.LEN
key_fragments = self.iter_chunks(FRAGMENT_LENGTH)
# The digest will contain one hash per key fragment.
digest = [0] * HASH_LENGTH * len(key_fragments)
# Iterate over each fragment in the key.
for i, fragment in enumerate(key_fragments):
fragment_trits = fragment.as_trits()
key_fragment = [0] * FRAGMENT_LENGTH
hash_trits = []
# Within each fragment, iterate over one hash at a time.
for j in range(hashes_per_fragment):
hash_start = j * HASH_LENGTH
hash_end = hash_start + HASH_LENGTH
hash_trits = fragment_trits[hash_start:hash_end]
for k in range(26):
sponge = Kerl()
sponge.absorb(hash_trits)
sponge.squeeze(hash_trits)
key_fragment[hash_start:hash_end] = hash_trits
# After processing all of the hashes in the fragment,
# generate a final hash and append it to the digest.
#
# Note that we will do this once per fragment in the key, so
# the longer the key is, the longer the digest will be.
sponge = Kerl()
sponge.absorb(key_fragment)
sponge.squeeze(hash_trits)
fragment_hash_start = i * HASH_LENGTH
fragment_hash_end = fragment_hash_start + HASH_LENGTH
digest[fragment_hash_start:fragment_hash_end] = hash_trits
return Digest(TryteString.from_trits(digest), self.key_index) | python | {
"resource": ""
} |
q270895 | PrivateKey.sign_input_transactions | test | def sign_input_transactions(self, bundle, start_index):
# type: (Bundle, int) -> None
"""
Signs the inputs starting at the specified index.
:param bundle:
The bundle that contains the input transactions to sign.
:param start_index:
The index of the first input transaction.
If necessary, the resulting signature will be split across
subsequent transactions automatically.
"""
if not bundle.hash:
raise with_context(
exc=ValueError('Cannot sign inputs without a bundle hash!'),
context={
'bundle': bundle,
'key_index': self.key_index,
'start_index': start_index,
},
)
from iota.crypto.signing import SignatureFragmentGenerator
signature_fragment_generator = (
SignatureFragmentGenerator(self, bundle.hash)
)
# We can only fit one signature fragment into each transaction,
# so we have to split the entire signature.
for j in range(self.security_level):
# Do lots of validation before we attempt to sign the
# transaction, and attach lots of context info to any
# exception.
#
# This method is likely to be invoked at a very low level in
# the application, so if anything goes wrong, we want to
# make sure it's as easy to troubleshoot as possible!
try:
txn = bundle[start_index + j]
except IndexError as e:
raise with_context(
exc=e,
context={
'bundle': bundle,
'key_index': self.key_index,
'current_index': start_index + j,
},
)
# Only inputs can be signed.
if txn.value > 0:
raise with_context(
exc=ValueError(
'Attempting to sign non-input transaction #{i} '
'(value={value}).'.format(
i=txn.current_index,
value=txn.value,
),
),
context={
'bundle': bundle,
'key_index': self.key_index,
'start_index': start_index,
},
)
if txn.signature_message_fragment:
raise with_context(
exc=ValueError(
'Attempting to sign input transaction #{i}, '
'but it has a non-empty fragment '
'(is it already signed?).'.format(
i=txn.current_index,
),
),
context={
'bundle': bundle,
'key_index': self.key_index,
'start_index': start_index,
},
)
txn.signature_message_fragment = next(signature_fragment_generator) | python | {
"resource": ""
} |
q270896 | JsonSerializable._repr_pretty_ | test | def _repr_pretty_(self, p, cycle):
"""
Makes JSON-serializable objects play nice with IPython's default
pretty-printer.
Sadly, :py:func:`pprint.pprint` does not have a similar
mechanism.
References:
- http://ipython.readthedocs.io/en/stable/api/generated/IPython.lib.pretty.html
- :py:meth:`IPython.lib.pretty.RepresentationPrinter.pretty`
- :py:func:`pprint._safe_repr`
"""
class_name = type(self).__name__
if cycle:
p.text('{cls}(...)'.format(
cls=class_name,
))
else:
with p.group(
len(class_name) + 1,
'{cls}('.format(cls=class_name),
')',
):
prepared = self.as_json_compatible()
if isinstance(prepared, Mapping):
p.text('**')
elif isinstance(prepared, Iterable):
p.text('*')
p.pretty(prepared) | python | {
"resource": ""
} |
q270897 | Kerl.absorb | test | def absorb(self, trits, offset=0, length=None):
# type: (MutableSequence[int], int, Optional[int]) -> None
"""
Absorb trits into the sponge from a buffer.
:param trits:
Buffer that contains the trits to absorb.
:param offset:
Starting offset in ``trits``.
:param length:
Number of trits to absorb. Defaults to ``len(trits)``.
"""
# Pad input if necessary, so that it can be divided evenly into
# hashes.
# Note that this operation creates a COPY of ``trits``; the
# incoming buffer is not modified!
pad = ((len(trits) % TRIT_HASH_LENGTH) or TRIT_HASH_LENGTH)
trits += [0] * (TRIT_HASH_LENGTH - pad)
if length is None:
length = len(trits)
if length < 1:
raise with_context(
exc=ValueError('Invalid length passed to ``absorb``.'),
context={
'trits': trits,
'offset': offset,
'length': length,
},
)
while offset < length:
stop = min(offset + TRIT_HASH_LENGTH, length)
# If we're copying over a full chunk, zero last trit.
if stop - offset == TRIT_HASH_LENGTH:
trits[stop - 1] = 0
signed_nums = conv.convertToBytes(trits[offset:stop])
# Convert signed bytes into their equivalent unsigned
# representation, in order to use Python's built-in bytes
# type.
unsigned_bytes = bytearray(
conv.convert_sign(b) for b in signed_nums
)
self.k.update(unsigned_bytes)
offset += TRIT_HASH_LENGTH | python | {
"resource": ""
} |
q270898 | Kerl.squeeze | test | def squeeze(self, trits, offset=0, length=None):
# type: (MutableSequence[int], int, Optional[int]) -> None
"""
Squeeze trits from the sponge into a buffer.
:param trits:
Buffer that will hold the squeezed trits.
IMPORTANT: If ``trits`` is too small, it will be extended!
:param offset:
Starting offset in ``trits``.
:param length:
Number of trits to squeeze from the sponge.
If not specified, defaults to :py:data:`TRIT_HASH_LENGTH`
(i.e., by default, we will try to squeeze exactly 1 hash).
"""
# Pad input if necessary, so that it can be divided evenly into
# hashes.
pad = ((len(trits) % TRIT_HASH_LENGTH) or TRIT_HASH_LENGTH)
trits += [0] * (TRIT_HASH_LENGTH - pad)
if length is None:
# By default, we will try to squeeze one hash.
# Note that this is different than ``absorb``.
length = len(trits) or TRIT_HASH_LENGTH
if length < 1:
raise with_context(
exc=ValueError('Invalid length passed to ``squeeze``.'),
context={
'trits': trits,
'offset': offset,
'length': length,
},
)
while offset < length:
unsigned_hash = self.k.digest()
if PY2:
unsigned_hash = map(ord, unsigned_hash) # type: ignore
signed_hash = [conv.convert_sign(b) for b in unsigned_hash]
trits_from_hash = conv.convertToTrits(signed_hash)
trits_from_hash[TRIT_HASH_LENGTH - 1] = 0
stop = min(TRIT_HASH_LENGTH, length - offset)
trits[offset:offset + stop] = trits_from_hash[0:stop]
flipped_bytes = bytearray(
conv.convert_sign(~b) for b in unsigned_hash
)
# Reset internal state before feeding back in.
self.reset()
self.k.update(flipped_bytes)
offset += TRIT_HASH_LENGTH | python | {
"resource": ""
} |
q270899 | with_context | test | def with_context(exc, context):
# type: (Exception, dict) -> Exception
"""
Attaches a ``context`` value to an Exception.
Before:
.. code-block:: python
exc = Exception('Frog blast the vent core!')
exc.context = { ... }
raise exc
After:
.. code-block:: python
raise with_context(
exc=Exception('Frog blast the vent core!'),
context={ ... },
)
"""
if not hasattr(exc, 'context'):
exc.context = {}
exc.context.update(context)
return exc | python | {
"resource": ""
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.