docstring
stringlengths 52
499
| function
stringlengths 67
35.2k
| __index_level_0__
int64 52.6k
1.16M
|
|---|---|---|
Deserialize full object.
Args:
reader (neocore.IO.BinaryReader):
|
def Deserialize(self, reader):
    """
    Deserialize full AssetState object from a binary stream.

    Args:
        reader (neocore.IO.BinaryReader): stream to read the asset fields from,
            in the fixed wire order used by `Serialize`.
    """
    super(AssetState, self).Deserialize(reader)
    self.AssetId = reader.ReadUInt256()
    self.AssetType = reader.ReadByte()
    self.Name = reader.ReadVarString()
    # remember the stream position so the Amount read can be retried from here
    position = reader.stream.tell()
    try:
        self.Amount = reader.ReadFixed8()
    except Exception as e:
        # NOTE(review): the fallback seeks back and issues the *same* ReadFixed8
        # call again — presumably the original intent was an alternate
        # (e.g. unsigned) read; confirm against the serializer, which writes
        # Amount unsigned when its value is non-negative.
        reader.stream.seek(position)
        self.Amount = reader.ReadFixed8()
    self.Available = reader.ReadFixed8()
    self.Precision = reader.ReadByte()
    # fee mode byte: read and discarded, not stored on the object
    reader.ReadByte()
    self.Fee = reader.ReadFixed8()
    self.FeeAddress = reader.ReadUInt160()
    self.Owner = ECDSA.Deserialize_Secp256r1(reader)
    self.Admin = reader.ReadUInt160()
    self.Issuer = reader.ReadUInt160()
    self.Expiration = reader.ReadUInt32()
    self.IsFrozen = reader.ReadBool()
| 317,790
|
Serialize full object.
Args:
writer (neo.IO.BinaryWriter):
|
def Serialize(self, writer):
    """
    Serialize full AssetState object.

    Args:
        writer (neo.IO.BinaryWriter): stream to write the asset fields to.

    Raises:
        Exception: if `Available` is not a Fixed8 instance.
    """
    super(AssetState, self).Serialize(writer)
    writer.WriteUInt256(self.AssetId)
    writer.WriteByte(self.AssetType)
    writer.WriteVarString(self.Name)
    # non-negative amounts are written unsigned; negative (e.g. -1 sentinel)
    # amounts fall back to the signed encoding
    if self.Amount.value > -1:
        writer.WriteFixed8(self.Amount, unsigned=True)
    else:
        writer.WriteFixed8(self.Amount)
    if type(self.Available) is not Fixed8:
        raise Exception("AVAILABLE IS NOT FIXED 8!")
    writer.WriteFixed8(self.Available, unsigned=True)
    writer.WriteByte(self.Precision)
    # fee mode byte (always zero; mirrored by the discarded byte in Deserialize)
    writer.WriteByte(b'\x00')
    writer.WriteFixed8(self.Fee)
    writer.WriteUInt160(self.FeeAddress)
    self.Owner.Serialize(writer)
    writer.WriteUInt160(self.Admin)
    writer.WriteUInt160(self.Issuer)
    writer.WriteUInt32(self.Expiration)
    writer.WriteBool(self.IsFrozen)
| 317,791
|
Get the script hash of the consensus node.
Args:
validators (list): of Ellipticcurve.ECPoint's
Returns:
UInt160:
|
def GetConsensusAddress(validators):
    """
    Get the script hash of the consensus node.

    Args:
        validators (list): of EllipticCurve.ECPoint's

    Returns:
        UInt160: script hash of the multi-signature redeem script.
    """
    count = len(validators)
    # m-of-n multisig: up to int((n - 1) / 3) validators may be faulty
    required = count - int((count - 1) / 3)
    script = Contract.CreateMultiSigRedeemScript(required, validators)
    return Crypto.ToScriptHash(script)
| 317,802
|
Get the system fee for the specified block.
Args:
height (int): block height.
Returns:
int:
|
def GetSysFeeAmountByHeight(self, height):
    """
    Get the system fee for the specified block.

    Args:
        height (int): block height.

    Returns:
        int: the system fee amount for the block at `height`.
    """
    # resolve height -> block hash, then hash -> accumulated system fee
    block_hash = self.GetBlockHash(height)
    return self.GetSysFeeAmount(block_hash)
| 317,804
|
Configure the stdio `StreamHandler` levels on the specified loggers.
If no log configurations are specified then the `default_level` will be applied to all handlers.
Args:
log_configurations: a list of (component name, log level) tuples
default_level: logging level to apply when no log_configurations are specified
|
def config_stdio(self, log_configurations: Optional[List[LogConfiguration]] = None, default_level=logging.INFO) -> None:
    """
    Configure the stdio `StreamHandler` levels on the specified loggers.

    If no log configurations are specified then the `default_level` is applied
    to all known loggers.

    Args:
        log_configurations: a list of (component name, log level) tuples.
        default_level: logging level to apply when no log_configurations are specified.

    Raises:
        ValueError: if a component name does not match any known logger.
    """
    if log_configurations:
        # only apply the specified configuration to the matching component logger
        for component, level in log_configurations:
            full_name = self.root + component
            if full_name not in self.loggers:
                raise ValueError("Failed to configure component. Invalid name: {}".format(component))
            self._restrict_output(self.loggers[full_name], level)
    else:
        # no configuration specified: apply `default_level` to every known logger
        for known_logger in self.loggers.values():
            self._restrict_output(known_logger, default_level)
| 317,823
|
Get the logger instance matching ``component_name`` or create a new one if non-existent.
Args:
component_name: a neo-python component name. e.g. network, vm, db
Returns:
a logger for the specified component.
|
def getLogger(self, component_name: Optional[str] = None) -> logging.Logger:
    """
    Get the logger instance matching ``component_name`` or create a new one if non-existent.

    Args:
        component_name: a neo-python component name. e.g. network, vm, db.
            Falls back to 'generic' when not given.

    Returns:
        logging.Logger: the logger for the specified component.
    """
    logger_name = self.root + (component_name if component_name else 'generic')
    _logger = self.loggers.get(logger_name)
    if not _logger:
        _logger = logging.getLogger(logger_name)
        # stdio handler filters at INFO while the logger itself stays at DEBUG,
        # so additional handlers can capture more verbose output
        stdio_handler = logging.StreamHandler()
        stdio_handler.setFormatter(LogFormatter())
        stdio_handler.setLevel(logging.INFO)
        _logger.addHandler(stdio_handler)
        _logger.setLevel(logging.DEBUG)
        # cache so repeated calls return the same configured instance
        self.loggers[logger_name] = _logger
    return _logger
| 317,827
|
Write a line to the VM instruction log file.
Args:
message (str): string message to write to file.
|
def write_log(self, message):
    """
    Write a line to the VM instruction log file.

    The message is silently dropped when logging is disabled or the
    log file is missing/closed.

    Args:
        message (str): string message to write to file.
    """
    log = self.log_file
    if self._is_write_log and log and not log.closed:
        log.write(message + '\n')
| 317,828
|
Deserialize full object.
Args:
reader (neocore.IO.BinaryReader):
|
def Deserialize(self, reader: BinaryReader):
    """
    Deserialize full StateDescriptor object.

    Args:
        reader (neocore.IO.BinaryReader): stream to read from.
    """
    self.Type = StateType(reader.ReadByte())
    # bounded reads guard against oversized payloads
    self.Key = reader.ReadVarBytes(max=100)
    self.Field = reader.ReadVarString(max=32).decode('utf-8')
    self.Value = reader.ReadVarBytes(max=65535)
    # run the type-specific validation for the deserialized state
    if self.Type == StateType.Account:
        self.CheckAccountState()
    elif self.Type == StateType.Validator:
        self.CheckValidatorState()
| 317,855
|
Serialize full object.
Args:
writer (neo.IO.BinaryWriter):
|
def Serialize(self, writer: BinaryWriter):
    """
    Serialize full StateDescriptor object.

    Args:
        writer (neo.IO.BinaryWriter): stream to write to.
    """
    # NOTE(review): `byt` stays None for any Type other than Account/Validator,
    # which would make WriteByte fail with an obscure error — confirm Type is
    # always one of the two before serializing.
    byt = None
    if self.Type == StateType.Account:
        byt = b'\x40'
    elif self.Type == StateType.Validator:
        byt = b'\x48'
    writer.WriteByte(byt)
    writer.WriteVarBytes(self.Key)
    writer.WriteVarString(self.Field)
    writer.WriteVarBytes(self.Value)
| 317,856
|
Create an instance.
Args:
script (bytes): (Optional)
|
def __init__(self, script=None):
    """
    Create an instance.

    Args:
        script (bytes): (Optional) contract script for the token.
    """
    # parameter list b'\x07\x10': contract expects (string, array) arguments
    # -- TODO confirm against ContractParameterType definitions
    param_list = bytearray(b'\x07\x10')
    super(NEP5Token, self).__init__(script=script, param_list=param_list)
| 317,860
|
Get a NEP5Token instance from a database token.
Args:
db_token (neo.Implementations.Wallets.peewee.Models.NEP5Token):
Returns:
NEP5Token: self.
|
def FromDBInstance(db_token):
    """
    Get a NEP5Token instance from a database token.

    Args:
        db_token (neo.Implementations.Wallets.peewee.Models.NEP5Token): db model instance.

    Returns:
        NEP5Token: a token built from the stored hash, name, symbol and decimals.
    """
    # the contract hash is stored hex-encoded; the byte order is reversed
    # before constructing the UInt160
    raw_hash = bytearray(binascii.unhexlify(db_token.ContractHash))
    raw_hash.reverse()
    script_hash = UInt160(data=raw_hash)
    token = NEP5Token(script=None)
    token.SetScriptHash(script_hash)
    token.name = db_token.Name
    token.symbol = db_token.Symbol
    token.decimals = db_token.Decimals
    return token
| 317,861
|
Query the smart contract for its token information (name, symbol, decimals).
Args:
wallet (neo.Wallets.Wallet): a wallet instance.
Returns:
None: if the NEP5Token instance `Name` is already set.
True: if all information was retrieved.
False: if information retrieval failed.
|
def Query(self):
    """
    Query the smart contract for its token information (name, symbol, decimals).

    Returns:
        None: if the NEP5Token instance `name` is already set.
        True: if all information was retrieved and passed the sanity checks.
        False: if information retrieval failed.
    """
    if self.name is not None:
        # don't query twice
        return
    sb = ScriptBuilder()
    sb.EmitAppCallWithOperation(self.ScriptHash, 'name')
    sb.EmitAppCallWithOperation(self.ScriptHash, 'symbol')
    sb.EmitAppCallWithOperation(self.ScriptHash, 'decimals')
    engine = None
    try:
        engine = ApplicationEngine.Run(sb.ToArray(), exit_on_error=True, gas=Fixed8.FromDecimal(10.0), test_mode=False)
    except Exception as e:
        # deliberate best-effort: any VM failure simply results in False below
        pass
    # expect exactly one result per emitted operation
    if engine and len(engine.ResultStack.Items) == 3:
        results = engine.ResultStack.Items
        try:
            self.name = results[0].GetString()
            self.symbol = results[1].GetString()
            self.decimals = results[2].GetBigInteger()
            # sanity checks to weed out junk results from non-NEP5 contracts
            if len(self.name) > 1 and self.name != 'Stack Item' \
                    and len(self.symbol) > 1 and self.symbol != 'Stack Item' \
                    and self.decimals < 10:
                return True
        except Exception as e:
            # malformed stack items: fall through to False
            pass
    return False
| 317,863
|
Get the token balance.
Args:
wallet (neo.Wallets.Wallet): a wallet instance.
address (str): public address of the account to get the token balance of.
as_string (bool): whether the return value should be a string. Default is False, returning an integer.
Returns:
int/str: token balance value as int (default), token balance as string if `as_string` is set to True. 0 if balance retrieval failed.
|
def GetBalance(self, wallet, address, as_string=False):
    """
    Get the token balance.

    Args:
        wallet (neo.Wallets.Wallet): a wallet instance.
        address (str): public address of the account to get the token balance of.
        as_string (bool): whether the return value should be a string. Default is False, returning an integer.

    Returns:
        int/str: token balance value as int (default), token balance as string if `as_string` is set to True. 0 if balance retrieval failed.
    """
    addr = PromptUtils.parse_param(address, wallet)
    if isinstance(addr, UInt160):
        addr = addr.Data
    sb = ScriptBuilder()
    sb.EmitAppCallWithOperationAndArgs(self.ScriptHash, 'balanceOf', [addr])
    tx, fee, results, num_ops, engine_success = test_invoke(sb.ToArray(), wallet, [])
    if engine_success:
        try:
            val = results[0].GetBigInteger()
            # scale the raw integer down by the token's decimal precision
            precision_divisor = pow(10, self.decimals)
            balance = Decimal(val) / Decimal(precision_divisor)
            if as_string:
                formatter_str = '.%sf' % self.decimals
                balance_str = format(balance, formatter_str)
                return balance_str
            return balance
        except Exception as e:
            logger.error("could not get balance: %s " % e)
            traceback.print_stack()
    else:
        addr_str = Crypto.ToAddress(UInt160(data=addr))
        logger.error(
            f"Could not get balance of address {addr_str} for token contract {self.ScriptHash}. VM execution failed. Make sure the contract exists on the network and that it adheres to the NEP-5 standard")
    # any failure path falls through to a zero balance
    return 0
| 317,864
|
Register for a crowd sale.
Args:
wallet (neo.Wallets.Wallet): a wallet instance.
register_addresses (list): list of public addresses to register for the sale.
Returns:
tuple:
InvocationTransaction: the transaction.
int: the transaction fee.
list: the neo VM evaluation stack results.
|
def CrowdsaleRegister(self, wallet, register_addresses, from_addr=None):
    """
    Register for a crowd sale.

    Args:
        wallet (neo.Wallets.Wallet): a wallet instance.
        register_addresses (list): list of public addresses to register for the sale.
        from_addr: (Optional) source address for the invocation.

    Returns:
        tuple:
            InvocationTransaction: the transaction.
            int: the transaction fee.
            list: the neo VM evaluation stack results.
    """
    invoke_args = [self.ScriptHash.ToString(), 'crowdsale_register',
                   [PromptUtils.parse_param(p, wallet) for p in register_addresses]]
    tx, fee, results, num_ops, engine_success = TestInvokeContract(wallet, invoke_args, None, True, from_addr)
    return tx, fee, results
| 317,869
|
Serialize this token data to bytes
Args:
writer (neocore.IO.BinaryWriter): binary writer to write serialization data to
|
def Serialize(self, writer):
    """
    Serialize this token data to bytes.

    Args:
        writer (neocore.IO.BinaryWriter): binary writer to write serialization data to.
    """
    writer.WriteVarString(self.name)
    writer.WriteVarString(self.symbol)
    writer.WriteUInt8(self.decimals)
| 317,870
|
Read serialized data from byte stream
Args:
reader (neocore.IO.BinaryReader): reader to read byte data from
|
def Deserialize(self, reader):
    """
    Read serialized token data from a byte stream.

    Args:
        reader (neocore.IO.BinaryReader): reader to read byte data from.
    """
    self.name = reader.ReadVarString().decode('utf-8')
    self.symbol = reader.ReadVarString().decode('utf-8')
    self.decimals = reader.ReadUInt8()
| 317,871
|
Create an instance.
Args:
index (int):
height (int):
|
def __init__(self, index, height):
    """
    Create an instance.

    Args:
        index (int): index of the spent output.
        height (int): block height at which it was spent.
    """
    self.index = index
    self.height = height
| 317,886
|
Create instance.
Args:
output (int): the index of the previous output.
start_height (int): start block number.
end_height (int): end block number.
|
def __init__(self, output, start_height, end_height):
    """
    Create an instance.

    Args:
        output (int): the index of the previous output.
        start_height (int): start block number.
        end_height (int): end block number.
    """
    self.Output = output
    self.StartHeight = start_height
    self.EndHeight = end_height
| 317,887
|
Create an instance.
Args:
hash (UInt256):
height (int):
items (list):
|
def __init__(self, hash=None, height=None, items=None):
    """
    Create an instance.

    Args:
        hash (UInt256): transaction hash.
        height (int): transaction height.
        items (list): of SpentCoinItem. Defaults to a fresh empty list.
    """
    self.TransactionHash = hash
    self.TransactionHeight = height
    # never share a default list between instances
    self.Items = [] if items is None else items
| 317,888
|
Flag indicating the index exists in any of the spent coin items.
Args:
index (int):
Returns:
|
def HasIndex(self, index):
    """
    Flag indicating the index exists in any of the spent coin items.

    Args:
        index (int): output index to look for.

    Returns:
        bool: True if any item carries `index`, False otherwise.
    """
    return any(entry.index == index for entry in self.Items)
| 317,889
|
Remove a spent coin based on its index.
Args:
index (int):
|
def DeleteIndex(self, index):
    """
    Remove a spent coin item based on its index.

    Args:
        index (int): output index of the item to remove. No-op when absent.
    """
    # remember the last entry matching `index`, then remove it (matches the
    # original scan-then-remove behaviour)
    target = None
    for entry in self.Items:
        if entry.index == index:
            target = entry
    if target:
        self.Items.remove(target)
| 317,890
|
Deserialize full object.
Args:
buffer (bytes, bytearray, BytesIO): (Optional) data to create the stream from.
Returns:
SpentCoinState:
|
def DeserializeFromDB(buffer):
    """
    Deserialize a full SpentCoinState object from raw database bytes.

    Args:
        buffer (bytes, bytearray, BytesIO): data to create the stream from.

    Returns:
        SpentCoinState: the deserialized instance.
    """
    m = StreamManager.GetStream(buffer)
    reader = BinaryReader(m)
    spentcoin = SpentCoinState()
    spentcoin.Deserialize(reader)
    # return the stream to the pool
    StreamManager.ReleaseStream(m)
    return spentcoin
| 317,891
|
Deserialize full object.
Args:
reader (neocore.IO.BinaryReader):
|
def Deserialize(self, reader):
    """
    Deserialize full SpentCoinState object.

    Args:
        reader (neocore.IO.BinaryReader): stream to read from.
    """
    super(SpentCoinState, self).Deserialize(reader)
    self.TransactionHash = reader.ReadUInt256()
    self.TransactionHeight = reader.ReadUInt32()
    count = reader.ReadVarInt()
    # pre-size the list, then fill each slot with a (index, height) item
    items = [0] * count
    for i in range(0, count):
        index = reader.ReadUInt16()
        height = reader.ReadUInt32()
        items[i] = SpentCoinItem(index=index, height=height)
    self.Items = items
| 317,892
|
Serialize full object.
Args:
writer (neo.IO.BinaryWriter):
|
def Serialize(self, writer):
    """
    Serialize full SpentCoinState object.

    Args:
        writer (neo.IO.BinaryWriter): stream to write to.
    """
    super(SpentCoinState, self).Serialize(writer)
    writer.WriteUInt256(self.TransactionHash)
    writer.WriteUInt32(self.TransactionHeight)
    # item count prefix followed by (index, height) pairs
    writer.WriteVarInt(len(self.Items))
    for item in self.Items:
        writer.WriteUInt16(item.index)
        writer.WriteUInt32(item.height)
| 317,893
|
Create an instance.
Args:
hash_start (list): a list of hash values. Each value is of the bytearray type. Note: should actually be UInt256 objects.
hash_stop (UInt256):
|
def __init__(self, hash_start=None, hash_stop=None):
    """
    Create an instance.

    Args:
        hash_start (list): a list of hash values. Each value is of the bytearray type.
            Note: should actually be UInt256 objects. Defaults to a new empty list.
        hash_stop (UInt256): defaults to a fresh zeroed UInt256.
    """
    # Use None sentinels instead of mutable default arguments: the previous
    # `hash_start=[]` / `hash_stop=UInt256()` defaults were created once at
    # function-definition time and shared between every instance, so mutating
    # one payload's HashStart leaked into all later default-constructed ones.
    self.HashStart = hash_start if hash_start is not None else []
    self.HashStop = hash_stop if hash_stop is not None else UInt256()
| 317,914
|
Deserialize full object.
Args:
reader (neo.IO.BinaryReader):
|
def Deserialize(self, reader):
    """
    Deserialize full object.

    Args:
        reader (neo.IO.BinaryReader): stream to read from.
    """
    self.HashStart = reader.ReadSerializableArray('neocore.UInt256.UInt256')
    self.HashStop = reader.ReadUInt256()
| 317,916
|
Serialize object.
Args:
writer (neo.IO.BinaryWriter):
|
def Serialize(self, writer):
    """
    Serialize object.

    Args:
        writer (neo.IO.BinaryWriter): stream to write to.
    """
    writer.WriteHashes(self.HashStart)
    # HashStop is optional; omit it entirely when unset
    if self.HashStop is not None:
        writer.WriteUInt256(self.HashStop)
| 317,917
|
Create an instance.
Args:
script_hash (UInt160):
key (bytes):
|
def __init__(self, script_hash=None, key=None):
    """
    Create an instance.

    Args:
        script_hash (UInt160): hash of the contract owning the storage item.
        key (bytes): storage key.
    """
    self.ScriptHash = script_hash
    self.Key = key
| 317,918
|
Create a signature contract.
Args:
publicKey (ecdsa.Curve.point): e.g. KeyPair.PublicKey.
Returns:
neo.SmartContract.Contract: a Contract instance.
|
def CreateSignatureContract(publicKey):
    """
    Create a single-signature contract.

    Args:
        publicKey (ecdsa.Curve.point): e.g. KeyPair.PublicKey.

    Returns:
        neo.SmartContract.Contract: a Contract instance.
    """
    script = Contract.CreateSignatureRedeemScript(publicKey)
    # single parameter of type Signature (0x00)
    params = b'\x00'
    # compressed point encoding is used for the script-hash derivation
    encoded = publicKey.encode_point(True)
    pubkey_hash = Crypto.ToScriptHash(encoded, unhex=True)
    return Contract(script, params, pubkey_hash)
| 317,926
|
Deserialize full object.
Args:
reader (neo.IO.BinaryReader):
|
def Deserialize(self, reader):
    """
    Deserialize full object.

    Args:
        reader (neo.IO.BinaryReader): stream to read from.

    Raises:
        Exception: if the padding byte after the unsigned data is not 1.
    """
    # invalidate any cached hash before mutating the fields
    self.__hash = None
    self.DeserializeUnsigned(reader)
    byt = reader.ReadByte()
    if int(byt) != 1:
        raise Exception('Incorrect format')
    witness = Witness()
    witness.Deserialize(reader)
    self.Script = witness
| 317,934
|
Deserialize unsigned data only.
Args:
reader (neo.IO.BinaryReader):
|
def DeserializeUnsigned(self, reader):
    """
    Deserialize unsigned data only.

    Args:
        reader (neo.IO.BinaryReader): stream to read from.
    """
    self.Version = reader.ReadUInt32()
    self.PrevHash = reader.ReadUInt256()
    self.MerkleRoot = reader.ReadUInt256()
    self.Timestamp = reader.ReadUInt32()
    self.Index = reader.ReadUInt32()
    self.ConsensusData = reader.ReadUInt64()
    self.NextConsensus = reader.ReadUInt160()
| 317,935
|
Serialize unsigned data only.
Args:
writer (neo.IO.BinaryWriter):
|
def SerializeUnsigned(self, writer):
    """
    Serialize unsigned data only (must mirror DeserializeUnsigned's field order).

    Args:
        writer (neo.IO.BinaryWriter): stream to write to.
    """
    writer.WriteUInt32(self.Version)
    writer.WriteUInt256(self.PrevHash)
    writer.WriteUInt256(self.MerkleRoot)
    writer.WriteUInt32(self.Timestamp)
    writer.WriteUInt32(self.Index)
    writer.WriteUInt64(self.ConsensusData)
    writer.WriteUInt160(self.NextConsensus)
| 317,936
|
Serialize full object.
Args:
writer (neo.IO.BinaryWriter):
|
def Serialize(self, writer):
    """
    Serialize full object.

    Args:
        writer (neo.IO.BinaryWriter): stream to write to.
    """
    self.SerializeUnsigned(writer)
    # padding byte checked by Deserialize
    writer.WriteByte(1)
    self.Script.Serialize(writer)
| 317,938
|
Create an instance.
Args:
prevHash (UInt160):
timestamp (int): seconds since Unix epoch.
index (int): block height.
consensusData (int): uint64.
nextConsensus (UInt160):
script (neo.Core.Witness): script used to verify the block.
transactions (list): of neo.Core.TX.Transaction.Transaction objects.
build_root (bool): flag indicating whether to rebuild the merkle root.
|
def __init__(self, prevHash=None, timestamp=None, index=None,
             consensusData=None, nextConsensus=None,
             script=None, transactions=None, build_root=False):
    """
    Create an instance.

    Args:
        prevHash (UInt256): hash of the previous block.
        timestamp (int): seconds since Unix epoch.
        index (int): block height.
        consensusData (int): uint64.
        nextConsensus (UInt160): script hash of the next consensus node.
        script (neo.Core.Witness): script used to verify the block.
        transactions (list): of neo.Core.TX.Transaction.Transaction objects.
        build_root (bool): flag indicating whether to rebuild the merkle root.
    """
    super(Block, self).__init__()
    self.Version = 0
    self.PrevHash = prevHash
    self.Timestamp = timestamp
    self.Index = index
    self.ConsensusData = consensusData
    self.NextConsensus = nextConsensus
    self.Script = script
    if transactions:
        self.Transactions = transactions
    else:
        self.Transactions = []
    if build_root:
        self.RebuildMerkleRoot()
| 317,944
|
Deserialize full object.
Args:
reader (neo.IO.BinaryReader):
|
def DeserializeForImport(self, reader):
    """
    Deserialize a full block for chain import.

    Unlike `Deserialize`, this variant validates the transaction count only
    after reading all transactions and does not verify the merkle root.

    Args:
        reader (neo.IO.BinaryReader): stream to read from.

    Raises:
        Exception: if the block contains no transactions.
    """
    super(Block, self).Deserialize(reader)
    self.Transactions = []
    transaction_length = reader.ReadVarInt()
    for i in range(0, transaction_length):
        tx = Transaction.DeserializeFrom(reader)
        self.Transactions.append(tx)
    if len(self.Transactions) < 1:
        raise Exception('Invalid format %s ' % self.Index)
| 317,949
|
Deserialize full object.
Args:
reader (neo.IO.BinaryReader):
|
def Deserialize(self, reader):
    """
    Deserialize a full block and verify its merkle root.

    Args:
        reader (neo.IO.BinaryReader): stream to read from.

    Raises:
        Exception: if the transaction count is below 1, or if the merkle
            root computed from the transactions does not match the header.
    """
    super(Block, self).Deserialize(reader)
    self.Transactions = []
    byt = reader.ReadVarInt()
    transaction_length = byt
    if transaction_length < 1:
        raise Exception('Invalid format')
    for i in range(0, transaction_length):
        tx = Transaction.DeserializeFrom(reader)
        self.Transactions.append(tx)
    # integrity check: recompute the merkle root from the tx hashes
    if MerkleTree.ComputeRoot([tx.Hash for tx in self.Transactions]) != self.MerkleRoot:
        raise Exception("Merkle Root Mismatch")
| 317,950
|
Deserialize a block from raw bytes.
Args:
byts:
Returns:
Block:
|
def FromTrimmedData(byts):
    """
    Deserialize a block from trimmed raw bytes (header + witness + tx hashes).

    Transactions are not embedded in trimmed data; they are looked up from the
    default blockchain instance by hash.

    Args:
        byts: raw trimmed block bytes.

    Returns:
        Block: the reconstructed block.

    Raises:
        Exception: if a referenced transaction cannot be found, or if the
            block ends up with no transactions.
    """
    block = Block()
    block.__is_trimmed = True
    ms = StreamManager.GetStream(byts)
    reader = BinaryReader(ms)
    block.DeserializeUnsigned(reader)
    # skip the constant padding byte between header and witness
    reader.ReadByte()
    witness = Witness()
    witness.Deserialize(reader)
    block.Script = witness
    bc = GetBlockchain()
    tx_list = []
    for tx_hash in reader.ReadHashes():
        tx = bc.GetTransaction(tx_hash)[0]
        if not tx:
            raise Exception("Could not find transaction!\n Are you running code against a valid Blockchain instance?\n Tests that accesses transactions or size of a block but inherit from NeoTestCase instead of BlockchainFixtureTestCase will not work.")
        tx_list.append(tx)
    if len(tx_list) < 1:
        raise Exception("Invalid block, no transactions found for block %s " % block.Index)
    block.Transactions = tx_list
    StreamManager.ReleaseStream(ms)
    return block
| 317,951
|
Serialize full object.
Args:
writer (neo.IO.BinaryWriter):
|
def Serialize(self, writer):
    """
    Serialize full block (header plus transactions).

    Args:
        writer (neo.IO.BinaryWriter): stream to write to.
    """
    super(Block, self).Serialize(writer)
    writer.WriteSerializableArray(self.Transactions)
| 317,953
|
Verify the integrity of the block.
Args:
completely: (Not functional at this time).
Returns:
bool: True if valid. False otherwise.
|
def Verify(self, completely=False):
    """
    Verify the integrity of the block.

    Args:
        completely: (Not functional at this time) — passing True raises
            NotImplementedError after logging an error.

    Returns:
        bool: True if valid. False otherwise.

    Raises:
        NotImplementedError: when `completely` is True.
    """
    res = super(Block, self).Verify()
    if not res:
        return False
    from neo.Blockchain import GetBlockchain, GetConsensusAddress
    # first TX has to be a miner transaction. other tx after that can't be miner tx
    if self.Transactions[0].Type != TransactionType.MinerTransaction:
        return False
    for tx in self.Transactions[1:]:
        if tx.Type == TransactionType.MinerTransaction:
            return False
    if completely:
        bc = GetBlockchain()
        if self.NextConsensus != GetConsensusAddress(bc.GetValidators(self.Transactions).ToArray()):
            return False
        for tx in self.Transactions:
            if not tx.Verify():
                pass
        logger.error("Blocks cannot be fully validated at this moment. please pass completely=False")
        raise NotImplementedError()
        # do this below! (port of the C# reference implementation)
        # foreach(Transaction tx in Transactions)
        # if (!tx.Verify(Transactions.Where(p = > !p.Hash.Equals(tx.Hash)))) return false;
        # Transaction tx_gen = Transactions.FirstOrDefault(p= > p.Type == TransactionType.MinerTransaction);
        # if (tx_gen?.Outputs.Sum(p = > p.Value) != CalculateNetFee(Transactions)) return false;
    return True
| 317,956
|
Try to get a NEP-5 token based on the symbol or script_hash
Args:
wallet: wallet instance
token_str: symbol or script_hash (accepts script hash with or without 0x prefix)
Raises:
ValueError: if token is not found
Returns:
NEP5Token instance if found.
|
def get_token(wallet: 'Wallet', token_str: str) -> 'NEP5Token.NEP5Token':
    """
    Try to get a NEP-5 token based on the symbol or script_hash.

    Args:
        wallet: wallet instance.
        token_str: symbol or script_hash (accepts script hash with or without 0x prefix).

    Raises:
        ValueError: if the token is not found.

    Returns:
        NEP5Token instance if found.
    """
    # normalize: accept script hashes with or without the 0x prefix
    needle = token_str[2:] if token_str.startswith('0x') else token_str
    match = next(
        (t for t in wallet.GetTokens().values()
         if needle in [t.symbol, t.ScriptHash.ToString()]),
        None,
    )
    if not isinstance(match, NEP5Token.NEP5Token):
        raise ValueError("The given token argument does not represent a known NEP5 token")
    return match
| 318,050
|
Create an instance.
Args:
pub_key (EllipticCurve.ECPoint):
Raises:
Exception: if `pub_key` is not a valid ECPoint.
|
def __init__(self, pub_key=None):
    """
    Create an instance.

    Args:
        pub_key (EllipticCurve.ECPoint): (Optional) validator public key.

    Raises:
        Exception: if `pub_key` is not a valid ECPoint.
    """
    # exact type check: subclasses of ECPoint are rejected as well
    if pub_key is not None and type(pub_key) is not EllipticCurve.ECPoint:
        raise Exception("Pubkey must be ECPoint Instance")
    self.PublicKey = pub_key
| 318,056
|
Deserialize full object.
Args:
reader (neocore.IO.BinaryReader):
|
def Deserialize(self, reader: BinaryReader):
    """
    Deserialize full ValidatorState object.

    Args:
        reader (neocore.IO.BinaryReader): stream to read from.
    """
    super(ValidatorState, self).Deserialize(reader)
    self.PublicKey = ECDSA.Deserialize_Secp256r1(reader)
    self.Registered = reader.ReadBool()
    self.Votes = reader.ReadFixed8()
| 318,058
|
Serialize full object.
Args:
writer (neo.IO.BinaryWriter):
|
def Serialize(self, writer: BinaryWriter):
    """
    Serialize full ValidatorState object.

    Args:
        writer (neo.IO.BinaryWriter): stream to write to.
    """
    super(ValidatorState, self).Serialize(writer)
    self.PublicKey.Serialize(writer)
    writer.WriteBool(self.Registered)
    writer.WriteFixed8(self.Votes)
| 318,059
|
Create instance.
Args:
*args:
**kwargs:
|
def __init__(self, *args, **kwargs):
    """
    Create an instance.

    Args:
        *args: forwarded to the Transaction base class.
        **kwargs: forwarded to the Transaction base class.
    """
    super(PublishTransaction, self).__init__(*args, **kwargs)
    self.Type = TransactionType.PublishTransaction
| 318,063
|
Deserialize full object.
Args:
reader (neo.IO.BinaryReader):
|
def DeserializeExclusiveData(self, reader):
    """
    Deserialize the PublishTransaction-specific data.

    Args:
        reader (neo.IO.BinaryReader): stream to read from.
    """
    # NOTE(review): a version > 1 is only logged, deserialization still
    # proceeds — confirm whether this should raise a format error instead
    if self.Version > 1:
        logger.error("format exception...")
    self.Code = FunctionCode()
    self.Code.Deserialize(reader)
    # the NeedStorage flag only exists on-wire from version 1 onward
    if self.Version >= 1:
        self.NeedStorage = reader.ReadBool()
    else:
        self.NeedStorage = False
    self.Name = reader.ReadVarString()
    self.CodeVersion = reader.ReadVarString()
    self.Author = reader.ReadVarString()
    self.Email = reader.ReadVarString()
    self.Description = reader.ReadVarString()
| 318,064
|
Serialize object.
Args:
writer (neo.IO.BinaryWriter):
|
def SerializeExclusiveData(self, writer):
    """
    Serialize the PublishTransaction-specific data.

    Args:
        writer (neo.IO.BinaryWriter): stream to write to.
    """
    self.Code.Serialize(writer)
    # the NeedStorage flag only exists on-wire from version 1 onward
    if self.Version >= 1:
        writer.WriteBool(self.NeedStorage)
    writer.WriteVarString(self.Name)
    writer.WriteVarString(self.CodeVersion)
    writer.WriteVarString(self.Author)
    writer.WriteVarString(self.Email)
    writer.WriteVarString(self.Description)
| 318,065
|
Initialize
Args:
address: a host:port
last_connection_to: timestamp since we were last connected. Default's to 0 indicating 'never'
|
def __init__(self, address: str, last_connection_to: float = None):
    """
    Initialize.

    Args:
        address: a host:port string.
        last_connection_to: timestamp since we were last connected. Defaults to 0 indicating 'never'.
    """
    # a falsy timestamp (None or 0) is normalized to 0 ('never connected')
    if not last_connection_to:
        self.last_connection = 0
    else:
        self.last_connection = last_connection_to
    self.address = address
| 318,067
|
Create an instance.
Args:
port (int):
nonce (int):
userAgent (str): client user agent string.
|
def __init__(self, port=None, nonce=None, userAgent=None):
    """
    Create an instance.

    Args:
        port (int): listen port to advertise.
        nonce (int): node nonce.
        userAgent (str): client user agent string.
    """
    # fields are only populated when all three arguments are supplied;
    # otherwise the payload is left empty (e.g. for later Deserialize)
    if port and nonce and userAgent:
        self.Port = port
        self.Version = 0
        self.Services = NetworkAddressWithTime.NODE_NETWORK
        self.Timestamp = int(datetime.datetime.utcnow().timestamp())
        self.Nonce = nonce
        self.UserAgent = userAgent
        # advertise our current chain height when a blockchain is available
        if Blockchain.Default() is not None and Blockchain.Default().Height is not None:
            self.StartHeight = Blockchain.Default().Height
        self.Relay = True
| 318,069
|
Deserialize full object.
Args:
reader (neo.IO.BinaryReader):
|
def Deserialize(self, reader):
    """
    Deserialize full VersionPayload object.

    Args:
        reader (neo.IO.BinaryReader): stream to read from.
    """
    self.Version = reader.ReadUInt32()
    self.Services = reader.ReadUInt64()
    self.Timestamp = reader.ReadUInt32()
    self.Port = reader.ReadUInt16()
    self.Nonce = reader.ReadUInt32()
    self.UserAgent = reader.ReadVarString().decode('utf-8')
    self.StartHeight = reader.ReadUInt32()
    logger.debug("Version start height: T %s " % self.StartHeight)
    self.Relay = reader.ReadBool()
| 318,071
|
Serialize object.
Args:
writer (neo.IO.BinaryWriter):
|
def Serialize(self, writer):
    """
    Serialize VersionPayload object (field order mirrors Deserialize).

    Args:
        writer (neo.IO.BinaryWriter): stream to write to.
    """
    writer.WriteUInt32(self.Version)
    writer.WriteUInt64(self.Services)
    writer.WriteUInt32(self.Timestamp)
    writer.WriteUInt16(self.Port)
    writer.WriteUInt32(self.Nonce)
    writer.WriteVarString(self.UserAgent)
    writer.WriteUInt32(self.StartHeight)
    writer.WriteBool(self.Relay)
| 318,072
|
Get the local node instance.
Args:
reactor: (optional) custom reactor to use in NodeLeader.
Returns:
NodeLeader: instance.
|
def Instance(reactor=None):
    """
    Get the local node singleton, creating it on first use.

    Args:
        reactor: (optional) custom reactor to use in NodeLeader.

    Returns:
        NodeLeader: instance.
    """
    # lazily create the singleton; `reactor` is only honored on first call
    if NodeLeader._LEAD is None:
        NodeLeader._LEAD = NodeLeader(reactor)
    return NodeLeader._LEAD
| 318,075
|
Start connecting to the seed list.
Args:
seed_list: a list of host:port strings if not supplied use list from `protocol.xxx.json`
skip_seeds: skip connecting to seed list
|
def Start(self, seed_list: List[str] = None, skip_seeds: bool = False) -> None:
    """
    Start connecting to the seed list and set up the maintenance loops.

    Args:
        seed_list: a list of host:port strings; if not supplied use list from `protocol.xxx.json`.
        skip_seeds: skip connecting to seed list.
    """
    if not seed_list:
        seed_list = settings.SEED_LIST
    logger.debug("Starting up nodeleader")
    if not skip_seeds:
        logger.debug("Attempting to connect to seed list...")
        for bootstrap in seed_list:
            # resolve hostnames to IPs so addresses are comparable later
            if not is_ip_address(bootstrap):
                host, port = bootstrap.split(':')
                bootstrap = f"{hostname_to_ip(host)}:{port}"
            addr = Address(bootstrap)
            self.KNOWN_ADDRS.append(addr)
            self.SetupConnection(addr)
    logger.debug("Starting up nodeleader: starting peer, mempool, and blockheight check loops")
    # check in on peers every 10 seconds
    self.start_peer_check_loop()
    self.start_memcheck_loop()
    self.start_blockheight_loop()
    if settings.ACCEPT_INCOMING_PEERS and not self.incoming_server_running:
        # one-off Twisted factory so incoming peers are registered with this leader
        class OneShotFactory(Factory):
            def __init__(self, leader):
                self.leader = leader

            def buildProtocol(self, addr):
                print(f"building new protocol for addr: {addr}")
                self.leader.AddKnownAddress(Address(f"{addr.host}:{addr.port}"))
                p = NeoNode(incoming_client=True)
                p.factory = self
                return p

        def listen_err(err):
            print(f"Failed start listening server for reason: {err.value}")

        def listen_ok(value):
            self.incoming_server_running = True

        logger.debug(f"Starting up nodeleader: setting up listen server on port: {settings.NODE_PORT}")
        server_endpoint = TCP4ServerEndpoint(self.reactor, settings.NODE_PORT)
        listenport_deferred = server_endpoint.listen(OneShotFactory(leader=self))
        listenport_deferred.addCallback(listen_ok)
        listenport_deferred.addErrback(listen_err)
| 318,089
|
Add a new connect peer to the known peers list.
Args:
peer (NeoNode): instance.
|
def AddConnectedPeer(self, peer):
    """
    Add a newly connected peer to the known peers list.

    Args:
        peer (NeoNode): instance.
    """
    # if present
    self.RemoveFromQueue(peer.address)
    self.AddKnownAddress(peer.address)
    # NOTE(review): when the max is exceeded the peer is disconnected
    # (isDead=False) but execution continues, so a not-yet-listed peer is
    # still appended below — confirm whether this should `return` instead.
    if len(self.Peers) > settings.CONNECTED_PEER_MAX:
        peer.Disconnect("Max connected peers reached", isDead=False)
    if peer not in self.Peers:
        self.Peers.append(peer)
    else:
        # either peer is already in the list and it has reconnected before it timed out on our side
        # or it's trying to connect multiple times
        # or we hit the max connected peer count
        self.RemoveKnownAddress(peer.address)
        peer.Disconnect()
| 318,095
|
Remove a connected peer from the known peers list.
Args:
peer (NeoNode): instance.
|
def RemoveConnectedPeer(self, peer):
    """
    Remove a connected peer from the known peers list.

    Args:
        peer (NeoNode): instance. No-op when not present.
    """
    try:
        self.Peers.remove(peer)
    except ValueError:
        # peer was not in the list; nothing to do
        pass
| 318,096
|
Remove an address from the connection queue
Args:
addr:
Returns:
|
def RemoveFromQueue(self, addr):
    """
    Remove an address from the connection queue.

    Args:
        addr: the address to remove. No-op when not queued.
    """
    try:
        self.connection_queue.remove(addr)
    except ValueError:
        # address was not queued; nothing to do
        pass
| 318,097
|
Process a received inventory.
Args:
inventory (neo.Network.Inventory): expect a Block type.
Returns:
bool: True if processed and verified. False otherwise.
|
def InventoryReceived(self, inventory):
    """
    Process a received inventory.

    Args:
        inventory (neo.Network.Inventory): expect a Block type.

    Returns:
        bool: False if processing/verification failed; None otherwise
            (success paths fall through without an explicit return).
    """
    if inventory.Hash.ToBytes() in self._MissedBlocks:
        self._MissedBlocks.remove(inventory.Hash.ToBytes())
    # NOTE(review): `is MinerTransaction` compares an *instance* against the
    # class object and is always False for instances — presumably
    # `type(inventory) is MinerTransaction` was intended; confirm.
    if inventory is MinerTransaction:
        return False
    if type(inventory) is Block:
        if BC.Default() is None:
            return False
        if BC.Default().ContainsBlock(inventory.Index):
            return False
        if not BC.Default().AddBlock(inventory):
            return False
    else:
        if not inventory.Verify(self.MemPool.values()):
            return False
| 318,105
|
Relay the inventory to the remote client.
Args:
inventory (neo.Network.Inventory):
Returns:
bool: True if relayed successfully. False otherwise.
|
def RelayDirectly(self, inventory):
    """
    Relay the inventory to all connected peers.

    Args:
        inventory (neo.Network.Inventory): item to relay.

    Returns:
        bool: True if at least one peer relayed it (always True under the
            test blockchain when no peers are connected). False otherwise.
    """
    relayed = False
    # cache so peers requesting the hash can be served
    self.RelayCache[inventory.Hash.ToBytes()] = inventory
    for peer in self.Peers:
        relayed |= peer.Relay(inventory)
    if len(self.Peers) == 0:
        if type(BC.Default()) is TestLevelDBBlockchain:
            # mock a true result for tests
            return True
        logger.info("no connected peers")
    return relayed
| 318,106
|
Relay the inventory to the remote client.
Args:
inventory (neo.Network.Inventory):
Returns:
bool: True if relayed successfully. False otherwise.
|
def Relay(self, inventory):
    """
    Relay the inventory to the remote client.

    Args:
        inventory (neo.Network.Inventory): item to relay.

    Returns:
        bool: True if relayed successfully. False otherwise (miner
            transactions, already-known hashes, or rejected transactions).
    """
    if type(inventory) is MinerTransaction:
        return False
    # de-duplicate: never relay the same hash twice
    if inventory.Hash.ToBytes() in self.KnownHashes:
        return False
    self.KnownHashes.append(inventory.Hash.ToBytes())
    if type(inventory) is Block:
        pass
    elif type(inventory) is Transaction or issubclass(type(inventory), Transaction):
        if not self.AddTransaction(inventory):
            # if we fail to add the transaction for whatever reason, remove it from the known hashes list or we cannot retry the same transaction again
            try:
                self.KnownHashes.remove(inventory.Hash.ToBytes())
            except ValueError:
                # it was not found
                pass
            return False
    else:
        # consensus payloads need no mempool handling
        pass
    relayed = self.RelayDirectly(inventory)
    return relayed
| 318,107
|
Add a transaction to the memory pool.
Args:
tx (neo.Core.TX.Transaction): instance.
Returns:
bool: True if successfully added. False otherwise.
|
def AddTransaction(self, tx):
    """
    Add a transaction to the memory pool.

    Args:
        tx (neo.Core.TX.Transaction): instance.

    Returns:
        bool: True if successfully added. False otherwise (no blockchain,
            already pooled, already on-chain, or failed verification).
    """
    if BC.Default() is None:
        return False
    if tx.Hash.ToBytes() in self.MemPool.keys():
        return False
    if BC.Default().ContainsTransaction(tx.Hash):
        return False
    # verify against the other pooled transactions (e.g. double spends)
    if not tx.Verify(self.MemPool.values()):
        logger.error("Verifying tx result... failed")
        return False
    self.MemPool[tx.Hash.ToBytes()] = tx
    return True
| 318,109
|
Remove a transaction from the memory pool if it is found on the blockchain.
Args:
tx (neo.Core.TX.Transaction): instance.
Returns:
bool: True if successfully removed. False otherwise.
|
def RemoveTransaction(self, tx):
    """
    Remove a transaction from the memory pool if it is found on the blockchain.

    Args:
        tx (neo.Core.TX.Transaction): instance.

    Returns:
        bool: True if successfully removed. False otherwise (no blockchain,
            not yet on-chain, or not present in the pool).
    """
    if BC.Default() is None:
        return False
    # only evict once the chain actually contains the transaction
    if not BC.Default().ContainsTransaction(tx.Hash):
        return False
    if tx.Hash.ToBytes() in self.MemPool:
        del self.MemPool[tx.Hash.ToBytes()]
        return True
    return False
| 318,110
|
Called when we fail to connect to an endpoint
Args:
err: Twisted Failure instance
address: the address we failed to connect to
|
def clientConnectionFailed(self, err, address: Address):
    """
    Called when we fail to connect to an endpoint.

    Args:
        err: Twisted Failure instance.
        address: the address we failed to connect to.

    Returns:
        the failure's exception type (returned for testing purposes).
    """
    # log with as much detail as the failure type provides
    if type(err.value) == error.TimeoutError:
        logger.debug(f"Failed connecting to {address} connection timed out")
    elif type(err.value) == error.ConnectError:
        ce = err.value
        if len(ce.args) > 0:
            logger.debug(f"Failed connecting to {address} {ce.args[0].value}")
        else:
            logger.debug(f"Failed connecting to {address}")
    else:
        logger.debug(f"Failed connecting to {address} {err.value}")
    self.peers_connecting -= 1
    self.RemoveKnownAddress(address)
    self.RemoveFromQueue(address)
    # if we failed to connect to new addresses, we should always add them to the DEAD_ADDRS list
    self.AddDeadAddress(address)
    # for testing
    return err.type
| 318,113
|
Open filepath with its default viewing application (platform-specific).
Args:
filepath: Path to the file to open in viewer.
Raises:
RuntimeError: If the current platform is not supported.
|
def view(filepath):
    """
    Open filepath with its default viewing application (platform-specific).

    Args:
        filepath: Path to the file to open in viewer.

    Raises:
        RuntimeError: If the current platform is not supported.
    """
    # platform-specific openers are attached to `view` as attributes
    # (e.g. view.darwin, view.linux, view.windows)
    try:
        view_func = getattr(view, PLATFORM)
    except AttributeError:
        raise RuntimeError('platform %r not supported' % PLATFORM)
    view_func(filepath)
| 318,138
|
Save the DOT source to file. Ensure the file ends with a newline.
Args:
filename: Filename for saving the source (defaults to ``name`` + ``'.gv'``)
directory: (Sub)directory for source saving and rendering.
Returns:
The (possibly relative) path of the saved source file.
|
def save(self, filename=None, directory=None):
    """
    Save the DOT source to file. Ensure the file ends with a newline.

    Args:
        filename: Filename for saving the source (defaults to ``name`` + ``'.gv'``).
        directory: (Sub)directory for source saving and rendering.

    Returns:
        The (possibly relative) path of the saved source file.
    """
    if filename is not None:
        self.filename = filename
    if directory is not None:
        self.directory = directory
    filepath = self.filepath
    # create intermediate directories as needed
    tools.mkdirs(filepath)
    data = text_type(self.source)
    with io.open(filepath, 'w', encoding=self.encoding) as fd:
        fd.write(data)
        # guarantee a trailing newline without modifying `data`
        if not data.endswith(u'\n'):
            fd.write(u'\n')
    return filepath
| 318,147
|
Return an instance with the source string read from the given file.
Args:
filename: Filename for loading/saving the source.
directory: (Sub)directory for source loading/saving and rendering.
format: Rendering output format (``'pdf'``, ``'png'``, ...).
engine: Layout command used (``'dot'``, ``'neato'``, ...).
encoding: Encoding for loading/saving the source.
|
def from_file(cls, filename, directory=None,
              format=None, engine=None, encoding=File._encoding):
    """
    Return an instance with the source string read from the given file.

    Args:
        filename: Filename for loading/saving the source.
        directory: (Sub)directory for source loading/saving and rendering.
        format: Rendering output format (``'pdf'``, ``'png'``, ...).
        engine: Layout command used (``'dot'``, ``'neato'``, ...).
        encoding: Encoding for loading/saving the source.
    """
    filepath = os.path.join(directory or '', filename)
    if encoding is None:
        # fall back to the locale's preferred encoding
        encoding = locale.getpreferredencoding()
    with io.open(filepath, encoding=encoding) as fd:
        source = fd.read()
    return cls(source, filename, directory, format, engine, encoding)
| 318,151
|
Return copy of ``s`` that will not treat ``'<...>'`` as DOT HTML string in quoting.
Args:
s: String in which leading ``'<'`` and trailing ``'>'`` should be treated as literal.
Raises:
TypeError: If ``s`` is not a ``str`` on Python 3, or a ``str``/``unicode`` on Python 2.
>>> quote('<>-*-<>')
'<>-*-<>'
>>> quote(nohtml('<>-*-<>'))
'"<>-*-<>"'
|
def nohtml(s):
    """
    Return copy of ``s`` that will not treat ``'<...>'`` as DOT HTML string in quoting.

    Args:
        s: String in which leading ``'<'`` and trailing ``'>'`` should be treated as literal.

    Raises:
        TypeError: If ``s`` is not a ``str`` on Python 3, or a ``str``/``unicode`` on Python 2.

    >>> quote('<>-*-<>')
    '<>-*-<>'
    >>> quote(nohtml('<>-*-<>'))
    '"<>-*-<>"'
    """
    # NOHTML maps supported string types to their marker subclasses
    try:
        subcls = NOHTML[type(s)]
    except KeyError:
        raise TypeError('%r does not have one of the required types: %r' %
                        (s, list(NOHTML)))
    return subcls(s)
| 318,158
|
Reset content to an empty body, clear graph/node/edge_attr mappings.
Args:
keep_attrs (bool): preserve graph/node/edge_attr mappings
|
def clear(self, keep_attrs=False):
    """
    Reset content to an empty body, clear graph/node/edge_attr mappings.

    Args:
        keep_attrs (bool): preserve graph/node/edge_attr mappings.
    """
    if not keep_attrs:
        self.graph_attr.clear()
        self.node_attr.clear()
        self.edge_attr.clear()
    # empty the body in place so external references stay valid
    self.body[:] = []
| 318,162
|
Create a node.
Args:
name: Unique identifier for the node inside the source.
label: Caption to be displayed (defaults to the node ``name``).
attrs: Any additional node attributes (must be strings).
|
def node(self, name, label=None, _attributes=None, **attrs):
    """Create a node.

    Args:
        name: Unique identifier for the node inside the source.
        label: Caption to be displayed (defaults to the node ``name``).
        attrs: Any additional node attributes (must be strings).
    """
    quoted = self._quote(name)
    attributes = self._attr_list(label, attrs, _attributes)
    self.body.append(self._node % (quoted, attributes))
| 318,164
|
Create an edge between two nodes.
Args:
tail_name: Start node identifier.
head_name: End node identifier.
label: Caption to be displayed near the edge.
attrs: Any additional edge attributes (must be strings).
|
def edge(self, tail_name, head_name, label=None, _attributes=None, **attrs):
    """Create an edge between two nodes.

    Args:
        tail_name: Start node identifier.
        head_name: End node identifier.
        label: Caption to be displayed near the edge.
        attrs: Any additional edge attributes (must be strings).
    """
    tail = self._quote_edge(tail_name)
    head = self._quote_edge(head_name)
    attributes = self._attr_list(label, attrs, _attributes)
    self.body.append(self._edge % (tail, head, attributes))
| 318,165
|
Create a bunch of edges.
Args:
tail_head_iter: Iterable of ``(tail_name, head_name)`` pairs.
|
def edges(self, tail_head_iter):
    """Create a bunch of edges.

    Args:
        tail_head_iter: Iterable of ``(tail_name, head_name)`` pairs.
    """
    # Bind the template and quoting helper locally before the sweep.
    plain = self._edge_plain
    quoted = self._quote_edge
    self.body.extend(plain % (quoted(tail), quoted(head))
                     for tail, head in tail_head_iter)
| 318,166
|
Add a general or graph/node/edge attribute statement.
Args:
kw: Attributes target (``None`` or ``'graph'``, ``'node'``, ``'edge'``).
attrs: Attributes to be set (must be strings, may be empty).
See the :ref:`usage examples in the User Guide <attributes>`.
|
def attr(self, kw=None, _attributes=None, **attrs):
    """Add a general or graph/node/edge attribute statement.

    Args:
        kw: Attributes target (``None`` or ``'graph'``, ``'node'``, ``'edge'``).
        attrs: Attributes to be set (must be strings, may be empty).

    See the :ref:`usage examples in the User Guide <attributes>`.
    """
    if kw is not None and kw.lower() not in ('graph', 'node', 'edge'):
        raise ValueError('attr statement must target graph, node, or edge: '
                         '%r' % kw)
    if not (attrs or _attributes):
        return  # nothing to add
    if kw is None:
        # General attribute statement, not bound to a target kind.
        line = self._attr_plain % self._a_list(None, attrs, _attributes)
    else:
        line = self._attr % (kw, self._attr_list(None, attrs, _attributes))
    self.body.append(line)
| 318,167
|
Check config settings and setup Flask-Sendemail.
Args:
app(Flask): The Flask application instance.
|
def __init__(self, app, sender_email=None, sender_name=None):
    """Check config settings and setup Flask-Sendmail.

    Args:
        app(Flask): The Flask application instance.
    """
    super(SendmailEmailAdapter, self).__init__(app)
    # Flask-Sendmail is an optional dependency; fail with a helpful
    # message when it is not installed.
    try:
        from flask_sendmail import Mail
    except ImportError:
        raise ConfigError(
            "The Flask-Sendmail package is missing. Install Flask-Sendmail with 'pip install Flask-Sendmail'.")
    else:
        self.mail = Mail(app)
| 320,131
|
Send email message via Flask-Sendmail.
Args:
recipient: Email address or tuple of (Name, Email-address).
subject: Subject line.
html_message: The message body in HTML.
text_message: The message body in plain text.
|
def send_email_message(self, recipient, subject, html_message, text_message, sender_email, sender_name):
    """Send email message via Flask-Sendmail.

    Args:
        recipient: Email address or tuple of (Name, Email-address).
        subject: Subject line.
        html_message: The message body in HTML.
        text_message: The message body in plain text.
    """
    if current_app.testing:  # pragma: no cover
        return  # suppress real mail delivery while under test
    from flask_sendmail import Message
    # Prepare and deliver the email message.
    message = Message(subject,
                      recipients=[recipient],
                      html=html_message,
                      body=text_message)
    self.mail.send(message)
| 320,132
|
Initialize the appropriate DbAdapter, based on the ``db`` parameter type.
Args:
app(Flask): The Flask application instance.
db: The Object-Database Mapper instance.
UserClass: The User class.
UserEmailClass: Optional UserEmail class for multiple-emails-per-user feature.
UserInvitationClass: Optional UserInvitation class for user-invitation feature.
RoleClass: For testing purposes only.
|
def __init__(self, app, db, UserClass, UserEmailClass=None, UserInvitationClass=None, RoleClass=None):
    """Initialize the appropriate DbAdapter, based on the ``db`` parameter type.

    Args:
        app(Flask): The Flask application instance.
        db: The Object-Database Mapper instance.
        UserClass: The User class.
        UserEmailClass: Optional UserEmail class for multiple-emails-per-user feature.
        UserInvitationClass: Optional UserInvitation class for user-invitation feature.
        RoleClass: For testing purposes only.

    Raises:
        ConfigError: If ``db`` matches none of the supported ORMs.
    """
    self.app = app
    self.db = db
    self.UserClass = UserClass
    self.UserEmailClass = UserEmailClass
    self.UserInvitationClass = UserInvitationClass
    self.RoleClass = RoleClass

    self.user_manager = app.user_manager
    self.db_adapter = None

    # Each probe below is guarded both by "no adapter chosen yet" and by a
    # try/except ImportError, so that only the installed ORM extensions are
    # considered. Probe order: SQLAlchemy, MongoEngine, Flywheel, PynamoDB.

    # Check if db is a SQLAlchemy instance
    if self.db_adapter is None:
        try:
            from flask_sqlalchemy import SQLAlchemy

            if isinstance(db, SQLAlchemy):
                self.db_adapter = SQLDbAdapter(app, db)
        except ImportError:
            pass # Ignore ImportErrors

    # Check if db is a MongoEngine instance
    if self.db_adapter is None:
        try:
            from flask_mongoengine import MongoEngine

            if isinstance(db, MongoEngine):
                self.db_adapter = MongoDbAdapter(app, db)
        except ImportError:
            pass # Ignore ImportErrors

    # Check if db is a Flywheel instance
    if self.db_adapter is None: # pragma: no cover
        try:
            from flask_flywheel import Flywheel

            if isinstance(db, Flywheel):
                self.db_adapter = DynamoDbAdapter(app, db)
        except ImportError:
            pass # Ignore ImportErrors

    # Check if the UserClass is a Pynamo Model.
    # Note: PynamoDB is detected via the UserClass, not via ``db``.
    if self.db_adapter is None:
        try:
            from pynamodb.models import Model

            if issubclass(UserClass, Model):
                self.db_adapter = PynamoDbAdapter(app)
        except ImportError:
            pass # Ignore ImportErrors

    # Check self.db_adapter: no probe succeeded means misconfiguration.
    if self.db_adapter is None:
        raise ConfigError(
            'No Flask-SQLAlchemy, Flask-MongoEngine or Flask-Flywheel installed and no Pynamo Model in use.'\
            ' You must install one of these Flask extensions.')
| 320,133
|
Check config settings and initialize the Fernet encryption cypher.
Fernet is basically AES128 in CBC mode, with a timestamp and a signature.
Args:
app(Flask): The Flask application instance.
|
def __init__(self, app):
    """Check config settings and initialize the Fernet encryption cypher.

    Fernet is basically AES128 in CBC mode, with a timestamp and a signature.

    Args:
        app(Flask): The Flask application instance.

    Raises:
        ConfigError: If the SECRET_KEY setting is missing or empty.
    """
    self.app = app

    # Derive the encryption key from the application's SECRET_KEY.
    flask_secret_key = app.config.get('SECRET_KEY', None)
    if not flask_secret_key:
        raise ConfigError('Config setting SECRET_KEY is missing.')

    key = flask_secret_key.encode()
    if len(key) < 32:
        # Fernet requires 32 key bytes; a short key will be space-padded.
        print('WARNING: Flask-User TokenManager: SECRET_KEY is shorter than 32 bytes.')

    # Pad (if needed) and truncate to exactly 32 bytes, then base64-encode
    # as required by the Fernet constructor.
    key32 = (key + b' ' * 32)[:32]
    base64_key32 = base64.urlsafe_b64encode(key32)

    # Fernet: encrypt, timestamp, sign, and base64-encode.
    from cryptography.fernet import Fernet
    self.fernet = Fernet(base64_key32)
| 320,179
|
Check config settings and setup Flask-Mail.
Args:
app(Flask): The Flask application instance.
|
def __init__(self, app):
    """Check config settings and setup Flask-Mail.

    Args:
        app(Flask): The Flask application instance.
    """
    super(SMTPEmailAdapter, self).__init__(app)
    # Flask-Mail is an optional dependency; fail with a helpful
    # message when it is not installed.
    try:
        from flask_mail import Mail
    except ImportError:
        raise ConfigError(
            "The Flask-Mail package is missing. Install Flask-Mail with 'pip install Flask-Mail'.")
    else:
        self.mail = Mail(app)
| 320,236
|
Send email message via Flask-Mail.
Args:
recipient: Email address or tuple of (Name, Email-address).
subject: Subject line.
html_message: The message body in HTML.
text_message: The message body in plain text.
|
def send_email_message(self, recipient, subject, html_message, text_message, sender_email, sender_name):
    """Send email message via Flask-Mail.

    Args:
        recipient: Email address or tuple of (Name, Email-address).
        subject: Subject line.
        html_message: The message body in HTML.
        text_message: The message body in plain text.

    Raises:
        EmailError: On SMTP connection or authentication failures.
    """
    # Construct sender from sender_name and sender_email.
    if sender_name:
        sender = '"%s" <%s>' % (sender_name, sender_email)
    else:
        sender = sender_email

    # Skip actual SMTP delivery while testing.
    if current_app.testing:  # pragma: no cover
        return

    try:
        from flask_mail import Message
        message = Message(subject,
                          sender=sender,
                          recipients=[recipient],
                          html=html_message,
                          body=text_message)
        self.mail.send(message)
    # Print helpful error messages on exceptions
    except (socket.gaierror, socket.error) as e:
        raise EmailError('SMTP Connection error: Check your MAIL_SERVER and MAIL_PORT settings.')
    except smtplib.SMTPAuthenticationError:
        raise EmailError('SMTP Authentication error: Check your MAIL_USERNAME and MAIL_PASSWORD settings.')
| 320,237
|
Initialize the PasswordManager and create a passlib CryptContext.
The password hash schemes are read from the application's
``USER_PASSLIB_CRYPTCONTEXT_SCHEMES`` setting.
Examples: ``'bcrypt', 'pbkdf2_sha512', 'sha512_crypt' or 'argon2'``.
Args:
app(Flask): The Flask application instance.
|
def __init__(self, app):
    """Initialize the PasswordManager and create a passlib CryptContext.

    The hashing schemes and their options are read from the
    ``USER_PASSLIB_CRYPTCONTEXT_SCHEMES`` and
    ``USER_PASSLIB_CRYPTCONTEXT_KEYWORDS`` settings.

    Args:
        app(Flask): The Flask application instance.
    """
    self.app = app
    self.user_manager = app.user_manager

    # Create a passlib CryptContext from the configured schemes/options.
    user_manager = self.user_manager
    self.password_crypt_context = CryptContext(
        schemes=user_manager.USER_PASSLIB_CRYPTCONTEXT_SCHEMES,
        **user_manager.USER_PASSLIB_CRYPTCONTEXT_KEYWORDS)
| 320,238
|
Verify plaintext ``password`` against ``hashed password``.
Args:
password(str): Plaintext password that the user types in.
password_hash(str): Password hash generated by a previous call to ``hash_password()``.
Returns:
| True when ``password`` matches ``password_hash``.
| False otherwise.
Example:
::
if verify_password('mypassword', user.password):
login_user(user)
|
def verify_password(self, password, password_hash):
    """Verify plaintext ``password`` against ``password_hash``.

    Args:
        password(str): Plaintext password that the user types in.
        password_hash(str): Password hash generated by a previous call
            to ``hash_password()``.

    Returns:
        bool: True when ``password`` matches ``password_hash``,
        False otherwise.
    """
    # Backwards compatibility: accept a User object instead of a hash,
    # but warn that this usage is deprecated.
    if isinstance(password_hash, self.user_manager.db_manager.UserClass):
        print(
            'Deprecation warning: verify_password(password, user) has been changed'\
            ' to: verify_password(password, password_hash). The user param will be deprecated.'\
            ' Please change your call with verify_password(password, user) into'\
            ' a call with verify_password(password, user.password)'
            ' as soon as possible.')
        password_hash = password_hash.password
    # Delegate the actual check to passlib's CryptContext.
    return self.password_crypt_context.verify(password, password_hash)
| 320,239
|
Check config settings and setup SendGrid Web API v3.
Args:
app(Flask): The Flask application instance.
|
def __init__(self, app):
    """Check config settings and setup SendGrid Web API v3.

    Args:
        app(Flask): The Flask application instance.

    Raises:
        ConfigError: If SENDGRID_API_KEY is missing or sendgrid-python
            is not installed.
    """
    super(SendgridEmailAdapter, self).__init__(app)

    # The API key must be configured before we can talk to SendGrid.
    sendgrid_api_key = app.config.get('SENDGRID_API_KEY')
    if not sendgrid_api_key:
        raise ConfigError(
            "The SENDGRID_API_KEY setting is missing. Set SENDGRID_API_KEY in your app config.")

    # sendgrid-python is an optional dependency.
    try:
        from sendgrid import SendGridAPIClient
        self.sg = SendGridAPIClient(apikey=sendgrid_api_key)
    except ImportError:
        raise ConfigError(SENDGRID_IMPORT_ERROR_MESSAGE)
| 320,240
|
Send email message via sendgrid-python.
Args:
recipient: Email address or tuple of (Name, Email-address).
subject: Subject line.
html_message: The message body in HTML.
text_message: The message body in plain text.
|
def send_email_message(self, recipient, subject, html_message, text_message, sender_email, sender_name):
    """Send email message via sendgrid-python.

    Args:
        recipient: Email address or tuple of (Name, Email-address).
        subject: Subject line.
        html_message: The message body in HTML.
        text_message: The message body in plain text.

    Raises:
        ConfigError: If the sendgrid package is not installed.
    """
    if not current_app.testing:  # pragma: no cover
        try:
            # Prepare Sendgrid helper objects
            from sendgrid.helpers.mail import Email, Content, Substitution, Mail

            from_email = Email(sender_email, sender_name)
            to_email = Email(recipient)
            text_content = Content('text/plain', text_message)
            html_content = Content('text/html', html_message)

            # Prepare Sendgrid Mail object
            # Note: RFC 1341: text must be first, followed by html
            mail = Mail(from_email, subject, to_email, text_content)
            mail.add_content(html_content)

            # Send mail via the Sendgrid API
            response = self.sg.client.mail.send.post(request_body=mail.get())

            print(response.status_code)
            print(response.body)
            print(response.headers)

        except ImportError:
            raise ConfigError(SENDGRID_IMPORT_ERROR_MESSAGE)
        except Exception as e:
            print(e)
            # Not every exception carries a ``body`` attribute; guard the
            # diagnostic print so it cannot raise AttributeError and mask
            # the original error being re-raised below.
            print(getattr(e, 'body', None))
            raise
| 320,241
|
Attempt to put ID3 tags on a file.
Args:
artist (str):
title (str):
year (int):
genre (str):
artwork_url (str):
album (str):
track_number (str):
filename (str):
url (str):
|
def tag_file(filename, artist, title, year=None, genre=None, artwork_url=None, album=None, track_number=None, url=None):
    """Attempt to put ID3 tags on a file.

    Args:
        filename (str): Path to the MP3 file to tag.
        artist (str): Artist name for the 'artist' frame.
        title (str): Track title for the 'title' frame.
        year (int): Release year, stored as the 'date' frame.
        genre (str): Genre name.
        artwork_url (str): URL of cover art to embed as an APIC frame.
        album (str): Album name.
        track_number (str): Track number within the album.
        url (str): Source URL, saved as both WOAR and WXXX frames.

    Returns:
        bool: True when tagging succeeded, False on any failure
        (e.g. the file is not actually an MP3).
    """
    try:
        audio = EasyMP3(filename)
        # Drop any pre-existing tags before writing fresh ones.
        audio.tags = None
        audio["artist"] = artist
        audio["title"] = title
        if year:
            audio["date"] = str(year)
        if album:
            audio["album"] = album
        if track_number:
            audio["tracknumber"] = track_number
        if genre:
            audio["genre"] = genre
        if url: # saves the tag as WOAR
            audio["website"] = url
        audio.save()
        if artwork_url:
            # Prefer plain HTTP for the artwork fetch.
            artwork_url = artwork_url.replace('https', 'http')
            mime = 'image/jpeg'
            if '.jpg' in artwork_url:
                mime = 'image/jpeg'
            if '.png' in artwork_url:
                mime = 'image/png'
            if '-large' in artwork_url:
                # Try the higher-resolution 500x500 variant first.
                new_artwork_url = artwork_url.replace('-large', '-t500x500')
                try:
                    image_data = requests.get(new_artwork_url).content
                except Exception as e:
                    # No very large image available.
                    image_data = requests.get(artwork_url).content
            else:
                image_data = requests.get(artwork_url).content
            # Reopen with the full ID3 interface to attach the cover image.
            audio = MP3(filename, ID3=OldID3)
            audio.tags.add(
                APIC(
                    encoding=3, # 3 is for utf-8
                    mime=mime,
                    type=3, # 3 is for the cover image
                    desc='Cover',
                    data=image_data
                )
            )
            audio.save()
        # because there is software that doesn't seem to use WOAR we save url tag again as WXXX
        if url:
            audio = MP3(filename, ID3=OldID3)
            audio.tags.add( WXXX( encoding=3, url=url ) )
            audio.save()
        return True
    except Exception as e:
        # Best-effort tagging: report and return False instead of raising.
        puts(colored.red("Problem tagging file: ") + colored.white("Is this file a WAV?"))
        return False
| 320,313
|
Scrubs a method name, returning result from local cache if available.
This method wraps fitparse.utils.scrub_method_name and memoizes results,
as scrubbing a method name is expensive.
Args:
method_name: Method name to scrub.
Returns:
Scrubbed method name.
|
def _scrub_method_name(self, method_name):
if method_name not in self._scrubbed_method_names:
self._scrubbed_method_names[method_name] = (
scrub_method_name(method_name))
return self._scrubbed_method_names[method_name]
| 320,382
|
Set Date-Picker as the end-date of a date-range.
Args:
- event_id (string): User-defined unique id for linking two fields
- import_options (bool): inherit options from start-date input,
default: TRUE
|
def end_of(self, event_id, import_options=True):
    """Set this Date-Picker as the end-date of a date-range.

    Args:
        event_id (string): User-defined unique id for linking two fields.
        import_options (bool): inherit options from start-date input,
            default: TRUE

    Raises:
        KeyError: If no start-date picker was registered under ``event_id``.

    Returns:
        The picker itself, to allow method chaining.
    """
    event_id = str(event_id)
    if event_id in DatePickerDictionary.items:
        linked_picker = DatePickerDictionary.items[event_id]
        self.config['linked_to'] = linked_picker.config['id']
        if import_options:
            # Inherit the start-picker's options, but let options passed
            # directly to this picker take precedence.
            backup_moment_format = self.config['options']['format']
            self.config['options'].update(linked_picker.config['options'])
            self.config['options'].update(self.options_param)
            if self.format_param or 'format' in self.options_param:
                # An explicit format was set on this picker; restore it.
                self.config['options']['format'] = backup_moment_format
            else:
                self.format = linked_picker.format
        # Setting useCurrent is necessary, see following issue
        # https://github.com/Eonasdan/bootstrap-datetimepicker/issues/1075
        self.config['options']['useCurrent'] = False
        self._link_to(linked_picker)
    else:
        raise KeyError(
            'start-date not specified for event_id "%s"' % event_id)
    return self
| 321,106
|
Initialize a tethered nanoparticle.
Args:
ball_radius (float): Radius of the nanoparticle.
n_chains (int): Number of chains to attach to the nanoparticle.
chain_length (int): Length of the chains being attached.
monomer (Compound, optional): Type of chain being attached.
|
def __init__(self, ball_radius=10, n_chains=4, chain_length=10, monomer=None):
    """Initialize a tethered nanoparticle.

    Args:
        ball_radius (float): Radius of the nanoparticle.
        n_chains (int): Number of chains to attach to the nanoparticle.
        chain_length (int): Length of the chains being attached.
        monomer (Compound, optional): Type of chain being attached.
    """
    super(Tnp, self).__init__()
    if not monomer:
        # Default chain repeat unit: a coarse-grained bead of kind 't'.
        monomer = Bead(particle_kind='t')
    n = 129 # TODO: make this tweakable
    self.add(Sphere(n=n, radius=ball_radius, port_distance_from_surface=0.7), label="np")
    # Generate n_chains attachment points on the surface of a unit sphere.
    pattern = mb.SpherePattern(n_chains)
    # Magnify it a bit.
    pattern.scale(ball_radius)
    chain_proto = mb.Polymer(monomer, n=chain_length)
    # Apply chains to pattern.
    chain_protos, empty_backfill = pattern.apply_to_compound(chain_proto,
        guest_port_name="down", host=self['np'])
    self.add(chain_protos)
    # Bond-length window around the mean nearest-neighbor spacing of n
    # points on a sphere of this radius -- assumes +/-0.5 tolerance is
    # appropriate; TODO confirm.
    self.generate_bonds('np', 'np', sqrt(4 * ball_radius ** 2 * pi / n) - 0.5,
                        sqrt(4 * ball_radius**2 * pi / n) + 0.5)
    self.generate_bonds('np', 't', 0.1, 0.3)
    self.generate_bonds('t', 'np', 0.1, 0.3)
| 321,221
|
Initialize a Sphere object.
Args:
n (int): Number of points used to construct the Sphere.
radius (float): Radius of the Sphere.
port_distance_from_surface (float): Distance of Ports from Sphere.
|
def __init__(self, n=65, radius=1, port_distance_from_surface=.07):
    """Initialize a Sphere object.

    Args:
        n (int): Number of points used to construct the Sphere.
        radius (float): Radius of the Sphere.
        port_distance_from_surface (float): Distance of Ports from Sphere.
    """
    super(Sphere, self).__init__()
    particle = mb.Particle(name='np')
    particle.add(mb.Port(anchor=particle), label='out')
    # Generate n points on the surface of a unit sphere.
    pattern = mb.SpherePattern(n)
    # Magnify the unit sphere by the provided radius.
    pattern.scale(radius)
    particles = pattern.apply(particle, orientation='normal', compound_port='out')
    self.add(particles, label='np_[$]')
    # NOTE(review): the pattern is applied twice -- once via pattern.apply()
    # above and again in the loop below, which re-creates a particle at
    # every pattern position. Confirm this duplication is intentional.
    # Create particles and Ports at pattern positions.
    for i, pos in enumerate(pattern.points):
        particle = mb.Particle(name="np", pos=pos)
        self.add(particle, "np_{}".format(i))
        port = mb.Port(anchor=particle)
        self.add(port, "port_{}".format(i))
        # Make the top of the port point toward the positive x axis.
        port.spin(-pi/2, [0, 0, 1])
        # Raise up (or down) the top of the port in the z direction.
        port.spin(-arcsin(pos[2]/radius), [0, 1, 0])
        # Rotate the Port along the z axis.
        port.spin(arctan2(pos[1], pos[0]), [0, 0, 1])
        # Move the Port a bit away from the surface of the Sphere.
        port.translate(pos/radius * port_distance_from_surface)
| 321,330
|
Initialize an Alkane Compound.
Args:
n: Number of carbon atoms.
cap_front: Add methyl group to beginning of chain ('down' port).
cap_end: Add methyl group to end of chain ('up' port).
|
def __init__(self, n=3, cap_front=True, cap_end=True):
    """Initialize an Alkane Compound.

    Args:
        n: Number of carbon atoms.
        cap_front: Add methyl group to beginning of chain ('down' port).
        cap_end: Add methyl group to end of chain ('up' port).

    Raises:
        ValueError: If ``n`` is less than 2.
    """
    # NOTE(review): the message says '1 or more' but the condition rejects
    # n < 2 -- confirm which bound is intended.
    if n < 2:
        raise ValueError('n must be 1 or more')
    super(Alkane, self).__init__()

    # Adjust length of Polymer for absence of methyl terminations:
    # an uncapped end contributes one extra CH2 to the backbone.
    if not cap_front:
        n += 1
    if not cap_end:
        n += 1
    chain = mb.recipes.Polymer(CH2(), n=n-2, port_labels=('up', 'down'))
    self.add(chain, 'chain')

    if cap_front:
        self.add(CH3(), "methyl_front")
        mb.force_overlap(move_this=self['chain'],
                         from_positions=self['chain']['up'],
                         to_positions=self['methyl_front']['up'])
    else:
        # Hoist port label to Alkane level.
        self.add(chain['up'], 'up', containment=False)

    if cap_end:
        self.add(CH3(), 'methyl_end')
        mb.force_overlap(self['methyl_end'], self['methyl_end']['up'], self['chain']['down'])
    else:
        # Hoist port label to Alkane level.
        self.add(chain['down'], 'down', containment=False)
| 321,351
|
Initialize a Bead object.
Args:
particle_kind (str): Descriptive name for the Bead.
|
def __init__(self, particle_kind="bead"):
    """Initialize a Bead object.

    Args:
        particle_kind (str): Descriptive name for the Bead.
    """
    super(Bead, self).__init__()
    self.add(mb.Particle(name=particle_kind), particle_kind)
    # Attach one Port above and one below the particle.
    for label, offset in (('up', 0.7), ('down', -0.7)):
        self.add(mb.Port(anchor=self.labels[particle_kind]), label)
        self[label].translate(np.array([0, offset, 0]))
| 321,436
|
The mean forecast for the next point is the mean value of the previous ``n`` points in
the series.
Args:
data (np.array): Observed data, presumed to be ordered in time.
n (int): period over which to calculate the mean
Returns:
float: a single-valued forecast for the next value in the series.
|
def mean(data, n=3, **kwargs):
    """Forecast the next point as the mean of the previous ``n`` points.

    Args:
        data (np.array): Observed data, presumed to be ordered in time.
        n (int): period over which to calculate the mean

    Returns:
        float: a single-valued forecast for the next value in the series
        (``np.nan`` until at least ``n`` points have been observed).
    """
    window = data[-n:]
    # Refuse to forecast before a full window of n points is available.
    if len(window) < n:
        return np.nan
    return np.mean(window)
| 321,505
|
The drift forecast for the next point is a linear extrapolation from the previous ``n``
points in the series.
Args:
data (np.array): Observed data, presumed to be ordered in time.
n (int): period over which to calculate linear model for extrapolation
Returns:
float: a single-valued forecast for the next value in the series.
|
def drift(data, n=3, **kwargs):
    """Forecast the next point by linear extrapolation from the previous
    ``n`` points in the series.

    Args:
        data (np.array): Observed data, presumed to be ordered in time.
        n (int): period over which to calculate linear model for extrapolation

    Returns:
        float: a single-valued forecast for the next value in the series.
    """
    start, end = data[-n], data[-1]
    # Slope of the straight line through the first and last of the n points.
    per_step = (end - start) / (n - 1)
    return end + per_step
| 321,506
|
Return a file-like object of CSV-encoded rows.
Args:
dataframe (pandas.DataFrame): A chunk of a dataframe to encode
|
def encode_chunk(dataframe):
    """Return a file-like object of CSV-encoded rows.

    Args:
        dataframe (pandas.DataFrame): A chunk of a dataframe to encode

    Returns:
        io.BytesIO: UTF-8 encoded CSV rows without header or index.
    """
    # Use the stdlib io module directly; on Python 3, six.StringIO and
    # six.BytesIO are aliases for these same classes.
    import io

    csv_buffer = io.StringIO()
    dataframe.to_csv(
        csv_buffer,
        index=False,
        header=False,
        encoding="utf-8",
        float_format="%.15g",
        date_format="%Y-%m-%d %H:%M:%S.%f",
    )

    # Convert to a BytesIO buffer so that unicode text is properly handled.
    # See: https://github.com/pydata/pandas-gbq/issues/106
    body = csv_buffer.getvalue()
    if isinstance(body, bytes):
        body = body.decode("utf-8")
    body = body.encode("utf-8")
    return io.BytesIO(body)
| 322,493
|
Given an old BigQuery schema, update it with a new one.
Where a field name is the same, the new will replace the old. Any
new fields not present in the old schema will be added.
Arguments:
schema_old: the old schema to update
schema_new: the new schema which will overwrite/extend the old
|
def update_schema(schema_old, schema_new):
    """Given an old BigQuery schema, update it with a new one.

    Where a field name is the same, the new will replace the old. Any
    new fields not present in the old schema will be added.

    Arguments:
        schema_old: the old schema to update
        schema_new: the new schema which will overwrite/extend the old
    """
    merged = list(schema_old["fields"])
    # Map each existing field name to its position for in-place replacement.
    position = {field["name"]: i for i, field in enumerate(merged)}
    for new_field in schema_new["fields"]:
        i = position.get(new_field["name"])
        if i is None:
            # Unknown name: append as a brand-new field.
            merged.append(new_field)
        else:
            # Known name: the new definition wins.
            merged[i] = new_field
    return {"fields": merged}
| 322,522
|
Instantiates a new :class:`ics.alarm.DisplayAlarm`.
Adheres to RFC5545 VALARM standard: http://icalendar.org/iCalendar-RFC-5545/3-6-6-alarm-component.html
Args:
description (string) : RFC5545 DESCRIPTION property
kwargs (dict) : Args to :func:`ics.alarm.Alarm.__init__`
|
def __init__(self, description=None, **kwargs):
    """Instantiate a new :class:`ics.alarm.DisplayAlarm`.

    Adheres to RFC5545 VALARM standard:
    http://icalendar.org/iCalendar-RFC-5545/3-6-6-alarm-component.html

    Args:
        description (string) : RFC5545 DESCRIPTION property
        kwargs (dict) : Args to :func:`ics.alarm.Alarm.__init__`
    """
    super(DisplayAlarm, self).__init__(**kwargs)
    # Text shown to the user when the alarm triggers.
    self.description = description
| 322,930
|
Instantiates a new :class:`ics.alarm.AudioAlarm`.
Adheres to RFC5545 VALARM standard: http://icalendar.org/iCalendar-RFC-5545/3-6-6-alarm-component.html
Args:
attach (string) : RFC5545 ATTACH property, pointing to an audio object
attach_params (dict) : RFC5545 attachparam values
kwargs (dict) : Args to :func:`ics.alarm.Alarm.__init__`
|
def __init__(self, attach=None, attach_params=None, **kwargs):
    """Instantiate a new :class:`ics.alarm.AudioAlarm`.

    Adheres to RFC5545 VALARM standard:
    http://icalendar.org/iCalendar-RFC-5545/3-6-6-alarm-component.html

    Args:
        attach (string) : RFC5545 ATTACH property, pointing to an audio object
        attach_params (dict) : RFC5545 attachparam values
        kwargs (dict) : Args to :func:`ics.alarm.Alarm.__init__`
    """
    super(AudioAlarm, self).__init__(**kwargs)
    # Sound resource to play and its attachment parameters.
    self.attach = attach
    self.attach_params = attach_params
| 322,932
|
Iterates (in chronological order) over every event that is included
in the timespan between `start` and `stop`
Args:
start : (Arrow object)
stop : (Arrow object)
|
def included(self, start, stop):
    """Iterate (in chronological order) over every event that is fully
    contained in the timespan between `start` and `stop`.

    Args:
        start : (Arrow object)
        stop : (Arrow object)
    """
    for event in self:
        # Keep events whose begin AND end both fall inside [start, stop].
        fully_inside = (start <= event.begin <= stop
                        and start <= event.end <= stop)
        if fully_inside:
            yield event
| 322,939
|
Iterates (in chronological order) over every event that has an intersection
with the timespan between `start` and `stop`
Args:
start : (Arrow object)
stop : (Arrow object)
|
def overlapping(self, start, stop):
    """Iterate (in chronological order) over every event that has an
    intersection with the timespan between `start` and `stop`.

    Args:
        start : (Arrow object)
        stop : (Arrow object)
    """
    for event in self:
        begins_inside = start <= event.begin <= stop
        ends_inside = start <= event.end <= stop
        # The event may also be a superset of [start, stop].
        covers_span = event.begin <= start and event.end >= stop
        if begins_inside or ends_inside or covers_span:
            yield event
| 322,940
|
Iterates (in chronological order) over all events that are occurring during `instant`.
Args:
instant (Arrow object)
|
def at(self, instant):
    """Iterate (in chronological order) over all events occurring
    during `instant`.

    Args:
        instant (Arrow object)
    """
    for event in self:
        started = event.begin <= instant
        not_finished = instant <= event.end
        if started and not_finished:
            yield event
| 322,941
|
Iterates (in chronological order) over all events that occur on `day`
Args:
day (Arrow object)
strict (bool): if True events will be returned only if they are\
strictly *included* in `day`.
|
def on(self, day, strict=False):
    """Iterate (in chronological order) over all events that occur on `day`.

    Args:
        day (Arrow object)
        strict (bool): if True events will be returned only if they are
            strictly *included* in `day`.
    """
    day_start, day_stop = day.floor('day').span('day')
    # strict -> full containment; otherwise any overlap with the day counts.
    selector = self.included if strict else self.overlapping
    return selector(day_start, day_stop)
| 322,942
|
Iterates (in chronological order) over all events that occurs today
Args:
strict (bool): if True events will be returned only if they are\
strictly *included* in `day`.
|
def today(self, strict=False):
    """Iterate (in chronological order) over all events that occur today.

    Args:
        strict (bool): if True events will be returned only if they are
            strictly *included* in the current day.
    """
    now = arrow.now()
    return self.on(now, strict=strict)
| 322,943
|
Create a new event which covers the time range of two intersecting events
All extra parameters are passed to the Event constructor.
Args:
other: the other event
Returns:
a new Event instance
|
def join(self, other, *args, **kwarg):
    """Create a new event which covers the time range of two intersecting
    events.

    All extra parameters are passed to the Event constructor.

    Args:
        other: the other event

    Returns:
        a new Event instance

    Raises:
        ValueError: If the two events do not intersect.
    """
    event = Event(*args, **kwarg)
    if not self.intersects(other):
        raise ValueError('Cannot join {} with {}: they don\'t intersect.'.format(self, other))
    # The joined span starts at whichever event begins first
    # and ends at whichever event finishes last.
    event.begin = other.begin if self.starts_within(other) else self.begin
    event.end = other.end if self.ends_within(other) else self.end
    return event
| 322,979
|
Instantiates a new Calendar.
Args:
imports (string or list of lines/strings): data to be imported into the Calendar(),
events (set of Event): :class:`ics.event.Event`s to be added to the calendar
todos (set of Todo): :class:`ics.event.Todo`s to be added to the calendar
creator (string): uid of the creator program.
If `imports` is specified, every other argument will be ignored.
|
def __init__(self, imports=None, events=None, todos=None, creator=None):
    """Instantiate a new Calendar.

    Args:
        imports (string or list of lines/strings): data to be imported into the Calendar()
        events (set of Event): :class:`ics.event.Event`s to be added to the calendar
        todos (set of Todo): :class:`ics.event.Todo`s to be added to the calendar
        creator (string): uid of the creator program.

    If `imports` is specified, every other argument will be ignored.
    """
    # TODO : implement a file-descriptor import and a filename import
    self._timezones = {}
    self.events = set()
    self.todos = set()
    self._unused = Container(name='VCALENDAR')
    self.scale = None
    self.method = None
    self.timeline = Timeline(self)

    if imports is not None:
        if isinstance(imports, string_types):
            container = string_to_container(imports)
        # NOTE(review): collections.Iterable was removed in Python 3.10;
        # collections.abc.Iterable is the modern spelling -- confirm the
        # supported Python versions before changing this.
        elif isinstance(imports, collections.Iterable):
            container = lines_to_container(imports)
        else:
            raise TypeError("Expecting a sequence or a string")

        # TODO : make a better API for multiple calendars
        if len(container) != 1:
            raise NotImplementedError(
                'Multiple calendars in one file are not supported')

        self._populate(container[0]) # Use first calendar
    else:
        if events is not None:
            self.events.update(set(events))
        if todos is not None:
            self.todos.update(set(todos))
        self._creator = creator
| 322,992
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.