code stringlengths 114 1.05M | path stringlengths 3 312 | quality_prob float64 0.5 0.99 | learning_prob float64 0.2 1 | filename stringlengths 3 168 | kind stringclasses 1
value |
|---|---|---|---|---|---|
from logging import getLogger
from typing import Optional
from eth_account.signers.local import LocalAccount
from eth_typing import ChecksumAddress
from web3.contract import Contract
from gnosis.eth import EthereumClient
from gnosis.eth.contracts import (
get_paying_proxy_deployed_bytecode,
get_proxy_1_0_0_deployed_bytecode,
get_proxy_1_1_1_deployed_bytecode,
get_proxy_1_1_1_mainnet_deployed_bytecode,
get_proxy_1_3_0_deployed_bytecode,
get_proxy_factory_contract,
get_proxy_factory_V1_0_0_contract,
get_proxy_factory_V1_1_1_contract,
)
from gnosis.eth.ethereum_client import EthereumTxSent
from gnosis.eth.utils import compare_byte_code, fast_is_checksum_address
try:
    from functools import cache
except ImportError:
    # `functools.cache` was added in Python 3.9; emulate it on older
    # interpreters with an unbounded `lru_cache`
    from functools import lru_cache

    cache = lru_cache(maxsize=None)

# Module-level logger, configured by the application
logger = getLogger(__name__)
class ProxyFactory:
    """
    Wrapper around a deployed Gnosis Safe Proxy Factory contract.

    Allows deploying new proxy factory contracts, deploying Safe proxies
    through an existing factory, and checking that an address contains
    bytecode of a known Safe proxy version.
    """

    def __init__(self, address: ChecksumAddress, ethereum_client: EthereumClient):
        """
        :param address: Checksummed address of a deployed Proxy Factory contract
        :param ethereum_client: Client connected to an Ethereum node
        """
        assert fast_is_checksum_address(address), (
            "%s proxy factory address not valid" % address
        )
        self.address = address
        self.ethereum_client = ethereum_client
        self.w3 = ethereum_client.w3
        # Per-instance memoization for `get_proxy_runtime_code`. Using
        # `functools.cache` on the method would key the cache on `self` and
        # keep every ProxyFactory instance alive for the process lifetime.
        self._proxy_runtime_code_cache = {}

    @staticmethod
    def _deploy_proxy_factory_contract(
        ethereum_client: EthereumClient,
        deployer_account: LocalAccount,
        contract: Contract,
    ) -> EthereumTxSent:
        """
        Deploy the given proxy factory `contract` and wait for the receipt.

        :param ethereum_client: Client connected to an Ethereum node
        :param deployer_account: Account paying for the deployment
        :param contract: Not-yet-deployed web3 contract used to build the constructor tx
        :return: EthereumTxSent with tx hash, tx and the deployed contract address
        """
        tx = contract.constructor().buildTransaction({"from": deployer_account.address})
        tx_hash = ethereum_client.send_unsigned_transaction(
            tx, private_key=deployer_account.key
        )
        # Wait up to 2 minutes for the deployment to be mined
        tx_receipt = ethereum_client.get_transaction_receipt(tx_hash, timeout=120)
        assert tx_receipt
        assert tx_receipt["status"]
        contract_address = tx_receipt["contractAddress"]
        logger.info(
            "Deployed and initialized Proxy Factory Contract=%s by %s",
            contract_address,
            deployer_account.address,
        )
        return EthereumTxSent(tx_hash, tx, contract_address)

    @classmethod
    def deploy_proxy_factory_contract(
        cls, ethereum_client: EthereumClient, deployer_account: LocalAccount
    ) -> EthereumTxSent:
        """
        Deploy proxy factory contract last version (v1.3.0)

        :param ethereum_client:
        :param deployer_account: Ethereum Account
        :return: deployed contract address
        """
        proxy_factory_contract = get_proxy_factory_contract(ethereum_client.w3)
        return cls._deploy_proxy_factory_contract(
            ethereum_client, deployer_account, proxy_factory_contract
        )

    @classmethod
    def deploy_proxy_factory_contract_v1_1_1(
        cls, ethereum_client: EthereumClient, deployer_account: LocalAccount
    ) -> EthereumTxSent:
        """
        Deploy proxy factory contract v1.1.1

        :param ethereum_client:
        :param deployer_account: Ethereum Account
        :return: deployed contract address
        """
        proxy_factory_contract = get_proxy_factory_V1_1_1_contract(ethereum_client.w3)
        return cls._deploy_proxy_factory_contract(
            ethereum_client, deployer_account, proxy_factory_contract
        )

    @classmethod
    def deploy_proxy_factory_contract_v1_0_0(
        cls, ethereum_client: EthereumClient, deployer_account: LocalAccount
    ) -> EthereumTxSent:
        """
        Deploy proxy factory contract v1.0.0

        :param ethereum_client:
        :param deployer_account: Ethereum Account
        :return: deployed contract address
        """
        proxy_factory_contract = get_proxy_factory_V1_0_0_contract(ethereum_client.w3)
        return cls._deploy_proxy_factory_contract(
            ethereum_client, deployer_account, proxy_factory_contract
        )

    def check_proxy_code(self, address: ChecksumAddress) -> bool:
        """
        Check if the bytecode deployed at `address` matches a known Safe
        proxy version (v1.0.0/v1.1.1/v1.3.0, the paying proxy, or this
        factory's own runtime code).

        :param address: Ethereum address to check
        :return: True if proxy is valid, False otherwise
        """
        deployed_proxy_code = self.w3.eth.get_code(address)
        proxy_code_fns = (
            get_proxy_1_3_0_deployed_bytecode,
            get_proxy_1_1_1_deployed_bytecode,
            get_proxy_1_1_1_mainnet_deployed_bytecode,
            get_proxy_1_0_0_deployed_bytecode,
            get_paying_proxy_deployed_bytecode,
            self.get_proxy_runtime_code,
        )
        return any(
            compare_byte_code(deployed_proxy_code, proxy_code_fn())
            for proxy_code_fn in proxy_code_fns
        )

    def _send_proxy_creation_tx(
        self,
        create_proxy_fn,
        deployer_account: LocalAccount,
        gas: Optional[int],
        gas_price: Optional[int],
        nonce: Optional[int] = None,
    ) -> EthereumTxSent:
        """
        Simulate `create_proxy_fn` with `eth_call` to learn the proxy address,
        then send the actual transaction. Shared by `deploy_proxy_contract`
        and `deploy_proxy_contract_with_nonce`.
        """
        tx_parameters = {"from": deployer_account.address}
        contract_address = create_proxy_fn.call(tx_parameters)
        if gas_price is not None:
            tx_parameters["gasPrice"] = gas_price
        if gas is not None:
            tx_parameters["gas"] = gas
        if nonce is not None:
            tx_parameters["nonce"] = nonce
        tx = create_proxy_fn.buildTransaction(tx_parameters)
        # Auto estimation of gas does not work. We use a little more gas just in case
        tx["gas"] = tx["gas"] + 50000
        tx_hash = self.ethereum_client.send_unsigned_transaction(
            tx, private_key=deployer_account.key
        )
        return EthereumTxSent(tx_hash, tx, contract_address)

    def deploy_proxy_contract(
        self,
        deployer_account: LocalAccount,
        master_copy: ChecksumAddress,
        initializer: bytes = b"",
        gas: Optional[int] = None,
        gas_price: Optional[int] = None,
    ) -> EthereumTxSent:
        """
        Deploy proxy contract via ProxyFactory using `createProxy` function

        :param deployer_account: Ethereum account
        :param master_copy: Address the proxy will point at
        :param initializer: Initializer
        :param gas: Gas
        :param gas_price: Gas Price
        :return: EthereumTxSent
        """
        proxy_factory_contract = self.get_contract()
        create_proxy_fn = proxy_factory_contract.functions.createProxy(
            master_copy, initializer
        )
        return self._send_proxy_creation_tx(
            create_proxy_fn, deployer_account, gas, gas_price
        )

    def deploy_proxy_contract_with_nonce(
        self,
        deployer_account: LocalAccount,
        master_copy: ChecksumAddress,
        initializer: bytes,
        salt_nonce: int,
        gas: Optional[int] = None,
        gas_price: Optional[int] = None,
        nonce: Optional[int] = None,
    ) -> EthereumTxSent:
        """
        Deploy proxy contract via Proxy Factory using `createProxyWithNonce` (create2)

        :param deployer_account: Ethereum account
        :param master_copy: Address the proxy will point at
        :param initializer: Data for safe creation
        :param salt_nonce: Uint256 for `create2` salt
        :param gas: Gas
        :param gas_price: Gas Price
        :param nonce: Nonce
        :return: Tuple(tx-hash, tx, deployed contract address)
        """
        proxy_factory_contract = self.get_contract()
        create_proxy_fn = proxy_factory_contract.functions.createProxyWithNonce(
            master_copy, initializer, salt_nonce
        )
        return self._send_proxy_creation_tx(
            create_proxy_fn, deployer_account, gas, gas_price, nonce=nonce
        )

    def get_contract(self, address: Optional[ChecksumAddress] = None):
        """
        Get a web3 contract instance for this factory (or for `address` if given)
        """
        address = address or self.address
        return get_proxy_factory_contract(self.ethereum_client.w3, address)

    def get_proxy_runtime_code(self, address: Optional[ChecksumAddress] = None):
        """
        Get runtime code for the proxy factory, memoized per instance
        """
        address = address or self.address
        if address not in self._proxy_runtime_code_cache:
            self._proxy_runtime_code_cache[address] = (
                self.get_contract(address=address).functions.proxyRuntimeCode().call()
            )
        return self._proxy_runtime_code_cache[address]
from rest_framework import serializers
from rest_framework.exceptions import ValidationError
from gnosis.eth.constants import (
SIGNATURE_R_MAX_VALUE,
SIGNATURE_R_MIN_VALUE,
SIGNATURE_S_MAX_VALUE,
SIGNATURE_S_MIN_VALUE,
SIGNATURE_V_MAX_VALUE,
SIGNATURE_V_MIN_VALUE,
)
from gnosis.eth.django.serializers import EthereumAddressField, HexadecimalField
from .safe import SafeOperation
class SafeSignatureSerializer(serializers.Serializer):
    """
    When using safe signatures `v` can have more values
    """

    v = serializers.IntegerField(min_value=0)
    r = serializers.IntegerField(min_value=0)
    s = serializers.IntegerField(min_value=0)

    def validate_v(self, v):
        # Safe-specific `v` encodings besides the standard ECDSA range:
        # 0 -> contract signature, 1 -> approved hash, > 30 -> eth_sign
        # (standard v shifted by 4)
        if v == 0:  # Contract signature
            return v
        elif v == 1:  # Approved hash
            return v
        elif v > 30 and self.check_v(v - 4):  # Support eth_sign
            return v
        elif self.check_v(v):
            return v
        else:
            raise serializers.ValidationError(
                "v should be 0, 1 or be in %d-%d"
                % (SIGNATURE_V_MIN_VALUE, SIGNATURE_V_MAX_VALUE)
            )

    def validate(self, data):
        # Cross-field validation: `r` and `s` ranges only apply to real
        # ECDSA signatures, not to contract signatures / approved hashes.
        super().validate(data)
        v = data["v"]
        r = data["r"]
        s = data["s"]
        if v not in [0, 1]:  # Disable checks for `r` and `s` if v is 0 or 1
            if not self.check_r(r):
                raise serializers.ValidationError("r not valid")
            elif not self.check_s(s):
                raise serializers.ValidationError("s not valid")
        return data

    def check_v(self, v):
        # True if `v` lies in the standard ECDSA recovery range
        return SIGNATURE_V_MIN_VALUE <= v <= SIGNATURE_V_MAX_VALUE

    def check_r(self, r):
        return SIGNATURE_R_MIN_VALUE <= r <= SIGNATURE_R_MAX_VALUE

    def check_s(self, s):
        return SIGNATURE_S_MIN_VALUE <= s <= SIGNATURE_S_MAX_VALUE
class SafeMultisigEstimateTxSerializer(serializers.Serializer):
    """
    Serializer for the parameters required to estimate a Safe multisig
    transaction
    """

    safe = EthereumAddressField()
    to = EthereumAddressField()
    value = serializers.IntegerField(min_value=0)
    data = HexadecimalField(default=None, allow_null=True, allow_blank=True)
    operation = serializers.IntegerField(min_value=0)
    gas_token = EthereumAddressField(
        default=None, allow_null=True, allow_zero_address=True
    )

    def validate_operation(self, value):
        # Only values defined in the `SafeOperation` enum are accepted
        try:
            return SafeOperation(value).value
        except ValueError:
            raise ValidationError("Unknown operation")

    def validate(self, data):
        super().validate(data)
        if not data["to"] and not data["data"]:
            raise ValidationError("`data` and `to` cannot both be null")
        if data["operation"] == SafeOperation.CREATE.value:
            raise ValidationError(
                "Operation CREATE not supported. Please use Gnosis Safe CreateLib"
            )
        return data
class SafeMultisigTxSerializer(SafeMultisigEstimateTxSerializer):
    """
    DEPRECATED, use `SafeMultisigTxSerializerV1` instead
    """

    safe_tx_gas = serializers.IntegerField(min_value=0)
    # `data_gas` was renamed to `base_gas` in Safe v1.0.0
    data_gas = serializers.IntegerField(min_value=0)
    gas_price = serializers.IntegerField(min_value=0)
    refund_receiver = EthereumAddressField(
        default=None, allow_null=True, allow_zero_address=True
    )
    nonce = serializers.IntegerField(min_value=0)
class SafeMultisigTxSerializerV1(SafeMultisigEstimateTxSerializer):
    """
    Serializer for executing a Safe multisig transaction.

    Version 1.0.0 of the Safe changes `data_gas` to `base_gas`
    """

    safe_tx_gas = serializers.IntegerField(min_value=0)
    base_gas = serializers.IntegerField(min_value=0)
    gas_price = serializers.IntegerField(min_value=0)
    refund_receiver = EthereumAddressField(
        default=None, allow_null=True, allow_zero_address=True
    )
    nonce = serializers.IntegerField(min_value=0)
import math
import os
from logging import getLogger
from typing import Any, Dict, List, Optional, Tuple
import rlp
from eth.constants import SECPK1_N
from eth.vm.forks.frontier.transactions import FrontierTransaction
from eth_keys.exceptions import BadSignature
from hexbytes import HexBytes
from web3 import Web3
from web3.contract import ContractConstructor
from gnosis.eth.constants import GAS_CALL_DATA_BYTE, NULL_ADDRESS
from gnosis.eth.contracts import (
get_erc20_contract,
get_paying_proxy_contract,
get_safe_V0_0_1_contract,
)
from gnosis.eth.utils import (
fast_is_checksum_address,
fast_to_checksum_address,
mk_contract_address,
)
logger = getLogger(__name__)
class InvalidERC20Token(Exception):
    """Raised when gas estimation of an ERC20 `transfer` for the payment token fails"""

    pass
class SafeCreationTx:
    """
    Build a counterfactual Safe deployment transaction signed with a random
    (Nick's method style) signature, so the Safe address is known before the
    deployment transaction is funded and sent.
    """

    def __init__(
        self,
        w3: Web3,
        owners: List[str],
        threshold: int,
        signature_s: int,
        master_copy: str,
        gas_price: int,
        funder: Optional[str],
        payment_token: Optional[str] = None,
        payment_token_eth_value: float = 1.0,
        fixed_creation_cost: Optional[int] = None,
    ):
        """
        Prepare Safe creation

        :param w3: Web3 instance
        :param owners: Owners of the Safe
        :param threshold: Minimum number of users required to operate the Safe
        :param signature_s: Random s value for ecdsa signature
        :param master_copy: Safe master copy address
        :param gas_price: Gas Price
        :param funder: Address to refund when the Safe is created. Address(0) if no need to refund
        :param payment_token: Payment token instead of paying the funder with ether. If None Ether will be used
        :param payment_token_eth_value: Value of payment token per 1 Ether
        :param fixed_creation_cost: Fixed creation cost of Safe (Wei)
        """
        assert 0 < threshold <= len(owners)
        funder = funder or NULL_ADDRESS
        payment_token = payment_token or NULL_ADDRESS
        assert fast_is_checksum_address(master_copy)
        assert fast_is_checksum_address(funder)
        assert fast_is_checksum_address(payment_token)

        self.w3 = w3
        self.owners = owners
        self.threshold = threshold
        self.s = signature_s
        self.master_copy = master_copy
        self.gas_price = gas_price
        self.funder = funder
        self.payment_token = payment_token
        self.payment_token_eth_value = payment_token_eth_value
        self.fixed_creation_cost = fixed_creation_cost

        # Get bytes for `setup(address[] calldata _owners, uint256 _threshold, address to, bytes calldata data)`
        # This initializer will be passed to the proxy and will be called right after proxy is deployed
        safe_setup_data: bytes = self._get_initial_setup_safe_data(owners, threshold)

        # Calculate gas based on experience of previous deployments of the safe
        calculated_gas: int = self._calculate_gas(
            owners, safe_setup_data, payment_token
        )
        # Estimate gas using web3
        estimated_gas: int = self._estimate_gas(
            master_copy, safe_setup_data, funder, payment_token
        )
        self.gas = max(calculated_gas, estimated_gas)

        # Payment will be safe deploy cost + transfer fees for sending ether to the deployer
        self.payment = self._calculate_refund_payment(
            self.gas, gas_price, fixed_creation_cost, payment_token_eth_value
        )

        self.tx_dict: Dict[str, Any] = self._build_proxy_contract_creation_tx(
            master_copy=master_copy,
            initializer=safe_setup_data,
            funder=funder,
            payment_token=payment_token,
            payment=self.payment,
            gas=self.gas,
            gas_price=gas_price,
        )

        self.tx_pyethereum: FrontierTransaction = (
            self._build_contract_creation_tx_with_valid_signature(self.tx_dict, self.s)
        )
        self.tx_raw = rlp.encode(self.tx_pyethereum)
        self.tx_hash = self.tx_pyethereum.hash
        # The deployer is recovered from the random signature; the Safe will
        # live at the address of the deployer's first (nonce 0) contract
        self.deployer_address = fast_to_checksum_address(self.tx_pyethereum.sender)
        self.safe_address = mk_contract_address(self.tx_pyethereum.sender, 0)

        self.v = self.tx_pyethereum.v
        self.r = self.tx_pyethereum.r
        self.safe_setup_data = safe_setup_data

        assert mk_contract_address(self.deployer_address, nonce=0) == self.safe_address

    @property
    def payment_ether(self):
        """Total ether required by the deployer to pay for the creation"""
        return self.gas * self.gas_price

    @staticmethod
    def find_valid_random_signature(s: int) -> Tuple[int, int]:
        """
        Find v and r valid values for a given s

        :param s: random value
        :return: v, r
        """
        for _ in range(10000):
            r = int(os.urandom(31).hex(), 16)
            v = (r % 2) + 27
            if r < SECPK1_N:
                tx = FrontierTransaction(0, 1, 21000, b"", 0, b"", v=v, r=r, s=s)
                try:
                    tx.sender
                    return v, r
                except (BadSignature, ValueError):
                    logger.debug("Cannot find signature with v=%d r=%d s=%d", v, r, s)

        raise ValueError("Valid signature not found with s=%d" % s)

    @staticmethod
    def _calculate_gas(
        owners: List[str], safe_setup_data: bytes, payment_token: str
    ) -> int:
        """
        Calculate gas manually, based on tests of previosly deployed safes

        :param owners: Safe owners
        :param safe_setup_data: Data for proxy setup
        :param payment_token: If payment token, we will need more gas to transfer and maybe storage if first time
        :return: total gas needed for deployment
        """
        # TODO Do gas calculation estimating the call instead this magic

        base_gas = 60580  # Transaction standard gas

        # If we already have the token, we don't have to pay for storage, so it will be just 5K instead of 20K.
        # The other 1K is for overhead of making the call
        if payment_token != NULL_ADDRESS:
            payment_token_gas = 55000
        else:
            payment_token_gas = 0

        data_gas = GAS_CALL_DATA_BYTE * len(safe_setup_data)  # Data gas
        gas_per_owner = 18020  # Magic number calculated by testing and averaging owners
        return (
            base_gas
            + data_gas
            + payment_token_gas
            + 270000
            + len(owners) * gas_per_owner
        )

    @staticmethod
    def _calculate_refund_payment(
        gas: int,
        gas_price: int,
        fixed_creation_cost: Optional[int],
        payment_token_eth_value: float,
    ) -> int:
        """
        Calculate the amount to refund the funder, in Wei (or in payment
        token units converted by `payment_token_eth_value`)
        """
        if fixed_creation_cost is None:
            # Payment will be safe deploy cost + transfer fees for sending ether to the deployer
            base_payment: int = (gas + 23000) * gas_price
            # Calculate payment for tokens using the conversion (if used)
            return math.ceil(base_payment / payment_token_eth_value)
        else:
            return fixed_creation_cost

    def _build_proxy_contract_creation_constructor(
        self,
        master_copy: str,
        initializer: bytes,
        funder: str,
        payment_token: str,
        payment: int,
    ) -> ContractConstructor:
        """
        :param master_copy: Master Copy of Gnosis Safe already deployed
        :param initializer: Data initializer to send to GnosisSafe setup method
        :param funder: Address that should get the payment (if payment set)
        :param payment_token: Address if a token is used. If not set, 0x0 will be ether
        :param payment: Payment
        :return: Transaction dictionary
        """
        if not funder or funder == NULL_ADDRESS:
            funder = NULL_ADDRESS
            payment = 0

        return get_paying_proxy_contract(self.w3).constructor(
            master_copy, initializer, funder, payment_token, payment
        )

    def _build_proxy_contract_creation_tx(
        self,
        master_copy: str,
        initializer: bytes,
        funder: str,
        payment_token: str,
        payment: int,
        gas: int,
        gas_price: int,
        nonce: int = 0,
    ):
        """
        :param master_copy: Master Copy of Gnosis Safe already deployed
        :param initializer: Data initializer to send to GnosisSafe setup method
        :param funder: Address that should get the payment (if payment set)
        :param payment_token: Address if a token is used. If not set, 0x0 will be ether
        :param payment: Payment
        :return: Transaction dictionary
        """
        return self._build_proxy_contract_creation_constructor(
            master_copy, initializer, funder, payment_token, payment
        ).buildTransaction(
            {
                "gas": gas,
                "gasPrice": gas_price,
                "nonce": nonce,
            }
        )

    def _build_contract_creation_tx_with_valid_signature(
        self, tx_dict: Dict[str, Any], s: int
    ) -> FrontierTransaction:
        """
        Use pyethereum `Transaction` to generate valid tx using a random signature

        :param tx_dict: Web3 tx dictionary
        :param s: Signature s value
        :return: PyEthereum creation tx for the proxy contract
        """
        zero_address = HexBytes("0x" + "0" * 40)
        f_address = HexBytes("0x" + "f" * 40)
        nonce = tx_dict["nonce"]
        gas_price = tx_dict["gasPrice"]
        gas = tx_dict["gas"]
        to = tx_dict.get("to", b"")  # Contract creation should always have `to` empty
        value = tx_dict["value"]
        data = tx_dict["data"]
        for _ in range(100):
            try:
                v, r = self.find_valid_random_signature(s)
                contract_creation_tx = FrontierTransaction(
                    nonce, gas_price, gas, to, value, HexBytes(data), v=v, r=r, s=s
                )
                sender_address = contract_creation_tx.sender
                contract_address: bytes = HexBytes(
                    mk_contract_address(sender_address, nonce)
                )
                # Reject degenerate addresses that some tools treat specially
                if sender_address in (zero_address, f_address) or contract_address in (
                    zero_address,
                    f_address,
                ):
                    raise ValueError("Invalid transaction")
                return contract_creation_tx
            except BadSignature:
                pass
        raise ValueError("Valid signature not found with s=%d" % s)

    def _estimate_gas(
        self, master_copy: str, initializer: bytes, funder: str, payment_token: str
    ) -> int:
        """
        Gas estimation done using web3 and calling the node

        Payment cannot be estimated, as no ether is in the address. So we add some gas later.

        :param master_copy: Master Copy of Gnosis Safe already deployed
        :param initializer: Data initializer to send to GnosisSafe setup method
        :param funder: Address that should get the payment (if payment set)
        :param payment_token: Address if a token is used. If not set, 0x0 will be ether
        :return: Total gas estimation
        """
        # Estimate the contract deployment. We cannot estimate the refunding, as the safe address has not any fund
        gas: int = self._build_proxy_contract_creation_constructor(
            master_copy, initializer, funder, payment_token, 0
        ).estimateGas()

        # We estimate the refund as a new tx
        if payment_token == NULL_ADDRESS:
            # Same cost to send 1 ether than 1000
            gas += self.w3.eth.estimate_gas({"to": funder, "value": 1})
        else:
            # Top should be around 52000 when storage is needed (funder no previous owner of token),
            # we use value 1 as we are simulating an internal call, and in that calls you don't pay for the data.
            # If it was a new tx sending 5000 tokens would be more expensive than sending 1 because of data costs
            try:
                gas += (
                    get_erc20_contract(self.w3, payment_token)
                    .functions.transfer(funder, 1)
                    .estimateGas({"from": payment_token})
                )
            except ValueError as exc:
                if "transfer amount exceeds balance" in str(exc):
                    return 70000
                raise InvalidERC20Token from exc
        return gas

    def _get_initial_setup_safe_data(self, owners: List[str], threshold: int) -> bytes:
        """
        Encode the calldata for the Safe `setup` call used as proxy initializer
        """
        return (
            get_safe_V0_0_1_contract(self.w3, self.master_copy)
            .functions.setup(
                owners,
                threshold,
                NULL_ADDRESS,  # Contract address for optional delegate call
                b"",  # Data payload for optional delegate call
            )
            .buildTransaction(
                {
                    "gas": 1,
                    "gasPrice": 1,
                }
            )["data"]
        )
from typing import List, Tuple, Union
from eth_keys import keys
from eth_keys.exceptions import BadSignature
from hexbytes import HexBytes
from gnosis.eth.constants import NULL_ADDRESS
def signature_split(
    signatures: Union[bytes, str], pos: int = 0
) -> Tuple[int, int, int]:
    """
    Extract one signature from a packed blob of 65-byte signatures.

    :param signatures: signatures in form of {bytes32 r}{bytes32 s}{uint8 v}
    :param pos: position of the signature
    :return: Tuple with v, r, s
    """
    signatures = HexBytes(signatures)
    start = 65 * pos
    chunk = signatures[start : start + 65]
    if len(chunk) < 65:
        raise ValueError(f"Signature must be at least 65 bytes {signatures.hex()}")
    r = int.from_bytes(chunk[:32], "big")
    s = int.from_bytes(chunk[32:64], "big")
    v = chunk[64]
    return v, r, s
def signature_to_bytes(v: int, r: int, s: int) -> bytes:
    """
    Pack an ecdsa signature into its canonical byte representation

    :param v:
    :param r:
    :param s:
    :return: signature in form of {bytes32 r}{bytes32 s}{uint8 v}
    """
    packed = bytearray()
    packed += r.to_bytes(32, byteorder="big")
    packed += s.to_bytes(32, byteorder="big")
    packed += v.to_bytes(1, byteorder="big")
    return bytes(packed)
def signatures_to_bytes(signatures: List[Tuple[int, int, int]]) -> bytes:
    """
    Concatenate a list of (v, r, s) tuples into a packed signature blob

    :param signatures: list of tuples(v, r, s)
    :return: 65 bytes per signature
    """
    parts = []
    for vrs in signatures:
        parts.append(signature_to_bytes(*vrs))
    return b"".join(parts)
def get_signing_address(signed_hash: Union[bytes, str], v: int, r: int, s: int) -> str:
    """
    Recover the address that produced the given ecdsa signature.

    :param signed_hash: Hash that was signed
    :param v: Signature v value
    :param r: Signature r value
    :param s: Signature s value
    :return: checksummed ethereum address, for example `0x568c93675A8dEb121700A6FAdDdfE7DFAb66Ae4A`
    :rtype: str or `NULL_ADDRESS` if signature is not valid
    """
    try:
        # eth_keys expects the recovery id (0/1); Ethereum signatures use 27/28
        public_key = keys.ecdsa_recover(signed_hash, keys.Signature(vrs=(v - 27, r, s)))
        return public_key.to_checksum_address()
    except BadSignature:
        return NULL_ADDRESS
from typing import Any, Dict, List, Optional, Union, cast
import requests
from eip712_structs import make_domain
from eth_account import Account
from eth_account.messages import encode_defunct
from eth_typing import AnyAddress, ChecksumAddress, HexStr
from hexbytes import HexBytes
from web3 import Web3
from gnosis.eth import EthereumNetwork, EthereumNetworkNotSupported
from .order import Order, OrderKind
try:
    from typing import TypedDict  # pylint: disable=no-name-in-module
except ImportError:
    # `typing.TypedDict` requires Python 3.8; fall back to the backport
    from typing_extensions import TypedDict

try:
    from functools import cache
except ImportError:
    # `functools.cache` was added in Python 3.9; emulate it on older
    # interpreters with an unbounded `lru_cache`
    from functools import lru_cache

    cache = lru_cache(maxsize=None)
class TradeResponse(TypedDict):
    """Trade item as returned by the `trades` API endpoint"""

    blockNumber: int
    logIndex: int
    orderUid: HexStr
    buyAmount: str  # Stringified int
    sellAmount: str  # Stringified int
    sellAmountBeforeFees: str  # Stringified int
    owner: AnyAddress  # Not checksummed
    buyToken: AnyAddress
    sellToken: AnyAddress
    txHash: HexStr
class AmountResponse(TypedDict):
    """Estimated amount as returned by the `markets` API endpoint"""

    amount: str
    token: AnyAddress
class ErrorResponse(TypedDict):
    """Error payload returned by the API when a request fails"""

    error_type: str
    description: str
class GnosisProtocolAPI:
    """
    Client for GnosisProtocol API. More info: https://docs.cowswap.exchange/
    """

    settlement_contract_addresses = {
        EthereumNetwork.MAINNET: "0x9008D19f58AAbD9eD0D60971565AA8510560ab41",
        EthereumNetwork.RINKEBY: "0x9008D19f58AAbD9eD0D60971565AA8510560ab41",
        EthereumNetwork.XDAI: "0x9008D19f58AAbD9eD0D60971565AA8510560ab41",
    }

    api_base_urls = {
        EthereumNetwork.MAINNET: "https://api.cow.fi/mainnet/api/v1/",
        EthereumNetwork.RINKEBY: "https://api.cow.fi/rinkeby/api/v1/",
        EthereumNetwork.XDAI: "https://api.cow.fi/xdai/api/v1/",
    }

    def __init__(self, ethereum_network: EthereumNetwork):
        """
        :param ethereum_network: Network the client will talk to
        :raises EthereumNetworkNotSupported: if the network has no known API url
        """
        self.network = ethereum_network
        if self.network not in self.api_base_urls:
            raise EthereumNetworkNotSupported(
                f"{self.network.name} network not supported by Gnosis Protocol"
            )
        self.domain_separator = self.build_domain_separator(self.network)
        self.base_url = self.api_base_urls[self.network]

    @classmethod
    def build_domain_separator(cls, ethereum_network: EthereumNetwork):
        """Build the EIP-712 domain used to sign orders for `ethereum_network`"""
        return make_domain(
            name="Gnosis Protocol",
            version="v2",
            chainId=str(ethereum_network.value),
            verifyingContract=cls.settlement_contract_addresses[ethereum_network],
        )

    def get_fee(self, order: Order) -> int:
        """
        :param order: Order to get the fee for
        :return: Fee amount for the order, `0` if the API reports an error
        """
        if order["kind"] == "sell":
            amount = order["sellAmount"]
        else:
            amount = order["buyAmount"]
        url = (
            self.base_url
            + f'fee/?sellToken={order["sellToken"]}&buyToken={order["buyToken"]}'
            f'&amount={amount}&kind={order["kind"]}'
        )
        result = requests.get(url).json()
        if "amount" in result:
            return int(result["amount"])
        else:
            return 0

    def place_order(
        self, order: Order, private_key: HexStr
    ) -> Union[HexStr, ErrorResponse]:
        """
        Place order. If `feeAmount=0` in Order it will be calculated calling `get_fee(order)`

        :return: UUID for the order as an hex hash
        """
        assert (
            order["buyAmount"] and order["sellAmount"]
        ), "Order buyAmount and sellAmount cannot be empty"

        url = self.base_url + "orders/"
        order["feeAmount"] = order["feeAmount"] or self.get_fee(order)
        # Sign the EIP-712 order hash with `eth_sign` semantics
        signable_bytes = order.signable_bytes(domain=self.domain_separator)
        signable_hash = Web3.keccak(signable_bytes)
        message = encode_defunct(primitive=signable_hash)
        account = Account.from_key(private_key)
        signed_message = account.sign_message(message)

        data_json = {
            "sellToken": order["sellToken"].lower(),
            "buyToken": order["buyToken"].lower(),
            "sellAmount": str(order["sellAmount"]),
            "buyAmount": str(order["buyAmount"]),
            "validTo": order["validTo"],
            "appData": HexBytes(order["appData"]).hex()
            if isinstance(order["appData"], bytes)
            else order["appData"],
            "feeAmount": str(order["feeAmount"]),
            "kind": order["kind"],
            "partiallyFillable": order["partiallyFillable"],
            "signature": signed_message.signature.hex(),
            "signingScheme": "ethsign",
            "from": account.address,
        }
        r = requests.post(url, json=data_json)
        if r.ok:
            return HexStr(r.json())
        else:
            return ErrorResponse(r.json())

    def get_orders(
        self, owner: ChecksumAddress, offset: int = 0, limit: int = 10
    ) -> List[Dict[str, Any]]:
        """
        :param owner:
        :param offset: Defaults to 0
        :param limit: Defaults to 10. Maximum is 1000, minimum is 1
        :return: Orders of one user paginated. The orders are ordered by their creation
            date descending (newest orders first).
            To enumerate all orders start with offset 0 and keep increasing the offset by the
            total number of returned results. When a response contains less than the limit
            the last page has been reached.
        """
        # Forward the pagination parameters; previously they were accepted but ignored
        url = self.base_url + f"account/{owner}/orders?offset={offset}&limit={limit}"
        r = requests.get(url)
        if r.ok:
            return cast(List[Dict[str, Any]], r.json())
        else:
            return ErrorResponse(r.json())

    def get_trades(
        self, order_ui: Optional[HexStr] = None, owner: Optional[ChecksumAddress] = None
    ) -> List[TradeResponse]:
        """
        Get trades either by order uid or by owner (exactly one must be provided)
        """
        assert bool(order_ui) ^ bool(
            owner
        ), "order_ui or owner must be provided, but not both"
        url = self.base_url + "trades/?"
        if order_ui:
            url += f"orderUid={order_ui}"
        elif owner:
            url += f"owner={owner}"

        r = requests.get(url)
        if r.ok:
            return cast(List[TradeResponse], r.json())
        else:
            return ErrorResponse(r.json())

    def get_estimated_amount(
        self,
        base_token: ChecksumAddress,
        quote_token: ChecksumAddress,
        kind: OrderKind,
        amount: int,
    ) -> Union[AmountResponse, ErrorResponse]:
        """
        The estimated amount in quote token for either buying or selling amount of baseToken.
        """
        url = self.base_url + f"markets/{base_token}-{quote_token}/{kind.name}/{amount}"
        r = requests.get(url)
        if r.ok:
            return AmountResponse(r.json())
        else:
            return ErrorResponse(r.json())
from typing import Callable, Optional
import numpy as np
import pandas as pd
from safe_evaluation.calculation import Calculator
from safe_evaluation.constants import OPERATORS, ALLOWED_FUNCS
from safe_evaluation.preprocessing import Preprocessor
from safe_evaluation.settings import Settings
class Evaluator:
    """
    Safely evaluate string expressions: a `Preprocessor` turns the command
    into a stack and a `Calculator` computes the result from that stack.
    """

    allowed_funcs = ALLOWED_FUNCS
    operators = OPERATORS

    def __init__(self, preprocessor=Preprocessor, calculator=Calculator):
        self.preprocessor = preprocessor(self)
        self.calculator = calculator(self)
        self.settings = Settings()

    def change_settings(self, settings: Settings):
        """Replace the current evaluation settings"""
        self.settings = settings

    def _beautify(self, el):
        """
        Returns string format for the input element
        """
        if isinstance(el, tuple):
            # Stack elements may be (type, value) tuples; show only the value
            return str(el[1])
        else:
            return str(el)

    def get_prev_and_next(self, s, pos):
        """
        Return previous and next stack elements.
        """
        prev_ = ''.join(map(self._beautify, s[:pos]))
        next_ = ''.join(map(self._beautify, s[pos + 1:]))
        return prev_, next_

    def raise_excess_parentheses(self, s, pos):
        """Raise an error pointing at the unbalanced parenthesis at `pos` in `s`"""
        prev_, next_ = self.get_prev_and_next(s, pos)
        raise Exception(('Excess parenthesis at position: "{expression}"').format(
            expression=f'{prev_} --> {s[pos]} <-- {next_}'))

    def handle_function(self, func: str) -> Callable:
        """
        Resolve a function name to a callable, honouring the availability
        settings: `numpy`/`np`/`pandas`/`pd` attribute access, or a function
        whitelisted in `ALLOWED_FUNCS`.
        """
        # NOTE(review): only the first attribute level is resolved (e.g.
        # `np.linalg.norm` would look up `linalg`) — confirm this is intended
        if func.startswith(('numpy', 'np', 'pandas', 'pd')) and self.settings.is_available(func) and '.' in func:
            method = func.split('.')[1]
            package = np if func.startswith('n') else pd
            return getattr(package, method)
        if self.settings.is_available(func):
            return self.allowed_funcs[func]
        raise Exception(f"Unsupported function {func}")

    def solve(self, command: str, df: Optional[pd.DataFrame] = None, local: dict = None):
        """
        Evaluate `command` against the optional DataFrame `df` and `local`
        variables and return the computed result.
        """
        stack = self.preprocessor.prepare(command, df, local)
        output = self.calculator.calculate(stack, df, local)
        return output
import re
from abc import ABCMeta, abstractmethod
from typing import List, Union, Optional
import pandas as pd
from safe_evaluation.constants import TypeOfCommand, OPERATORS_PRIORITIES
class BaseCalculator(metaclass=ABCMeta):
    """Abstract interface for stack-based expression calculators"""

    @abstractmethod
    def calculate(self, stack, df, local):
        """Evaluate the prepared `stack` (against `df` and `local`) and return the result"""
        pass
class Calculator(BaseCalculator):
    """
    Evaluates a preprocessed token stack against an optional DataFrame and a
    dict of local variables.

    Tokens are either plain strings (operators, parentheses) or tuples whose
    first element is a ``TypeOfCommand`` member describing the rest of the
    tuple (column reference, literal value, method call, ...). Evaluation is
    a shunting-yard style pass (see ``_polish_notation``).
    """

    # Operator symbol -> precedence, shared by all instances
    operators_priorities = OPERATORS_PRIORITIES

    def __init__(self, evaluator):
        # The evaluator supplies the operator table, function handling and
        # recursive solving of sub-expressions (see ``_analyse``).
        self.evaluator = evaluator

    def _analyse(self, string, df=None, local=None):
        """
        Analyses args of the method
        Returns arg in correct format(string, float, bool or lambda)
        """
        # Local variables take precedence; anything else is evaluated as a
        # full sub-expression through the evaluator.
        if local and string in local:
            return local[string]
        return self.evaluator.solve(string, df, local)

    def _is_arg(self, string):
        """
        Checks if argument is arg or kwarg.

        Returns True for a positional argument, False for ``name=value``.
        NOTE(review): the regex also matches comparisons such as ``a == b``;
        presumably such expressions never reach this point unwrapped -- confirm.
        """
        if re.match(r' *[\w]* *=', string):
            return False
        return True

    def _split_params(self, s: str):
        """
        Splits params of method by comma.

        Commas nested inside (), {}, [] or inside an unfinished ``lambda``
        header are not treated as separators. Unbalanced brackets are
        reported through ``evaluator.raise_excess_parentheses``.
        """
        # todo: handle "(", ")" as name of column
        stack = []  # open brackets with their positions, for error reporting
        brackets = {'(': ')', '{': '}', '[': ']'}
        brackets_possible = {'(', ')', '{', '}', '[', ']'}
        pos = amount = prev = 0  # amount == current bracket nesting depth
        answer = []
        for element in s:
            if element in brackets_possible:
                if element in brackets:
                    stack.append((element, pos))
                    amount += 1
                else:
                    amount -= 1
                    if not stack or brackets[stack.pop()[0]] != element:
                        self.evaluator.raise_excess_parentheses(s, pos)
            # A comma inside a "lambda args" header (before the ':') separates
            # lambda parameters, not method parameters -- do not split there.
            lambda_start = re.match(r' *lambda .*', s[prev: pos])
            whole_lambda = re.match(r' *lambda [^:]*:', s[prev: pos])
            if amount == 0 and element == ',' and (not lambda_start or (lambda_start and whole_lambda)):
                answer.append(s[prev:pos].strip())
                prev = pos + 1
            pos += 1
        if stack:
            # Some opening bracket was never closed
            self.evaluator.raise_excess_parentheses(s, stack[-1][1])
        answer.append(s[prev:].strip())
        return answer

    def _solve_inside_method(self, command, df, local):
        """
        Gets args as command inside the method
        Returns args and kwargs
        example:
            command = "lambda t: t ** 2 > 34, q = 0.5"
            returns: [Lambda], {q: 0.5}
        """
        if not command:
            return [], {}
        params = self._split_params(command)
        args = []
        kwargs = {}
        are_args = True  # flips to False after the first keyword argument
        for param in params:
            if self._is_arg(param):
                if not are_args:
                    raise SyntaxError("Positional argument follows keyword argument")
                arg = self._analyse(param, df, local)
                args.append(arg)
            else:
                are_args = False
                keyword = param.split('=')[0].replace(' ', '')
                # maxsplit=1 keeps any '=' characters inside the value intact
                arg = self._analyse(param.split('=', maxsplit=1)[1], df, local)
                kwargs[keyword] = arg
        return args, kwargs

    def _get_variable(self, var, df, local) -> pd.Series:
        """
        Returns series format for any var.

        Resolves a token tuple into its concrete value (literal, DataFrame
        column, whole DataFrame or local variable). Non-tuple values and
        tuples with an unrecognised tag are returned unchanged.
        """
        variable = None
        if isinstance(var, tuple):
            if var[0] == TypeOfCommand.VALUE:
                variable = var[1]
            elif var[0] == TypeOfCommand.COLUMN:
                try:
                    variable = df[var[1]]
                except KeyError:
                    raise KeyError(('The input DataFrame doesn\'t contain "{var}" column').format(var=f'{var[1]}'))
            elif var[0] == TypeOfCommand.DATAFRAME:
                # A 1-tuple means the whole DataFrame, a 2-tuple one column
                if len(var) == 1:
                    variable = df
                else:
                    variable = df[var[1]]
            elif var[0] == TypeOfCommand.VARIABLE:
                if local and var[1] in local:
                    variable = local[var[1]]
                else:
                    raise Exception(('Variable "{var}" doesn\'t exist').format(var=f'{var[1]}'))
        else:
            variable = var
        return var if variable is None else variable

    def _raise_operation_cant_be_applied(self, stack, op):
        # Guard used before popping operands from the operand stack
        if not stack:
            raise Exception(('Operation "{operation}" can\'t be applied to Nothing').format(operation=op))

    def _operate(self, stack, op, df, local):
        """
        Calculates result of operations.

        ``op`` is an operator symbol; '~' is the only unary one. The empty
        string '' is used as a pseudo-operator (see ``_polish_notation``) to
        apply a METHOD/PROPERTY token (right operand) to the value below it.
        """
        if op == '~':
            self._raise_operation_cant_be_applied(stack, op)
            l = stack.pop()
            var1 = self._get_variable(l, df, local)
            stack.append(self.evaluator.operators[op](var1))
        else:
            self._raise_operation_cant_be_applied(stack, op)
            r = stack.pop()
            self._raise_operation_cant_be_applied(stack, op)
            l = stack.pop()
            var1 = self._get_variable(l, df, local)
            if isinstance(r, tuple) and r[0] == TypeOfCommand.METHOD:
                if hasattr(var1, r[2]):
                    # apply/quantile are restricted to pandas objects so a
                    # method of the same name on another type is not invoked
                    if r[2] in {'apply', 'quantile'} and not isinstance(var1, (pd.Series, pd.DataFrame)):
                        raise Exception(('Method "{method}" can only be applied to Series or Dataframe, not {type}')
                                        .format(method=r[2], type=type(var1)))
                    args, kwargs = self._solve_inside_method(r[1], df, local)
                    # Tuple arguments coming from the tokenizer are converted
                    # to lists before being passed on
                    args = [list(arg) if isinstance(arg, tuple) else arg for arg in args]
                    kwargs = {k: list(v) if isinstance(v, tuple) else v for k, v in kwargs.items()}
                    stack.append(getattr(var1, r[2])(*args, **kwargs))
                else:
                    raise Exception(('Method "{method}" doesn\'t exist').format(method=r[2]))
            elif isinstance(r, tuple) and r[0] == TypeOfCommand.PROPERTY:
                if hasattr(var1, r[1]):
                    var1 = self._get_variable(l, df, local)
                    stack.append(getattr(var1, r[1]))
                else:
                    raise Exception(('Method "{method}" doesn\'t exist').format(method=r[1]))
            else:
                var1 = self._get_variable(l, df, local)
                var2 = self._get_variable(r, df, local)
                stack.append(self.evaluator.operators[op](var1, var2))

    def _polish_notation(self, s: List[Union[str, tuple]], df: Optional[pd.DataFrame] = None, local: dict = None):
        """
        Returns result of command.
        https://e-maxx.ru/algo/expressions_parsing
        """
        stack = []  # operand stack
        op = []     # operator stack
        for element in s:
            if element == '(':
                op.append(element)
            elif element == ')':
                # Unwind operators until the matching '('
                while op[-1] != '(':
                    self._operate(stack, op.pop(), df, local)
                op.pop()
            elif element in self.evaluator.operators.keys():
                curop = element
                # {'~', '**'} are right associated
                while op and ((curop not in {'~', '**'} and
                               self.operators_priorities.get(op[-1], -1) >= self.operators_priorities.get(curop, -1)) or
                              (curop in {'~', '**'} and
                               self.operators_priorities.get(op[-1], -1) > self.operators_priorities.get(curop, -1))):
                    self._operate(stack, op.pop(), df, local)
                op.append(curop)
            else:
                stack.append(element)
                if element[0] in (TypeOfCommand.METHOD, TypeOfCommand.PROPERTY):
                    # Immediately bind the method/property to the operand
                    # below it; '' is the pseudo-operator (see _operate)
                    self._operate(stack, '', df, local)
                if element[0] == TypeOfCommand.FUNCTION_EXECUTABLE:
                    r = stack.pop()
                    args, kwargs = self._solve_inside_method(r[1], df, local)
                    stack.append(self.evaluator.handle_function(r[2])(*args, **kwargs))
                if element[0] == TypeOfCommand.FUNCTION:
                    # A bare FUNCTION token must be the entire expression
                    r = stack.pop()
                    if stack:
                        raise Exception("There can't be function and something else")
                    return r[1]
        while op:
            self._operate(stack, op.pop(), df, local)
        if len(stack) > 1:
            raise Exception("2 or more elements left without operations")
        value = stack.pop()
        return self._get_variable(value, df, local)

    def calculate(self, stack, df, local):
        """Entry point: evaluate the prepared token ``stack``."""
        output = self._polish_notation(stack, df, local)
        return output
import os
import copy
import numpy
import math
import logging
from osgeo import ogr
from tempfile import mkstemp
from urllib2 import urlopen
from safe.api import read_layer
logger = logging.getLogger(__name__)
# Spatial layer file extensions that are recognised in Risiko
# FIXME: Perhaps add '.gml', '.zip', ...
LAYER_TYPES = ['.shp', '.asc', '.tif', '.tiff', '.geotif', '.geotiff']
# Map between extensions and ORG drivers
DRIVER_MAP = {'.shp': 'ESRI Shapefile',
'.gml': 'GML',
'.tif': 'GTiff',
'.asc': 'AAIGrid'}
# Map between Python types and OGR field types
# FIXME (Ole): I can't find a double precision type for OGR
TYPE_MAP = {type(None): ogr.OFTString, # What else should this be?
type(''): ogr.OFTString,
type(0): ogr.OFTInteger,
type(0.0): ogr.OFTReal,
type(numpy.array([0.0])[0]): ogr.OFTReal, # numpy.float64
type(numpy.array([[0.0]])[0]): ogr.OFTReal} # numpy.ndarray
# Templates for downloading layers through rest
WCS_TEMPLATE = '%s?version=1.0.0' + \
'&service=wcs&request=getcoverage&format=GeoTIFF&' + \
'store=false&coverage=%s&crs=EPSG:4326&bbox=%s' + \
'&resx=%s&resy=%s'
WFS_TEMPLATE = '%s?service=WFS&version=1.0.0' + \
'&request=GetFeature&typeName=%s' + \
'&outputFormat=SHAPE-ZIP&bbox=%s'
# Miscellaneous auxiliary functions
def unique_filename(**kwargs):
    """Create new filename guaranteed not to exist previously

    Use mkstemp to create the file, then remove it and return the name
    See http://docs.python.org/library/tempfile.html for details.

    :param kwargs: Keyword arguments forwarded to tempfile.mkstemp
                   (e.g. suffix, prefix, dir).
    :returns: Path of a filename that did not exist at call time.
    """
    handle, filename = mkstemp(**kwargs)
    try:
        # mkstemp returns an open OS-level file descriptor; close it before
        # removing the file so we do not leak a descriptor per call and so
        # removal works on platforms that refuse to delete open files.
        os.close(handle)
        os.remove(filename)
    except OSError:
        # Best effort: if removal fails the caller still gets a usable name
        pass
    return filename
# GeoServer utility functions
def is_server_reachable(url):
    """Make an http connection to url to see if it is accesible.

    Returns boolean
    """
    try:
        urlopen(url)
    except Exception:
        return False
    return True
def write_keywords(keywords, filename):
    """Write keywords dictionary to file

    Input
        keywords: Dictionary of keyword, value pairs
        filename: Name of keywords file. Extension expected to be .keywords

    Keys must be strings
    Values must be strings or None.

    If value is None, only the key will be written. Otherwise key, value pairs
    will be written as key: value

    Trailing or preceding whitespace will be ignored.
    """
    # Input checks
    basename, ext = os.path.splitext(filename)

    # FIXME (Ole): Why don't we just pass in the filename and let
    # this function decide the extension?
    msg = ('Unknown extension for file %s. '
           'Expected %s.keywords' % (filename, basename))
    assert ext == '.keywords', msg

    # Write. The context manager guarantees the file handle is closed even
    # when one of the validation asserts below fires mid-loop (the original
    # left the handle open in that case).
    with open(filename, 'w') as fid:
        for k, v in keywords.items():
            msg = ('Key in keywords dictionary must be a string. '
                   'I got %s with type %s' % (k, str(type(k))[1:-1]))
            assert isinstance(k, basestring), msg

            key = k.strip()

            msg = ('Key in keywords dictionary must not contain the ":" '
                   'character. I got "%s"' % key)
            assert ':' not in key, msg

            if v is None:
                fid.write('%s\n' % key)
            else:
                val = str(v).strip()

                msg = ('Value in keywords dictionary must be a string or None. '
                       'I got %s with type %s' % (val, type(val)))
                assert isinstance(val, basestring), msg

                msg = ('Value must not contain the ":" character. '
                       'I got "%s"' % val)
                assert ':' not in val, msg

                # FIXME (Ole): Have to remove commas (issue #148)
                val = val.replace(',', '')
                fid.write('%s: %s\n' % (key, val))
def extract_WGS84_geotransform(layer):
    """Extract geotransform from OWS layer object.

    Input
        layer: Raster layer object e.g. obtained from WebCoverageService

    Output:
        geotransform: GDAL geotransform (www.gdal.org/gdal_tutorial.html)

    Notes:
        The datum of the returned geotransform is always WGS84 geographic
        irrespective of the native datum/projection.

        Unlike the code for extracting native geotransform, this one
        does not require registration to be offset by half a pixel.
        Unit test test_geotransform_from_geonode in test_calculations verifies
        that the two extraction methods are equivalent for WGS84 layers.
    """
    # Bounding box is ordered (west, south, east, north)
    bbox = layer.boundingBoxWGS84
    west, south, east, north = bbox[0], bbox[1], bbox[2], bbox[3]

    # Grid high limits are zero-based indices, hence the +1
    grid = layer.grid
    num_cols = int(grid.highlimits[0]) + 1
    num_rows = int(grid.highlimits[1]) + 1

    # Pixel sizes; the n-s resolution is negative as rows run north to south
    dx = (east - west) / num_cols
    dy = (south - north) / num_rows

    # Geotransform 6-tuple with zero rotation terms
    return (west, dx, 0.0, north, 0.0, dy)
def geotransform2resolution(geotransform, isotropic=False,
                            # FIXME (Ole): Check these tolerances (issue #173)
                            rtol=5.0e-2, atol=1.0e-2):
    """Convert geotransform to resolution

    Input
        geotransform: GDAL geotransform (6-tuple).
                      (top left x, w-e pixel resolution, rotation,
                       top left y, rotation, n-s pixel resolution).
                      See e.g. http://www.gdal.org/gdal_tutorial.html

        isotropic: If True, verify that dx == dy and return dx
                   If False (default) return 2-tuple (dx, dy)

        rtol, atol: Tolerances passed on to numpy.allclose when checking
                    that the two resolutions qualify as isotropic.

    Output
        resolution: grid spacing (resx, resy) in (positive) decimal
                    degrees ordered as longitude first, then latitude,
                    or resx alone (if isotropic is True)
    """
    resx = geotransform[1]    # w-e pixel resolution
    resy = -geotransform[5]   # n-s pixel resolution (stored negative)

    if not isotropic:
        return resx, resy

    msg = ('Resolution requested with '
           'isotropic=True, but '
           'resolutions in the horizontal and vertical '
           'are different: resx = %.12f, resy = %.12f. '
           % (resx, resy))
    assert numpy.allclose(resx, resy,
                          rtol=rtol, atol=atol), msg
    return resx
def bbox_intersection(*args):
    """Compute intersection between two or more bounding boxes

    Input
        args: two or more bounding boxes.
              Each is assumed to be a list or a tuple with
              four coordinates (W, S, E, N)

    Output
        result: The minimal common bounding box, or None if the boxes
                do not all overlap
    """
    msg = 'Function bbox_intersection must take at least 2 arguments.'
    assert len(args) > 1, msg

    # Start from the whole WGS84 globe and shrink per box
    result = [-180, -90, 180, 90]
    for a in args:
        msg = ('Bounding box expected to be a list of the '
               'form [W, S, E, N]. '
               'Instead i got "%s"' % str(a))

        # Only non-iterables should fail here; the original bare 'except:'
        # also swallowed KeyboardInterrupt/SystemExit.
        try:
            box = list(a)
        except TypeError:
            raise Exception(msg)

        assert len(box) == 4, msg

        msg = 'Western boundary must be less than eastern. I got %s' % box
        assert box[0] < box[2], msg

        msg = 'Southern boundary must be less than northern. I got %s' % box
        assert box[1] < box[3], msg

        # Compute intersection

        # West and South
        for i in [0, 1]:
            result[i] = max(result[i], box[i])

        # East and North
        for i in [2, 3]:
            result[i] = min(result[i], box[i])

    # Check validity and return
    if result[0] < result[2] and result[1] < result[3]:
        return result
    else:
        return None
def buffered_bounding_box(bbox, resolution):
    """Grow bounding box with one unit of resolution in each direction

    This will ensure there is enough pixels to robustly provide
    interpolated values without having to painstakingly deal with
    all corner cases such as 1 x 1, 1 x 2 and 2 x 1 arrays.

    The border will also make sure that points that would otherwise fall
    outside the domain (as defined by a tight bounding box) get assigned
    values.

    Input
        bbox: Bounding box with format [W, S, E, N]
        resolution: (resx, resy) - Raster resolution in each direction.
                    res - Raster resolution in either direction
                    If resolution is None bbox is returned unchanged.

    Ouput
        Adjusted bounding box

    Case in point: Interpolation point O would fall outside this domain
                   even though there are enough grid points to support it

    --------------
    |            |
    |   *     *  | *    *
    |           O|
    |            |
    |   *     *  | *    *
    --------------
    """
    bbox = copy.copy(list(bbox))

    if resolution is None:
        return bbox

    try:
        resx, resy = resolution
    # The original used a bare 'except:'; only failed unpacking (scalar or
    # wrong-length resolution) should fall back to the scalar branch.
    except (TypeError, ValueError):
        resx = resy = resolution

    bbox[0] -= resx
    bbox[1] -= resy
    bbox[2] += resx
    bbox[3] += resy

    return bbox
def is_sequence(x):
    """Determine if x behaves like a true sequence but not a string

    This will for example return True for lists, tuples and numpy arrays
    but False for strings and dictionaries.

    NOTE(review): empty sequences ([] or ()) return False because the x[0]
    probe raises IndexError -- confirm callers rely on this quirk before
    changing it. The bare except also hides unrelated errors from exotic
    __getitem__ implementations.
    """
    if isinstance(x, basestring):
        return False

    try:
        x[0]
    except:
        return False
    else:
        return True
# Map of ogr numerical geometry types to their textual representation
# FIXME (Ole): Some of them don't exist, even though they show up
# when doing dir(ogr) - Why?:
geometry_type_map = {ogr.wkbPoint: 'Point',
ogr.wkbPoint25D: 'Point25D',
ogr.wkbPolygon: 'Polygon',
ogr.wkbPolygon25D: 'Polygon25D',
#ogr.wkbLinePoint: 'LinePoint', # ??
ogr.wkbGeometryCollection: 'GeometryCollection',
ogr.wkbGeometryCollection25D: 'GeometryCollection25D',
ogr.wkbLineString: 'LineString',
ogr.wkbLineString25D: 'LineString25D',
ogr.wkbLinearRing: 'LinearRing',
ogr.wkbMultiLineString: 'MultiLineString',
ogr.wkbMultiLineString25D: 'MultiLineString25D',
ogr.wkbMultiPoint: 'MultiPoint',
ogr.wkbMultiPoint25D: 'MultiPoint25D',
ogr.wkbMultiPolygon: 'MultiPolygon',
ogr.wkbMultiPolygon25D: 'MultiPolygon25D',
ogr.wkbNDR: 'NDR',
ogr.wkbNone: 'None',
ogr.wkbUnknown: 'Unknown'}
def geometrytype2string(g_type):
    """Provides string representation of numeric geometry types

    FIXME (Ole): I can't find anything like this in ORG. Why?
    """
    # All mapped values are non-empty strings, so a None result from .get
    # reliably means "not in the map"
    name = geometry_type_map.get(g_type)
    if name is not None:
        return name
    if g_type is None:
        return 'No geometry type assigned'
    return 'Unknown geometry type: %s' % str(g_type)
def points_between_points(point1, point2, delta):
    """Creates an array of points between two points given a delta

    u = (x1-x0, y1-y0)/L, where
    L=sqrt( (x1-x0)^2 + (y1-y0)^2).
    If r is the resolution, then the
    points will be given by
    (x0, y0) + u * n * r for n = 1, 2, ....
    while len(n*u*r) < L
    """
    x0, y0 = point1
    x1, y1 = point2

    # Euclidean distance between the end points
    length = math.sqrt(math.pow((x1 - x0), 2) + math.pow((y1 - y0), 2))
    num_steps = int(length / delta)

    # Unit direction vector from point1 towards point2
    direction = numpy.array([x1 - x0, y1 - y0]) / length

    points = [point1]
    for step in range(num_steps):
        points.append(point1 + direction * (step + 1) * delta)
    return numpy.array(points)
def titelize(s):
    """Convert string into title

    This is better than the built-in method title() because
    it leaves all uppercase words like UK unchanged.

    Source http://stackoverflow.com/questions/1549641/
                  how-to-capitalize-the-first-letter-of-each-word-in-a-string-python
    """
    # Replace underscores with spaces
    s = s.replace('_', ' ')

    # Capitalise the first letter of each word, leaving the rest untouched
    # so acronyms such as UK are preserved. Guard against empty words
    # (produced by leading/trailing/double spaces or an empty input), which
    # made the original w[0] raise IndexError.
    s = ' '.join([w[0].upper() + w[1:] if w else w for w in s.split(' ')])
    return s
def nanallclose(x, y, rtol=1.0e-5, atol=1.0e-8):
    """Numpy allclose function which allows NaN

    Input
        x, y: Either scalars or numpy arrays
        rtol, atol: Tolerances passed on to numpy.allclose

    Output
        True or False

    Returns True if all non-nan elements pass.
    """
    xn = numpy.isnan(x)
    yn = numpy.isnan(y)
    if numpy.any(xn != yn):
        # Presence of NaNs is not the same in x and y
        return False

    if numpy.all(xn):
        # Everything is NaN.
        # This will also take care of x and y being NaN scalars
        return True

    # Filter NaN's out using boolean inversion. The original used unary
    # minus (x[-xn]) which was removed for boolean arrays in numpy 1.13
    # and now raises TypeError.
    if numpy.any(xn):
        x = x[~xn]
        y = y[~yn]

    # Compare non NaN's and return
    return numpy.allclose(x, y, rtol=rtol, atol=atol)
def get_common_resolution(haz_metadata, exp_metadata):
    """Determine common resolution for raster layers

    Input
        haz_metadata: Metadata for hazard layer
        exp_metadata: Metadata for exposure layer

    Output
        raster_resolution: Common resolution or None (in case of vector layers)
    """
    haz_res = None
    exp_res = None
    if haz_metadata['layertype'] == 'raster':
        haz_res = haz_metadata['resolution']
    if exp_metadata['layertype'] == 'raster':
        exp_res = exp_metadata['resolution']

    if haz_res is not None and exp_res is not None:
        # Both layers are rasters: use the finest (minimum) resolution
        # in each direction
        return (min(haz_res[0], exp_res[0]),
                min(haz_res[1], exp_res[1]))

    # At least one vector layer: native resolution will be used
    return None
def get_bounding_boxes(haz_metadata, exp_metadata, req_bbox):
    """Check and get appropriate bounding boxes for input layers

    Input
        haz_metadata: Metadata for hazard layer
        exp_metadata: Metadata for exposure layer
        req_bbox: Bounding box (string as requested by HTML POST, or list)

    Output
        haz_bbox: Bounding box to be used for hazard layer.
        exp_bbox: Bounding box to be used for exposure layer
        imp_bbox: Bounding box to be used for resulting impact layer

    Note exp_bbox and imp_bbox are the same and calculated as the
         intersection among hazard, exposure and viewport bounds.
         haz_bbox may be grown by one pixel size in case exposure data
         is vector data to make sure points always can be interpolated

    Raises Exception if the three bounding boxes do not overlap or if
    req_bbox is neither a string nor a sequence.
    """
    # Check requested bounding box and establish viewport bounding box
    if isinstance(req_bbox, basestring):
        check_bbox_string(req_bbox)
        vpt_bbox = bboxstring2list(req_bbox)
    elif is_sequence(req_bbox):
        # Normalise through the string form so the same checks apply
        x = bboxlist2string(req_bbox)
        check_bbox_string(x)
        vpt_bbox = bboxstring2list(x)
    else:
        msg = ('Invalid bounding box %s (%s). '
               'It must be a string or a list' % (str(req_bbox), type(req_bbox)))
        raise Exception(msg)

    # Get bounding boxes for layers
    haz_bbox = haz_metadata['bounding_box']
    exp_bbox = exp_metadata['bounding_box']

    # New bounding box for data common to hazard, exposure and viewport
    # Download only data within this intersection
    intersection_bbox = bbox_intersection(vpt_bbox, haz_bbox, exp_bbox)
    if intersection_bbox is None:
        # Bounding boxes did not overlap
        msg = ('Bounding boxes of hazard data [%s], exposure data [%s] '
               'and viewport [%s] did not overlap, so no computation was '
               'done. Please make sure you pan to where the data is and '
               'that hazard and exposure data overlaps.'
               % (bboxlist2string(haz_bbox, decimals=3),
                  bboxlist2string(exp_bbox, decimals=3),
                  bboxlist2string(vpt_bbox, decimals=3)))
        logger.info(msg)
        raise Exception(msg)

    # Grow hazard bbox to buffer this common bbox in case where
    # hazard is raster and exposure is vector, so boundary points can
    # always be interpolated
    if (haz_metadata['layertype'] == 'raster' and
        exp_metadata['layertype'] == 'vector'):
        haz_res = haz_metadata['resolution']
        haz_bbox = buffered_bounding_box(intersection_bbox, haz_res)
    else:
        haz_bbox = intersection_bbox

    # Usually the intersection bbox is used for both exposure layer and result
    exp_bbox = imp_bbox = intersection_bbox

    return haz_bbox, exp_bbox, imp_bbox
def check_bbox_string(bbox_string):
    """Check that bbox string is valid

    Input
        bbox_string: Bounding box as a string of the form 'W,S,E,N'

    Raises AssertionError if the string is malformed, if any coordinate
    is outside the valid longitude/latitude ranges, or if the western
    (southern) border is not strictly less than the eastern (northern).
    """
    msg = 'Expected bbox as a string with format "W,S,E,N"'
    assert isinstance(bbox_string, basestring), msg

    # Use checks from string to list conversion
    # FIXME (Ole): Would be better to separate the checks from the conversion
    # and use those checks directly.
    minx, miny, maxx, maxy = bboxstring2list(bbox_string)

    # Check semantic integrity
    msg = ('Western border %.5f of bounding box %s was out of range '
           'for longitudes ([-180:180])' % (minx, bbox_string))
    assert -180 <= minx <= 180, msg

    msg = ('Eastern border %.5f of bounding box %s was out of range '
           'for longitudes ([-180:180])' % (maxx, bbox_string))
    assert -180 <= maxx <= 180, msg

    msg = ('Southern border %.5f of bounding box %s was out of range '
           'for latitudes ([-90:90])' % (miny, bbox_string))
    assert -90 <= miny <= 90, msg

    msg = ('Northern border %.5f of bounding box %s was out of range '
           'for latitudes ([-90:90])' % (maxy, bbox_string))
    assert -90 <= maxy <= 90, msg

    msg = ('Western border %.5f was greater than or equal to eastern border '
           '%.5f of bounding box %s' % (minx, maxx, bbox_string))
    assert minx < maxx, msg

    msg = ('Southern border %.5f was greater than or equal to northern border '
           '%.5f of bounding box %s' % (miny, maxy, bbox_string))
    assert miny < maxy, msg
def bboxstring2list(bbox_string):
    """Convert bounding box string to list

    Input
        bbox_string: String of bounding box coordinates of the form 'W,S,E,N'

    Output
        bbox: List of floating point numbers with format [W, S, E, N]

    Raises AssertionError if the string is not four comma separated numbers.
    """
    msg = ('Bounding box must be a string with coordinates following the '
           'format 105.592,-7.809,110.159,-5.647\n'
           'Instead I got %s of type %s.' % (str(bbox_string),
                                             type(bbox_string)))
    assert isinstance(bbox_string, basestring), msg

    fields = bbox_string.split(',')
    msg = ('Bounding box string must have 4 coordinates in the form '
           '"W,S,E,N". I got bbox == "%s"' % bbox_string)
    assert len(fields) == 4, msg

    for x in fields:
        try:
            float(x)
        # 'except X as e' works on Python 2.6+ as well; the old
        # 'except X, e' form is a SyntaxError on Python 3.
        except ValueError as e:
            msg = ('Bounding box %s contained non-numeric entry %s, '
                   'original error was "%s".' % (bbox_string, x, e))
            raise AssertionError(msg)

    return [float(x) for x in fields]
def get_bounding_box_string(filename):
    """Get bounding box for specified raster or vector file

    Input:
        filename: Path to a spatial layer readable by safe.api.read_layer

    Output:
        bounding box as python string 'West, South, East, North'
    """
    # Thin convenience wrapper around get_bounding_box + bboxlist2string
    return bboxlist2string(get_bounding_box(filename))
def bboxlist2string(bbox, decimals=6):
    """Convert bounding box list to comma separated string

    Input
        bbox: List of coordinates of the form [W, S, E, N]
        decimals: Number of decimal places used per coordinate

    Output
        bbox_string: Format 'W,S,E,N' - each will have 6 decimal points
    """
    msg = 'Got string %s, but expected bounding box as a list' % str(bbox)
    assert not isinstance(bbox, basestring), msg

    try:
        bbox = list(bbox)
    except:
        msg = 'Could not coerce bbox %s into a list' % str(bbox)
        raise Exception(msg)

    msg = ('Bounding box must have 4 coordinates [W, S, E, N]. '
           'I got %s' % str(bbox))
    assert len(bbox) == 4, msg

    for x in bbox:
        try:
            float(x)
        # 'except X as e' works on Python 2.6+ as well; the old
        # 'except X, e' form is a SyntaxError on Python 3.
        except ValueError as e:
            msg = ('Bounding box %s contained non-numeric entry %s, '
                   'original error was "%s".' % (bbox, x, e))
            raise AssertionError(msg)

    # Make template of the form '%.5f,%.5f,%.5f,%.5f'
    template = (('%%.%if,' % decimals) * 4)[:-1]

    # Assign numbers and return
    return template % tuple(bbox)
def get_bounding_box(filename):
    """Get bounding box for specified raster or vector file

    Input:
        filename: Path to a spatial layer readable by safe.api.read_layer

    Output:
        bounding box as python list of numbers [West, South, East, North]
    """
    # Delegate reading and bbox computation to the safe layer abstraction
    layer = read_layer(filename)
    return layer.get_bounding_box()
from __future__ import division
from django.db import models
from django.contrib.auth.models import User
from pygments import highlight
from pygments.lexers import PythonLexer
from pygments.formatters import HtmlFormatter
import datetime
class Calculation(models.Model):
    """Calculation model

    Records one impact-calculation run: who ran it, when, how long it took,
    which hazard/exposure layers and impact function were used, and any
    errors produced.
    """
    user = models.ForeignKey(User)
    # Whether the run completed without errors
    success = models.BooleanField()
    run_date = models.DateTimeField()
    # Elapsed time in seconds; filled in by the pre_save 'duration' handler
    run_duration = models.FloatField()
    impact_function = models.CharField(max_length=255, null=True, blank=True)
    impact_function_source = models.TextField()
    # Source servers and layer names for the two inputs
    exposure_server = models.URLField(null=True, blank=True)
    exposure_layer = models.CharField(max_length=255, null=True, blank=True)
    hazard_server = models.URLField(null=True, blank=True)
    hazard_layer = models.CharField(max_length=255, null=True, blank=True)
    # Bounding box used for the calculation, as a 'W,S,E,N' string
    bbox = models.CharField(max_length=255, null=True, blank=True)
    errors = models.TextField()
    stacktrace = models.TextField(null=True, blank=True)
    # Name of the resulting impact layer
    layer = models.CharField(max_length=255, null=True, blank=True)

    @property
    def url(self):
        # NOTE(review): 'layer' is a CharField, so self.layer.url will fail
        # on a plain string -- confirm a layer object is assigned elsewhere
        return self.layer.url

    def get_absolute_url(self):
        # NOTE(review): same concern as 'url' above
        return self.layer.get_absolute_url()

    def pretty_function_source(self):
        # Syntax-highlighted HTML rendering of the impact function source
        return highlight(self.impact_function_source, PythonLexer(), HtmlFormatter())

    def __unicode__(self):
        if self.success:
            name = 'Sucessful Calculation'
        else:
            name = 'Failed Calculation'
        return '%s at %s' % (name, self.run_date)
class Server(models.Model):
    """A remote GeoServer/OWS endpoint users can pull layers from."""
    name = models.CharField(max_length=255)
    url = models.URLField()

    def __unicode__(self):
        return self.name
class Workspace(models.Model):
    """Per-user collection of servers the user works with."""
    user = models.ForeignKey(User)
    servers = models.ManyToManyField(Server)

    def __unicode__(self):
        return self.user.username
def duration(sender, **kwargs):
    """pre_save signal handler that stamps ``run_duration`` (in seconds,
    rounded to 2 decimals) as the elapsed time between the instance's
    ``run_date`` and now.

    :param sender: Model class sending the signal (unused)
    :param kwargs: Signal kwargs; 'instance' is the object being saved
    """
    instance = kwargs['instance']
    now = datetime.datetime.now()
    td = now - instance.run_date
    # timedelta.total_seconds() (Python 2.7+) is exactly the manual
    # microseconds/seconds/days arithmetic the original spelled out.
    instance.run_duration = round(td.total_seconds(), 2)
models.signals.pre_save.connect(duration, sender=Calculation) | /safe-geonode-0.2.7.tar.gz/safe-geonode-0.2.7/safe_geonode/models.py | 0.703244 | 0.167253 | models.py | pypi |
import logging
import urllib.request
import urllib.error
import urllib.parse
import socket
from urllib.parse import urlencode
logger = logging.getLogger(__name__)
class HttpLoadError(RuntimeError):
    """Base error raised when an HTTP load ultimately fails."""


class Http404(HttpLoadError):
    """The server answered with HTTP status 404."""


class HttpFailedRepeatedly(HttpLoadError):
    """Every retry attempt failed."""
def transmit(url, data=None, max_tries=10, timeout=60, error_sleep_time=4, data_dict=None):
    """
    Perform a safe HTTP call, or raise a HttpLoadError.

    The HttpLoadError will probably come in one of it's subclasses,
    Http404 and HttpFailedRepeatedly

    :param url: Example: "http://something.com/"
    :type url: str
    :param data: Data to be posted
    :type data: str
    :param max_tries: How many times should we retry?
    :type max_tries: int
    :param timeout: How long should we wait for each try?
    :type timeout: float
    :param error_sleep_time: How long should we wait before retrying after a failure
    :type error_sleep_time: float
    :param data_dict: Alternative to passing a string for "data"
    :type data_dict: dict
    :return: The body of the HTTP transmission result
    :rtype: bytes
    :raises ValueError: if both data and data_dict are given
    """
    import time

    logger.debug("Loading {0}".format(url))
    if data_dict:
        if data:
            raise ValueError("Cannot pass data_dict and data")
        data = urlencode(data_dict).encode()

    attempt = 0
    while True:
        attempt += 1
        try:
            if timeout is not None:
                rh = urllib.request.urlopen(url, data, timeout=timeout)
            else:
                rh = urllib.request.urlopen(url, data)
            # The context manager guarantees the response handle is closed
            # even if read() raises (the original leaked it in that case).
            with rh:
                return rh.read()
        except (urllib.error.HTTPError, socket.error, urllib.error.URLError) as e:
            logger.warning("Couldn't load {0}. Got this error: {1}".format(url, e))
            if getattr(e, 'code', '') == 404:
                # Carry the underlying error text for easier debugging
                raise Http404(str(e))
            if attempt >= max_tries:
                raise HttpFailedRepeatedly(
                    "Couldn't load {0}. Got this error: {1}".format(url, e))
            time.sleep(error_sleep_time)
import ast
import inspect
from safeparser.plugins import PluginStore
from safeparser.safe_env import SafeEnv
class ParserException(Exception):
    """Raised for any syntax or safety violation found while parsing."""
class EnvironmentInjector(ast.NodeVisitor):
    """AST pass that appends an ``env=__env__`` keyword argument to every
    call of a registered plugin whose implementation declares a keyword-only
    ``env`` parameter."""

    def __init__(self, plugin_store):
        self.plugin_store = plugin_store

    def visit_Call(self, node):
        """Inject ``env=__env__`` into this call (if the plugin wants it)
        and into any plugin calls nested in its arguments."""
        # Recurse first so plugin calls nested inside arguments, e.g.
        # f(g(1)), are also processed; without this NodeVisitor stops at
        # the outermost call and nested plugins never receive env.
        self.generic_visit(node)

        identifier = node.func.id
        if not self.plugin_store.has(identifier):
            return
        plugin = self.plugin_store.get(identifier)
        if not callable(plugin):
            return
        kwonlyargs = inspect.getfullargspec(plugin).kwonlyargs
        if 'env' in kwonlyargs:
            # __env__ is placed into the evaluation globals by the Parser
            env_node = ast.Name('__env__', ast.Load())
            env_node.lineno = 0
            env_node.col_offset = 0
            node.keywords.append(
                ast.keyword(arg='env', value=env_node)
            )
class SafeCodeValidator(ast.NodeVisitor):
    """Walks a parsed module and raises ParserException for any construct
    outside the allowed subset: single-target assignments to fresh names
    and bare plugin-style calls over literal arguments."""

    def __init__(self, plugin_store):
        self.plugin_store = plugin_store

    def visit_Module(self, node):
        for stmt in node.body:
            self.visit(stmt)

    def visit_Expr(self, node):
        # Only bare calls are allowed as expression statements
        if not isinstance(node.value, ast.Call):
            raise ParserException(
                f'l.{node.lineno}: Illegal syntax'
            )
        self.visit_Call(node.value)

    def visit_Call(self, node):
        # Only simple-name calls (no attribute/subscript callees)
        if not isinstance(node.func, ast.Name):
            raise ParserException(
                f'l.{node.lineno}: Illegal syntax'
            )
        for arg in node.args:
            self.visit(arg)
        for kw in node.keywords:
            self.visit(kw.value)

    def visit_Assign(self, node):
        if len(node.targets) > 1:
            # a = b = c
            raise ParserException(
                f'l.{node.lineno}: Illegal syntax'
            )
        if not isinstance(node.targets[0], ast.Name):
            # a, b = c
            raise ParserException(
                f'l.{node.lineno}: Illegal syntax'
            )

        identifier = node.targets[0].id
        if self.plugin_store.has(identifier):
            # Disallow overwriting plugins
            raise ParserException(
                f'l.{node.lineno}: Illegal assignment into plugin {identifier}'
            )
        if identifier.startswith('__') and identifier.endswith('__'):
            # Disallow variables that start and end with double underscores.
            # This is to ensure that __builtins__, __env__ and other future
            # implementation specific features do not collide with user-defined
            # input
            raise ParserException(
                f'l.{node.lineno}: Illegal assignment into double-underscore variable {identifier}'
            )
        self.visit(node.value)

    def visit_List(self, node):
        for elt in node.elts:
            self.visit(elt)

    def visit_Set(self, node):
        for elt in node.elts:
            self.visit(elt)

    def visit_Dict(self, node):
        for key in node.keys:
            self.visit(key)
        for val in node.values:
            self.visit(val)

    def visit_Constant(self, node):
        # Python 3.8+ parses literals as ast.Constant; without this handler
        # NodeVisitor falls back to the deprecated visit_Num/visit_Str shim
        # (and would break entirely once that shim is removed).
        pass

    # Pre-3.8 literal nodes, kept for backwards compatibility
    def visit_Num(self, node):
        pass

    def visit_NameConstant(self, node):
        pass

    def visit_Str(self, node):
        pass

    def visit_Name(self, node):
        pass

    def generic_visit(self, node):
        # Anything not explicitly whitelisted above is rejected
        raise ParserException(
            f'l.{node.lineno}: Illegal syntax'
        )
class Parser:
    """
    Safely evaluates a restricted configuration language: a sequence of
    ``name = expression`` assignments and bare plugin calls. The AST is
    validated by ``SafeCodeValidator`` before anything is executed, and
    expressions are evaluated with empty ``__builtins__`` so no Python
    builtins leak into user code.
    """

    def __init__(self, *, env=None, plugin_store=None):
        if env is None:
            env = {}
        if plugin_store is None:
            plugin_store = PluginStore()
        self.env = env  # name -> value, filled during parse()
        self.plugin_store = plugin_store

    def parse(self, content):
        """Validate and execute ``content`` (a string or a file-like
        object) and return the resulting environment dict.

        Raises ParserException for syntax errors or disallowed constructs.
        """
        content = self.prepare_content(content)
        try:
            root = ast.parse(content, filename='')
        except SyntaxError as e:
            raise ParserException(e)
        SafeCodeValidator(self.plugin_store).visit(root)
        # Since we're using python's eval function to actually evaluate
        # expressions, we must ensure that no builtin python functions leak into
        # the environment. Also, there are other important preparations that
        # must be executed to the environment before actually evaluating the
        # content
        self.prepare_environment()
        try:
            self.execute(root)
        finally:
            # We do not want to report back the internal variables in the
            # environment; as such, we remove them here before the user has the
            # possibility of examining the environment
            self.strip_environment()
        return self.env

    def prepare_content(self, content):
        # Accept either a plain string or anything with a .read() method
        if isinstance(content, str):
            return content
        try:
            return content.read()
        except:
            raise ParserException(
                f'Cannot read the contents of a {type(content)} variable'
            )

    def prepare_environment(self):
        # Empty __builtins__ prevents eval() from exposing Python builtins;
        # __env__ gives plugins controlled access to the environment
        self.env['__builtins__'] = {}
        self.env['__env__'] = SafeEnv(self.env)

    def strip_environment(self):
        # Remove the internals installed by prepare_environment()
        del self.env['__builtins__']
        del self.env['__env__']

    def execute(self, root):
        # The validator guarantees only Assign and Expr statements remain,
        # but keep the defensive check
        for stmt in root.body:
            if isinstance(stmt, ast.Assign):
                self.execute_assign(stmt)
            elif isinstance(stmt, ast.Expr):
                self.execute_expr(stmt)
            else:
                raise ParserException(
                    f'l.{stmt.lineno}: Illegal syntax'
                )

    def execute_assign(self, stmt):
        identifier = stmt.targets[0].id
        if identifier in self.env:
            # Disallow overwriting variables
            raise ParserException(
                f'l.{stmt.lineno}: Illegal assignment into existing variable {identifier}'
            )
        self.env[identifier] = self.evaluate_expr(stmt.value)

    def execute_expr(self, stmt):
        return self.evaluate_expr(stmt.value)

    def evaluate_expr(self, expr):
        # Report a call to an unregistered plugin before evaluating anything
        if isinstance(expr, ast.Call):
            if not self.plugin_store.has(expr.func.id):
                raise ParserException(
                    f'l.{expr.lineno}: Unknown plugin {expr.func.id}'
                )
        try:
            # Evaluation globals combine user variables with the plugins
            globals = {}
            globals.update(self.env)
            globals.update(self.plugin_store.plugins)
            return eval(self.compile_expr(expr), globals)
        except NameError as ex:
            raise ParserException(ex)

    def compile_expr(self, expr):
        # Inject env= keyword arguments where plugins expect them, then
        # compile the expression AST to a code object for eval()
        EnvironmentInjector(self.plugin_store).visit(expr)
        code = ast.Expression(body=expr)
        code.lineno = expr.lineno
        code.col_offset = expr.col_offset
        return compile(code, '<string>', 'eval')
import warnings
from secrets import token_bytes
from typing import Tuple, Union
import eth_abi
from eth._utils.address import generate_contract_address
from eth_keys import keys
from eth_typing import AnyAddress, ChecksumAddress, HexStr
from eth_utils import to_normalized_address
from hexbytes import HexBytes
from sha3 import keccak_256
def fast_keccak(value: bytes) -> bytes:
    """
    Calculates ethereum keccak256 using fast library `pysha3`
    :param value:
    :return: Keccak256 used by ethereum as `bytes`
    """
    hasher = keccak_256(value)
    return hasher.digest()
def fast_keccak_hex(value: bytes) -> HexStr:
    """
    Same as `fast_keccak`, but it's a little more optimal calling `hexdigest()`
    than calling `digest()` and then `hex()`
    :param value:
    :return: Keccak256 used by ethereum as an hex string (not 0x prefixed)
    """
    hex_digest = keccak_256(value).hexdigest()
    return HexStr(hex_digest)
def _build_checksum_address(
    norm_address: HexStr, address_hash: HexStr
) -> ChecksumAddress:
    """Apply EIP-55 casing to a normalized address.

    https://github.com/ethereum/EIPs/blob/master/EIPS/eip-55.md

    :param norm_address: address in lowercase (not 0x prefixed)
    :param address_hash: keccak256 of `norm_address` (not 0x prefixed)
    :return: checksummed address with ``0x`` prefix
    """
    checksummed_chars = []
    for offset in range(40):
        char = norm_address[offset]
        # EIP-55: uppercase the hex digit when the matching hash nibble >= 8
        if int(address_hash[offset], 16) > 7:
            char = char.upper()
        checksummed_chars.append(char)
    return ChecksumAddress("0x" + "".join(checksummed_chars))
def fast_to_checksum_address(value: Union[AnyAddress, str, bytes]) -> ChecksumAddress:
    """Convert any address representation to an EIP-55 checksummed address.

    Uses the more optimal ``pysha3`` instead of ``eth_utils`` for the
    keccak256 calculation.
    """
    normalized = to_normalized_address(value)[2:]  # strip the '0x' prefix
    hashed = fast_keccak_hex(normalized.encode())
    return _build_checksum_address(normalized, hashed)
def fast_bytes_to_checksum_address(value: bytes) -> ChecksumAddress:
    """
    Converts to checksum_address. Uses more optimal `pysha3` instead of `eth_utils` for keccak256 calculation.
    As input is already in bytes, some checks and conversions can be skipped, providing a speedup of ~50%

    :param value: 20-byte raw address
    :return: EIP-55 checksummed address
    :raises ValueError: if ``value`` is not exactly 20 bytes long
    """
    if len(value) != 20:
        # Fix: the original message contained a bare `%s` placeholder that was
        # never interpolated; include the offending value in the error
        raise ValueError(
            "Cannot convert %s to a checksum address, 20 bytes were expected"
            % value.hex()
        )
    norm_address = bytes(value).hex()
    address_hash = fast_keccak_hex(norm_address.encode())
    return _build_checksum_address(norm_address, address_hash)
def fast_is_checksum_address(value: Union[AnyAddress, str, bytes]) -> bool:
    """Fast check of whether ``value`` is a valid EIP-55 checksummed address.

    :param value:
    :return: `True` if checksummed, `False` otherwise
    """
    looks_like_address = (
        isinstance(value, str) and len(value) == 42 and value.startswith("0x")
    )
    if not looks_like_address:
        return False
    try:
        # Valid iff re-checksumming reproduces the exact same string
        return value == fast_to_checksum_address(value)
    except ValueError:
        return False
def get_eth_address_with_key() -> Tuple[str, bytes]:
    """Generate a random account as ``(checksummed_address, private_key_bytes)``."""
    private_key = keys.PrivateKey(token_bytes(32))
    return private_key.public_key.to_checksum_address(), private_key.to_bytes()
def get_eth_address_with_invalid_checksum() -> str:
    """Return a random address whose EIP-55 checksum casing is deliberately wrong.

    Every hex letter has its case flipped (digits are unaffected).
    """
    address, _ = get_eth_address_with_key()
    flipped = (c.upper() if c.islower() else c.lower() for c in address[2:])
    return "0x" + "".join(flipped)
def decode_string_or_bytes32(data: bytes) -> str:
    """Decode an ABI-encoded ``string``, falling back to ``bytes32``.

    Some contracts return text fields as ``bytes32`` instead of ``string``;
    such payloads make the ``string`` decoder raise ``OverflowError``, so we
    retry as ``bytes32`` and strip the trailing zero padding.
    """
    try:
        return eth_abi.decode(["string"], data)[0]
    except OverflowError:
        raw_name = eth_abi.decode(["bytes32"], data)[0]
        zero_index = raw_name.find(b"\x00")
        if zero_index == -1:
            return raw_name.decode()
        return raw_name[:zero_index].decode()
def remove_swarm_metadata(code: bytes) -> bytes:
    """
    Remove swarm metadata from Solidity bytecode

    :param code: deployed bytecode
    :return: Code without metadata
    :raises ValueError: if no swarm marker is found in ``code``
    """
    marker = b"\xa1\x65bzzr0"
    # Use the LAST occurrence: the metadata trailer sits at the end of the code
    marker_position = code.rfind(marker)
    if marker_position < 0:
        raise ValueError("Swarm metadata not found in code %s" % code.hex())
    return code[:marker_position]
def compare_byte_code(code_1: bytes, code_2: bytes) -> bool:
    """
    Compare code, removing swarm metadata if necessary

    :param code_1:
    :param code_2:
    :return: True if same code, False otherwise
    """
    if code_1 == code_2:
        return True

    def _strip(code: bytes) -> bytes:
        # Fall back to the raw code when no swarm marker is present
        try:
            return remove_swarm_metadata(code)
        except ValueError:
            return code

    return _strip(code_1) == _strip(code_2)
def mk_contract_address(address: Union[str, bytes], nonce: int) -> ChecksumAddress:
    """
    Generate expected contract address when using EVM CREATE

    :param address: deployer address
    :param nonce: deployer account nonce at deployment time
    :return: checksummed address of the contract that would be deployed
    """
    raw_address = generate_contract_address(HexBytes(address), nonce)
    return fast_to_checksum_address(raw_address)
def mk_contract_address_2(
    from_: Union[str, bytes], salt: Union[str, bytes], init_code: Union[str, bytes]
) -> ChecksumAddress:
    """
    Generate expected contract address when using EVM CREATE2.

    :param from_: The address which is creating this new address (need to be 20 bytes)
    :param salt: A salt (32 bytes)
    :param init_code: A init code of the contract being created
    :return: Address of the new contract
    """
    from_, salt, init_code = (HexBytes(x) for x in (from_, salt, init_code))
    assert len(from_) == 20, f"Address {from_.hex()} is not valid. Must be 20 bytes"
    assert len(salt) == 32, f"Salt {salt.hex()} is not valid. Must be 32 bytes"
    assert len(init_code) > 0, f"Init code {init_code.hex()} is not valid"
    # keccak256(0xff ++ from ++ salt ++ keccak256(init_code)); address = last 20 bytes
    digest = fast_keccak(HexBytes("ff") + from_ + salt + fast_keccak(init_code))
    return fast_bytes_to_checksum_address(digest[12:])
def generate_address_2(
    from_: Union[str, bytes], salt: Union[str, bytes], init_code: Union[str, bytes]
) -> ChecksumAddress:
    """
    .. deprecated:: use mk_contract_address_2

    :param from_:
    :param salt:
    :param init_code:
    :return:
    """
    warnings.warn(
        "`generate_address_2` is deprecated, use `mk_contract_address_2`",
        DeprecationWarning,
        stacklevel=2,  # fix: attribute the warning to the caller, not this wrapper
    )
    return mk_contract_address_2(from_, salt, init_code)
import logging
from dataclasses import dataclass
from typing import Any, List, Optional, Sequence, Tuple
import eth_abi
from eth_abi.exceptions import DecodingError
from eth_account.signers.local import LocalAccount
from eth_typing import BlockIdentifier, BlockNumber, ChecksumAddress
from hexbytes import HexBytes
from web3 import Web3
from web3._utils.abi import map_abi_data
from web3._utils.normalizers import BASE_RETURN_NORMALIZERS
from web3.contract.contract import ContractFunction
from web3.exceptions import ContractLogicError
from . import EthereumClient, EthereumNetwork, EthereumNetworkNotSupported
from .abis.multicall import multicall_v3_abi, multicall_v3_bytecode
from .ethereum_client import EthereumTxSent
from .exceptions import BatchCallFunctionFailed
logger = logging.getLogger(__name__)
@dataclass
class MulticallResult:
    """Raw result of one Multicall subcall: success flag plus the
    ABI-encoded return payload (``None`` when no data was returned)."""

    success: bool
    return_data: Optional[bytes]
@dataclass
class MulticallDecodedResult:
    """Result of one Multicall subcall with the return payload already
    ABI-decoded into Python values (or left raw when the call failed)."""

    success: bool
    return_data_decoded: Optional[Any]
class Multicall:
    """
    Client for the Multicall contract (https://github.com/mds1/multicall),
    which batches multiple constant contract calls into a single ``eth_call``
    RPC round trip.
    """

    # https://github.com/mds1/multicall#deployments
    ADDRESSES = {
        EthereumNetwork.MAINNET: "0xcA11bde05977b3631167028862bE2a173976CA11",
        EthereumNetwork.GOERLI: "0xcA11bde05977b3631167028862bE2a173976CA11",
        EthereumNetwork.PULSECHAIN_MAINNET: "0xcA11bde05977b3631167028862bE2a173976CA11",
        EthereumNetwork.PULSECHAIN_TESTNET: "0xcA11bde05977b3631167028862bE2a173976CA11",
    }

    def __init__(
        self,
        ethereum_client: EthereumClient,
        multicall_contract_address: Optional[ChecksumAddress] = None,
    ):
        """
        :param ethereum_client:
        :param multicall_contract_address: custom Multicall contract address;
            if not provided, the known deployment for the connected network
            (or the deterministic Multicall V3 address) is used
        :raises EthereumNetworkNotSupported: if no Multicall contract is
            deployed on the connected network
        """
        self.ethereum_client = ethereum_client
        self.w3 = ethereum_client.w3
        ethereum_network = ethereum_client.get_network()
        address = multicall_contract_address or self.ADDRESSES.get(ethereum_network)
        if not address:
            # Try with Multicall V3 deterministic address
            address = self.ADDRESSES.get(EthereumNetwork.MAINNET)
            if not ethereum_client.is_contract(address):
                raise EthereumNetworkNotSupported(
                    "Multicall contract not available for %s", ethereum_network.name
                )
        self.contract = self.get_contract(self.w3, address)

    def get_contract(self, w3: Web3, address: Optional[ChecksumAddress] = None):
        """
        Build a Web3 contract instance with the Multicall V3 ABI/bytecode.

        :param w3:
        :param address: deployed contract address (``None`` for a
            not-yet-deployed instance, e.g. for ``deploy_contract``)
        """
        return w3.eth.contract(
            address, abi=multicall_v3_abi, bytecode=multicall_v3_bytecode
        )

    @classmethod
    def deploy_contract(
        cls, ethereum_client: EthereumClient, deployer_account: LocalAccount
    ) -> EthereumTxSent:
        """
        Deploy contract

        :param ethereum_client:
        :param deployer_account: Ethereum Account
        :return: deployed contract address
        """
        # NOTE(review): `get_contract` is an instance method; passing `cls` as
        # `self` works because `self` is unused there, but a staticmethod
        # would be cleaner — confirm before refactoring
        contract = cls.get_contract(cls, ethereum_client.w3)
        tx = contract.constructor().build_transaction(
            {"from": deployer_account.address}
        )
        tx_hash = ethereum_client.send_unsigned_transaction(
            tx, private_key=deployer_account.key
        )
        tx_receipt = ethereum_client.get_transaction_receipt(tx_hash, timeout=120)
        assert tx_receipt and tx_receipt["status"]
        contract_address = tx_receipt["contractAddress"]
        logger.info(
            "Deployed Multicall V2 Contract %s by %s",
            contract_address,
            deployer_account.address,
        )
        # Add address to addresses dictionary so later instances reuse it
        cls.ADDRESSES[ethereum_client.get_network()] = contract_address
        return EthereumTxSent(tx_hash, tx, contract_address)

    @staticmethod
    def _build_payload(
        contract_functions: Sequence[ContractFunction],
    ) -> Tuple[List[Tuple[ChecksumAddress, bytes]], List[List[Any]]]:
        """
        Build the ``(target, callData)`` pairs and the matching list of ABI
        output types for every provided contract function.
        """
        targets_with_data = []
        output_types = []
        for contract_function in contract_functions:
            targets_with_data.append(
                (
                    contract_function.address,
                    HexBytes(contract_function._encode_transaction_data()),
                )
            )
            output_types.append(
                [output["type"] for output in contract_function.abi["outputs"]]
            )
        return targets_with_data, output_types

    def _build_payload_same_function(
        self,
        contract_function: ContractFunction,
        contract_addresses: Sequence[ChecksumAddress],
    ) -> Tuple[List[Tuple[ChecksumAddress, bytes]], List[List[Any]]]:
        """
        Same as ``_build_payload`` but encodes the call data only once and
        reuses it for every target contract address.
        """
        targets_with_data = []
        output_types = []
        tx_data = HexBytes(contract_function._encode_transaction_data())
        for contract_address in contract_addresses:
            targets_with_data.append((contract_address, tx_data))
            output_types.append(
                [output["type"] for output in contract_function.abi["outputs"]]
            )
        return targets_with_data, output_types

    def _decode_data(self, output_type: Sequence[str], data: bytes) -> Optional[Any]:
        """
        :param output_type: ABI output types of the called function
        :param data: raw ABI-encoded return data
        :return: decoded value (unwrapped when there is a single output);
            ``None`` when ``data`` is empty; the raw ``data`` unchanged when
            it cannot be ABI-decoded (a warning is logged)
        """
        if data:
            try:
                decoded_values = eth_abi.decode(output_type, data)
                normalized_data = map_abi_data(
                    BASE_RETURN_NORMALIZERS, output_type, decoded_values
                )
                if len(normalized_data) == 1:
                    return normalized_data[0]
                else:
                    return normalized_data
            except DecodingError:
                logger.warning(
                    "Cannot decode %s using output-type %s", data, output_type
                )
                return data

    def _aggregate(
        self,
        targets_with_data: Sequence[Tuple[ChecksumAddress, bytes]],
        block_identifier: Optional[BlockIdentifier] = "latest",
    ) -> Tuple[BlockNumber, List[Optional[Any]]]:
        """
        :param targets_with_data: List of target `addresses` and `data` to be called in each Contract
        :param block_identifier:
        :return:
        :raises: BatchCallFunctionFailed
        """
        aggregate_parameter = [
            {"target": target, "callData": data} for target, data in targets_with_data
        ]
        try:
            return self.contract.functions.aggregate(aggregate_parameter).call(
                block_identifier=block_identifier
            )
        except (ContractLogicError, OverflowError):
            raise BatchCallFunctionFailed

    def aggregate(
        self,
        contract_functions: Sequence[ContractFunction],
        block_identifier: Optional[BlockIdentifier] = "latest",
    ) -> Tuple[BlockNumber, List[Optional[Any]]]:
        """
        Calls ``aggregate`` on MakerDAO's Multicall contract. If a function called raises an error execution is stopped

        :param contract_functions:
        :param block_identifier:
        :return: A tuple with the ``blockNumber`` and a list with the decoded return values
        :raises: BatchCallFunctionFailed
        """
        targets_with_data, output_types = self._build_payload(contract_functions)
        block_number, results = self._aggregate(
            targets_with_data, block_identifier=block_identifier
        )
        decoded_results = [
            self._decode_data(output_type, data)
            for output_type, data in zip(output_types, results)
        ]
        return block_number, decoded_results

    def _try_aggregate(
        self,
        targets_with_data: Sequence[Tuple[ChecksumAddress, bytes]],
        require_success: bool = False,
        block_identifier: Optional[BlockIdentifier] = "latest",
    ) -> List[MulticallResult]:
        """
        Calls ``try_aggregate`` on MakerDAO's Multicall contract.

        :param targets_with_data:
        :param require_success: If ``True``, an exception in any of the functions will stop the execution. Also, an
            invalid decoded value will stop the execution
        :param block_identifier:
        :return: A list with the decoded return values
        """
        aggregate_parameter = [
            {"target": target, "callData": data} for target, data in targets_with_data
        ]
        try:
            result = self.contract.functions.tryAggregate(
                require_success, aggregate_parameter
            ).call(block_identifier=block_identifier)

            if require_success and b"" in (data for _, data in result):
                # `b''` values are decoding errors/missing contracts/missing functions
                raise BatchCallFunctionFailed

            return [
                MulticallResult(success, data if data else None)
                for success, data in result
            ]
        except (ContractLogicError, OverflowError, ValueError):
            raise BatchCallFunctionFailed

    def try_aggregate(
        self,
        contract_functions: Sequence[ContractFunction],
        require_success: bool = False,
        block_identifier: Optional[BlockIdentifier] = "latest",
    ) -> List[MulticallDecodedResult]:
        """
        Calls ``try_aggregate`` on MakerDAO's Multicall contract.

        :param contract_functions:
        :param require_success: If ``True``, an exception in any of the functions will stop the execution
        :param block_identifier:
        :return: A list with the decoded return values
        """
        targets_with_data, output_types = self._build_payload(contract_functions)
        results = self._try_aggregate(
            targets_with_data,
            require_success=require_success,
            block_identifier=block_identifier,
        )
        return [
            MulticallDecodedResult(
                multicall_result.success,
                self._decode_data(output_type, multicall_result.return_data)
                if multicall_result.success
                else multicall_result.return_data,
            )
            for output_type, multicall_result in zip(output_types, results)
        ]

    def try_aggregate_same_function(
        self,
        contract_function: ContractFunction,
        contract_addresses: Sequence[ChecksumAddress],
        require_success: bool = False,
        block_identifier: Optional[BlockIdentifier] = "latest",
    ) -> List[MulticallDecodedResult]:
        """
        Calls ``try_aggregate`` on MakerDAO's Multicall contract. Reuse same function with multiple contract addresses.
        It's more optimal due to instantiating ``ContractFunction`` objects is very demanding

        :param contract_function:
        :param contract_addresses:
        :param require_success: If ``True``, an exception in any of the functions will stop the execution
        :param block_identifier:
        :return: A list with the decoded return values
        """
        targets_with_data, output_types = self._build_payload_same_function(
            contract_function, contract_addresses
        )
        results = self._try_aggregate(
            targets_with_data,
            require_success=require_success,
            block_identifier=block_identifier,
        )
        return [
            MulticallDecodedResult(
                multicall_result.success,
                self._decode_data(output_type, multicall_result.return_data)
                if multicall_result.success
                else multicall_result.return_data,
            )
            for output_type, multicall_result in zip(output_types, results)
        ]
from typing import Any, Dict, List, Optional
from urllib.parse import urljoin
import requests
from .. import EthereumNetwork
from ..utils import fast_is_checksum_address
from .contract_metadata import ContractMetadata
class Sourcify:
    """
    Get contract metadata from Sourcify. Matches can be full or partial:

    - Full: Both the source files as well as the meta data files were an exact match between the deployed bytecode
      and the published files.
    - Partial: Source code compiles to the same bytecode and thus the contract behaves in the same way,
      but the source code can be different: Variables can have misleading names,
      comments can be different and especially the NatSpec comments could have been modified.
    """

    def __init__(
        self,
        network: EthereumNetwork = EthereumNetwork.MAINNET,
        base_url: str = "https://repo.sourcify.dev/",
    ):
        """
        :param network: Ethereum network whose verified contracts are queried
        :param base_url: Sourcify repository endpoint
        """
        self.network = network
        self.base_url = base_url
        self.http_session = requests.session()

    def _get_abi_from_metadata(self, metadata: Dict[str, Any]) -> List[Dict[str, Any]]:
        """Extract the contract ABI from a Sourcify metadata document."""
        return metadata["output"]["abi"]

    def _get_name_from_metadata(self, metadata: Dict[str, Any]) -> Optional[str]:
        """Return the contract name (first compilation target) if present."""
        values = list(metadata["settings"].get("compilationTarget", {}).values())
        if values:
            return values[0]

    def _do_request(self, url: str) -> Optional[Dict[str, Any]]:
        """GET ``url`` and return the parsed JSON body, or ``None`` on a non-2xx response."""
        response = self.http_session.get(url, timeout=10)
        if not response.ok:
            return None

        return response.json()

    def get_contract_metadata(
        self, contract_address: str
    ) -> Optional[ContractMetadata]:
        """
        :param contract_address: checksummed contract address
        :return: contract metadata — full matches take precedence over
            partial ones — or ``None`` if the contract is not verified
        """
        assert fast_is_checksum_address(
            contract_address
        ), "Expecting a checksummed address"

        for match_type in ("full_match", "partial_match"):
            url = urljoin(
                self.base_url,
                f"/contracts/{match_type}/{self.network.value}/{contract_address}/metadata.json",
            )
            metadata = self._do_request(url)
            if metadata:
                abi = self._get_abi_from_metadata(metadata)
                name = self._get_name_from_metadata(metadata)
                return ContractMetadata(name, abi, match_type == "partial_match")
        return None
import binascii
from typing import Optional, Union
from django.core import exceptions
from django.db import models
from django.utils.translation import gettext_lazy as _
from eth_typing import ChecksumAddress
from eth_utils import to_normalized_address
from hexbytes import HexBytes
from ..utils import fast_bytes_to_checksum_address, fast_to_checksum_address
from .forms import EthereumAddressFieldForm, HexFieldForm, Keccak256FieldForm
from .validators import validate_checksumed_address
try:
from django.db import DefaultConnectionProxy
connection = DefaultConnectionProxy()
except ImportError:
from django.db import connections
connection = connections["default"]
class EthereumAddressField(models.CharField):
    """Deprecated CharField storing an EIP-55 checksummed address as text."""

    default_validators = [validate_checksumed_address]
    description = "DEPRECATED. Use `EthereumAddressV2Field`. Ethereum address (EIP55)"
    default_error_messages = {
        "invalid": _('"%(value)s" value must be an EIP55 checksummed address.'),
    }

    def __init__(self, *args, **kwargs):
        # '0x' prefix + 40 hex characters
        kwargs["max_length"] = 42
        super().__init__(*args, **kwargs)

    def deconstruct(self):
        # max_length is fixed above, so keep it out of migrations
        name, path, args, kwargs = super().deconstruct()
        del kwargs["max_length"]
        return name, path, args, kwargs

    def from_db_value(self, value, expression, connection):
        return self.to_python(value)

    def to_python(self, value):
        """Checksum the address, raising ``ValidationError`` if it is not valid."""
        value = super().to_python(value)
        if value:
            try:
                return fast_to_checksum_address(value)
            except ValueError:
                raise exceptions.ValidationError(
                    self.error_messages["invalid"],
                    code="invalid",
                    params={"value": value},
                )
        else:
            return value

    def get_prep_value(self, value):
        value = super().get_prep_value(value)
        return self.to_python(value)
class EthereumAddressV2Field(models.Field):
    """Stores an Ethereum address as a 20-byte binary column, exposing it
    as an EIP-55 checksummed string on the Python side."""

    default_validators = [validate_checksumed_address]
    description = "Ethereum address (EIP55)"
    default_error_messages = {
        "invalid": _('"%(value)s" value must be an EIP55 checksummed address.'),
    }

    def get_internal_type(self):
        return "BinaryField"

    def from_db_value(
        self, value: memoryview, expression, connection
    ) -> Optional[ChecksumAddress]:
        if value:
            return fast_bytes_to_checksum_address(value)

    def get_prep_value(self, value: ChecksumAddress) -> Optional[bytes]:
        """Convert a checksummed address into the raw 20 bytes stored in the DB."""
        if value:
            try:
                return HexBytes(to_normalized_address(value))
            except (TypeError, ValueError):
                raise exceptions.ValidationError(
                    self.error_messages["invalid"],
                    code="invalid",
                    params={"value": value},
                )

    def to_python(self, value) -> Optional[ChecksumAddress]:
        if value is not None:
            try:
                return fast_to_checksum_address(value)
            except ValueError:
                raise exceptions.ValidationError(
                    self.error_messages["invalid"],
                    code="invalid",
                    params={"value": value},
                )

    def formfield(self, **kwargs):
        defaults = {
            "form_class": EthereumAddressFieldForm,
            "max_length": 2 + 40,  # '0x' prefix + 40 hex characters
        }
        defaults.update(kwargs)
        return super().formfield(**defaults)
class Uint256Field(models.DecimalField):
    """
    Field to store ethereum uint256 values. Uses Decimal db type without decimals to store
    in the database, but retrieve as `int` instead of `Decimal` (https://docs.python.org/3/library/decimal.html)
    """

    description = _("Ethereum uint256 number")

    def __init__(self, *args, **kwargs):
        kwargs["max_digits"] = 79  # 2 ** 256 is 78 digits
        kwargs["decimal_places"] = 0
        super().__init__(*args, **kwargs)

    def deconstruct(self):
        # Both options are fixed above, so keep them out of migrations
        name, path, args, kwargs = super().deconstruct()
        del kwargs["max_digits"]
        del kwargs["decimal_places"]
        return name, path, args, kwargs

    def from_db_value(self, value, expression, connection):
        # Return plain `int` instead of `Decimal`
        if value is None:
            return value
        return int(value)
class HexField(models.CharField):
    """
    Field to store hex values (without 0x). Returns hex with 0x prefix.
    On Database side a CharField is used.
    """

    description = "Stores a hex value into a CharField. DEPRECATED, use a BinaryField"

    def from_db_value(self, value, expression, connection):
        return self.to_python(value)

    def to_python(self, value):
        # Normalize str/bytes/HexBytes input to a '0x'-prefixed hex string
        return value if value is None else HexBytes(value).hex()

    def get_prep_value(self, value):
        if value is None:
            return value
        elif isinstance(value, HexBytes):
            return value.hex()[
                2:
            ]  # HexBytes.hex() retrieves hexadecimal with '0x', remove it
        elif isinstance(value, bytes):
            return value.hex()  # bytes.hex() retrieves hexadecimal without '0x'
        else:  # str
            return HexBytes(value).hex()[2:]

    def formfield(self, **kwargs):
        # We need max_length + 2 on forms because of `0x`
        defaults = {"max_length": self.max_length + 2}
        # TODO: Handle multiple backends with different feature flags.
        if self.null and not connection.features.interprets_empty_strings_as_nulls:
            defaults["empty_value"] = None
        defaults.update(kwargs)
        return super().formfield(**defaults)

    def clean(self, value, model_instance):
        """Validate the value without its '0x' prefix, since validators
        operate on the stored (prefix-less) representation."""
        value = self.to_python(value)
        self.validate(value, model_instance)
        # Validation didn't work because of `0x`
        self.run_validators(value[2:])
        return value
class HexV2Field(models.BinaryField):
    """BinaryField that renders as a '0x'-prefixed hex form field."""

    def formfield(self, **kwargs):
        defaults = {
            "form_class": HexFieldForm,
        }
        defaults.update(kwargs)
        return super().formfield(**defaults)
class Sha3HashField(HexField):
    """Deprecated CharField-backed storage for a 32-byte hash (64 hex chars)."""

    description = "DEPRECATED. Use `Keccak256Field`"

    def __init__(self, *args, **kwargs):
        # 32-byte hash = 64 hex characters (no '0x' stored)
        kwargs["max_length"] = 64
        super().__init__(*args, **kwargs)

    def deconstruct(self):
        # max_length is fixed above, so keep it out of migrations
        name, path, args, kwargs = super().deconstruct()
        del kwargs["max_length"]
        return name, path, args, kwargs
class Keccak256Field(models.BinaryField):
    """Keccak256 hash stored as a 32-byte binary column."""

    description = "Keccak256 hash stored as binary"
    default_error_messages = {
        "invalid": _('"%(value)s" hash must be a 32 bytes hexadecimal.'),
        "length": _('"%(value)s" hash must have exactly 32 bytes.'),
    }

    def _to_bytes(self, value) -> Optional[bytes]:
        """Coerce ``value`` to 32 bytes, raising ``ValidationError`` otherwise."""
        if value is None:
            return
        else:
            try:
                result = HexBytes(value)
                if len(result) != 32:
                    raise exceptions.ValidationError(
                        self.error_messages["length"],
                        code="length",
                        params={"value": value},
                    )
                return result
            except (ValueError, binascii.Error):
                raise exceptions.ValidationError(
                    self.error_messages["invalid"],
                    code="invalid",
                    params={"value": value},
                )

    def from_db_value(
        self, value: memoryview, expression, connection
    ) -> Optional[bytes]:
        # NOTE(review): despite the `bytes` annotation this returns a '0x'
        # hex *str*, while `to_python` returns bytes — confirm this asymmetry
        # is intended before relying on either type
        if value:
            return HexBytes(value.tobytes()).hex()

    def get_prep_value(self, value: Union[bytes, str]) -> Optional[bytes]:
        if value:
            return self._to_bytes(value)

    def value_to_string(self, obj):
        return str(self.value_from_object(obj))

    def to_python(self, value) -> Optional[str]:
        if value is not None:
            try:
                return self._to_bytes(value)
            except (ValueError, binascii.Error):
                raise exceptions.ValidationError(
                    self.error_messages["invalid"],
                    code="invalid",
                    params={"value": value},
                )

    def formfield(self, **kwargs):
        defaults = {
            "form_class": Keccak256FieldForm,
            "max_length": 2 + 64,  # '0x' prefix + 64 hex characters
        }
        defaults.update(kwargs)
        return super().formfield(**defaults)
import binascii
from typing import Any, Optional
from django import forms
from django.core import exceptions
from django.core.exceptions import ValidationError
from django.utils.translation import gettext as _
from hexbytes import HexBytes
from gnosis.eth.utils import fast_is_checksum_address
class EthereumAddressFieldForm(forms.CharField):
    """Form field accepting only EIP-55 checksummed Ethereum addresses."""

    default_error_messages = {
        "invalid": _("Enter a valid checksummed Ethereum Address."),
    }

    def prepare_value(self, value):
        return value

    def to_python(self, value):
        """Return the address unchanged, or ``None`` for empty values;
        raises ``ValidationError`` when the checksum is invalid."""
        value = super().to_python(value)
        if value in self.empty_values:
            return None
        elif not fast_is_checksum_address(value):
            raise ValidationError(self.error_messages["invalid"], code="invalid")
        return value
class HexFieldForm(forms.CharField):
    """Form field converting '0x'-prefixed hex text to ``HexBytes``."""

    default_error_messages = {
        "invalid": _("Enter a valid hexadecimal."),
    }

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Empty input maps to None rather than ''
        self.empty_value = None

    def prepare_value(self, value: memoryview) -> str:
        """Render stored binary as a '0x'-prefixed hex string for display."""
        if value:
            return "0x" + bytes(value).hex()
        else:
            return ""

    def to_python(self, value: Optional[Any]) -> Optional[HexBytes]:
        if value in self.empty_values:
            return self.empty_value
        try:
            if isinstance(value, str):
                value = value.strip()
            return HexBytes(value)
        except (binascii.Error, TypeError, ValueError):
            raise exceptions.ValidationError(
                self.error_messages["invalid"],
                code="invalid",
                params={"value": value},
            )
class Keccak256FieldForm(HexFieldForm):
    """Hex form field additionally enforcing a 32-byte (keccak256) length."""

    default_error_messages = {
        "invalid": _('"%(value)s" is not a valid keccak256 hash.'),
        "length": _('"%(value)s" keccak256 hash should be 32 bytes.'),
    }

    def prepare_value(self, value: str) -> str:
        # Keccak field already returns a hex str
        return value

    def to_python(self, value: Optional[Any]) -> HexBytes:
        value: Optional[HexBytes] = super().to_python(value)
        if value and len(value) != 32:
            raise ValidationError(
                self.error_messages["length"],
                code="length",
                params={"value": value.hex()},
            )
        return value
import functools
import logging
from functools import cached_property
from typing import Optional
from eth_abi.exceptions import DecodingError
from eth_typing import ChecksumAddress
from web3.contract import Contract
from web3.exceptions import Web3Exception
from .. import EthereumClient, EthereumNetwork
from ..constants import NULL_ADDRESS
from ..contracts import get_erc20_contract
from .abis.uniswap_v3 import (
uniswap_v3_factory_abi,
uniswap_v3_pool_abi,
uniswap_v3_router_abi,
)
from .exceptions import CannotGetPriceFromOracle
from .oracles import PriceOracle
from .utils import get_decimals
logger = logging.getLogger(__name__)
class UniswapV3Oracle(PriceOracle):
    """
    Price oracle backed by Uniswap V3 pools, deriving spot prices from the
    pool's ``slot0.sqrtPriceX96`` value.
    """

    # https://docs.uniswap.org/protocol/reference/deployments
    DEFAULT_ROUTER_ADDRESS = "0x68b3465833fb72A70ecDF485E0e4C7bD8665Fc45"
    ROUTER_ADDRESSES = {
        # SwapRouter02
        EthereumNetwork.MAINNET: DEFAULT_ROUTER_ADDRESS,
    }

    # Cache to optimize calculation: https://docs.uniswap.org/sdk/guides/fetching-prices#understanding-sqrtprice
    PRICE_CONVERSION_CONSTANT = 2**192

    def __init__(
        self,
        ethereum_client: EthereumClient,
        uniswap_v3_router_address: Optional[ChecksumAddress] = None,
    ):
        """
        :param ethereum_client:
        :param uniswap_v3_router_address: Provide a custom `SwapRouter02` address
        """
        self.ethereum_client = ethereum_client
        self.w3 = ethereum_client.w3
        self.router_address = uniswap_v3_router_address or self.ROUTER_ADDRESSES.get(
            self.ethereum_client.get_network(), self.DEFAULT_ROUTER_ADDRESS
        )
        self.factory = self.get_factory()

    @classmethod
    def is_available(
        cls,
        ethereum_client: EthereumClient,
        uniswap_v3_router_address: Optional[ChecksumAddress] = None,
    ) -> bool:
        """
        :param ethereum_client:
        :param uniswap_v3_router_address: Provide a custom `SwapRouter02` address
        :return: `True` if Uniswap V3 is available for the EthereumClient provided, `False` otherwise
        """
        router_address = uniswap_v3_router_address or cls.ROUTER_ADDRESSES.get(
            ethereum_client.get_network(), cls.DEFAULT_ROUTER_ADDRESS
        )
        return ethereum_client.is_contract(router_address)

    def get_factory(self) -> Contract:
        """
        Factory contract creates the pools for token pairs

        :return: Uniswap V3 Factory Contract
        :raises ValueError: if the router contract is not deployed
        """
        try:
            factory_address = self.router.functions.factory().call()
        except Web3Exception:
            raise ValueError(
                f"Uniswap V3 Router Contract {self.router_address} does not exist"
            )
        return self.w3.eth.contract(factory_address, abi=uniswap_v3_factory_abi)

    @cached_property
    def router(self) -> Contract:
        """
        Router knows about the `Uniswap Factory` and `Wrapped Eth` addresses for the network

        :return: Uniswap V3 Router Contract
        """
        return self.w3.eth.contract(self.router_address, abi=uniswap_v3_router_abi)

    @cached_property
    def weth_address(self) -> ChecksumAddress:
        """
        :return: Wrapped ether checksummed address
        """
        return self.router.functions.WETH9().call()

    @functools.lru_cache(maxsize=512)
    def get_pool_address(
        self, token_address: str, token_address_2: str, fee: Optional[int] = 3000
    ) -> Optional[ChecksumAddress]:
        """
        Get pool address for tokens with a given fee (by default, 0.3)

        :param token_address:
        :param token_address_2:
        :param fee: Uniswap V3 uses 0.3 as the default fee
        :return: Pool address, or ``None`` if the pool does not exist
        """
        pool_address = self.factory.functions.getPool(
            token_address, token_address_2, fee
        ).call()
        if pool_address == NULL_ADDRESS:
            return None
        return pool_address

    def get_price(
        self, token_address: str, token_address_2: Optional[str] = None
    ) -> float:
        """
        :param token_address:
        :param token_address_2:
        :return: price for `token_address` related to `token_address_2`. If `token_address_2` is not
            provided, `Wrapped Eth` address will be used
        :raises CannotGetPriceFromOracle: if there is no pool or not enough liquidity
        """
        token_address_2 = token_address_2 or self.weth_address
        if token_address == token_address_2:
            return 1.0
        # Track whether the requested ordering differs from the canonical
        # (lexicographic) one, so the final price can be inverted if needed
        reversed = token_address.lower() > token_address_2.lower()
        # Make it cache friendly as order does not matter
        args = (
            (token_address_2, token_address)
            if reversed
            else (token_address, token_address_2)
        )
        pool_address = self.get_pool_address(*args)
        if not pool_address:
            raise CannotGetPriceFromOracle(
                f"Uniswap V3 pool does not exist for {token_address} and {token_address_2}"
            )

        # Decimals needs to be adjusted
        token_decimals = get_decimals(token_address, self.ethereum_client)
        token_2_decimals = get_decimals(token_address_2, self.ethereum_client)
        pool_contract = self.w3.eth.contract(pool_address, abi=uniswap_v3_pool_abi)
        try:
            # One batched RPC: both token balances (liquidity check) + slot0
            (
                token_balance,
                token_2_balance,
                (sqrt_price_x96, _, _, _, _, _, _),
            ) = self.ethereum_client.batch_call(
                [
                    get_erc20_contract(
                        self.ethereum_client.w3, token_address
                    ).functions.balanceOf(pool_address),
                    get_erc20_contract(
                        self.ethereum_client.w3, token_address_2
                    ).functions.balanceOf(pool_address),
                    pool_contract.functions.slot0(),
                ]
            )
            if (token_balance / 10**token_decimals) < 2 or (
                token_2_balance / 10**token_2_decimals
            ) < 2:
                message = (
                    f"Not enough liquidity on uniswap v3 for pair token_1={token_address} "
                    f"token_2={token_address_2}, at least 2 units of each token are required"
                )
                logger.debug(message)
                raise CannotGetPriceFromOracle(message)
        except (
            Web3Exception,
            DecodingError,
            ValueError,
        ) as e:
            message = (
                f"Cannot get uniswap v3 price for pair token_1={token_address} "
                f"token_2={token_address_2}"
            )
            logger.debug(message)
            raise CannotGetPriceFromOracle(message) from e

        # https://docs.uniswap.org/sdk/guides/fetching-prices
        if not reversed:
            # Multiplying by itself is way faster than exponential
            price = (sqrt_price_x96 * sqrt_price_x96) / self.PRICE_CONVERSION_CONSTANT
        else:
            price = self.PRICE_CONVERSION_CONSTANT / (sqrt_price_x96 * sqrt_price_x96)

        # Rescale by the tokens' decimal difference
        return price * 10 ** (token_decimals - token_2_decimals)
import logging
from functools import cached_property
from typing import Optional
from eth_abi.exceptions import DecodingError
from web3.exceptions import Web3Exception
from .. import EthereumClient, EthereumNetwork
from ..contracts import get_kyber_network_proxy_contract
from .exceptions import CannotGetPriceFromOracle, InvalidPriceFromOracle
from .oracles import PriceOracle
from .utils import get_decimals
logger = logging.getLogger(__name__)
class KyberOracle(PriceOracle):
    """
    KyberSwap Legacy Oracle

    https://docs.kyberswap.com/Legacy/addresses/addresses-mainnet
    """

    # This is the `tokenAddress` they use for ETH ¯\_(ツ)_/¯
    ETH_TOKEN_ADDRESS = "0xEeeeeEeeeEeEeeEeEeEeeEEEeeeeEeeeeeeeEEeE"
    ADDRESSES = {
        EthereumNetwork.MAINNET: "0x9AAb3f75489902f3a48495025729a0AF77d4b11e",
    }

    def __init__(
        self,
        ethereum_client: EthereumClient,
        kyber_network_proxy_address: Optional[str] = None,
    ):
        """
        :param ethereum_client:
        :param kyber_network_proxy_address: https://developer.kyber.network/docs/MainnetEnvGuide/#contract-addresses
        """
        self.ethereum_client = ethereum_client
        self.w3 = ethereum_client.w3
        self._kyber_network_proxy_address = kyber_network_proxy_address

    @classmethod
    def is_available(
        cls,
        ethereum_client: EthereumClient,
    ) -> bool:
        """
        :param ethereum_client:
        :return: `True` if Oracle is available for the EthereumClient provided, `False` otherwise
        """
        return ethereum_client.get_network() in cls.ADDRESSES

    @cached_property
    def kyber_network_proxy_address(self):
        """Resolve the proxy address: explicit override first, then the
        per-network deployment, defaulting to the Mainnet address."""
        if self._kyber_network_proxy_address:
            return self._kyber_network_proxy_address
        return self.ADDRESSES.get(
            self.ethereum_client.get_network(),
            self.ADDRESSES.get(EthereumNetwork.MAINNET),
        )  # By default return Mainnet address

    @cached_property
    def kyber_network_proxy_contract(self):
        """Web3 contract instance for the KyberNetworkProxy."""
        return get_kyber_network_proxy_contract(
            self.w3, self.kyber_network_proxy_address
        )

    def get_price(
        self, token_address_1: str, token_address_2: str = ETH_TOKEN_ADDRESS
    ) -> float:
        """
        :param token_address_1:
        :param token_address_2: defaults to Kyber's pseudo-address for ETH
        :return: `token_address_1` price expressed in `token_address_2` units
        :raises InvalidPriceFromOracle: if the quoted rate is non-positive
        :raises CannotGetPriceFromOracle: if the proxy call fails
        """
        if token_address_1 == token_address_2:
            return 1.0
        try:
            # Get decimals for token, estimation will be more accurate
            decimals = get_decimals(token_address_1, self.ethereum_client)
            token_unit = int(10**decimals)
            (
                expected_rate,
                _,
            ) = self.kyber_network_proxy_contract.functions.getExpectedRate(
                token_address_1, token_address_2, int(token_unit)
            ).call()

            price = expected_rate / 1e18

            if price <= 0.0:
                # Try again the opposite direction and invert the rate
                (
                    expected_rate,
                    _,
                ) = self.kyber_network_proxy_contract.functions.getExpectedRate(
                    token_address_2, token_address_1, int(token_unit)
                ).call()
                price = (token_unit / expected_rate) if expected_rate else 0

            if price <= 0.0:
                message = (
                    f"price={price} <= 0 from kyber-network-proxy={self.kyber_network_proxy_address} "
                    f"for token-1={token_address_1} to token-2={token_address_2}"
                )
                logger.debug(message)
                raise InvalidPriceFromOracle(message)
            return price
        except (Web3Exception, DecodingError, ValueError) as e:
            message = (
                f"Cannot get price from kyber-network-proxy={self.kyber_network_proxy_address} "
                f"for token-1={token_address_1} to token-2={token_address_2}"
            )
            logger.debug(message)
            raise CannotGetPriceFromOracle(message) from e
# ABI of a Mooniswap pool contract: ERC20 LP-token entry points plus the
# pool functions (swap/deposit/withdraw), the fee/decay-period voting hooks
# and the virtual-balance views.  The data is unchanged from before; entries
# are formatted one per line so each function/event can be scanned at a glance.
mooniswap_abi = [
    # --- constructor ---
    {"inputs": [{"internalType": "contract IERC20", "name": "_token0", "type": "address"}, {"internalType": "contract IERC20", "name": "_token1", "type": "address"}, {"internalType": "string", "name": "name", "type": "string"}, {"internalType": "string", "name": "symbol", "type": "string"}, {"internalType": "contract IMooniswapFactoryGovernance", "name": "_mooniswapFactoryGovernance", "type": "address"}], "stateMutability": "nonpayable", "type": "constructor"},
    # --- events ---
    {"anonymous": False, "inputs": [{"indexed": True, "internalType": "address", "name": "owner", "type": "address"}, {"indexed": True, "internalType": "address", "name": "spender", "type": "address"}, {"indexed": False, "internalType": "uint256", "name": "value", "type": "uint256"}], "name": "Approval", "type": "event"},
    {"anonymous": False, "inputs": [{"indexed": True, "internalType": "address", "name": "user", "type": "address"}, {"indexed": False, "internalType": "uint256", "name": "decayPeriod", "type": "uint256"}, {"indexed": False, "internalType": "bool", "name": "isDefault", "type": "bool"}, {"indexed": False, "internalType": "uint256", "name": "amount", "type": "uint256"}], "name": "DecayPeriodVoteUpdate", "type": "event"},
    {"anonymous": False, "inputs": [{"indexed": True, "internalType": "address", "name": "sender", "type": "address"}, {"indexed": True, "internalType": "address", "name": "receiver", "type": "address"}, {"indexed": False, "internalType": "uint256", "name": "share", "type": "uint256"}, {"indexed": False, "internalType": "uint256", "name": "token0Amount", "type": "uint256"}, {"indexed": False, "internalType": "uint256", "name": "token1Amount", "type": "uint256"}], "name": "Deposited", "type": "event"},
    {"anonymous": False, "inputs": [{"indexed": False, "internalType": "string", "name": "reason", "type": "string"}], "name": "Error", "type": "event"},
    {"anonymous": False, "inputs": [{"indexed": True, "internalType": "address", "name": "user", "type": "address"}, {"indexed": False, "internalType": "uint256", "name": "fee", "type": "uint256"}, {"indexed": False, "internalType": "bool", "name": "isDefault", "type": "bool"}, {"indexed": False, "internalType": "uint256", "name": "amount", "type": "uint256"}], "name": "FeeVoteUpdate", "type": "event"},
    {"anonymous": False, "inputs": [{"indexed": True, "internalType": "address", "name": "previousOwner", "type": "address"}, {"indexed": True, "internalType": "address", "name": "newOwner", "type": "address"}], "name": "OwnershipTransferred", "type": "event"},
    {"anonymous": False, "inputs": [{"indexed": True, "internalType": "address", "name": "user", "type": "address"}, {"indexed": False, "internalType": "uint256", "name": "slippageFee", "type": "uint256"}, {"indexed": False, "internalType": "bool", "name": "isDefault", "type": "bool"}, {"indexed": False, "internalType": "uint256", "name": "amount", "type": "uint256"}], "name": "SlippageFeeVoteUpdate", "type": "event"},
    {"anonymous": False, "inputs": [{"indexed": True, "internalType": "address", "name": "sender", "type": "address"}, {"indexed": True, "internalType": "address", "name": "receiver", "type": "address"}, {"indexed": True, "internalType": "address", "name": "srcToken", "type": "address"}, {"indexed": False, "internalType": "address", "name": "dstToken", "type": "address"}, {"indexed": False, "internalType": "uint256", "name": "amount", "type": "uint256"}, {"indexed": False, "internalType": "uint256", "name": "result", "type": "uint256"}, {"indexed": False, "internalType": "uint256", "name": "srcAdditionBalance", "type": "uint256"}, {"indexed": False, "internalType": "uint256", "name": "dstRemovalBalance", "type": "uint256"}, {"indexed": False, "internalType": "address", "name": "referral", "type": "address"}], "name": "Swapped", "type": "event"},
    {"anonymous": False, "inputs": [{"indexed": False, "internalType": "uint256", "name": "srcBalance", "type": "uint256"}, {"indexed": False, "internalType": "uint256", "name": "dstBalance", "type": "uint256"}, {"indexed": False, "internalType": "uint256", "name": "fee", "type": "uint256"}, {"indexed": False, "internalType": "uint256", "name": "slippageFee", "type": "uint256"}, {"indexed": False, "internalType": "uint256", "name": "referralShare", "type": "uint256"}, {"indexed": False, "internalType": "uint256", "name": "governanceShare", "type": "uint256"}], "name": "Sync", "type": "event"},
    {"anonymous": False, "inputs": [{"indexed": True, "internalType": "address", "name": "from", "type": "address"}, {"indexed": True, "internalType": "address", "name": "to", "type": "address"}, {"indexed": False, "internalType": "uint256", "name": "value", "type": "uint256"}], "name": "Transfer", "type": "event"},
    {"anonymous": False, "inputs": [{"indexed": True, "internalType": "address", "name": "sender", "type": "address"}, {"indexed": True, "internalType": "address", "name": "receiver", "type": "address"}, {"indexed": False, "internalType": "uint256", "name": "share", "type": "uint256"}, {"indexed": False, "internalType": "uint256", "name": "token0Amount", "type": "uint256"}, {"indexed": False, "internalType": "uint256", "name": "token1Amount", "type": "uint256"}], "name": "Withdrawn", "type": "event"},
    # --- functions ---
    {"inputs": [{"internalType": "address", "name": "owner", "type": "address"}, {"internalType": "address", "name": "spender", "type": "address"}], "name": "allowance", "outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}], "stateMutability": "view", "type": "function"},
    {"inputs": [{"internalType": "address", "name": "spender", "type": "address"}, {"internalType": "uint256", "name": "amount", "type": "uint256"}], "name": "approve", "outputs": [{"internalType": "bool", "name": "", "type": "bool"}], "stateMutability": "nonpayable", "type": "function"},
    {"inputs": [{"internalType": "address", "name": "account", "type": "address"}], "name": "balanceOf", "outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}], "stateMutability": "view", "type": "function"},
    {"inputs": [], "name": "decayPeriod", "outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}], "stateMutability": "view", "type": "function"},
    {"inputs": [{"internalType": "uint256", "name": "vote", "type": "uint256"}], "name": "decayPeriodVote", "outputs": [], "stateMutability": "nonpayable", "type": "function"},
    {"inputs": [{"internalType": "address", "name": "user", "type": "address"}], "name": "decayPeriodVotes", "outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}], "stateMutability": "view", "type": "function"},
    {"inputs": [], "name": "decimals", "outputs": [{"internalType": "uint8", "name": "", "type": "uint8"}], "stateMutability": "view", "type": "function"},
    {"inputs": [{"internalType": "address", "name": "spender", "type": "address"}, {"internalType": "uint256", "name": "subtractedValue", "type": "uint256"}], "name": "decreaseAllowance", "outputs": [{"internalType": "bool", "name": "", "type": "bool"}], "stateMutability": "nonpayable", "type": "function"},
    {"inputs": [{"internalType": "uint256[2]", "name": "maxAmounts", "type": "uint256[2]"}, {"internalType": "uint256[2]", "name": "minAmounts", "type": "uint256[2]"}], "name": "deposit", "outputs": [{"internalType": "uint256", "name": "fairSupply", "type": "uint256"}, {"internalType": "uint256[2]", "name": "receivedAmounts", "type": "uint256[2]"}], "stateMutability": "payable", "type": "function"},
    {"inputs": [{"internalType": "uint256[2]", "name": "maxAmounts", "type": "uint256[2]"}, {"internalType": "uint256[2]", "name": "minAmounts", "type": "uint256[2]"}, {"internalType": "address", "name": "target", "type": "address"}], "name": "depositFor", "outputs": [{"internalType": "uint256", "name": "fairSupply", "type": "uint256"}, {"internalType": "uint256[2]", "name": "receivedAmounts", "type": "uint256[2]"}], "stateMutability": "payable", "type": "function"},
    {"inputs": [], "name": "discardDecayPeriodVote", "outputs": [], "stateMutability": "nonpayable", "type": "function"},
    {"inputs": [], "name": "discardFeeVote", "outputs": [], "stateMutability": "nonpayable", "type": "function"},
    {"inputs": [], "name": "discardSlippageFeeVote", "outputs": [], "stateMutability": "nonpayable", "type": "function"},
    {"inputs": [], "name": "fee", "outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}], "stateMutability": "view", "type": "function"},
    {"inputs": [{"internalType": "uint256", "name": "vote", "type": "uint256"}], "name": "feeVote", "outputs": [], "stateMutability": "nonpayable", "type": "function"},
    {"inputs": [{"internalType": "address", "name": "user", "type": "address"}], "name": "feeVotes", "outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}], "stateMutability": "view", "type": "function"},
    {"inputs": [{"internalType": "contract IERC20", "name": "token", "type": "address"}], "name": "getBalanceForAddition", "outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}], "stateMutability": "view", "type": "function"},
    {"inputs": [{"internalType": "contract IERC20", "name": "token", "type": "address"}], "name": "getBalanceForRemoval", "outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}], "stateMutability": "view", "type": "function"},
    {"inputs": [{"internalType": "contract IERC20", "name": "src", "type": "address"}, {"internalType": "contract IERC20", "name": "dst", "type": "address"}, {"internalType": "uint256", "name": "amount", "type": "uint256"}], "name": "getReturn", "outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}], "stateMutability": "view", "type": "function"},
    {"inputs": [], "name": "getTokens", "outputs": [{"internalType": "contract IERC20[]", "name": "tokens", "type": "address[]"}], "stateMutability": "view", "type": "function"},
    {"inputs": [{"internalType": "address", "name": "spender", "type": "address"}, {"internalType": "uint256", "name": "addedValue", "type": "uint256"}], "name": "increaseAllowance", "outputs": [{"internalType": "bool", "name": "", "type": "bool"}], "stateMutability": "nonpayable", "type": "function"},
    {"inputs": [], "name": "mooniswapFactoryGovernance", "outputs": [{"internalType": "contract IMooniswapFactoryGovernance", "name": "", "type": "address"}], "stateMutability": "view", "type": "function"},
    {"inputs": [], "name": "name", "outputs": [{"internalType": "string", "name": "", "type": "string"}], "stateMutability": "view", "type": "function"},
    {"inputs": [], "name": "owner", "outputs": [{"internalType": "address", "name": "", "type": "address"}], "stateMutability": "view", "type": "function"},
    {"inputs": [], "name": "renounceOwnership", "outputs": [], "stateMutability": "nonpayable", "type": "function"},
    {"inputs": [{"internalType": "contract IERC20", "name": "token", "type": "address"}, {"internalType": "uint256", "name": "amount", "type": "uint256"}], "name": "rescueFunds", "outputs": [], "stateMutability": "nonpayable", "type": "function"},
    {"inputs": [{"internalType": "contract IMooniswapFactoryGovernance", "name": "newMooniswapFactoryGovernance", "type": "address"}], "name": "setMooniswapFactoryGovernance", "outputs": [], "stateMutability": "nonpayable", "type": "function"},
    {"inputs": [], "name": "slippageFee", "outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}], "stateMutability": "view", "type": "function"},
    {"inputs": [{"internalType": "uint256", "name": "vote", "type": "uint256"}], "name": "slippageFeeVote", "outputs": [], "stateMutability": "nonpayable", "type": "function"},
    {"inputs": [{"internalType": "address", "name": "user", "type": "address"}], "name": "slippageFeeVotes", "outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}], "stateMutability": "view", "type": "function"},
    {"inputs": [{"internalType": "contract IERC20", "name": "src", "type": "address"}, {"internalType": "contract IERC20", "name": "dst", "type": "address"}, {"internalType": "uint256", "name": "amount", "type": "uint256"}, {"internalType": "uint256", "name": "minReturn", "type": "uint256"}, {"internalType": "address", "name": "referral", "type": "address"}], "name": "swap", "outputs": [{"internalType": "uint256", "name": "result", "type": "uint256"}], "stateMutability": "payable", "type": "function"},
    {"inputs": [{"internalType": "contract IERC20", "name": "src", "type": "address"}, {"internalType": "contract IERC20", "name": "dst", "type": "address"}, {"internalType": "uint256", "name": "amount", "type": "uint256"}, {"internalType": "uint256", "name": "minReturn", "type": "uint256"}, {"internalType": "address", "name": "referral", "type": "address"}, {"internalType": "address payable", "name": "receiver", "type": "address"}], "name": "swapFor", "outputs": [{"internalType": "uint256", "name": "result", "type": "uint256"}], "stateMutability": "payable", "type": "function"},
    {"inputs": [], "name": "symbol", "outputs": [{"internalType": "string", "name": "", "type": "string"}], "stateMutability": "view", "type": "function"},
    {"inputs": [], "name": "token0", "outputs": [{"internalType": "contract IERC20", "name": "", "type": "address"}], "stateMutability": "view", "type": "function"},
    {"inputs": [], "name": "token1", "outputs": [{"internalType": "contract IERC20", "name": "", "type": "address"}], "stateMutability": "view", "type": "function"},
    {"inputs": [{"internalType": "uint256", "name": "i", "type": "uint256"}], "name": "tokens", "outputs": [{"internalType": "contract IERC20", "name": "", "type": "address"}], "stateMutability": "view", "type": "function"},
    {"inputs": [], "name": "totalSupply", "outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}], "stateMutability": "view", "type": "function"},
    {"inputs": [{"internalType": "address", "name": "recipient", "type": "address"}, {"internalType": "uint256", "name": "amount", "type": "uint256"}], "name": "transfer", "outputs": [{"internalType": "bool", "name": "", "type": "bool"}], "stateMutability": "nonpayable", "type": "function"},
    {"inputs": [{"internalType": "address", "name": "sender", "type": "address"}, {"internalType": "address", "name": "recipient", "type": "address"}, {"internalType": "uint256", "name": "amount", "type": "uint256"}], "name": "transferFrom", "outputs": [{"internalType": "bool", "name": "", "type": "bool"}], "stateMutability": "nonpayable", "type": "function"},
    {"inputs": [{"internalType": "address", "name": "newOwner", "type": "address"}], "name": "transferOwnership", "outputs": [], "stateMutability": "nonpayable", "type": "function"},
    {"inputs": [{"internalType": "contract IERC20", "name": "", "type": "address"}], "name": "virtualBalancesForAddition", "outputs": [{"internalType": "uint216", "name": "balance", "type": "uint216"}, {"internalType": "uint40", "name": "time", "type": "uint40"}], "stateMutability": "view", "type": "function"},
    {"inputs": [{"internalType": "contract IERC20", "name": "", "type": "address"}], "name": "virtualBalancesForRemoval", "outputs": [{"internalType": "uint216", "name": "balance", "type": "uint216"}, {"internalType": "uint40", "name": "time", "type": "uint40"}], "stateMutability": "view", "type": "function"},
    {"inputs": [], "name": "virtualDecayPeriod", "outputs": [{"internalType": "uint104", "name": "", "type": "uint104"}, {"internalType": "uint104", "name": "", "type": "uint104"}, {"internalType": "uint48", "name": "", "type": "uint48"}], "stateMutability": "view", "type": "function"},
    {"inputs": [], "name": "virtualFee", "outputs": [{"internalType": "uint104", "name": "", "type": "uint104"}, {"internalType": "uint104", "name": "", "type": "uint104"}, {"internalType": "uint48", "name": "", "type": "uint48"}], "stateMutability": "view", "type": "function"},
    {"inputs": [], "name": "virtualSlippageFee", "outputs": [{"internalType": "uint104", "name": "", "type": "uint104"}, {"internalType": "uint104", "name": "", "type": "uint104"}, {"internalType": "uint48", "name": "", "type": "uint48"}], "stateMutability": "view", "type": "function"},
    {"inputs": [{"internalType": "contract IERC20", "name": "", "type": "address"}], "name": "volumes", "outputs": [{"internalType": "uint128", "name": "confirmed", "type": "uint128"}, {"internalType": "uint128", "name": "result", "type": "uint128"}], "stateMutability": "view", "type": "function"},
    {"inputs": [{"internalType": "uint256", "name": "amount", "type": "uint256"}, {"internalType": "uint256[]", "name": "minReturns", "type": "uint256[]"}], "name": "withdraw", "outputs": [{"internalType": "uint256[2]", "name": "withdrawnAmounts", "type": "uint256[2]"}], "stateMutability": "nonpayable", "type": "function"},
    {"inputs": [{"internalType": "uint256", "name": "amount", "type": "uint256"}, {"internalType": "uint256[]", "name": "minReturns", "type": "uint256[]"}, {"internalType": "address payable", "name": "target", "type": "address"}], "name": "withdrawFor", "outputs": [{"internalType": "uint256[2]", "name": "withdrawnAmounts", "type": "uint256[2]"}], "stateMutability": "nonpayable", "type": "function"},
]
# ABI for the Uniswap V3 factory contract: pool creation/lookup, fee-tier
# administration and ownership management.  The data is unchanged from
# before; entries are formatted one per line for easier scanning.
uniswap_v3_factory_abi = [
    {"inputs": [], "stateMutability": "nonpayable", "type": "constructor"},
    {"anonymous": False, "inputs": [{"indexed": True, "internalType": "uint24", "name": "fee", "type": "uint24"}, {"indexed": True, "internalType": "int24", "name": "tickSpacing", "type": "int24"}], "name": "FeeAmountEnabled", "type": "event"},
    {"anonymous": False, "inputs": [{"indexed": True, "internalType": "address", "name": "oldOwner", "type": "address"}, {"indexed": True, "internalType": "address", "name": "newOwner", "type": "address"}], "name": "OwnerChanged", "type": "event"},
    {"anonymous": False, "inputs": [{"indexed": True, "internalType": "address", "name": "token0", "type": "address"}, {"indexed": True, "internalType": "address", "name": "token1", "type": "address"}, {"indexed": True, "internalType": "uint24", "name": "fee", "type": "uint24"}, {"indexed": False, "internalType": "int24", "name": "tickSpacing", "type": "int24"}, {"indexed": False, "internalType": "address", "name": "pool", "type": "address"}], "name": "PoolCreated", "type": "event"},
    {"inputs": [{"internalType": "address", "name": "tokenA", "type": "address"}, {"internalType": "address", "name": "tokenB", "type": "address"}, {"internalType": "uint24", "name": "fee", "type": "uint24"}], "name": "createPool", "outputs": [{"internalType": "address", "name": "pool", "type": "address"}], "stateMutability": "nonpayable", "type": "function"},
    {"inputs": [{"internalType": "uint24", "name": "fee", "type": "uint24"}, {"internalType": "int24", "name": "tickSpacing", "type": "int24"}], "name": "enableFeeAmount", "outputs": [], "stateMutability": "nonpayable", "type": "function"},
    {"inputs": [{"internalType": "uint24", "name": "", "type": "uint24"}], "name": "feeAmountTickSpacing", "outputs": [{"internalType": "int24", "name": "", "type": "int24"}], "stateMutability": "view", "type": "function"},
    {"inputs": [{"internalType": "address", "name": "", "type": "address"}, {"internalType": "address", "name": "", "type": "address"}, {"internalType": "uint24", "name": "", "type": "uint24"}], "name": "getPool", "outputs": [{"internalType": "address", "name": "", "type": "address"}], "stateMutability": "view", "type": "function"},
    {"inputs": [], "name": "owner", "outputs": [{"internalType": "address", "name": "", "type": "address"}], "stateMutability": "view", "type": "function"},
    {"inputs": [], "name": "parameters", "outputs": [{"internalType": "address", "name": "factory", "type": "address"}, {"internalType": "address", "name": "token0", "type": "address"}, {"internalType": "address", "name": "token1", "type": "address"}, {"internalType": "uint24", "name": "fee", "type": "uint24"}, {"internalType": "int24", "name": "tickSpacing", "type": "int24"}], "stateMutability": "view", "type": "function"},
    {"inputs": [{"internalType": "address", "name": "_owner", "type": "address"}], "name": "setOwner", "outputs": [], "stateMutability": "nonpayable", "type": "function"},
]
uniswap_v3_router_abi = [
{
"inputs": [
{"internalType": "address", "name": "_factoryV2", "type": "address"},
{"internalType": "address", "name": "factoryV3", "type": "address"},
{"internalType": "address", "name": "_positionManager", "type": "address"},
{"internalType": "address", "name": "_WETH9", "type": "address"},
],
"stateMutability": "nonpayable",
"type": "constructor",
},
{
"inputs": [],
"name": "WETH9",
"outputs": [{"internalType": "address", "name": "", "type": "address"}],
"stateMutability": "view",
"type": "function",
},
{
"inputs": [{"internalType": "address", "name": "token", "type": "address"}],
"name": "approveMax",
"outputs": [],
"stateMutability": "payable",
"type": "function",
},
{
"inputs": [{"internalType": "address", "name": "token", "type": "address"}],
"name": "approveMaxMinusOne",
"outputs": [],
"stateMutability": "payable",
"type": "function",
},
{
"inputs": [{"internalType": "address", "name": "token", "type": "address"}],
"name": "approveZeroThenMax",
"outputs": [],
"stateMutability": "payable",
"type": "function",
},
{
"inputs": [{"internalType": "address", "name": "token", "type": "address"}],
"name": "approveZeroThenMaxMinusOne",
"outputs": [],
"stateMutability": "payable",
"type": "function",
},
{
"inputs": [{"internalType": "bytes", "name": "data", "type": "bytes"}],
"name": "callPositionManager",
"outputs": [{"internalType": "bytes", "name": "result", "type": "bytes"}],
"stateMutability": "payable",
"type": "function",
},
{
"inputs": [
{"internalType": "bytes[]", "name": "paths", "type": "bytes[]"},
{"internalType": "uint128[]", "name": "amounts", "type": "uint128[]"},
{
"internalType": "uint24",
"name": "maximumTickDivergence",
"type": "uint24",
},
{"internalType": "uint32", "name": "secondsAgo", "type": "uint32"},
],
"name": "checkOracleSlippage",
"outputs": [],
"stateMutability": "view",
"type": "function",
},
{
"inputs": [
{"internalType": "bytes", "name": "path", "type": "bytes"},
{
"internalType": "uint24",
"name": "maximumTickDivergence",
"type": "uint24",
},
{"internalType": "uint32", "name": "secondsAgo", "type": "uint32"},
],
"name": "checkOracleSlippage",
"outputs": [],
"stateMutability": "view",
"type": "function",
},
{
"inputs": [
{
"components": [
{"internalType": "bytes", "name": "path", "type": "bytes"},
{"internalType": "address", "name": "recipient", "type": "address"},
{"internalType": "uint256", "name": "amountIn", "type": "uint256"},
{
"internalType": "uint256",
"name": "amountOutMinimum",
"type": "uint256",
},
],
"internalType": "struct IV3SwapRouter.ExactInputParams",
"name": "params",
"type": "tuple",
}
],
"name": "exactInput",
"outputs": [
{"internalType": "uint256", "name": "amountOut", "type": "uint256"}
],
"stateMutability": "payable",
"type": "function",
},
{
"inputs": [
{
"components": [
{"internalType": "address", "name": "tokenIn", "type": "address"},
{"internalType": "address", "name": "tokenOut", "type": "address"},
{"internalType": "uint24", "name": "fee", "type": "uint24"},
{"internalType": "address", "name": "recipient", "type": "address"},
{"internalType": "uint256", "name": "amountIn", "type": "uint256"},
{
"internalType": "uint256",
"name": "amountOutMinimum",
"type": "uint256",
},
{
"internalType": "uint160",
"name": "sqrtPriceLimitX96",
"type": "uint160",
},
],
"internalType": "struct IV3SwapRouter.ExactInputSingleParams",
"name": "params",
"type": "tuple",
}
],
"name": "exactInputSingle",
"outputs": [
{"internalType": "uint256", "name": "amountOut", "type": "uint256"}
],
"stateMutability": "payable",
"type": "function",
},
{
"inputs": [
{
"components": [
{"internalType": "bytes", "name": "path", "type": "bytes"},
{"internalType": "address", "name": "recipient", "type": "address"},
{"internalType": "uint256", "name": "amountOut", "type": "uint256"},
{
"internalType": "uint256",
"name": "amountInMaximum",
"type": "uint256",
},
],
"internalType": "struct IV3SwapRouter.ExactOutputParams",
"name": "params",
"type": "tuple",
}
],
"name": "exactOutput",
"outputs": [{"internalType": "uint256", "name": "amountIn", "type": "uint256"}],
"stateMutability": "payable",
"type": "function",
},
{
"inputs": [
{
"components": [
{"internalType": "address", "name": "tokenIn", "type": "address"},
{"internalType": "address", "name": "tokenOut", "type": "address"},
{"internalType": "uint24", "name": "fee", "type": "uint24"},
{"internalType": "address", "name": "recipient", "type": "address"},
{"internalType": "uint256", "name": "amountOut", "type": "uint256"},
{
"internalType": "uint256",
"name": "amountInMaximum",
"type": "uint256",
},
{
"internalType": "uint160",
"name": "sqrtPriceLimitX96",
"type": "uint160",
},
],
"internalType": "struct IV3SwapRouter.ExactOutputSingleParams",
"name": "params",
"type": "tuple",
}
],
"name": "exactOutputSingle",
"outputs": [{"internalType": "uint256", "name": "amountIn", "type": "uint256"}],
"stateMutability": "payable",
"type": "function",
},
{
"inputs": [],
"name": "factory",
"outputs": [{"internalType": "address", "name": "", "type": "address"}],
"stateMutability": "view",
"type": "function",
},
{
"inputs": [],
"name": "factoryV2",
"outputs": [{"internalType": "address", "name": "", "type": "address"}],
"stateMutability": "view",
"type": "function",
},
{
"inputs": [
{"internalType": "address", "name": "token", "type": "address"},
{"internalType": "uint256", "name": "amount", "type": "uint256"},
],
"name": "getApprovalType",
"outputs": [
{
"internalType": "enum IApproveAndCall.ApprovalType",
"name": "",
"type": "uint8",
}
],
"stateMutability": "nonpayable",
"type": "function",
},
{
"inputs": [
{
"components": [
{"internalType": "address", "name": "token0", "type": "address"},
{"internalType": "address", "name": "token1", "type": "address"},
{"internalType": "uint256", "name": "tokenId", "type": "uint256"},
{
"internalType": "uint256",
"name": "amount0Min",
"type": "uint256",
},
{
"internalType": "uint256",
"name": "amount1Min",
"type": "uint256",
},
],
"internalType": "struct IApproveAndCall.IncreaseLiquidityParams",
"name": "params",
"type": "tuple",
}
],
"name": "increaseLiquidity",
"outputs": [{"internalType": "bytes", "name": "result", "type": "bytes"}],
"stateMutability": "payable",
"type": "function",
},
{
"inputs": [
{
"components": [
{"internalType": "address", "name": "token0", "type": "address"},
{"internalType": "address", "name": "token1", "type": "address"},
{"internalType": "uint24", "name": "fee", "type": "uint24"},
{"internalType": "int24", "name": "tickLower", "type": "int24"},
{"internalType": "int24", "name": "tickUpper", "type": "int24"},
{
"internalType": "uint256",
"name": "amount0Min",
"type": "uint256",
},
{
"internalType": "uint256",
"name": "amount1Min",
"type": "uint256",
},
{"internalType": "address", "name": "recipient", "type": "address"},
],
"internalType": "struct IApproveAndCall.MintParams",
"name": "params",
"type": "tuple",
}
],
"name": "mint",
"outputs": [{"internalType": "bytes", "name": "result", "type": "bytes"}],
"stateMutability": "payable",
"type": "function",
},
{
"inputs": [
{"internalType": "bytes32", "name": "previousBlockhash", "type": "bytes32"},
{"internalType": "bytes[]", "name": "data", "type": "bytes[]"},
],
"name": "multicall",
"outputs": [{"internalType": "bytes[]", "name": "", "type": "bytes[]"}],
"stateMutability": "payable",
"type": "function",
},
{
"inputs": [
{"internalType": "uint256", "name": "deadline", "type": "uint256"},
{"internalType": "bytes[]", "name": "data", "type": "bytes[]"},
],
"name": "multicall",
"outputs": [{"internalType": "bytes[]", "name": "", "type": "bytes[]"}],
"stateMutability": "payable",
"type": "function",
},
{
"inputs": [{"internalType": "bytes[]", "name": "data", "type": "bytes[]"}],
"name": "multicall",
"outputs": [{"internalType": "bytes[]", "name": "results", "type": "bytes[]"}],
"stateMutability": "payable",
"type": "function",
},
{
"inputs": [],
"name": "positionManager",
"outputs": [{"internalType": "address", "name": "", "type": "address"}],
"stateMutability": "view",
"type": "function",
},
{
"inputs": [
{"internalType": "address", "name": "token", "type": "address"},
{"internalType": "uint256", "name": "value", "type": "uint256"},
],
"name": "pull",
"outputs": [],
"stateMutability": "payable",
"type": "function",
},
{
"inputs": [],
"name": "refundETH",
"outputs": [],
"stateMutability": "payable",
"type": "function",
},
{
"inputs": [
{"internalType": "address", "name": "token", "type": "address"},
{"internalType": "uint256", "name": "value", "type": "uint256"},
{"internalType": "uint256", "name": "deadline", "type": "uint256"},
{"internalType": "uint8", "name": "v", "type": "uint8"},
{"internalType": "bytes32", "name": "r", "type": "bytes32"},
{"internalType": "bytes32", "name": "s", "type": "bytes32"},
],
"name": "selfPermit",
"outputs": [],
"stateMutability": "payable",
"type": "function",
},
{
"inputs": [
{"internalType": "address", "name": "token", "type": "address"},
{"internalType": "uint256", "name": "nonce", "type": "uint256"},
{"internalType": "uint256", "name": "expiry", "type": "uint256"},
{"internalType": "uint8", "name": "v", "type": "uint8"},
{"internalType": "bytes32", "name": "r", "type": "bytes32"},
{"internalType": "bytes32", "name": "s", "type": "bytes32"},
],
"name": "selfPermitAllowed",
"outputs": [],
"stateMutability": "payable",
"type": "function",
},
{
"inputs": [
{"internalType": "address", "name": "token", "type": "address"},
{"internalType": "uint256", "name": "nonce", "type": "uint256"},
{"internalType": "uint256", "name": "expiry", "type": "uint256"},
{"internalType": "uint8", "name": "v", "type": "uint8"},
{"internalType": "bytes32", "name": "r", "type": "bytes32"},
{"internalType": "bytes32", "name": "s", "type": "bytes32"},
],
"name": "selfPermitAllowedIfNecessary",
"outputs": [],
"stateMutability": "payable",
"type": "function",
},
{
"inputs": [
{"internalType": "address", "name": "token", "type": "address"},
{"internalType": "uint256", "name": "value", "type": "uint256"},
{"internalType": "uint256", "name": "deadline", "type": "uint256"},
{"internalType": "uint8", "name": "v", "type": "uint8"},
{"internalType": "bytes32", "name": "r", "type": "bytes32"},
{"internalType": "bytes32", "name": "s", "type": "bytes32"},
],
"name": "selfPermitIfNecessary",
"outputs": [],
"stateMutability": "payable",
"type": "function",
},
{
"inputs": [
{"internalType": "uint256", "name": "amountIn", "type": "uint256"},
{"internalType": "uint256", "name": "amountOutMin", "type": "uint256"},
{"internalType": "address[]", "name": "path", "type": "address[]"},
{"internalType": "address", "name": "to", "type": "address"},
],
"name": "swapExactTokensForTokens",
"outputs": [
{"internalType": "uint256", "name": "amountOut", "type": "uint256"}
],
"stateMutability": "payable",
"type": "function",
},
{
"inputs": [
{"internalType": "uint256", "name": "amountOut", "type": "uint256"},
{"internalType": "uint256", "name": "amountInMax", "type": "uint256"},
{"internalType": "address[]", "name": "path", "type": "address[]"},
{"internalType": "address", "name": "to", "type": "address"},
],
"name": "swapTokensForExactTokens",
"outputs": [{"internalType": "uint256", "name": "amountIn", "type": "uint256"}],
"stateMutability": "payable",
"type": "function",
},
{
"inputs": [
{"internalType": "address", "name": "token", "type": "address"},
{"internalType": "uint256", "name": "amountMinimum", "type": "uint256"},
{"internalType": "address", "name": "recipient", "type": "address"},
],
"name": "sweepToken",
"outputs": [],
"stateMutability": "payable",
"type": "function",
},
{
"inputs": [
{"internalType": "address", "name": "token", "type": "address"},
{"internalType": "uint256", "name": "amountMinimum", "type": "uint256"},
],
"name": "sweepToken",
"outputs": [],
"stateMutability": "payable",
"type": "function",
},
{
"inputs": [
{"internalType": "address", "name": "token", "type": "address"},
{"internalType": "uint256", "name": "amountMinimum", "type": "uint256"},
{"internalType": "uint256", "name": "feeBips", "type": "uint256"},
{"internalType": "address", "name": "feeRecipient", "type": "address"},
],
"name": "sweepTokenWithFee",
"outputs": [],
"stateMutability": "payable",
"type": "function",
},
{
"inputs": [
{"internalType": "address", "name": "token", "type": "address"},
{"internalType": "uint256", "name": "amountMinimum", "type": "uint256"},
{"internalType": "address", "name": "recipient", "type": "address"},
{"internalType": "uint256", "name": "feeBips", "type": "uint256"},
{"internalType": "address", "name": "feeRecipient", "type": "address"},
],
"name": "sweepTokenWithFee",
"outputs": [],
"stateMutability": "payable",
"type": "function",
},
{
"inputs": [
{"internalType": "int256", "name": "amount0Delta", "type": "int256"},
{"internalType": "int256", "name": "amount1Delta", "type": "int256"},
{"internalType": "bytes", "name": "_data", "type": "bytes"},
],
"name": "uniswapV3SwapCallback",
"outputs": [],
"stateMutability": "nonpayable",
"type": "function",
},
{
"inputs": [
{"internalType": "uint256", "name": "amountMinimum", "type": "uint256"},
{"internalType": "address", "name": "recipient", "type": "address"},
],
"name": "unwrapWETH9",
"outputs": [],
"stateMutability": "payable",
"type": "function",
},
{
"inputs": [
{"internalType": "uint256", "name": "amountMinimum", "type": "uint256"}
],
"name": "unwrapWETH9",
"outputs": [],
"stateMutability": "payable",
"type": "function",
},
{
"inputs": [
{"internalType": "uint256", "name": "amountMinimum", "type": "uint256"},
{"internalType": "address", "name": "recipient", "type": "address"},
{"internalType": "uint256", "name": "feeBips", "type": "uint256"},
{"internalType": "address", "name": "feeRecipient", "type": "address"},
],
"name": "unwrapWETH9WithFee",
"outputs": [],
"stateMutability": "payable",
"type": "function",
},
{
"inputs": [
{"internalType": "uint256", "name": "amountMinimum", "type": "uint256"},
{"internalType": "uint256", "name": "feeBips", "type": "uint256"},
{"internalType": "address", "name": "feeRecipient", "type": "address"},
],
"name": "unwrapWETH9WithFee",
"outputs": [],
"stateMutability": "payable",
"type": "function",
},
{
"inputs": [{"internalType": "uint256", "name": "value", "type": "uint256"}],
"name": "wrapETH",
"outputs": [],
"stateMutability": "payable",
"type": "function",
},
{"stateMutability": "payable", "type": "receive"},
]
uniswap_v3_pool_abi = [
{"inputs": [], "stateMutability": "nonpayable", "type": "constructor"},
{
"anonymous": False,
"inputs": [
{
"indexed": True,
"internalType": "address",
"name": "owner",
"type": "address",
},
{
"indexed": True,
"internalType": "int24",
"name": "tickLower",
"type": "int24",
},
{
"indexed": True,
"internalType": "int24",
"name": "tickUpper",
"type": "int24",
},
{
"indexed": False,
"internalType": "uint128",
"name": "amount",
"type": "uint128",
},
{
"indexed": False,
"internalType": "uint256",
"name": "amount0",
"type": "uint256",
},
{
"indexed": False,
"internalType": "uint256",
"name": "amount1",
"type": "uint256",
},
],
"name": "Burn",
"type": "event",
},
{
"anonymous": False,
"inputs": [
{
"indexed": True,
"internalType": "address",
"name": "owner",
"type": "address",
},
{
"indexed": False,
"internalType": "address",
"name": "recipient",
"type": "address",
},
{
"indexed": True,
"internalType": "int24",
"name": "tickLower",
"type": "int24",
},
{
"indexed": True,
"internalType": "int24",
"name": "tickUpper",
"type": "int24",
},
{
"indexed": False,
"internalType": "uint128",
"name": "amount0",
"type": "uint128",
},
{
"indexed": False,
"internalType": "uint128",
"name": "amount1",
"type": "uint128",
},
],
"name": "Collect",
"type": "event",
},
{
"anonymous": False,
"inputs": [
{
"indexed": True,
"internalType": "address",
"name": "sender",
"type": "address",
},
{
"indexed": True,
"internalType": "address",
"name": "recipient",
"type": "address",
},
{
"indexed": False,
"internalType": "uint128",
"name": "amount0",
"type": "uint128",
},
{
"indexed": False,
"internalType": "uint128",
"name": "amount1",
"type": "uint128",
},
],
"name": "CollectProtocol",
"type": "event",
},
{
"anonymous": False,
"inputs": [
{
"indexed": True,
"internalType": "address",
"name": "sender",
"type": "address",
},
{
"indexed": True,
"internalType": "address",
"name": "recipient",
"type": "address",
},
{
"indexed": False,
"internalType": "uint256",
"name": "amount0",
"type": "uint256",
},
{
"indexed": False,
"internalType": "uint256",
"name": "amount1",
"type": "uint256",
},
{
"indexed": False,
"internalType": "uint256",
"name": "paid0",
"type": "uint256",
},
{
"indexed": False,
"internalType": "uint256",
"name": "paid1",
"type": "uint256",
},
],
"name": "Flash",
"type": "event",
},
{
"anonymous": False,
"inputs": [
{
"indexed": False,
"internalType": "uint16",
"name": "observationCardinalityNextOld",
"type": "uint16",
},
{
"indexed": False,
"internalType": "uint16",
"name": "observationCardinalityNextNew",
"type": "uint16",
},
],
"name": "IncreaseObservationCardinalityNext",
"type": "event",
},
{
"anonymous": False,
"inputs": [
{
"indexed": False,
"internalType": "uint160",
"name": "sqrtPriceX96",
"type": "uint160",
},
{
"indexed": False,
"internalType": "int24",
"name": "tick",
"type": "int24",
},
],
"name": "Initialize",
"type": "event",
},
{
"anonymous": False,
"inputs": [
{
"indexed": False,
"internalType": "address",
"name": "sender",
"type": "address",
},
{
"indexed": True,
"internalType": "address",
"name": "owner",
"type": "address",
},
{
"indexed": True,
"internalType": "int24",
"name": "tickLower",
"type": "int24",
},
{
"indexed": True,
"internalType": "int24",
"name": "tickUpper",
"type": "int24",
},
{
"indexed": False,
"internalType": "uint128",
"name": "amount",
"type": "uint128",
},
{
"indexed": False,
"internalType": "uint256",
"name": "amount0",
"type": "uint256",
},
{
"indexed": False,
"internalType": "uint256",
"name": "amount1",
"type": "uint256",
},
],
"name": "Mint",
"type": "event",
},
{
"anonymous": False,
"inputs": [
{
"indexed": False,
"internalType": "uint8",
"name": "feeProtocol0Old",
"type": "uint8",
},
{
"indexed": False,
"internalType": "uint8",
"name": "feeProtocol1Old",
"type": "uint8",
},
{
"indexed": False,
"internalType": "uint8",
"name": "feeProtocol0New",
"type": "uint8",
},
{
"indexed": False,
"internalType": "uint8",
"name": "feeProtocol1New",
"type": "uint8",
},
],
"name": "SetFeeProtocol",
"type": "event",
},
{
"anonymous": False,
"inputs": [
{
"indexed": True,
"internalType": "address",
"name": "sender",
"type": "address",
},
{
"indexed": True,
"internalType": "address",
"name": "recipient",
"type": "address",
},
{
"indexed": False,
"internalType": "int256",
"name": "amount0",
"type": "int256",
},
{
"indexed": False,
"internalType": "int256",
"name": "amount1",
"type": "int256",
},
{
"indexed": False,
"internalType": "uint160",
"name": "sqrtPriceX96",
"type": "uint160",
},
{
"indexed": False,
"internalType": "uint128",
"name": "liquidity",
"type": "uint128",
},
{
"indexed": False,
"internalType": "int24",
"name": "tick",
"type": "int24",
},
],
"name": "Swap",
"type": "event",
},
{
"inputs": [
{"internalType": "int24", "name": "tickLower", "type": "int24"},
{"internalType": "int24", "name": "tickUpper", "type": "int24"},
{"internalType": "uint128", "name": "amount", "type": "uint128"},
],
"name": "burn",
"outputs": [
{"internalType": "uint256", "name": "amount0", "type": "uint256"},
{"internalType": "uint256", "name": "amount1", "type": "uint256"},
],
"stateMutability": "nonpayable",
"type": "function",
},
{
"inputs": [
{"internalType": "address", "name": "recipient", "type": "address"},
{"internalType": "int24", "name": "tickLower", "type": "int24"},
{"internalType": "int24", "name": "tickUpper", "type": "int24"},
{"internalType": "uint128", "name": "amount0Requested", "type": "uint128"},
{"internalType": "uint128", "name": "amount1Requested", "type": "uint128"},
],
"name": "collect",
"outputs": [
{"internalType": "uint128", "name": "amount0", "type": "uint128"},
{"internalType": "uint128", "name": "amount1", "type": "uint128"},
],
"stateMutability": "nonpayable",
"type": "function",
},
{
"inputs": [
{"internalType": "address", "name": "recipient", "type": "address"},
{"internalType": "uint128", "name": "amount0Requested", "type": "uint128"},
{"internalType": "uint128", "name": "amount1Requested", "type": "uint128"},
],
"name": "collectProtocol",
"outputs": [
{"internalType": "uint128", "name": "amount0", "type": "uint128"},
{"internalType": "uint128", "name": "amount1", "type": "uint128"},
],
"stateMutability": "nonpayable",
"type": "function",
},
{
"inputs": [],
"name": "factory",
"outputs": [{"internalType": "address", "name": "", "type": "address"}],
"stateMutability": "view",
"type": "function",
},
{
"inputs": [],
"name": "fee",
"outputs": [{"internalType": "uint24", "name": "", "type": "uint24"}],
"stateMutability": "view",
"type": "function",
},
{
"inputs": [],
"name": "feeGrowthGlobal0X128",
"outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}],
"stateMutability": "view",
"type": "function",
},
{
"inputs": [],
"name": "feeGrowthGlobal1X128",
"outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}],
"stateMutability": "view",
"type": "function",
},
{
"inputs": [
{"internalType": "address", "name": "recipient", "type": "address"},
{"internalType": "uint256", "name": "amount0", "type": "uint256"},
{"internalType": "uint256", "name": "amount1", "type": "uint256"},
{"internalType": "bytes", "name": "data", "type": "bytes"},
],
"name": "flash",
"outputs": [],
"stateMutability": "nonpayable",
"type": "function",
},
{
"inputs": [
{
"internalType": "uint16",
"name": "observationCardinalityNext",
"type": "uint16",
}
],
"name": "increaseObservationCardinalityNext",
"outputs": [],
"stateMutability": "nonpayable",
"type": "function",
},
{
"inputs": [
{"internalType": "uint160", "name": "sqrtPriceX96", "type": "uint160"}
],
"name": "initialize",
"outputs": [],
"stateMutability": "nonpayable",
"type": "function",
},
{
"inputs": [],
"name": "liquidity",
"outputs": [{"internalType": "uint128", "name": "", "type": "uint128"}],
"stateMutability": "view",
"type": "function",
},
{
"inputs": [],
"name": "maxLiquidityPerTick",
"outputs": [{"internalType": "uint128", "name": "", "type": "uint128"}],
"stateMutability": "view",
"type": "function",
},
{
"inputs": [
{"internalType": "address", "name": "recipient", "type": "address"},
{"internalType": "int24", "name": "tickLower", "type": "int24"},
{"internalType": "int24", "name": "tickUpper", "type": "int24"},
{"internalType": "uint128", "name": "amount", "type": "uint128"},
{"internalType": "bytes", "name": "data", "type": "bytes"},
],
"name": "mint",
"outputs": [
{"internalType": "uint256", "name": "amount0", "type": "uint256"},
{"internalType": "uint256", "name": "amount1", "type": "uint256"},
],
"stateMutability": "nonpayable",
"type": "function",
},
{
"inputs": [{"internalType": "uint256", "name": "", "type": "uint256"}],
"name": "observations",
"outputs": [
{"internalType": "uint32", "name": "blockTimestamp", "type": "uint32"},
{"internalType": "int56", "name": "tickCumulative", "type": "int56"},
{
"internalType": "uint160",
"name": "secondsPerLiquidityCumulativeX128",
"type": "uint160",
},
{"internalType": "bool", "name": "initialized", "type": "bool"},
],
"stateMutability": "view",
"type": "function",
},
{
"inputs": [
{"internalType": "uint32[]", "name": "secondsAgos", "type": "uint32[]"}
],
"name": "observe",
"outputs": [
{"internalType": "int56[]", "name": "tickCumulatives", "type": "int56[]"},
{
"internalType": "uint160[]",
"name": "secondsPerLiquidityCumulativeX128s",
"type": "uint160[]",
},
],
"stateMutability": "view",
"type": "function",
},
{
"inputs": [{"internalType": "bytes32", "name": "", "type": "bytes32"}],
"name": "positions",
"outputs": [
{"internalType": "uint128", "name": "liquidity", "type": "uint128"},
{
"internalType": "uint256",
"name": "feeGrowthInside0LastX128",
"type": "uint256",
},
{
"internalType": "uint256",
"name": "feeGrowthInside1LastX128",
"type": "uint256",
},
{"internalType": "uint128", "name": "tokensOwed0", "type": "uint128"},
{"internalType": "uint128", "name": "tokensOwed1", "type": "uint128"},
],
"stateMutability": "view",
"type": "function",
},
{
"inputs": [],
"name": "protocolFees",
"outputs": [
{"internalType": "uint128", "name": "token0", "type": "uint128"},
{"internalType": "uint128", "name": "token1", "type": "uint128"},
],
"stateMutability": "view",
"type": "function",
},
{
"inputs": [
{"internalType": "uint8", "name": "feeProtocol0", "type": "uint8"},
{"internalType": "uint8", "name": "feeProtocol1", "type": "uint8"},
],
"name": "setFeeProtocol",
"outputs": [],
"stateMutability": "nonpayable",
"type": "function",
},
{
"inputs": [],
"name": "slot0",
"outputs": [
{"internalType": "uint160", "name": "sqrtPriceX96", "type": "uint160"},
{"internalType": "int24", "name": "tick", "type": "int24"},
{"internalType": "uint16", "name": "observationIndex", "type": "uint16"},
{
"internalType": "uint16",
"name": "observationCardinality",
"type": "uint16",
},
{
"internalType": "uint16",
"name": "observationCardinalityNext",
"type": "uint16",
},
{"internalType": "uint8", "name": "feeProtocol", "type": "uint8"},
{"internalType": "bool", "name": "unlocked", "type": "bool"},
],
"stateMutability": "view",
"type": "function",
},
{
"inputs": [
{"internalType": "int24", "name": "tickLower", "type": "int24"},
{"internalType": "int24", "name": "tickUpper", "type": "int24"},
],
"name": "snapshotCumulativesInside",
"outputs": [
{"internalType": "int56", "name": "tickCumulativeInside", "type": "int56"},
{
"internalType": "uint160",
"name": "secondsPerLiquidityInsideX128",
"type": "uint160",
},
{"internalType": "uint32", "name": "secondsInside", "type": "uint32"},
],
"stateMutability": "view",
"type": "function",
},
{
"inputs": [
{"internalType": "address", "name": "recipient", "type": "address"},
{"internalType": "bool", "name": "zeroForOne", "type": "bool"},
{"internalType": "int256", "name": "amountSpecified", "type": "int256"},
{"internalType": "uint160", "name": "sqrtPriceLimitX96", "type": "uint160"},
{"internalType": "bytes", "name": "data", "type": "bytes"},
],
"name": "swap",
"outputs": [
{"internalType": "int256", "name": "amount0", "type": "int256"},
{"internalType": "int256", "name": "amount1", "type": "int256"},
],
"stateMutability": "nonpayable",
"type": "function",
},
{
"inputs": [{"internalType": "int16", "name": "", "type": "int16"}],
"name": "tickBitmap",
"outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}],
"stateMutability": "view",
"type": "function",
},
{
"inputs": [],
"name": "tickSpacing",
"outputs": [{"internalType": "int24", "name": "", "type": "int24"}],
"stateMutability": "view",
"type": "function",
},
{
"inputs": [{"internalType": "int24", "name": "", "type": "int24"}],
"name": "ticks",
"outputs": [
{"internalType": "uint128", "name": "liquidityGross", "type": "uint128"},
{"internalType": "int128", "name": "liquidityNet", "type": "int128"},
{
"internalType": "uint256",
"name": "feeGrowthOutside0X128",
"type": "uint256",
},
{
"internalType": "uint256",
"name": "feeGrowthOutside1X128",
"type": "uint256",
},
{"internalType": "int56", "name": "tickCumulativeOutside", "type": "int56"},
{
"internalType": "uint160",
"name": "secondsPerLiquidityOutsideX128",
"type": "uint160",
},
{"internalType": "uint32", "name": "secondsOutside", "type": "uint32"},
{"internalType": "bool", "name": "initialized", "type": "bool"},
],
"stateMutability": "view",
"type": "function",
},
{
"inputs": [],
"name": "token0",
"outputs": [{"internalType": "address", "name": "", "type": "address"}],
"stateMutability": "view",
"type": "function",
},
{
"inputs": [],
"name": "token1",
"outputs": [{"internalType": "address", "name": "", "type": "address"}],
"stateMutability": "view",
"type": "function",
},
]
AAVE_ATOKEN_ABI = [
{
"inputs": [
{
"internalType": "contract ILendingPool",
"name": "pool",
"type": "address",
},
{
"internalType": "address",
"name": "underlyingAssetAddress",
"type": "address",
},
{
"internalType": "address",
"name": "reserveTreasuryAddress",
"type": "address",
},
{"internalType": "string", "name": "tokenName", "type": "string"},
{"internalType": "string", "name": "tokenSymbol", "type": "string"},
{
"internalType": "address",
"name": "incentivesController",
"type": "address",
},
],
"stateMutability": "nonpayable",
"type": "constructor",
},
{
"anonymous": False,
"inputs": [
{
"indexed": True,
"internalType": "address",
"name": "owner",
"type": "address",
},
{
"indexed": True,
"internalType": "address",
"name": "spender",
"type": "address",
},
{
"indexed": False,
"internalType": "uint256",
"name": "value",
"type": "uint256",
},
],
"name": "Approval",
"type": "event",
},
{
"anonymous": False,
"inputs": [
{
"indexed": True,
"internalType": "address",
"name": "from",
"type": "address",
},
{
"indexed": True,
"internalType": "address",
"name": "to",
"type": "address",
},
{
"indexed": False,
"internalType": "uint256",
"name": "value",
"type": "uint256",
},
{
"indexed": False,
"internalType": "uint256",
"name": "index",
"type": "uint256",
},
],
"name": "BalanceTransfer",
"type": "event",
},
{
"anonymous": False,
"inputs": [
{
"indexed": True,
"internalType": "address",
"name": "from",
"type": "address",
},
{
"indexed": True,
"internalType": "address",
"name": "target",
"type": "address",
},
{
"indexed": False,
"internalType": "uint256",
"name": "value",
"type": "uint256",
},
{
"indexed": False,
"internalType": "uint256",
"name": "index",
"type": "uint256",
},
],
"name": "Burn",
"type": "event",
},
{
"anonymous": False,
"inputs": [
{
"indexed": True,
"internalType": "address",
"name": "underlyingAsset",
"type": "address",
},
{
"indexed": True,
"internalType": "address",
"name": "pool",
"type": "address",
},
{
"indexed": False,
"internalType": "address",
"name": "treasury",
"type": "address",
},
{
"indexed": False,
"internalType": "address",
"name": "incentivesController",
"type": "address",
},
{
"indexed": False,
"internalType": "uint8",
"name": "aTokenDecimals",
"type": "uint8",
},
{
"indexed": False,
"internalType": "string",
"name": "aTokenName",
"type": "string",
},
{
"indexed": False,
"internalType": "string",
"name": "aTokenSymbol",
"type": "string",
},
{
"indexed": False,
"internalType": "bytes",
"name": "params",
"type": "bytes",
},
],
"name": "Initialized",
"type": "event",
},
{
"anonymous": False,
"inputs": [
{
"indexed": True,
"internalType": "address",
"name": "from",
"type": "address",
},
{
"indexed": False,
"internalType": "uint256",
"name": "value",
"type": "uint256",
},
{
"indexed": False,
"internalType": "uint256",
"name": "index",
"type": "uint256",
},
],
"name": "Mint",
"type": "event",
},
{
"anonymous": False,
"inputs": [
{
"indexed": True,
"internalType": "address",
"name": "from",
"type": "address",
},
{
"indexed": True,
"internalType": "address",
"name": "to",
"type": "address",
},
{
"indexed": False,
"internalType": "uint256",
"name": "value",
"type": "uint256",
},
],
"name": "Transfer",
"type": "event",
},
{
"inputs": [],
"name": "ATOKEN_REVISION",
"outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}],
"stateMutability": "view",
"type": "function",
},
{
"inputs": [],
"name": "DOMAIN_SEPARATOR",
"outputs": [{"internalType": "bytes32", "name": "", "type": "bytes32"}],
"stateMutability": "view",
"type": "function",
},
{
"inputs": [],
"name": "EIP712_REVISION",
"outputs": [{"internalType": "bytes", "name": "", "type": "bytes"}],
"stateMutability": "view",
"type": "function",
},
{
"inputs": [],
"name": "PERMIT_TYPEHASH",
"outputs": [{"internalType": "bytes32", "name": "", "type": "bytes32"}],
"stateMutability": "view",
"type": "function",
},
{
"inputs": [],
"name": "POOL",
"outputs": [
{"internalType": "contract ILendingPool", "name": "", "type": "address"}
],
"stateMutability": "view",
"type": "function",
},
{
"inputs": [],
"name": "RESERVE_TREASURY_ADDRESS",
"outputs": [{"internalType": "address", "name": "", "type": "address"}],
"stateMutability": "view",
"type": "function",
},
{
"inputs": [],
"name": "UINT_MAX_VALUE",
"outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}],
"stateMutability": "view",
"type": "function",
},
{
"inputs": [],
"name": "UNDERLYING_ASSET_ADDRESS",
"outputs": [{"internalType": "address", "name": "", "type": "address"}],
"stateMutability": "view",
"type": "function",
},
{
"inputs": [{"internalType": "address", "name": "", "type": "address"}],
"name": "_nonces",
"outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}],
"stateMutability": "view",
"type": "function",
},
{
"inputs": [
{"internalType": "address", "name": "owner", "type": "address"},
{"internalType": "address", "name": "spender", "type": "address"},
],
"name": "allowance",
"outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}],
"stateMutability": "view",
"type": "function",
},
{
"inputs": [
{"internalType": "address", "name": "spender", "type": "address"},
{"internalType": "uint256", "name": "amount", "type": "uint256"},
],
"name": "approve",
"outputs": [{"internalType": "bool", "name": "", "type": "bool"}],
"stateMutability": "nonpayable",
"type": "function",
},
{
"inputs": [{"internalType": "address", "name": "user", "type": "address"}],
"name": "balanceOf",
"outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}],
"stateMutability": "view",
"type": "function",
},
{
"inputs": [
{"internalType": "address", "name": "user", "type": "address"},
{
"internalType": "address",
"name": "receiverOfUnderlying",
"type": "address",
},
{"internalType": "uint256", "name": "amount", "type": "uint256"},
{"internalType": "uint256", "name": "index", "type": "uint256"},
],
"name": "burn",
"outputs": [],
"stateMutability": "nonpayable",
"type": "function",
},
{
"inputs": [],
"name": "decimals",
"outputs": [{"internalType": "uint8", "name": "", "type": "uint8"}],
"stateMutability": "view",
"type": "function",
},
{
"inputs": [
{"internalType": "address", "name": "spender", "type": "address"},
{"internalType": "uint256", "name": "subtractedValue", "type": "uint256"},
],
"name": "decreaseAllowance",
"outputs": [{"internalType": "bool", "name": "", "type": "bool"}],
"stateMutability": "nonpayable",
"type": "function",
},
{
"inputs": [],
"name": "getIncentivesController",
"outputs": [
{
"internalType": "contract IAaveIncentivesController",
"name": "",
"type": "address",
}
],
"stateMutability": "view",
"type": "function",
},
{
"inputs": [{"internalType": "address", "name": "user", "type": "address"}],
"name": "getScaledUserBalanceAndSupply",
"outputs": [
{"internalType": "uint256", "name": "", "type": "uint256"},
{"internalType": "uint256", "name": "", "type": "uint256"},
],
"stateMutability": "view",
"type": "function",
},
{
"inputs": [
{"internalType": "address", "name": "spender", "type": "address"},
{"internalType": "uint256", "name": "addedValue", "type": "uint256"},
],
"name": "increaseAllowance",
"outputs": [{"internalType": "bool", "name": "", "type": "bool"}],
"stateMutability": "nonpayable",
"type": "function",
},
{
"inputs": [
{
"internalType": "uint8",
"name": "underlyingAssetDecimals",
"type": "uint8",
},
{"internalType": "string", "name": "tokenName", "type": "string"},
{"internalType": "string", "name": "tokenSymbol", "type": "string"},
],
"name": "initialize",
"outputs": [],
"stateMutability": "nonpayable",
"type": "function",
},
{
"inputs": [
{"internalType": "address", "name": "user", "type": "address"},
{"internalType": "uint256", "name": "amount", "type": "uint256"},
{"internalType": "uint256", "name": "index", "type": "uint256"},
],
"name": "mint",
"outputs": [{"internalType": "bool", "name": "", "type": "bool"}],
"stateMutability": "nonpayable",
"type": "function",
},
{
"inputs": [
{"internalType": "uint256", "name": "amount", "type": "uint256"},
{"internalType": "uint256", "name": "index", "type": "uint256"},
],
"name": "mintToTreasury",
"outputs": [],
"stateMutability": "nonpayable",
"type": "function",
},
{
"inputs": [],
"name": "name",
"outputs": [{"internalType": "string", "name": "", "type": "string"}],
"stateMutability": "view",
"type": "function",
},
{
"inputs": [
{"internalType": "address", "name": "owner", "type": "address"},
{"internalType": "address", "name": "spender", "type": "address"},
{"internalType": "uint256", "name": "value", "type": "uint256"},
{"internalType": "uint256", "name": "deadline", "type": "uint256"},
{"internalType": "uint8", "name": "v", "type": "uint8"},
{"internalType": "bytes32", "name": "r", "type": "bytes32"},
{"internalType": "bytes32", "name": "s", "type": "bytes32"},
],
"name": "permit",
"outputs": [],
"stateMutability": "nonpayable",
"type": "function",
},
{
"inputs": [{"internalType": "address", "name": "user", "type": "address"}],
"name": "scaledBalanceOf",
"outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}],
"stateMutability": "view",
"type": "function",
},
{
"inputs": [],
"name": "scaledTotalSupply",
"outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}],
"stateMutability": "view",
"type": "function",
},
{
"inputs": [],
"name": "symbol",
"outputs": [{"internalType": "string", "name": "", "type": "string"}],
"stateMutability": "view",
"type": "function",
},
{
"inputs": [],
"name": "totalSupply",
"outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}],
"stateMutability": "view",
"type": "function",
},
{
"inputs": [
{"internalType": "address", "name": "recipient", "type": "address"},
{"internalType": "uint256", "name": "amount", "type": "uint256"},
],
"name": "transfer",
"outputs": [{"internalType": "bool", "name": "", "type": "bool"}],
"stateMutability": "nonpayable",
"type": "function",
},
{
"inputs": [
{"internalType": "address", "name": "sender", "type": "address"},
{"internalType": "address", "name": "recipient", "type": "address"},
{"internalType": "uint256", "name": "amount", "type": "uint256"},
],
"name": "transferFrom",
"outputs": [{"internalType": "bool", "name": "", "type": "bool"}],
"stateMutability": "nonpayable",
"type": "function",
},
{
"inputs": [
{"internalType": "address", "name": "from", "type": "address"},
{"internalType": "address", "name": "to", "type": "address"},
{"internalType": "uint256", "name": "value", "type": "uint256"},
],
"name": "transferOnLiquidation",
"outputs": [],
"stateMutability": "nonpayable",
"type": "function",
},
{
"inputs": [
{"internalType": "address", "name": "target", "type": "address"},
{"internalType": "uint256", "name": "amount", "type": "uint256"},
],
"name": "transferUnderlyingTo",
"outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}],
"stateMutability": "nonpayable",
"type": "function",
},
]
# ABI for Curve's on-chain "AddressProvider" contract: a registry entry point
# that maps integer ids to protocol component addresses (id 0 is the main
# registry — see get_registry / get_address).
# NOTE(review): the "gas" keys are Vyper compiler gas estimates emitted with
# the ABI; they are informational only and ignored by web3 encoding.
curve_address_provider_abi = [
    # --- events ---
    {
        "name": "NewAddressIdentifier",
        "inputs": [
            {"type": "uint256", "name": "id", "indexed": True},
            {"type": "address", "name": "addr", "indexed": False},
            {"type": "string", "name": "description", "indexed": False},
        ],
        "anonymous": False,
        "type": "event",
    },
    {
        "name": "AddressModified",
        "inputs": [
            {"type": "uint256", "name": "id", "indexed": True},
            {"type": "address", "name": "new_address", "indexed": False},
            {"type": "uint256", "name": "version", "indexed": False},
        ],
        "anonymous": False,
        "type": "event",
    },
    {
        "name": "CommitNewAdmin",
        "inputs": [
            {"type": "uint256", "name": "deadline", "indexed": True},
            {"type": "address", "name": "admin", "indexed": True},
        ],
        "anonymous": False,
        "type": "event",
    },
    {
        "name": "NewAdmin",
        "inputs": [{"type": "address", "name": "admin", "indexed": True}],
        "anonymous": False,
        "type": "event",
    },
    # --- constructor ---
    {
        "outputs": [],
        "inputs": [{"type": "address", "name": "_admin"}],
        "stateMutability": "nonpayable",
        "type": "constructor",
    },
    # --- view functions ---
    {
        "name": "get_registry",
        "outputs": [{"type": "address", "name": ""}],
        "inputs": [],
        "stateMutability": "view",
        "type": "function",
        "gas": 1061,
    },
    {
        "name": "max_id",
        "outputs": [{"type": "uint256", "name": ""}],
        "inputs": [],
        "stateMutability": "view",
        "type": "function",
        "gas": 1258,
    },
    {
        "name": "get_address",
        "outputs": [{"type": "address", "name": ""}],
        "inputs": [{"type": "uint256", "name": "_id"}],
        "stateMutability": "view",
        "type": "function",
        "gas": 1308,
    },
    # --- admin / mutating functions ---
    {
        "name": "add_new_id",
        "outputs": [{"type": "uint256", "name": ""}],
        "inputs": [
            {"type": "address", "name": "_address"},
            {"type": "string", "name": "_description"},
        ],
        "stateMutability": "nonpayable",
        "type": "function",
        "gas": 291275,
    },
    {
        "name": "set_address",
        "outputs": [{"type": "bool", "name": ""}],
        "inputs": [
            {"type": "uint256", "name": "_id"},
            {"type": "address", "name": "_address"},
        ],
        "stateMutability": "nonpayable",
        "type": "function",
        "gas": 182430,
    },
    {
        "name": "unset_address",
        "outputs": [{"type": "bool", "name": ""}],
        "inputs": [{"type": "uint256", "name": "_id"}],
        "stateMutability": "nonpayable",
        "type": "function",
        "gas": 101348,
    },
    # two-step ownership transfer (commit / apply / revert)
    {
        "name": "commit_transfer_ownership",
        "outputs": [{"type": "bool", "name": ""}],
        "inputs": [{"type": "address", "name": "_new_admin"}],
        "stateMutability": "nonpayable",
        "type": "function",
        "gas": 74048,
    },
    {
        "name": "apply_transfer_ownership",
        "outputs": [{"type": "bool", "name": ""}],
        "inputs": [],
        "stateMutability": "nonpayable",
        "type": "function",
        "gas": 60125,
    },
    {
        "name": "revert_transfer_ownership",
        "outputs": [{"type": "bool", "name": ""}],
        "inputs": [],
        "stateMutability": "nonpayable",
        "type": "function",
        "gas": 21400,
    },
    # --- public storage getters ---
    {
        "name": "admin",
        "outputs": [{"type": "address", "name": ""}],
        "inputs": [],
        "stateMutability": "view",
        "type": "function",
        "gas": 1331,
    },
    {
        "name": "transfer_ownership_deadline",
        "outputs": [{"type": "uint256", "name": ""}],
        "inputs": [],
        "stateMutability": "view",
        "type": "function",
        "gas": 1361,
    },
    {
        "name": "future_admin",
        "outputs": [{"type": "address", "name": ""}],
        "inputs": [],
        "stateMutability": "view",
        "type": "function",
        "gas": 1391,
    },
    {
        "name": "get_id_info",
        "outputs": [
            {"type": "address", "name": "addr"},
            {"type": "bool", "name": "is_active"},
            {"type": "uint256", "name": "version"},
            {"type": "uint256", "name": "last_modified"},
            {"type": "string", "name": "description"},
        ],
        "inputs": [{"type": "uint256", "name": "arg0"}],
        "stateMutability": "view",
        "type": "function",
        "gas": 12168,
    },
]
# ABI of the Curve Registry contract (Vyper): pool discovery
# (find_pool_for_coins, pool_list/pool_count) and per-pool metadata lookups
# (coins, decimals, balances, gauges, fees, A), plus admin-only functions for
# registering/removing pools. The "gas" keys are Vyper compiler gas estimates
# emitted alongside the ABI; web3.py ignores them.
curve_registry_abi = [
    {
        "name": "PoolAdded",
        "inputs": [
            {"type": "address", "name": "pool", "indexed": True},
            {"type": "bytes", "name": "rate_method_id", "indexed": False},
        ],
        "anonymous": False,
        "type": "event",
    },
    {
        "name": "PoolRemoved",
        "inputs": [{"type": "address", "name": "pool", "indexed": True}],
        "anonymous": False,
        "type": "event",
    },
    {
        "outputs": [],
        "inputs": [
            {"type": "address", "name": "_address_provider"},
            {"type": "address", "name": "_gauge_controller"},
        ],
        "stateMutability": "nonpayable",
        "type": "constructor",
    },
    # find_pool_for_coins is overloaded: the 3-arg form takes an index `i`
    # to fetch the i-th matching pool.
    {
        "name": "find_pool_for_coins",
        "outputs": [{"type": "address", "name": ""}],
        "inputs": [
            {"type": "address", "name": "_from"},
            {"type": "address", "name": "_to"},
        ],
        "stateMutability": "view",
        "type": "function",
    },
    {
        "name": "find_pool_for_coins",
        "outputs": [{"type": "address", "name": ""}],
        "inputs": [
            {"type": "address", "name": "_from"},
            {"type": "address", "name": "_to"},
            {"type": "uint256", "name": "i"},
        ],
        "stateMutability": "view",
        "type": "function",
    },
    {
        "name": "get_n_coins",
        "outputs": [{"type": "uint256[2]", "name": ""}],
        "inputs": [{"type": "address", "name": "_pool"}],
        "stateMutability": "view",
        "type": "function",
        "gas": 1704,
    },
    {
        "name": "get_coins",
        "outputs": [{"type": "address[8]", "name": ""}],
        "inputs": [{"type": "address", "name": "_pool"}],
        "stateMutability": "view",
        "type": "function",
        "gas": 12285,
    },
    {
        "name": "get_underlying_coins",
        "outputs": [{"type": "address[8]", "name": ""}],
        "inputs": [{"type": "address", "name": "_pool"}],
        "stateMutability": "view",
        "type": "function",
        "gas": 12347,
    },
    {
        "name": "get_decimals",
        "outputs": [{"type": "uint256[8]", "name": ""}],
        "inputs": [{"type": "address", "name": "_pool"}],
        "stateMutability": "view",
        "type": "function",
        "gas": 8199,
    },
    {
        "name": "get_underlying_decimals",
        "outputs": [{"type": "uint256[8]", "name": ""}],
        "inputs": [{"type": "address", "name": "_pool"}],
        "stateMutability": "view",
        "type": "function",
        "gas": 8261,
    },
    {
        "name": "get_rates",
        "outputs": [{"type": "uint256[8]", "name": ""}],
        "inputs": [{"type": "address", "name": "_pool"}],
        "stateMutability": "view",
        "type": "function",
        "gas": 34780,
    },
    {
        "name": "get_gauges",
        "outputs": [
            {"type": "address[10]", "name": ""},
            {"type": "int128[10]", "name": ""},
        ],
        "inputs": [{"type": "address", "name": "_pool"}],
        "stateMutability": "view",
        "type": "function",
        "gas": 20310,
    },
    {
        "name": "get_balances",
        "outputs": [{"type": "uint256[8]", "name": ""}],
        "inputs": [{"type": "address", "name": "_pool"}],
        "stateMutability": "view",
        "type": "function",
        "gas": 16818,
    },
    {
        "name": "get_underlying_balances",
        "outputs": [{"type": "uint256[8]", "name": ""}],
        "inputs": [{"type": "address", "name": "_pool"}],
        "stateMutability": "view",
        "type": "function",
        "gas": 158953,
    },
    {
        "name": "get_virtual_price_from_lp_token",
        "outputs": [{"type": "uint256", "name": ""}],
        "inputs": [{"type": "address", "name": "_token"}],
        "stateMutability": "view",
        "type": "function",
        "gas": 2080,
    },
    {
        "name": "get_A",
        "outputs": [{"type": "uint256", "name": ""}],
        "inputs": [{"type": "address", "name": "_pool"}],
        "stateMutability": "view",
        "type": "function",
        "gas": 1198,
    },
    {
        "name": "get_parameters",
        "outputs": [
            {"type": "uint256", "name": "A"},
            {"type": "uint256", "name": "future_A"},
            {"type": "uint256", "name": "fee"},
            {"type": "uint256", "name": "admin_fee"},
            {"type": "uint256", "name": "future_fee"},
            {"type": "uint256", "name": "future_admin_fee"},
            {"type": "address", "name": "future_owner"},
            {"type": "uint256", "name": "initial_A"},
            {"type": "uint256", "name": "initial_A_time"},
            {"type": "uint256", "name": "future_A_time"},
        ],
        "inputs": [{"type": "address", "name": "_pool"}],
        "stateMutability": "view",
        "type": "function",
        "gas": 6458,
    },
    {
        "name": "get_fees",
        "outputs": [{"type": "uint256[2]", "name": ""}],
        "inputs": [{"type": "address", "name": "_pool"}],
        "stateMutability": "view",
        "type": "function",
        "gas": 1603,
    },
    {
        "name": "get_admin_balances",
        "outputs": [{"type": "uint256[8]", "name": ""}],
        "inputs": [{"type": "address", "name": "_pool"}],
        "stateMutability": "view",
        "type": "function",
        "gas": 36719,
    },
    {
        "name": "get_coin_indices",
        "outputs": [
            {"type": "int128", "name": ""},
            {"type": "int128", "name": ""},
            {"type": "bool", "name": ""},
        ],
        "inputs": [
            {"type": "address", "name": "_pool"},
            {"type": "address", "name": "_from"},
            {"type": "address", "name": "_to"},
        ],
        "stateMutability": "view",
        "type": "function",
        "gas": 27456,
    },
    {
        "name": "estimate_gas_used",
        "outputs": [{"type": "uint256", "name": ""}],
        "inputs": [
            {"type": "address", "name": "_pool"},
            {"type": "address", "name": "_from"},
            {"type": "address", "name": "_to"},
        ],
        "stateMutability": "view",
        "type": "function",
        "gas": 32329,
    },
    {
        "name": "add_pool",
        "outputs": [],
        "inputs": [
            {"type": "address", "name": "_pool"},
            {"type": "uint256", "name": "_n_coins"},
            {"type": "address", "name": "_lp_token"},
            {"type": "bytes32", "name": "_rate_method_id"},
            {"type": "uint256", "name": "_decimals"},
            {"type": "uint256", "name": "_underlying_decimals"},
            {"type": "bool", "name": "_has_initial_A"},
            {"type": "bool", "name": "_is_v1"},
        ],
        "stateMutability": "nonpayable",
        "type": "function",
        "gas": 10196577,
    },
    {
        "name": "add_pool_without_underlying",
        "outputs": [],
        "inputs": [
            {"type": "address", "name": "_pool"},
            {"type": "uint256", "name": "_n_coins"},
            {"type": "address", "name": "_lp_token"},
            {"type": "bytes32", "name": "_rate_method_id"},
            {"type": "uint256", "name": "_decimals"},
            {"type": "uint256", "name": "_use_rates"},
            {"type": "bool", "name": "_has_initial_A"},
            {"type": "bool", "name": "_is_v1"},
        ],
        "stateMutability": "nonpayable",
        "type": "function",
        "gas": 5590664,
    },
    {
        "name": "add_metapool",
        "outputs": [],
        "inputs": [
            {"type": "address", "name": "_pool"},
            {"type": "uint256", "name": "_n_coins"},
            {"type": "address", "name": "_lp_token"},
            {"type": "uint256", "name": "_decimals"},
        ],
        "stateMutability": "nonpayable",
        "type": "function",
        "gas": 10226976,
    },
    {
        "name": "remove_pool",
        "outputs": [],
        "inputs": [{"type": "address", "name": "_pool"}],
        "stateMutability": "nonpayable",
        "type": "function",
        "gas": 779646579509,
    },
    {
        "name": "set_pool_gas_estimates",
        "outputs": [],
        "inputs": [
            {"type": "address[5]", "name": "_addr"},
            {"type": "uint256[2][5]", "name": "_amount"},
        ],
        "stateMutability": "nonpayable",
        "type": "function",
        "gas": 355578,
    },
    {
        "name": "set_coin_gas_estimates",
        "outputs": [],
        "inputs": [
            {"type": "address[10]", "name": "_addr"},
            {"type": "uint256[10]", "name": "_amount"},
        ],
        "stateMutability": "nonpayable",
        "type": "function",
        "gas": 357165,
    },
    {
        "name": "set_gas_estimate_contract",
        "outputs": [],
        "inputs": [
            {"type": "address", "name": "_pool"},
            {"type": "address", "name": "_estimator"},
        ],
        "stateMutability": "nonpayable",
        "type": "function",
        "gas": 37747,
    },
    {
        "name": "set_liquidity_gauges",
        "outputs": [],
        "inputs": [
            {"type": "address", "name": "_pool"},
            {"type": "address[10]", "name": "_liquidity_gauges"},
        ],
        "stateMutability": "nonpayable",
        "type": "function",
        "gas": 365793,
    },
    {
        "name": "address_provider",
        "outputs": [{"type": "address", "name": ""}],
        "inputs": [],
        "stateMutability": "view",
        "type": "function",
        "gas": 2111,
    },
    {
        "name": "gauge_controller",
        "outputs": [{"type": "address", "name": ""}],
        "inputs": [],
        "stateMutability": "view",
        "type": "function",
        "gas": 2141,
    },
    {
        "name": "pool_list",
        "outputs": [{"type": "address", "name": ""}],
        "inputs": [{"type": "uint256", "name": "arg0"}],
        "stateMutability": "view",
        "type": "function",
        "gas": 2280,
    },
    {
        "name": "pool_count",
        "outputs": [{"type": "uint256", "name": ""}],
        "inputs": [],
        "stateMutability": "view",
        "type": "function",
        "gas": 2201,
    },
    {
        "name": "get_pool_from_lp_token",
        "outputs": [{"type": "address", "name": ""}],
        "inputs": [{"type": "address", "name": "arg0"}],
        "stateMutability": "view",
        "type": "function",
        "gas": 2446,
    },
    {
        "name": "get_lp_token",
        "outputs": [{"type": "address", "name": ""}],
        "inputs": [{"type": "address", "name": "arg0"}],
        "stateMutability": "view",
        "type": "function",
        "gas": 2476,
    },
]
# ABI of a Curve StableSwap pool contract (4-coin Vyper pool, pre-0.2.x
# compiler: entries use "constant"/"payable" instead of "stateMutability").
# Covers exchange/liquidity entry points, parameter governance, and public
# state getters. The "unit": "sec" and "gas" keys are Vyper metadata that
# web3.py ignores.
# NOTE: the original closing line carried dataset-extraction residue
# ("] | /safe_pls_py-...curve_abis.py | ... | pypi") fused to the `]`,
# which made the module unparseable; it is repaired here.
curve_pool_abi = [
    {
        "name": "TokenExchange",
        "inputs": [
            {"type": "address", "name": "buyer", "indexed": True},
            {"type": "int128", "name": "sold_id", "indexed": False},
            {"type": "uint256", "name": "tokens_sold", "indexed": False},
            {"type": "int128", "name": "bought_id", "indexed": False},
            {"type": "uint256", "name": "tokens_bought", "indexed": False},
        ],
        "anonymous": False,
        "type": "event",
    },
    {
        "name": "TokenExchangeUnderlying",
        "inputs": [
            {"type": "address", "name": "buyer", "indexed": True},
            {"type": "int128", "name": "sold_id", "indexed": False},
            {"type": "uint256", "name": "tokens_sold", "indexed": False},
            {"type": "int128", "name": "bought_id", "indexed": False},
            {"type": "uint256", "name": "tokens_bought", "indexed": False},
        ],
        "anonymous": False,
        "type": "event",
    },
    {
        "name": "AddLiquidity",
        "inputs": [
            {"type": "address", "name": "provider", "indexed": True},
            {"type": "uint256[4]", "name": "token_amounts", "indexed": False},
            {"type": "uint256[4]", "name": "fees", "indexed": False},
            {"type": "uint256", "name": "invariant", "indexed": False},
            {"type": "uint256", "name": "token_supply", "indexed": False},
        ],
        "anonymous": False,
        "type": "event",
    },
    {
        "name": "RemoveLiquidity",
        "inputs": [
            {"type": "address", "name": "provider", "indexed": True},
            {"type": "uint256[4]", "name": "token_amounts", "indexed": False},
            {"type": "uint256[4]", "name": "fees", "indexed": False},
            {"type": "uint256", "name": "token_supply", "indexed": False},
        ],
        "anonymous": False,
        "type": "event",
    },
    {
        "name": "RemoveLiquidityImbalance",
        "inputs": [
            {"type": "address", "name": "provider", "indexed": True},
            {"type": "uint256[4]", "name": "token_amounts", "indexed": False},
            {"type": "uint256[4]", "name": "fees", "indexed": False},
            {"type": "uint256", "name": "invariant", "indexed": False},
            {"type": "uint256", "name": "token_supply", "indexed": False},
        ],
        "anonymous": False,
        "type": "event",
    },
    {
        "name": "CommitNewAdmin",
        "inputs": [
            {"type": "uint256", "name": "deadline", "indexed": True, "unit": "sec"},
            {"type": "address", "name": "admin", "indexed": True},
        ],
        "anonymous": False,
        "type": "event",
    },
    {
        "name": "NewAdmin",
        "inputs": [{"type": "address", "name": "admin", "indexed": True}],
        "anonymous": False,
        "type": "event",
    },
    {
        "name": "CommitNewParameters",
        "inputs": [
            {"type": "uint256", "name": "deadline", "indexed": True, "unit": "sec"},
            {"type": "uint256", "name": "A", "indexed": False},
            {"type": "uint256", "name": "fee", "indexed": False},
            {"type": "uint256", "name": "admin_fee", "indexed": False},
        ],
        "anonymous": False,
        "type": "event",
    },
    {
        "name": "NewParameters",
        "inputs": [
            {"type": "uint256", "name": "A", "indexed": False},
            {"type": "uint256", "name": "fee", "indexed": False},
            {"type": "uint256", "name": "admin_fee", "indexed": False},
        ],
        "anonymous": False,
        "type": "event",
    },
    {
        "outputs": [],
        "inputs": [
            {"type": "address[4]", "name": "_coins"},
            {"type": "address[4]", "name": "_underlying_coins"},
            {"type": "address", "name": "_pool_token"},
            {"type": "uint256", "name": "_A"},
            {"type": "uint256", "name": "_fee"},
        ],
        "constant": False,
        "payable": False,
        "type": "constructor",
    },
    {
        "name": "get_virtual_price",
        "outputs": [{"type": "uint256", "name": ""}],
        "inputs": [],
        "constant": True,
        "payable": False,
        "type": "function",
        "gas": 1570535,
    },
    {
        "name": "calc_token_amount",
        "outputs": [{"type": "uint256", "name": ""}],
        "inputs": [
            {"type": "uint256[4]", "name": "amounts"},
            {"type": "bool", "name": "deposit"},
        ],
        "constant": True,
        "payable": False,
        "type": "function",
        "gas": 6103471,
    },
    {
        "name": "add_liquidity",
        "outputs": [],
        "inputs": [
            {"type": "uint256[4]", "name": "amounts"},
            {"type": "uint256", "name": "min_mint_amount"},
        ],
        "constant": False,
        "payable": False,
        "type": "function",
        "gas": 9331701,
    },
    {
        "name": "get_dy",
        "outputs": [{"type": "uint256", "name": ""}],
        "inputs": [
            {"type": "int128", "name": "i"},
            {"type": "int128", "name": "j"},
            {"type": "uint256", "name": "dx"},
        ],
        "constant": True,
        "payable": False,
        "type": "function",
        "gas": 3489637,
    },
    {
        "name": "get_dy_underlying",
        "outputs": [{"type": "uint256", "name": ""}],
        "inputs": [
            {"type": "int128", "name": "i"},
            {"type": "int128", "name": "j"},
            {"type": "uint256", "name": "dx"},
        ],
        "constant": True,
        "payable": False,
        "type": "function",
        "gas": 3489467,
    },
    {
        "name": "exchange",
        "outputs": [],
        "inputs": [
            {"type": "int128", "name": "i"},
            {"type": "int128", "name": "j"},
            {"type": "uint256", "name": "dx"},
            {"type": "uint256", "name": "min_dy"},
        ],
        "constant": False,
        "payable": False,
        "type": "function",
        "gas": 7034253,
    },
    {
        "name": "exchange_underlying",
        "outputs": [],
        "inputs": [
            {"type": "int128", "name": "i"},
            {"type": "int128", "name": "j"},
            {"type": "uint256", "name": "dx"},
            {"type": "uint256", "name": "min_dy"},
        ],
        "constant": False,
        "payable": False,
        "type": "function",
        "gas": 7050488,
    },
    {
        "name": "remove_liquidity",
        "outputs": [],
        "inputs": [
            {"type": "uint256", "name": "_amount"},
            {"type": "uint256[4]", "name": "min_amounts"},
        ],
        "constant": False,
        "payable": False,
        "type": "function",
        "gas": 241191,
    },
    {
        "name": "remove_liquidity_imbalance",
        "outputs": [],
        "inputs": [
            {"type": "uint256[4]", "name": "amounts"},
            {"type": "uint256", "name": "max_burn_amount"},
        ],
        "constant": False,
        "payable": False,
        "type": "function",
        "gas": 9330864,
    },
    {
        "name": "commit_new_parameters",
        "outputs": [],
        "inputs": [
            {"type": "uint256", "name": "amplification"},
            {"type": "uint256", "name": "new_fee"},
            {"type": "uint256", "name": "new_admin_fee"},
        ],
        "constant": False,
        "payable": False,
        "type": "function",
        "gas": 146045,
    },
    {
        "name": "apply_new_parameters",
        "outputs": [],
        "inputs": [],
        "constant": False,
        "payable": False,
        "type": "function",
        "gas": 133452,
    },
    {
        "name": "revert_new_parameters",
        "outputs": [],
        "inputs": [],
        "constant": False,
        "payable": False,
        "type": "function",
        "gas": 21775,
    },
    {
        "name": "commit_transfer_ownership",
        "outputs": [],
        "inputs": [{"type": "address", "name": "_owner"}],
        "constant": False,
        "payable": False,
        "type": "function",
        "gas": 74452,
    },
    {
        "name": "apply_transfer_ownership",
        "outputs": [],
        "inputs": [],
        "constant": False,
        "payable": False,
        "type": "function",
        "gas": 60508,
    },
    {
        "name": "revert_transfer_ownership",
        "outputs": [],
        "inputs": [],
        "constant": False,
        "payable": False,
        "type": "function",
        "gas": 21865,
    },
    {
        "name": "withdraw_admin_fees",
        "outputs": [],
        "inputs": [],
        "constant": False,
        "payable": False,
        "type": "function",
        "gas": 23448,
    },
    {
        "name": "kill_me",
        "outputs": [],
        "inputs": [],
        "constant": False,
        "payable": False,
        "type": "function",
        "gas": 37818,
    },
    {
        "name": "unkill_me",
        "outputs": [],
        "inputs": [],
        "constant": False,
        "payable": False,
        "type": "function",
        "gas": 21955,
    },
    {
        "name": "coins",
        "outputs": [{"type": "address", "name": ""}],
        "inputs": [{"type": "int128", "name": "arg0"}],
        "constant": True,
        "payable": False,
        "type": "function",
        "gas": 2130,
    },
    {
        "name": "underlying_coins",
        "outputs": [{"type": "address", "name": ""}],
        "inputs": [{"type": "int128", "name": "arg0"}],
        "constant": True,
        "payable": False,
        "type": "function",
        "gas": 2160,
    },
    {
        "name": "balances",
        "outputs": [{"type": "uint256", "name": ""}],
        "inputs": [{"type": "int128", "name": "arg0"}],
        "constant": True,
        "payable": False,
        "type": "function",
        "gas": 2190,
    },
    {
        "name": "A",
        "outputs": [{"type": "uint256", "name": ""}],
        "inputs": [],
        "constant": True,
        "payable": False,
        "type": "function",
        "gas": 2021,
    },
    {
        "name": "fee",
        "outputs": [{"type": "uint256", "name": ""}],
        "inputs": [],
        "constant": True,
        "payable": False,
        "type": "function",
        "gas": 2051,
    },
    {
        "name": "admin_fee",
        "outputs": [{"type": "uint256", "name": ""}],
        "inputs": [],
        "constant": True,
        "payable": False,
        "type": "function",
        "gas": 2081,
    },
    {
        "name": "owner",
        "outputs": [{"type": "address", "name": ""}],
        "inputs": [],
        "constant": True,
        "payable": False,
        "type": "function",
        "gas": 2111,
    },
    {
        "name": "admin_actions_deadline",
        "outputs": [{"type": "uint256", "unit": "sec", "name": ""}],
        "inputs": [],
        "constant": True,
        "payable": False,
        "type": "function",
        "gas": 2141,
    },
    {
        "name": "transfer_ownership_deadline",
        "outputs": [{"type": "uint256", "unit": "sec", "name": ""}],
        "inputs": [],
        "constant": True,
        "payable": False,
        "type": "function",
        "gas": 2171,
    },
    {
        "name": "future_A",
        "outputs": [{"type": "uint256", "name": ""}],
        "inputs": [],
        "constant": True,
        "payable": False,
        "type": "function",
        "gas": 2201,
    },
    {
        "name": "future_fee",
        "outputs": [{"type": "uint256", "name": ""}],
        "inputs": [],
        "constant": True,
        "payable": False,
        "type": "function",
        "gas": 2231,
    },
    {
        "name": "future_admin_fee",
        "outputs": [{"type": "uint256", "name": ""}],
        "inputs": [],
        "constant": True,
        "payable": False,
        "type": "function",
        "gas": 2261,
    },
    {
        "name": "future_owner",
        "outputs": [{"type": "address", "name": ""}],
        "inputs": [],
        "constant": True,
        "payable": False,
        "type": "function",
        "gas": 2291,
    },
]
cream_ctoken_abi = [
{
"inputs": [
{"internalType": "address", "name": "underlying_", "type": "address"},
{
"internalType": "contract ComptrollerInterface",
"name": "comptroller_",
"type": "address",
},
{
"internalType": "contract InterestRateModel",
"name": "interestRateModel_",
"type": "address",
},
{
"internalType": "uint256",
"name": "initialExchangeRateMantissa_",
"type": "uint256",
},
{"internalType": "string", "name": "name_", "type": "string"},
{"internalType": "string", "name": "symbol_", "type": "string"},
{"internalType": "uint8", "name": "decimals_", "type": "uint8"},
{"internalType": "address payable", "name": "admin_", "type": "address"},
{"internalType": "address", "name": "implementation_", "type": "address"},
{
"internalType": "bytes",
"name": "becomeImplementationData",
"type": "bytes",
},
],
"payable": False,
"stateMutability": "nonpayable",
"type": "constructor",
},
{
"anonymous": False,
"inputs": [
{
"indexed": False,
"internalType": "uint256",
"name": "cashPrior",
"type": "uint256",
},
{
"indexed": False,
"internalType": "uint256",
"name": "interestAccumulated",
"type": "uint256",
},
{
"indexed": False,
"internalType": "uint256",
"name": "borrowIndex",
"type": "uint256",
},
{
"indexed": False,
"internalType": "uint256",
"name": "totalBorrows",
"type": "uint256",
},
],
"name": "AccrueInterest",
"type": "event",
},
{
"anonymous": False,
"inputs": [
{
"indexed": True,
"internalType": "address",
"name": "owner",
"type": "address",
},
{
"indexed": True,
"internalType": "address",
"name": "spender",
"type": "address",
},
{
"indexed": False,
"internalType": "uint256",
"name": "amount",
"type": "uint256",
},
],
"name": "Approval",
"type": "event",
},
{
"anonymous": False,
"inputs": [
{
"indexed": False,
"internalType": "address",
"name": "borrower",
"type": "address",
},
{
"indexed": False,
"internalType": "uint256",
"name": "borrowAmount",
"type": "uint256",
},
{
"indexed": False,
"internalType": "uint256",
"name": "accountBorrows",
"type": "uint256",
},
{
"indexed": False,
"internalType": "uint256",
"name": "totalBorrows",
"type": "uint256",
},
],
"name": "Borrow",
"type": "event",
},
{
"anonymous": False,
"inputs": [
{
"indexed": False,
"internalType": "uint256",
"name": "error",
"type": "uint256",
},
{
"indexed": False,
"internalType": "uint256",
"name": "info",
"type": "uint256",
},
{
"indexed": False,
"internalType": "uint256",
"name": "detail",
"type": "uint256",
},
],
"name": "Failure",
"type": "event",
},
{
"anonymous": False,
"inputs": [
{
"indexed": False,
"internalType": "address",
"name": "liquidator",
"type": "address",
},
{
"indexed": False,
"internalType": "address",
"name": "borrower",
"type": "address",
},
{
"indexed": False,
"internalType": "uint256",
"name": "repayAmount",
"type": "uint256",
},
{
"indexed": False,
"internalType": "address",
"name": "cTokenCollateral",
"type": "address",
},
{
"indexed": False,
"internalType": "uint256",
"name": "seizeTokens",
"type": "uint256",
},
],
"name": "LiquidateBorrow",
"type": "event",
},
{
"anonymous": False,
"inputs": [
{
"indexed": False,
"internalType": "address",
"name": "minter",
"type": "address",
},
{
"indexed": False,
"internalType": "uint256",
"name": "mintAmount",
"type": "uint256",
},
{
"indexed": False,
"internalType": "uint256",
"name": "mintTokens",
"type": "uint256",
},
],
"name": "Mint",
"type": "event",
},
{
"anonymous": False,
"inputs": [
{
"indexed": False,
"internalType": "address",
"name": "oldAdmin",
"type": "address",
},
{
"indexed": False,
"internalType": "address",
"name": "newAdmin",
"type": "address",
},
],
"name": "NewAdmin",
"type": "event",
},
{
"anonymous": False,
"inputs": [
{
"indexed": False,
"internalType": "contract ComptrollerInterface",
"name": "oldComptroller",
"type": "address",
},
{
"indexed": False,
"internalType": "contract ComptrollerInterface",
"name": "newComptroller",
"type": "address",
},
],
"name": "NewComptroller",
"type": "event",
},
{
"anonymous": False,
"inputs": [
{
"indexed": False,
"internalType": "address",
"name": "oldImplementation",
"type": "address",
},
{
"indexed": False,
"internalType": "address",
"name": "newImplementation",
"type": "address",
},
],
"name": "NewImplementation",
"type": "event",
},
{
"anonymous": False,
"inputs": [
{
"indexed": False,
"internalType": "contract InterestRateModel",
"name": "oldInterestRateModel",
"type": "address",
},
{
"indexed": False,
"internalType": "contract InterestRateModel",
"name": "newInterestRateModel",
"type": "address",
},
],
"name": "NewMarketInterestRateModel",
"type": "event",
},
{
"anonymous": False,
"inputs": [
{
"indexed": False,
"internalType": "address",
"name": "oldPendingAdmin",
"type": "address",
},
{
"indexed": False,
"internalType": "address",
"name": "newPendingAdmin",
"type": "address",
},
],
"name": "NewPendingAdmin",
"type": "event",
},
{
"anonymous": False,
"inputs": [
{
"indexed": False,
"internalType": "uint256",
"name": "oldReserveFactorMantissa",
"type": "uint256",
},
{
"indexed": False,
"internalType": "uint256",
"name": "newReserveFactorMantissa",
"type": "uint256",
},
],
"name": "NewReserveFactor",
"type": "event",
},
{
"anonymous": False,
"inputs": [
{
"indexed": False,
"internalType": "address",
"name": "redeemer",
"type": "address",
},
{
"indexed": False,
"internalType": "uint256",
"name": "redeemAmount",
"type": "uint256",
},
{
"indexed": False,
"internalType": "uint256",
"name": "redeemTokens",
"type": "uint256",
},
],
"name": "Redeem",
"type": "event",
},
{
"anonymous": False,
"inputs": [
{
"indexed": False,
"internalType": "address",
"name": "payer",
"type": "address",
},
{
"indexed": False,
"internalType": "address",
"name": "borrower",
"type": "address",
},
{
"indexed": False,
"internalType": "uint256",
"name": "repayAmount",
"type": "uint256",
},
{
"indexed": False,
"internalType": "uint256",
"name": "accountBorrows",
"type": "uint256",
},
{
"indexed": False,
"internalType": "uint256",
"name": "totalBorrows",
"type": "uint256",
},
],
"name": "RepayBorrow",
"type": "event",
},
{
"anonymous": False,
"inputs": [
{
"indexed": False,
"internalType": "address",
"name": "benefactor",
"type": "address",
},
{
"indexed": False,
"internalType": "uint256",
"name": "addAmount",
"type": "uint256",
},
{
"indexed": False,
"internalType": "uint256",
"name": "newTotalReserves",
"type": "uint256",
},
],
"name": "ReservesAdded",
"type": "event",
},
{
"anonymous": False,
"inputs": [
{
"indexed": False,
"internalType": "address",
"name": "admin",
"type": "address",
},
{
"indexed": False,
"internalType": "uint256",
"name": "reduceAmount",
"type": "uint256",
},
{
"indexed": False,
"internalType": "uint256",
"name": "newTotalReserves",
"type": "uint256",
},
],
"name": "ReservesReduced",
"type": "event",
},
{
"anonymous": False,
"inputs": [
{
"indexed": True,
"internalType": "address",
"name": "from",
"type": "address",
},
{
"indexed": True,
"internalType": "address",
"name": "to",
"type": "address",
},
{
"indexed": False,
"internalType": "uint256",
"name": "amount",
"type": "uint256",
},
],
"name": "Transfer",
"type": "event",
},
{"payable": True, "stateMutability": "payable", "type": "fallback"},
{
"constant": False,
"inputs": [],
"name": "_acceptAdmin",
"outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}],
"payable": False,
"stateMutability": "nonpayable",
"type": "function",
},
{
"constant": False,
"inputs": [{"internalType": "uint256", "name": "addAmount", "type": "uint256"}],
"name": "_addReserves",
"outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}],
"payable": False,
"stateMutability": "nonpayable",
"type": "function",
},
{
"constant": False,
"inputs": [
{"internalType": "uint256", "name": "reduceAmount", "type": "uint256"}
],
"name": "_reduceReserves",
"outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}],
"payable": False,
"stateMutability": "nonpayable",
"type": "function",
},
{
"constant": False,
"inputs": [
{
"internalType": "contract ComptrollerInterface",
"name": "newComptroller",
"type": "address",
}
],
"name": "_setComptroller",
"outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}],
"payable": False,
"stateMutability": "nonpayable",
"type": "function",
},
{
"constant": False,
"inputs": [
{"internalType": "address", "name": "implementation_", "type": "address"},
{"internalType": "bool", "name": "allowResign", "type": "bool"},
{
"internalType": "bytes",
"name": "becomeImplementationData",
"type": "bytes",
},
],
"name": "_setImplementation",
"outputs": [],
"payable": False,
"stateMutability": "nonpayable",
"type": "function",
},
{
"constant": False,
"inputs": [
{
"internalType": "contract InterestRateModel",
"name": "newInterestRateModel",
"type": "address",
}
],
"name": "_setInterestRateModel",
"outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}],
"payable": False,
"stateMutability": "nonpayable",
"type": "function",
},
{
"constant": False,
"inputs": [
{
"internalType": "address payable",
"name": "newPendingAdmin",
"type": "address",
}
],
"name": "_setPendingAdmin",
"outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}],
"payable": False,
"stateMutability": "nonpayable",
"type": "function",
},
{
"constant": False,
"inputs": [
{
"internalType": "uint256",
"name": "newReserveFactorMantissa",
"type": "uint256",
}
],
"name": "_setReserveFactor",
"outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}],
"payable": False,
"stateMutability": "nonpayable",
"type": "function",
},
{
"constant": True,
"inputs": [],
"name": "accrualBlockNumber",
"outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}],
"payable": False,
"stateMutability": "view",
"type": "function",
},
{
"constant": False,
"inputs": [],
"name": "accrueInterest",
"outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}],
"payable": False,
"stateMutability": "nonpayable",
"type": "function",
},
{
"constant": True,
"inputs": [],
"name": "admin",
"outputs": [{"internalType": "address payable", "name": "", "type": "address"}],
"payable": False,
"stateMutability": "view",
"type": "function",
},
{
"constant": True,
"inputs": [
{"internalType": "address", "name": "owner", "type": "address"},
{"internalType": "address", "name": "spender", "type": "address"},
],
"name": "allowance",
"outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}],
"payable": False,
"stateMutability": "view",
"type": "function",
},
{
"constant": False,
"inputs": [
{"internalType": "address", "name": "spender", "type": "address"},
{"internalType": "uint256", "name": "amount", "type": "uint256"},
],
"name": "approve",
"outputs": [{"internalType": "bool", "name": "", "type": "bool"}],
"payable": False,
"stateMutability": "nonpayable",
"type": "function",
},
{
"constant": True,
"inputs": [{"internalType": "address", "name": "owner", "type": "address"}],
"name": "balanceOf",
"outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}],
"payable": False,
"stateMutability": "view",
"type": "function",
},
{
"constant": False,
"inputs": [{"internalType": "address", "name": "owner", "type": "address"}],
"name": "balanceOfUnderlying",
"outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}],
"payable": False,
"stateMutability": "nonpayable",
"type": "function",
},
{
"constant": False,
"inputs": [
{"internalType": "uint256", "name": "borrowAmount", "type": "uint256"}
],
"name": "borrow",
"outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}],
"payable": False,
"stateMutability": "nonpayable",
"type": "function",
},
{
"constant": False,
"inputs": [{"internalType": "address", "name": "account", "type": "address"}],
"name": "borrowBalanceCurrent",
"outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}],
"payable": False,
"stateMutability": "nonpayable",
"type": "function",
},
{
"constant": True,
"inputs": [{"internalType": "address", "name": "account", "type": "address"}],
"name": "borrowBalanceStored",
"outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}],
"payable": False,
"stateMutability": "view",
"type": "function",
},
{
"constant": True,
"inputs": [],
"name": "borrowIndex",
"outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}],
"payable": False,
"stateMutability": "view",
"type": "function",
},
{
"constant": True,
"inputs": [],
"name": "borrowRatePerBlock",
"outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}],
"payable": False,
"stateMutability": "view",
"type": "function",
},
{
"constant": True,
"inputs": [],
"name": "comptroller",
"outputs": [
{
"internalType": "contract ComptrollerInterface",
"name": "",
"type": "address",
}
],
"payable": False,
"stateMutability": "view",
"type": "function",
},
{
"constant": True,
"inputs": [],
"name": "decimals",
"outputs": [{"internalType": "uint8", "name": "", "type": "uint8"}],
"payable": False,
"stateMutability": "view",
"type": "function",
},
{
"constant": False,
"inputs": [{"internalType": "bytes", "name": "data", "type": "bytes"}],
"name": "delegateToImplementation",
"outputs": [{"internalType": "bytes", "name": "", "type": "bytes"}],
"payable": False,
"stateMutability": "nonpayable",
"type": "function",
},
{
"constant": True,
"inputs": [{"internalType": "bytes", "name": "data", "type": "bytes"}],
"name": "delegateToViewImplementation",
"outputs": [{"internalType": "bytes", "name": "", "type": "bytes"}],
"payable": False,
"stateMutability": "view",
"type": "function",
},
{
"constant": False,
"inputs": [],
"name": "exchangeRateCurrent",
"outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}],
"payable": False,
"stateMutability": "nonpayable",
"type": "function",
},
{
"constant": True,
"inputs": [],
"name": "exchangeRateStored",
"outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}],
"payable": False,
"stateMutability": "view",
"type": "function",
},
{
"constant": True,
"inputs": [{"internalType": "address", "name": "account", "type": "address"}],
"name": "getAccountSnapshot",
"outputs": [
{"internalType": "uint256", "name": "", "type": "uint256"},
{"internalType": "uint256", "name": "", "type": "uint256"},
{"internalType": "uint256", "name": "", "type": "uint256"},
{"internalType": "uint256", "name": "", "type": "uint256"},
],
"payable": False,
"stateMutability": "view",
"type": "function",
},
{
"constant": True,
"inputs": [],
"name": "getCash",
"outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}],
"payable": False,
"stateMutability": "view",
"type": "function",
},
{
"constant": True,
"inputs": [],
"name": "implementation",
"outputs": [{"internalType": "address", "name": "", "type": "address"}],
"payable": False,
"stateMutability": "view",
"type": "function",
},
{
"constant": True,
"inputs": [],
"name": "interestRateModel",
"outputs": [
{
"internalType": "contract InterestRateModel",
"name": "",
"type": "address",
}
],
"payable": False,
"stateMutability": "view",
"type": "function",
},
{
"constant": True,
"inputs": [],
"name": "isCToken",
"outputs": [{"internalType": "bool", "name": "", "type": "bool"}],
"payable": False,
"stateMutability": "view",
"type": "function",
},
{
"constant": False,
"inputs": [
{"internalType": "address", "name": "borrower", "type": "address"},
{"internalType": "uint256", "name": "repayAmount", "type": "uint256"},
{
"internalType": "contract CTokenInterface",
"name": "cTokenCollateral",
"type": "address",
},
],
"name": "liquidateBorrow",
"outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}],
"payable": False,
"stateMutability": "nonpayable",
"type": "function",
},
{
"constant": False,
"inputs": [
{"internalType": "uint256", "name": "mintAmount", "type": "uint256"}
],
"name": "mint",
"outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}],
"payable": False,
"stateMutability": "nonpayable",
"type": "function",
},
{
"constant": True,
"inputs": [],
"name": "name",
"outputs": [{"internalType": "string", "name": "", "type": "string"}],
"payable": False,
"stateMutability": "view",
"type": "function",
},
{
"constant": True,
"inputs": [],
"name": "pendingAdmin",
"outputs": [{"internalType": "address payable", "name": "", "type": "address"}],
"payable": False,
"stateMutability": "view",
"type": "function",
},
{
"constant": False,
"inputs": [
{"internalType": "uint256", "name": "redeemTokens", "type": "uint256"}
],
"name": "redeem",
"outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}],
"payable": False,
"stateMutability": "nonpayable",
"type": "function",
},
{
"constant": False,
"inputs": [
{"internalType": "uint256", "name": "redeemAmount", "type": "uint256"}
],
"name": "redeemUnderlying",
"outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}],
"payable": False,
"stateMutability": "nonpayable",
"type": "function",
},
{
"constant": False,
"inputs": [
{"internalType": "uint256", "name": "repayAmount", "type": "uint256"}
],
"name": "repayBorrow",
"outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}],
"payable": False,
"stateMutability": "nonpayable",
"type": "function",
},
{
"constant": True,
"inputs": [],
"name": "reserveFactorMantissa",
"outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}],
"payable": False,
"stateMutability": "view",
"type": "function",
},
{
"constant": False,
"inputs": [
{"internalType": "address", "name": "liquidator", "type": "address"},
{"internalType": "address", "name": "borrower", "type": "address"},
{"internalType": "uint256", "name": "seizeTokens", "type": "uint256"},
],
"name": "seize",
"outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}],
"payable": False,
"stateMutability": "nonpayable",
"type": "function",
},
{
"constant": True,
"inputs": [],
"name": "supplyRatePerBlock",
"outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}],
"payable": False,
"stateMutability": "view",
"type": "function",
},
{
"constant": True,
"inputs": [],
"name": "symbol",
"outputs": [{"internalType": "string", "name": "", "type": "string"}],
"payable": False,
"stateMutability": "view",
"type": "function",
},
{
"constant": True,
"inputs": [],
"name": "totalBorrows",
"outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}],
"payable": False,
"stateMutability": "view",
"type": "function",
},
{
"constant": False,
"inputs": [],
"name": "totalBorrowsCurrent",
"outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}],
"payable": False,
"stateMutability": "nonpayable",
"type": "function",
},
{
"constant": True,
"inputs": [],
"name": "totalReserves",
"outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}],
"payable": False,
"stateMutability": "view",
"type": "function",
},
{
"constant": True,
"inputs": [],
"name": "totalSupply",
"outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}],
"payable": False,
"stateMutability": "view",
"type": "function",
},
{
"constant": False,
"inputs": [
{"internalType": "address", "name": "dst", "type": "address"},
{"internalType": "uint256", "name": "amount", "type": "uint256"},
],
"name": "transfer",
"outputs": [{"internalType": "bool", "name": "", "type": "bool"}],
"payable": False,
"stateMutability": "nonpayable",
"type": "function",
},
{
"constant": False,
"inputs": [
{"internalType": "address", "name": "src", "type": "address"},
{"internalType": "address", "name": "dst", "type": "address"},
{"internalType": "uint256", "name": "amount", "type": "uint256"},
],
"name": "transferFrom",
"outputs": [{"internalType": "bool", "name": "", "type": "bool"}],
"payable": False,
"stateMutability": "nonpayable",
"type": "function",
},
{
"constant": True,
"inputs": [],
"name": "underlying",
"outputs": [{"internalType": "address", "name": "", "type": "address"}],
"payable": False,
"stateMutability": "view",
"type": "function",
},
] | /safe_pls_py-5.4.3-py3-none-any.whl/gnosis/eth/oracles/abis/cream_abis.py | 0.577495 | 0.492249 | cream_abis.py | pypi |
balancer_pool_abi = [
{
"inputs": [],
"payable": False,
"stateMutability": "nonpayable",
"type": "constructor",
},
{
"anonymous": False,
"inputs": [
{
"indexed": True,
"internalType": "address",
"name": "src",
"type": "address",
},
{
"indexed": True,
"internalType": "address",
"name": "dst",
"type": "address",
},
{
"indexed": False,
"internalType": "uint256",
"name": "amt",
"type": "uint256",
},
],
"name": "Approval",
"type": "event",
},
{
"anonymous": True,
"inputs": [
{
"indexed": True,
"internalType": "bytes4",
"name": "sig",
"type": "bytes4",
},
{
"indexed": True,
"internalType": "address",
"name": "caller",
"type": "address",
},
{
"indexed": False,
"internalType": "bytes",
"name": "data",
"type": "bytes",
},
],
"name": "LOG_CALL",
"type": "event",
},
{
"anonymous": False,
"inputs": [
{
"indexed": True,
"internalType": "address",
"name": "caller",
"type": "address",
},
{
"indexed": True,
"internalType": "address",
"name": "tokenOut",
"type": "address",
},
{
"indexed": False,
"internalType": "uint256",
"name": "tokenAmountOut",
"type": "uint256",
},
],
"name": "LOG_EXIT",
"type": "event",
},
{
"anonymous": False,
"inputs": [
{
"indexed": True,
"internalType": "address",
"name": "caller",
"type": "address",
},
{
"indexed": True,
"internalType": "address",
"name": "tokenIn",
"type": "address",
},
{
"indexed": False,
"internalType": "uint256",
"name": "tokenAmountIn",
"type": "uint256",
},
],
"name": "LOG_JOIN",
"type": "event",
},
{
"anonymous": False,
"inputs": [
{
"indexed": True,
"internalType": "address",
"name": "caller",
"type": "address",
},
{
"indexed": True,
"internalType": "address",
"name": "tokenIn",
"type": "address",
},
{
"indexed": True,
"internalType": "address",
"name": "tokenOut",
"type": "address",
},
{
"indexed": False,
"internalType": "uint256",
"name": "tokenAmountIn",
"type": "uint256",
},
{
"indexed": False,
"internalType": "uint256",
"name": "tokenAmountOut",
"type": "uint256",
},
],
"name": "LOG_SWAP",
"type": "event",
},
{
"anonymous": False,
"inputs": [
{
"indexed": True,
"internalType": "address",
"name": "src",
"type": "address",
},
{
"indexed": True,
"internalType": "address",
"name": "dst",
"type": "address",
},
{
"indexed": False,
"internalType": "uint256",
"name": "amt",
"type": "uint256",
},
],
"name": "Transfer",
"type": "event",
},
{
"constant": True,
"inputs": [],
"name": "BONE",
"outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}],
"payable": False,
"stateMutability": "view",
"type": "function",
},
{
"constant": True,
"inputs": [],
"name": "BPOW_PRECISION",
"outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}],
"payable": False,
"stateMutability": "view",
"type": "function",
},
{
"constant": True,
"inputs": [],
"name": "EXIT_FEE",
"outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}],
"payable": False,
"stateMutability": "view",
"type": "function",
},
{
"constant": True,
"inputs": [],
"name": "INIT_POOL_SUPPLY",
"outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}],
"payable": False,
"stateMutability": "view",
"type": "function",
},
{
"constant": True,
"inputs": [],
"name": "MAX_BOUND_TOKENS",
"outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}],
"payable": False,
"stateMutability": "view",
"type": "function",
},
{
"constant": True,
"inputs": [],
"name": "MAX_BPOW_BASE",
"outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}],
"payable": False,
"stateMutability": "view",
"type": "function",
},
{
"constant": True,
"inputs": [],
"name": "MAX_FEE",
"outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}],
"payable": False,
"stateMutability": "view",
"type": "function",
},
{
"constant": True,
"inputs": [],
"name": "MAX_IN_RATIO",
"outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}],
"payable": False,
"stateMutability": "view",
"type": "function",
},
{
"constant": True,
"inputs": [],
"name": "MAX_OUT_RATIO",
"outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}],
"payable": False,
"stateMutability": "view",
"type": "function",
},
{
"constant": True,
"inputs": [],
"name": "MAX_TOTAL_WEIGHT",
"outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}],
"payable": False,
"stateMutability": "view",
"type": "function",
},
{
"constant": True,
"inputs": [],
"name": "MAX_WEIGHT",
"outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}],
"payable": False,
"stateMutability": "view",
"type": "function",
},
{
"constant": True,
"inputs": [],
"name": "MIN_BALANCE",
"outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}],
"payable": False,
"stateMutability": "view",
"type": "function",
},
{
"constant": True,
"inputs": [],
"name": "MIN_BOUND_TOKENS",
"outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}],
"payable": False,
"stateMutability": "view",
"type": "function",
},
{
"constant": True,
"inputs": [],
"name": "MIN_BPOW_BASE",
"outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}],
"payable": False,
"stateMutability": "view",
"type": "function",
},
{
"constant": True,
"inputs": [],
"name": "MIN_FEE",
"outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}],
"payable": False,
"stateMutability": "view",
"type": "function",
},
{
"constant": True,
"inputs": [],
"name": "MIN_WEIGHT",
"outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}],
"payable": False,
"stateMutability": "view",
"type": "function",
},
{
"constant": True,
"inputs": [
{"internalType": "address", "name": "src", "type": "address"},
{"internalType": "address", "name": "dst", "type": "address"},
],
"name": "allowance",
"outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}],
"payable": False,
"stateMutability": "view",
"type": "function",
},
{
"constant": False,
"inputs": [
{"internalType": "address", "name": "dst", "type": "address"},
{"internalType": "uint256", "name": "amt", "type": "uint256"},
],
"name": "approve",
"outputs": [{"internalType": "bool", "name": "", "type": "bool"}],
"payable": False,
"stateMutability": "nonpayable",
"type": "function",
},
{
"constant": True,
"inputs": [{"internalType": "address", "name": "whom", "type": "address"}],
"name": "balanceOf",
"outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}],
"payable": False,
"stateMutability": "view",
"type": "function",
},
{
"constant": False,
"inputs": [
{"internalType": "address", "name": "token", "type": "address"},
{"internalType": "uint256", "name": "balance", "type": "uint256"},
{"internalType": "uint256", "name": "denorm", "type": "uint256"},
],
"name": "bind",
"outputs": [],
"payable": False,
"stateMutability": "nonpayable",
"type": "function",
},
{
"constant": True,
"inputs": [
{"internalType": "uint256", "name": "tokenBalanceIn", "type": "uint256"},
{"internalType": "uint256", "name": "tokenWeightIn", "type": "uint256"},
{"internalType": "uint256", "name": "tokenBalanceOut", "type": "uint256"},
{"internalType": "uint256", "name": "tokenWeightOut", "type": "uint256"},
{"internalType": "uint256", "name": "tokenAmountOut", "type": "uint256"},
{"internalType": "uint256", "name": "swapFee", "type": "uint256"},
],
"name": "calcInGivenOut",
"outputs": [
{"internalType": "uint256", "name": "tokenAmountIn", "type": "uint256"}
],
"payable": False,
"stateMutability": "pure",
"type": "function",
},
{
"constant": True,
"inputs": [
{"internalType": "uint256", "name": "tokenBalanceIn", "type": "uint256"},
{"internalType": "uint256", "name": "tokenWeightIn", "type": "uint256"},
{"internalType": "uint256", "name": "tokenBalanceOut", "type": "uint256"},
{"internalType": "uint256", "name": "tokenWeightOut", "type": "uint256"},
{"internalType": "uint256", "name": "tokenAmountIn", "type": "uint256"},
{"internalType": "uint256", "name": "swapFee", "type": "uint256"},
],
"name": "calcOutGivenIn",
"outputs": [
{"internalType": "uint256", "name": "tokenAmountOut", "type": "uint256"}
],
"payable": False,
"stateMutability": "pure",
"type": "function",
},
{
"constant": True,
"inputs": [
{"internalType": "uint256", "name": "tokenBalanceOut", "type": "uint256"},
{"internalType": "uint256", "name": "tokenWeightOut", "type": "uint256"},
{"internalType": "uint256", "name": "poolSupply", "type": "uint256"},
{"internalType": "uint256", "name": "totalWeight", "type": "uint256"},
{"internalType": "uint256", "name": "tokenAmountOut", "type": "uint256"},
{"internalType": "uint256", "name": "swapFee", "type": "uint256"},
],
"name": "calcPoolInGivenSingleOut",
"outputs": [
{"internalType": "uint256", "name": "poolAmountIn", "type": "uint256"}
],
"payable": False,
"stateMutability": "pure",
"type": "function",
},
{
"constant": True,
"inputs": [
{"internalType": "uint256", "name": "tokenBalanceIn", "type": "uint256"},
{"internalType": "uint256", "name": "tokenWeightIn", "type": "uint256"},
{"internalType": "uint256", "name": "poolSupply", "type": "uint256"},
{"internalType": "uint256", "name": "totalWeight", "type": "uint256"},
{"internalType": "uint256", "name": "tokenAmountIn", "type": "uint256"},
{"internalType": "uint256", "name": "swapFee", "type": "uint256"},
],
"name": "calcPoolOutGivenSingleIn",
"outputs": [
{"internalType": "uint256", "name": "poolAmountOut", "type": "uint256"}
],
"payable": False,
"stateMutability": "pure",
"type": "function",
},
{
"constant": True,
"inputs": [
{"internalType": "uint256", "name": "tokenBalanceIn", "type": "uint256"},
{"internalType": "uint256", "name": "tokenWeightIn", "type": "uint256"},
{"internalType": "uint256", "name": "poolSupply", "type": "uint256"},
{"internalType": "uint256", "name": "totalWeight", "type": "uint256"},
{"internalType": "uint256", "name": "poolAmountOut", "type": "uint256"},
{"internalType": "uint256", "name": "swapFee", "type": "uint256"},
],
"name": "calcSingleInGivenPoolOut",
"outputs": [
{"internalType": "uint256", "name": "tokenAmountIn", "type": "uint256"}
],
"payable": False,
"stateMutability": "pure",
"type": "function",
},
{
"constant": True,
"inputs": [
{"internalType": "uint256", "name": "tokenBalanceOut", "type": "uint256"},
{"internalType": "uint256", "name": "tokenWeightOut", "type": "uint256"},
{"internalType": "uint256", "name": "poolSupply", "type": "uint256"},
{"internalType": "uint256", "name": "totalWeight", "type": "uint256"},
{"internalType": "uint256", "name": "poolAmountIn", "type": "uint256"},
{"internalType": "uint256", "name": "swapFee", "type": "uint256"},
],
"name": "calcSingleOutGivenPoolIn",
"outputs": [
{"internalType": "uint256", "name": "tokenAmountOut", "type": "uint256"}
],
"payable": False,
"stateMutability": "pure",
"type": "function",
},
{
"constant": True,
"inputs": [
{"internalType": "uint256", "name": "tokenBalanceIn", "type": "uint256"},
{"internalType": "uint256", "name": "tokenWeightIn", "type": "uint256"},
{"internalType": "uint256", "name": "tokenBalanceOut", "type": "uint256"},
{"internalType": "uint256", "name": "tokenWeightOut", "type": "uint256"},
{"internalType": "uint256", "name": "swapFee", "type": "uint256"},
],
"name": "calcSpotPrice",
"outputs": [
{"internalType": "uint256", "name": "spotPrice", "type": "uint256"}
],
"payable": False,
"stateMutability": "pure",
"type": "function",
},
{
"constant": True,
"inputs": [],
"name": "decimals",
"outputs": [{"internalType": "uint8", "name": "", "type": "uint8"}],
"payable": False,
"stateMutability": "view",
"type": "function",
},
{
"constant": False,
"inputs": [
{"internalType": "address", "name": "dst", "type": "address"},
{"internalType": "uint256", "name": "amt", "type": "uint256"},
],
"name": "decreaseApproval",
"outputs": [{"internalType": "bool", "name": "", "type": "bool"}],
"payable": False,
"stateMutability": "nonpayable",
"type": "function",
},
{
"constant": False,
"inputs": [
{"internalType": "uint256", "name": "poolAmountIn", "type": "uint256"},
{"internalType": "uint256[]", "name": "minAmountsOut", "type": "uint256[]"},
],
"name": "exitPool",
"outputs": [],
"payable": False,
"stateMutability": "nonpayable",
"type": "function",
},
{
"constant": False,
"inputs": [
{"internalType": "address", "name": "tokenOut", "type": "address"},
{"internalType": "uint256", "name": "tokenAmountOut", "type": "uint256"},
{"internalType": "uint256", "name": "maxPoolAmountIn", "type": "uint256"},
],
"name": "exitswapExternAmountOut",
"outputs": [
{"internalType": "uint256", "name": "poolAmountIn", "type": "uint256"}
],
"payable": False,
"stateMutability": "nonpayable",
"type": "function",
},
{
"constant": False,
"inputs": [
{"internalType": "address", "name": "tokenOut", "type": "address"},
{"internalType": "uint256", "name": "poolAmountIn", "type": "uint256"},
{"internalType": "uint256", "name": "minAmountOut", "type": "uint256"},
],
"name": "exitswapPoolAmountIn",
"outputs": [
{"internalType": "uint256", "name": "tokenAmountOut", "type": "uint256"}
],
"payable": False,
"stateMutability": "nonpayable",
"type": "function",
},
{
"constant": False,
"inputs": [],
"name": "finalize",
"outputs": [],
"payable": False,
"stateMutability": "nonpayable",
"type": "function",
},
{
"constant": True,
"inputs": [{"internalType": "address", "name": "token", "type": "address"}],
"name": "getBalance",
"outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}],
"payable": False,
"stateMutability": "view",
"type": "function",
},
{
"constant": True,
"inputs": [],
"name": "getColor",
"outputs": [{"internalType": "bytes32", "name": "", "type": "bytes32"}],
"payable": False,
"stateMutability": "view",
"type": "function",
},
{
"constant": True,
"inputs": [],
"name": "getController",
"outputs": [{"internalType": "address", "name": "", "type": "address"}],
"payable": False,
"stateMutability": "view",
"type": "function",
},
{
"constant": True,
"inputs": [],
"name": "getCurrentTokens",
"outputs": [
{"internalType": "address[]", "name": "tokens", "type": "address[]"}
],
"payable": False,
"stateMutability": "view",
"type": "function",
},
{
"constant": True,
"inputs": [{"internalType": "address", "name": "token", "type": "address"}],
"name": "getDenormalizedWeight",
"outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}],
"payable": False,
"stateMutability": "view",
"type": "function",
},
{
"constant": True,
"inputs": [],
"name": "getFinalTokens",
"outputs": [
{"internalType": "address[]", "name": "tokens", "type": "address[]"}
],
"payable": False,
"stateMutability": "view",
"type": "function",
},
{
"constant": True,
"inputs": [{"internalType": "address", "name": "token", "type": "address"}],
"name": "getNormalizedWeight",
"outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}],
"payable": False,
"stateMutability": "view",
"type": "function",
},
{
"constant": True,
"inputs": [],
"name": "getNumTokens",
"outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}],
"payable": False,
"stateMutability": "view",
"type": "function",
},
{
"constant": True,
"inputs": [
{"internalType": "address", "name": "tokenIn", "type": "address"},
{"internalType": "address", "name": "tokenOut", "type": "address"},
],
"name": "getSpotPrice",
"outputs": [
{"internalType": "uint256", "name": "spotPrice", "type": "uint256"}
],
"payable": False,
"stateMutability": "view",
"type": "function",
},
{
"constant": True,
"inputs": [
{"internalType": "address", "name": "tokenIn", "type": "address"},
{"internalType": "address", "name": "tokenOut", "type": "address"},
],
"name": "getSpotPriceSansFee",
"outputs": [
{"internalType": "uint256", "name": "spotPrice", "type": "uint256"}
],
"payable": False,
"stateMutability": "view",
"type": "function",
},
{
"constant": True,
"inputs": [],
"name": "getSwapFee",
"outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}],
"payable": False,
"stateMutability": "view",
"type": "function",
},
{
"constant": True,
"inputs": [],
"name": "getTotalDenormalizedWeight",
"outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}],
"payable": False,
"stateMutability": "view",
"type": "function",
},
{
"constant": False,
"inputs": [{"internalType": "address", "name": "token", "type": "address"}],
"name": "gulp",
"outputs": [],
"payable": False,
"stateMutability": "nonpayable",
"type": "function",
},
{
"constant": False,
"inputs": [
{"internalType": "address", "name": "dst", "type": "address"},
{"internalType": "uint256", "name": "amt", "type": "uint256"},
],
"name": "increaseApproval",
"outputs": [{"internalType": "bool", "name": "", "type": "bool"}],
"payable": False,
"stateMutability": "nonpayable",
"type": "function",
},
{
"constant": True,
"inputs": [{"internalType": "address", "name": "t", "type": "address"}],
"name": "isBound",
"outputs": [{"internalType": "bool", "name": "", "type": "bool"}],
"payable": False,
"stateMutability": "view",
"type": "function",
},
{
"constant": True,
"inputs": [],
"name": "isFinalized",
"outputs": [{"internalType": "bool", "name": "", "type": "bool"}],
"payable": False,
"stateMutability": "view",
"type": "function",
},
{
"constant": True,
"inputs": [],
"name": "isPublicSwap",
"outputs": [{"internalType": "bool", "name": "", "type": "bool"}],
"payable": False,
"stateMutability": "view",
"type": "function",
},
{
"constant": False,
"inputs": [
{"internalType": "uint256", "name": "poolAmountOut", "type": "uint256"},
{"internalType": "uint256[]", "name": "maxAmountsIn", "type": "uint256[]"},
],
"name": "joinPool",
"outputs": [],
"payable": False,
"stateMutability": "nonpayable",
"type": "function",
},
{
"constant": False,
"inputs": [
{"internalType": "address", "name": "tokenIn", "type": "address"},
{"internalType": "uint256", "name": "tokenAmountIn", "type": "uint256"},
{"internalType": "uint256", "name": "minPoolAmountOut", "type": "uint256"},
],
"name": "joinswapExternAmountIn",
"outputs": [
{"internalType": "uint256", "name": "poolAmountOut", "type": "uint256"}
],
"payable": False,
"stateMutability": "nonpayable",
"type": "function",
},
{
"constant": False,
"inputs": [
{"internalType": "address", "name": "tokenIn", "type": "address"},
{"internalType": "uint256", "name": "poolAmountOut", "type": "uint256"},
{"internalType": "uint256", "name": "maxAmountIn", "type": "uint256"},
],
"name": "joinswapPoolAmountOut",
"outputs": [
{"internalType": "uint256", "name": "tokenAmountIn", "type": "uint256"}
],
"payable": False,
"stateMutability": "nonpayable",
"type": "function",
},
{
"constant": True,
"inputs": [],
"name": "name",
"outputs": [{"internalType": "string", "name": "", "type": "string"}],
"payable": False,
"stateMutability": "view",
"type": "function",
},
{
"constant": False,
"inputs": [
{"internalType": "address", "name": "token", "type": "address"},
{"internalType": "uint256", "name": "balance", "type": "uint256"},
{"internalType": "uint256", "name": "denorm", "type": "uint256"},
],
"name": "rebind",
"outputs": [],
"payable": False,
"stateMutability": "nonpayable",
"type": "function",
},
{
"constant": False,
"inputs": [{"internalType": "address", "name": "manager", "type": "address"}],
"name": "setController",
"outputs": [],
"payable": False,
"stateMutability": "nonpayable",
"type": "function",
},
{
"constant": False,
"inputs": [{"internalType": "bool", "name": "public_", "type": "bool"}],
"name": "setPublicSwap",
"outputs": [],
"payable": False,
"stateMutability": "nonpayable",
"type": "function",
},
{
"constant": False,
"inputs": [{"internalType": "uint256", "name": "swapFee", "type": "uint256"}],
"name": "setSwapFee",
"outputs": [],
"payable": False,
"stateMutability": "nonpayable",
"type": "function",
},
{
"constant": False,
"inputs": [
{"internalType": "address", "name": "tokenIn", "type": "address"},
{"internalType": "uint256", "name": "tokenAmountIn", "type": "uint256"},
{"internalType": "address", "name": "tokenOut", "type": "address"},
{"internalType": "uint256", "name": "minAmountOut", "type": "uint256"},
{"internalType": "uint256", "name": "maxPrice", "type": "uint256"},
],
"name": "swapExactAmountIn",
"outputs": [
{"internalType": "uint256", "name": "tokenAmountOut", "type": "uint256"},
{"internalType": "uint256", "name": "spotPriceAfter", "type": "uint256"},
],
"payable": False,
"stateMutability": "nonpayable",
"type": "function",
},
{
"constant": False,
"inputs": [
{"internalType": "address", "name": "tokenIn", "type": "address"},
{"internalType": "uint256", "name": "maxAmountIn", "type": "uint256"},
{"internalType": "address", "name": "tokenOut", "type": "address"},
{"internalType": "uint256", "name": "tokenAmountOut", "type": "uint256"},
{"internalType": "uint256", "name": "maxPrice", "type": "uint256"},
],
"name": "swapExactAmountOut",
"outputs": [
{"internalType": "uint256", "name": "tokenAmountIn", "type": "uint256"},
{"internalType": "uint256", "name": "spotPriceAfter", "type": "uint256"},
],
"payable": False,
"stateMutability": "nonpayable",
"type": "function",
},
{
"constant": True,
"inputs": [],
"name": "symbol",
"outputs": [{"internalType": "string", "name": "", "type": "string"}],
"payable": False,
"stateMutability": "view",
"type": "function",
},
{
"constant": True,
"inputs": [],
"name": "totalSupply",
"outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}],
"payable": False,
"stateMutability": "view",
"type": "function",
},
{
"constant": False,
"inputs": [
{"internalType": "address", "name": "dst", "type": "address"},
{"internalType": "uint256", "name": "amt", "type": "uint256"},
],
"name": "transfer",
"outputs": [{"internalType": "bool", "name": "", "type": "bool"}],
"payable": False,
"stateMutability": "nonpayable",
"type": "function",
},
{
"constant": False,
"inputs": [
{"internalType": "address", "name": "src", "type": "address"},
{"internalType": "address", "name": "dst", "type": "address"},
{"internalType": "uint256", "name": "amt", "type": "uint256"},
],
"name": "transferFrom",
"outputs": [{"internalType": "bool", "name": "", "type": "bool"}],
"payable": False,
"stateMutability": "nonpayable",
"type": "function",
},
{
"constant": False,
"inputs": [{"internalType": "address", "name": "token", "type": "address"}],
"name": "unbind",
"outputs": [],
"payable": False,
"stateMutability": "nonpayable",
"type": "function",
},
] | /safe_pls_py-5.4.3-py3-none-any.whl/gnosis/eth/oracles/abis/balancer_abis.py | 0.493897 | 0.450299 | balancer_abis.py | pypi |
YVAULT_ABI = [
{
"inputs": [
{"internalType": "address", "name": "_token", "type": "address"},
{"internalType": "address", "name": "_controller", "type": "address"},
],
"payable": False,
"stateMutability": "nonpayable",
"type": "constructor",
},
{
"anonymous": False,
"inputs": [
{
"indexed": True,
"internalType": "address",
"name": "owner",
"type": "address",
},
{
"indexed": True,
"internalType": "address",
"name": "spender",
"type": "address",
},
{
"indexed": False,
"internalType": "uint256",
"name": "value",
"type": "uint256",
},
],
"name": "Approval",
"type": "event",
},
{
"anonymous": False,
"inputs": [
{
"indexed": True,
"internalType": "address",
"name": "from",
"type": "address",
},
{
"indexed": True,
"internalType": "address",
"name": "to",
"type": "address",
},
{
"indexed": False,
"internalType": "uint256",
"name": "value",
"type": "uint256",
},
],
"name": "Transfer",
"type": "event",
},
{
"constant": True,
"inputs": [
{"internalType": "address", "name": "owner", "type": "address"},
{"internalType": "address", "name": "spender", "type": "address"},
],
"name": "allowance",
"outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}],
"payable": False,
"stateMutability": "view",
"type": "function",
},
{
"constant": False,
"inputs": [
{"internalType": "address", "name": "spender", "type": "address"},
{"internalType": "uint256", "name": "amount", "type": "uint256"},
],
"name": "approve",
"outputs": [{"internalType": "bool", "name": "", "type": "bool"}],
"payable": False,
"stateMutability": "nonpayable",
"type": "function",
},
{
"constant": True,
"inputs": [],
"name": "available",
"outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}],
"payable": False,
"stateMutability": "view",
"type": "function",
},
{
"constant": True,
"inputs": [],
"name": "balance",
"outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}],
"payable": False,
"stateMutability": "view",
"type": "function",
},
{
"constant": True,
"inputs": [{"internalType": "address", "name": "account", "type": "address"}],
"name": "balanceOf",
"outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}],
"payable": False,
"stateMutability": "view",
"type": "function",
},
{
"constant": True,
"inputs": [],
"name": "controller",
"outputs": [{"internalType": "address", "name": "", "type": "address"}],
"payable": False,
"stateMutability": "view",
"type": "function",
},
{
"constant": True,
"inputs": [],
"name": "decimals",
"outputs": [{"internalType": "uint8", "name": "", "type": "uint8"}],
"payable": False,
"stateMutability": "view",
"type": "function",
},
{
"constant": False,
"inputs": [
{"internalType": "address", "name": "spender", "type": "address"},
{"internalType": "uint256", "name": "subtractedValue", "type": "uint256"},
],
"name": "decreaseAllowance",
"outputs": [{"internalType": "bool", "name": "", "type": "bool"}],
"payable": False,
"stateMutability": "nonpayable",
"type": "function",
},
{
"constant": False,
"inputs": [{"internalType": "uint256", "name": "_amount", "type": "uint256"}],
"name": "deposit",
"outputs": [],
"payable": False,
"stateMutability": "nonpayable",
"type": "function",
},
{
"constant": False,
"inputs": [],
"name": "depositAll",
"outputs": [],
"payable": False,
"stateMutability": "nonpayable",
"type": "function",
},
{
"constant": False,
"inputs": [],
"name": "earn",
"outputs": [],
"payable": False,
"stateMutability": "nonpayable",
"type": "function",
},
{
"constant": True,
"inputs": [],
"name": "getPricePerFullShare",
"outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}],
"payable": False,
"stateMutability": "view",
"type": "function",
},
{
"constant": True,
"inputs": [],
"name": "governance",
"outputs": [{"internalType": "address", "name": "", "type": "address"}],
"payable": False,
"stateMutability": "view",
"type": "function",
},
{
"constant": False,
"inputs": [
{"internalType": "address", "name": "spender", "type": "address"},
{"internalType": "uint256", "name": "addedValue", "type": "uint256"},
],
"name": "increaseAllowance",
"outputs": [{"internalType": "bool", "name": "", "type": "bool"}],
"payable": False,
"stateMutability": "nonpayable",
"type": "function",
},
{
"constant": True,
"inputs": [],
"name": "max",
"outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}],
"payable": False,
"stateMutability": "view",
"type": "function",
},
{
"constant": True,
"inputs": [],
"name": "min",
"outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}],
"payable": False,
"stateMutability": "view",
"type": "function",
},
{
"constant": True,
"inputs": [],
"name": "name",
"outputs": [{"internalType": "string", "name": "", "type": "string"}],
"payable": False,
"stateMutability": "view",
"type": "function",
},
{
"constant": False,
"inputs": [
{"internalType": "address", "name": "_controller", "type": "address"}
],
"name": "setController",
"outputs": [],
"payable": False,
"stateMutability": "nonpayable",
"type": "function",
},
{
"constant": False,
"inputs": [
{"internalType": "address", "name": "_governance", "type": "address"}
],
"name": "setGovernance",
"outputs": [],
"payable": False,
"stateMutability": "nonpayable",
"type": "function",
},
{
"constant": False,
"inputs": [{"internalType": "uint256", "name": "_min", "type": "uint256"}],
"name": "setMin",
"outputs": [],
"payable": False,
"stateMutability": "nonpayable",
"type": "function",
},
{
"constant": False,
"inputs": [{"internalType": "string", "name": "_name", "type": "string"}],
"name": "setName",
"outputs": [],
"payable": False,
"stateMutability": "nonpayable",
"type": "function",
},
{
"constant": False,
"inputs": [{"internalType": "string", "name": "_symbol", "type": "string"}],
"name": "setSymbol",
"outputs": [],
"payable": False,
"stateMutability": "nonpayable",
"type": "function",
},
{
"constant": True,
"inputs": [],
"name": "symbol",
"outputs": [{"internalType": "string", "name": "", "type": "string"}],
"payable": False,
"stateMutability": "view",
"type": "function",
},
{
"constant": True,
"inputs": [],
"name": "token",
"outputs": [{"internalType": "contract IERC20", "name": "", "type": "address"}],
"payable": False,
"stateMutability": "view",
"type": "function",
},
{
"constant": True,
"inputs": [],
"name": "totalSupply",
"outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}],
"payable": False,
"stateMutability": "view",
"type": "function",
},
{
"constant": False,
"inputs": [
{"internalType": "address", "name": "recipient", "type": "address"},
{"internalType": "uint256", "name": "amount", "type": "uint256"},
],
"name": "transfer",
"outputs": [{"internalType": "bool", "name": "", "type": "bool"}],
"payable": False,
"stateMutability": "nonpayable",
"type": "function",
},
{
"constant": False,
"inputs": [
{"internalType": "address", "name": "sender", "type": "address"},
{"internalType": "address", "name": "recipient", "type": "address"},
{"internalType": "uint256", "name": "amount", "type": "uint256"},
],
"name": "transferFrom",
"outputs": [{"internalType": "bool", "name": "", "type": "bool"}],
"payable": False,
"stateMutability": "nonpayable",
"type": "function",
},
{
"constant": False,
"inputs": [{"internalType": "uint256", "name": "_shares", "type": "uint256"}],
"name": "withdraw",
"outputs": [],
"payable": False,
"stateMutability": "nonpayable",
"type": "function",
},
{
"constant": False,
"inputs": [],
"name": "withdrawAll",
"outputs": [],
"payable": False,
"stateMutability": "nonpayable",
"type": "function",
},
]
YTOKEN_ABI = [
{
"name": "Transfer",
"inputs": [
{"name": "sender", "type": "address", "indexed": True},
{"name": "receiver", "type": "address", "indexed": True},
{"name": "value", "type": "uint256", "indexed": False},
],
"anonymous": False,
"type": "event",
},
{
"name": "Approval",
"inputs": [
{"name": "owner", "type": "address", "indexed": True},
{"name": "spender", "type": "address", "indexed": True},
{"name": "value", "type": "uint256", "indexed": False},
],
"anonymous": False,
"type": "event",
},
{
"name": "StrategyAdded",
"inputs": [
{"name": "strategy", "type": "address", "indexed": True},
{"name": "debtRatio", "type": "uint256", "indexed": False},
{"name": "minDebtPerHarvest", "type": "uint256", "indexed": False},
{"name": "maxDebtPerHarvest", "type": "uint256", "indexed": False},
{"name": "performanceFee", "type": "uint256", "indexed": False},
],
"anonymous": False,
"type": "event",
},
{
"name": "StrategyReported",
"inputs": [
{"name": "strategy", "type": "address", "indexed": True},
{"name": "gain", "type": "uint256", "indexed": False},
{"name": "loss", "type": "uint256", "indexed": False},
{"name": "debtPaid", "type": "uint256", "indexed": False},
{"name": "totalGain", "type": "uint256", "indexed": False},
{"name": "totalLoss", "type": "uint256", "indexed": False},
{"name": "totalDebt", "type": "uint256", "indexed": False},
{"name": "debtAdded", "type": "uint256", "indexed": False},
{"name": "debtRatio", "type": "uint256", "indexed": False},
],
"anonymous": False,
"type": "event",
},
{
"name": "UpdateGovernance",
"inputs": [{"name": "governance", "type": "address", "indexed": False}],
"anonymous": False,
"type": "event",
},
{
"name": "UpdateManagement",
"inputs": [{"name": "management", "type": "address", "indexed": False}],
"anonymous": False,
"type": "event",
},
{
"name": "UpdateGuestList",
"inputs": [{"name": "guestList", "type": "address", "indexed": False}],
"anonymous": False,
"type": "event",
},
{
"name": "UpdateRewards",
"inputs": [{"name": "rewards", "type": "address", "indexed": False}],
"anonymous": False,
"type": "event",
},
{
"name": "UpdateDepositLimit",
"inputs": [{"name": "depositLimit", "type": "uint256", "indexed": False}],
"anonymous": False,
"type": "event",
},
{
"name": "UpdatePerformanceFee",
"inputs": [{"name": "performanceFee", "type": "uint256", "indexed": False}],
"anonymous": False,
"type": "event",
},
{
"name": "UpdateManagementFee",
"inputs": [{"name": "managementFee", "type": "uint256", "indexed": False}],
"anonymous": False,
"type": "event",
},
{
"name": "UpdateGuardian",
"inputs": [{"name": "guardian", "type": "address", "indexed": False}],
"anonymous": False,
"type": "event",
},
{
"name": "EmergencyShutdown",
"inputs": [{"name": "active", "type": "bool", "indexed": False}],
"anonymous": False,
"type": "event",
},
{
"name": "UpdateWithdrawalQueue",
"inputs": [{"name": "queue", "type": "address[20]", "indexed": False}],
"anonymous": False,
"type": "event",
},
{
"name": "StrategyUpdateDebtRatio",
"inputs": [
{"name": "strategy", "type": "address", "indexed": True},
{"name": "debtRatio", "type": "uint256", "indexed": False},
],
"anonymous": False,
"type": "event",
},
{
"name": "StrategyUpdateMinDebtPerHarvest",
"inputs": [
{"name": "strategy", "type": "address", "indexed": True},
{"name": "minDebtPerHarvest", "type": "uint256", "indexed": False},
],
"anonymous": False,
"type": "event",
},
{
"name": "StrategyUpdateMaxDebtPerHarvest",
"inputs": [
{"name": "strategy", "type": "address", "indexed": True},
{"name": "maxDebtPerHarvest", "type": "uint256", "indexed": False},
],
"anonymous": False,
"type": "event",
},
{
"name": "StrategyUpdatePerformanceFee",
"inputs": [
{"name": "strategy", "type": "address", "indexed": True},
{"name": "performanceFee", "type": "uint256", "indexed": False},
],
"anonymous": False,
"type": "event",
},
{
"name": "StrategyMigrated",
"inputs": [
{"name": "oldVersion", "type": "address", "indexed": True},
{"name": "newVersion", "type": "address", "indexed": True},
],
"anonymous": False,
"type": "event",
},
{
"name": "StrategyRevoked",
"inputs": [{"name": "strategy", "type": "address", "indexed": True}],
"anonymous": False,
"type": "event",
},
{
"name": "StrategyRemovedFromQueue",
"inputs": [{"name": "strategy", "type": "address", "indexed": True}],
"anonymous": False,
"type": "event",
},
{
"name": "StrategyAddedToQueue",
"inputs": [{"name": "strategy", "type": "address", "indexed": True}],
"anonymous": False,
"type": "event",
},
{
"stateMutability": "nonpayable",
"type": "function",
"name": "initialize",
"inputs": [
{"name": "token", "type": "address"},
{"name": "governance", "type": "address"},
{"name": "rewards", "type": "address"},
{"name": "nameOverride", "type": "string"},
{"name": "symbolOverride", "type": "string"},
],
"outputs": [],
},
{
"stateMutability": "nonpayable",
"type": "function",
"name": "initialize",
"inputs": [
{"name": "token", "type": "address"},
{"name": "governance", "type": "address"},
{"name": "rewards", "type": "address"},
{"name": "nameOverride", "type": "string"},
{"name": "symbolOverride", "type": "string"},
{"name": "guardian", "type": "address"},
],
"outputs": [],
},
{
"stateMutability": "pure",
"type": "function",
"name": "apiVersion",
"inputs": [],
"outputs": [{"name": "", "type": "string"}],
"gas": 4546,
},
{
"stateMutability": "nonpayable",
"type": "function",
"name": "setName",
"inputs": [{"name": "name", "type": "string"}],
"outputs": [],
"gas": 107044,
},
{
"stateMutability": "nonpayable",
"type": "function",
"name": "setSymbol",
"inputs": [{"name": "symbol", "type": "string"}],
"outputs": [],
"gas": 71894,
},
{
"stateMutability": "nonpayable",
"type": "function",
"name": "setGovernance",
"inputs": [{"name": "governance", "type": "address"}],
"outputs": [],
"gas": 36365,
},
{
"stateMutability": "nonpayable",
"type": "function",
"name": "acceptGovernance",
"inputs": [],
"outputs": [],
"gas": 37637,
},
{
"stateMutability": "nonpayable",
"type": "function",
"name": "setManagement",
"inputs": [{"name": "management", "type": "address"}],
"outputs": [],
"gas": 37775,
},
{
"stateMutability": "nonpayable",
"type": "function",
"name": "setGuestList",
"inputs": [{"name": "guestList", "type": "address"}],
"outputs": [],
"gas": 37805,
},
{
"stateMutability": "nonpayable",
"type": "function",
"name": "setRewards",
"inputs": [{"name": "rewards", "type": "address"}],
"outputs": [],
"gas": 37835,
},
{
"stateMutability": "nonpayable",
"type": "function",
"name": "setLockedProfitDegration",
"inputs": [{"name": "degration", "type": "uint256"}],
"outputs": [],
"gas": 36519,
},
{
"stateMutability": "nonpayable",
"type": "function",
"name": "setDepositLimit",
"inputs": [{"name": "limit", "type": "uint256"}],
"outputs": [],
"gas": 37795,
},
{
"stateMutability": "nonpayable",
"type": "function",
"name": "setPerformanceFee",
"inputs": [{"name": "fee", "type": "uint256"}],
"outputs": [],
"gas": 37929,
},
{
"stateMutability": "nonpayable",
"type": "function",
"name": "setManagementFee",
"inputs": [{"name": "fee", "type": "uint256"}],
"outputs": [],
"gas": 37959,
},
{
"stateMutability": "nonpayable",
"type": "function",
"name": "setGuardian",
"inputs": [{"name": "guardian", "type": "address"}],
"outputs": [],
"gas": 39203,
},
{
"stateMutability": "nonpayable",
"type": "function",
"name": "setEmergencyShutdown",
"inputs": [{"name": "active", "type": "bool"}],
"outputs": [],
"gas": 39274,
},
{
"stateMutability": "nonpayable",
"type": "function",
"name": "setWithdrawalQueue",
"inputs": [{"name": "queue", "type": "address[20]"}],
"outputs": [],
"gas": 763950,
},
{
"stateMutability": "nonpayable",
"type": "function",
"name": "transfer",
"inputs": [
{"name": "receiver", "type": "address"},
{"name": "amount", "type": "uint256"},
],
"outputs": [{"name": "", "type": "bool"}],
"gas": 76768,
},
{
"stateMutability": "nonpayable",
"type": "function",
"name": "transferFrom",
"inputs": [
{"name": "sender", "type": "address"},
{"name": "receiver", "type": "address"},
{"name": "amount", "type": "uint256"},
],
"outputs": [{"name": "", "type": "bool"}],
"gas": 116531,
},
{
"stateMutability": "nonpayable",
"type": "function",
"name": "approve",
"inputs": [
{"name": "spender", "type": "address"},
{"name": "amount", "type": "uint256"},
],
"outputs": [{"name": "", "type": "bool"}],
"gas": 38271,
},
{
"stateMutability": "nonpayable",
"type": "function",
"name": "increaseAllowance",
"inputs": [
{"name": "spender", "type": "address"},
{"name": "amount", "type": "uint256"},
],
"outputs": [{"name": "", "type": "bool"}],
"gas": 40312,
},
{
"stateMutability": "nonpayable",
"type": "function",
"name": "decreaseAllowance",
"inputs": [
{"name": "spender", "type": "address"},
{"name": "amount", "type": "uint256"},
],
"outputs": [{"name": "", "type": "bool"}],
"gas": 40336,
},
{
"stateMutability": "nonpayable",
"type": "function",
"name": "permit",
"inputs": [
{"name": "owner", "type": "address"},
{"name": "spender", "type": "address"},
{"name": "amount", "type": "uint256"},
{"name": "expiry", "type": "uint256"},
{"name": "signature", "type": "bytes"},
],
"outputs": [{"name": "", "type": "bool"}],
"gas": 81264,
},
{
"stateMutability": "view",
"type": "function",
"name": "totalAssets",
"inputs": [],
"outputs": [{"name": "", "type": "uint256"}],
"gas": 4098,
},
{
"stateMutability": "nonpayable",
"type": "function",
"name": "deposit",
"inputs": [],
"outputs": [{"name": "", "type": "uint256"}],
},
{
"stateMutability": "nonpayable",
"type": "function",
"name": "deposit",
"inputs": [{"name": "_amount", "type": "uint256"}],
"outputs": [{"name": "", "type": "uint256"}],
},
{
"stateMutability": "nonpayable",
"type": "function",
"name": "deposit",
"inputs": [
{"name": "_amount", "type": "uint256"},
{"name": "recipient", "type": "address"},
],
"outputs": [{"name": "", "type": "uint256"}],
},
{
"stateMutability": "view",
"type": "function",
"name": "maxAvailableShares",
"inputs": [],
"outputs": [{"name": "", "type": "uint256"}],
"gas": 383839,
},
{
"stateMutability": "nonpayable",
"type": "function",
"name": "withdraw",
"inputs": [],
"outputs": [{"name": "", "type": "uint256"}],
},
{
"stateMutability": "nonpayable",
"type": "function",
"name": "withdraw",
"inputs": [{"name": "maxShares", "type": "uint256"}],
"outputs": [{"name": "", "type": "uint256"}],
},
{
"stateMutability": "nonpayable",
"type": "function",
"name": "withdraw",
"inputs": [
{"name": "maxShares", "type": "uint256"},
{"name": "recipient", "type": "address"},
],
"outputs": [{"name": "", "type": "uint256"}],
},
{
"stateMutability": "nonpayable",
"type": "function",
"name": "withdraw",
"inputs": [
{"name": "maxShares", "type": "uint256"},
{"name": "recipient", "type": "address"},
{"name": "maxLoss", "type": "uint256"},
],
"outputs": [{"name": "", "type": "uint256"}],
},
{
"stateMutability": "view",
"type": "function",
"name": "pricePerShare",
"inputs": [],
"outputs": [{"name": "", "type": "uint256"}],
"gas": 18195,
},
{
"stateMutability": "nonpayable",
"type": "function",
"name": "addStrategy",
"inputs": [
{"name": "strategy", "type": "address"},
{"name": "debtRatio", "type": "uint256"},
{"name": "minDebtPerHarvest", "type": "uint256"},
{"name": "maxDebtPerHarvest", "type": "uint256"},
{"name": "performanceFee", "type": "uint256"},
],
"outputs": [],
"gas": 1485796,
},
{
"stateMutability": "nonpayable",
"type": "function",
"name": "updateStrategyDebtRatio",
"inputs": [
{"name": "strategy", "type": "address"},
{"name": "debtRatio", "type": "uint256"},
],
"outputs": [],
"gas": 115193,
},
{
"stateMutability": "nonpayable",
"type": "function",
"name": "updateStrategyMinDebtPerHarvest",
"inputs": [
{"name": "strategy", "type": "address"},
{"name": "minDebtPerHarvest", "type": "uint256"},
],
"outputs": [],
"gas": 42441,
},
{
"stateMutability": "nonpayable",
"type": "function",
"name": "updateStrategyMaxDebtPerHarvest",
"inputs": [
{"name": "strategy", "type": "address"},
{"name": "maxDebtPerHarvest", "type": "uint256"},
],
"outputs": [],
"gas": 42471,
},
{
"stateMutability": "nonpayable",
"type": "function",
"name": "updateStrategyPerformanceFee",
"inputs": [
{"name": "strategy", "type": "address"},
{"name": "performanceFee", "type": "uint256"},
],
"outputs": [],
"gas": 41251,
},
{
"stateMutability": "nonpayable",
"type": "function",
"name": "migrateStrategy",
"inputs": [
{"name": "oldVersion", "type": "address"},
{"name": "newVersion", "type": "address"},
],
"outputs": [],
"gas": 1141468,
},
{
"stateMutability": "nonpayable",
"type": "function",
"name": "revokeStrategy",
"inputs": [],
"outputs": [],
},
{
"stateMutability": "nonpayable",
"type": "function",
"name": "revokeStrategy",
"inputs": [{"name": "strategy", "type": "address"}],
"outputs": [],
},
{
"stateMutability": "nonpayable",
"type": "function",
"name": "addStrategyToQueue",
"inputs": [{"name": "strategy", "type": "address"}],
"outputs": [],
"gas": 1199804,
},
{
"stateMutability": "nonpayable",
"type": "function",
"name": "removeStrategyFromQueue",
"inputs": [{"name": "strategy", "type": "address"}],
"outputs": [],
"gas": 23088703,
},
{
"stateMutability": "view",
"type": "function",
"name": "debtOutstanding",
"inputs": [],
"outputs": [{"name": "", "type": "uint256"}],
},
{
"stateMutability": "view",
"type": "function",
"name": "debtOutstanding",
"inputs": [{"name": "strategy", "type": "address"}],
"outputs": [{"name": "", "type": "uint256"}],
},
{
"stateMutability": "view",
"type": "function",
"name": "creditAvailable",
"inputs": [],
"outputs": [{"name": "", "type": "uint256"}],
},
{
"stateMutability": "view",
"type": "function",
"name": "creditAvailable",
"inputs": [{"name": "strategy", "type": "address"}],
"outputs": [{"name": "", "type": "uint256"}],
},
{
"stateMutability": "view",
"type": "function",
"name": "availableDepositLimit",
"inputs": [],
"outputs": [{"name": "", "type": "uint256"}],
"gas": 9551,
},
{
"stateMutability": "view",
"type": "function",
"name": "expectedReturn",
"inputs": [],
"outputs": [{"name": "", "type": "uint256"}],
},
{
"stateMutability": "view",
"type": "function",
"name": "expectedReturn",
"inputs": [{"name": "strategy", "type": "address"}],
"outputs": [{"name": "", "type": "uint256"}],
},
{
"stateMutability": "nonpayable",
"type": "function",
"name": "report",
"inputs": [
{"name": "gain", "type": "uint256"},
{"name": "loss", "type": "uint256"},
{"name": "_debtPayment", "type": "uint256"},
],
"outputs": [{"name": "", "type": "uint256"}],
"gas": 1015170,
},
{
"stateMutability": "nonpayable",
"type": "function",
"name": "sweep",
"inputs": [{"name": "token", "type": "address"}],
"outputs": [],
},
{
"stateMutability": "nonpayable",
"type": "function",
"name": "sweep",
"inputs": [
{"name": "token", "type": "address"},
{"name": "amount", "type": "uint256"},
],
"outputs": [],
},
{
"stateMutability": "view",
"type": "function",
"name": "name",
"inputs": [],
"outputs": [{"name": "", "type": "string"}],
"gas": 8750,
},
{
"stateMutability": "view",
"type": "function",
"name": "symbol",
"inputs": [],
"outputs": [{"name": "", "type": "string"}],
"gas": 7803,
},
{
"stateMutability": "view",
"type": "function",
"name": "decimals",
"inputs": [],
"outputs": [{"name": "", "type": "uint256"}],
"gas": 2408,
},
{
"stateMutability": "view",
"type": "function",
"name": "precisionFactor",
"inputs": [],
"outputs": [{"name": "", "type": "uint256"}],
"gas": 2438,
},
{
"stateMutability": "view",
"type": "function",
"name": "balanceOf",
"inputs": [{"name": "arg0", "type": "address"}],
"outputs": [{"name": "", "type": "uint256"}],
"gas": 2683,
},
{
"stateMutability": "view",
"type": "function",
"name": "allowance",
"inputs": [
{"name": "arg0", "type": "address"},
{"name": "arg1", "type": "address"},
],
"outputs": [{"name": "", "type": "uint256"}],
"gas": 2928,
},
{
"stateMutability": "view",
"type": "function",
"name": "totalSupply",
"inputs": [],
"outputs": [{"name": "", "type": "uint256"}],
"gas": 2528,
},
{
"stateMutability": "view",
"type": "function",
"name": "token",
"inputs": [],
"outputs": [{"name": "", "type": "address"}],
"gas": 2558,
},
{
"stateMutability": "view",
"type": "function",
"name": "governance",
"inputs": [],
"outputs": [{"name": "", "type": "address"}],
"gas": 2588,
},
{
"stateMutability": "view",
"type": "function",
"name": "management",
"inputs": [],
"outputs": [{"name": "", "type": "address"}],
"gas": 2618,
},
{
"stateMutability": "view",
"type": "function",
"name": "guardian",
"inputs": [],
"outputs": [{"name": "", "type": "address"}],
"gas": 2648,
},
{
"stateMutability": "view",
"type": "function",
"name": "guestList",
"inputs": [],
"outputs": [{"name": "", "type": "address"}],
"gas": 2678,
},
{
"stateMutability": "view",
"type": "function",
"name": "strategies",
"inputs": [{"name": "arg0", "type": "address"}],
"outputs": [
{"name": "performanceFee", "type": "uint256"},
{"name": "activation", "type": "uint256"},
{"name": "debtRatio", "type": "uint256"},
{"name": "minDebtPerHarvest", "type": "uint256"},
{"name": "maxDebtPerHarvest", "type": "uint256"},
{"name": "lastReport", "type": "uint256"},
{"name": "totalDebt", "type": "uint256"},
{"name": "totalGain", "type": "uint256"},
{"name": "totalLoss", "type": "uint256"},
],
"gas": 11031,
},
{
"stateMutability": "view",
"type": "function",
"name": "withdrawalQueue",
"inputs": [{"name": "arg0", "type": "uint256"}],
"outputs": [{"name": "", "type": "address"}],
"gas": 2847,
},
{
"stateMutability": "view",
"type": "function",
"name": "emergencyShutdown",
"inputs": [],
"outputs": [{"name": "", "type": "bool"}],
"gas": 2768,
},
{
"stateMutability": "view",
"type": "function",
"name": "depositLimit",
"inputs": [],
"outputs": [{"name": "", "type": "uint256"}],
"gas": 2798,
},
{
"stateMutability": "view",
"type": "function",
"name": "debtRatio",
"inputs": [],
"outputs": [{"name": "", "type": "uint256"}],
"gas": 2828,
},
{
"stateMutability": "view",
"type": "function",
"name": "totalDebt",
"inputs": [],
"outputs": [{"name": "", "type": "uint256"}],
"gas": 2858,
},
{
"stateMutability": "view",
"type": "function",
"name": "lastReport",
"inputs": [],
"outputs": [{"name": "", "type": "uint256"}],
"gas": 2888,
},
{
"stateMutability": "view",
"type": "function",
"name": "activation",
"inputs": [],
"outputs": [{"name": "", "type": "uint256"}],
"gas": 2918,
},
{
"stateMutability": "view",
"type": "function",
"name": "lockedProfit",
"inputs": [],
"outputs": [{"name": "", "type": "uint256"}],
"gas": 2948,
},
{
"stateMutability": "view",
"type": "function",
"name": "lockedProfitDegration",
"inputs": [],
"outputs": [{"name": "", "type": "uint256"}],
"gas": 2978,
},
{
"stateMutability": "view",
"type": "function",
"name": "rewards",
"inputs": [],
"outputs": [{"name": "", "type": "address"}],
"gas": 3008,
},
{
"stateMutability": "view",
"type": "function",
"name": "managementFee",
"inputs": [],
"outputs": [{"name": "", "type": "uint256"}],
"gas": 3038,
},
{
"stateMutability": "view",
"type": "function",
"name": "performanceFee",
"inputs": [],
"outputs": [{"name": "", "type": "uint256"}],
"gas": 3068,
},
{
"stateMutability": "view",
"type": "function",
"name": "nonces",
"inputs": [{"name": "arg0", "type": "address"}],
"outputs": [{"name": "", "type": "uint256"}],
"gas": 3313,
},
{
"stateMutability": "view",
"type": "function",
"name": "DOMAIN_SEPARATOR",
"inputs": [],
"outputs": [{"name": "", "type": "bytes32"}],
"gas": 3128,
},
]
# ABI of the Multicall3 contract. It exposes ``aggregate``-style functions that
# batch many (target, callData) pairs into one call, plus view helpers for
# block/chain metadata (basefee, chain id, block hash/timestamp, balances, ...).
multicall_v3_abi = [
    # aggregate(Call[]) -> (blockNumber, returnData[]): batch calls, raw results.
    {
        "inputs": [
            {
                "components": [
                    {"internalType": "address", "name": "target", "type": "address"},
                    {"internalType": "bytes", "name": "callData", "type": "bytes"},
                ],
                "internalType": "struct Multicall3.Call[]",
                "name": "calls",
                "type": "tuple[]",
            }
        ],
        "name": "aggregate",
        "outputs": [
            {"internalType": "uint256", "name": "blockNumber", "type": "uint256"},
            {"internalType": "bytes[]", "name": "returnData", "type": "bytes[]"},
        ],
        "stateMutability": "payable",
        "type": "function",
    },
    # aggregate3(Call3[]) -> Result[]: per-call ``allowFailure`` flag and a
    # (success, returnData) pair per call.
    {
        "inputs": [
            {
                "components": [
                    {"internalType": "address", "name": "target", "type": "address"},
                    {"internalType": "bool", "name": "allowFailure", "type": "bool"},
                    {"internalType": "bytes", "name": "callData", "type": "bytes"},
                ],
                "internalType": "struct Multicall3.Call3[]",
                "name": "calls",
                "type": "tuple[]",
            }
        ],
        "name": "aggregate3",
        "outputs": [
            {
                "components": [
                    {"internalType": "bool", "name": "success", "type": "bool"},
                    {"internalType": "bytes", "name": "returnData", "type": "bytes"},
                ],
                "internalType": "struct Multicall3.Result[]",
                "name": "returnData",
                "type": "tuple[]",
            }
        ],
        "stateMutability": "payable",
        "type": "function",
    },
    # aggregate3Value(Call3Value[]) -> Result[]: like aggregate3 but each call
    # also carries an ether ``value`` to forward.
    {
        "inputs": [
            {
                "components": [
                    {"internalType": "address", "name": "target", "type": "address"},
                    {"internalType": "bool", "name": "allowFailure", "type": "bool"},
                    {"internalType": "uint256", "name": "value", "type": "uint256"},
                    {"internalType": "bytes", "name": "callData", "type": "bytes"},
                ],
                "internalType": "struct Multicall3.Call3Value[]",
                "name": "calls",
                "type": "tuple[]",
            }
        ],
        "name": "aggregate3Value",
        "outputs": [
            {
                "components": [
                    {"internalType": "bool", "name": "success", "type": "bool"},
                    {"internalType": "bytes", "name": "returnData", "type": "bytes"},
                ],
                "internalType": "struct Multicall3.Result[]",
                "name": "returnData",
                "type": "tuple[]",
            }
        ],
        "stateMutability": "payable",
        "type": "function",
    },
    # blockAndAggregate(Call[]) -> (blockNumber, blockHash, Result[]).
    {
        "inputs": [
            {
                "components": [
                    {"internalType": "address", "name": "target", "type": "address"},
                    {"internalType": "bytes", "name": "callData", "type": "bytes"},
                ],
                "internalType": "struct Multicall3.Call[]",
                "name": "calls",
                "type": "tuple[]",
            }
        ],
        "name": "blockAndAggregate",
        "outputs": [
            {"internalType": "uint256", "name": "blockNumber", "type": "uint256"},
            {"internalType": "bytes32", "name": "blockHash", "type": "bytes32"},
            {
                "components": [
                    {"internalType": "bool", "name": "success", "type": "bool"},
                    {"internalType": "bytes", "name": "returnData", "type": "bytes"},
                ],
                "internalType": "struct Multicall3.Result[]",
                "name": "returnData",
                "type": "tuple[]",
            },
        ],
        "stateMutability": "payable",
        "type": "function",
    },
    # Read-only block/chain metadata helpers below.
    {
        "inputs": [],
        "name": "getBasefee",
        "outputs": [{"internalType": "uint256", "name": "basefee", "type": "uint256"}],
        "stateMutability": "view",
        "type": "function",
    },
    {
        "inputs": [
            {"internalType": "uint256", "name": "blockNumber", "type": "uint256"}
        ],
        "name": "getBlockHash",
        "outputs": [
            {"internalType": "bytes32", "name": "blockHash", "type": "bytes32"}
        ],
        "stateMutability": "view",
        "type": "function",
    },
    {
        "inputs": [],
        "name": "getBlockNumber",
        "outputs": [
            {"internalType": "uint256", "name": "blockNumber", "type": "uint256"}
        ],
        "stateMutability": "view",
        "type": "function",
    },
    {
        "inputs": [],
        "name": "getChainId",
        "outputs": [{"internalType": "uint256", "name": "chainid", "type": "uint256"}],
        "stateMutability": "view",
        "type": "function",
    },
    {
        "inputs": [],
        "name": "getCurrentBlockCoinbase",
        "outputs": [{"internalType": "address", "name": "coinbase", "type": "address"}],
        "stateMutability": "view",
        "type": "function",
    },
    {
        "inputs": [],
        "name": "getCurrentBlockDifficulty",
        "outputs": [
            {"internalType": "uint256", "name": "difficulty", "type": "uint256"}
        ],
        "stateMutability": "view",
        "type": "function",
    },
    {
        "inputs": [],
        "name": "getCurrentBlockGasLimit",
        "outputs": [{"internalType": "uint256", "name": "gaslimit", "type": "uint256"}],
        "stateMutability": "view",
        "type": "function",
    },
    {
        "inputs": [],
        "name": "getCurrentBlockTimestamp",
        "outputs": [
            {"internalType": "uint256", "name": "timestamp", "type": "uint256"}
        ],
        "stateMutability": "view",
        "type": "function",
    },
    # getEthBalance(addr): native-token balance of an arbitrary address.
    {
        "inputs": [{"internalType": "address", "name": "addr", "type": "address"}],
        "name": "getEthBalance",
        "outputs": [{"internalType": "uint256", "name": "balance", "type": "uint256"}],
        "stateMutability": "view",
        "type": "function",
    },
    {
        "inputs": [],
        "name": "getLastBlockHash",
        "outputs": [
            {"internalType": "bytes32", "name": "blockHash", "type": "bytes32"}
        ],
        "stateMutability": "view",
        "type": "function",
    },
    # tryAggregate(requireSuccess, Call[]) -> Result[]: a single boolean toggles
    # whether individual call failures are tolerated for the whole batch.
    {
        "inputs": [
            {"internalType": "bool", "name": "requireSuccess", "type": "bool"},
            {
                "components": [
                    {"internalType": "address", "name": "target", "type": "address"},
                    {"internalType": "bytes", "name": "callData", "type": "bytes"},
                ],
                "internalType": "struct Multicall3.Call[]",
                "name": "calls",
                "type": "tuple[]",
            },
        ],
        "name": "tryAggregate",
        "outputs": [
            {
                "components": [
                    {"internalType": "bool", "name": "success", "type": "bool"},
                    {"internalType": "bytes", "name": "returnData", "type": "bytes"},
                ],
                "internalType": "struct Multicall3.Result[]",
                "name": "returnData",
                "type": "tuple[]",
            }
        ],
        "stateMutability": "payable",
        "type": "function",
    },
    # tryBlockAndAggregate: tryAggregate plus the block number/hash the batch
    # executed in.
    {
        "inputs": [
            {"internalType": "bool", "name": "requireSuccess", "type": "bool"},
            {
                "components": [
                    {"internalType": "address", "name": "target", "type": "address"},
                    {"internalType": "bytes", "name": "callData", "type": "bytes"},
                ],
                "internalType": "struct Multicall3.Call[]",
                "name": "calls",
                "type": "tuple[]",
            },
        ],
        "name": "tryBlockAndAggregate",
        "outputs": [
            {"internalType": "uint256", "name": "blockNumber", "type": "uint256"},
            {"internalType": "bytes32", "name": "blockHash", "type": "bytes32"},
            {
                "components": [
                    {"internalType": "bool", "name": "success", "type": "bool"},
                    {"internalType": "bytes", "name": "returnData", "type": "bytes"},
                ],
                "internalType": "struct Multicall3.Result[]",
                "name": "returnData",
                "type": "tuple[]",
            },
        ],
        "stateMutability": "payable",
        "type": "function",
    },
]
multicall_v3_bytecode = b"`\x80`@R4\x80\x15a\x00\x10W`\x00\x80\xfd[Pa\x0e\xe0\x80a\x00 `\x009`\x00\xf3\xfe`\x80`@R`\x046\x10a\x00\xf3W`\x005`\xe0\x1c\x80cM#\x01\xcc\x11a\x00\x8aW\x80c\xa8\xb0WN\x11a\x00YW\x80c\xa8\xb0WN\x14a\x02ZW\x80c\xbc\xe3\x8b\xd7\x14a\x02uW\x80c\xc3\x07\x7f\xa9\x14a\x02\x88W\x80c\xee\x82\xac^\x14a\x02\x9bW`\x00\x80\xfd[\x80cM#\x01\xcc\x14a\x01\xecW\x80crB]\x9d\x14a\x02!W\x80c\x82\xadV\xcb\x14a\x024W\x80c\x86\xd5\x16\xe8\x14a\x02GW`\x00\x80\xfd[\x80c4\x08\xe4p\x11a\x00\xc6W\x80c4\x08\xe4p\x14a\x01\x91W\x80c9\x95B\xe9\x14a\x01\xa4W\x80c>d\xa6\x96\x14a\x01\xc6W\x80cB\xcb\xb1\\\x14a\x01\xd9W`\x00\x80\xfd[\x80c\x0f(\xc9}\x14a\x00\xf8W\x80c\x17M\xeaq\x14a\x01\x1aW\x80c%-\xbaB\x14a\x01:W\x80c'\xe8mn\x14a\x01[W[`\x00\x80\xfd[4\x80\x15a\x01\x04W`\x00\x80\xfd[PB[`@Q\x90\x81R` \x01[`@Q\x80\x91\x03\x90\xf3[a\x01-a\x01(6`\x04a\n\x85V[a\x02\xbaV[`@Qa\x01\x11\x91\x90a\x0b\xbeV[a\x01Ma\x01H6`\x04a\n\x85V[a\x04\xefV[`@Qa\x01\x11\x92\x91\x90a\x0b\xd8V[4\x80\x15a\x01gW`\x00\x80\xfd[PC\x7f\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x01@a\x01\x07V[4\x80\x15a\x01\x9dW`\x00\x80\xfd[PFa\x01\x07V[a\x01\xb7a\x01\xb26`\x04a\x0c`V[a\x06\x90V[`@Qa\x01\x11\x93\x92\x91\x90a\x0c\xbaV[4\x80\x15a\x01\xd2W`\x00\x80\xfd[PHa\x01\x07V[4\x80\x15a\x01\xe5W`\x00\x80\xfd[PCa\x01\x07V[4\x80\x15a\x01\xf8W`\x00\x80\xfd[Pa\x01\x07a\x02\x076`\x04a\x0c\xe2V[s\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x161\x90V[4\x80\x15a\x02-W`\x00\x80\xfd[PDa\x01\x07V[a\x01-a\x02B6`\x04a\n\x85V[a\x06\xabV[4\x80\x15a\x02SW`\x00\x80\xfd[PEa\x01\x07V[4\x80\x15a\x02fW`\x00\x80\xfd[P`@QA\x81R` \x01a\x01\x11V[a\x01-a\x02\x836`\x04a\x0c`V[a\x08ZV[a\x01\xb7a\x02\x966`\x04a\n\x85V[a\n\x1aV[4\x80\x15a\x02\xa7W`\x00\x80\xfd[Pa\x01\x07a\x02\xb66`\x04a\r\x18V[@\x90V[```\x00\x82\x80g\xff\xff\xff\xff\xff\xff\xff\xff\x81\x11\x15a\x02\xd8Wa\x02\xd8a\r1V[`@Q\x90\x80\x82R\x80` \x02` 
\x01\x82\x01`@R\x80\x15a\x03\x1eW\x81` \x01[`@\x80Q\x80\x82\x01\x90\x91R`\x00\x81R``` \x82\x01R\x81R` \x01\x90`\x01\x90\x03\x90\x81a\x02\xf6W\x90P[P\x92P6`\x00[\x82\x81\x10\x15a\x04wW`\x00\x85\x82\x81Q\x81\x10a\x03AWa\x03Aa\r`V[` \x02` \x01\x01Q\x90P\x87\x87\x83\x81\x81\x10a\x03]Wa\x03]a\r`V[\x90P` \x02\x81\x01\x90a\x03o\x91\x90a\r\x8fV[`@\x81\x015\x95\x86\x01\x95\x90\x93Pa\x03\x88` \x85\x01\x85a\x0c\xe2V[s\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x16\x81a\x03\xac``\x87\x01\x87a\r\xcdV[`@Qa\x03\xba\x92\x91\x90a\x0e2V[`\x00`@Q\x80\x83\x03\x81\x85\x87Z\xf1\x92PPP=\x80`\x00\x81\x14a\x03\xf7W`@Q\x91P`\x1f\x19`?=\x01\x16\x82\x01`@R=\x82R=`\x00` \x84\x01>a\x03\xfcV[``\x91P[P` \x80\x85\x01\x91\x90\x91R\x90\x15\x15\x80\x84R\x90\x85\x015\x17a\x04mW\x7f\x08\xc3y\xa0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00`\x00R` `\x04R`\x17`$R\x7fMulticall3: call failed\x00\x00\x00\x00\x00\x00\x00\x00\x00`DR`\x84`\x00\xfd[PP`\x01\x01a\x03%V[P\x824\x14a\x04\xe6W`@Q\x7f\x08\xc3y\xa0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x81R` `\x04\x82\x01R`\x1a`$\x82\x01R\x7fMulticall3: value mismatch\x00\x00\x00\x00\x00\x00`D\x82\x01R`d\x01[`@Q\x80\x91\x03\x90\xfd[PPP\x92\x91PPV[C``\x82\x80g\xff\xff\xff\xff\xff\xff\xff\xff\x81\x11\x15a\x05\x0cWa\x05\x0ca\r1V[`@Q\x90\x80\x82R\x80` \x02` \x01\x82\x01`@R\x80\x15a\x05?W\x81` \x01[``\x81R` \x01\x90`\x01\x90\x03\x90\x81a\x05*W\x90P[P\x91P6`\x00[\x82\x81\x10\x15a\x06\x86W`\x00\x87\x87\x83\x81\x81\x10a\x05bWa\x05ba\r`V[\x90P` \x02\x81\x01\x90a\x05t\x91\x90a\x0eBV[\x92Pa\x05\x83` \x84\x01\x84a\x0c\xe2V[s\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x16a\x05\xa6` \x85\x01\x85a\r\xcdV[`@Qa\x05\xb4\x92\x91\x90a\x0e2V[`\x00`@Q\x80\x83\x03\x81`\x00\x86Z\xf1\x91PP=\x80`\x00\x81\x14a\x05\xf1W`@Q\x91P`\x1f\x19`?=\x01\x16\x82\x01`@R=\x82R=`\x00` 
\x84\x01>a\x05\xf6V[``\x91P[P\x86\x84\x81Q\x81\x10a\x06\tWa\x06\ta\r`V[` \x90\x81\x02\x91\x90\x91\x01\x01R\x90P\x80a\x06}W`@Q\x7f\x08\xc3y\xa0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x81R` `\x04\x82\x01R`\x17`$\x82\x01R\x7fMulticall3: call failed\x00\x00\x00\x00\x00\x00\x00\x00\x00`D\x82\x01R`d\x01a\x04\xddV[P`\x01\x01a\x05FV[PPP\x92P\x92\x90PV[C\x80@``a\x06\xa0\x86\x86\x86a\x08ZV[\x90P\x93P\x93P\x93\x90PV[``\x81\x80g\xff\xff\xff\xff\xff\xff\xff\xff\x81\x11\x15a\x06\xc7Wa\x06\xc7a\r1V[`@Q\x90\x80\x82R\x80` \x02` \x01\x82\x01`@R\x80\x15a\x07\rW\x81` \x01[`@\x80Q\x80\x82\x01\x90\x91R`\x00\x81R``` \x82\x01R\x81R` \x01\x90`\x01\x90\x03\x90\x81a\x06\xe5W\x90P[P\x91P6`\x00[\x82\x81\x10\x15a\x04\xe6W`\x00\x84\x82\x81Q\x81\x10a\x070Wa\x070a\r`V[` \x02` \x01\x01Q\x90P\x86\x86\x83\x81\x81\x10a\x07LWa\x07La\r`V[\x90P` \x02\x81\x01\x90a\x07^\x91\x90a\x0evV[\x92Pa\x07m` \x84\x01\x84a\x0c\xe2V[s\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x16a\x07\x90`@\x85\x01\x85a\r\xcdV[`@Qa\x07\x9e\x92\x91\x90a\x0e2V[`\x00`@Q\x80\x83\x03\x81`\x00\x86Z\xf1\x91PP=\x80`\x00\x81\x14a\x07\xdbW`@Q\x91P`\x1f\x19`?=\x01\x16\x82\x01`@R=\x82R=`\x00` \x84\x01>a\x07\xe0V[``\x91P[P` \x80\x84\x01\x91\x90\x91R\x90\x15\x15\x80\x83R\x90\x84\x015\x17a\x08QW\x7f\x08\xc3y\xa0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00`\x00R` `\x04R`\x17`$R\x7fMulticall3: call failed\x00\x00\x00\x00\x00\x00\x00\x00\x00`DR`d`\x00\xfd[P`\x01\x01a\x07\x14V[``\x81\x80g\xff\xff\xff\xff\xff\xff\xff\xff\x81\x11\x15a\x08vWa\x08va\r1V[`@Q\x90\x80\x82R\x80` \x02` \x01\x82\x01`@R\x80\x15a\x08\xbcW\x81` \x01[`@\x80Q\x80\x82\x01\x90\x91R`\x00\x81R``` \x82\x01R\x81R` \x01\x90`\x01\x90\x03\x90\x81a\x08\x94W\x90P[P\x91P6`\x00[\x82\x81\x10\x15a\n\x10W`\x00\x84\x82\x81Q\x81\x10a\x08\xdfWa\x08\xdfa\r`V[` \x02` 
\x01\x01Q\x90P\x86\x86\x83\x81\x81\x10a\x08\xfbWa\x08\xfba\r`V[\x90P` \x02\x81\x01\x90a\t\r\x91\x90a\x0eBV[\x92Pa\t\x1c` \x84\x01\x84a\x0c\xe2V[s\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x16a\t?` \x85\x01\x85a\r\xcdV[`@Qa\tM\x92\x91\x90a\x0e2V[`\x00`@Q\x80\x83\x03\x81`\x00\x86Z\xf1\x91PP=\x80`\x00\x81\x14a\t\x8aW`@Q\x91P`\x1f\x19`?=\x01\x16\x82\x01`@R=\x82R=`\x00` \x84\x01>a\t\x8fV[``\x91P[P` \x83\x01R\x15\x15\x81R\x87\x15a\n\x07W\x80Qa\n\x07W`@Q\x7f\x08\xc3y\xa0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x81R` `\x04\x82\x01R`\x17`$\x82\x01R\x7fMulticall3: call failed\x00\x00\x00\x00\x00\x00\x00\x00\x00`D\x82\x01R`d\x01a\x04\xddV[P`\x01\x01a\x08\xc3V[PPP\x93\x92PPPV[`\x00\x80``a\n+`\x01\x86\x86a\x06\x90V[\x91\x97\x90\x96P\x90\x94P\x92PPPV[`\x00\x80\x83`\x1f\x84\x01\x12a\nKW`\x00\x80\xfd[P\x815g\xff\xff\xff\xff\xff\xff\xff\xff\x81\x11\x15a\ncW`\x00\x80\xfd[` \x83\x01\x91P\x83` \x82`\x05\x1b\x85\x01\x01\x11\x15a\n~W`\x00\x80\xfd[\x92P\x92\x90PV[`\x00\x80` \x83\x85\x03\x12\x15a\n\x98W`\x00\x80\xfd[\x825g\xff\xff\xff\xff\xff\xff\xff\xff\x81\x11\x15a\n\xafW`\x00\x80\xfd[a\n\xbb\x85\x82\x86\x01a\n9V[\x90\x96\x90\x95P\x93PPPPV[`\x00\x81Q\x80\x84R`\x00[\x81\x81\x10\x15a\n\xedW` \x81\x85\x01\x81\x01Q\x86\x83\x01\x82\x01R\x01a\n\xd1V[\x81\x81\x11\x15a\n\xffW`\x00` \x83\x87\x01\x01R[P`\x1f\x01\x7f\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xe0\x16\x92\x90\x92\x01` \x01\x92\x91PPV[`\x00\x82\x82Q\x80\x85R` 
\x80\x86\x01\x95P\x80\x82`\x05\x1b\x84\x01\x01\x81\x86\x01`\x00[\x84\x81\x10\x15a\x0b\xb1W\x85\x83\x03\x7f\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xe0\x01\x89R\x81Q\x80Q\x15\x15\x84R\x84\x01Q`@\x85\x85\x01\x81\x90Ra\x0b\x9d\x81\x86\x01\x83a\n\xc7V[\x9a\x86\x01\x9a\x94PPP\x90\x83\x01\x90`\x01\x01a\x0bOV[P\x90\x97\x96PPPPPPPV[` \x81R`\x00a\x0b\xd1` \x83\x01\x84a\x0b2V[\x93\x92PPPV[`\x00`@\x82\x01\x84\x83R` `@\x81\x85\x01R\x81\x85Q\x80\x84R``\x86\x01\x91P``\x81`\x05\x1b\x87\x01\x01\x93P\x82\x87\x01`\x00[\x82\x81\x10\x15a\x0cRW\x7f\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xa0\x88\x87\x03\x01\x84Ra\x0c@\x86\x83Qa\n\xc7V[\x95P\x92\x84\x01\x92\x90\x84\x01\x90`\x01\x01a\x0c\x06V[P\x93\x98\x97PPPPPPPPV[`\x00\x80`\x00`@\x84\x86\x03\x12\x15a\x0cuW`\x00\x80\xfd[\x835\x80\x15\x15\x81\x14a\x0c\x85W`\x00\x80\xfd[\x92P` \x84\x015g\xff\xff\xff\xff\xff\xff\xff\xff\x81\x11\x15a\x0c\xa1W`\x00\x80\xfd[a\x0c\xad\x86\x82\x87\x01a\n9V[\x94\x97\x90\x96P\x93\x94PPPPV[\x83\x81R\x82` \x82\x01R```@\x82\x01R`\x00a\x0c\xd9``\x83\x01\x84a\x0b2V[\x95\x94PPPPPV[`\x00` \x82\x84\x03\x12\x15a\x0c\xf4W`\x00\x80\xfd[\x815s\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x81\x16\x81\x14a\x0b\xd1W`\x00\x80\xfd[`\x00` 
\x82\x84\x03\x12\x15a\r*W`\x00\x80\xfd[P5\x91\x90PV[\x7fNH{q\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00`\x00R`A`\x04R`$`\x00\xfd[\x7fNH{q\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00`\x00R`2`\x04R`$`\x00\xfd[`\x00\x825\x7f\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x81\x836\x03\x01\x81\x12a\r\xc3W`\x00\x80\xfd[\x91\x90\x91\x01\x92\x91PPV[`\x00\x80\x835\x7f\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xe1\x846\x03\x01\x81\x12a\x0e\x02W`\x00\x80\xfd[\x83\x01\x805\x91Pg\xff\xff\xff\xff\xff\xff\xff\xff\x82\x11\x15a\x0e\x1dW`\x00\x80\xfd[` \x01\x91P6\x81\x90\x03\x82\x13\x15a\n~W`\x00\x80\xfd[\x81\x83\x827`\x00\x91\x01\x90\x81R\x91\x90PV[`\x00\x825\x7f\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xc1\x836\x03\x01\x81\x12a\r\xc3W`\x00\x80\xfd[`\x00\x825\x7f\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xa1\x836\x03\x01\x81\x12a\r\xc3W`\x00\x80\xfd\xfe\xa2dipfsX\"\x12 \xbb+\\q\xa3(\x03/\x97\xc6v\xae9\xa1\xec!H\xd3\xe5\xd6\xf7=\x95\xe9\xb1y\x10\x15-a\xf1bdsolcC\x00\x08\x0c\x003" | /safe_pls_py-5.4.3-py3-none-any.whl/gnosis/eth/abis/multicall.py | 0.557364 | 0.571288 | multicall.py | pypi |
import re
from typing import Any, Dict, List, Union
from eth_abi import encode as encode_abi
from eth_account import Account
from eth_typing import Hash32, HexStr
from hexbytes import HexBytes
from ..utils import fast_keccak
def encode_data(primary_type: str, data, types):
    """
    Encode structured data as per Ethereum's signTypeData_v4.

    https://docs.metamask.io/guide/signing-data.html#sign-typed-data-v4

    This code is ported from the Javascript "eth-sig-util" package.

    :param primary_type: Name of the struct (key in ``types``) that ``data`` represents
    :param data: Dictionary with a value for every field of ``primary_type``
    :param types: Dictionary of type name -> list of ``{"name": ..., "type": ...}`` fields
    :return: ABI encoded struct (type hash followed by the encoded fields)
    """
    encoded_types = ["bytes32"]
    encoded_values = [hash_type(primary_type, types)]

    def _encode_field(name, typ, value):
        # Struct references (types defined in `types`) are encoded as the
        # keccak hash of their own encoding; `None` encodes as 32 zero bytes
        if typ in types:
            if value is None:
                return [
                    "bytes32",
                    "0x0000000000000000000000000000000000000000000000000000000000000000",
                ]
            else:
                return ["bytes32", fast_keccak(encode_data(typ, value, types))]

        if value is None:
            # Bugfix: previous message interpolated the builtin `type` instead of `typ`
            raise Exception(f"Missing value for field {name} of type {typ}")

        # Accept string bytes
        if "bytes" in typ and isinstance(value, str):
            value = HexBytes(value)

        # Accept string uint and int
        if "int" in typ and isinstance(value, str):
            value = int(value)

        if typ == "bytes":
            return ["bytes32", fast_keccak(value)]

        if typ == "string":
            # Convert string to bytes.
            value = value.encode("utf-8")
            return ["bytes32", fast_keccak(value)]

        if typ.endswith("]"):
            # Array type: hash the concatenation of the encoded elements
            if value:
                parsed_type = typ[: typ.rindex("[")]
                type_value_pairs = [_encode_field(name, parsed_type, v) for v in value]
                data_types, data_hashes = zip(*type_value_pairs)
            else:
                # Empty array
                data_types, data_hashes = [], []

            h = fast_keccak(encode_abi(data_types, data_hashes))
            return ["bytes32", h]

        # Atomic value (uint, address, bool...): pass it through for ABI encoding
        return [typ, value]

    for field in types[primary_type]:
        typ, val = _encode_field(field["name"], field["type"], data[field["name"]])
        encoded_types.append(typ)
        encoded_values.append(val)

    return encode_abi(encoded_types, encoded_values)
def encode_type(primary_type: str, types) -> str:
    """
    Build the EIP-712 type encoding string for ``primary_type``: the primary
    type first, followed by all of its transitive dependencies sorted
    alphabetically, e.g. ``Mail(Person from,Person to)Person(address wallet)``.

    :raises Exception: if a referenced type has no field definitions
    """
    result = ""
    deps = find_type_dependencies(primary_type, types)
    # Dependencies are sorted alphabetically, primary type always goes first
    deps = sorted([d for d in deps if d != primary_type])
    deps = [primary_type] + deps
    for typ in deps:
        children = types[typ]
        if not children:
            # Bugfix: previous message interpolated the builtin `type` instead of `typ`
            raise Exception(f"No type definition specified: {typ}")

        defs = [f"{t['type']} {t['name']}" for t in types[typ]]
        result += typ + "(" + ",".join(defs) + ")"
    return result
def find_type_dependencies(primary_type: str, types, results=None):
    """
    Collect ``primary_type`` and every struct type it references, recursively.

    Array suffixes are stripped before the lookup, so ``Person[2]`` resolves
    to ``Person``. Atomic types (``uint256``...) are not collected.
    """
    results = [] if results is None else results
    # Strip array brackets and anything after the first non-word character
    base_type = re.split(r"\W", primary_type)[0]
    if base_type in results or not types.get(base_type):
        return results
    results.append(base_type)
    for field in types[base_type]:
        for dep in find_type_dependencies(field["type"], types, results):
            if dep not in results:
                results.append(dep)
    return results
def hash_type(primary_type: str, types) -> Hash32:
    """Keccak256 hash of the EIP-712 type encoding string of ``primary_type``."""
    encoded_type = encode_type(primary_type, types).encode()
    return fast_keccak(encoded_type)
def hash_struct(primary_type: str, data, types) -> Hash32:
    """Keccak256 hash of ``data`` encoded as the EIP-712 struct ``primary_type``."""
    encoded_struct = encode_data(primary_type, data, types)
    return fast_keccak(encoded_struct)
def eip712_encode(typed_data: Dict[str, Any]) -> List[bytes]:
    """
    Given a dict of structured data and types, return a 3-element list of
    the encoded, signable data.

      0: The magic & version (0x1901)
      1: The encoded types
      2: The encoded data

    :raises ValueError: if ``typed_data`` is not a well-formed EIP-712 payload
    """
    try:
        domain_hash = hash_struct(
            "EIP712Domain", typed_data["domain"], typed_data["types"]
        )
        parts = [bytes.fromhex("1901"), domain_hash]
        # When signing only the domain itself there is no message part
        if typed_data["primaryType"] != "EIP712Domain":
            message_hash = hash_struct(
                typed_data["primaryType"],
                typed_data["message"],
                typed_data["types"],
            )
            parts.append(message_hash)
        return parts
    except (KeyError, AttributeError, TypeError, IndexError) as exc:
        raise ValueError(f"Not valid {typed_data}") from exc
def eip712_encode_hash(typed_data: Dict[str, Any]) -> Hash32:
    """
    :param typed_data: EIP712 structured data and types
    :return: Keccak256 hash of encoded signable data
    """
    signable_data = b"".join(eip712_encode(typed_data))
    return fast_keccak(signable_data)
def eip712_signature(
    payload: Dict[str, Any], private_key: Union[HexStr, bytes]
) -> bytes:
    """
    Given a bytes object (or the list of parts returned by ``eip712_encode``)
    and a private key, return a signature suitable for EIP712 and EIP191
    messages.

    :param payload: Bytes to sign, or a list/tuple of byte parts to concatenate
    :param private_key: Hex string (with or without ``0x`` prefix) or raw bytes
    :return: Signature bytes
    """
    if isinstance(payload, (list, tuple)):
        payload = b"".join(payload)

    if isinstance(private_key, str) and private_key.startswith("0x"):
        private_key = private_key[2:]
    elif isinstance(private_key, bytes):
        # Bugfix: this previously called `bytes.hex()` on the builtin type
        # itself, raising a TypeError for any bytes private key
        private_key = private_key.hex()

    account = Account.from_key(private_key)
    hashed_payload = fast_keccak(payload)
    return account.signHash(hashed_payload)["signature"]
import json
import os
import sys
from typing import Any, Dict, Optional
from eth_typing import ChecksumAddress
from hexbytes import HexBytes
from web3 import Web3
from web3.contract import Contract
from gnosis.util import cache
def load_contract_interface(file_name):
    """Load a contract interface (ABI, bytecode...) from a JSON file bundled with this module."""
    path = _abi_file_path(file_name)
    return _load_json_file(path)
def _abi_file_path(file):
return os.path.abspath(os.path.join(os.path.dirname(__file__), file))
def _load_json_file(path):
with open(path) as f:
return json.load(f)
# Reference to this module itself, so the generated `get_*_contract`
# functions can be bound onto it with `setattr` (see loop at end of file)
current_module = sys.modules[__name__]

# Mapping of contract name -> bundled ABI JSON file. For every entry a
# `get_<name>_contract(w3, address=None)` function is generated and bound
# to this module by the `setattr` loop at the bottom of the file.
contracts = {
    "safe_V1_3_0": "GnosisSafe_V1_3_0.json",
    "safe_V1_1_1": "GnosisSafe_V1_1_1.json",
    "safe_V1_0_0": "GnosisSafe_V1_0_0.json",
    "safe_V0_0_1": "GnosisSafe_V0_0_1.json",
    "compatibility_fallback_handler_V1_3_0": "CompatibilityFallbackHandler_V1_3_0.json",
    "erc20": "ERC20.json",
    "erc721": "ERC721.json",
    "erc1155": "ERC1155.json",
    "example_erc20": "ERC20TestToken.json",
    "delegate_constructor_proxy": "DelegateConstructorProxy.json",
    "multi_send": "MultiSend.json",
    "paying_proxy": "PayingProxy.json",
    "proxy_factory": "ProxyFactory_V1_3_0.json",
    "proxy_factory_V1_1_1": "ProxyFactory_V1_1_1.json",
    "proxy_factory_V1_0_0": "ProxyFactory_V1_0_0.json",
    "proxy": "Proxy_V1_1_1.json",
    "uniswap_exchange": "uniswap_exchange.json",
    "uniswap_factory": "uniswap_factory.json",
    "uniswap_v2_factory": "uniswap_v2_factory.json",
    "uniswap_v2_pair": "uniswap_v2_pair.json",
    "uniswap_v2_router": "uniswap_v2_router.json",  # Router02
    "kyber_network_proxy": "kyber_network_proxy.json",
    "cpk_factory": "CPKFactory.json",
}
def generate_contract_fn(contract: Dict[str, Any]):
    """
    Dynamically generate functions to work with the contracts

    :param contract: Dict holding the contract ``abi`` (and, optionally, ``bytecode``)
    :return: Function instantiating the web3 contract for a given ``w3``/``address``
    """

    def fn(w3: Web3, address: Optional[ChecksumAddress] = None):
        abi = contract["abi"]
        bytecode = contract.get("bytecode")
        return w3.eth.contract(address=address, abi=abi, bytecode=bytecode)

    return fn
# Annotate functions that will be generated later with `setattr` so typing does not complain
def get_safe_contract(w3: Web3, address: Optional[str] = None) -> Contract:
    """
    :param w3:
    :param address:
    :return: Latest available safe contract (v1.3.0)
    """
    # `get_safe_V1_3_0_contract` is rebound at import time by the `setattr`
    # loop at the bottom of the file, so this resolves the real implementation
    return get_safe_V1_3_0_contract(w3, address=address)
# NOTE: every function below is a typing placeholder only. The real bodies are
# generated from the `contracts` mapping by `generate_contract_fn` and bound
# over these names by the `setattr` loop at the bottom of this file.
def get_safe_V1_3_0_contract(w3: Web3, address: Optional[str] = None) -> Contract:
    pass
def get_safe_V1_1_1_contract(w3: Web3, address: Optional[str] = None) -> Contract:
    pass
def get_safe_V1_0_0_contract(w3: Web3, address: Optional[str] = None) -> Contract:
    pass
def get_safe_V0_0_1_contract(w3: Web3, address: Optional[str] = None) -> Contract:
    pass
def get_compatibility_fallback_handler_V1_3_0_contract(
    w3: Web3, address: Optional[str] = None
) -> Contract:
    pass
def get_erc20_contract(w3: Web3, address: Optional[str] = None) -> Contract:
    pass
def get_erc721_contract(w3: Web3, address: Optional[str] = None) -> Contract:
    pass
def get_erc1155_contract(w3: Web3, address: Optional[str] = None) -> Contract:
    pass
def get_example_erc20_contract(w3: Web3, address: Optional[str] = None) -> Contract:
    pass
def get_delegate_constructor_proxy_contract(
    w3: Web3, address: Optional[str] = None
) -> Contract:
    pass
def get_multi_send_contract(w3: Web3, address: Optional[str] = None) -> Contract:
    pass
def get_paying_proxy_contract(w3: Web3, address: Optional[str] = None) -> Contract:
    pass
def get_proxy_factory_contract(w3: Web3, address: Optional[str] = None) -> Contract:
    pass
def get_proxy_factory_V1_1_1_contract(
    w3: Web3, address: Optional[str] = None
) -> Contract:
    pass
def get_proxy_factory_V1_0_0_contract(
    w3: Web3, address: Optional[str] = None
) -> Contract:
    pass
def get_proxy_contract(w3: Web3, address: Optional[str] = None) -> Contract:
    pass
def get_uniswap_exchange_contract(w3: Web3, address: Optional[str] = None) -> Contract:
    pass
def get_uniswap_factory_contract(w3: Web3, address: Optional[str] = None) -> Contract:
    pass
def get_uniswap_v2_factory_contract(
    w3: Web3, address: Optional[str] = None
) -> Contract:
    pass
def get_uniswap_v2_pair_contract(w3: Web3, address: Optional[str] = None) -> Contract:
    pass
def get_uniswap_v2_router_contract(w3: Web3, address: Optional[str] = None) -> Contract:
    pass
def get_kyber_network_proxy_contract(
    w3: Web3, address: Optional[str] = None
) -> Contract:
    pass
def get_cpk_factory_contract(w3: Web3, address: Optional[str] = None) -> Contract:
    pass
@cache
def get_proxy_1_3_0_deployed_bytecode() -> bytes:
    """Deployed (runtime) bytecode of the Safe Proxy v1.3.0."""
    interface = load_contract_interface("Proxy_V1_3_0.json")
    return HexBytes(interface["deployedBytecode"])
@cache  # Added for consistency with the other bytecode getters in this module
def get_proxy_1_1_1_mainnet_deployed_bytecode() -> bytes:
    """
    :return: Deployed bytecode of the Safe Proxy v1.1.1 as found on mainnet.
        Somehow it's different from the generated version compiling the contracts
    """
    return HexBytes(
        "0x608060405273ffffffffffffffffffffffffffffffffffffffff600054167fa619486e0000000000000000000000000000000000000000000000000000000060003514156050578060005260206000f35b3660008037600080366000845af43d6000803e60008114156070573d6000fd5b3d6000f3fea265627a7a72315820d8a00dc4fe6bf675a9d7416fc2d00bb3433362aa8186b750f76c4027269667ff64736f6c634300050e0032"
    )
@cache
def get_proxy_1_1_1_deployed_bytecode() -> bytes:
    """Deployed (runtime) bytecode of the Safe Proxy v1.1.1 (compiled version)."""
    interface = load_contract_interface("Proxy_V1_1_1.json")
    return HexBytes(interface["deployedBytecode"])
@cache
def get_proxy_1_0_0_deployed_bytecode() -> bytes:
    """Deployed (runtime) bytecode of the Safe Proxy v1.0.0."""
    interface = load_contract_interface("Proxy_V1_0_0.json")
    return HexBytes(interface["deployedBytecode"])
@cache
def get_paying_proxy_deployed_bytecode() -> bytes:
    """Deployed (runtime) bytecode of the Paying Proxy."""
    interface = load_contract_interface("PayingProxy.json")
    return HexBytes(interface["deployedBytecode"])
# Generate a `get_<name>_contract` function for every contract in the mapping
# and bind it to this module, replacing the typing placeholders defined above.
# (Also removes stray non-Python residue that was fused onto the last line.)
for contract_name, json_contract_filename in contracts.items():
    fn_name = "get_{}_contract".format(contract_name)
    contract_dict = load_contract_interface(json_contract_filename)
    if not contract_dict:
        raise ValueError(f"{contract_name} json cannot be empty")
    setattr(current_module, fn_name, generate_contract_fn(contract_dict))
from enum import Enum
from logging import getLogger
from typing import List, Optional, Union
from eth_account.signers.local import LocalAccount
from eth_typing import ChecksumAddress
from hexbytes import HexBytes
from web3 import Web3
from gnosis.eth import EthereumClient, EthereumTxSent
from gnosis.eth.contracts import get_multi_send_contract
from gnosis.eth.typing import EthereumData
from gnosis.eth.utils import fast_bytes_to_checksum_address, fast_is_checksum_address
logger = getLogger(__name__)
class MultiSendOperation(Enum):
    """Operation type for each inner transaction of a MultiSend batch."""

    CALL = 0
    DELEGATE_CALL = 1
class MultiSendTx:
    """
    Wrapper for a single MultiSendTx
    """

    def __init__(
        self,
        operation: MultiSendOperation,
        to: str,
        value: int,
        data: EthereumData,
        old_encoding: bool = False,
    ):
        """
        :param operation: MultisendOperation, CALL or DELEGATE_CALL
        :param to: Address
        :param value: Value in Wei
        :param data: data as hex string or bytes
        :param old_encoding: True if using old multisend ABI Encoded data, False otherwise
        """
        self.operation = operation
        self.to = to
        self.value = value
        # Falsy data (None, empty string/bytes) is normalized to empty bytes
        self.data = HexBytes(data) if data else b""
        self.old_encoding = old_encoding

    def __eq__(self, other):
        # `old_encoding` is deliberately ignored: it only records how the tx
        # was decoded, not what the tx does
        if not isinstance(other, MultiSendTx):
            return NotImplemented
        return (
            self.operation == other.operation
            and self.to == other.to
            and self.value == other.value
            and self.data == other.data
        )

    def __len__(self):
        """
        :return: Size on bytes of the tx
        """
        # operation (1 byte) + to (20 bytes) + value (32 bytes)
        # + data length (32 bytes) + data
        return 21 + 32 * 2 + self.data_length

    def __repr__(self):
        # Show at most the first 4 bytes of `data` (the function selector)
        data = self.data[:4].hex() + ("..." if len(self.data) > 4 else "")
        return (
            f"MultisendTx operation={self.operation.name} to={self.to} value={self.value} "
            f"data={data}"
        )

    @property
    def data_length(self) -> int:
        # Length in bytes of the call data
        return len(self.data)

    @property
    def encoded_data(self):
        # Packed encoding used by the current MultiSend contract
        operation = HexBytes("{:0>2x}".format(self.operation.value))  # Operation 1 byte
        to = HexBytes("{:0>40x}".format(int(self.to, 16)))  # Address 20 bytes
        value = HexBytes("{:0>64x}".format(self.value))  # Value 32 bytes
        data_length = HexBytes(
            "{:0>64x}".format(self.data_length)
        )  # Data length 32 bytes
        return operation + to + value + data_length + self.data

    @classmethod
    def from_bytes(cls, encoded_multisend_tx: Union[str, bytes]) -> "MultiSendTx":
        """
        Decoded one MultiSend transaction. ABI must be used to get the `transactions` parameter and use that data
        for this function

        :param encoded_multisend_tx:
        :return:
        """
        encoded_multisend_tx = HexBytes(encoded_multisend_tx)
        try:
            return cls._decode_multisend_data(encoded_multisend_tx)
        except ValueError:
            # Try using the old decoding method
            return cls._decode_multisend_old_transaction(encoded_multisend_tx)

    @classmethod
    def _decode_multisend_data(cls, encoded_multisend_tx: Union[str, bytes]):
        """
        Decodes one Multisend transaction. If there's more data after `data` it's ignored. Fallbacks to the old
        multisend structure if this structure cannot be decoded.

        https://etherscan.io/address/0x8D29bE29923b68abfDD21e541b9374737B49cdAD#code

        Structure:
            - operation   -> MultiSendOperation 1 byte
            - to          -> ethereum address 20 bytes
            - value       -> tx value 32 bytes
            - data_length -> 32 bytes
            - data        -> `data_length` bytes

        :param encoded_multisend_tx: 1 multisend transaction encoded
        :return: Tx as a MultisendTx
        :raises ValueError: if `data` is shorter than the declared `data_length`
        """
        encoded_multisend_tx = HexBytes(encoded_multisend_tx)
        # Byte offsets follow the packed layout documented above
        operation = MultiSendOperation(encoded_multisend_tx[0])
        to = fast_bytes_to_checksum_address(encoded_multisend_tx[1 : 1 + 20])
        value = int.from_bytes(encoded_multisend_tx[21 : 21 + 32], byteorder="big")
        data_length = int.from_bytes(
            encoded_multisend_tx[21 + 32 : 21 + 32 * 2], byteorder="big"
        )
        data = encoded_multisend_tx[21 + 32 * 2 : 21 + 32 * 2 + data_length]
        len_data = len(data)
        if len_data != data_length:
            # Truncated payload: signal the caller to try the old decoding
            raise ValueError(
                f"Data length {data_length} is different from len(data) {len_data}"
            )
        return cls(operation, to, value, data, old_encoding=False)

    @classmethod
    def _decode_multisend_old_transaction(
        cls, encoded_multisend_tx: Union[str, bytes]
    ) -> "MultiSendTx":
        """
        Decodes one old multisend transaction. If there's more data after `data` it's ignored. The difference with
        the new MultiSend is that every value but `data` is padded to 32 bytes, wasting a lot of bytes.

        https://etherscan.io/address/0xE74d6AF1670FB6560dd61EE29eB57C7Bc027Ce4E#code

        Structure:
            - operation   -> MultiSendOperation 32 byte
            - to          -> ethereum address 32 bytes
            - value       -> tx value 32 bytes
            - data_length -> 32 bytes
            - data        -> `data_length` bytes

        :param encoded_multisend_tx: 1 multisend transaction encoded
        :return: Tx as a MultisendTx
        :raises ValueError: if `data` is shorter than the declared `data_length`
        """
        encoded_multisend_tx = HexBytes(encoded_multisend_tx)
        operation = MultiSendOperation(
            int.from_bytes(encoded_multisend_tx[:32], byteorder="big")
        )
        # Address is right-aligned inside its 32-byte word
        to = fast_bytes_to_checksum_address(encoded_multisend_tx[32:64][-20:])
        value = int.from_bytes(encoded_multisend_tx[64:96], byteorder="big")
        # Bytes 96-128 hold the ABI data offset and are skipped
        data_length = int.from_bytes(encoded_multisend_tx[128:160], byteorder="big")
        data = encoded_multisend_tx[160 : 160 + data_length]
        len_data = len(data)
        if len_data != data_length:
            raise ValueError(
                f"Data length {data_length} is different from len(data) {len_data}"
            )
        return cls(operation, to, value, data, old_encoding=True)
class MultiSend:
    # Web3 instance not connected to any node; used only for ABI decoding
    dummy_w3 = Web3()

    MULTISEND_ADDRESSES = (
        "0xA238CBeb142c10Ef7Ad8442C6D1f9E89e07e7761",  # MultiSend v1.3.0
        "0x998739BFdAAdde7C933B942a68053933098f9EDa",  # MultiSend v1.3.0 (EIP-155)
    )
    MULTISEND_CALL_ONLY_ADDRESSES = (
        "0x40A2aCCbd92BCA938b02010E17A5b8929b49130D",  # MultiSend Call Only v1.3.0
        "0xA1dabEF33b3B82c7814B6D82A79e50F4AC44102B",  # MultiSend Call Only v1.3.0 (EIP-155)
    )

    def __init__(
        self,
        ethereum_client: Optional[EthereumClient] = None,
        address: Optional[ChecksumAddress] = None,
        call_only: bool = True,
    ):
        """
        :param ethereum_client: Required for detecting the address in the network.
        :param address: If not provided, will try to detect it from the hardcoded addresses using `ethereum_client`.
        :param call_only: If `True` use `call only` MultiSend, otherwise use regular one.
            Only if `address` not provided
        :raises ValueError: if `ethereum_client` was provided but no known MultiSend
            deployment exists on that network
        """
        self.address = address
        self.ethereum_client = ethereum_client
        self.call_only = call_only
        addresses = (
            self.MULTISEND_CALL_ONLY_ADDRESSES
            if call_only
            else self.MULTISEND_ADDRESSES
        )
        if address:
            # Fixed message: it previously said "proxy factory", a copy-paste
            # from the ProxyFactory class
            assert fast_is_checksum_address(address), (
                "%s multisend address not valid" % address
            )
        elif ethereum_client:
            # Try to detect MultiSend address if not provided
            for address in addresses:
                if ethereum_client.is_contract(address):
                    self.address = address
                    break
        else:
            # No client to probe the network: fall back to the default deployment
            self.address = addresses[0]

        if not self.address:
            raise ValueError(
                f"Cannot find a MultiSend contract for chainId={self.ethereum_client.get_chain_id()}"
            )

    @property
    def w3(self):
        # Use the client's Web3 if available, otherwise an offline instance
        return (self.ethereum_client and self.ethereum_client.w3) or Web3()

    @classmethod
    def from_bytes(cls, encoded_multisend_txs: Union[str, bytes]) -> List[MultiSendTx]:
        """
        Decodes one or more multisend transactions from `bytes transactions` (Abi decoded)

        :param encoded_multisend_txs:
        :return: List of MultiSendTxs
        """
        if not encoded_multisend_txs:
            return []

        encoded_multisend_txs = HexBytes(encoded_multisend_txs)
        multisend_tx = MultiSendTx.from_bytes(encoded_multisend_txs)
        multisend_tx_size = len(multisend_tx)
        assert (
            multisend_tx_size > 0
        ), "Multisend tx cannot be empty"  # This should never happen, just in case

        if multisend_tx.old_encoding:
            # Old encoding pads `data` to a multiple of 32 bytes and has a
            # 160-byte (0xA0) head, so the next tx is not right after `data`
            next_data_position = (
                (multisend_tx.data_length + 0x1F) // 0x20 * 0x20
            ) + 0xA0
        else:
            next_data_position = multisend_tx_size

        # Recursively decode the remaining transactions
        remaining_data = encoded_multisend_txs[next_data_position:]
        return [multisend_tx] + cls.from_bytes(remaining_data)

    @classmethod
    def from_transaction_data(
        cls, multisend_data: Union[str, bytes]
    ) -> List[MultiSendTx]:
        """
        Decodes multisend transactions from transaction data (ABI encoded with selector)

        :return: List of MultiSendTxs, empty if decoding is not possible
        """
        try:
            _, data = get_multi_send_contract(cls.dummy_w3).decode_function_input(
                multisend_data
            )
            return cls.from_bytes(data["transactions"])
        except ValueError:
            return []

    @staticmethod
    def deploy_contract(
        ethereum_client: EthereumClient, deployer_account: LocalAccount
    ) -> EthereumTxSent:
        """
        Deploy MultiSend contract

        :param ethereum_client:
        :param deployer_account: Ethereum Account
        :return: deployed contract address
        """
        contract = get_multi_send_contract(ethereum_client.w3)
        tx = contract.constructor().build_transaction(
            {"from": deployer_account.address}
        )

        tx_hash = ethereum_client.send_unsigned_transaction(
            tx, private_key=deployer_account.key
        )
        tx_receipt = ethereum_client.get_transaction_receipt(tx_hash, timeout=120)
        assert tx_receipt
        assert tx_receipt["status"]
        contract_address = tx_receipt["contractAddress"]
        # Fixed message: it previously said "Proxy Factory Contract"
        logger.info(
            "Deployed and initialized MultiSend Contract=%s by %s",
            contract_address,
            deployer_account.address,
        )
        return EthereumTxSent(tx_hash, tx, contract_address)

    def get_contract(self):
        return get_multi_send_contract(self.w3, self.address)

    def build_tx_data(self, multi_send_txs: List[MultiSendTx]) -> bytes:
        """
        Txs don't need to be valid to get through

        :param multi_send_txs:
        :return: ABI encoded data for the `multiSend` call
        """
        multisend_contract = self.get_contract()
        encoded_multisend_data = b"".join([x.encoded_data for x in multi_send_txs])
        # Dummy gas values: only the encoded `data` field is used
        return multisend_contract.functions.multiSend(
            encoded_multisend_data
        ).build_transaction({"gas": 1, "gasPrice": 1})["data"]
from abc import ABC, abstractmethod
from enum import Enum
from logging import getLogger
from typing import List, Union
from eth_abi import decode as decode_abi
from eth_abi import encode as encode_abi
from eth_abi.exceptions import DecodingError
from eth_account.messages import defunct_hash_message
from eth_typing import ChecksumAddress
from hexbytes import HexBytes
from web3.exceptions import Web3Exception
from gnosis.eth import EthereumClient
from gnosis.eth.contracts import get_safe_contract, get_safe_V1_1_1_contract
from gnosis.eth.utils import fast_to_checksum_address
from gnosis.safe.signatures import (
get_signing_address,
signature_split,
signature_to_bytes,
)
logger = getLogger(__name__)
EthereumBytes = Union[bytes, str]
class SafeSignatureException(Exception):
    """Base exception for Safe signature parsing/validation errors."""

    pass
class CannotCheckEIP1271ContractSignature(SafeSignatureException):
    """Raised when an EIP-1271 contract signature cannot be verified."""

    pass
class SafeSignatureType(Enum):
    """Signature types supported by the Safe contracts, inferred from ``v``."""

    CONTRACT_SIGNATURE = 0
    APPROVED_HASH = 1
    EOA = 2
    ETH_SIGN = 3

    @staticmethod
    def from_v(v: int):
        """Map the ``v`` byte of a 65-byte Safe signature to its type."""
        if v == 0:
            return SafeSignatureType.CONTRACT_SIGNATURE
        if v == 1:
            return SafeSignatureType.APPROVED_HASH
        return SafeSignatureType.ETH_SIGN if v > 30 else SafeSignatureType.EOA
def uint_to_address(value: int) -> ChecksumAddress:
    """
    Convert a Solidity `uint` value to a checksummed `address`, removing
    invalid padding bytes if present

    :return: Checksummed address
    """
    encoded = encode_abi(["uint"], [value])
    # Keep only the low 20 bytes: Solidity ignores dirty padding but
    # `eth_abi` would not, so rebuild a clean 32-byte word first
    clean_word = b"\x00" * 12 + encoded[-20:]
    raw_address = decode_abi(["address"], clean_word)[0]
    return fast_to_checksum_address(raw_address)
class SafeSignature(ABC):
    """Base class wrapping a single 65-byte Safe signature and its tx hash."""

    def __init__(self, signature: EthereumBytes, safe_tx_hash: EthereumBytes):
        self.signature = HexBytes(signature)
        self.safe_tx_hash = HexBytes(safe_tx_hash)
        # Split the 65-byte signature into its components
        self.v, self.r, self.s = signature_split(self.signature)

    def __str__(self):
        return f"SafeSignature type={self.signature_type.name} owner={self.owner}"

    @classmethod
    def parse_signature(
        cls,
        signatures: EthereumBytes,
        safe_tx_hash: EthereumBytes,
        ignore_trailing: bool = True,
    ) -> List["SafeSignature"]:
        """
        :param signatures: One or more signatures appended. EIP1271 data at the end is supported.
        :param safe_tx_hash:
        :param ignore_trailing: Ignore trailing data on the signature. Some libraries pad it and add some zeroes at
            the end
        :return: List of SafeSignatures decoded
        """
        if not signatures:
            return []
        elif isinstance(signatures, str):
            signatures = HexBytes(signatures)

        signature_size = 65  # For contract signatures there'll be some data at the end
        data_position = len(
            signatures
        )  # For contract signatures, to stop parsing at data position

        safe_signatures = []
        # Walk the buffer in fixed 65-byte steps; dynamic EIP-1271 data lives
        # past `data_position` and is addressed through each signature's `s`
        for i in range(0, len(signatures), signature_size):
            if (
                i >= data_position
            ):  # If contract signature data position is reached, stop
                break
            signature = signatures[i : i + signature_size]
            if ignore_trailing and len(signature) < 65:
                # Trailing stuff
                break
            v, r, s = signature_split(signature)
            signature_type = SafeSignatureType.from_v(v)
            safe_signature: "SafeSignature"
            if signature_type == SafeSignatureType.CONTRACT_SIGNATURE:
                # `s` is the offset of the dynamic contract signature data;
                # the earliest offset marks where static signatures end
                if s < data_position:
                    data_position = s
                contract_signature_len = int.from_bytes(
                    signatures[s : s + 32], "big"
                )  # Len size is 32 bytes
                contract_signature = signatures[
                    s + 32 : s + 32 + contract_signature_len
                ]  # Skip array size (32 bytes)
                safe_signature = SafeSignatureContract(
                    signature, safe_tx_hash, contract_signature
                )
            elif signature_type == SafeSignatureType.APPROVED_HASH:
                safe_signature = SafeSignatureApprovedHash(signature, safe_tx_hash)
            elif signature_type == SafeSignatureType.EOA:
                safe_signature = SafeSignatureEOA(signature, safe_tx_hash)
            elif signature_type == SafeSignatureType.ETH_SIGN:
                safe_signature = SafeSignatureEthSign(signature, safe_tx_hash)
            safe_signatures.append(safe_signature)
        return safe_signatures

    def export_signature(self) -> HexBytes:
        """
        Exports signature in a format that's valid individually. That's important for contract signatures, as it
        will fix the offset

        :return:
        """
        return self.signature

    @property
    @abstractmethod
    def owner(self):
        """
        :return: Decode owner from signature, without any further validation (signature can be not valid)
        """
        raise NotImplementedError

    @abstractmethod
    def is_valid(self, ethereum_client: EthereumClient, safe_address: str) -> bool:
        """
        :param ethereum_client: Required for Contract Signature and Approved Hash check
        :param safe_address: Required for Approved Hash check
        :return: `True` if signature is valid, `False` otherwise
        """
        raise NotImplementedError

    @property
    @abstractmethod
    def signature_type(self) -> SafeSignatureType:
        raise NotImplementedError
class SafeSignatureContract(SafeSignature):
    """EIP-1271 contract signature (``v == 0``)."""

    # EIP-1271 magic return values accepted as "signature valid"
    EIP1271_MAGIC_VALUE = HexBytes(0x20C13B0B)
    EIP1271_MAGIC_VALUE_UPDATED = HexBytes(0x1626BA7E)

    def __init__(
        self,
        signature: EthereumBytes,
        safe_tx_hash: EthereumBytes,
        contract_signature: EthereumBytes,
    ):
        super().__init__(signature, safe_tx_hash)
        self.contract_signature = HexBytes(contract_signature)

    @property
    def owner(self) -> ChecksumAddress:
        """
        :return: Address of contract signing. No further checks to get the owner are needed,
            but it could be a non existing contract
        """
        # `r` holds the signing contract address for contract signatures
        return uint_to_address(self.r)

    @property
    def signature_type(self) -> SafeSignatureType:
        return SafeSignatureType.CONTRACT_SIGNATURE

    def export_signature(self) -> HexBytes:
        """
        Fix offset (s) and append `contract_signature` at the end of the signature

        :return:
        """
        # encode_abi adds {32 bytes offset}{32 bytes size}. We don't need offset
        contract_signature = encode_abi(["bytes"], [self.contract_signature])[32:]
        # Dynamic data starts right after the 65-byte static signature
        dynamic_offset = 65
        return HexBytes(
            signature_to_bytes(self.v, self.r, dynamic_offset) + contract_signature
        )

    def is_valid(self, ethereum_client: EthereumClient, *args) -> bool:
        safe_contract = get_safe_V1_1_1_contract(ethereum_client.w3, self.owner)
        # Newest versions of the Safe contract have `isValidSignature` on the compatibility fallback handler
        for block_identifier in ("pending", "latest"):
            try:
                return safe_contract.functions.isValidSignature(
                    self.safe_tx_hash, self.contract_signature
                ).call(block_identifier=block_identifier) in (
                    self.EIP1271_MAGIC_VALUE,
                    self.EIP1271_MAGIC_VALUE_UPDATED,
                )
            except (Web3Exception, DecodingError, ValueError):
                # Error using `pending` block identifier or contract does not exist
                logger.warning(
                    "Cannot check EIP1271 signature from contract %s", self.owner
                )
        # NOTE(review): this `return False` looks intended to run only after both
        # block identifiers fail — confirm indentation against the upstream library
        return False
class SafeSignatureApprovedHash(SafeSignature):
    """
    Signature for a SafeTx hash that an owner approved on-chain
    (checked against the Safe's ``approvedHashes`` mapping).
    """

    @property
    def owner(self):
        # The approving owner address is packed into the `r` component
        return uint_to_address(self.r)

    @property
    def signature_type(self):
        return SafeSignatureType.APPROVED_HASH

    @classmethod
    def build_for_owner(
        cls, owner: str, safe_tx_hash: str
    ) -> "SafeSignatureApprovedHash":
        """
        :param owner: Owner that approved the hash on-chain
        :param safe_tx_hash: Approved SafeTx hash
        :return: Instance encoding {r=owner, s=0, v=1}
        """
        padded_owner = owner.lower().replace("0x", "").rjust(64, "0")
        zero_s = "0" * 64
        return cls(HexBytes(padded_owner + zero_s + "01"), safe_tx_hash)

    def is_valid(self, ethereum_client: EthereumClient, safe_address: str) -> bool:
        """
        :param ethereum_client: Client to query the Safe contract
        :param safe_address: Safe whose `approvedHashes` mapping is checked
        :return: ``True`` if the owner approved the hash on-chain
        """
        safe_contract = get_safe_contract(ethereum_client.w3, safe_address)
        last_exception: Exception
        for block_identifier in ("pending", "latest"):
            try:
                approved = safe_contract.functions.approvedHashes(
                    self.owner, self.safe_tx_hash
                ).call(block_identifier=block_identifier)
            except (Web3Exception, DecodingError, ValueError) as exc:
                # Error using `pending` block identifier
                last_exception = exc
            else:
                return approved == 1
        raise last_exception  # This should never happen
class SafeSignatureEthSign(SafeSignature):
    """
    Signature made with the ``eth_sign`` RPC method over the SafeTx hash.
    """

    @property
    def owner(self):
        # defunct_hash_message prepends `\x19Ethereum Signed Message:\n32`;
        # `v` is stored with an offset of 4 for this signature type, so undo it
        prefixed_hash = defunct_hash_message(primitive=self.safe_tx_hash)
        return get_signing_address(prefixed_hash, self.v - 4, self.r, self.s)

    @property
    def signature_type(self):
        return SafeSignatureType.ETH_SIGN

    def is_valid(self, *args) -> bool:
        # Recovering `owner` is the only check required for this signature type
        return True
class SafeSignatureEOA(SafeSignature):
    """
    Standard ECDSA signature from an externally owned account (EOA).
    """

    @property
    def owner(self):
        """
        :return: Address recovered from the ECDSA signature over the SafeTx hash
        """
        return get_signing_address(self.safe_tx_hash, self.v, self.r, self.s)

    @property
    def signature_type(self):
        return SafeSignatureType.EOA

    def is_valid(self, *args) -> bool:
        """
        :return: Always ``True``
        """
        # FIX: removed dataset-metadata text that was fused onto the last line
        # of this module and made the file unparseable
        return True
from functools import cached_property
from typing import Any, Dict, List, NoReturn, Optional, Tuple, Type
from eth_account import Account
from hexbytes import HexBytes
from packaging.version import Version
from web3.exceptions import Web3Exception
from web3.types import BlockIdentifier, TxParams, Wei
from gnosis.eth import EthereumClient
from gnosis.eth.constants import NULL_ADDRESS
from gnosis.eth.contracts import get_safe_contract
from gnosis.eth.eip712 import eip712_encode_hash
from gnosis.eth.ethereum_client import TxSpeed
from .exceptions import (
CouldNotFinishInitialization,
CouldNotPayGasWithEther,
CouldNotPayGasWithToken,
HashHasNotBeenApproved,
InvalidContractSignatureLocation,
InvalidInternalTx,
InvalidMultisigTx,
InvalidOwnerProvided,
InvalidSignaturesProvided,
MethodCanOnlyBeCalledFromThisContract,
ModuleManagerException,
NotEnoughSafeTransactionGas,
OnlyOwnersCanApproveAHash,
OwnerManagerException,
SafeTransactionFailedWhenGasPriceAndSafeTxGasEmpty,
SignatureNotProvidedByOwner,
SignaturesDataTooShort,
ThresholdNeedsToBeDefined,
)
from .safe_signature import SafeSignature
from .signatures import signature_to_bytes
class SafeTx:
    """
    Represents a Safe multisig transaction: EIP-712 hashing, signature
    collection and execution through the Safe ``execTransaction`` method.
    """

    def __init__(
        self,
        ethereum_client: EthereumClient,
        safe_address: str,
        to: Optional[str],
        value: int,
        data: bytes,
        operation: int,
        safe_tx_gas: int,
        base_gas: int,
        gas_price: int,
        gas_token: Optional[str],
        refund_receiver: Optional[str],
        signatures: Optional[bytes] = None,
        safe_nonce: Optional[int] = None,
        safe_version: Optional[str] = None,
        chain_id: Optional[int] = None,
    ):
        """
        :param ethereum_client:
        :param safe_address:
        :param to:
        :param value:
        :param data:
        :param operation:
        :param safe_tx_gas:
        :param base_gas:
        :param gas_price:
        :param gas_token:
        :param refund_receiver:
        :param signatures:
        :param safe_nonce: Current nonce of the Safe. If not provided, it will be retrieved from network
        :param safe_version: Safe version 1.0.0 renamed `baseGas` to `dataGas`. Safe version 1.3.0 added `chainId` to
            the `domainSeparator`. If not provided, it will be retrieved from network
        :param chain_id: Ethereum network chain_id is used in hash calculation for Safes >= 1.3.0. If not provided,
            it will be retrieved from the provided ethereum_client
        """
        self.ethereum_client = ethereum_client
        self.safe_address = safe_address
        self.to = to or NULL_ADDRESS
        self.value = int(value)
        self.data = HexBytes(data) if data else b""
        self.operation = int(operation)
        self.safe_tx_gas = int(safe_tx_gas)
        self.base_gas = int(base_gas)
        self.gas_price = int(gas_price)
        self.gas_token = gas_token or NULL_ADDRESS
        self.refund_receiver = refund_receiver or NULL_ADDRESS
        self.signatures = signatures or b""
        # Explicit `is not None` checks (instead of `x and int(x)`) so a
        # valid `0` is not treated as "missing"
        self._safe_nonce = int(safe_nonce) if safe_nonce is not None else None
        self._safe_version = safe_version
        self._chain_id = int(chain_id) if chain_id is not None else None
        self.tx: Optional[TxParams] = None  # If executed, `tx` is set
        self.tx_hash: Optional[bytes] = None  # If executed, `tx_hash` is set

    def __str__(self):
        return (
            f"SafeTx - safe={self.safe_address} - to={self.to} - value={self.value} - data={self.data.hex()} - "
            f"operation={self.operation} - safe-tx-gas={self.safe_tx_gas} - base-gas={self.base_gas} - "
            f"gas-price={self.gas_price} - gas-token={self.gas_token} - refund-receiver={self.refund_receiver} - "
            f"signers = {self.signers}"
        )

    @property
    def w3(self):
        return self.ethereum_client.w3

    @cached_property
    def contract(self):
        return get_safe_contract(self.w3, address=self.safe_address)

    @cached_property
    def chain_id(self) -> int:
        if self._chain_id is not None:
            return self._chain_id
        else:
            return self.ethereum_client.get_chain_id()

    @cached_property
    def safe_nonce(self) -> int:
        # FIX: return annotation was `str` but an `int` is always returned
        if self._safe_nonce is not None:
            return self._safe_nonce
        else:
            return self.contract.functions.nonce().call()

    @cached_property
    def safe_version(self) -> str:
        if self._safe_version is not None:
            return self._safe_version
        else:
            return self.contract.functions.VERSION().call()

    @property
    def eip712_structured_data(self) -> Dict[str, Any]:
        """
        :return: EIP-712 payload (types, domain and message) for this SafeTx
        """
        safe_version = Version(self.safe_version)
        # Safes >= 1.0.0 Renamed `baseGas` to `dataGas`
        base_gas_key = "baseGas" if safe_version >= Version("1.0.0") else "dataGas"
        types = {
            "EIP712Domain": [{"name": "verifyingContract", "type": "address"}],
            "SafeTx": [
                {"name": "to", "type": "address"},
                {"name": "value", "type": "uint256"},
                {"name": "data", "type": "bytes"},
                {"name": "operation", "type": "uint8"},
                {"name": "safeTxGas", "type": "uint256"},
                {"name": base_gas_key, "type": "uint256"},
                {"name": "gasPrice", "type": "uint256"},
                {"name": "gasToken", "type": "address"},
                {"name": "refundReceiver", "type": "address"},
                {"name": "nonce", "type": "uint256"},
            ],
        }
        message = {
            "to": self.to,
            "value": self.value,
            "data": self.data,
            "operation": self.operation,
            "safeTxGas": self.safe_tx_gas,
            # FIX: removed a hardcoded duplicate `"dataGas"` entry; only the
            # version-dependent key declared in `types` is encoded
            base_gas_key: self.base_gas,
            "gasPrice": self.gas_price,
            "gasToken": self.gas_token,
            "refundReceiver": self.refund_receiver,
            "nonce": self.safe_nonce,
        }
        payload = {
            "types": types,
            "primaryType": "SafeTx",
            "domain": {"verifyingContract": self.safe_address},
            "message": message,
        }
        # Enable chainId from v1.3.0 onwards
        if safe_version >= Version("1.3.0"):
            payload["domain"]["chainId"] = self.chain_id
            types["EIP712Domain"].insert(0, {"name": "chainId", "type": "uint256"})
        return payload

    @property
    def safe_tx_hash(self) -> HexBytes:
        return HexBytes(eip712_encode_hash(self.eip712_structured_data))

    @property
    def signers(self) -> List[str]:
        """
        :return: Owner addresses recovered from `signatures`, in signature order
        """
        if not self.signatures:
            return []
        else:
            return [
                safe_signature.owner
                for safe_signature in SafeSignature.parse_signature(
                    self.signatures, self.safe_tx_hash
                )
            ]

    @property
    def sorted_signers(self):
        return sorted(self.signers, key=lambda x: int(x, 16))

    @property
    def w3_tx(self):
        """
        :return: Web3 contract tx prepared for `call`, `transact` or `build_transaction`
        """
        return self.contract.functions.execTransaction(
            self.to,
            self.value,
            self.data,
            self.operation,
            self.safe_tx_gas,
            self.base_gas,
            self.gas_price,
            self.gas_token,
            self.refund_receiver,
            self.signatures,
        )

    def _raise_safe_vm_exception(self, message: str) -> NoReturn:
        """
        Translate a revert message from the Safe contract to a typed exception.

        :param message: Revert reason (text or GSxxx error code)
        :raises: A specific `InvalidMultisigTx` subclass if a known reason is
            found in `message`, plain `InvalidMultisigTx` otherwise
        """
        error_with_exception: Dict[str, Type[InvalidMultisigTx]] = {
            # https://github.com/safe-global/safe-contracts/blob/v1.3.0/docs/error_codes.md
            "GS000": CouldNotFinishInitialization,
            "GS001": ThresholdNeedsToBeDefined,
            "Could not pay gas costs with ether": CouldNotPayGasWithEther,
            "GS011": CouldNotPayGasWithEther,
            "Could not pay gas costs with token": CouldNotPayGasWithToken,
            "GS012": CouldNotPayGasWithToken,
            "GS013": SafeTransactionFailedWhenGasPriceAndSafeTxGasEmpty,
            "Hash has not been approved": HashHasNotBeenApproved,
            "Hash not approved": HashHasNotBeenApproved,
            "GS025": HashHasNotBeenApproved,
            "Invalid contract signature location: data not complete": InvalidContractSignatureLocation,
            "GS023": InvalidContractSignatureLocation,
            "Invalid contract signature location: inside static part": InvalidContractSignatureLocation,
            "GS021": InvalidContractSignatureLocation,
            "Invalid contract signature location: length not present": InvalidContractSignatureLocation,
            "GS022": InvalidContractSignatureLocation,
            "Invalid contract signature provided": InvalidContractSignatureLocation,
            "GS024": InvalidContractSignatureLocation,
            "Invalid owner provided": InvalidOwnerProvided,
            "Invalid owner address provided": InvalidOwnerProvided,
            "GS026": InvalidOwnerProvided,
            "Invalid signatures provided": InvalidSignaturesProvided,
            "Not enough gas to execute safe transaction": NotEnoughSafeTransactionGas,
            "GS010": NotEnoughSafeTransactionGas,
            "Only owners can approve a hash": OnlyOwnersCanApproveAHash,
            "GS030": OnlyOwnersCanApproveAHash,
            "GS031": MethodCanOnlyBeCalledFromThisContract,
            "Signature not provided by owner": SignatureNotProvidedByOwner,
            "Signatures data too short": SignaturesDataTooShort,
            "GS020": SignaturesDataTooShort,
            # ModuleManager
            "GS100": ModuleManagerException,
            "Invalid module address provided": ModuleManagerException,
            "GS101": ModuleManagerException,
            "GS102": ModuleManagerException,
            "Invalid prevModule, module pair provided": ModuleManagerException,
            "GS103": ModuleManagerException,
            "Method can only be called from an enabled module": ModuleManagerException,
            "GS104": ModuleManagerException,
            "Module has already been added": ModuleManagerException,
            # OwnerManager
            "Address is already an owner": OwnerManagerException,
            "GS200": OwnerManagerException,  # Owners have already been setup
            "GS201": OwnerManagerException,  # Threshold cannot exceed owner count
            "GS202": OwnerManagerException,  # Invalid owner address provided
            "GS203": OwnerManagerException,  # Invalid ower address provided
            "GS204": OwnerManagerException,  # Address is already an owner
            "GS205": OwnerManagerException,  # Invalid prevOwner, owner pair provided
            "Invalid prevOwner, owner pair provided": OwnerManagerException,
            "New owner count needs to be larger than new threshold": OwnerManagerException,
            "Threshold cannot exceed owner count": OwnerManagerException,
            "Threshold needs to be greater than 0": OwnerManagerException,
        }
        for reason, custom_exception in error_with_exception.items():
            if reason in message:
                raise custom_exception(message)
        raise InvalidMultisigTx(message)

    def call(
        self,
        tx_sender_address: Optional[str] = None,
        tx_gas: Optional[int] = None,
        block_identifier: Optional[BlockIdentifier] = "latest",
    ) -> int:
        """
        :param tx_sender_address:
        :param tx_gas: Force a gas limit
        :param block_identifier:
        :return: `1` if everything ok
        :raises: InvalidMultisigTx (or a subclass) if the Safe reverts
        """
        parameters: Dict[str, Any] = {
            "from": tx_sender_address if tx_sender_address else self.safe_address
        }
        if tx_gas:
            parameters["gas"] = tx_gas
        try:
            success = self.w3_tx.call(parameters, block_identifier=block_identifier)
            if not success:
                raise InvalidInternalTx(
                    "Success bit is %d, should be equal to 1" % success
                )
            return success
        # FIX: `ValueError` was caught together with `Web3Exception` here,
        # making the dedicated Parity handler below unreachable
        except Web3Exception as exc:
            # e.g. web3.exceptions.ContractLogicError: execution reverted: Invalid owner provided
            return self._raise_safe_vm_exception(str(exc))
        except ValueError as exc:  # Parity
            # Parity throws a ValueError, e.g.
            # {'code': -32015,
            #  'message': 'VM execution error.',
            #  'data': 'Reverted 0x08c379a00000000000000000000000000000000000000000000000000000000000000200000...'
            # }
            error_dict = exc.args[0] if exc.args else None
            data = error_dict.get("data") if isinstance(error_dict, dict) else None
            if data and isinstance(data, str) and "Reverted " in data:
                # Parity
                result = HexBytes(data.replace("Reverted ", ""))
                return self._raise_safe_vm_exception(str(result))
            # Not a Parity-style revert, raise it as a generic Safe VM error
            return self._raise_safe_vm_exception(str(exc))

    def recommended_gas(self) -> Wei:
        """
        :return: Recommended gas to use on the ethereum_tx
        """
        return Wei(self.base_gas + self.safe_tx_gas + 75000)

    def execute(
        self,
        tx_sender_private_key: str,
        tx_gas: Optional[int] = None,
        tx_gas_price: Optional[int] = None,
        tx_nonce: Optional[int] = None,
        block_identifier: Optional[BlockIdentifier] = "latest",
        eip1559_speed: Optional[TxSpeed] = None,
    ) -> Tuple[HexBytes, TxParams]:
        """
        Send multisig tx to the Safe

        :param tx_sender_private_key: Sender private key
        :param tx_gas: Gas for the external tx. If not, `(safe_tx_gas + base_gas) * 2` will be used
        :param tx_gas_price: Gas price of the external tx. If not, `gas_price` will be used
        :param tx_nonce: Force nonce for `tx_sender`
        :param block_identifier: `latest` or `pending`
        :param eip1559_speed: If provided, use EIP1559 transaction
        :return: Tuple(tx_hash, tx)
        :raises: InvalidMultisigTx: If user tx cannot go through the Safe
        """
        sender_account = Account.from_key(tx_sender_private_key)
        if eip1559_speed and self.ethereum_client.is_eip1559_supported():
            tx_parameters = self.ethereum_client.set_eip1559_fees(
                {
                    "from": sender_account.address,
                },
                tx_speed=eip1559_speed,
            )
        else:
            tx_parameters = {
                "from": sender_account.address,
                "gasPrice": tx_gas_price or self.w3.eth.gas_price,
            }
        if tx_gas:
            tx_parameters["gas"] = tx_gas
        if tx_nonce is not None:
            tx_parameters["nonce"] = tx_nonce
        self.tx = self.w3_tx.build_transaction(tx_parameters)
        self.tx["gas"] = Wei(
            tx_gas or (max(self.tx["gas"] + 75000, self.recommended_gas()))
        )
        self.tx_hash = self.ethereum_client.send_unsigned_transaction(
            self.tx,
            private_key=sender_account.key,
            retry=tx_nonce is None,  # Don't retry if a nonce was forced
            block_identifier=block_identifier,
        )
        # Set signatures empty after executing the tx. `Nonce` is increased even if it fails,
        # so signatures are not valid anymore
        self.signatures = b""
        return self.tx_hash, self.tx

    def sign(self, private_key: str) -> bytes:
        """
        {bytes32 r}{bytes32 s}{uint8 v}

        :param private_key:
        :return: Signature
        """
        account = Account.from_key(private_key)
        signature_dict = account.signHash(self.safe_tx_hash)
        signature = signature_to_bytes(
            signature_dict["v"], signature_dict["r"], signature_dict["s"]
        )
        # Insert signature sorted by owner address so the contract accepts it
        if account.address not in self.signers:
            new_owners = self.signers + [account.address]
            new_owner_pos = sorted(new_owners, key=lambda x: int(x, 16)).index(
                account.address
            )
            self.signatures = (
                self.signatures[: 65 * new_owner_pos]
                + signature
                + self.signatures[65 * new_owner_pos :]
            )
        return signature

    def unsign(self, address: str) -> bool:
        """
        Remove the signature belonging to `address`

        :param address: Signer address
        :return: `True` if a signature was removed, `False` otherwise
        """
        for pos, signer in enumerate(self.signers):
            if signer == address:
                # FIX: slice out exactly the 65-byte slot at `pos`;
                # `bytes.replace` could remove unrelated matching byte spans
                self.signatures = (
                    self.signatures[: pos * 65] + self.signatures[(pos + 1) * 65 :]
                )
                return True
        return False
import dataclasses
import math
from enum import Enum
from functools import cached_property
from logging import getLogger
from typing import Callable, List, NamedTuple, Optional, Union
from eth_abi import encode as encode_abi
from eth_abi.exceptions import DecodingError
from eth_abi.packed import encode_packed
from eth_account import Account
from eth_account.signers.local import LocalAccount
from eth_typing import ChecksumAddress, Hash32
from hexbytes import HexBytes
from web3 import Web3
from web3.contract import Contract
from web3.exceptions import Web3Exception
from web3.types import BlockIdentifier, Wei
from gnosis.eth import EthereumClient, EthereumTxSent
from gnosis.eth.constants import GAS_CALL_DATA_BYTE, NULL_ADDRESS, SENTINEL_ADDRESS
from gnosis.eth.contracts import (
get_compatibility_fallback_handler_V1_3_0_contract,
get_delegate_constructor_proxy_contract,
get_safe_contract,
get_safe_V0_0_1_contract,
get_safe_V1_0_0_contract,
get_safe_V1_1_1_contract,
get_safe_V1_3_0_contract,
)
from gnosis.eth.utils import (
fast_bytes_to_checksum_address,
fast_is_checksum_address,
fast_keccak,
get_eth_address_with_key,
)
from gnosis.safe.proxy_factory import ProxyFactory
from ..eth.typing import EthereumData
from .exceptions import (
CannotEstimateGas,
CannotRetrieveSafeInfoException,
InvalidPaymentToken,
)
from .safe_create2_tx import SafeCreate2Tx, SafeCreate2TxBuilder
from .safe_creation_tx import InvalidERC20Token, SafeCreationTx
from .safe_tx import SafeTx
logger = getLogger(__name__)
class SafeCreationEstimate(NamedTuple):
    """Estimated costs for deploying a new Safe."""

    gas: int  # Gas units required by the creation tx
    gas_price: int  # Gas price used for the estimation
    payment: int  # Payment computed by the creation tx builder
    payment_token: Optional[str]  # Payment token address, may be `None`
class SafeOperation(Enum):
    """Operation type for a Safe internal transaction."""

    CALL = 0
    DELEGATE_CALL = 1
    CREATE = 2
@dataclasses.dataclass
class SafeInfo:
    """Aggregated configuration of a deployed Safe."""

    address: ChecksumAddress
    fallback_handler: ChecksumAddress
    guard: ChecksumAddress
    master_copy: ChecksumAddress
    modules: List[ChecksumAddress]
    nonce: int
    owners: List[ChecksumAddress]
    threshold: int
    version: str
class Safe:
"""
Class to manage a Gnosis Safe
"""
# keccak256("fallback_manager.handler.address")
FALLBACK_HANDLER_STORAGE_SLOT = (
0x6C9A6C4A39284E37ED1CF53D337577D14212A4870FB976A4366C693B939918D5
)
# keccak256("guard_manager.guard.address")
GUARD_STORAGE_SLOT = (
0x4A204F620C8C5CCDCA3FD54D003BADD85BA500436A431F0CBDA4F558C93C34C8
)
# keccak256("SafeMessage(bytes message)");
SAFE_MESSAGE_TYPEHASH = bytes.fromhex(
"60b3cbf8b4a223d68d641b3b6ddf9a298e7f33710cf3d3a9d1146b5a6150fbca"
)
    def __init__(self, address: ChecksumAddress, ethereum_client: EthereumClient):
        """
        :param address: Safe address
        :param ethereum_client: Initialized ethereum client
        """
        # Fail fast on malformed addresses before doing any RPC call
        assert fast_is_checksum_address(address), "%s is not a valid address" % address
        self.ethereum_client = ethereum_client
        self.w3 = self.ethereum_client.w3
        self.address = address
def __str__(self):
return f"Safe={self.address}"
@cached_property
def contract(self) -> Contract:
v_1_3_0_contract = get_safe_V1_3_0_contract(self.w3, address=self.address)
version = v_1_3_0_contract.functions.VERSION().call()
if version == "1.3.0":
return v_1_3_0_contract
else:
return get_safe_V1_1_1_contract(self.w3, address=self.address)
@cached_property
def domain_separator(self) -> Optional[bytes]:
"""
:return: EIP721 DomainSeparator for the Safe. Returns `None` if not supported (for Safes < 1.0.0)
"""
try:
return self.retrieve_domain_separator()
except (Web3Exception, DecodingError, ValueError):
logger.warning("Safe %s does not support domainSeparator", self.address)
return None
    @staticmethod
    def create(
        ethereum_client: EthereumClient,
        deployer_account: LocalAccount,
        master_copy_address: str,
        owners: List[str],
        threshold: int,
        fallback_handler: str = NULL_ADDRESS,
        proxy_factory_address: Optional[str] = None,
        payment_token: str = NULL_ADDRESS,
        payment: int = 0,
        payment_receiver: str = NULL_ADDRESS,
    ) -> EthereumTxSent:
        """
        Deploy new Safe proxy pointing to the specified `master_copy` address and configured
        with the provided `owners` and `threshold`. By default, payment for the deployer of the tx will be `0`.
        If `proxy_factory_address` is set deployment will be done using the proxy factory instead of calling
        the `constructor` of a new `DelegatedProxy`
        Using `proxy_factory_address` is recommended, as it takes less gas.
        (Testing with `Ganache` and 1 owner 261534 without proxy vs 229022 with Proxy)

        :param ethereum_client: Initialized ethereum client
        :param deployer_account: Account paying for (and sending) the deployment tx
        :param master_copy_address: Address of the Safe master copy the proxy points to
        :param owners: Initial Safe owners
        :param threshold: Number of required confirmations (1 <= threshold <= len(owners))
        :param fallback_handler: Handler for fallback calls to the Safe
        :param proxy_factory_address: If provided, deploy through the ProxyFactory
        :param payment_token: Token used to refund the deployer (`NULL_ADDRESS` for ether)
        :param payment: Amount to refund the deployer
        :param payment_receiver: Address receiving the payment
        :return: EthereumTxSent with the deployed proxy address
        """
        assert owners, "At least one owner must be set"
        assert 1 <= threshold <= len(owners), "Threshold=%d must be <= %d" % (
            threshold,
            len(owners),
        )
        # Build the `setup` call that initializes the proxy on deployment
        initializer = (
            get_safe_contract(ethereum_client.w3, NULL_ADDRESS)
            .functions.setup(
                owners,
                threshold,
                NULL_ADDRESS,  # Contract address for optional delegate call
                b"",  # Data payload for optional delegate call
                fallback_handler,  # Handler for fallback calls to this contract,
                payment_token,
                payment,
                payment_receiver,
            )
            .build_transaction({"gas": Wei(1), "gasPrice": Wei(1)})["data"]
        )
        if proxy_factory_address:
            proxy_factory = ProxyFactory(proxy_factory_address, ethereum_client)
            return proxy_factory.deploy_proxy_contract(
                deployer_account, master_copy_address, initializer=HexBytes(initializer)
            )
        # No proxy factory: deploy a DelegateConstructorProxy directly
        proxy_contract = get_delegate_constructor_proxy_contract(ethereum_client.w3)
        tx = proxy_contract.constructor(
            master_copy_address, initializer
        ).build_transaction({"from": deployer_account.address})
        # NOTE(review): multiplies the estimated gas by 100000 — looks excessive
        # for a gas limit; confirm this factor is intended
        tx["gas"] = tx["gas"] * 100000
        tx_hash = ethereum_client.send_unsigned_transaction(
            tx, private_key=deployer_account.key
        )
        tx_receipt = ethereum_client.get_transaction_receipt(tx_hash, timeout=60)
        assert tx_receipt
        assert tx_receipt["status"]
        contract_address = tx_receipt["contractAddress"]
        return EthereumTxSent(tx_hash, tx, contract_address)
@staticmethod
def _deploy_master_contract(
ethereum_client: EthereumClient,
deployer_account: LocalAccount,
contract_fn: Callable[[Web3, Optional[str]], Contract],
) -> EthereumTxSent:
"""
Deploy master contract. Takes deployer_account (if unlocked in the node) or the deployer private key
Safe with version > v1.1.1 doesn't need to be initialized as it already has a constructor
:param ethereum_client:
:param deployer_account: Ethereum account
:param contract_fn: get contract function
:return: deployed contract address
"""
safe_contract = contract_fn(ethereum_client.w3)
constructor_tx = safe_contract.constructor().build_transaction()
tx_hash = ethereum_client.send_unsigned_transaction(
constructor_tx, private_key=deployer_account.key
)
tx_receipt = ethereum_client.get_transaction_receipt(tx_hash, timeout=60)
assert tx_receipt
assert tx_receipt["status"]
ethereum_tx_sent = EthereumTxSent(
tx_hash, constructor_tx, tx_receipt["contractAddress"]
)
logger.info(
"Deployed and initialized Safe Master Contract version=%s on address %s by %s",
contract_fn(ethereum_client.w3, ethereum_tx_sent.contract_address)
.functions.VERSION()
.call(),
ethereum_tx_sent.contract_address,
deployer_account.address,
)
return ethereum_tx_sent
@classmethod
def deploy_compatibility_fallback_handler(
cls, ethereum_client: EthereumClient, deployer_account: LocalAccount
) -> EthereumTxSent:
"""
Deploy Compatibility Fallback handler v1.3.0
:param ethereum_client:
:param deployer_account: Ethereum account
:return: deployed contract address
"""
contract = get_compatibility_fallback_handler_V1_3_0_contract(
ethereum_client.w3
)
constructor_tx = contract.constructor().build_transaction()
tx_hash = ethereum_client.send_unsigned_transaction(
constructor_tx, private_key=deployer_account.key
)
tx_receipt = ethereum_client.get_transaction_receipt(tx_hash, timeout=60)
assert tx_receipt
assert tx_receipt["status"]
ethereum_tx_sent = EthereumTxSent(
tx_hash, constructor_tx, tx_receipt["contractAddress"]
)
logger.info(
"Deployed and initialized Compatibility Fallback Handler version=%s on address %s by %s",
"1.3.0",
ethereum_tx_sent.contract_address,
deployer_account.address,
)
return ethereum_tx_sent
@classmethod
def deploy_master_contract_v1_3_0(
cls, ethereum_client: EthereumClient, deployer_account: LocalAccount
) -> EthereumTxSent:
"""
Deploy master contract v1.3.0. Takes deployer_account (if unlocked in the node) or the deployer private key
Safe with version > v1.1.1 doesn't need to be initialized as it already has a constructor
:param ethereum_client:
:param deployer_account: Ethereum account
:return: deployed contract address
"""
return cls._deploy_master_contract(
ethereum_client, deployer_account, get_safe_V1_3_0_contract
)
@classmethod
def deploy_master_contract_v1_1_1(
cls, ethereum_client: EthereumClient, deployer_account: LocalAccount
) -> EthereumTxSent:
"""
Deploy master contract v1.1.1. Takes deployer_account (if unlocked in the node) or the deployer private key
Safe with version > v1.1.1 doesn't need to be initialized as it already has a constructor
:param ethereum_client:
:param deployer_account: Ethereum account
:return: deployed contract address
"""
return cls._deploy_master_contract(
ethereum_client, deployer_account, get_safe_V1_1_1_contract
)
@staticmethod
def deploy_master_contract_v1_0_0(
ethereum_client: EthereumClient, deployer_account: LocalAccount
) -> EthereumTxSent:
"""
Deploy master contract. Takes deployer_account (if unlocked in the node) or the deployer private key
:param ethereum_client:
:param deployer_account: Ethereum account
:return: deployed contract address
"""
safe_contract = get_safe_V1_0_0_contract(ethereum_client.w3)
constructor_data = safe_contract.constructor().build_transaction({"gas": 0})[
"data"
]
initializer_data = safe_contract.functions.setup(
# We use 2 owners that nobody controls for the master copy
[
"0x0000000000000000000000000000000000000002",
"0x0000000000000000000000000000000000000003",
],
2, # Threshold. Maximum security
NULL_ADDRESS, # Address for optional DELEGATE CALL
b"", # Data for optional DELEGATE CALL
NULL_ADDRESS, # Payment token
0, # Payment
NULL_ADDRESS, # Refund receiver
).build_transaction({"to": NULL_ADDRESS})["data"]
ethereum_tx_sent = ethereum_client.deploy_and_initialize_contract(
deployer_account, constructor_data, HexBytes(initializer_data)
)
logger.info(
"Deployed and initialized Safe Master Contract=%s by %s",
ethereum_tx_sent.contract_address,
deployer_account.address,
)
return ethereum_tx_sent
@staticmethod
def deploy_master_contract_v0_0_1(
ethereum_client: EthereumClient, deployer_account: LocalAccount
) -> EthereumTxSent:
"""
Deploy master contract. Takes deployer_account (if unlocked in the node) or the deployer private key
:param ethereum_client:
:param deployer_account: Ethereum account
:return: deployed contract address
"""
safe_contract = get_safe_V0_0_1_contract(ethereum_client.w3)
constructor_data = safe_contract.constructor().build_transaction({"gas": 0})[
"data"
]
initializer_data = safe_contract.functions.setup(
# We use 2 owners that nobody controls for the master copy
[
"0x0000000000000000000000000000000000000002",
"0x0000000000000000000000000000000000000003",
],
2, # Threshold. Maximum security
NULL_ADDRESS, # Address for optional DELEGATE CALL
b"", # Data for optional DELEGATE CALL
).build_transaction({"to": NULL_ADDRESS})["data"]
ethereum_tx_sent = ethereum_client.deploy_and_initialize_contract(
deployer_account, constructor_data, HexBytes(initializer_data)
)
logger.info(
"Deployed and initialized Old Safe Master Contract=%s by %s",
ethereum_tx_sent.contract_address,
deployer_account.address,
)
return ethereum_tx_sent
@staticmethod
def estimate_safe_creation(
ethereum_client: EthereumClient,
old_master_copy_address: str,
number_owners: int,
gas_price: int,
payment_token: Optional[str],
payment_receiver: str = NULL_ADDRESS,
payment_token_eth_value: float = 1.0,
fixed_creation_cost: Optional[int] = None,
) -> SafeCreationEstimate:
s = 15
owners = [get_eth_address_with_key()[0] for _ in range(number_owners)]
threshold = number_owners
safe_creation_tx = SafeCreationTx(
w3=ethereum_client.w3,
owners=owners,
threshold=threshold,
signature_s=s,
master_copy=old_master_copy_address,
gas_price=gas_price,
funder=payment_receiver,
payment_token=payment_token,
payment_token_eth_value=payment_token_eth_value,
fixed_creation_cost=fixed_creation_cost,
)
return SafeCreationEstimate(
safe_creation_tx.gas,
safe_creation_tx.gas_price,
safe_creation_tx.payment,
safe_creation_tx.payment_token,
)
@staticmethod
def estimate_safe_creation_2(
ethereum_client: EthereumClient,
master_copy_address: str,
proxy_factory_address: str,
number_owners: int,
gas_price: int,
payment_token: Optional[str],
payment_receiver: str = NULL_ADDRESS,
fallback_handler: Optional[str] = None,
payment_token_eth_value: float = 1.0,
fixed_creation_cost: Optional[int] = None,
) -> SafeCreationEstimate:
salt_nonce = 15
owners = [Account.create().address for _ in range(number_owners)]
threshold = number_owners
if not fallback_handler:
fallback_handler = (
Account.create().address
) # Better estimate it, it's required for new Safes
safe_creation_tx = SafeCreate2TxBuilder(
w3=ethereum_client.w3,
master_copy_address=master_copy_address,
proxy_factory_address=proxy_factory_address,
).build(
owners=owners,
threshold=threshold,
fallback_handler=fallback_handler,
salt_nonce=salt_nonce,
gas_price=gas_price,
payment_receiver=payment_receiver,
payment_token=payment_token,
payment_token_eth_value=payment_token_eth_value,
fixed_creation_cost=fixed_creation_cost,
)
return SafeCreationEstimate(
safe_creation_tx.gas,
safe_creation_tx.gas_price,
safe_creation_tx.payment,
safe_creation_tx.payment_token,
)
@staticmethod
def build_safe_creation_tx(
ethereum_client: EthereumClient,
master_copy_old_address: str,
s: int,
owners: List[str],
threshold: int,
gas_price: int,
payment_token: Optional[str],
payment_receiver: str,
payment_token_eth_value: float = 1.0,
fixed_creation_cost: Optional[int] = None,
) -> SafeCreationTx:
try:
safe_creation_tx = SafeCreationTx(
w3=ethereum_client.w3,
owners=owners,
threshold=threshold,
signature_s=s,
master_copy=master_copy_old_address,
gas_price=gas_price,
funder=payment_receiver,
payment_token=payment_token,
payment_token_eth_value=payment_token_eth_value,
fixed_creation_cost=fixed_creation_cost,
)
except InvalidERC20Token as exc:
raise InvalidPaymentToken(
"Invalid payment token %s" % payment_token
) from exc
assert safe_creation_tx.tx_pyethereum.nonce == 0
return safe_creation_tx
@staticmethod
def build_safe_create2_tx(
ethereum_client: EthereumClient,
master_copy_address: str,
proxy_factory_address: str,
salt_nonce: int,
owners: List[str],
threshold: int,
gas_price: int,
payment_token: Optional[str],
payment_receiver: Optional[str] = None, # If none, it will be `tx.origin`
fallback_handler: Optional[str] = NULL_ADDRESS,
payment_token_eth_value: float = 1.0,
fixed_creation_cost: Optional[int] = None,
) -> SafeCreate2Tx:
"""
Prepare safe proxy deployment for being relayed. It calculates and sets the costs of deployment to be returned
to the sender of the tx. If you are an advanced user you may prefer to use `create` function
"""
try:
safe_creation_tx = SafeCreate2TxBuilder(
w3=ethereum_client.w3,
master_copy_address=master_copy_address,
proxy_factory_address=proxy_factory_address,
).build(
owners=owners,
threshold=threshold,
fallback_handler=fallback_handler,
salt_nonce=salt_nonce,
gas_price=gas_price,
payment_receiver=payment_receiver,
payment_token=payment_token,
payment_token_eth_value=payment_token_eth_value,
fixed_creation_cost=fixed_creation_cost,
)
except InvalidERC20Token as exc:
raise InvalidPaymentToken(
"Invalid payment token %s" % payment_token
) from exc
return safe_creation_tx
def check_funds_for_tx_gas(
self, safe_tx_gas: int, base_gas: int, gas_price: int, gas_token: str
) -> bool:
"""
Check safe has enough funds to pay for a tx
:param safe_tx_gas: Safe tx gas
:param base_gas: Data gas
:param gas_price: Gas Price
:param gas_token: Gas Token, to use token instead of ether for the gas
:return: `True` if enough funds, `False` otherwise
"""
if gas_token == NULL_ADDRESS:
balance = self.ethereum_client.get_balance(self.address)
else:
balance = self.ethereum_client.erc20.get_balance(self.address, gas_token)
return balance >= (safe_tx_gas + base_gas) * gas_price
    def estimate_tx_base_gas(
        self,
        to: str,
        value: int,
        data: bytes,
        operation: int,
        gas_token: str,
        estimated_tx_gas: int,
    ) -> int:
        """
        Calculate gas costs that are independent of the transaction execution(e.g. base transaction fee,
        signature check, payment of the refund...)

        :param to:
        :param value:
        :param data:
        :param operation:
        :param gas_token:
        :param estimated_tx_gas: gas calculated with `estimate_tx_gas`
        :return: Estimated base gas for the Safe tx
        """
        data = data or b""
        safe_contract = self.contract
        threshold = self.retrieve_threshold()
        nonce = self.retrieve_nonce()
        # Every byte == 0 -> 4 Gas
        # Every byte != 0 -> 16 Gas (68 before Istanbul)
        # numbers < 256 (0x00(31*2)..ff) are 192 -> 31 * 4 + 1 * GAS_CALL_DATA_BYTE
        # numbers < 65535 (0x(30*2)..ffff) are 256 -> 30 * 4 + 2 * GAS_CALL_DATA_BYTE
        # Calculate gas for signatures
        # (array count (3 -> r, s, v) + ecrecover costs) * signature count
        # ecrecover for ecdsa ~= 4K gas, we use 6K
        ecrecover_gas = 6000
        signature_gas = threshold * (
            1 * GAS_CALL_DATA_BYTE + 2 * 32 * GAS_CALL_DATA_BYTE + ecrecover_gas
        )
        safe_tx_gas = estimated_tx_gas
        base_gas = 0
        gas_price = 1
        gas_token = gas_token or NULL_ADDRESS
        signatures = b""
        refund_receiver = NULL_ADDRESS
        # Build the calldata of the `execTransaction` call only to measure its
        # calldata gas cost; it is never sent
        data = HexBytes(
            safe_contract.functions.execTransaction(
                to,
                value,
                data,
                operation,
                safe_tx_gas,
                base_gas,
                gas_price,
                gas_token,
                refund_receiver,
                signatures,
            ).build_transaction({"gas": 1, "gasPrice": 1})["data"]
        )
        # If nonce == 0, nonce storage has to be initialized
        if nonce == 0:
            nonce_gas = 20000
        else:
            nonce_gas = 5000
        # Keccak costs for the hash of the safe tx
        hash_generation_gas = 1500
        base_gas = (
            signature_gas
            + self.ethereum_client.estimate_data_gas(data)
            + nonce_gas
            + hash_generation_gas
        )
        # Add additional gas costs
        if base_gas > 65536:
            base_gas += 64
        else:
            base_gas += 128
        base_gas += 32000  # Base tx costs, transfer costs...
        return base_gas
    def estimate_tx_gas_with_safe(
        self,
        to: str,
        value: int,
        data: bytes,
        operation: int,
        gas_limit: Optional[int] = None,
        block_identifier: Optional[BlockIdentifier] = "latest",
    ) -> int:
        """
        Estimate tx gas using the Safe `requiredTxGas` method. The estimation
        is returned encoded inside revert data, so a raw `eth_call` is issued
        and the revert payload parsed.

        :param to: destination address of the inner tx
        :param value: ether value (wei)
        :param data: payload of the inner tx
        :param operation: Safe operation (call/delegate call...)
        :param gas_limit: optional gas limit for the `eth_call`
        :param block_identifier: block the `eth_call` runs against
        :return: int: Estimated gas
        :raises: CannotEstimateGas: If gas cannot be estimated
        :raises: ValueError: Cannot decode received data
        """
        safe_address = self.address
        data = data or b""
        def parse_revert_data(result: bytes) -> int:
            # Revert data layout:
            # 4 bytes - error method id
            # 32 bytes - position
            # 32 bytes - length
            # Last 32 bytes - value of revert (if everything went right)
            gas_estimation_offset = 4 + 32 + 32
            gas_estimation = result[gas_estimation_offset:]
            # Estimated gas must be 32 bytes
            if len(gas_estimation) != 32:
                gas_limit_text = (
                    f"with gas limit={gas_limit} "
                    if gas_limit is not None
                    else "without gas limit set "
                )
                # `tx` is the enclosing function's variable; it is bound by the
                # time this closure runs
                logger.warning(
                    "Safe=%s Problem estimating gas, returned value %sis %s for tx=%s",
                    safe_address,
                    gas_limit_text,
                    result.hex(),
                    tx,
                )
                raise CannotEstimateGas("Received %s for tx=%s" % (result.hex(), tx))
            return int(gas_estimation.hex(), 16)
        # Build the `requiredTxGas` calldata; gas/gasPrice are zeroed so web3
        # neither estimates nor fetches a gas price
        tx = self.contract.functions.requiredTxGas(
            to, value, data, operation
        ).build_transaction(
            {
                "from": safe_address,
                "gas": 0,  # Don't call estimate
                "gasPrice": 0,  # Don't get gas price
            }
        )
        # The call is performed from the Safe's own address to itself
        tx_params = {
            "from": safe_address,
            "to": safe_address,
            "data": tx["data"],
        }
        if gas_limit:
            tx_params["gas"] = hex(gas_limit)
        # Raw JSON-RPC `eth_call` so the revert data can be read from the
        # node's response
        query = {
            "jsonrpc": "2.0",
            "method": "eth_call",
            "params": [tx_params, block_identifier],
            "id": 1,
        }
        response = self.ethereum_client.http_session.post(
            self.ethereum_client.ethereum_node_url, json=query, timeout=30
        )
        if response.ok:
            response_data = response.json()
            error_data: Optional[str] = None
            if "error" in response_data and "data" in response_data["error"]:
                error_data = response_data["error"]["data"]
            elif "result" in response_data:  # Ganache-cli
                error_data = response_data["result"]
            if error_data:
                if "0x" in error_data:
                    return parse_revert_data(
                        HexBytes(error_data[error_data.find("0x") :])
                    )
        # Reached when the HTTP call failed or no usable revert data came back
        raise CannotEstimateGas(
            f"Received {response.status_code} - {response.content} from ethereum node"
        )
def estimate_tx_gas_with_web3(self, to: str, value: int, data: EthereumData) -> int:
"""
:param to:
:param value:
:param data:
:return: Estimation using web3 `estimate_gas`
"""
try:
return self.ethereum_client.estimate_gas(
to, from_=self.address, value=value, data=data
)
except (Web3Exception, ValueError) as exc:
raise CannotEstimateGas(
f"Cannot estimate gas with `eth_estimateGas`: {exc}"
) from exc
    def estimate_tx_gas_by_trying(
        self, to: str, value: int, data: Union[bytes, str], operation: int
    ):
        """
        Try to get an estimation with Safe's `requiredTxGas`. If estimation is successful, try to set a gas limit and
        estimate again. If gas estimation is ok, same gas estimation should be returned, if it's less than required
        estimation will not be completed, so estimation was not accurate and gas limit needs to be increased.

        :param to: destination address of the inner tx
        :param value: ether value (wei)
        :param data: payload of the inner tx, hex str or bytes
        :param operation: Safe operation (call/delegate call...)
        :return: Estimated gas calling `requiredTxGas` setting a gas limit and checking if `eth_call` is successful
        :raises: CannotEstimateGas
        """
        if not data:
            data = b""
        elif isinstance(data, str):
            data = HexBytes(data)
        gas_estimated = self.estimate_tx_gas_with_safe(to, value, data, operation)
        block_gas_limit: Optional[int] = None
        base_gas: Optional[int] = self.ethereum_client.estimate_data_gas(data)
        # Re-run the estimation with an explicit gas limit: if that `eth_call`
        # fails, the estimation was too low, so bump it ~3% per retry until it
        # succeeds or hits the block gas limit
        for i in range(
            1, 30
        ):  # Make sure tx can be executed, fixing for example 63/64th problem
            try:
                self.estimate_tx_gas_with_safe(
                    to,
                    value,
                    data,
                    operation,
                    gas_limit=gas_estimated + base_gas + 32000,
                )
                return gas_estimated
            except CannotEstimateGas:
                logger.warning(
                    "Safe=%s - Found 63/64 problem gas-estimated=%d to=%s data=%s",
                    self.address,
                    gas_estimated,
                    to,
                    data.hex(),
                )
                # Fetch the block gas limit lazily, only on the first failure
                block_gas_limit = (
                    block_gas_limit
                    or self.w3.eth.get_block("latest", full_transactions=False)[
                        "gasLimit"
                    ]
                )
                gas_estimated = math.floor((1 + i * 0.03) * gas_estimated)
                if gas_estimated >= block_gas_limit:
                    return block_gas_limit
        return gas_estimated
def estimate_tx_gas(self, to: str, value: int, data: bytes, operation: int) -> int:
"""
Estimate tx gas. Use `requiredTxGas` on the Safe contract and fallbacks to `eth_estimateGas` if that method
fails. Note: `eth_estimateGas` cannot estimate delegate calls
:param to:
:param value:
:param data:
:param operation:
:return: Estimated gas for Safe inner tx
:raises: CannotEstimateGas
"""
# Costs to route through the proxy and nested calls
PROXY_GAS = 1000
# https://github.com/ethereum/solidity/blob/dfe3193c7382c80f1814247a162663a97c3f5e67/libsolidity/codegen/ExpressionCompiler.cpp#L1764
# This was `false` before solc 0.4.21 -> `m_context.evmVersion().canOverchargeGasForCall()`
# So gas needed by caller will be around 35k
OLD_CALL_GAS = 35000
# Web3 `estimate_gas` estimates less gas
WEB3_ESTIMATION_OFFSET = 23000
ADDITIONAL_GAS = PROXY_GAS + OLD_CALL_GAS
try:
return (
self.estimate_tx_gas_by_trying(to, value, data, operation)
+ ADDITIONAL_GAS
)
except CannotEstimateGas:
return (
self.estimate_tx_gas_with_web3(to, value, data)
+ ADDITIONAL_GAS
+ WEB3_ESTIMATION_OFFSET
)
def estimate_tx_operational_gas(self, data_bytes_length: int) -> int:
"""
DEPRECATED. `estimate_tx_base_gas` already includes this.
Estimates the gas for the verification of the signatures and other safe related tasks
before and after executing a transaction.
Calculation will be the sum of:
- Base cost of 15000 gas
- 100 of gas per word of `data_bytes`
- Validate the signatures 5000 * threshold (ecrecover for ecdsa ~= 4K gas)
:param data_bytes_length: Length of the data (in bytes, so `len(HexBytes('0x12'))` would be `1`
:return: gas costs per signature * threshold of Safe
"""
threshold = self.retrieve_threshold()
return 15000 + data_bytes_length // 32 * 100 + 5000 * threshold
def get_message_hash(self, message: Union[str, Hash32]) -> Hash32:
"""
Return hash of a message that can be signed by owners.
:param message: Message that should be hashed
:return: Message hash
"""
if isinstance(message, str):
message = message.encode()
message_hash = fast_keccak(message)
safe_message_hash = Web3.keccak(
encode_abi(
["bytes32", "bytes32"], [self.SAFE_MESSAGE_TYPEHASH, message_hash]
)
)
return Web3.keccak(
encode_packed(
["bytes1", "bytes1", "bytes32", "bytes32"],
[
bytes.fromhex("19"),
bytes.fromhex("01"),
self.domain_separator,
safe_message_hash,
],
)
)
    def retrieve_all_info(
        self, block_identifier: Optional[BlockIdentifier] = "latest"
    ) -> SafeInfo:
        """
        Get all Safe info in the same batch call.

        :param block_identifier: block used for every query
        :return: `SafeInfo` with master copy, fallback handler, guard, modules,
            nonce, owners, threshold and version
        :raises: CannotRetrieveSafeInfoException
        """
        try:
            contract = self.contract
            master_copy = self.retrieve_master_copy_address()
            fallback_handler = self.retrieve_fallback_handler()
            guard = self.retrieve_guard()
            # Batch the remaining queries into a single RPC round trip.
            # `raise_exception=False` makes unsupported methods return `None`
            results = self.ethereum_client.batch_call(
                [
                    contract.functions.getModulesPaginated(
                        SENTINEL_ADDRESS, 20
                    ),  # Does not exist in version < 1.1.1
                    contract.functions.nonce(),
                    contract.functions.getOwners(),
                    contract.functions.getThreshold(),
                    contract.functions.VERSION(),
                ],
                from_address=self.address,
                block_identifier=block_identifier,
                raise_exception=False,
            )
            modules_response, nonce, owners, threshold, version = results
            if not modules_response:
                # < 1.1.1, pagination not supported: use the fallback retrieval
                modules = self.retrieve_modules()
            else:
                modules, next_module = modules_response
                if modules and next_module != SENTINEL_ADDRESS:
                    # Still more elements in the list
                    modules = self.retrieve_modules()
            return SafeInfo(
                self.address,
                fallback_handler,
                guard,
                master_copy,
                modules,
                nonce,
                owners,
                threshold,
                version,
            )
        except (Web3Exception, ValueError) as e:
            raise CannotRetrieveSafeInfoException(self.address) from e
def retrieve_domain_separator(
self, block_identifier: Optional[BlockIdentifier] = "latest"
) -> str:
return self.contract.functions.domainSeparator().call(
block_identifier=block_identifier
)
def retrieve_code(self) -> HexBytes:
return self.w3.eth.get_code(self.address)
def retrieve_fallback_handler(
self, block_identifier: Optional[BlockIdentifier] = "latest"
) -> ChecksumAddress:
address = self.ethereum_client.w3.eth.get_storage_at(
self.address,
self.FALLBACK_HANDLER_STORAGE_SLOT,
block_identifier=block_identifier,
)[-20:].rjust(20, b"\0")
if len(address) == 20:
return fast_bytes_to_checksum_address(address)
else:
return NULL_ADDRESS
def retrieve_guard(
self, block_identifier: Optional[BlockIdentifier] = "latest"
) -> ChecksumAddress:
address = self.ethereum_client.w3.eth.get_storage_at(
self.address, self.GUARD_STORAGE_SLOT, block_identifier=block_identifier
)[-20:].rjust(20, b"\0")
if len(address) == 20:
return fast_bytes_to_checksum_address(address)
else:
return NULL_ADDRESS
def retrieve_master_copy_address(
self, block_identifier: Optional[BlockIdentifier] = "latest"
) -> ChecksumAddress:
address = self.w3.eth.get_storage_at(
self.address, "0x00", block_identifier=block_identifier
)[-20:].rjust(20, b"\0")
return fast_bytes_to_checksum_address(address)
    def retrieve_modules(
        self,
        pagination: Optional[int] = 50,
        block_identifier: Optional[BlockIdentifier] = "latest",
        max_modules_to_retrieve: Optional[int] = 500,
    ) -> List[ChecksumAddress]:
        """
        Return all the modules enabled on the Safe.

        :param pagination: Number of modules to get per request
        :param block_identifier: block used for the queries
        :param max_modules_to_retrieve: Maximum number of modules to retrieve
        :return: List of module addresses
        """
        try:
            # Contracts with Safe version < 1.1.0 were not paginated
            contract = get_safe_V1_0_0_contract(
                self.ethereum_client.w3, address=self.address
            )
            return contract.functions.getModules().call(
                block_identifier=block_identifier
            )
        except Web3Exception:
            # Not an old Safe: fall through to the paginated interface
            pass
        contract = self.contract
        address = SENTINEL_ADDRESS
        all_modules: List[ChecksumAddress] = []
        for _ in range(max_modules_to_retrieve // pagination):
            # If we use a `while True` loop a custom coded Safe could get us into an infinite loop
            (modules, address) = contract.functions.getModulesPaginated(
                address, pagination
            ).call(block_identifier=block_identifier)
            if not modules or address in (NULL_ADDRESS, SENTINEL_ADDRESS):
                # `NULL_ADDRESS` is only seen in uninitialized Safes
                break
            # Safes with version < 1.4.0 don't include the `starter address` used as pagination in the module list
            # From 1.4.0 onwards it is included, so we check for duplicated addresses before inserting
            modules_to_insert = [
                module for module in modules + [address] if module not in all_modules
            ]
            all_modules.extend(modules_to_insert)
        return all_modules
def retrieve_is_hash_approved(
self,
owner: str,
safe_hash: bytes,
block_identifier: Optional[BlockIdentifier] = "latest",
) -> bool:
return (
self.contract.functions.approvedHashes(owner, safe_hash).call(
block_identifier=block_identifier
)
== 1
)
def retrieve_is_message_signed(
self,
message_hash: bytes,
block_identifier: Optional[BlockIdentifier] = "latest",
) -> bool:
return self.contract.functions.signedMessages(message_hash).call(
block_identifier=block_identifier
)
def retrieve_is_owner(
self, owner: str, block_identifier: Optional[BlockIdentifier] = "latest"
) -> bool:
return self.contract.functions.isOwner(owner).call(
block_identifier=block_identifier
)
def retrieve_nonce(
self, block_identifier: Optional[BlockIdentifier] = "latest"
) -> int:
return self.contract.functions.nonce().call(block_identifier=block_identifier)
def retrieve_owners(
self, block_identifier: Optional[BlockIdentifier] = "latest"
) -> List[str]:
return self.contract.functions.getOwners().call(
block_identifier=block_identifier
)
def retrieve_threshold(
self, block_identifier: Optional[BlockIdentifier] = "latest"
) -> int:
return self.contract.functions.getThreshold().call(
block_identifier=block_identifier
)
def retrieve_version(
self, block_identifier: Optional[BlockIdentifier] = "latest"
) -> str:
return self.contract.functions.VERSION().call(block_identifier=block_identifier)
    def build_multisig_tx(
        self,
        to: str,
        value: int,
        data: bytes,
        operation: int = SafeOperation.CALL.value,
        safe_tx_gas: int = 0,
        base_gas: int = 0,
        gas_price: int = 0,
        gas_token: str = NULL_ADDRESS,
        refund_receiver: str = NULL_ADDRESS,
        signatures: bytes = b"",
        safe_nonce: Optional[int] = None,
        safe_version: Optional[str] = None,
    ) -> SafeTx:
        """
        Allows to execute a Safe transaction confirmed by required number of owners and then pays the account
        that submitted the transaction. The fees are always transferred, even if the user transaction fails.

        :param to: Destination address of Safe transaction
        :param value: Ether value of Safe transaction
        :param data: Data payload of Safe transaction
        :param operation: Operation type of Safe transaction
        :param safe_tx_gas: Gas that should be used for the Safe transaction
        :param base_gas: Gas costs that are independent of the transaction execution
            (e.g. base transaction fee, signature check, payment of the refund)
        :param gas_price: Gas price that should be used for the payment calculation
        :param gas_token: Token address (or `0x000..000` if ETH) that is used for the payment
        :param refund_receiver: Address of receiver of gas payment (or `0x000..000` if tx.origin).
        :param signatures: Packed signature data ({bytes32 r}{bytes32 s}{uint8 v})
        :param safe_nonce: Nonce of the safe (to calculate hash)
        :param safe_version: Safe version (to calculate hash)
        :return: `SafeTx` object, not yet sent
        """
        # Only hit the blockchain when nonce/version were not provided
        if safe_nonce is None:
            safe_nonce = self.retrieve_nonce()
        safe_version = safe_version or self.retrieve_version()
        return SafeTx(
            self.ethereum_client,
            self.address,
            to,
            value,
            data,
            operation,
            safe_tx_gas,
            base_gas,
            gas_price,
            gas_token,
            refund_receiver,
            signatures=signatures,
            safe_nonce=safe_nonce,
            safe_version=safe_version,
        )
def send_multisig_tx(
self,
to: str,
value: int,
data: bytes,
operation: int,
safe_tx_gas: int,
base_gas: int,
gas_price: int,
gas_token: str,
refund_receiver: str,
signatures: bytes,
tx_sender_private_key: str,
tx_gas=None,
tx_gas_price=None,
block_identifier: Optional[BlockIdentifier] = "latest",
) -> EthereumTxSent:
"""
Build and send Safe tx
:param to:
:param value:
:param data:
:param operation:
:param safe_tx_gas:
:param base_gas:
:param gas_price:
:param gas_token:
:param refund_receiver:
:param signatures:
:param tx_sender_private_key:
:param tx_gas: Gas for the external tx. If not, `(safe_tx_gas + data_gas) * 2` will be used
:param tx_gas_price: Gas price of the external tx. If not, `gas_price` will be used
:param block_identifier:
:return: Tuple(tx_hash, tx)
:raises: InvalidMultisigTx: If user tx cannot go through the Safe
"""
safe_tx = self.build_multisig_tx(
to,
value,
data,
operation,
safe_tx_gas,
base_gas,
gas_price,
gas_token,
refund_receiver,
signatures,
)
tx_sender_address = Account.from_key(tx_sender_private_key).address
safe_tx.call(
tx_sender_address=tx_sender_address, block_identifier=block_identifier
)
tx_hash, tx = safe_tx.execute(
tx_sender_private_key=tx_sender_private_key,
tx_gas=tx_gas,
tx_gas_price=tx_gas_price,
block_identifier=block_identifier,
)
return EthereumTxSent(tx_hash, tx, None) | /safe_pls_py-5.4.3-py3-none-any.whl/gnosis/safe/safe.py | 0.889571 | 0.178204 | safe.py | pypi |
from typing import Dict, List, Tuple
from gnosis.eth import EthereumNetwork
# Known Safe master copy (singleton) deployments per network:
# each entry is (checksummed address, deployment block number, version string)
MASTER_COPIES: Dict[EthereumNetwork, List[Tuple[str, int, str]]] = {
    EthereumNetwork.MAINNET: [
        (
            "0xfb1bffC9d739B8D520DaF37dF666da4C687191EA",
            14981217,
            "1.3.0+L2",
        ),  # safe singleton address
        (
            "0x3E5c63644E683549055b9Be8653de26E0B4CD36E",
            12504423,
            "1.3.0+L2",
        ),  # default singleton address
        ("0xd9Db270c1B5E3Bd161E8c8503c55cEABeE709552", 12504268, "1.3.0"),
        ("0x6851D6fDFAfD08c0295C392436245E5bc78B0185", 10329734, "1.2.0"),
        ("0x34CfAC646f301356fAa8B21e94227e3583Fe3F5F", 9084503, "1.1.1"),
        ("0xaE32496491b53841efb51829d6f886387708F99B", 8915728, "1.1.0"),
        ("0xb6029EA3B2c51D09a50B53CA8012FeEB05bDa35A", 7457553, "1.0.0"),
        ("0x8942595A2dC5181Df0465AF0D7be08c8f23C93af", 6766257, "0.1.0"),
        ("0xAC6072986E985aaBE7804695EC2d8970Cf7541A2", 6569433, "0.0.2"),
    ],
    EthereumNetwork.GOERLI: [
        (
            "0xfb1bffC9d739B8D520DaF37dF666da4C687191EA",
            6900544,
            "1.3.0+L2",
        ),  # safe singleton address
        (
            "0x3E5c63644E683549055b9Be8653de26E0B4CD36E",
            4854168,
            "1.3.0+L2",
        ),  # default singleton address
        (
            "0x69f4D1788e39c87893C980c06EdF4b7f686e2938",
            6900547,
            "1.3.0",
        ),  # safe singleton address
        (
            "0xd9Db270c1B5E3Bd161E8c8503c55cEABeE709552",
            4854169,
            "1.3.0",
        ),  # default singleton address
        ("0x6851D6fDFAfD08c0295C392436245E5bc78B0185", 2930373, "1.2.0"),
        ("0x34CfAC646f301356fAa8B21e94227e3583Fe3F5F", 1798663, "1.1.1"),
        ("0xaE32496491b53841efb51829d6f886387708F99B", 1631488, "1.1.0"),
        ("0xb6029EA3B2c51D09a50B53CA8012FeEB05bDa35A", 319108, "1.0.0"),
        ("0x8942595A2dC5181Df0465AF0D7be08c8f23C93af", 34096, "0.1.0"),
    ],
    # NOTE(review): PulseChain entries reuse the Ethereum mainnet addresses and
    # block numbers (PulseChain is an Ethereum fork) — verify block numbers
    EthereumNetwork.PULSECHAIN_MAINNET: [
        (
            "0xfb1bffC9d739B8D520DaF37dF666da4C687191EA",
            14981217,
            "1.3.0+L2",
        ),  # safe singleton address
        (
            "0x3E5c63644E683549055b9Be8653de26E0B4CD36E",
            12504423,
            "1.3.0+L2",
        ),  # default singleton address
        ("0xd9Db270c1B5E3Bd161E8c8503c55cEABeE709552", 12504268, "1.3.0"),
        ("0x6851D6fDFAfD08c0295C392436245E5bc78B0185", 10329734, "1.2.0"),
        ("0x34CfAC646f301356fAa8B21e94227e3583Fe3F5F", 9084503, "1.1.1"),
        ("0xaE32496491b53841efb51829d6f886387708F99B", 8915728, "1.1.0"),
        ("0xb6029EA3B2c51D09a50B53CA8012FeEB05bDa35A", 7457553, "1.0.0"),
        ("0x8942595A2dC5181Df0465AF0D7be08c8f23C93af", 6766257, "0.1.0"),
        ("0xAC6072986E985aaBE7804695EC2d8970Cf7541A2", 6569433, "0.0.2"),
    ],
    EthereumNetwork.PULSECHAIN_TESTNET: [
        (
            "0xfb1bffC9d739B8D520DaF37dF666da4C687191EA",
            14981217,
            "1.3.0+L2",
        ),  # safe singleton address
        (
            "0x3E5c63644E683549055b9Be8653de26E0B4CD36E",
            12504423,
            "1.3.0+L2",
        ),  # default singleton address
        ("0xd9Db270c1B5E3Bd161E8c8503c55cEABeE709552", 12504268, "1.3.0"),
        ("0x6851D6fDFAfD08c0295C392436245E5bc78B0185", 10329734, "1.2.0"),
        ("0x34CfAC646f301356fAa8B21e94227e3583Fe3F5F", 9084503, "1.1.1"),
        ("0xaE32496491b53841efb51829d6f886387708F99B", 8915728, "1.1.0"),
        ("0xb6029EA3B2c51D09a50B53CA8012FeEB05bDa35A", 7457553, "1.0.0"),
        ("0x8942595A2dC5181Df0465AF0D7be08c8f23C93af", 6766257, "0.1.0"),
        ("0xAC6072986E985aaBE7804695EC2d8970Cf7541A2", 6569433, "0.0.2"),
    ],
}
# Known Safe Proxy Factory deployments per network:
# each entry is (checksummed address, deployment block number)
PROXY_FACTORIES: Dict[EthereumNetwork, List[Tuple[str, int]]] = {
    EthereumNetwork.MAINNET: [
        (
            "0xC22834581EbC8527d974F8a1c97E1bEA4EF910BC",
            14981216,
        ),  # v1.3.0 safe singleton address
        (
            "0xa6B71E26C5e0845f74c812102Ca7114b6a896AB2",
            12504126,
        ),  # v1.3.0 default singleton address
        ("0x76E2cFc1F5Fa8F6a5b3fC4c8F4788F0116861F9B", 9084508),  # v1.1.1
        ("0x50e55Af101C777bA7A1d560a774A82eF002ced9F", 8915731),  # v1.1.0
        ("0x12302fE9c02ff50939BaAaaf415fc226C078613C", 7450116),  # v1.0.0
    ],
    EthereumNetwork.GOERLI: [
        (
            "0xC22834581EbC8527d974F8a1c97E1bEA4EF910BC",
            6900531,
        ),  # v1.3.0 safe singleton address
        (
            "0xa6B71E26C5e0845f74c812102Ca7114b6a896AB2",
            4695402,
        ),  # v1.3.0 default singleton address
        ("0x76E2cFc1F5Fa8F6a5b3fC4c8F4788F0116861F9B", 1798666),
        ("0x50e55Af101C777bA7A1d560a774A82eF002ced9F", 1631491),
        ("0x12302fE9c02ff50939BaAaaf415fc226C078613C", 312509),
    ],
    EthereumNetwork.PULSECHAIN_MAINNET: [
        (
            "0xC22834581EbC8527d974F8a1c97E1bEA4EF910BC",
            14981216,
        ),  # v1.3.0 safe singleton address
        (
            "0xa6B71E26C5e0845f74c812102Ca7114b6a896AB2",
            12504126,
        ),  # v1.3.0 default singleton address
        ("0x76E2cFc1F5Fa8F6a5b3fC4c8F4788F0116861F9B", 9084508),  # v1.1.1
        ("0x50e55Af101C777bA7A1d560a774A82eF002ced9F", 8915731),  # v1.1.0
        ("0x12302fE9c02ff50939BaAaaf415fc226C078613C", 7450116),  # v1.0.0
    ],
    EthereumNetwork.PULSECHAIN_TESTNET: [
        (
            "0xC22834581EbC8527d974F8a1c97E1bEA4EF910BC",
            14981216,
        ),  # v1.3.0 safe singleton address
        (
            "0xa6B71E26C5e0845f74c812102Ca7114b6a896AB2",
            12504126,
        ),  # v1.3.0 default singleton address
        ("0x76E2cFc1F5Fa8F6a5b3fC4c8F4788F0116861F9B", 9084508),  # v1.1.1
        ("0x50e55Af101C777bA7A1d560a774A82eF002ced9F", 8915731),  # v1.1.0
        ("0x12302fE9c02ff50939BaAaaf415fc226C078613C", 7450116),  # v1.0.0
    ],
}
from logging import getLogger
from typing import Optional
from eth_account.signers.local import LocalAccount
from eth_typing import ChecksumAddress
from web3.contract import Contract
from gnosis.eth import EthereumClient, EthereumTxSent
from gnosis.eth.contracts import (
get_paying_proxy_deployed_bytecode,
get_proxy_1_0_0_deployed_bytecode,
get_proxy_1_1_1_deployed_bytecode,
get_proxy_1_1_1_mainnet_deployed_bytecode,
get_proxy_1_3_0_deployed_bytecode,
get_proxy_factory_contract,
get_proxy_factory_V1_0_0_contract,
get_proxy_factory_V1_1_1_contract,
)
from gnosis.eth.utils import compare_byte_code, fast_is_checksum_address
from gnosis.util import cache
logger = getLogger(__name__)
class ProxyFactory:
    """
    Wrapper for the Safe Proxy Factory contract. Deploys new Safe proxies
    pointing at an already deployed master copy (singleton) and verifies
    that an address contains a known proxy bytecode.
    """

    def __init__(self, address: ChecksumAddress, ethereum_client: EthereumClient):
        """
        :param address: checksummed address of the deployed Proxy Factory
        :param ethereum_client: client used for every RPC interaction
        """
        assert fast_is_checksum_address(address), (
            "%s proxy factory address not valid" % address
        )
        self.address = address
        self.ethereum_client = ethereum_client
        self.w3 = ethereum_client.w3
        # Memoization for `get_proxy_runtime_code`, keyed by factory address.
        # A per-instance dict is used instead of decorating the method with
        # `@cache`, which would key on `self` and keep every instance alive
        # for the lifetime of the process (ruff B019)
        self._proxy_runtime_code_cache: dict = {}

    @staticmethod
    def _deploy_proxy_factory_contract(
        ethereum_client: EthereumClient,
        deployer_account: LocalAccount,
        contract: Contract,
    ) -> EthereumTxSent:
        """
        Deploy the provided Proxy Factory contract and wait for its receipt.

        :param ethereum_client:
        :param deployer_account: Ethereum Account paying for the deployment
        :param contract: not-yet-deployed web3 contract instance
        :return: EthereumTxSent with the new contract address
        """
        tx = contract.constructor().build_transaction(
            {"from": deployer_account.address}
        )
        tx_hash = ethereum_client.send_unsigned_transaction(
            tx, private_key=deployer_account.key
        )
        tx_receipt = ethereum_client.get_transaction_receipt(tx_hash, timeout=120)
        assert tx_receipt
        assert tx_receipt["status"]
        contract_address = tx_receipt["contractAddress"]
        logger.info(
            "Deployed and initialized Proxy Factory Contract=%s by %s",
            contract_address,
            deployer_account.address,
        )
        return EthereumTxSent(tx_hash, tx, contract_address)

    @classmethod
    def deploy_proxy_factory_contract(
        cls, ethereum_client: EthereumClient, deployer_account: LocalAccount
    ) -> EthereumTxSent:
        """
        Deploy proxy factory contract last version (v1.3.0)

        :param ethereum_client:
        :param deployer_account: Ethereum Account
        :return: deployed contract address
        """
        proxy_factory_contract = get_proxy_factory_contract(ethereum_client.w3)
        return cls._deploy_proxy_factory_contract(
            ethereum_client, deployer_account, proxy_factory_contract
        )

    @classmethod
    def deploy_proxy_factory_contract_v1_1_1(
        cls, ethereum_client: EthereumClient, deployer_account: LocalAccount
    ) -> EthereumTxSent:
        """
        Deploy proxy factory contract v1.1.1

        :param ethereum_client:
        :param deployer_account: Ethereum Account
        :return: deployed contract address
        """
        proxy_factory_contract = get_proxy_factory_V1_1_1_contract(ethereum_client.w3)
        return cls._deploy_proxy_factory_contract(
            ethereum_client, deployer_account, proxy_factory_contract
        )

    @classmethod
    def deploy_proxy_factory_contract_v1_0_0(
        cls, ethereum_client: EthereumClient, deployer_account: LocalAccount
    ) -> EthereumTxSent:
        """
        Deploy proxy factory contract v1.0.0

        :param ethereum_client:
        :param deployer_account: Ethereum Account
        :return: deployed contract address
        """
        proxy_factory_contract = get_proxy_factory_V1_0_0_contract(ethereum_client.w3)
        return cls._deploy_proxy_factory_contract(
            ethereum_client, deployer_account, proxy_factory_contract
        )

    def check_proxy_code(self, address: ChecksumAddress) -> bool:
        """
        Check if the bytecode deployed at `address` matches any known Safe
        proxy version (or the runtime code reported by this factory).

        :param address: Ethereum address to check
        :return: True if proxy is valid, False otherwise
        """
        deployed_proxy_code = self.w3.eth.get_code(address)
        proxy_code_fns = (
            get_proxy_1_3_0_deployed_bytecode,
            get_proxy_1_1_1_deployed_bytecode,
            get_proxy_1_1_1_mainnet_deployed_bytecode,
            get_proxy_1_0_0_deployed_bytecode,
            get_paying_proxy_deployed_bytecode,
            self.get_proxy_runtime_code,
        )
        # Lazily compares against each candidate, stopping at the first match
        return any(
            compare_byte_code(deployed_proxy_code, proxy_code_fn())
            for proxy_code_fn in proxy_code_fns
        )

    def _send_create_proxy_tx(
        self,
        deployer_account: LocalAccount,
        create_proxy_fn,
        gas: Optional[int],
        gas_price: Optional[int],
        nonce: Optional[int] = None,
    ) -> EthereumTxSent:
        """
        Shared sending logic for the `deploy_proxy_contract*` methods:
        simulate the call to learn the future proxy address, then send the
        real transaction.
        """
        tx_parameters = {"from": deployer_account.address}
        # `call()` simulates the creation and returns the future proxy address
        contract_address = create_proxy_fn.call(tx_parameters)
        if gas_price is not None:
            tx_parameters["gasPrice"] = gas_price
        if gas is not None:
            tx_parameters["gas"] = gas
        if nonce is not None:
            tx_parameters["nonce"] = nonce
        tx = create_proxy_fn.build_transaction(tx_parameters)
        # Auto estimation of gas does not work. We use a little more gas just in case
        tx["gas"] = tx["gas"] + 50000
        tx_hash = self.ethereum_client.send_unsigned_transaction(
            tx, private_key=deployer_account.key
        )
        return EthereumTxSent(tx_hash, tx, contract_address)

    def deploy_proxy_contract(
        self,
        deployer_account: LocalAccount,
        master_copy: ChecksumAddress,
        initializer: bytes = b"",
        gas: Optional[int] = None,
        gas_price: Optional[int] = None,
    ) -> EthereumTxSent:
        """
        Deploy proxy contract via ProxyFactory using `createProxy` function

        :param deployer_account: Ethereum account
        :param master_copy: Address the proxy will point at
        :param initializer: Initializer
        :param gas: Gas
        :param gas_price: Gas Price
        :return: EthereumTxSent
        """
        create_proxy_fn = self.get_contract().functions.createProxy(
            master_copy, initializer
        )
        return self._send_create_proxy_tx(
            deployer_account, create_proxy_fn, gas, gas_price
        )

    def deploy_proxy_contract_with_nonce(
        self,
        deployer_account: LocalAccount,
        master_copy: ChecksumAddress,
        initializer: bytes,
        salt_nonce: int,
        gas: Optional[int] = None,
        gas_price: Optional[int] = None,
        nonce: Optional[int] = None,
    ) -> EthereumTxSent:
        """
        Deploy proxy contract via Proxy Factory using `createProxyWithNonce` (create2)

        :param deployer_account: Ethereum account
        :param master_copy: Address the proxy will point at
        :param initializer: Data for safe creation
        :param salt_nonce: Uint256 for `create2` salt
        :param gas: Gas
        :param gas_price: Gas Price
        :param nonce: Nonce for the deployer account
        :return: EthereumTxSent with tx hash, tx and the deployed contract address
        """
        create_proxy_fn = self.get_contract().functions.createProxyWithNonce(
            master_copy, initializer, salt_nonce
        )
        return self._send_create_proxy_tx(
            deployer_account, create_proxy_fn, gas, gas_price, nonce=nonce
        )

    def get_contract(self, address: Optional[ChecksumAddress] = None):
        """Return a web3 contract instance of the Proxy Factory at `address`."""
        address = address or self.address
        return get_proxy_factory_contract(self.ethereum_client.w3, address)

    def get_proxy_runtime_code(self, address: Optional[ChecksumAddress] = None):
        """
        Get runtime code for the current proxy factory, memoized per instance
        """
        address = address or self.address
        if address not in self._proxy_runtime_code_cache:
            self._proxy_runtime_code_cache[address] = (
                self.get_contract(address=address).functions.proxyRuntimeCode().call()
            )
        return self._proxy_runtime_code_cache[address]
from rest_framework import serializers
from rest_framework.exceptions import ValidationError
from gnosis.eth.constants import (
SIGNATURE_R_MAX_VALUE,
SIGNATURE_R_MIN_VALUE,
SIGNATURE_S_MAX_VALUE,
SIGNATURE_S_MIN_VALUE,
SIGNATURE_V_MAX_VALUE,
SIGNATURE_V_MIN_VALUE,
)
from gnosis.eth.django.serializers import EthereumAddressField, HexadecimalField
from .safe import SafeOperation
class SafeSignatureSerializer(serializers.Serializer):
    """
    Validates one Safe signature. `v` allows more values than plain ECDSA:
    0 (contract signature), 1 (approved hash) and `v + 4` for `eth_sign`.
    """

    v = serializers.IntegerField(min_value=0)
    r = serializers.IntegerField(min_value=0)
    s = serializers.IntegerField(min_value=0)

    def validate_v(self, v):
        # v == 0 -> contract signature, v == 1 -> approved hash
        if v in (0, 1):
            return v
        if v > 30 and self.check_v(v - 4):  # Support eth_sign
            return v
        if self.check_v(v):
            return v
        raise serializers.ValidationError(
            "v should be 0, 1 or be in %d-%d"
            % (SIGNATURE_V_MIN_VALUE, SIGNATURE_V_MAX_VALUE)
        )

    def validate(self, data):
        super().validate(data)
        v, r, s = data["v"], data["r"], data["s"]
        # `r` and `s` are not ECDSA values for contract signatures (v=0) or
        # approved hashes (v=1), so they are only range-checked otherwise
        if v not in (0, 1):
            if not self.check_r(r):
                raise serializers.ValidationError("r not valid")
            if not self.check_s(s):
                raise serializers.ValidationError("s not valid")
        return data

    def check_v(self, v):
        """Return True when `v` lies inside the standard ECDSA range."""
        return SIGNATURE_V_MIN_VALUE <= v <= SIGNATURE_V_MAX_VALUE

    def check_r(self, r):
        """Return True when `r` lies inside the valid secp256k1 range."""
        return SIGNATURE_R_MIN_VALUE <= r <= SIGNATURE_R_MAX_VALUE

    def check_s(self, s):
        """Return True when `s` lies inside the valid secp256k1 range."""
        return SIGNATURE_S_MIN_VALUE <= s <= SIGNATURE_S_MAX_VALUE
class SafeMultisigEstimateTxSerializer(serializers.Serializer):
    """
    Validates the fields required to estimate gas for a Safe multisig tx.
    """

    safe = EthereumAddressField()
    to = EthereumAddressField()
    value = serializers.IntegerField(min_value=0)
    data = HexadecimalField(default=None, allow_null=True, allow_blank=True)
    operation = serializers.IntegerField(min_value=0)
    gas_token = EthereumAddressField(
        default=None, allow_null=True, allow_zero_address=True
    )

    def validate_operation(self, value):
        """Ensure `operation` maps to a valid `SafeOperation` enum value."""
        try:
            return SafeOperation(value).value
        except ValueError:
            raise ValidationError("Unknown operation")

    def validate(self, data):
        super().validate(data)
        # `to` and `data` cannot both be empty, otherwise there is nothing to execute.
        # (Previous version performed this exact check twice; the duplicate is removed)
        if not data["to"] and not data["data"]:
            raise ValidationError("`data` and `to` cannot both be null")
        if data["operation"] == SafeOperation.CREATE.value:
            raise ValidationError(
                "Operation CREATE not supported. Please use Gnosis Safe CreateLib"
            )
        return data
class SafeMultisigTxSerializer(SafeMultisigEstimateTxSerializer):
    """
    DEPRECATED, use `SafeMultisigTxSerializerV1` instead.
    Keeps the pre-1.0.0 field name `data_gas` (renamed to `base_gas` in v1.0.0).
    """

    # Gas reserved for the Safe inner transaction
    safe_tx_gas = serializers.IntegerField(min_value=0)
    # Renamed to `base_gas` from Safe v1.0.0 onwards
    data_gas = serializers.IntegerField(min_value=0)
    gas_price = serializers.IntegerField(min_value=0)
    refund_receiver = EthereumAddressField(
        default=None, allow_null=True, allow_zero_address=True
    )
    nonce = serializers.IntegerField(min_value=0)
class SafeMultisigTxSerializerV1(SafeMultisigEstimateTxSerializer):
    """
    Version 1.0.0 of the Safe changes `data_gas` to `base_gas`
    """

    # Gas reserved for the Safe inner transaction
    safe_tx_gas = serializers.IntegerField(min_value=0)
    # Gas costs independent of the inner tx execution (was `data_gas` before v1.0.0)
    base_gas = serializers.IntegerField(min_value=0)
    gas_price = serializers.IntegerField(min_value=0)
    refund_receiver = EthereumAddressField(
        default=None, allow_null=True, allow_zero_address=True
    )
    nonce = serializers.IntegerField(min_value=0)
import math
import os
from logging import getLogger
from typing import Any, Dict, List, Optional, Tuple
import rlp
from eth.vm.forks.frontier.transactions import FrontierTransaction
from eth_keys.exceptions import BadSignature
from hexbytes import HexBytes
from web3 import Web3
from web3.contract import ContractConstructor
from web3.exceptions import Web3Exception
from gnosis.eth.constants import GAS_CALL_DATA_BYTE, NULL_ADDRESS, SECPK1_N
from gnosis.eth.contracts import (
get_erc20_contract,
get_paying_proxy_contract,
get_safe_V0_0_1_contract,
)
from gnosis.eth.utils import (
fast_is_checksum_address,
fast_to_checksum_address,
mk_contract_address,
)
logger = getLogger(__name__)
class InvalidERC20Token(Exception):
    """Raised when a payment token does not behave like a valid ERC20 token."""
class SafeCreationTx:
    """
    Build a deterministic, pre-signed contract-creation transaction for a Safe
    proxy: a random-but-valid ECDSA signature is generated, the deployer address
    is recovered from it, and the Safe address is derived from that deployer's
    first nonce. Anyone can then fund the deployer address and broadcast
    ``tx_raw`` to deploy the Safe.
    """

    def __init__(
        self,
        w3: Web3,
        owners: List[str],
        threshold: int,
        signature_s: int,
        master_copy: str,
        gas_price: int,
        funder: Optional[str],
        payment_token: Optional[str] = None,
        payment_token_eth_value: float = 1.0,
        fixed_creation_cost: Optional[int] = None,
    ):
        """
        Prepare Safe creation
        :param w3: Web3 instance
        :param owners: Owners of the Safe
        :param threshold: Minimum number of users required to operate the Safe
        :param signature_s: Random s value for ecdsa signature
        :param master_copy: Safe master copy address
        :param gas_price: Gas Price
        :param funder: Address to refund when the Safe is created. Address(0) if no need to refund
        :param payment_token: Payment token instead of paying the funder with ether. If None Ether will be used
        :param payment_token_eth_value: Value of payment token per 1 Ether
        :param fixed_creation_cost: Fixed creation cost of Safe (Wei)
        """
        assert 0 < threshold <= len(owners)
        funder = funder or NULL_ADDRESS
        payment_token = payment_token or NULL_ADDRESS

        assert fast_is_checksum_address(master_copy)
        assert fast_is_checksum_address(funder)
        assert fast_is_checksum_address(payment_token)

        self.w3 = w3
        self.owners = owners
        self.threshold = threshold
        self.s = signature_s
        self.master_copy = master_copy
        self.gas_price = gas_price
        self.funder = funder
        self.payment_token = payment_token
        self.payment_token_eth_value = payment_token_eth_value
        self.fixed_creation_cost = fixed_creation_cost

        # Get bytes for `setup(address[] calldata _owners, uint256 _threshold, address to, bytes calldata data)`
        # This initializer will be passed to the proxy and will be called right after proxy is deployed
        safe_setup_data: bytes = self._get_initial_setup_safe_data(owners, threshold)

        # Calculate gas based on experience of previous deployments of the safe
        calculated_gas: int = self._calculate_gas(
            owners, safe_setup_data, payment_token
        )
        # Estimate gas using web3
        estimated_gas: int = self._estimate_gas(
            master_copy, safe_setup_data, funder, payment_token
        )
        # Be conservative: use the larger of the two estimations
        self.gas = max(calculated_gas, estimated_gas)

        # Payment will be safe deploy cost + transfer fees for sending ether to the deployer
        self.payment = self._calculate_refund_payment(
            self.gas, gas_price, fixed_creation_cost, payment_token_eth_value
        )

        self.tx_dict: Dict[str, Any] = self._build_proxy_contract_creation_tx(
            master_copy=master_copy,
            initializer=safe_setup_data,
            funder=funder,
            payment_token=payment_token,
            payment=self.payment,
            gas=self.gas,
            gas_price=gas_price,
        )

        self.tx_pyethereum: FrontierTransaction = (
            self._build_contract_creation_tx_with_valid_signature(self.tx_dict, self.s)
        )
        self.tx_raw = rlp.encode(self.tx_pyethereum)
        self.tx_hash = self.tx_pyethereum.hash
        self.deployer_address = fast_to_checksum_address(self.tx_pyethereum.sender)
        self.safe_address = mk_contract_address(self.tx_pyethereum.sender, 0)

        self.v = self.tx_pyethereum.v
        self.r = self.tx_pyethereum.r
        self.safe_setup_data = safe_setup_data

        # Sanity check: the Safe address must be the deployer's first contract
        assert mk_contract_address(self.deployer_address, nonce=0) == self.safe_address

    @property
    def payment_ether(self):
        """Deployment cost expressed in wei of ether (independent of the payment token)."""
        return self.gas * self.gas_price

    @staticmethod
    def find_valid_random_signature(s: int) -> Tuple[int, int]:
        """
        Find v and r valid values for a given s
        :param s: random value
        :return: v, r
        :raises ValueError: if no valid signature is found after 10000 attempts
        """
        for _ in range(10000):
            r = int(os.urandom(31).hex(), 16)
            v = (r % 2) + 27
            if r < SECPK1_N:
                tx = FrontierTransaction(0, 1, 21000, b"", 0, b"", v=v, r=r, s=s)
                try:
                    # Accessing `sender` recovers the signer, validating the signature
                    tx.sender
                    return v, r
                except (BadSignature, ValueError):
                    logger.debug("Cannot find signature with v=%d r=%d s=%d", v, r, s)
        # Fix: the original passed `s` as a second exception argument instead of
        # formatting it into the message
        raise ValueError(f"Valid signature not found with s={s}")

    @staticmethod
    def _calculate_gas(
        owners: List[str], safe_setup_data: bytes, payment_token: str
    ) -> int:
        """
        Calculate gas manually, based on tests of previosly deployed safes
        :param owners: Safe owners
        :param safe_setup_data: Data for proxy setup
        :param payment_token: If payment token, we will need more gas to transfer and maybe storage if first time
        :return: total gas needed for deployment
        """
        # TODO Do gas calculation estimating the call instead this magic
        base_gas = 60580  # Transaction standard gas

        # If we already have the token, we don't have to pay for storage, so it will be just 5K instead of 20K.
        # The other 1K is for overhead of making the call
        if payment_token != NULL_ADDRESS:
            payment_token_gas = 55000
        else:
            payment_token_gas = 0

        data_gas = GAS_CALL_DATA_BYTE * len(safe_setup_data)  # Data gas
        gas_per_owner = 18020  # Magic number calculated by testing and averaging owners
        return (
            base_gas
            + data_gas
            + payment_token_gas
            + 270000
            + len(owners) * gas_per_owner
        )

    @staticmethod
    def _calculate_refund_payment(
        gas: int,
        gas_price: int,
        fixed_creation_cost: Optional[int],
        payment_token_eth_value: float,
    ) -> int:
        """
        :return: amount (in wei of ether or of the payment token) that the Safe must
            hold so the funder can be refunded for the deployment
        """
        if fixed_creation_cost is None:
            # Payment will be safe deploy cost + transfer fees for sending ether to the deployer
            base_payment: int = (gas + 23000) * gas_price
            # Calculate payment for tokens using the conversion (if used)
            return math.ceil(base_payment / payment_token_eth_value)
        else:
            return fixed_creation_cost

    def _build_proxy_contract_creation_constructor(
        self,
        master_copy: str,
        initializer: bytes,
        funder: str,
        payment_token: str,
        payment: int,
    ) -> ContractConstructor:
        """
        :param master_copy: Master Copy of Gnosis Safe already deployed
        :param initializer: Data initializer to send to GnosisSafe setup method
        :param funder: Address that should get the payment (if payment set)
        :param payment_token: Address if a token is used. If not set, 0x0 will be ether
        :param payment: Payment
        :return: Proxy contract constructor call (fixed: the original docstring
            wrongly said "Transaction dictionary")
        """
        if not funder or funder == NULL_ADDRESS:
            # No funder -> no refund needs to be paid
            funder = NULL_ADDRESS
            payment = 0

        return get_paying_proxy_contract(self.w3).constructor(
            master_copy, initializer, funder, payment_token, payment
        )

    def _build_proxy_contract_creation_tx(
        self,
        master_copy: str,
        initializer: bytes,
        funder: str,
        payment_token: str,
        payment: int,
        gas: int,
        gas_price: int,
        nonce: int = 0,
    ):
        """
        :param master_copy: Master Copy of Gnosis Safe already deployed
        :param initializer: Data initializer to send to GnosisSafe setup method
        :param funder: Address that should get the payment (if payment set)
        :param payment_token: Address if a token is used. If not set, 0x0 will be ether
        :param payment: Payment
        :return: Transaction dictionary
        """
        return self._build_proxy_contract_creation_constructor(
            master_copy, initializer, funder, payment_token, payment
        ).build_transaction(
            {
                "gas": gas,
                "gasPrice": gas_price,
                "nonce": nonce,
            }
        )

    def _build_contract_creation_tx_with_valid_signature(
        self, tx_dict: Dict[str, Any], s: int
    ) -> FrontierTransaction:
        """
        Use pyethereum `Transaction` to generate valid tx using a random signature
        :param tx_dict: Web3 tx dictionary
        :param s: Signature s value
        :return: PyEthereum creation tx for the proxy contract
        :raises ValueError: if no valid deployer/contract address could be derived
        """
        zero_address = HexBytes("0x" + "0" * 40)
        f_address = HexBytes("0x" + "f" * 40)
        nonce = tx_dict["nonce"]
        gas_price = tx_dict["gasPrice"]
        gas = tx_dict["gas"]
        to = tx_dict.get("to", b"")  # Contract creation should always have `to` empty
        value = tx_dict["value"]
        data = tx_dict["data"]
        for _ in range(100):
            try:
                v, r = self.find_valid_random_signature(s)
                contract_creation_tx = FrontierTransaction(
                    nonce, gas_price, gas, to, value, HexBytes(data), v=v, r=r, s=s
                )
                sender_address = contract_creation_tx.sender
                contract_address: bytes = HexBytes(
                    mk_contract_address(sender_address, nonce)
                )
                # Reject degenerate addresses (all zeros / all f's)
                if sender_address in (zero_address, f_address) or contract_address in (
                    zero_address,
                    f_address,
                ):
                    raise ValueError("Invalid transaction")
                return contract_creation_tx
            except BadSignature:
                pass
        # Fix: the original passed `s` as a second exception argument instead of
        # formatting it into the message
        raise ValueError(f"Valid signature not found with s={s}")

    def _estimate_gas(
        self, master_copy: str, initializer: bytes, funder: str, payment_token: str
    ) -> int:
        """
        Gas estimation done using web3 and calling the node
        Payment cannot be estimated, as no ether is in the address. So we add some gas later.
        :param master_copy: Master Copy of Gnosis Safe already deployed
        :param initializer: Data initializer to send to GnosisSafe setup method
        :param funder: Address that should get the payment (if payment set)
        :param payment_token: Address if a token is used. If not set, 0x0 will be ether
        :return: Total gas estimation
        """
        # Estimate the contract deployment. We cannot estimate the refunding, as the safe address has not any fund
        gas: int = self._build_proxy_contract_creation_constructor(
            master_copy, initializer, funder, payment_token, 0
        ).estimate_gas()

        # We estimate the refund as a new tx
        if payment_token == NULL_ADDRESS:
            # Same cost to send 1 ether than 1000
            gas += self.w3.eth.estimate_gas({"to": funder, "value": 1})
        else:
            # Top should be around 52000 when storage is needed (funder no previous owner of token),
            # we use value 1 as we are simulating an internal call, and in that calls you don't pay for the data.
            # If it was a new tx sending 5000 tokens would be more expensive than sending 1 because of data costs
            try:
                gas += (
                    get_erc20_contract(self.w3, payment_token)
                    .functions.transfer(funder, 1)
                    .estimate_gas({"from": payment_token})
                )
            except Web3Exception as exc:
                if "transfer amount exceeds balance" in str(exc):
                    # Token contract behaves like ERC20 but has no balance; use a safe upper bound
                    return 70000
                raise InvalidERC20Token from exc
        return gas

    def _get_initial_setup_safe_data(self, owners: List[str], threshold: int) -> bytes:
        """
        :return: encoded calldata for the Safe `setup` call that the proxy will
            execute right after deployment
        """
        return (
            get_safe_V0_0_1_contract(self.w3, self.master_copy)
            .functions.setup(
                owners,
                threshold,
                NULL_ADDRESS,  # Contract address for optional delegate call
                b"",  # Data payload for optional delegate call
            )
            .build_transaction(
                {
                    # Dummy values, only the encoded `data` field is used
                    "gas": 1,
                    "gasPrice": 1,
                }
            )["data"]
        )
from typing import List, Tuple, Union
from eth_keys import keys
from eth_keys.exceptions import BadSignature
from hexbytes import HexBytes
from gnosis.eth.constants import NULL_ADDRESS
def signature_split(
    signatures: Union[bytes, str], pos: int = 0
) -> Tuple[int, int, int]:
    """
    Extract one 65-byte ECDSA signature from a packed signature blob.

    :param signatures: signatures in form of {bytes32 r}{bytes32 s}{uint8 v}
    :param pos: position of the signature
    :return: Tuple with v, r, s
    :raises ValueError: if fewer than 65 bytes are available at position `pos`
    """
    signatures = HexBytes(signatures)
    start = 65 * pos
    chunk = signatures[start : start + 65]
    if len(chunk) < 65:
        raise ValueError(f"Signature must be at least 65 bytes {signatures.hex()}")
    r = int.from_bytes(chunk[0:32], "big")
    s = int.from_bytes(chunk[32:64], "big")
    v = chunk[64]
    return v, r, s
def signature_to_bytes(v: int, r: int, s: int) -> bytes:
    """
    Serialize an ECDSA signature into its canonical 65-byte representation.

    :param v:
    :param r:
    :param s:
    :return: signature in form of {bytes32 r}{bytes32 s}{uint8 v}
    """
    return b"".join(
        (
            r.to_bytes(32, byteorder="big"),
            s.to_bytes(32, byteorder="big"),
            v.to_bytes(1, byteorder="big"),
        )
    )
def signatures_to_bytes(signatures: List[Tuple[int, int, int]]) -> bytes:
    """
    Pack several (v, r, s) signatures into one contiguous byte blob.

    :param signatures: list of tuples(v, r, s)
    :return: 65 bytes per signature
    """
    packed = bytearray()
    for v, r, s in signatures:
        packed.extend(signature_to_bytes(v, r, s))
    return bytes(packed)
def get_signing_address(signed_hash: Union[bytes, str], v: int, r: int, s: int) -> str:
    """
    :return: checksummed ethereum address, for example `0x568c93675A8dEb121700A6FAdDdfE7DFAb66Ae4A`
    :rtype: str or `NULL_ADDRESS` if signature is not valid
    """
    try:
        # `v` is 27/28 on-chain, but eth_keys expects a 0/1 recovery id
        signature = keys.Signature(vrs=(v - 27, r, s))
        recovered_key = keys.ecdsa_recover(signed_hash, signature)
        return recovered_key.to_checksum_address()
    except BadSignature:
        return NULL_ADDRESS
from typing import TypedDict
from urllib.parse import urljoin
import requests
from eth_typing import ChecksumAddress, HexStr
from gnosis.eth.ethereum_client import EthereumNetwork
from .. import SafeTx
from ..signatures import signature_split
from .base_api import SafeAPIException, SafeBaseAPI
class RelayEstimation(TypedDict):
    """Response payload of the relay service transaction estimation endpoint."""

    safeTxGas: int
    baseGas: int
    gasPrice: int
    lastUsedNonce: int
    gasToken: ChecksumAddress
    refundReceiver: ChecksumAddress
class RelaySentTransaction(TypedDict):
    """Response payload returned by the relay service after submitting a transaction."""

    safeTxHash: HexStr
    txHash: HexStr
class RelayServiceApi(SafeBaseAPI):
    """Client for the Safe Relay Service REST API."""

    URL_BY_NETWORK = {
        EthereumNetwork.GOERLI: "https://safe-relay.goerli.gnosis.io/",
        EthereumNetwork.MAINNET: "https://safe-relay.gnosis.io",
    }

    def send_transaction(
        self, safe_address: str, safe_tx: SafeTx
    ) -> RelaySentTransaction:
        """
        Submit a signed Safe transaction to the relay service for execution.

        :param safe_address: address of the Safe
        :param safe_tx: Safe transaction with collected signatures
        :return: `RelaySentTransaction` with the Safe tx hash and the ethereum tx hash
        :raises SafeAPIException: if the service rejects the transaction
        """
        url = urljoin(self.base_url, f"/api/v1/safes/{safe_address}/transactions/")
        signatures = []
        # Signatures are packed 65 bytes each ({r}{s}{v}); split them into the
        # {v, r, s} dictionaries the relay service expects
        for i in range(len(safe_tx.signatures) // 65):
            v, r, s = signature_split(safe_tx.signatures, i)
            signatures.append(
                {
                    "v": v,
                    "r": r,
                    "s": s,
                }
            )
        data = {
            "to": safe_tx.to,
            "value": safe_tx.value,
            "data": safe_tx.data.hex() if safe_tx.data else None,
            "operation": safe_tx.operation,
            "gasToken": safe_tx.gas_token,
            "safeTxGas": safe_tx.safe_tx_gas,
            # The v1 relay endpoint still uses the pre-1.0.0 `dataGas` name for `base_gas`
            "dataGas": safe_tx.base_gas,
            "gasPrice": safe_tx.gas_price,
            "refundReceiver": safe_tx.refund_receiver,
            "nonce": safe_tx.safe_nonce,
            "signatures": signatures,
        }
        response = requests.post(url, json=data)
        if not response.ok:
            raise SafeAPIException(f"Error posting transaction: {response.content}")
        else:
            return RelaySentTransaction(response.json())

    def get_estimation(self, safe_address: str, safe_tx: SafeTx) -> RelayEstimation:
        """
        :param safe_address:
        :param safe_tx:
        :return: RelayEstimation
        :raises SafeAPIException: if the service rejects the estimation request
        """
        url = urljoin(
            self.base_url, f"/api/v2/safes/{safe_address}/transactions/estimate/"
        )
        data = {
            "to": safe_tx.to,
            "value": safe_tx.value,
            "data": safe_tx.data.hex() if safe_tx.data else None,
            "operation": safe_tx.operation,
            "gasToken": safe_tx.gas_token,
        }
        response = requests.post(url, json=data)
        if not response.ok:
            raise SafeAPIException(f"Error posting transaction: {response.content}")
        else:
            response_json = response.json()
            # Convert values to int
            for key in ("safeTxGas", "baseGas", "gasPrice"):
                response_json[key] = int(response_json[key])
            return RelayEstimation(response_json) | /safe_pls_py-5.4.3-py3-none-any.whl/gnosis/safe/api/relay_service_api.py | 0.724578 | 0.203193 | relay_service_api.py | pypi
import logging
import time
from typing import Any, Dict, List, Optional, Tuple, Union
from eth_account.signers.local import LocalAccount
from eth_typing import HexStr
from hexbytes import HexBytes
from web3 import Web3
from gnosis.eth import EthereumNetwork
from gnosis.safe import SafeTx
from .base_api import SafeAPIException, SafeBaseAPI
logger = logging.getLogger(__name__)
class TransactionServiceApi(SafeBaseAPI):
    """Client for the Safe Transaction Service REST API."""

    URL_BY_NETWORK = {
        EthereumNetwork.MAINNET: "https://safe-transaction-mainnet.safe.global",
        EthereumNetwork.GOERLI: "https://safe-transaction-goerli.safe.global",
        EthereumNetwork.PULSECHAIN_MAINNET: "https://safe-transaction.pulse.domains",
        EthereumNetwork.PULSECHAIN_TESTNET: "https://t.safe-transaction.pulse.domains",
    }

    @classmethod
    def create_delegate_message_hash(cls, delegate_address: str) -> str:
        """
        Hash that a delegator must sign to add/remove `delegate_address`.
        Uses an hourly TOTP window so signatures expire.
        """
        totp = int(time.time()) // 3600
        hash_to_sign = Web3.keccak(text=delegate_address + str(totp))
        # NOTE(review): returns the keccak digest (bytes) despite the `str` annotation — confirm callers
        return hash_to_sign

    @classmethod
    def data_decoded_to_text(cls, data_decoded: Dict[str, Any]) -> Optional[str]:
        """
        Decoded data decoded to text
        :param data_decoded:
        :return: human readable representation, or `None` when `data_decoded` is empty
        """
        if not data_decoded:
            return None
        method = data_decoded["method"]
        parameters = data_decoded.get("parameters", [])
        text = ""
        for (
            parameter
        ) in parameters:  # Multisend or executeTransaction from another Safe
            if "decodedValue" in parameter:
                # Nested call: render each decoded sub-transaction recursively
                text += (
                    method
                    + ":\n - "
                    + "\n - ".join(
                        [
                            cls.data_decoded_to_text(
                                decoded_value.get("decodedData", {})
                            )
                            for decoded_value in parameter.get("decodedValue", {})
                        ]
                    )
                    + "\n"
                )
        if text:
            return text.strip()
        else:
            # Plain call: `method: value1,value2,...`
            return (
                method
                + ": "
                + ",".join([str(parameter["value"]) for parameter in parameters])
            )

    @classmethod
    def parse_signatures(cls, raw_tx: Dict[str, Any]) -> Optional[HexBytes]:
        """
        Extract packed signatures from a multisig-transaction API payload.
        Returns `None` implicitly when there are neither signatures nor confirmations.
        """
        if raw_tx["signatures"]:
            # Tx was executed and signatures field is populated
            return raw_tx["signatures"]
        elif raw_tx["confirmations"]:
            # Parse offchain transactions
            # Confirmations are sorted by owner address (ascending) before packing
            return b"".join(
                [
                    HexBytes(confirmation["signature"])
                    for confirmation in sorted(
                        raw_tx["confirmations"], key=lambda x: int(x["owner"], 16)
                    )
                    if confirmation["signatureType"] == "EOA"
                ]
            )

    def get_balances(self, safe_address: str) -> List[Dict[str, Any]]:
        """
        :return: balances held by `safe_address` as returned by the service
        :raises SafeAPIException: on a non-2xx response
        """
        response = self._get_request(f"/api/v1/safes/{safe_address}/balances/")
        if not response.ok:
            raise SafeAPIException(f"Cannot get balances: {response.content}")
        else:
            return response.json()

    def get_safe_transaction(
        self, safe_tx_hash: Union[bytes, HexStr]
    ) -> Tuple[SafeTx, Optional[HexBytes]]:
        """
        :param safe_tx_hash:
        :return: SafeTx and `tx-hash` if transaction was executed
        :raises SafeAPIException: on a non-2xx response
        """
        safe_tx_hash = HexBytes(safe_tx_hash).hex()
        response = self._get_request(f"/api/v1/multisig-transactions/{safe_tx_hash}/")
        if not response.ok:
            raise SafeAPIException(
                f"Cannot get transaction with safe-tx-hash={safe_tx_hash}: {response.content}"
            )
        else:
            result = response.json()
            # TODO return tx-hash if executed
            signatures = self.parse_signatures(result)
            if not self.ethereum_client:
                logger.warning(
                    "EthereumClient should be defined to get a executable SafeTx"
                )
            # Rebuild the SafeTx from the service payload; numeric fields come back as strings
            safe_tx = SafeTx(
                self.ethereum_client,
                result["safe"],
                result["to"],
                int(result["value"]),
                HexBytes(result["data"]) if result["data"] else b"",
                int(result["operation"]),
                int(result["safeTxGas"]),
                int(result["baseGas"]),
                int(result["gasPrice"]),
                result["gasToken"],
                result["refundReceiver"],
                signatures=signatures if signatures else b"",
                safe_nonce=int(result["nonce"]),
                chain_id=self.network.value,
            )
            tx_hash = (
                HexBytes(result["transactionHash"])
                if result["transactionHash"]
                else None
            )
            if tx_hash:
                safe_tx.tx_hash = tx_hash
            return (safe_tx, tx_hash)

    def get_transactions(self, safe_address: str) -> List[Dict[str, Any]]:
        """
        :return: multisig transactions of `safe_address` (first results page only)
        :raises SafeAPIException: on a non-2xx response
        """
        response = self._get_request(
            f"/api/v1/safes/{safe_address}/multisig-transactions/"
        )
        if not response.ok:
            raise SafeAPIException(f"Cannot get transactions: {response.content}")
        else:
            return response.json().get("results", [])

    def get_delegates(self, safe_address: str) -> List[Dict[str, Any]]:
        """
        :return: delegates configured for `safe_address` (first results page only)
        :raises SafeAPIException: on a non-2xx response
        """
        response = self._get_request(f"/api/v1/safes/{safe_address}/delegates/")
        if not response.ok:
            raise SafeAPIException(f"Cannot get delegates: {response.content}")
        else:
            return response.json().get("results", [])

    def post_signatures(self, safe_tx_hash: bytes, signatures: bytes) -> None:
        """
        Add confirmation signatures to an existing multisig transaction.

        :raises SafeAPIException: on a non-2xx response
        """
        safe_tx_hash = HexBytes(safe_tx_hash).hex()
        response = self._post_request(
            f"/api/v1/multisig-transactions/{safe_tx_hash}/confirmations/",
            payload={"signature": HexBytes(signatures).hex()},
        )
        if not response.ok:
            raise SafeAPIException(
                f"Cannot post signatures for tx with safe-tx-hash={safe_tx_hash}: {response.content}"
            )

    def add_delegate(
        self,
        safe_address: str,
        delegate_address: str,
        label: str,
        signer_account: LocalAccount,
    ):
        """
        Register `delegate_address` as a delegate of `safe_address`.
        `signer_account` must sign the TOTP-based delegate message hash.

        :raises SafeAPIException: on a non-2xx response
        """
        hash_to_sign = self.create_delegate_message_hash(delegate_address)
        signature = signer_account.signHash(hash_to_sign)
        add_payload = {
            "safe": safe_address,
            "delegate": delegate_address,
            "signature": signature.signature.hex(),
            "label": label,
        }
        response = self._post_request(
            f"/api/v1/safes/{safe_address}/delegates/", add_payload
        )
        if not response.ok:
            raise SafeAPIException(f"Cannot add delegate: {response.content}")

    def remove_delegate(
        self, safe_address: str, delegate_address: str, signer_account: LocalAccount
    ):
        """
        Remove `delegate_address` from the delegates of `safe_address`.

        :raises SafeAPIException: on a non-2xx response
        """
        hash_to_sign = self.create_delegate_message_hash(delegate_address)
        signature = signer_account.signHash(hash_to_sign)
        remove_payload = {"signature": signature.signature.hex()}
        response = self._delete_request(
            f"/api/v1/safes/{safe_address}/delegates/{delegate_address}/",
            remove_payload,
        )
        if not response.ok:
            raise SafeAPIException(f"Cannot remove delegate: {response.content}")

    def post_transaction(self, safe_tx: SafeTx):
        """
        Propose a new multisig transaction to the service.

        :raises SafeAPIException: on a non-2xx response
        """
        # The service requires a sender; fall back to a placeholder when unsigned
        random_sender = "0x0000000000000000000000000000000000000002"
        sender = safe_tx.sorted_signers[0] if safe_tx.sorted_signers else random_sender
        data = {
            "to": safe_tx.to,
            "value": safe_tx.value,
            "data": safe_tx.data.hex() if safe_tx.data else None,
            "operation": safe_tx.operation,
            "gasToken": safe_tx.gas_token,
            "safeTxGas": safe_tx.safe_tx_gas,
            "baseGas": safe_tx.base_gas,
            "gasPrice": safe_tx.gas_price,
            "refundReceiver": safe_tx.refund_receiver,
            "nonce": safe_tx.safe_nonce,
            "contractTransactionHash": safe_tx.safe_tx_hash.hex(),
            "sender": sender,
            "signature": safe_tx.signatures.hex() if safe_tx.signatures else None,
            "origin": "Safe-CLI",
        }
        response = self._post_request(
            f"/api/v1/safes/{safe_tx.safe_address}/multisig-transactions/", data
        )
        if not response.ok:
            raise SafeAPIException(f"Error posting transaction: {response.content}") | /safe_pls_py-5.4.3-py3-none-any.whl/gnosis/safe/api/transaction_service_api.py | 0.739234 | 0.218596 | transaction_service_api.py | pypi
from dataclasses import dataclass
from enum import Enum
from typing import Any, Dict, Literal
from eth_typing import ChecksumAddress, Hash32
@dataclass
class Order:
    """Gnosis Protocol v2 (CowSwap) order; field names mirror the API/EIP-712 struct."""

    sellToken: ChecksumAddress  # token being sold
    buyToken: ChecksumAddress  # token being bought
    receiver: ChecksumAddress  # recipient of the buy amount
    sellAmount: int  # amount of sellToken (wei-like base units)
    buyAmount: int  # amount of buyToken (wei-like base units)
    validTo: int  # order expiry as a unix timestamp
    appData: Hash32  # 32-byte metadata hash attached to the order
    feeAmount: int  # fee in sellToken base units
    kind: Literal["sell", "buy"]
    partiallyFillable: bool
    sellTokenBalance: Literal["erc20", "external", "internal"]
    buyTokenBalance: Literal["erc20", "internal"]

    def is_sell_order(self) -> bool:
        """Return `True` when the order fixes the sell amount (kind == "sell")."""
        return self.kind == "sell"

    def get_eip712_structured_data(
        self, chain_id: int, verifying_contract: ChecksumAddress
    ) -> Dict[str, Any]:
        """
        Build the EIP-712 typed-data payload for signing this order.

        :param chain_id: EIP-155 chain id used in the signing domain
        :param verifying_contract: GPv2 settlement contract address
        :return: typed-data dictionary (types, primaryType, domain, message)
        """
        types = {
            "EIP712Domain": [
                {"name": "name", "type": "string"},
                {"name": "version", "type": "string"},
                {"name": "chainId", "type": "uint256"},
                {"name": "verifyingContract", "type": "address"},
            ],
            "Order": [
                {"name": "sellToken", "type": "address"},
                {"name": "buyToken", "type": "address"},
                {"name": "receiver", "type": "address"},
                {"name": "sellAmount", "type": "uint256"},
                {"name": "buyAmount", "type": "uint256"},
                {"name": "validTo", "type": "uint32"},
                {"name": "appData", "type": "bytes32"},
                {"name": "feeAmount", "type": "uint256"},
                {"name": "kind", "type": "string"},
                {"name": "partiallyFillable", "type": "bool"},
                {"name": "sellTokenBalance", "type": "string"},
                {"name": "buyTokenBalance", "type": "string"},
            ],
        }
        message = {
            "sellToken": self.sellToken,
            "buyToken": self.buyToken,
            "receiver": self.receiver,
            "sellAmount": self.sellAmount,
            "buyAmount": self.buyAmount,
            "validTo": self.validTo,
            "appData": self.appData,
            "feeAmount": self.feeAmount,
            "kind": self.kind,
            "partiallyFillable": self.partiallyFillable,
            "sellTokenBalance": self.sellTokenBalance,
            "buyTokenBalance": self.buyTokenBalance,
        }
        return {
            "types": types,
            "primaryType": "Order",
            "domain": {
                "name": "Gnosis Protocol",
                "version": "v2",
                "chainId": chain_id,
                "verifyingContract": verifying_contract,
            },
            "message": message,
        }
class OrderKind(Enum):
    """Side of an order: BUY fixes the buy amount, SELL fixes the sell amount."""

    BUY = 0
    SELL = 1 | /safe_pls_py-5.4.3-py3-none-any.whl/gnosis/protocol/order.py | 0.843412 | 0.333829 | order.py | pypi
from functools import cached_property
from typing import Any, Dict, List, Optional, TypedDict, Union, cast
import requests
from eth_account import Account
from eth_account.messages import encode_defunct
from eth_typing import AnyAddress, ChecksumAddress, HexStr
from hexbytes import HexBytes
from gnosis.eth import EthereumNetwork, EthereumNetworkNotSupported
from gnosis.eth.eip712 import eip712_encode_hash
from ..eth.constants import NULL_ADDRESS
from .order import Order, OrderKind
class TradeResponse(TypedDict):
    """Trade entry returned by the CowSwap `/trades` endpoint."""

    blockNumber: int
    logIndex: int
    orderUid: HexStr
    buyAmount: str  # Stringified int
    sellAmount: str  # Stringified int
    sellAmountBeforeFees: str  # Stringified int
    owner: AnyAddress  # Not checksummed
    buyToken: AnyAddress
    sellToken: AnyAddress
    txHash: HexStr
class AmountResponse(TypedDict):
    """Sell/buy amounts (possibly adjusted by the API) returned by `get_estimated_amount`."""

    sellAmount: int
    buyAmount: int
class ErrorResponse(TypedDict):
    """Error payload returned by the CowSwap API on failed requests."""

    errorType: str
    description: str
class GnosisProtocolAPI:
    """
    Client for GnosisProtocol API. More info: https://docs.cowswap.exchange/
    """

    SETTLEMENT_CONTRACT_ADDRESSES = {
        EthereumNetwork.MAINNET: "0x9008D19f58AAbD9eD0D60971565AA8510560ab41",
        EthereumNetwork.GOERLI: "0x9008D19f58AAbD9eD0D60971565AA8510560ab41",
    }
    API_BASE_URLS = {
        EthereumNetwork.MAINNET: "https://api.cow.fi/mainnet/api/v1/",
        EthereumNetwork.GOERLI: "https://api.cow.fi/goerli/api/v1/",
    }

    def __init__(self, ethereum_network: EthereumNetwork):
        """
        :param ethereum_network: network the client will operate on
        :raises EthereumNetworkNotSupported: if the network has no API url configured
        """
        self.network = ethereum_network
        if self.network not in self.API_BASE_URLS:
            raise EthereumNetworkNotSupported(
                f"{self.network.name} network not supported by Gnosis Protocol"
            )
        self.settlement_contract_address = self.SETTLEMENT_CONTRACT_ADDRESSES[
            self.network
        ]
        self.base_url = self.API_BASE_URLS[self.network]
        self.http_session = requests.Session()

    @cached_property
    def weth_address(self) -> ChecksumAddress:
        """
        :return: Wrapped ether checksummed address
        """
        if self.network == EthereumNetwork.GOERLI:  # Goerli WETH9
            return ChecksumAddress("0xB4FBF271143F4FBf7B91A5ded31805e42b2208d6")
        # Mainnet WETH9
        return ChecksumAddress("0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2")

    def get_quote(
        self, order: Order, from_address: ChecksumAddress
    ) -> Union[Dict[str, Any], ErrorResponse]:
        """
        Request a price/fee quote for `order`.

        :param order: order to quote; `sellAmount` is interpreted after fees
        :param from_address: address the order would be placed from
        :return: quote payload on success, `ErrorResponse` otherwise
        """
        url = self.base_url + "quote"
        data_json = {
            "sellToken": order.sellToken.lower(),
            "buyToken": order.buyToken.lower(),
            "sellAmountAfterFee": str(order.sellAmount),
            # "validTo": order.validTo,
            "appData": HexBytes(order.appData).hex()
            if isinstance(order.appData, bytes)
            else order.appData,
            "feeAmount": str(order.feeAmount),
            "kind": order.kind,
            "partiallyFillable": order.partiallyFillable,
            "signingScheme": "ethsign",
            "from": from_address,
            "priceQuality": "fast",
        }
        r = self.http_session.post(url, json=data_json)
        if r.ok:
            return r.json()
        else:
            return ErrorResponse(r.json())

    def get_fee(
        self, order: Order, from_address: ChecksumAddress
    ) -> Union[int, ErrorResponse]:
        """
        :return: fee amount (in sellToken base units) for `order`, or the API error
        """
        quote = self.get_quote(order, from_address)
        if "quote" in quote:
            return int(quote["quote"]["feeAmount"])
        else:
            return quote

    def place_order(
        self, order: Order, private_key: HexStr
    ) -> Union[HexStr, ErrorResponse]:
        """
        Place order. If `feeAmount=0` in Order it will be calculated calling `get_fee(order, from_address)`
        :return: UUID for the order as an hex hash
        """
        assert (
            order.buyAmount and order.sellAmount
        ), "Order buyAmount and sellAmount cannot be empty"

        url = self.base_url + "orders/"
        from_address = Account.from_key(private_key).address
        if not order.feeAmount:
            fee_amount = self.get_fee(order, from_address)
            if isinstance(fee_amount, int):
                order.feeAmount = fee_amount
            elif "errorType" in fee_amount:  # ErrorResponse
                return fee_amount

        # Sign the EIP-712 hash of the order with the `ethsign` scheme
        signable_hash = eip712_encode_hash(
            order.get_eip712_structured_data(
                self.network.value, self.settlement_contract_address
            )
        )
        message = encode_defunct(primitive=signable_hash)
        signed_message = Account.from_key(private_key).sign_message(message)

        data_json = {
            "sellToken": order.sellToken.lower(),
            "buyToken": order.buyToken.lower(),
            "sellAmount": str(order.sellAmount),
            "buyAmount": str(order.buyAmount),
            "validTo": order.validTo,
            "appData": HexBytes(order.appData).hex()
            if isinstance(order.appData, bytes)
            else order.appData,
            "feeAmount": str(order.feeAmount),
            "kind": order.kind,
            "partiallyFillable": order.partiallyFillable,
            "signature": signed_message.signature.hex(),
            "signingScheme": "ethsign",
            "from": from_address,
        }
        r = self.http_session.post(url, json=data_json)
        if r.ok:
            return HexStr(r.json())
        else:
            return ErrorResponse(r.json())

    def get_orders(
        self, owner: ChecksumAddress, offset: int = 0, limit: int = 10
    ) -> List[Dict[str, Any]]:
        """
        :param owner:
        :param offset: Defaults to 0
        :param limit: Defaults to 10. Maximum is 1000, minimum is 1
        :return: Orders of one user paginated. The orders are ordered by their creation
        date descending (newest orders first).
        To enumerate all orders start with offset 0 and keep increasing the offset by the
        total number of returned results. When a response contains less than the limit
        the last page has been reached.
        """
        # Fix: `offset` and `limit` were accepted and documented but never sent,
        # so every call returned the first default-sized page
        url = self.base_url + f"account/{owner}/orders?offset={offset}&limit={limit}"
        r = self.http_session.get(url)
        if r.ok:
            return cast(List[Dict[str, Any]], r.json())
        else:
            # NOTE(review): returns ErrorResponse on failure despite the List annotation
            return ErrorResponse(r.json())

    def get_trades(
        self, order_ui: Optional[HexStr] = None, owner: Optional[ChecksumAddress] = None
    ) -> List[TradeResponse]:
        """
        Fetch trades either by order uid or by owner (exactly one must be given).
        `order_ui` is kept (instead of `order_uid`) for backwards compatibility
        with keyword callers.
        """
        assert bool(order_ui) ^ bool(
            owner
        ), "order_ui or owner must be provided, but not both"
        url = self.base_url + "trades/?"
        if order_ui:
            url += f"orderUid={order_ui}"
        elif owner:
            url += f"owner={owner}"

        r = self.http_session.get(url)
        if r.ok:
            return cast(List[TradeResponse], r.json())
        else:
            # NOTE(review): returns ErrorResponse on failure despite the List annotation
            return ErrorResponse(r.json())

    def get_estimated_amount(
        self,
        base_token: ChecksumAddress,
        quote_token: ChecksumAddress,
        kind: OrderKind,
        amount_wei: int,
    ) -> Union[AmountResponse, ErrorResponse]:
        """
        :param base_token:
        :param quote_token:
        :param kind:
        :param amount_wei:
        :return: Both `sellAmount` and `buyAmount` as they can be adjusted by CowSwap API
        """
        order = Order(
            sellToken=base_token,
            buyToken=quote_token,
            receiver=NULL_ADDRESS,
            # NOTE(review): the 10x factor on the quoted amount is undocumented — confirm
            sellAmount=amount_wei * 10 if kind == OrderKind.SELL else 0,
            buyAmount=amount_wei * 10 if kind == OrderKind.BUY else 0,
            # NOTE(review): original comment said "Valid for 1 hour" but the value is 0;
            # the quote endpoint seems to ignore it — confirm
            validTo=0,
            appData="0x0000000000000000000000000000000000000000000000000000000000000000",
            feeAmount=0,
            kind=kind.name.lower(),  # `sell` or `buy`
            partiallyFillable=False,
            sellTokenBalance="erc20",  # `erc20`, `external` or `internal`
            buyTokenBalance="erc20",  # `erc20` or `internal`
        )
        quote = self.get_quote(order, NULL_ADDRESS)
        if "quote" in quote:
            return {
                "buyAmount": int(quote["quote"]["buyAmount"]),
                "sellAmount": int(quote["quote"]["sellAmount"]),
            }
        else:
            return quote
import pydantic
import os
import yaml
import re
import typing
@pydantic.dataclasses.dataclass(frozen=True, order=True)
class RegexTestCase:
    """A single example for a regular expression: a text and its expected match groups."""

    # Text the pattern is matched against
    text: pydantic.constr()
    # Expected result: None -> no match expected; one item -> whole match (group 0);
    # several items -> capture groups 1..n
    matches: typing.Optional[typing.List[str]] = None

    def run(self, regex):
        """Evaluate the test case against the given RegularExpression, asserting on failure."""
        actual = regex.match(self.text)
        # The regexr link lets a human debug a failing case interactively
        link = regex.get_regexr_debug_link()
        msg = f"{self.text} match of {regex.pattern} != {self.matches}: {link}"
        if self.matches is None:
            assert actual is None, msg
        elif len(self.matches) == 1:
            # Single expectation compares against the whole match
            assert self.matches[0] == actual.group(0), msg
        else:
            # Otherwise compare each expected value with its capture group (1-based)
            for i in range(len(self.matches)):
                assert self.matches[i] == actual.group(i + 1), msg
@pydantic.dataclasses.dataclass()
class RegularExpression:
    """A regex pattern bundled with a description and test cases, loadable from YAML."""

    pattern: pydantic.constr(min_length=2)
    description: pydantic.constr(min_length=3)
    test_cases: typing.List[RegexTestCase]

    @classmethod
    def from_yaml(cls, expression_name: str, folder: str = None):
        """
        Load `<expression_name>.re.yaml` from `folder`, from `$SAFE_REGEX_PATH`,
        or from the current working directory (in that order of precedence).
        """
        environment_path = os.environ.get("SAFE_REGEX_PATH")
        if folder:
            working_folder = folder
        elif environment_path:
            working_folder = environment_path
        else:
            working_folder = os.getcwd()
        file_path = os.path.join(working_folder, f"{expression_name}.re.yaml")
        with open(file_path, "r") as yaml_file:
            yaml_data = yaml.safe_load(yaml_file)
        return cls(**yaml_data)

    def __post_init_post_parse__(self):
        # Compile once after pydantic validation and expose common re.Pattern attributes
        self.regex = re.compile(self.pattern)
        self.flags = self.regex.flags
        self.groups = self.regex.groups
        self.groupindex = self.regex.groupindex

    class Config:
        # Reject unknown fields in the YAML definition
        extra = "forbid"

    def test(self):
        """Run every test case against the compiled pattern (asserts on failure)."""
        for test_case in self.test_cases:
            test_case.run(self)

    def get_regexr_debug_link(self) -> str:
        """Build a regexr.com link preloaded with the pattern and all test case texts."""
        import urllib.parse
        match = [tc.text for tc in self.test_cases if tc.matches is not None]
        not_match = [tc.text for tc in self.test_cases if tc.matches is None]
        tests = "These should match\n{}\nThese should not match\n{}".format(
            "\n".join(sorted(match)),
            "\n".join(sorted(not_match)),
        )
        params = {"expression": f"/{self.pattern}/gms", "text": tests}
        encoded_params = urllib.parse.urlencode(params)
        return f"https://regexr.com/?{encoded_params}"
    """
    pass through to re.Pattern
    """
    def search(self, *args, **kwargs):
        return self.regex.search(*args, **kwargs)
    def match(self, *args, **kwargs):
        return self.regex.match(*args, **kwargs)
    def fullmatch(self, *args, **kwargs):
        return self.regex.fullmatch(*args, **kwargs)
    def split(self, *args, **kwargs):
        return self.regex.split(*args, **kwargs)
    def findall(self, *args, **kwargs):
        return self.regex.findall(*args, **kwargs)
    def finditer(self, *args, **kwargs):
        return self.regex.finditer(*args, **kwargs)
    def sub(self, *args, **kwargs):
        return self.regex.sub(*args, **kwargs)
    def subn(self, *args, **kwargs):
        return self.regex.subn(*args, **kwargs) | /safe_regex-0.1-py3-none-any.whl/safe_regex/regular_expression.py | 0.543833 | 0.374276 | regular_expression.py | pypi
[](https://pepy.tech/project/safe-transformer)
[](https://badge.fury.io/py/safe-transformer)
# SAFE - Surrogate Assisted Feature Extraction
SAFE is a python library that you can use to build better explainable ML models leveraging capabilities of more powerful, black-box models.
The idea is to use more complicated model - called surrogate model - to extract more information from features, which can be used later to fit some simpler but explainable model.
Input data is divided into intervals or new set of categories, determined by surrogate model, and then it is transformed based on the interval or category each point belonged to.
Library provides you with SafeTransformer class, which implements TransformerMixin interface, so it can be used as a part of the scikit-learn pipeline.
Using this library you can boost simple ML models by transferring information from more complicated models.
An article about SAFE can be found [here](https://arxiv.org/abs/1902.11035).
## Requirements
To install this library run:
```
pip install safe-transformer
```
The only requirement is to have Python 3 installed on your machine.
## Usage with example
Sample code using SAFE transformer as part of scikit-learn pipeline:
```python
from SafeTransformer import SafeTransformer
from sklearn.datasets import load_boston
from sklearn.ensemble import GradientBoostingRegressor
from sklearn.model_selection import train_test_split
import pandas as pd
from sklearn.linear_model import LinearRegression
from sklearn.metrics import mean_squared_error
from sklearn.pipeline import Pipeline
data = load_boston()
X = pd.DataFrame(data.data, columns=data.feature_names)
y = data['target']
X_train, X_test, y_train, y_test = train_test_split(X, y)
surrogate_model = GradientBoostingRegressor(n_estimators=100,
max_depth=4,
learning_rate=0.1,
loss='huber')
surrogate_model = surrogate_model.fit(X_train, y_train)
linear_model = LinearRegression()
safe_transformer = SafeTransformer(surrogate_model, penalty = 0.84)
pipe = Pipeline(steps=[('safe', safe_transformer), ('linear', linear_model)])
pipe = pipe.fit(X_train, y_train)
predictions = pipe.predict(X_test)
mean_squared_error(y_test, predictions)
```
```bash
13.617733207161479
```
```python
linear_model_standard = LinearRegression()
linear_model_standard = linear_model_standard.fit(X_train, y_train)
standard_predictions = linear_model_standard.predict(X_test)
mean_squared_error(y_test, standard_predictions)
```
```bash
29.27790566931337
```
As you can see you can improve your simple model performance with help of the more powerful, black-box model, keeping the interpretability of the simple model.
You can use any model you like, as long as it has fit and predict methods in case of regression, or fit and predict_proba in case of classification. Data used to fit SAFE transformer needs to be pandas data frame.
You can also specify penalty and pelt model arguments.
In [examples folder](https://github.com/olagacek/SAFE/tree/master/examples) you can find jupyter notebooks with complete classification and regression examples.
API reference documentation can be found [here](https://plubon.github.io/safe-docs/)
## Algorithm
Our goal is to divide each feature into intervals or new categories and then transform feature values based on the subset they belonged to.
The division is based on the response of the surrogate model.
In case of continuous dependent variables for each of them we find changepoints - points that indicate values of variable for which the response of the surrogate model changes quickly. Intervals between changepoints are the basis of the transformation, eg. feature is transformed to categorical variable, where feature values in the same interval form the same category. To find changepoints we need partial dependence plots.
These plots describe the marginal effect of a given variable (or multiple variables) on an outcome of the model.
In case of categorical variables for each of them we perform hierarchical clustering based on surrogate model responses. Then, based on the biggest similarity in response between categories, they are merged together forming new categories.
Algorithm for performing fit method is illustrated below:

Our algorithm works both for regression and classification problems. In case of regression we simply use model response for creating partial dependence plot and hierarchical clustering. As for classification we use predicted probabilities of each class.
### Continuous variable transformation
Here is example of partial dependence plot. It was created for boston housing data frame, variable in example is LSTAT. To get changepoints from partial dependence plots we use [ruptures](http://ctruong.perso.math.cnrs.fr/ruptures-docs/build/html/index.html) library and its model [Pelt](http://ctruong.perso.math.cnrs.fr/ruptures-docs/build/html/detection/pelt.html).
<img src="images/simple-plot.png" width="425"/> <img src="images/changepoint.png" width="425"/>
### Categorical variable transformation
The plot below illustrates the categorical variable transformation. To create new categories based on the average model responses, we use the scikit-learn [ward algorithm](https://scikit-learn.org/0.15/modules/generated/sklearn.cluster.Ward.html), and to choose the number of clusters at which to cut the tree, the KneeLocator class from the [kneed library](https://github.com/arvkevi/kneed) is used.
<img src="images/categorical.png" width="425"/> <img src="images/dendo.png" width="425"/>
## Model optimization
One of the parameters you can specify is penalty - it has an impact on the number of changepoints that will be created. Here you can see how the quality of the model changes with penalty. For reference, results of the surrogate and basic models are also shown in the plot.
<img src="images/pens.png" alt="Model performance" width="500"/>
With correctly chosen penalty your simple model can achieve much better accuracy, close to accuracy of surrogate model.
## Variables transformation
If you are interested in how your dataset was changed you can check summary method.
```python
safe_transformer.summary(variable_name='CRIM')
```
```
Numerical Variable CRIM
Selected intervals:
[-Inf, 4.90)
[4.90, 11.14)
[11.14, 15.59)
[15.59, 24.50)
[24.50, 33.40)
[33.40, 48.54)
[48.54, Inf)
```
To see transformations of all the variables do not specify variable_name argument.
```python
safe_transformer.summary()
```
```
Numerical Variable CRIM
Selected intervals:
[-Inf, 4.90)
[4.90, 11.14)
[11.14, 15.59)
[15.59, 24.50)
[24.50, 33.40)
[33.40, 48.54)
[48.54, Inf)
Numerical Variable ZN
Selected intervals:
[-Inf, 33.53)
[33.53, Inf)
Numerical Variable INDUS
Selected intervals:
[-Inf, 2.78)
[2.78, 3.19)
[3.19, 4.28)
[4.28, 10.29)
[10.29, 26.68)
[26.68, Inf)
.
.
.
Numerical Variable LSTAT
Selected intervals:
[-Inf, 4.55)
[4.55, 4.73)
[4.73, 5.43)
[5.43, 5.96)
[5.96, 7.55)
[7.55, 8.08)
[8.08, 9.67)
[9.67, 9.85)
[9.85, 10.02)
[10.02, 14.43)
[14.43, 14.96)
[14.96, 16.02)
[16.02, 18.14)
[18.14, 19.37)
[19.37, 23.96)
[23.96, 26.78)
[26.78, 29.61)
[29.61, Inf)
```
## References
* [Original Safe algorithm](https://mi2datalab.github.io/SAFE/index.html), implemented in R
* [ruptures library](https://github.com/deepcharles/ruptures), used for finding changepoints
* [kneed library](https://github.com/arvkevi/kneed), used for cutting hierarchical tree
* [SAFE article](https://arxiv.org/abs/1902.11035) - article about SAFE algorithm, including benchmark results using SAFE library
The project was made on [research workshops classes](https://github.com/pbiecek/CaseStudies2019W) at the Warsaw University of Technology at the Faculty of Mathematics and Information Science by Aleksandra Gacek and Piotr Luboń.
| /safe_transformer-0.0.5-py3-none-any.whl/safe_transformer-0.0.5.dist-info/DESCRIPTION.rst | 0.857768 | 0.991764 | DESCRIPTION.rst | pypi |
import json, exceptions
# -----------------------------------------------------------------------------
# The Safe None Class
# -----------------------------------------------------------------------------
class SafeNoneClass(object):
"""
Since we can't subclass 'None' we create a new object that we use as None
in safeJSON. This object behaves like None with several exceptions.
1) Attempting to get items on SafeNone (e.g. SafeNone['a']) will return
SafeNone.
2) Since we can't subclass or re-assign None the expression SafeNone is None
will (unfortunatley) return false. Accounting for this is the major
difference between using safeJSON and regular python.
3) This object writes itself out as 'SafeNone' not None.
4) len(SafeNone) evaluates to 0
5) 'for i in SafeNone:' will not raise an exception
6) SafeNone.__hash__() always evaluates to 0
7) PyMongo will store SafeNone as 0
8) JSON.dumps(SafeNone) will yield 'SafeNone' not 'null'
As a note this object will evaluate to 'False' in boolean expressions.
"""
# Singleton instance
_instance = None
def __new__(cls, *args, **kwargs):
if not cls._instance:
cls._instance = super(SafeNoneClass, cls).__new__(cls, *args, **kwargs)
return cls._instance
def __getitem__(self, index):
"""
This method simulates array accesses. Whenever this object is
subscripted, it returns itself.
"""
return self._instance
def __ge__(self, o):
if self.__eq__(o):
return True
return False
def __gt__(self, o):
return False
def __le__(self, o):
return True
def __lt__(self, o):
return True
def __ne__(self, o):
return not self.__eq__(o)
def __str__(self):
"""
The printout of this object.
"""
return 'SafeNone'
def __repr__(self):
"""
The printout of this object.
"""
return 'SafeNone'
def __eq__(self, o):
"""
SafeNone is only equal to 0, None and itself.
"""
return o == 0 or o is self or o is None
def __cmp__(self, o):
"""
Compares this object to another object. Will always be
-1 except for objects that are __eq__ to this object.
"""
if self.__eq__(o):
return 0
return 1
def __iter__(self):
"""
SafeNone implements the iterator interface, so it is returned for
iterator requests.
"""
return self
def next(self):
"""
The iterator interface. This object always raises a StopIteration
exception on the first iterator access.
"""
raise StopIteration()
def __len__(self):
"""
The length of this object is always zero.
"""
return 0
def __hash__(self):
"""
The hash of this object is always zero.
"""
return 0
def __delitem__(self, item):
"""
Implemented so that calls to del(SafeNone[k]) don't throw an
exception.
"""
pass
def items(self):
"""
items() method for when SafeNone is treated like a dictionary.
"""
return ()
def keys(self):
"""
keys() method for when SafeNone is treated like a dictionary.
"""
return()
def __contains__(self, x):
"""
contains() method for when SafeNone is treated like a dictionary.
"""
return False
def __setitem__(self, *args, **kwargs):
"""
Raises exception, you cannot store values in a SafeNone.
"""
raise Exception("Cannot store values in SafeNone.")
def fromkeys(self, *args, **kwargs):
"""
Raises exception, you cannot store values in a SafeNone.
"""
raise Exception("Cannot store values in SafeNone.")
def update(self, *args, **kwargs):
"""
Raises exception, you cannot store values in a SafeNone.
"""
raise Exception("Cannot store values in SafeNone.")
def setdefault(self, *args, **kwargs):
"""
Raises exception, you cannot store values in a SafeNone.
"""
raise Exception("Cannot store values in SafeNone.")
def clear(self):
"""
Return None.
"""
return None;
def copy(self):
"""
Return reference to SafeNone.
"""
return SafeNone
def get(self, *args, **kwargs):
"""
All attempts to get things from SafeNone will return SafeNone
"""
return SafeNone
def has_key(self, *args, **kwargs):
"""
SafeNone never has the key your looking for, this method always returns false.
"""
return False
def iteritems(self):
"""
Returns an iterator over 0 objects.
"""
return ().__iter__()
def iterkeys(self):
"""
Returns an iterator over 0 objects.
"""
return ().__iter__()
def itervalues(self):
"""
Returns an iterator over 0 objects.
"""
return ().__iter__()
def pop(self, *args, **kwargs):
"""
Return reference to SafeNone.
"""
return SafeNone
def popitem(self, *args, **kwargs):
"""
Return reference to SafeNone.
"""
return SafeNone
def values(self):
"""
Return empty tuple.
"""
return ()
def viewitems(self):
"""
Return empty tuple.
"""
return ()
def viewkeys(self):
"""
Return empty tuple.
"""
return ()
def viewvalues(self):
"""
Return empty tuple.
"""
return ()
# LIST METHODS
def __add__(self, y):
"""
Adding this item to another item will return just the other item.
"""
return y
def __delslice__(self, *args, **kwargs):
"""
Deleting a slice of items will result in nothing occuring.
"""
pass
def __getslice__(self, i, j):
"""
Any slice of SafeNone is also SafeNone
"""
return SafeNone
def __iadd__(self, y):
"""
Raises exception, you cannot store values in a SafeNone.
"""
raise Exception("Cannot modify SafeNone.")
def __imul__(self, y):
"""
Raises exception, you cannot store values in a SafeNone.
"""
raise Exception("Cannot modify SafeNone.")
def __mul__(self, n):
"""
Following list semantics, a SafeJSON multiplied by any number should
be a SafeJSON, so that is what is returned.
"""
return SafeNone
def __reversed__(self):
"""
A SafeJSON object is its own reverse.
"""
return SafeNone
def __rmul__(self, n):
"""
Following list semantics, a SafeJSON multiplied by any number should
be a SafeJSON, so that is what is returned.
"""
return SafeNone
def __setslice__(self, i, j, y):
"""
Raises exception, you cannot store values in a SafeNone.
"""
raise Exception("Cannot store values in SafeNone.")
def append(self, o):
"""
Raises exception, you cannot store values in a SafeNone.
"""
raise Exception("Cannot store values in SafeNone.")
def count(self, value):
"""
There are always 0 items in a SafeNone
"""
return 0
def extend(self, iterable):
"""
Raises exception, you cannot store values in a SafeNone.
"""
raise Exception("Cannot store values in SafeNone.")
def index(self, *args, **kwargs):
"""
Always raises an exception because no items are ever in a SafeNone
"""
raise exceptions.ValueError()
def insert(self, o):
"""
Raises exception, you cannot store values in a SafeNone.
"""
raise Exception("Cannot store values in SafeNone.")
def remove(self, value):
"""
Always returns SafeJSON
"""
return SafeNone
def reverse(self):
"""
Does nothing because SafeJSON is its own reverse.
"""
pass
def sort(self):
"""
Does nothing because SafeJSON is always sorted.
"""
pass
# Our static reference to SafeNone - put it in the scope of the module.
# SafeNoneClass is a singleton, so every consumer shares this one instance.
SafeNone = SafeNoneClass()
# -----------------------------------------------------------------------------
# The Safe list class
# -----------------------------------------------------------------------------
class SafeList(list):
    """
    list that returns SafeNone instead of raising IndexError.

    Behaves exactly like list except that integer accesses outside the list
    (positive or negative) yield SafeNone, and deleting a missing integer
    index is a silent no-op.  Slices keep normal list semantics.
    """

    @staticmethod
    def _in_range(index, length):
        # Python's valid index window for a sequence of `length` elements.
        return -length <= index < length

    def __getitem__(self, index):
        # Check the type FIRST: the old `index < len(self)` comparison raised
        # TypeError for slices on Python 3 before the type test could run.
        if not isinstance(index, int):
            return super(SafeList, self).__getitem__(index)
        if self._in_range(index, len(self)):
            return super(SafeList, self).__getitem__(index)
        # Out-of-range in either direction (the original raised IndexError
        # for indices below -len, contrary to the class contract).
        return SafeNone

    def __delitem__(self, index):
        if not isinstance(index, int):
            return super(SafeList, self).__delitem__(index)
        if self._in_range(index, len(self)):
            return super(SafeList, self).__delitem__(index)
        # Silently ignore deletes of missing indices.
# -----------------------------------------------------------------------------
# The safe dict class
# -----------------------------------------------------------------------------
class SafeDict(dict):
    """
    dict that returns SafeNone instead of raising KeyError.

    Behaves exactly like dict except that looking up a missing key yields
    SafeNone and deleting a missing key is a silent no-op.
    """

    def __getitem__(self, key):
        # Guard clause: unknown keys resolve to the SafeNone sentinel.
        if key not in self:
            return SafeNone
        return super(SafeDict, self).__getitem__(key)

    def __delitem__(self, key):
        # Deleting a key that is absent does nothing.
        if key not in self:
            return
        return super(SafeDict, self).__delitem__(key)
# -----------------------------------------------------------------------------
# The safeJSON parser class
# -----------------------------------------------------------------------------
class SafeJSONParser(object):
    """
    Wrapper for json.load and json.loads producing 'Safe' containers.

    Parses JSON normally, then walks the result replacing every dict with a
    SafeDict and every list with a SafeList so that missing keys and indices
    resolve to SafeNone instead of raising.
    """

    def load(self, f):
        return self.transcode(json.load(f))

    def loads(self, s):
        return self.transcode(json.loads(s))

    def transcode(self, o):
        """Recursively convert plain dicts/lists into SafeDict/SafeList."""
        # Only exact dict/list instances are converted (json never yields
        # subclasses); scalars pass through untouched.
        if type(o) == dict:
            return SafeDict((key, self.transcode(value)) for key, value in o.items())
        if type(o) == list:
            return SafeList(self.transcode(value) for value in o)
        return o
# Put the load / loads module-level convenience functions in the global scope.
# Each is a bound method of its own throwaway parser instance, mirroring the
# json.load / json.loads API.
load = SafeJSONParser().load
loads = SafeJSONParser().loads
import numpy as np
import pypoman
from scipy.optimize import line_search, linprog
class SAFW:
    """Safe Away-step Frank-Wolfe optimiser over a noisily observed polytope.

    Minimises f(x) = 0.5 * ||x - x_prime||^2 subject to the (unknown) linear
    constraints A @ x <= b, which can only be queried through noisy
    evaluations (ask_constraints).

    NOTE(review): the `run` method below references several names that are
    not defined in its scope (`t`, `d`, `m`, `omega_0`) -- presumably they
    were meant to be `T`, `self.d`, `self.m` and a sampling-radius
    parameter; confirm before running.  Indentation of this file was
    reconstructed from flattened source; verify the loop nesting.
    """

    def __init__(self, A, b, eta, x_prime):
        # A: (m, d) constraint matrix; b: constraint offsets; eta: std-dev of
        # the Gaussian observation noise; x_prime: target point of f.
        m, d = A.shape
        self.m = m
        self.d = d
        self.A = A
        self.b = b
        self.eta = eta
        self.x_prime = x_prime

    def ask_constraints(self, x):
        """Noisy oracle for the constraint values A @ x - b."""
        y = self.A @ x - self.b + np.random.normal(0, self.eta, self.m)
        return y

    def f(self, x):
        """Objective: half the squared Euclidean distance to x_prime."""
        y = 0.5*np.linalg.norm(x - self.x_prime)**2
        return y

    def e_i(self, i, d):
        """Standard basis vector of length d with a 1 at (1-based) position i."""
        l = [0]*(i - 1) + [1] + [0]*(d - i)
        return np.array(l)

    def ask_gradient(self, x):
        """Exact gradient of f at x."""
        y = x - self.x_prime
        return y

    def run(self, x_0, T, epsilon, delta, tau, N_t, n_t):
        """Run the optimisation from x_0.

        x_0: starting point; T: outer iteration budget; epsilon: duality-gap
        stopping tolerance; delta/tau: safety margins; N_t: total measurement
        budget; n_t: measurements per outer iteration.
        """
        self.trajectroy = [x_0]  # NOTE(review): typo for "trajectory" (kept: public attr).
        self.x_0 = x_0
        self.T = T
        self.epsilon = epsilon
        self.delta = delta
        self.tau = tau
        # Measurement buffers for the constraint regression.
        self.X = np.zeros([N_t, d])  # NOTE(review): `d` undefined; likely self.d
        self.Y = np.zeros([N_t, m])  # NOTE(review): `m` undefined; likely self.m
        self.x_t = np.zeros([d,n_t+1])
        self.x_t[:,0] = np.ones(d)*0.1
        self.x_t[:,0] = x_0  # immediately overwrites the initialisation above
        self.atoms = [x_0]
        for i in range(t):  # NOTE(review): `t` undefined; likely T
            x = self.trajectroy[-1]
            # Probe the noisy constraints around x along every coordinate axis.
            for j in range(d):
                # NOTE(review): `omega_0` (sampling radius) is never defined.
                x_measure_1 = x + self.e_i(j+1, d)*omega_0
                x_measure_2 = x - self.e_i(j+1, d)*omega_0
                for k in range(n_t//(2*d)):
                    self.X[i*n_t + j*2*(n_t//(2*d)) + k, :] = x_measure_1
                    self.Y[i*n_t + j*2*(n_t//(2*d)) + k, :] = self.ask_constraints(x_measure_1)
                    self.X[i*n_t + j*2*(n_t//(2*d)) + (n_t//(2*d)) + k, :] = x_measure_2
                    self.Y[i*n_t + j*2*(n_t//(2*d)) + (n_t//(2*d)) + k, :] = self.ask_constraints(x_measure_2)
            # Least-squares estimate of the augmented parameters [A; b] from
            # all measurements gathered so far (shrunk by the margin tau).
            X_bar = np.concatenate([self.X[0:(i + 1)*n_t,:], np.array([-1]*((i + 1)*n_t)).reshape(-1,1)], axis = 1)
            Y_bar = self.Y[0:(i + 1)*n_t,:] + tau
            beta_t = np.linalg.inv(X_bar.T @ X_bar) @ X_bar.T @ Y_bar
            A_hat = beta_t[0:d, :]
            b_hat = beta_t[-1, :]
            # Vertices of the estimated feasible polytope.
            vertices = np.array(pypoman.compute_polytope_vertices(A_hat.T, b_hat))
            # Express (x - tau) as a convex combination of the vertices.
            bounds2 = [[0, 1]]*vertices.shape[0]
            weights = linprog(np.array([1]*vertices.shape[0]), \
                A_eq = np.concatenate([vertices.T, np.array([1]*vertices.shape[0]).reshape(1,vertices.shape[0])], axis = 0),\
                b_eq = np.append(x - tau, 1) , bounds=bounds2).x
            atoms_with_positive_wight = vertices[weights > 0, :]
            # Inner away-step Frank-Wolfe loop over the active atoms.
            for j in range(len(atoms_with_positive_wight) + 1):
                bounds2 = [[0, 1]]*vertices.shape[0]
                weights = linprog(np.array([1]*vertices.shape[0]), \
                    A_eq = np.concatenate([vertices.T, np.array([1]*vertices.shape[0]).reshape(1,vertices.shape[0])], axis = 0),\
                    b_eq = np.append(x - tau, 1) , bounds=bounds2).x
                atoms_with_positive_wight = vertices[weights > 0, :]
                grad = self.ask_gradient(x)
                # Classical FW direction: best vertex of the estimated polytope.
                res = linprog(grad, A_ub=A_hat.T, b_ub=b_hat )
                d_FW = res.x - x
                # Away direction: move away from the worst active atom.
                inner_product = atoms_with_positive_wight @ grad
                argmax = np.argmax(inner_product)
                v_t = atoms_with_positive_wight[argmax, :]
                bounds2 = [[0, 1]]*atoms_with_positive_wight.shape[0]
                weights = linprog(np.array([1]*atoms_with_positive_wight.shape[0]), \
                    A_eq = np.concatenate([atoms_with_positive_wight.T, \
                    np.array([1]*atoms_with_positive_wight.shape[0]).reshape(1,atoms_with_positive_wight.shape[0])], axis = 0),
                    b_eq = np.append(x - tau, 1), bounds=bounds2).x
                alpha_v_t = weights[argmax]
                d_A = x - v_t
                duality_gap = np.dot(-grad, d_FW)
                step_away = np.dot(-grad, d_A)
                if duality_gap <= self.epsilon:
                    # Converged within tolerance.
                    break
                else:
                    # Pick the more promising of the FW and away directions.
                    if duality_gap >= step_away:
                        d_t = d_FW
                        gamma_max = 1
                    else:
                        d_t = d_A
                        gamma_max = alpha_v_t/(1 - alpha_v_t)
                    # Exact line search along d_t, capped at gamma_max.
                    gamma_t = line_search(self.f, self.ask_gradient, x, d_t, amax = gamma_max)[0]
                    if gamma_t == None:
                        gamma_t = gamma_max
                    x_next = x + gamma_t*d_t
                    self.trajectroy.append(x_next)
                    x = x_next
            # Refresh the active-atom decomposition at the new iterate.
            bounds2 = [[0, 1]]*vertices.shape[0]
            vertices = np.array(pypoman.compute_polytope_vertices(A_hat.T, b_hat))
            weights = linprog(np.array([1]*vertices.shape[0]), \
                A_eq = np.concatenate([vertices.T, np.array([1]*vertices.shape[0]).reshape(1,vertices.shape[0])], axis = 0),
                b_eq = np.append(x - tau, 1) , bounds=bounds2).x
            atoms_with_positive_wight = vertices[weights > 0, :]
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
slim = tf.contrib.slim
def lenet(images, num_classes=10, is_training=False,
          dropout_keep_prob=0.5,
          prediction_fn=slim.softmax,
          scope='LeNet'):
    """Creates a variant of the LeNet model.

    Note that since the output is a set of 'logits', the values fall in the
    interval of (-infinity, infinity). Consequently, to convert the outputs to a
    probability distribution over the characters, one will need to convert them
    using the softmax function:

          logits = lenet.lenet(images, is_training=False)
          probabilities = tf.nn.softmax(logits)
          predictions = tf.argmax(logits, 1)

    Args:
      images: A batch of `Tensors` of size [batch_size, height, width, channels].
      num_classes: the number of classes in the dataset. If 0 or None, the logits
        layer is omitted and the input features to the logits layer are returned
        instead.
      is_training: specifies whether or not we're currently training the model.
        This variable will determine the behaviour of the dropout layer.
      dropout_keep_prob: the percentage of activation values that are retained.
      prediction_fn: a function to get predictions out of logits.
      scope: Optional variable_scope.

    Returns:
      net: a 2D Tensor with the logits (pre-softmax activations) if num_classes
        is a non-zero integer, or the non-dropped-out input to the logits layer
        if num_classes is 0 or None.
      end_points: a dictionary from components of the network to the
        corresponding activation.
    """
    end_points = {}

    with tf.variable_scope(scope, 'LeNet', [images]):
        # Two conv/pool stages; leftover debug `print` statements removed.
        net = end_points['conv1'] = slim.conv2d(images, 32, [5, 5], scope='conv1')
        net = end_points['pool1'] = slim.max_pool2d(net, [2, 2], 2, scope='pool1')
        net = end_points['conv2'] = slim.conv2d(net, 64, [5, 5], scope='conv2')
        net = end_points['pool2'] = slim.max_pool2d(net, [2, 2], 2, scope='pool2')
        net = slim.flatten(net)
        end_points['Flatten'] = net

        net = end_points['fc3'] = slim.fully_connected(net, 1024, scope='fc3')
        if not num_classes:
            # Caller asked for features only: skip dropout and the logits layer.
            return net, end_points
        net = end_points['dropout3'] = slim.dropout(
            net, dropout_keep_prob, is_training=is_training, scope='dropout3')
        logits = end_points['Logits'] = slim.fully_connected(
            net, num_classes, activation_fn=None, scope='fc4')

        end_points['Predictions'] = prediction_fn(logits, scope='Predictions')

    return logits, end_points

# Expected input resolution for this network (MNIST-sized images).
lenet.default_image_size = 28
def lenet_arg_scope(weight_decay=0.0):
    """Defines the default lenet argument scope.

    Args:
      weight_decay: The weight decay to use for regularizing the model.

    Returns:
      An `arg_scope` to use for the inception v3 model.
    """
    regularizer = slim.l2_regularizer(weight_decay)
    initializer = tf.truncated_normal_initializer(stddev=0.1)
    with slim.arg_scope(
            [slim.conv2d, slim.fully_connected],
            weights_regularizer=regularizer,
            weights_initializer=initializer,
            activation_fn=tf.nn.relu) as sc:
        return sc
"""Helper script to bump the current version."""
import argparse
from datetime import datetime
import re
import subprocess
from packaging.version import Version
from homeassistant import const
def _bump_release(release, bump_type):
"""Bump a release tuple consisting of 3 numbers."""
major, minor, patch = release
if bump_type == "patch":
patch += 1
elif bump_type == "minor":
minor += 1
patch = 0
return major, minor, patch
def bump_version(version, bump_type):
    """Return a new version given a current version and action.

    `version` is a packaging.version.Version; `bump_type` is one of
    "minor", "patch", "dev", "beta" or "nightly".  Builds a replacement
    for the private `_version` namedtuple and re-parses it.
    """
    to_change = {}

    if bump_type == "minor":
        # Convert 0.67.3 to 0.68.0
        # Convert 0.67.3.b5 to 0.68.0
        # Convert 0.67.3.dev0 to 0.68.0
        # Convert 0.67.0.b5 to 0.67.0
        # Convert 0.67.0.dev0 to 0.67.0
        to_change["dev"] = None
        to_change["pre"] = None

        if not version.is_prerelease or version.release[2] != 0:
            to_change["release"] = _bump_release(version.release, "minor")

    elif bump_type == "patch":
        # Convert 0.67.3 to 0.67.4
        # Convert 0.67.3.b5 to 0.67.3
        # Convert 0.67.3.dev0 to 0.67.3
        to_change["dev"] = None
        to_change["pre"] = None

        if not version.is_prerelease:
            to_change["release"] = _bump_release(version.release, "patch")

    elif bump_type == "dev":
        # Convert 0.67.3 to 0.67.4.dev0
        # Convert 0.67.3.b5 to 0.67.4.dev0
        # Convert 0.67.3.dev0 to 0.67.3.dev1
        if version.is_devrelease:
            to_change["dev"] = ("dev", version.dev + 1)
        else:
            to_change["pre"] = ("dev", 0)
            to_change["release"] = _bump_release(version.release, "minor")

    elif bump_type == "beta":
        # Convert 0.67.5 to 0.67.6b0
        # Convert 0.67.0.dev0 to 0.67.0b0
        # Convert 0.67.5.b4 to 0.67.5b5
        if version.is_devrelease:
            to_change["dev"] = None
            to_change["pre"] = ("b", 0)

        elif version.is_prerelease:
            # NOTE(review): this "a" branch is redundant -- the following
            # if/else already maps any non-"b" prerelease to ("b", 0).
            if version.pre[0] == "a":
                to_change["pre"] = ("b", 0)
            if version.pre[0] == "b":
                to_change["pre"] = ("b", version.pre[1] + 1)
            else:
                to_change["pre"] = ("b", 0)
                to_change["release"] = _bump_release(version.release, "patch")

        else:
            to_change["release"] = _bump_release(version.release, "patch")
            to_change["pre"] = ("b", 0)

    elif bump_type == "nightly":
        # Convert 0.70.0d0 to 0.70.0d20190424, fails when run on non dev release
        if not version.is_devrelease:
            raise ValueError("Can only be run on dev release")

        to_change["dev"] = (
            "dev",
            datetime.utcnow().date().isoformat().replace("-", ""),
        )

    else:
        assert False, f"Unsupported type: {bump_type}"

    # Rebuild a Version by swapping fields on the private parsed tuple and
    # round-tripping through str() so normalisation is applied.
    temp = Version("0")
    temp._version = version._version._replace(**to_change)
    return Version(str(temp))
def write_version(version):
    """Update Safegate Pro constant file with new version.

    Rewrites the MAJOR_VERSION / MINOR_VERSION / PATCH_VERSION constants in
    homeassistant/const.py in place.
    """
    with open("homeassistant/const.py") as fil:
        content = fil.read()

    major, minor, patch = str(version).split(".", 2)

    content = re.sub(
        "MAJOR_VERSION: Final = .*\n", f"MAJOR_VERSION: Final = {major}\n", content
    )
    content = re.sub(
        "MINOR_VERSION: Final = .*\n", f"MINOR_VERSION: Final = {minor}\n", content
    )
    # PATCH_VERSION is a string constant (it may carry suffixes like "0b3").
    content = re.sub(
        "PATCH_VERSION: Final = .*\n", f'PATCH_VERSION: Final = "{patch}"\n', content
    )

    with open("homeassistant/const.py", "wt") as fil:
        # Fixed: the original rebound `content` to fil.write()'s return value
        # (a character count) for no reason.
        fil.write(content)
def main():
    """Execute script."""
    parser = argparse.ArgumentParser(description="Bump version of Safegate Pro")
    parser.add_argument(
        "type",
        help="The type of the bump the version to.",
        choices=["beta", "dev", "patch", "minor", "nightly"],
    )
    parser.add_argument(
        "--commit", action="store_true", help="Create a version bump commit."
    )
    args = parser.parse_args()

    # Refuse to commit on a dirty tree (`git diff --quiet` exits 1 on changes).
    if args.commit and subprocess.run(["git", "diff", "--quiet"]).returncode == 1:
        print("Cannot use --commit because git is dirty.")
        return

    current = Version(const.__version__)
    bumped = bump_version(current, args.type)
    assert bumped > current, "BUG! New version is not newer than old version"

    write_version(bumped)

    if args.commit:
        subprocess.run(["git", "commit", "-nam", f"Bumped version to {bumped}"])
def test_bump_version():
    """Make sure it all works."""
    import pytest

    # beta bumps
    assert bump_version(Version("0.56.0"), "beta") == Version("0.56.1b0")
    assert bump_version(Version("0.56.0b3"), "beta") == Version("0.56.0b4")
    assert bump_version(Version("0.56.0.dev0"), "beta") == Version("0.56.0b0")

    # dev bumps
    assert bump_version(Version("0.56.3"), "dev") == Version("0.57.0.dev0")
    assert bump_version(Version("0.56.0b3"), "dev") == Version("0.57.0.dev0")
    assert bump_version(Version("0.56.0.dev0"), "dev") == Version("0.56.0.dev1")

    # patch bumps
    assert bump_version(Version("0.56.3"), "patch") == Version("0.56.4")
    assert bump_version(Version("0.56.3.b3"), "patch") == Version("0.56.3")
    assert bump_version(Version("0.56.0.dev0"), "patch") == Version("0.56.0")

    # minor bumps
    assert bump_version(Version("0.56.0"), "minor") == Version("0.57.0")
    assert bump_version(Version("0.56.3"), "minor") == Version("0.57.0")
    assert bump_version(Version("0.56.0.b3"), "minor") == Version("0.56.0")
    assert bump_version(Version("0.56.3.b3"), "minor") == Version("0.57.0")
    assert bump_version(Version("0.56.0.dev0"), "minor") == Version("0.56.0")
    assert bump_version(Version("0.56.2.dev0"), "minor") == Version("0.57.0")

    # nightly bumps stamp today's date; they only apply to dev releases.
    date_stamp = datetime.utcnow().date().isoformat().replace("-", "")
    assert bump_version(Version("0.56.0.dev0"), "nightly") == Version(
        f"0.56.0.dev{date_stamp}"
    )

    with pytest.raises(ValueError):
        assert bump_version(Version("0.56.0"), "nightly")
if __name__ == "__main__":
main() | /safegate_pro-2021.7.6-py3-none-any.whl/script/version_bump.py | 0.756088 | 0.402128 | version_bump.py | pypi |
from __future__ import annotations
from pathlib import Path
from urllib.parse import urlparse
from awesomeversion import (
AwesomeVersion,
AwesomeVersionException,
AwesomeVersionStrategy,
)
import voluptuous as vol
from voluptuous.humanize import humanize_error
from .model import Config, Integration
# Required scheme/host/path for integration documentation URLs,
# enforced by documentation_url() below.
DOCUMENTATION_URL_SCHEMA = "https"
DOCUMENTATION_URL_HOST = "www.home-assistant.io"
DOCUMENTATION_URL_PATH_PREFIX = "/integrations/"
# URLs exempt from the host/path check.
DOCUMENTATION_URL_EXCEPTIONS = {"https://www.home-assistant.io/hassio"}

# Allowed values for the manifest "quality_scale" key.
SUPPORTED_QUALITY_SCALES = ["gold", "internal", "platinum", "silver"]
# Allowed values for the manifest "iot_class" key.
SUPPORTED_IOT_CLASSES = [
    "assumed_state",
    "calculated",
    "cloud_polling",
    "cloud_push",
    "local_polling",
    "local_push",
]

# List of integrations that are supposed to have no IoT class
NO_IOT_CLASS = [
    "air_quality",
    "alarm_control_panel",
    "api",
    "auth",
    "automation",
    "binary_sensor",
    "blueprint",
    "calendar",
    "camera",
    "climate",
    "color_extractor",
    "config",
    "configurator",
    "counter",
    "cover",
    "default_config",
    "device_automation",
    "device_tracker",
    "discovery",
    "downloader",
    "fan",
    "ffmpeg",
    "frontend",
    "geo_location",
    "history",
    "homeassistant",
    "humidifier",
    "image_processing",
    "image",
    "input_boolean",
    "input_datetime",
    "input_number",
    "input_select",
    "input_text",
    "intent_script",
    "intent",
    "light",
    "lock",
    "logbook",
    "logger",
    "lovelace",
    "mailbox",
    "map",
    "media_player",
    "media_source",
    "my",
    "notify",
    "number",
    "onboarding",
    "panel_custom",
    "panel_iframe",
    "plant",
    "profiler",
    "proxy",
    "python_script",
    "remote",
    "safe_mode",
    "scene",
    "script",
    "search",
    "select",
    "sensor",
    "stt",
    "switch",
    "system_health",
    "system_log",
    "tag",
    "timer",
    "trace",
    "tts",
    "vacuum",
    "water_heater",
    "weather",
    "webhook",
    "websocket_api",
    "zone",
]
def documentation_url(value: str) -> str:
    """Validate that a documentation url has the correct path and domain."""
    # Explicitly allow-listed URLs skip all checks.
    if value in DOCUMENTATION_URL_EXCEPTIONS:
        return value

    parsed = urlparse(value)

    if parsed.scheme != DOCUMENTATION_URL_SCHEMA:
        raise vol.Invalid("Documentation url is not prefixed with https")

    # URLs on the official host must point at the integrations section;
    # external hosts are accepted as-is.
    is_official_host = parsed.netloc == DOCUMENTATION_URL_HOST
    if is_official_host and not parsed.path.startswith(DOCUMENTATION_URL_PATH_PREFIX):
        raise vol.Invalid(
            "Documentation url does not begin with www.home-assistant.io/integrations"
        )

    return value
def verify_lowercase(value: str):
    """Verify a value is lowercase."""
    lowered = value.lower()
    if lowered != value:
        raise vol.Invalid("Value needs to be lowercase")
    return value
def verify_uppercase(value: str):
    """Verify a value is uppercase."""
    uppered = value.upper()
    if uppered != value:
        raise vol.Invalid("Value needs to be uppercase")
    return value
def verify_version(value: str):
    """Verify the version parses under one of the accepted version strategies."""
    try:
        AwesomeVersion(
            value,
            [
                AwesomeVersionStrategy.CALVER,
                AwesomeVersionStrategy.SEMVER,
                AwesomeVersionStrategy.SIMPLEVER,
                AwesomeVersionStrategy.BUILDVER,
                AwesomeVersionStrategy.PEP440,
            ],
        )
    except AwesomeVersionException as err:
        # Chain the parse error so the underlying cause isn't lost
        # (the original raised without `from`).
        raise vol.Invalid(f"'{value}' is not a valid version.") from err
    return value
def verify_wildcard(value: str):
    """Verify the matcher contains a wildcard."""
    if value.find("*") < 0:
        raise vol.Invalid(f"'{value}' needs to contain a wildcard matcher")
    return value
# Schema for manifest.json of core integrations. Custom integrations use
# CUSTOM_INTEGRATION_MANIFEST_SCHEMA (declared below), which extends this one.
MANIFEST_SCHEMA = vol.Schema(
    {
        vol.Required("domain"): str,
        vol.Required("name"): str,
        vol.Optional("config_flow"): bool,
        vol.Optional("mqtt"): [str],
        # Zeroconf entries are either a bare service type string or a matcher dict.
        vol.Optional("zeroconf"): [
            vol.Any(
                str,
                vol.Schema(
                    {
                        vol.Required("type"): str,
                        # MAC matchers must be uppercase and contain a wildcard.
                        vol.Optional("macaddress"): vol.All(
                            str, verify_uppercase, verify_wildcard
                        ),
                        vol.Optional("manufacturer"): vol.All(str, verify_lowercase),
                        vol.Optional("name"): vol.All(str, verify_lowercase),
                    }
                ),
            )
        ],
        # SSDP: a non-empty list of free-form matcher dicts.
        vol.Optional("ssdp"): vol.Schema(
            vol.All([vol.All(vol.Schema({}, extra=vol.ALLOW_EXTRA), vol.Length(min=1))])
        ),
        vol.Optional("homekit"): vol.Schema({vol.Optional("models"): [str]}),
        vol.Optional("dhcp"): [
            vol.Schema(
                {
                    vol.Optional("macaddress"): vol.All(
                        str, verify_uppercase, verify_wildcard
                    ),
                    vol.Optional("hostname"): vol.All(str, verify_lowercase),
                }
            )
        ],
        # Documentation URL must be https and, on the official host, live under
        # the integrations path (see documentation_url above).
        vol.Required("documentation"): vol.All(
            vol.Url(), documentation_url # pylint: disable=no-value-for-parameter
        ),
        vol.Optional(
            "issue_tracker"
        ): vol.Url(), # pylint: disable=no-value-for-parameter
        vol.Optional("quality_scale"): vol.In(SUPPORTED_QUALITY_SCALES),
        vol.Optional("requirements"): [str],
        vol.Optional("dependencies"): [str],
        vol.Optional("after_dependencies"): [str],
        vol.Required("codeowners"): [str],
        vol.Optional("disabled"): str,
        vol.Optional("iot_class"): vol.In(SUPPORTED_IOT_CLASSES),
    }
)
# Custom integrations may additionally declare a version; validate_version
# below reports an error when it is missing.
CUSTOM_INTEGRATION_MANIFEST_SCHEMA = MANIFEST_SCHEMA.extend(
    {
        vol.Optional("version"): vol.All(str, verify_version),
    }
)
def validate_version(integration: Integration):
    """
    Validate the version of the integration.
    Will be removed when the version key is no longer optional for custom integrations.
    """
    # A missing or empty "version" key is an error for custom integrations.
    if integration.manifest.get("version"):
        return
    integration.add_error("manifest", "No 'version' key in the manifest file.")
def validate_manifest(integration: Integration, core_components_dir: Path) -> None:
    """Validate a single integration's manifest, recording errors/warnings on it."""
    if not integration.manifest:
        # Nothing loaded (missing or unparseable manifest) - nothing to check.
        return
    try:
        if integration.core:
            MANIFEST_SCHEMA(integration.manifest)
        else:
            CUSTOM_INTEGRATION_MANIFEST_SCHEMA(integration.manifest)
    except vol.Invalid as err:
        integration.add_error(
            "manifest", f"Invalid manifest: {humanize_error(integration.manifest, err)}"
        )
    # The checks below run even when schema validation failed; they only rely
    # on the "domain" key being present.
    if integration.manifest["domain"] != integration.path.name:
        integration.add_error("manifest", "Domain does not match dir name")
    if (
        not integration.core
        and (core_components_dir / integration.manifest["domain"]).exists()
    ):
        # A custom integration shadowing a built-in one is a warning, not an error.
        integration.add_warning(
            "manifest", "Domain collides with built-in core integration"
        )
    if (
        integration.manifest["domain"] in NO_IOT_CLASS
        and "iot_class" in integration.manifest
    ):
        integration.add_error("manifest", "Domain should not have an IoT Class")
    if (
        integration.manifest["domain"] not in NO_IOT_CLASS
        and "iot_class" not in integration.manifest
    ):
        integration.add_error("manifest", "Domain is missing an IoT Class")
    if not integration.core:
        # Custom integrations must (eventually) declare a version.
        validate_version(integration)
def validate(integrations: dict[str, Integration], config: Config) -> None:
    """Handle all integrations manifests."""
    core_components_dir = config.root / "homeassistant/components"
    for itg in integrations.values():
        validate_manifest(itg, core_components_dir)
import argparse
import pathlib
import sys
from time import monotonic
from . import (
codeowners,
config_flow,
coverage,
dependencies,
dhcp,
json,
manifest,
mqtt,
mypy_config,
requirements,
services,
ssdp,
translations,
zeroconf,
)
from .model import Config, Integration
# Plugins that validate (and optionally generate) per-integration data.
# The list order is the execution order in main().
INTEGRATION_PLUGINS = [
    json,
    codeowners,
    config_flow,
    dependencies,
    manifest,
    mqtt,
    services,
    ssdp,
    translations,
    zeroconf,
    dhcp,
]
# Plugins that operate on the repository as a whole; only run when not
# limited to specific integrations.
HASS_PLUGINS = [
    coverage,
    mypy_config,
]
def valid_integration_path(integration_path):
    """Test if it's a valid integration."""
    candidate = pathlib.Path(integration_path)
    if candidate.is_dir():
        return candidate
    # argparse turns this into a clean CLI error message.
    raise argparse.ArgumentTypeError(f"{integration_path} is not a directory.")
def get_config() -> Config:
    """Build the run Config from command line arguments.

    Raises RuntimeError for invalid flag combinations or a wrong working dir.
    """
    parser = argparse.ArgumentParser(description="Hassfest")
    parser.add_argument(
        "--action", type=str, choices=["validate", "generate"], default=None
    )
    parser.add_argument(
        "--integration-path",
        action="append",
        type=valid_integration_path,
        help="Validate a single integration",
    )
    parser.add_argument(
        "--requirements",
        action="store_true",
        help="Validate requirements",
    )
    parsed = parser.parse_args()
    # Default action: validate when limited to specific integrations,
    # otherwise generate.
    if parsed.action is None:
        parsed.action = "validate" if parsed.integration_path else "generate"
    if parsed.action == "generate" and parsed.integration_path:
        raise RuntimeError(
            "Generate is not allowed when limiting to specific integrations"
        )
    # requirements_all.txt is used as a marker that we run from the repo root.
    if (
        not parsed.integration_path
        and not pathlib.Path("requirements_all.txt").is_file()
    ):
        raise RuntimeError("Run from Safegate Pro root")
    return Config(
        root=pathlib.Path(".").absolute(),
        specific_integrations=parsed.integration_path,
        action=parsed.action,
        requirements=parsed.requirements,
    )
def main():
    """Validate manifests.

    Returns a process exit code: 0 on success, 1 on any error.
    """
    try:
        config = get_config()
    except RuntimeError as err:
        print(err)
        return 1
    plugins = [*INTEGRATION_PLUGINS]
    if config.requirements:
        plugins.append(requirements)
    # Either load just the requested integrations, or the whole components
    # dir (in which case the repo-wide plugins also run).
    if config.specific_integrations:
        integrations = {}
        for int_path in config.specific_integrations:
            integration = Integration(int_path)
            integration.load_manifest()
            integrations[integration.domain] = integration
    else:
        integrations = Integration.load_dir(pathlib.Path("homeassistant/components"))
        plugins += HASS_PLUGINS
    for plugin in plugins:
        try:
            start = monotonic()
            print(f"Validating {plugin.__name__.split('.')[-1]}...", end="", flush=True)
            # The requirements plugin prints its own progress; break the line.
            if plugin is requirements and not config.specific_integrations:
                print()
            plugin.validate(integrations, config)
            print(f" done in {monotonic() - start:.2f}s")
        except RuntimeError as err:
            print()
            print()
            print("Error!")
            print(err)
            return 1
    # When we generate, all errors that are fixable will be ignored,
    # as generating them will be fixed.
    if config.action == "generate":
        general_errors = [err for err in config.errors if not err.fixable]
        invalid_itg = [
            itg
            for itg in integrations.values()
            if any(not error.fixable for error in itg.errors)
        ]
    else:
        # action == validate
        general_errors = config.errors
        invalid_itg = [itg for itg in integrations.values() if itg.errors]
    warnings_itg = [itg for itg in integrations.values() if itg.warnings]
    print()
    print("Integrations:", len(integrations))
    print("Invalid integrations:", len(invalid_itg))
    print()
    if not invalid_itg and not general_errors:
        # Success: show warnings, optionally run generators, exit 0.
        print_integrations_status(config, warnings_itg, show_fixable_errors=False)
        if config.action == "generate":
            for plugin in plugins:
                if hasattr(plugin, "generate"):
                    plugin.generate(integrations, config)
        return 0
    if config.action == "generate":
        print("Found errors. Generating files canceled.")
        print()
    if general_errors:
        print("General errors:")
        for error in general_errors:
            print("*", error)
        print()
    # Also report integrations that only have warnings, without duplicates.
    invalid_itg.extend(itg for itg in warnings_itg if itg not in invalid_itg)
    print_integrations_status(config, invalid_itg, show_fixable_errors=False)
    return 1
def print_integrations_status(config, integrations, *, show_fixable_errors=True):
    """Print integration status."""
    for itg in sorted(integrations, key=lambda i: i.domain):
        # Show the path when validating specific integration dirs.
        path_suffix = f" - {itg.path}" if config.specific_integrations else ""
        print(f"Integration {itg.domain}{path_suffix}:")
        for error in itg.errors:
            if not show_fixable_errors and error.fixable:
                continue
            print("*", "[ERROR]", error)
        for warning in itg.warnings:
            print("*", "[WARNING]", warning)
        print()
if __name__ == "__main__":
sys.exit(main()) | /safegate_pro-2021.7.6-py3-none-any.whl/script/hassfest/__main__.py | 0.494629 | 0.167797 | __main__.py | pypi |
from __future__ import annotations
import pathlib
import re
import voluptuous as vol
from voluptuous.humanize import humanize_error
from homeassistant.const import CONF_SELECTOR
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import config_validation as cv, selector
from homeassistant.util.yaml import load_yaml
from .model import Integration
def exists(value):
    """Check if value exists (i.e. is not None)."""
    if value is not None:
        return value
    raise vol.Invalid("Value cannot be None")
# Schema for a single field of a service description in services.yaml.
FIELD_SCHEMA = vol.Schema(
    {
        vol.Required("description"): str,
        vol.Optional("name"): str,
        # These keys accept any non-None value (see `exists` above).
        vol.Optional("example"): exists,
        vol.Optional("default"): exists,
        vol.Optional("values"): exists,
        vol.Optional("required"): bool,
        vol.Optional("advanced"): bool,
        vol.Optional(CONF_SELECTOR): selector.validate_selector,
    }
)
# Schema for one service entry in services.yaml.
SERVICE_SCHEMA = vol.Schema(
    {
        vol.Required("description"): str,
        vol.Optional("name"): str,
        vol.Optional("target"): vol.Any(
            selector.TargetSelector.CONFIG_SCHEMA, None # pylint: disable=no-member
        ),
        vol.Optional("fields"): vol.Schema({str: FIELD_SCHEMA}),
    }
)
# Top level of services.yaml: service slug -> service description.
SERVICES_SCHEMA = vol.Schema({cv.slug: SERVICE_SCHEMA})
def grep_dir(path: pathlib.Path, glob_pattern: str, search_pattern: str) -> bool:
    """Recursively go through a dir and it's children and find the regex."""
    compiled = re.compile(search_pattern)
    # Stops at the first file whose text matches.
    return any(
        compiled.search(candidate.read_text())
        for candidate in path.glob(glob_pattern)
        if candidate.is_file()
    )
def validate_services(integration: Integration):
    """Validate the integration's services.yaml, recording errors on it."""
    try:
        data = load_yaml(str(integration.path / "services.yaml"))
    except FileNotFoundError:
        # No services.yaml: error only if the source registers services.
        has_services = grep_dir(
            integration.path,
            "**/*.py",
            r"(hass\.services\.(register|async_register))|async_register_entity_service|async_register_admin_service",
        )
        if has_services:
            integration.add_error(
                "services", "Registers services but has no services.yaml"
            )
        return
    except HomeAssistantError:
        integration.add_error("services", "Unable to load services.yaml")
        return
    try:
        SERVICES_SCHEMA(data)
    except vol.Invalid as err:
        integration.add_error(
            "services", f"Invalid services.yaml: {humanize_error(data, err)}"
        )
def validate(integrations: dict[str, Integration], config):
    """Handle dependencies for integrations."""
    # check services.yaml is cool
    for itg in integrations.values():
        if itg.manifest:
            validate_services(itg)
from __future__ import annotations
from pathlib import Path
from .model import Config, Integration
# Files that must never be excluded from coverage when present in an integration.
DONT_IGNORE = (
    "config_flow.py",
    "device_action.py",
    "device_condition.py",
    "device_trigger.py",
    "group.py",
    "intent.py",
    "logbook.py",
    "media_source.py",
    "scene.py",
)
# They were violating when we introduced this check
# Need to be fixed in a future PR.
ALLOWED_IGNORE_VIOLATIONS = {
    ("ambient_station", "config_flow.py"),
    ("cast", "config_flow.py"),
    ("daikin", "config_flow.py"),
    ("doorbird", "config_flow.py"),
    ("doorbird", "logbook.py"),
    ("elkm1", "config_flow.py"),
    ("elkm1", "scene.py"),
    ("fibaro", "scene.py"),
    ("flume", "config_flow.py"),
    ("hangouts", "config_flow.py"),
    ("harmony", "config_flow.py"),
    ("hisense_aehw4a1", "config_flow.py"),
    ("home_connect", "config_flow.py"),
    ("huawei_lte", "config_flow.py"),
    ("ifttt", "config_flow.py"),
    ("ios", "config_flow.py"),
    ("iqvia", "config_flow.py"),
    ("knx", "scene.py"),
    ("konnected", "config_flow.py"),
    ("lcn", "scene.py"),
    ("life360", "config_flow.py"),
    ("lifx", "config_flow.py"),
    ("lutron", "scene.py"),
    ("mobile_app", "config_flow.py"),
    ("nest", "config_flow.py"),
    ("plaato", "config_flow.py"),
    ("point", "config_flow.py"),
    ("rachio", "config_flow.py"),
    ("sense", "config_flow.py"),
    ("sms", "config_flow.py"),
    ("solarlog", "config_flow.py"),
    ("sonos", "config_flow.py"),
    ("speedtestdotnet", "config_flow.py"),
    ("spider", "config_flow.py"),
    ("starline", "config_flow.py"),
    ("tado", "config_flow.py"),
    ("tahoma", "scene.py"),
    ("totalconnect", "config_flow.py"),
    ("tradfri", "config_flow.py"),
    ("tuya", "config_flow.py"),
    ("tuya", "scene.py"),
    ("upnp", "config_flow.py"),
    ("velux", "scene.py"),
    ("wemo", "config_flow.py"),
    ("wiffi", "config_flow.py"),
    ("wink", "scene.py"),
}
def validate(integrations: dict[str, Integration], config: Config):
    """Validate the .coveragerc omit section.

    Records per-integration errors for ignored-but-required files and raises
    RuntimeError when omit entries reference nonexistent paths.
    """
    coverage_path = config.root / ".coveragerc"
    not_found = []
    checking = False
    with coverage_path.open("rt") as fp:
        for line in fp:
            line = line.strip()
            if not line or line.startswith("#"):
                continue
            # Only lines between "omit =" and "[report]" are inspected.
            if not checking:
                if line == "omit =":
                    checking = True
                continue
            # Finished
            if line == "[report]":
                break
            path = Path(line)
            # Discard wildcard components to test existence of the concrete part.
            path_exists = path
            while "*" in path_exists.name:
                path_exists = path_exists.parent
            if not path_exists.exists():
                not_found.append(line)
                continue
            # Only whole-integration omits ("homeassistant/components/<x>/*")
            # are checked against DONT_IGNORE.
            if (
                not line.startswith("homeassistant/components/")
                or len(path.parts) != 4
                or path.parts[-1] != "*"
            ):
                continue
            integration_path = path.parent
            integration = integrations[integration_path.name]
            for check in DONT_IGNORE:
                if (integration_path.name, check) in ALLOWED_IGNORE_VIOLATIONS:
                    continue
                if (integration_path / check).exists():
                    integration.add_error(
                        "coverage",
                        f"{check} must not be ignored by the .coveragerc file",
                    )
    # NOTE: the original built an intermediate `errors` list behind a redundant
    # second `if not_found:` check; the single check below is equivalent.
    if not_found:
        raise RuntimeError(
            f".coveragerc references files that don't exist: {', '.join(not_found)}."
        )
from __future__ import annotations
import importlib
import json
import pathlib
from typing import Any
import attr
@attr.s
class Error:
    """Error validating an integration."""

    # Name of the hassfest plugin that reported the error.
    plugin: str = attr.ib()
    # Human readable error message.
    error: str = attr.ib()
    # True when running the "generate" action would fix this error.
    fixable: bool = attr.ib(default=False)

    def __str__(self) -> str:
        """Represent error as string."""
        return f"[{self.plugin.upper()}] {self.error}"
@attr.s
class Config:
    """Config for the run."""

    # Integration paths passed on the CLI, or None for a full run.
    specific_integrations: pathlib.Path | None = attr.ib()
    # Repository root directory.
    root: pathlib.Path = attr.ib()
    # Either "validate" or "generate".
    action: str = attr.ib()
    # Whether the requirements plugin should run.
    requirements: bool = attr.ib()
    # Errors not tied to a specific integration.
    errors: list[Error] = attr.ib(factory=list)
    # Scratch space shared between plugins.
    cache: dict[str, Any] = attr.ib(factory=dict)

    def add_error(self, *args: Any, **kwargs: Any) -> None:
        """Add an error."""
        self.errors.append(Error(*args, **kwargs))
@attr.s
class Integration:
    """Represent an integration in our validator."""

    @classmethod
    def load_dir(cls, path: pathlib.Path):
        """Load all integrations in a directory, keyed by domain."""
        assert path.is_dir()
        integrations = {}
        for fil in path.iterdir():
            # Integrations are directories; skip plain files and bytecode caches.
            if fil.is_file() or fil.name == "__pycache__":
                continue
            init = fil / "__init__.py"
            if not init.exists():
                print(
                    f"Warning: {init} missing, skipping directory. "
                    "If this is your development environment, "
                    "you can safely delete this folder."
                )
                continue
            integration = cls(fil)
            integration.load_manifest()
            integrations[integration.domain] = integration
        return integrations

    # attrs fields (declared after load_dir; attrs collects them by order).
    # Directory of the integration.
    path: pathlib.Path = attr.ib()
    # Parsed manifest.json, or None when not loaded / unreadable.
    manifest: dict[str, Any] | None = attr.ib(default=None)
    errors: list[Error] = attr.ib(factory=list)
    warnings: list[Error] = attr.ib(factory=list)

    @property
    def domain(self) -> str:
        """Integration domain (the directory name)."""
        return self.path.name

    @property
    def core(self) -> bool:
        """Return True for a built-in (core) integration."""
        return self.path.as_posix().startswith("homeassistant/components")

    @property
    def disabled(self) -> str | None:
        """Return the disabled reason, if the integration is disabled."""
        return self.manifest.get("disabled")

    @property
    def requirements(self) -> list[str]:
        """List of requirements."""
        return self.manifest.get("requirements", [])

    @property
    def dependencies(self) -> list[str]:
        """List of dependencies."""
        return self.manifest.get("dependencies", [])

    def add_error(self, *args: Any, **kwargs: Any) -> None:
        """Add an error."""
        self.errors.append(Error(*args, **kwargs))

    def add_warning(self, *args: Any, **kwargs: Any) -> None:
        """Add a warning."""
        self.warnings.append(Error(*args, **kwargs))

    def load_manifest(self) -> None:
        """Load manifest.json into self.manifest, recording load errors."""
        manifest_path = self.path / "manifest.json"
        if not manifest_path.is_file():
            self.add_error("model", f"Manifest file {manifest_path} not found")
            return
        try:
            manifest = json.loads(manifest_path.read_text())
        except ValueError as err:
            self.add_error("model", f"Manifest contains invalid JSON: {err}")
            return
        self.manifest = manifest

    def import_pkg(self, platform=None):
        """Import the integration package (or one of its platform modules)."""
        pkg = f"homeassistant.components.{self.domain}"
        if platform is not None:
            pkg += f".{platform}"
        return importlib.import_module(pkg)
import json
from homeassistant.util import slugify
from script.hassfest.manifest import SUPPORTED_IOT_CLASSES
from .const import COMPONENT_DIR
from .error import ExitApp
from .model import Info
# Reusable validator pair: error message + predicate rejecting empty values.
CHECK_EMPTY = ["Cannot be empty", lambda value: value]
def gather_info(arguments) -> Info:
    """Gather the Info needed to scaffold, from CLI flags or interactively."""
    # Domain comes from --integration, develop mode, or an interactive prompt.
    if arguments.integration:
        info = {"domain": arguments.integration}
    elif arguments.develop:
        print("Running in developer mode. Automatically filling in info.")
        print()
        info = {"domain": "develop"}
    else:
        info = _gather_info(
            {
                "domain": {
                    "prompt": "What is the domain?",
                    "validators": [
                        CHECK_EMPTY,
                        [
                            "Domains cannot contain spaces or special characters.",
                            lambda value: value == slugify(value),
                        ],
                    ],
                }
            }
        )
    # An integration is "new" when it has no manifest.json yet.
    info["is_new"] = not (COMPONENT_DIR / info["domain"] / "manifest.json").exists()
    if not info["is_new"]:
        return _load_existing_integration(info["domain"])
    if arguments.develop:
        # Canned answers for developer mode.
        info.update(
            {
                "name": "Develop Hub",
                "codeowner": "@developer",
                "requirement": "aiodevelop==1.2.3",
                "oauth2": True,
                "iot_class": "local_polling",
            }
        )
    else:
        info.update(gather_new_integration(arguments.template == "integration"))
    return Info(**info)
# Shared prompt fragment for yes/no questions: validates the literal answer
# and converts it to a bool.
YES_NO = {
    "validators": [["Type either 'yes' or 'no'", lambda value: value in ("yes", "no")]],
    "converter": lambda value: value == "yes",
}
def gather_new_integration(determine_auth: bool) -> Info:
    """Gather info about new integration from user.

    When determine_auth is True (full integration template), also ask the
    authentication/discovery/OAuth2 questions.
    """
    fields = {
        "name": {
            "prompt": "What is the name of your integration?",
            "validators": [CHECK_EMPTY],
        },
        "codeowner": {
            "prompt": "What is your GitHub handle?",
            "validators": [
                CHECK_EMPTY,
                [
                    'GitHub handles need to start with an "@"',
                    lambda value: value.startswith("@"),
                ],
            ],
        },
        "requirement": {
            "prompt": "What PyPI package and version do you depend on? Leave blank for none.",
            "validators": [
                [
                    "Versions should be pinned using '=='.",
                    lambda value: not value or "==" in value,
                ]
            ],
        },
        "iot_class": {
            "prompt": (
                f"""How will your integration gather data?
Valid values are {', '.join(SUPPORTED_IOT_CLASSES)}
More info @ https://developers.home-assistant.io/docs/creating_integration_manifest#iot-class
"""
            ),
            "validators": [
                [
                    f"You need to pick one of {', '.join(SUPPORTED_IOT_CLASSES)}",
                    lambda value: value in SUPPORTED_IOT_CLASSES,
                ]
            ],
        },
    }
    if determine_auth:
        fields.update(
            {
                "authentication": {
                    "prompt": "Does Safegate Pro need the user to authenticate to control the device/service? (yes/no)",
                    "default": "yes",
                    **YES_NO,
                },
                "discoverable": {
                    "prompt": "Is the device/service discoverable on the local network? (yes/no)",
                    "default": "no",
                    **YES_NO,
                },
                "oauth2": {
                    "prompt": "Can the user authenticate the device using OAuth2? (yes/no)",
                    "default": "no",
                    **YES_NO,
                },
            }
        )
    return _gather_info(fields)
def _load_existing_integration(domain) -> Info:
    """Load an existing integration."""
    integration_dir = COMPONENT_DIR / domain
    if not integration_dir.exists():
        raise ExitApp("Integration does not exist", 1)
    manifest = json.loads((integration_dir / "manifest.json").read_text())
    return Info(domain=domain, name=manifest["name"], is_new=False)
def _gather_info(fields) -> dict:
    """Gather info from user.

    fields maps answer key -> {"prompt", "validators", optional "default",
    optional "converter"}. Raises ExitApp on Ctrl-C / EOF.
    """
    answers = {}
    for key, info in fields.items():
        hint = None
        # Re-prompt until the value passes every validator for this field.
        while key not in answers:
            if hint is not None:
                print()
                print(f"Error: {hint}")
            try:
                print()
                msg = info["prompt"]
                if "default" in info:
                    msg += f" [{info['default']}]"
                value = input(f"{msg}\n> ")
            except (KeyboardInterrupt, EOFError):
                raise ExitApp("Interrupted!", 1)
            value = value.strip()
            # Empty input falls back to the default, when one exists.
            if value == "" and "default" in info:
                value = info["default"]
            hint = None
            for validator_hint, validator in info["validators"]:
                if not validator(value):
                    hint = validator_hint
                    break
            if hint is None:
                if "converter" in info:
                    value = info["converter"](value)
                answers[key] = value
    return answers
from .model import Info
# Template metadata shown after scaffolding: a human readable title, a link to
# the developer docs, and optional extra instructions.
DATA = {
    "config_flow": {
        "title": "Config Flow",
        "docs": "https://developers.home-assistant.io/docs/en/config_entries_config_flow_handler.html",
    },
    "config_flow_discovery": {
        "title": "Discoverable Config Flow",
        "docs": "https://developers.home-assistant.io/docs/en/config_entries_config_flow_handler.html#discoverable-integrations-that-require-no-authentication",
    },
    "config_flow_oauth2": {
        "title": "OAuth2 Config Flow",
        "docs": "https://developers.home-assistant.io/docs/en/next/config_entries_config_flow_handler.html#configuration-via-oauth2",
    },
    "device_action": {
        "title": "Device Action",
        "docs": "https://developers.home-assistant.io/docs/en/device_automation_action.html",
    },
    "device_condition": {
        "title": "Device Condition",
        "docs": "https://developers.home-assistant.io/docs/en/device_automation_condition.html",
    },
    "device_trigger": {
        "title": "Device Trigger",
        "docs": "https://developers.home-assistant.io/docs/en/device_automation_trigger.html",
    },
    "integration": {
        "title": "Integration",
        "docs": "https://developers.home-assistant.io/docs/en/creating_integration_file_structure.html",
    },
    "reproduce_state": {
        "title": "Reproduce State",
        "docs": "https://developers.home-assistant.io/docs/en/reproduce_state_index.html",
        "extra": "You will now need to update the code to make sure that every attribute that can occur in the state will cause the right service to be called.",
    },
    "significant_change": {
        "title": "Significant Change",
        "docs": "https://developers.home-assistant.io/docs/en/significant_change_index.html",
        "extra": "You will now need to update the code to make sure that entities with different device classes are correctly considered.",
    },
}
def print_relevant_docs(template: str, info: Info) -> None:
    """Print relevant docs."""
    data = DATA[template]
    print()
    print("**************************")
    print()
    print()
    print(f"{data['title']} code has been generated")
    print()
    # Report created files and tests with the same formatting.
    for label, created in (
        ("Added the following files:", info.files_added),
        ("Added the following tests:", info.tests_added),
    ):
        if not created:
            continue
        print(label)
        for file in created:
            print(f"- {file}")
        print()
    if info.examples_added:
        print(
            "Because some files already existed, we added the following example files. Please copy the relevant code to the existing files."
        )
        for file in info.examples_added:
            print(f"- {file}")
        print()
    print(
        "The next step is to look at the files and deal with all areas marked as TODO."
    )
    if "extra" in data:
        print()
        print(data["extra"])
from pathlib import Path
from .model import Info
# Root of the scaffold templates shipped alongside this script.
TEMPLATE_DIR = Path(__file__).parent / "templates"
TEMPLATE_INTEGRATION = TEMPLATE_DIR / "integration"
TEMPLATE_TESTS = TEMPLATE_DIR / "tests"
def generate(template: str, info: Info) -> None:
    """Generate a template."""
    print(f"Scaffolding {template} for the {info.domain} integration...")
    _ensure_tests_dir_exists(info)
    # Each template ships an "integration" part and a "tests" part.
    template_root = TEMPLATE_DIR / template
    _generate(template_root / "integration", info.integration_dir, info)
    _generate(template_root / "tests", info.tests_dir, info)
    _custom_tasks(template, info)
    print()
def _generate(src_dir, target_dir, info: Info) -> None:
"""Generate an integration."""
replaces = {"NEW_DOMAIN": info.domain, "NEW_NAME": info.name}
if not target_dir.exists():
target_dir.mkdir()
for source_file in src_dir.glob("**/*"):
content = source_file.read_text()
for to_search, to_replace in replaces.items():
content = content.replace(to_search, to_replace)
target_file = target_dir / source_file.relative_to(src_dir)
# If the target file exists, create our template as EXAMPLE_<filename>.
# Exception: If we are creating a new integration, we can end up running integration base
# and a config flows on top of one another. In that case, we want to override the files.
if not info.is_new and target_file.exists():
new_name = f"EXAMPLE_{target_file.name}"
print(f"File {target_file} already exists, creating {new_name} instead.")
target_file = target_file.parent / new_name
info.examples_added.add(target_file)
elif src_dir.name == "integration":
info.files_added.add(target_file)
else:
info.tests_added.add(target_file)
print(f"Writing {target_file}")
target_file.write_text(content)
def _ensure_tests_dir_exists(info: Info) -> None:
"""Ensure a test dir exists."""
if info.tests_dir.exists():
return
info.tests_dir.mkdir()
print(f"Writing {info.tests_dir / '__init__.py'}")
(info.tests_dir / "__init__.py").write_text(
f'"""Tests for the {info.name} integration."""\n'
)
def _append(path: Path, text):
"""Append some text to a path."""
path.write_text(path.read_text() + text)
def _custom_tasks(template, info: Info) -> None:
    """Handle custom tasks for templates.

    Updates the integration's manifest and/or strings.json depending on
    which template was scaffolded.
    """
    if template == "integration":
        changes = {"codeowners": [info.codeowner], "iot_class": info.iot_class}
        if info.requirement:
            changes["requirements"] = [info.requirement]
        info.update_manifest(**changes)
    elif template == "device_trigger":
        # Merge the default trigger strings into any existing device_automation block.
        info.update_strings(
            device_automation={
                **info.strings().get("device_automation", {}),
                "trigger_type": {
                    "turned_on": "{entity_name} turned on",
                    "turned_off": "{entity_name} turned off",
                },
            }
        )
    elif template == "device_condition":
        info.update_strings(
            device_automation={
                **info.strings().get("device_automation", {}),
                "condition_type": {
                    "is_on": "{entity_name} is on",
                    "is_off": "{entity_name} is off",
                },
            }
        )
    elif template == "device_action":
        info.update_strings(
            device_automation={
                **info.strings().get("device_automation", {}),
                "action_type": {
                    "turn_on": "Turn on {entity_name}",
                    "turn_off": "Turn off {entity_name}",
                },
            }
        )
    elif template == "config_flow":
        # [%key:...%] references are resolved against the common strings catalog.
        info.update_manifest(config_flow=True)
        info.update_strings(
            title=info.name,
            config={
                "step": {
                    "user": {
                        "data": {
                            "host": "[%key:common::config_flow::data::host%]",
                            "username": "[%key:common::config_flow::data::username%]",
                            "password": "[%key:common::config_flow::data::password%]",
                        },
                    }
                },
                "error": {
                    "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
                    "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
                    "unknown": "[%key:common::config_flow::error::unknown%]",
                },
                "abort": {
                    "already_configured": "[%key:common::config_flow::abort::already_configured_device%]"
                },
            },
        )
    elif template == "config_flow_discovery":
        info.update_manifest(config_flow=True)
        info.update_strings(
            title=info.name,
            config={
                "step": {
                    "confirm": {
                        "description": "[%key:common::config_flow::description::confirm_setup%]",
                    }
                },
                "abort": {
                    "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]",
                    "no_devices_found": "[%key:common::config_flow::abort::no_devices_found%]",
                },
            },
        )
    elif template == "config_flow_oauth2":
        # OAuth2 flows require the http integration to be set up.
        info.update_manifest(config_flow=True, dependencies=["http"])
        info.update_strings(
            title=info.name,
            config={
                "step": {
                    "pick_implementation": {
                        "title": "[%key:common::config_flow::title::oauth2_pick_implementation%]"
                    }
                },
                "abort": {
                    "already_configured": "[%key:common::config_flow::abort::already_configured_account%]",
                    "already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]",
                    "oauth_error": "[%key:common::config_flow::abort::oauth2_error%]",
                    "missing_configuration": "[%key:common::config_flow::abort::oauth2_missing_configuration%]",
                    "authorize_url_timeout": "[%key:common::config_flow::abort::oauth2_authorize_url_timeout%]",
                    "no_url_available": "[%key:common::config_flow::abort::oauth2_no_url_available%]",
                },
                "create_entry": {
                    "default": "[%key:common::config_flow::create_entry::authenticated%]"
                },
            },
        )
import argparse
import json
from pathlib import Path
import re
from shutil import rmtree
import sys
from . import download, upload
from .const import INTEGRATIONS_DIR
from .util import get_base_arg_parser
def valid_integration(integration):
    """Test if it's a valid integration."""
    if (INTEGRATIONS_DIR / integration).is_dir():
        return integration
    # argparse turns this into a clean CLI error message.
    raise argparse.ArgumentTypeError(
        f"The integration {integration} does not exist."
    )
def get_arguments() -> argparse.Namespace:
    """Get parsed passed in arguments."""
    # Start from the shared translations arg parser and add script options.
    parser = get_base_arg_parser()
    parser.add_argument(
        "--integration", type=valid_integration, help="Integration to process."
    )
    return parser.parse_args()
def flatten_translations(translations):
    """Flatten all translations."""
    # Iterative depth-first traversal: `frames` holds one items() iterator per
    # nesting level and `path` the keys leading to the current level.
    frames = [iter(translations.items())]
    path = []
    flat = {}
    while frames:
        for key, value in frames[-1]:
            path.append(key)
            if isinstance(value, dict):
                # Descend one level; resume this frame later.
                frames.append(iter(value.items()))
                break
            elif isinstance(value, str):
                # Leaf: join the path with "::" to form the flattened key.
                flat["::".join(path)] = value
                path.pop()
        else:
            # Current level exhausted: drop its frame and its path segment.
            frames.pop()
            if len(path) > 0:
                path.pop()
    return flat
def substitute_translation_references(integration_strings, flattened_translations):
    """Recursively processes all translation strings for the integration."""
    result = {}
    for key, value in integration_strings.items():
        if isinstance(value, dict):
            # Recurse into nested sections.
            result[key] = substitute_translation_references(
                value, flattened_translations
            )
        elif isinstance(value, str):
            result[key] = substitute_reference(value, flattened_translations)
    return result
def substitute_reference(value, flattened_translations):
    """Substitute localization key references in a translation string."""
    matches = re.findall(r"\[\%key:((?:[\w]+|[:]{2})*)\%\]", value)
    if not matches:
        return value
    substituted = value
    for key in matches:
        if key not in flattened_translations:
            # Unknown reference is fatal.
            print(f"Invalid substitution key '{key}' found in string '{value}'")
            sys.exit(1)
        substituted = substituted.replace(
            f"[%key:{key}%]", flattened_translations[key]
        )
    return substituted
def run():
    """Run the script: localize one integration's strings for development."""
    args = get_arguments()
    if args.integration:
        integration = args.integration
    else:
        integration = None
        # Keep prompting until an existing integration directory is given.
        while (
            integration is None
            or not Path(f"homeassistant/components/{integration}").exists()
        ):
            if integration is not None:
                print(f"Integration {integration} doesn't exist!")
                print()
            integration = input("Integration to process: ")
    translations = upload.generate_upload_data()
    if integration not in translations["component"]:
        print("Integration has no strings.json")
        sys.exit(1)
    # Resolve [%key:...%] references against the full flattened catalog.
    flattened_translations = flatten_translations(translations)
    integration_strings = translations["component"][integration]
    translations["component"][integration] = substitute_translation_references(
        integration_strings, flattened_translations
    )
    # Re-create the download dir containing only this integration's English
    # strings, then reuse the normal download machinery to write the
    # per-integration translation files.
    if download.DOWNLOAD_DIR.is_dir():
        rmtree(str(download.DOWNLOAD_DIR))
    download.DOWNLOAD_DIR.mkdir(parents=True)
    (download.DOWNLOAD_DIR / "en.json").write_text(
        json.dumps({"component": {integration: translations["component"][integration]}})
    )
    download.write_integration_translations()
    return 0
"""Merge all translation sources into a single JSON file."""
from __future__ import annotations
import json
import os
import pathlib
import re
import subprocess
from .const import CLI_2_DOCKER_IMAGE, CORE_PROJECT_ID, INTEGRATIONS_DIR
from .error import ExitApp
from .util import get_lokalise_token
# Matches "strings.<suffix>.json" translation file names.
FILENAME_FORMAT = re.compile(r"strings\.(?P<suffix>\w+)\.json")
# Where the Lokalise export is unpacked (one <lang>.json file per language).
DOWNLOAD_DIR = pathlib.Path("build/translations-download").absolute()
def run_download_docker():
    """Run the Docker image to download the translations.

    Exports all languages from Lokalise into DOWNLOAD_DIR as JSON.
    Raises ExitApp when the docker command fails.
    """
    print("Running Docker to download latest translations.")
    run = subprocess.run(
        [
            "docker",
            "run",
            "-v",
            f"{DOWNLOAD_DIR}:/opt/dest/locale",
            "--rm",
            f"lokalise/lokalise-cli-2:{CLI_2_DOCKER_IMAGE}",
            # Lokalise command
            "lokalise2",
            "--token",
            get_lokalise_token(),
            "--project-id",
            CORE_PROJECT_ID,
            "file",
            "download",
            CORE_PROJECT_ID,
            "--original-filenames=false",
            "--replace-breaks=false",
            "--export-empty-as",
            "skip",
            "--format",
            "json",
            "--unzip-to",
            "/opt/dest",
        ]
    )
    print()
    if run.returncode != 0:
        raise ExitApp("Failed to download translations")
def save_json(filename: str, data: list | dict):
"""Save JSON data to a file.
Returns True on success.
"""
data = json.dumps(data, sort_keys=True, indent=4)
with open(filename, "w", encoding="utf-8") as fdesc:
fdesc.write(data)
return True
return False
def get_component_path(lang, component):
    """Return the translation file path for a component.

    Raises ExitApp when the component directory does not exist.
    """
    component_dir = os.path.join("homeassistant", "components", component)
    if not os.path.isdir(component_dir):
        raise ExitApp(
            f"Integration {component} not found under homeassistant/components/"
        )
    return os.path.join(component_dir, "translations", f"{lang}.json")
def get_platform_path(lang, component, platform):
    """Return the translation file path for a platform of a component."""
    filename = f"{platform}.{lang}.json"
    return os.path.join(
        "homeassistant", "components", component, "translations", filename
    )
def get_component_translations(translations):
    """Return a copy of the translations without platform entries."""
    component_only = dict(translations)
    component_only.pop("platform", None)
    return component_only
def save_language_translations(lang, translations):
    """Distribute the translations for this language into the tree."""
    for component, component_translations in translations.get("component", {}).items():
        # Component-level strings (everything except "platform").
        base_translations = get_component_translations(component_translations)
        if base_translations:
            path = get_component_path(lang, component)
            os.makedirs(os.path.dirname(path), exist_ok=True)
            save_json(path, base_translations)
        platforms = component_translations.get("platform")
        if not platforms:
            continue
        # Platform-level strings go to <platform>.<lang>.json files.
        for platform, platform_translations in platforms.items():
            path = get_platform_path(lang, component, platform)
            os.makedirs(os.path.dirname(path), exist_ok=True)
            save_json(path, platform_translations)
def write_integration_translations():
    """Distribute every downloaded language file into the integrations."""
    for lang_file in DOWNLOAD_DIR.glob("*.json"):
        # File stem is the language code, e.g. "en" for en.json.
        language = lang_file.stem
        save_language_translations(language, json.loads(lang_file.read_text()))
def delete_old_translations():
    """Remove every existing translation file from the integrations."""
    for translation_file in INTEGRATIONS_DIR.glob("*/translations/*"):
        translation_file.unlink()
def run():
    """Download translations from Lokalise and distribute them locally."""
    # Make sure the download target exists before Docker mounts it.
    DOWNLOAD_DIR.mkdir(parents=True, exist_ok=True)
    run_download_docker()
    # Replace the previous translation files with the fresh download.
    delete_old_translations()
    write_integration_translations()
    return 0
import json
import pathlib
from pprint import pprint
import re
from .const import CORE_PROJECT_ID, FRONTEND_PROJECT_ID, INTEGRATIONS_DIR
from .lokalise import get_api
FRONTEND_REPO = pathlib.Path("../frontend/")
def create_lookup(results):
    """Create a lookup table keyed by each entry's web key name."""
    lookup = {}
    for key in results:
        lookup[key["key_name"]["web"]] = key
    return lookup
def rename_keys(project_id, to_migrate):
    """Rename keys in a Lokalise project.

    to_migrate is Dict[from_key] = to_key.
    Prompts for interactive confirmation before updating the remote project.
    """
    updates = []
    lokalise = get_api(project_id)
    from_key_data = lokalise.keys_list({"filter_keys": ",".join(to_migrate)})
    if len(from_key_data) != len(to_migrate):
        # Abort when any of the source keys is missing remotely.
        print(
            f"Looking up keys in Lokalise returns {len(from_key_data)} results, expected {len(to_migrate)}"
        )
        return
    from_key_lookup = create_lookup(from_key_data)
    print("Gathering IDs")
    for from_key, to_key in to_migrate.items():
        updates.append(
            {"key_id": from_key_lookup[from_key]["key_id"], "key_name": to_key}
        )
    pprint(updates)
    print()
    # Require explicit confirmation before mutating the remote project.
    while input("Type YES to confirm: ") != "YES":
        pass
    print()
    print("Updating keys")
    pprint(lokalise.keys_bulk_update(updates))
def list_keys_helper(lokalise, keys, params=None, *, validate=True):
    """List keys in chunks so the request doesn't exceed the max URL length.

    params are extra query parameters merged into each chunked request.
    Raises ValueError when validate is True and a chunk returns fewer
    keys than requested.
    """
    # The original used a mutable default `params={}`; use None instead.
    params = params or {}
    results = []
    for i in range(0, len(keys), 100):
        filter_keys = keys[i : i + 100]
        from_key_data = lokalise.keys_list(
            {
                **params,
                "filter_keys": ",".join(filter_keys),
                # Ask for one extra so an oversized result set is detectable.
                "limit": len(filter_keys) + 1,
            }
        )
        if len(from_key_data) == len(filter_keys) or not validate:
            results.extend(from_key_data)
            continue
        # The original printed len(keys) here, which misreports the expected
        # count for any request of more than one chunk.
        print(
            f"Looking up keys in Lokalise returns {len(from_key_data)} results, expected {len(filter_keys)}"
        )
        searched = set(filter_keys)
        returned = set(create_lookup(from_key_data))
        print("Not found:", ", ".join(searched - returned))
        raise ValueError
    return results
def migrate_project_keys_translations(from_project_id, to_project_id, to_migrate):
    """Migrate keys and translations from one Lokalise project to another.

    to_migrate is Dict[from_key] = to_key. Keys already present in the
    target project are skipped; only missing keys are created and have
    their translations copied over.
    """
    from_lokalise = get_api(from_project_id)
    to_lokalise = get_api(to_project_id)
    # Fetch keys in target
    # We are going to skip migrating existing keys
    print("Checking which target keys exist..")
    try:
        # validate=False: partial results are expected when keys are missing.
        to_key_data = list_keys_helper(
            to_lokalise, list(to_migrate.values()), validate=False
        )
    except ValueError:
        return
    existing = set(create_lookup(to_key_data))
    missing = [key for key in to_migrate.values() if key not in existing]
    if not missing:
        print("All keys to migrate exist already, nothing to do")
        return
    # Fetch keys whose translations we're importing
    print("Fetch translations that we're importing..")
    try:
        from_key_data = list_keys_helper(
            from_lokalise,
            [key for key, value in to_migrate.items() if value not in existing],
            {"include_translations": 1},
        )
    except ValueError:
        return
    from_key_lookup = create_lookup(from_key_data)
    print("Creating", ", ".join(missing))
    # Create the missing keys in the target; the response provides the new
    # key IDs needed for the translation bulk update below.
    to_key_lookup = create_lookup(
        to_lokalise.keys_create(
            [{"key_name": key, "platforms": ["web"]} for key in missing]
        )
    )
    updates = []
    for from_key, to_key in to_migrate.items():
        # If it is not in lookup, it already existed, skipping it.
        if to_key not in to_key_lookup:
            continue
        updates.append(
            {
                "key_id": to_key_lookup[to_key]["key_id"],
                "translations": [
                    {
                        "language_iso": from_translation["language_iso"],
                        "translation": from_translation["translation"],
                        "is_reviewed": from_translation["is_reviewed"],
                        "is_fuzzy": from_translation["is_fuzzy"],
                    }
                    for from_translation in from_key_lookup[from_key]["translations"]
                ],
            }
        )
    print("Updating")
    pprint(updates)
    print()
    print()
    pprint(to_lokalise.keys_bulk_update(updates))
def find_and_rename_keys():
    """Find integrations with a config-flow title and rename their keys."""
    to_migrate = {}
    for integration in INTEGRATIONS_DIR.iterdir():
        strings_file = integration / "strings.json"
        if not strings_file.is_file():
            continue
        strings = json.loads(strings_file.read_text())
        if "title" not in strings.get("config", {}):
            continue
        # Move the config-flow title up to the integration level.
        name = integration.name
        to_migrate[f"component::{name}::config::title"] = f"component::{name}::title"
    rename_keys(CORE_PROJECT_ID, to_migrate)
def find_different_languages():
    """Print the difference in supported languages between core and frontend."""
    core_languages = {
        lang["lang_iso"] for lang in get_api(CORE_PROJECT_ID).languages_list()
    }
    frontend_languages = {
        lang["lang_iso"] for lang in get_api(FRONTEND_PROJECT_ID).languages_list()
    }
    print("Core minus frontend", core_languages - frontend_languages)
    print("Frontend minus core", frontend_languages - core_languages)
def interactive_update():
    """Interactively offer to drop the title from integration strings."""
    for integration in INTEGRATIONS_DIR.iterdir():
        strings_file = integration / "strings.json"
        if not strings_file.is_file():
            continue
        strings = json.loads(strings_file.read_text())
        if "title" not in strings:
            continue
        manifest = json.loads((integration / "manifest.json").read_text())
        print("Processing", manifest["name"])
        print("Translation title", strings["title"])
        answer = input("Drop title? (1=yes, 2=no) ")
        if answer == "1":
            del strings["title"]
            strings_file.write_text(json.dumps(strings))
        print()
# Frontend state labels (and legacy state key references) mapped to their
# common key references; applied to every migrated state string.
STATE_REWRITE = {
    "Off": "[%key:common::state::off%]",
    "On": "[%key:common::state::on%]",
    "Unknown": "[%key:common::state::unknown%]",
    "Unavailable": "[%key:common::state::unavailable%]",
    "Open": "[%key:common::state::open%]",
    "Closed": "[%key:common::state::closed%]",
    "Connected": "[%key:common::state::connected%]",
    "Disconnected": "[%key:common::state::disconnected%]",
    "Locked": "[%key:common::state::locked%]",
    "Unlocked": "[%key:common::state::unlocked%]",
    "Active": "[%key:common::state::active%]",
    "active": "[%key:common::state::active%]",
    "Standby": "[%key:common::state::standby%]",
    "Idle": "[%key:common::state::idle%]",
    "idle": "[%key:common::state::idle%]",
    "Paused": "[%key:common::state::paused%]",
    "paused": "[%key:common::state::paused%]",
    "Home": "[%key:common::state::home%]",
    "Away": "[%key:common::state::not_home%]",
    "[%key:state::default::off%]": "[%key:common::state::off%]",
    "[%key:state::default::on%]": "[%key:common::state::on%]",
    "[%key:state::cover::open%]": "[%key:common::state::open%]",
    "[%key:state::cover::closed%]": "[%key:common::state::closed%]",
    "[%key:state::lock::locked%]": "[%key:common::state::locked%]",
    "[%key:state::lock::unlocked%]": "[%key:common::state::unlocked%]",
}
# Frontend state domains that are not migrated at all.
SKIP_DOMAIN = {"default", "scene"}
# Domains whose states are nested one level deeper, per device class.
STATES_WITH_DEV_CLASS = {"binary_sensor", "zwave"}
GROUP_DELETE = {"opening", "closing", "stopped"}  # They don't exist
def find_frontend_states():
    """Migrate state strings from the frontend repo into core integrations.

    Source key -> target key
    Add key to integrations strings.json
    """
    frontend_states = json.loads(
        (FRONTEND_REPO / "src/translations/en.json").read_text()
    )["state"]
    # domain => state object
    to_write = {}
    to_migrate = {}
    for domain, states in frontend_states.items():
        if domain in SKIP_DOMAIN:
            continue
        to_key_base = f"component::{domain}::state"
        from_key_base = f"state::{domain}"
        if domain in STATES_WITH_DEV_CLASS:
            # States are nested one level deeper, keyed by device class.
            domain_to_write = dict(states)
            for device_class, dev_class_states in domain_to_write.items():
                to_device_class = "_" if device_class == "default" else device_class
                for key in dev_class_states:
                    to_migrate[
                        f"{from_key_base}::{device_class}::{key}"
                    ] = f"{to_key_base}::{to_device_class}::{key}"
            # Rewrite "default" device class to _
            if "default" in domain_to_write:
                domain_to_write["_"] = domain_to_write.pop("default")
        else:
            if domain == "group":
                # Drop group states that don't exist (see GROUP_DELETE).
                for key in GROUP_DELETE:
                    states.pop(key)
            # Flat domains are written under the "_" pseudo device class.
            domain_to_write = {"_": states}
            for key in states:
                to_migrate[f"{from_key_base}::{key}"] = f"{to_key_base}::_::{key}"
        # Map out common values with key references (see STATE_REWRITE).
        for dev_class_states in domain_to_write.values():
            for key, value in dev_class_states.copy().items():
                if value in STATE_REWRITE:
                    dev_class_states[key] = STATE_REWRITE[value]
                    continue
                # Rewrite legacy state::<domain>::<key> refs to component refs.
                match = re.match(r"\[\%key:state::(\w+)::(.+)\%\]", value)
                if not match:
                    continue
                dev_class_states[key] = "[%key:component::{}::state::{}%]".format(
                    *match.groups()
                )
        to_write[domain] = domain_to_write
    # Merge the collected state objects into each integration's strings.json.
    for domain, state in to_write.items():
        strings = INTEGRATIONS_DIR / domain / "strings.json"
        if strings.is_file():
            content = json.loads(strings.read_text())
        else:
            content = {}
        content["state"] = state
        strings.write_text(json.dumps(content, indent=2) + "\n")
    pprint(to_migrate)
    print()
    # Require explicit confirmation before mutating the remote projects.
    while input("Type YES to confirm: ") != "YES":
        pass
    migrate_project_keys_translations(FRONTEND_PROJECT_ID, CORE_PROJECT_ID, to_migrate)
def apply_data_references(to_migrate):
    """Replace config-flow data labels with common key references.

    to_migrate maps a data key (e.g. "host") to its common key reference.
    A label is only swapped when it actually mentions the key's first word.
    """
    for strings_file in INTEGRATIONS_DIR.glob("*/strings.json"):
        strings = json.loads(strings_file.read_text())
        steps = strings.get("config", {}).get("step")
        if not steps:
            continue
        changed = False
        for step in steps.values():
            data = step.get("data", {})
            for key, value in data.items():
                if key not in to_migrate or value == to_migrate[key]:
                    continue
                if key.split("_")[0].lower() in value.lower():
                    data[key] = to_migrate[key]
                    changed = True
                elif not value.startswith("[%key"):
                    # Already-referenced values are skipped silently.
                    print(
                        f"{strings_file}: Skipped swapping '{key}': '{value}' does not contain '{key}'"
                    )
        if changed:
            strings_file.write_text(json.dumps(strings, indent=2))
def run():
    """Migrate translations."""
    # Swap hard-coded config-flow data labels for the common key references.
    common_refs = {
        "host": "[%key:common::config_flow::data::host%]",
        "username": "[%key:common::config_flow::data::username%]",
        "password": "[%key:common::config_flow::data::password%]",
        "port": "[%key:common::config_flow::data::port%]",
        "usb_path": "[%key:common::config_flow::data::usb_path%]",
        "access_token": "[%key:common::config_flow::data::access_token%]",
        "api_key": "[%key:common::config_flow::data::api_key%]",
    }
    apply_data_references(common_refs)
    # Rename existing keys to common keys,
    # Old keys have been updated with reference to the common key
    # rename_keys(
    #     CORE_PROJECT_ID,
    #     {
    #         "component::blebox::config::step::user::data::host": "common::config_flow::data::ip",
    #     },
    # )
    # find_frontend_states()
    # find_different_languages()
    return 0
"""Merge all translation sources into a single JSON file."""
import json
import os
import pathlib
import re
import subprocess
from .const import CLI_2_DOCKER_IMAGE, CORE_PROJECT_ID, INTEGRATIONS_DIR
from .error import ExitApp
from .util import get_current_branch, get_lokalise_token
FILENAME_FORMAT = re.compile(r"strings\.(?P<suffix>\w+)\.json")
LOCAL_FILE = pathlib.Path("build/translations-upload.json").absolute()
CONTAINER_FILE = "/opt/src/build/translations-upload.json"
LANG_ISO = "en"
def run_upload_docker():
    """Run the Lokalise CLI Docker image to upload the translations.

    Raises ExitApp when the Docker run exits non-zero.
    """
    print("Running Docker to upload latest translations.")
    run = subprocess.run(
        [
            "docker",
            "run",
            "-v",
            f"{LOCAL_FILE}:{CONTAINER_FILE}",
            "--rm",
            f"lokalise/lokalise-cli-2:{CLI_2_DOCKER_IMAGE}",
            # Lokalise CLI invocation inside the container
            "lokalise2",
            "--token",
            get_lokalise_token(),
            "--project-id",
            CORE_PROJECT_ID,
            "file",
            "upload",
            "--file",
            CONTAINER_FILE,
            "--lang-iso",
            LANG_ISO,
            "--convert-placeholders=false",
            "--replace-modified",
        ],
    )
    print()
    if run.returncode != 0:
        # Fixed copy/paste bug: this is the upload script, but the message
        # previously said "Failed to download translations".
        raise ExitApp("Failed to upload translations")
def generate_upload_data():
    """Collect all integration strings files into one upload payload."""
    translations = json.loads((INTEGRATIONS_DIR.parent / "strings.json").read_text())
    components = translations["component"] = {}
    for path in INTEGRATIONS_DIR.glob(f"*{os.sep}strings*.json"):
        component = path.parent.name
        match = FILENAME_FORMAT.search(path.name)
        target = components.setdefault(component, {})
        if match:
            # Platform file (strings.<platform>.json): nest under "platform".
            target = target.setdefault("platform", {}).setdefault(
                match.group("suffix"), {}
            )
        target.update(json.loads(path.read_text()))
    return translations
def run():
    """Run the script."""
    # Uploads are only allowed from the dev branch (locally or on CI).
    branch_ok = (
        get_current_branch() == "dev" or os.environ.get("AZURE_BRANCH") == "dev"
    )
    if not branch_ok:
        raise ExitApp(
            "Please only run the translations upload script from a clean checkout of dev."
        )
    translations = generate_upload_data()
    LOCAL_FILE.parent.mkdir(parents=True, exist_ok=True)
    LOCAL_FILE.write_text(json.dumps(translations, indent=4, sort_keys=True))
    run_upload_docker()
    return 0
from __future__ import annotations
import asyncio
from collections.abc import Iterable
import os
from typing import Any, cast
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.typing import UNDEFINED, UndefinedType
from homeassistant.loader import Integration, IntegrationNotFound, async_get_integration
import homeassistant.util.package as pkg_util
# mypy: disallow-any-generics
DATA_PIP_LOCK = "pip_lock"
DATA_PKG_CACHE = "pkg_cache"
DATA_INTEGRATIONS_WITH_REQS = "integrations_with_reqs"
CONSTRAINT_FILE = "package_constraints.txt"
DISCOVERY_INTEGRATIONS: dict[str, Iterable[str]] = {
"dhcp": ("dhcp",),
"mqtt": ("mqtt",),
"ssdp": ("ssdp",),
"zeroconf": ("zeroconf", "homekit"),
}
class RequirementsNotFound(HomeAssistantError):
    """Raised when requirements for a component could not be installed."""
    def __init__(self, domain: str, requirements: list[str]) -> None:
        """Initialize a component not found error."""
        super().__init__(f"Requirements for {domain} not found: {requirements}.")
        # Integration domain whose requirements failed to install.
        self.domain = domain
        # The requirement strings that could not be satisfied.
        self.requirements = requirements
async def async_get_integration_with_requirements(
    hass: HomeAssistant, domain: str, done: set[str] | None = None
) -> Integration:
    """Get an integration with all requirements installed, including the dependencies.

    done tracks domains already processed to break dependency cycles.
    This can raise IntegrationNotFound if manifest or integration
    is invalid, RequirementNotFound if there was some type of
    failure to install requirements.
    """
    if done is None:
        done = {domain}
    else:
        done.add(domain)
    integration = await async_get_integration(hass, domain)
    if hass.config.skip_pip:
        return integration
    # Cache holds either a resolved Integration or an asyncio.Event signalling
    # that another task is currently processing the same domain.
    cache = hass.data.get(DATA_INTEGRATIONS_WITH_REQS)
    if cache is None:
        cache = hass.data[DATA_INTEGRATIONS_WITH_REQS] = {}
    int_or_evt: Integration | asyncio.Event | None | UndefinedType = cache.get(
        domain, UNDEFINED
    )
    if isinstance(int_or_evt, asyncio.Event):
        # Another task is working on this domain; wait for it to finish.
        await int_or_evt.wait()
        int_or_evt = cache.get(domain, UNDEFINED)
        # When we have waited and it's UNDEFINED, it doesn't exist
        # We don't cache that it doesn't exist, or else people can't fix it
        # and then restart, because their config will never be valid.
        if int_or_evt is UNDEFINED:
            raise IntegrationNotFound(domain)
    if int_or_evt is not UNDEFINED:
        return cast(Integration, int_or_evt)
    # We are the first task for this domain: publish an Event so concurrent
    # callers wait instead of installing the same requirements twice.
    event = cache[domain] = asyncio.Event()
    try:
        await _async_process_integration(hass, integration, done)
    except Exception:
        # Drop the placeholder on failure so a retry can run, then wake waiters.
        del cache[domain]
        event.set()
        raise
    cache[domain] = integration
    event.set()
    return integration
async def _async_process_integration(
    hass: HomeAssistant, integration: Integration, done: set[str]
) -> None:
    """Install an integration's requirements and recurse into its dependencies."""
    if integration.requirements:
        await async_process_requirements(
            hass, integration.domain, integration.requirements
        )
    deps_to_check = [
        dep
        for dep in integration.dependencies + integration.after_dependencies
        if dep not in done
    ]
    # Integrations that declare discovery info (dhcp/mqtt/ssdp/zeroconf/homekit
    # keys in their manifest) also need the matching discovery integration.
    for check_domain, to_check in DISCOVERY_INTEGRATIONS.items():
        if (
            check_domain not in done
            and check_domain not in deps_to_check
            and any(check in integration.manifest for check in to_check)
        ):
            deps_to_check.append(check_domain)
    if not deps_to_check:
        return
    # Process all dependencies concurrently, collecting their exceptions.
    results = await asyncio.gather(
        *[
            async_get_integration_with_requirements(hass, dep, done)
            for dep in deps_to_check
        ],
        return_exceptions=True,
    )
    for result in results:
        if not isinstance(result, BaseException):
            continue
        # A missing after_dependency of a custom integration is tolerated;
        # everything else is re-raised.
        if not isinstance(result, IntegrationNotFound) or not (
            not integration.is_built_in
            and result.domain in integration.after_dependencies
        ):
            raise result
async def async_process_requirements(
    hass: HomeAssistant, name: str, requirements: list[str]
) -> None:
    """Install the requirements for a component or platform.

    This method is a coroutine. It will raise RequirementsNotFound
    if a requirement can't be satisfied.
    """
    pip_lock = hass.data.get(DATA_PIP_LOCK)
    if pip_lock is None:
        pip_lock = hass.data[DATA_PIP_LOCK] = asyncio.Lock()
    kwargs = pip_kwargs(hass.config.config_dir)

    def _install(req: str) -> bool:
        """Install a single requirement (blocking; run in the executor)."""
        return pkg_util.install_package(req, **kwargs)

    # Serialize pip runs; concurrent installs can interfere with each other.
    async with pip_lock:
        for req in requirements:
            if pkg_util.is_installed(req):
                continue
            # The original re-defined the helper inside the loop on every
            # iteration; it is now defined once above.
            if not await hass.async_add_executor_job(_install, req):
                raise RequirementsNotFound(name, [req])
def pip_kwargs(config_dir: str | None) -> dict[str, Any]:
    """Return keyword arguments for PIP install."""
    is_docker = pkg_util.is_docker_env()
    kwargs: dict[str, Any] = {
        "constraints": os.path.join(os.path.dirname(__file__), CONSTRAINT_FILE),
        "no_cache_dir": is_docker,
    }
    if "WHEELS_LINKS" in os.environ:
        kwargs["find_links"] = os.environ["WHEELS_LINKS"]
    # Install into <config>/deps unless running inside a venv or Docker.
    if config_dir is not None and not pkg_util.is_virtual_env() and not is_docker:
        kwargs["target"] = os.path.join(config_dir, "deps")
    return kwargs
from __future__ import annotations
import abc
import asyncio
from collections.abc import Mapping
from types import MappingProxyType
from typing import Any, TypedDict
import uuid
import voluptuous as vol
from .core import HomeAssistant, callback
from .exceptions import HomeAssistantError
RESULT_TYPE_FORM = "form"
RESULT_TYPE_CREATE_ENTRY = "create_entry"
RESULT_TYPE_ABORT = "abort"
RESULT_TYPE_EXTERNAL_STEP = "external"
RESULT_TYPE_EXTERNAL_STEP_DONE = "external_done"
RESULT_TYPE_SHOW_PROGRESS = "progress"
RESULT_TYPE_SHOW_PROGRESS_DONE = "progress_done"
# Event that is fired when a flow is progressed via external or progress source.
EVENT_DATA_ENTRY_FLOW_PROGRESSED = "data_entry_flow_progressed"
class FlowError(HomeAssistantError):
    """Base error for data entry flows."""
class UnknownHandler(FlowError):
    """Unknown handler specified."""
class UnknownFlow(FlowError):
    """Unknown flow specified."""
class UnknownStep(FlowError):
    """Unknown step specified."""
class AbortFlow(FlowError):
    """Exception to indicate a flow needs to be aborted."""
    def __init__(
        self, reason: str, description_placeholders: dict | None = None
    ) -> None:
        """Initialize an abort flow exception."""
        super().__init__(f"Flow aborted: {reason}")
        # Machine-readable abort reason, passed through to the abort result.
        self.reason = reason
        # Optional placeholder values carried into the abort result.
        self.description_placeholders = description_placeholders
class FlowResult(TypedDict, total=False):
    """Typed result dict returned by flow steps; all keys are optional."""
    version: int
    # One of the RESULT_TYPE_* constants.
    type: str
    flow_id: str
    handler: str
    title: str
    data: Mapping[str, Any]
    # Form/step related fields.
    step_id: str
    data_schema: vol.Schema
    extra: str
    required: bool
    errors: dict[str, str] | None
    description: str | None
    description_placeholders: dict[str, Any] | None
    # Progress / external step fields.
    progress_action: str
    url: str
    # Abort reason.
    reason: str
    context: dict[str, Any]
    result: Any
    last_step: bool | None
    options: Mapping[str, Any]
class FlowManager(abc.ABC):
    """Manage all the flows that are in progress."""
    def __init__(
        self,
        hass: HomeAssistant,
    ) -> None:
        """Initialize the flow manager."""
        self.hass = hass
        # Futures per handler, resolved when a flow's first step finishes.
        self._initializing: dict[str, list[asyncio.Future]] = {}
        # Tasks running _async_init, kept so shutdown can cancel them.
        self._initialize_tasks: dict[str, list[asyncio.Task]] = {}
        # flow_id -> flow handler instance for every flow in progress.
        self._progress: dict[str, Any] = {}
    async def async_wait_init_flow_finish(self, handler: str) -> None:
        """Wait till all flows in progress are initialized."""
        current = self._initializing.get(handler)
        if not current:
            return
        await asyncio.wait(current)
    @abc.abstractmethod
    async def async_create_flow(
        self,
        handler_key: Any,
        *,
        context: dict[str, Any] | None = None,
        data: dict[str, Any] | None = None,
    ) -> FlowHandler:
        """Create a flow for specified handler.

        Handler key is the domain of the component that we want to set up.
        """
    @abc.abstractmethod
    async def async_finish_flow(
        self, flow: FlowHandler, result: FlowResult
    ) -> FlowResult:
        """Finish a config flow and add an entry."""
    async def async_post_init(self, flow: FlowHandler, result: FlowResult) -> None:
        """Entry has finished executing its first step asynchronously."""
    @callback
    def async_progress(self, include_uninitialized: bool = False) -> list[FlowResult]:
        """Return the flows in progress."""
        # Flows whose first step hasn't finished have cur_step None and are
        # hidden unless include_uninitialized is set.
        return [
            {
                "flow_id": flow.flow_id,
                "handler": flow.handler,
                "context": flow.context,
                "step_id": flow.cur_step["step_id"] if flow.cur_step else None,
            }
            for flow in self._progress.values()
            if include_uninitialized or flow.cur_step is not None
        ]
    async def async_init(
        self, handler: str, *, context: dict[str, Any] | None = None, data: Any = None
    ) -> FlowResult:
        """Start a configuration flow."""
        if context is None:
            context = {}
        init_done: asyncio.Future = asyncio.Future()
        self._initializing.setdefault(handler, []).append(init_done)
        # Run init in a task so async_shutdown can cancel it.
        task = asyncio.create_task(self._async_init(init_done, handler, context, data))
        self._initialize_tasks.setdefault(handler, []).append(task)
        try:
            flow, result = await task
        finally:
            self._initialize_tasks[handler].remove(task)
            self._initializing[handler].remove(init_done)
        if result["type"] != RESULT_TYPE_ABORT:
            await self.async_post_init(flow, result)
        return result
    async def _async_init(
        self,
        init_done: asyncio.Future,
        handler: str,
        context: dict,
        data: Any,
    ) -> tuple[FlowHandler, FlowResult]:
        """Run the init in a task to allow it to be canceled at shutdown."""
        flow = await self.async_create_flow(handler, context=context, data=data)
        if not flow:
            raise UnknownFlow("Flow was not created")
        flow.hass = self.hass
        flow.handler = handler
        flow.flow_id = uuid.uuid4().hex
        flow.context = context
        self._progress[flow.flow_id] = flow
        result = await self._async_handle_step(flow, flow.init_step, data, init_done)
        return flow, result
    async def async_shutdown(self) -> None:
        """Cancel any initializing flows."""
        for task_list in self._initialize_tasks.values():
            for task in task_list:
                task.cancel()
    async def async_configure(
        self, flow_id: str, user_input: dict | None = None
    ) -> FlowResult:
        """Continue a configuration flow."""
        flow = self._progress.get(flow_id)
        if flow is None:
            raise UnknownFlow
        cur_step = flow.cur_step
        # Validate user input against the step's schema before handling it.
        if cur_step.get("data_schema") is not None and user_input is not None:
            user_input = cur_step["data_schema"](user_input)
        result = await self._async_handle_step(flow, cur_step["step_id"], user_input)
        if cur_step["type"] in (RESULT_TYPE_EXTERNAL_STEP, RESULT_TYPE_SHOW_PROGRESS):
            # External/progress steps may only transition within their own
            # family of result types.
            if cur_step["type"] == RESULT_TYPE_EXTERNAL_STEP and result["type"] not in (
                RESULT_TYPE_EXTERNAL_STEP,
                RESULT_TYPE_EXTERNAL_STEP_DONE,
            ):
                raise ValueError(
                    "External step can only transition to "
                    "external step or external step done."
                )
            if cur_step["type"] == RESULT_TYPE_SHOW_PROGRESS and result["type"] not in (
                RESULT_TYPE_SHOW_PROGRESS,
                RESULT_TYPE_SHOW_PROGRESS_DONE,
            ):
                raise ValueError(
                    "Show progress can only transition to show progress or show progress done."
                )
            # If the result has changed from last result, fire event to update
            # the frontend.
            if (
                cur_step["step_id"] != result.get("step_id")
                or result["type"] == RESULT_TYPE_SHOW_PROGRESS
            ):
                # Tell frontend to reload the flow state.
                self.hass.bus.async_fire(
                    EVENT_DATA_ENTRY_FLOW_PROGRESSED,
                    {"handler": flow.handler, "flow_id": flow_id, "refresh": True},
                )
        return result
    @callback
    def async_abort(self, flow_id: str) -> None:
        """Abort a flow."""
        if self._progress.pop(flow_id, None) is None:
            raise UnknownFlow
    async def _async_handle_step(
        self,
        flow: Any,
        step_id: str,
        user_input: dict | None,
        step_done: asyncio.Future | None = None,
    ) -> FlowResult:
        """Handle a step of a flow."""
        method = f"async_step_{step_id}"
        if not hasattr(flow, method):
            self._progress.pop(flow.flow_id)
            if step_done:
                step_done.set_result(None)
            raise UnknownStep(
                f"Handler {flow.__class__.__name__} doesn't support step {step_id}"
            )
        try:
            result: FlowResult = await getattr(flow, method)(user_input)
        except AbortFlow as err:
            # Step requested an abort; convert it into an abort result.
            result = _create_abort_data(
                flow.flow_id, flow.handler, err.reason, err.description_placeholders
            )
        # Mark the step as done.
        # We do this before calling async_finish_flow because config entries will hit a
        # circular dependency where async_finish_flow sets up new entry, which needs the
        # integration to be set up, which is waiting for init to be done.
        if step_done:
            step_done.set_result(None)
        if result["type"] not in (
            RESULT_TYPE_FORM,
            RESULT_TYPE_EXTERNAL_STEP,
            RESULT_TYPE_CREATE_ENTRY,
            RESULT_TYPE_ABORT,
            RESULT_TYPE_EXTERNAL_STEP_DONE,
            RESULT_TYPE_SHOW_PROGRESS,
            RESULT_TYPE_SHOW_PROGRESS_DONE,
        ):
            raise ValueError(f"Handler returned incorrect type: {result['type']}")
        if result["type"] in (
            RESULT_TYPE_FORM,
            RESULT_TYPE_EXTERNAL_STEP,
            RESULT_TYPE_EXTERNAL_STEP_DONE,
            RESULT_TYPE_SHOW_PROGRESS,
            RESULT_TYPE_SHOW_PROGRESS_DONE,
        ):
            # Flow continues; remember where we are.
            flow.cur_step = result
            return result
        # We pass a copy of the result because we're mutating our version
        result = await self.async_finish_flow(flow, result.copy())
        # _async_finish_flow may change result type, check it again
        if result["type"] == RESULT_TYPE_FORM:
            flow.cur_step = result
            return result
        # Abort and Success results both finish the flow
        self._progress.pop(flow.flow_id)
        return result
class FlowHandler:
    """Handle the configuration flow of a component."""
    # Set by flow manager
    cur_step: dict[str, str] | None = None
    # While not purely typed, it makes typehinting more useful for us
    # and removes the need for constant None checks or asserts.
    flow_id: str = None  # type: ignore
    hass: HomeAssistant = None  # type: ignore
    handler: str = None  # type: ignore
    # Ensure the attribute has a subscriptable, but immutable, default value.
    context: dict[str, Any] = MappingProxyType({})  # type: ignore
    # Set by _async_create_flow callback
    init_step = "init"
    # Set by developer
    VERSION = 1
    @property
    def source(self) -> str | None:
        """Source that initialized the flow."""
        if not hasattr(self, "context"):
            return None
        return self.context.get("source", None)
    @property
    def show_advanced_options(self) -> bool:
        """If we should show advanced options."""
        if not hasattr(self, "context"):
            return False
        return self.context.get("show_advanced_options", False)
    @callback
    def async_show_form(
        self,
        *,
        step_id: str,
        data_schema: vol.Schema = None,
        errors: dict[str, str] | None = None,
        description_placeholders: dict[str, Any] | None = None,
        last_step: bool | None = None,
    ) -> FlowResult:
        """Return the definition of a form to gather user input."""
        return {
            "type": RESULT_TYPE_FORM,
            "flow_id": self.flow_id,
            "handler": self.handler,
            "step_id": step_id,
            "data_schema": data_schema,
            "errors": errors,
            "description_placeholders": description_placeholders,
            "last_step": last_step,  # Display next or submit button in frontend
        }
    @callback
    def async_create_entry(
        self,
        *,
        title: str,
        data: Mapping[str, Any],
        description: str | None = None,
        description_placeholders: dict | None = None,
    ) -> FlowResult:
        """Finish config flow and create a config entry."""
        return {
            "version": self.VERSION,
            "type": RESULT_TYPE_CREATE_ENTRY,
            "flow_id": self.flow_id,
            "handler": self.handler,
            "title": title,
            "data": data,
            "description": description,
            "description_placeholders": description_placeholders,
        }
    @callback
    def async_abort(
        self, *, reason: str, description_placeholders: dict | None = None
    ) -> FlowResult:
        """Abort the config flow."""
        return _create_abort_data(
            self.flow_id, self.handler, reason, description_placeholders
        )
    @callback
    def async_external_step(
        self, *, step_id: str, url: str, description_placeholders: dict | None = None
    ) -> FlowResult:
        """Return the definition of an external step for the user to take."""
        return {
            "type": RESULT_TYPE_EXTERNAL_STEP,
            "flow_id": self.flow_id,
            "handler": self.handler,
            "step_id": step_id,
            "url": url,
            "description_placeholders": description_placeholders,
        }
    @callback
    def async_external_step_done(self, *, next_step_id: str) -> FlowResult:
        """Mark the external step as done and continue with next_step_id."""
        return {
            "type": RESULT_TYPE_EXTERNAL_STEP_DONE,
            "flow_id": self.flow_id,
            "handler": self.handler,
            "step_id": next_step_id,
        }
    @callback
    def async_show_progress(
        self,
        *,
        step_id: str,
        progress_action: str,
        description_placeholders: dict | None = None,
    ) -> FlowResult:
        """Show a progress message to the user, without user input allowed."""
        return {
            "type": RESULT_TYPE_SHOW_PROGRESS,
            "flow_id": self.flow_id,
            "handler": self.handler,
            "step_id": step_id,
            "progress_action": progress_action,
            "description_placeholders": description_placeholders,
        }
    @callback
    def async_show_progress_done(self, *, next_step_id: str) -> FlowResult:
        """Mark the progress done and continue with next_step_id."""
        return {
            "type": RESULT_TYPE_SHOW_PROGRESS_DONE,
            "flow_id": self.flow_id,
            "handler": self.handler,
            "step_id": next_step_id,
        }
@callback
def _create_abort_data(
    flow_id: str,
    handler: str,
    reason: str,
    description_placeholders: dict | None = None,
) -> FlowResult:
    """Return a flow result describing an aborted flow."""
    # Note: the previous docstring was copy-pasted from the external-step
    # helper and described the wrong thing; the code is unchanged.
    return {
        "type": RESULT_TYPE_ABORT,
        "flow_id": flow_id,
        "handler": handler,
        "reason": reason,
        "description_placeholders": description_placeholders,
    }
from __future__ import annotations
from collections.abc import Generator, Sequence
from typing import TYPE_CHECKING
import attr
if TYPE_CHECKING:
from .core import Context
class HomeAssistantError(Exception):
    """General Safegate Pro exception occurred; base for all custom errors."""
class InvalidEntityFormatError(HomeAssistantError):
    """When an invalid formatted entity is encountered."""
class NoEntitySpecifiedError(HomeAssistantError):
    """When no entity is specified."""
class TemplateError(HomeAssistantError):
    """Error during template rendering."""
    def __init__(self, exception: Exception) -> None:
        """Init the error, embedding the wrapped exception's type and message."""
        super().__init__(f"{exception.__class__.__name__}: {exception}")
@attr.s
class ConditionError(HomeAssistantError):
    """Error during condition evaluation; base class, subclasses implement output()."""
    # The type of the failed condition, such as 'and' or 'numeric_state'
    type: str = attr.ib()
    @staticmethod
    def _indent(indent: int, message: str) -> str:
        """Return the message prefixed with two spaces per indent level."""
        return "  " * indent + message
    def output(self, indent: int) -> Generator:
        """Yield an indented representation."""
        raise NotImplementedError()
    def __str__(self) -> str:
        """Return string representation by joining the output() lines."""
        return "\n".join(list(self.output(indent=0)))
@attr.s
class ConditionErrorMessage(ConditionError):
    """Condition error message.

    Leaf node of a condition error tree: a single human readable message.
    """

    # A message describing this error
    message: str = attr.ib()

    def output(self, indent: int) -> Generator:
        """Yield an indented representation."""
        yield self._indent(indent, f"In '{self.type}' condition: {self.message}")
@attr.s
class ConditionErrorIndex(ConditionError):
    """Condition error with index.

    Wraps the error of one part of a multi-part condition, remembering
    which part failed.
    """

    # The zero-based index of the failed condition, for conditions with multiple parts
    index: int = attr.ib()
    # The total number of parts in this condition, including non-failed parts
    total: int = attr.ib()
    # The error that this error wraps
    error: ConditionError = attr.ib()

    def output(self, indent: int) -> Generator:
        """Yield an indented representation."""
        # Only mention the (1-based) item position when there are several parts.
        if self.total > 1:
            yield self._indent(
                indent, f"In '{self.type}' (item {self.index+1} of {self.total}):"
            )
        else:
            yield self._indent(indent, f"In '{self.type}':")
        # Nested error is indented one level deeper.
        yield from self.error.output(indent + 1)
@attr.s
class ConditionErrorContainer(ConditionError):
    """Condition error with subconditions.

    Flattens several wrapped errors into one representation, all at the
    same indentation level.
    """

    # List of ConditionErrors that this error wraps
    errors: Sequence[ConditionError] = attr.ib()

    def output(self, indent: int) -> Generator:
        """Yield an indented representation."""
        for item in self.errors:
            yield from item.output(indent)
class IntegrationError(HomeAssistantError):
    """Base class for platform and config entry exceptions."""

    def __str__(self) -> str:
        """Return a human readable error, falling back to the chained cause."""
        message = super().__str__()
        if not message:
            message = str(self.__cause__)
        return message
class PlatformNotReady(IntegrationError):
    """Raised to indicate that a platform is not ready to be set up yet."""
class ConfigEntryNotReady(IntegrationError):
    """Raised to indicate that a config entry is not ready to be set up yet."""
class ConfigEntryAuthFailed(IntegrationError):
    """Raised to indicate that a config entry failed to authenticate."""
class InvalidStateError(HomeAssistantError):
    """Raised when an invalid state value is encountered."""
class Unauthorized(HomeAssistantError):
    """When an action is unauthorized."""

    def __init__(
        self,
        context: Context | None = None,
        user_id: str | None = None,
        entity_id: str | None = None,
        config_entry_id: str | None = None,
        perm_category: str | None = None,
        permission: str | None = None,
    ) -> None:
        """Unauthorized error."""
        super().__init__(self.__class__.__name__)
        self.context = context

        # Fall back to the user attached to the context when no explicit
        # user id was supplied.
        if user_id is None and context is not None:
            user_id = context.user_id
        self.user_id = user_id

        self.entity_id = entity_id
        self.config_entry_id = config_entry_id
        # Not every action carries an ID (e.g. adding a config entry); the
        # category then identifies what kind of thing was unauthorized.
        self.perm_category = perm_category
        self.permission = permission
class UnknownUser(Unauthorized):
    """Raised when a call references a user ID that does not exist."""
class ServiceNotFound(HomeAssistantError):
    """Raised when a service is not found."""

    def __init__(self, domain: str, service: str) -> None:
        """Initialize error.

        domain and service identify the missing service and are kept as
        attributes for callers.
        """
        # Bug fix: ``self`` was previously passed as an extra positional
        # argument to ``super().__init__``, so ``args`` contained the
        # exception instance itself (breaking repr/pickling of the error).
        super().__init__(f"Service {domain}.{service} not found")
        self.domain = domain
        self.service = service

    def __str__(self) -> str:
        """Return string representation."""
        return f"Unable to find service {self.domain}.{self.service}"
class MaxLengthExceeded(HomeAssistantError):
    """Raised when a property value has exceeded the max character length."""

    def __init__(self, value: str, property_name: str, max_length: int) -> None:
        """Initialize error.

        The offending value, the property name and the limit are kept as
        attributes for callers.
        """
        # Bug fix: dropped the stray ``self`` that was passed as an extra
        # positional argument to ``super().__init__`` (it polluted ``args``
        # with the exception instance itself).
        super().__init__(
            f"Value {value} for property {property_name} has a max length of "
            f"{max_length} characters"
        )
        self.value = value
        self.property_name = property_name
        self.max_length = max_length
class RequiredParameterMissing(HomeAssistantError):
    """Raised when a required parameter is missing from a function call."""

    def __init__(self, parameter_names: list[str]) -> None:
        """Initialize error.

        parameter_names lists the alternatives of which at least one must
        be provided.
        """
        # Bug fixes: dropped the stray ``self`` previously passed to
        # ``super().__init__`` (it polluted ``args``), and removed dataset
        # metadata junk that was fused onto the final line of this class.
        super().__init__(
            "Call must include at least one of the following parameters: "
            f"{', '.join(parameter_names)}"
        )
        self.parameter_names = parameter_names
from __future__ import annotations
from abc import ABC, abstractmethod
import asyncio
from collections.abc import Coroutine
from dataclasses import dataclass
from itertools import groupby
import logging
from typing import Any, Awaitable, Callable, Iterable, Optional, cast
import voluptuous as vol
from voluptuous.humanize import humanize_error
from homeassistant.components import websocket_api
from homeassistant.const import CONF_ID
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import entity_registry
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.helpers.storage import Store
from homeassistant.util import slugify
# Schema version for persisted storage files.
STORAGE_VERSION = 1
# Delay (in seconds, passed to Store.async_delay_save) before persisting.
SAVE_DELAY = 10

# Change types reported to collection listeners.
CHANGE_ADDED = "added"
CHANGE_UPDATED = "updated"
CHANGE_REMOVED = "removed"
@dataclass
class CollectionChangeSet:
    """Class to represent a change set.

    change_type: One of CHANGE_*
    item_id: The id of the item
    item: The item
    """

    # One of CHANGE_ADDED / CHANGE_UPDATED / CHANGE_REMOVED.
    change_type: str
    # Unique id of the affected item within its collection.
    item_id: str
    # The item's config; for removals this is the config that was removed.
    item: Any
# Per-item listener, awaited once for every individual change.
ChangeListener = Callable[
    [
        # Change type
        str,
        # Item ID
        str,
        # New or removed config
        dict,
    ],
    Awaitable[None],
]

# Batch listener, awaited once with the full list of change sets.
ChangeSetListener = Callable[[Iterable[CollectionChangeSet]], Awaitable[None]]
class CollectionError(HomeAssistantError):
    """Base error type for collection related failures."""
class ItemNotFound(CollectionError):
    """Raised when an item is not found."""

    def __init__(self, item_id: str) -> None:
        """Store the missing item's id and build the error message."""
        message = f"Item {item_id} not found."
        super().__init__(message)
        self.item_id = item_id
class IDManager:
    """Keep track of IDs across different collections."""

    def __init__(self) -> None:
        """Start with no registered collections."""
        self.collections: list[dict[str, Any]] = []

    def add_collection(self, collection: dict[str, Any]) -> None:
        """Register *collection* so its keys count as used IDs."""
        self.collections.append(collection)

    def has_id(self, item_id: str) -> bool:
        """Return True when *item_id* is used by any registered collection."""
        for collection in self.collections:
            if item_id in collection:
                return True
        return False

    def generate_id(self, suggestion: str) -> str:
        """Return a free ID: the slugified suggestion, suffixed if taken."""
        base = slugify(suggestion)
        if not self.has_id(base):
            return base
        # First collision gets suffix _2, then _3, ...
        attempt = 2
        while self.has_id(f"{base}_{attempt}"):
            attempt += 1
        return f"{base}_{attempt}"
class ObservableCollection(ABC):
    """Base collection type that can be observed."""

    def __init__(
        self, logger: logging.Logger, id_manager: IDManager | None = None
    ) -> None:
        """Initialize the base collection."""
        self.logger = logger
        self.id_manager = id_manager if id_manager is not None else IDManager()
        self.data: dict[str, dict] = {}
        self.listeners: list[ChangeListener] = []
        self.change_set_listeners: list[ChangeSetListener] = []
        # Register our storage so IDs stay unique across all collections
        # sharing the same ID manager.
        self.id_manager.add_collection(self.data)

    @callback
    def async_items(self) -> list[dict]:
        """Return a list of all items in the collection."""
        return list(self.data.values())

    @callback
    def async_add_listener(self, listener: ChangeListener) -> None:
        """Register *listener*.

        Will be called with (change_type, item_id, updated_config).
        """
        self.listeners.append(listener)

    @callback
    def async_add_change_set_listener(self, listener: ChangeSetListener) -> None:
        """Register *listener* for full change sets.

        Will be called with [(change_type, item_id, updated_config), ...]
        """
        self.change_set_listeners.append(listener)

    async def notify_changes(self, change_sets: Iterable[CollectionChangeSet]) -> None:
        """Notify every listener of the given changes, concurrently."""
        awaitables = [
            listener(change.change_type, change.item_id, change.item)
            for listener in self.listeners
            for change in change_sets
        ]
        awaitables.extend(
            change_set_listener(change_sets)
            for change_set_listener in self.change_set_listeners
        )
        await asyncio.gather(*awaitables)
class YamlCollection(ObservableCollection):
    """Offer a collection based on static data."""

    async def async_load(self, data: list[dict]) -> None:
        """Load the YAML collection. Overrides existing data."""
        # IDs present before the reload; whatever remains afterwards was removed.
        old_ids = set(self.data)
        change_sets = []
        for item in data:
            item_id = item[CONF_ID]
            if item_id in old_ids:
                old_ids.remove(item_id)
                event = CHANGE_UPDATED
            elif self.id_manager.has_id(item_id):
                # ID is already taken by another collection sharing the manager.
                self.logger.warning("Duplicate ID '%s' detected, skipping", item_id)
                continue
            else:
                event = CHANGE_ADDED
            self.data[item_id] = item
            change_sets.append(CollectionChangeSet(event, item_id, item))
        # Anything left in old_ids was not in the new data: report removals.
        for item_id in old_ids:
            change_sets.append(
                CollectionChangeSet(CHANGE_REMOVED, item_id, self.data.pop(item_id))
            )
        if change_sets:
            await self.notify_changes(change_sets)
class StorageCollection(ObservableCollection):
    """Offer a CRUD interface on top of JSON storage."""

    def __init__(
        self,
        store: Store,
        logger: logging.Logger,
        id_manager: IDManager | None = None,
    ) -> None:
        """Initialize the storage collection."""
        super().__init__(logger, id_manager)
        self.store = store

    @property
    def hass(self) -> HomeAssistant:
        """Safegate Pro object."""
        return self.store.hass

    async def _async_load_data(self) -> dict | None:
        """Load the raw data from the store; None when nothing is persisted."""
        return cast(Optional[dict], await self.store.async_load())

    async def async_load(self) -> None:
        """Load the collection from storage and notify listeners."""
        raw_storage = await self._async_load_data()
        if raw_storage is None:
            raw_storage = {"items": []}
        for item in raw_storage["items"]:
            self.data[item[CONF_ID]] = item
        await self.notify_changes(
            [
                CollectionChangeSet(CHANGE_ADDED, item[CONF_ID], item)
                for item in raw_storage["items"]
            ]
        )

    @abstractmethod
    async def _process_create_data(self, data: dict) -> dict:
        """Validate the config is valid."""

    @callback
    @abstractmethod
    def _get_suggested_id(self, info: dict) -> str:
        """Suggest an ID based on the config."""

    @abstractmethod
    async def _update_data(self, data: dict, update_data: dict) -> dict:
        """Return a new updated data object."""

    async def async_create_item(self, data: dict) -> dict:
        """Create a new item, persist it and notify listeners."""
        item = await self._process_create_data(data)
        # Derive a unique ID from the subclass' suggestion.
        item[CONF_ID] = self.id_manager.generate_id(self._get_suggested_id(item))
        self.data[item[CONF_ID]] = item
        self._async_schedule_save()
        await self.notify_changes(
            [CollectionChangeSet(CHANGE_ADDED, item[CONF_ID], item)]
        )
        return item

    async def async_update_item(self, item_id: str, updates: dict) -> dict:
        """Update an item.

        Raises ItemNotFound when the item does not exist and ValueError
        when attempting to change the item's ID.
        """
        if item_id not in self.data:
            raise ItemNotFound(item_id)
        if CONF_ID in updates:
            raise ValueError("Cannot update ID")
        current = self.data[item_id]
        updated = await self._update_data(current, updates)
        self.data[item_id] = updated
        self._async_schedule_save()
        await self.notify_changes(
            [CollectionChangeSet(CHANGE_UPDATED, item_id, updated)]
        )
        return self.data[item_id]

    async def async_delete_item(self, item_id: str) -> None:
        """Delete an item. Raises ItemNotFound when it does not exist."""
        if item_id not in self.data:
            raise ItemNotFound(item_id)
        item = self.data.pop(item_id)
        self._async_schedule_save()
        await self.notify_changes([CollectionChangeSet(CHANGE_REMOVED, item_id, item)])

    @callback
    def _async_schedule_save(self) -> None:
        """Schedule saving the collection to storage."""
        # Debounced persist: multiple mutations within SAVE_DELAY coalesce.
        self.store.async_delay_save(self._data_to_save, SAVE_DELAY)

    @callback
    def _data_to_save(self) -> dict:
        """Return the collection data to store in a file."""
        return {"items": list(self.data.values())}
class IDLessCollection(ObservableCollection):
    """A collection without IDs."""

    # Monotonically increasing counter used to fabricate unique fake IDs.
    counter = 0

    async def async_load(self, data: list[dict]) -> None:
        """Load the collection. Overrides existing data."""
        # Announce removal of everything previously held, then drop it.
        await self.notify_changes(
            [
                CollectionChangeSet(CHANGE_REMOVED, item_id, item)
                for item_id, item in list(self.data.items())
            ]
        )
        self.data.clear()

        for item in data:
            self.counter += 1
            self.data[f"fakeid-{self.counter}"] = item

        await self.notify_changes(
            [
                CollectionChangeSet(CHANGE_ADDED, item_id, item)
                for item_id, item in self.data.items()
            ]
        )
@callback
def sync_entity_lifecycle(
    hass: HomeAssistant,
    domain: str,
    platform: str,
    entity_component: EntityComponent,
    collection: ObservableCollection,
    create_entity: Callable[[dict], Entity],
) -> None:
    """Map a collection to an entity component.

    Collection adds/updates/removes are mirrored as entity
    creations/config-updates/removals on the given entity component.
    """
    # Entities we created, keyed by collection item id.
    entities = {}
    ent_reg = entity_registry.async_get(hass)

    async def _add_entity(change_set: CollectionChangeSet) -> Entity:
        """Create an entity for an added item and remember it."""
        entities[change_set.item_id] = create_entity(change_set.item)
        return entities[change_set.item_id]

    async def _remove_entity(change_set: CollectionChangeSet) -> None:
        """Remove the entity belonging to a removed item."""
        ent_to_remove = ent_reg.async_get_entity_id(
            domain, platform, change_set.item_id
        )
        if ent_to_remove is not None:
            ent_reg.async_remove(ent_to_remove)
        else:
            # Not in the registry: remove the entity object directly.
            await entities[change_set.item_id].async_remove(force_remove=True)
        entities.pop(change_set.item_id)

    async def _update_entity(change_set: CollectionChangeSet) -> None:
        """Push an updated config to the existing entity."""
        await entities[change_set.item_id].async_update_config(change_set.item)  # type: ignore

    # Dispatch table from change type to its handler coroutine.
    _func_map: dict[
        str, Callable[[CollectionChangeSet], Coroutine[Any, Any, Entity | None]]
    ] = {
        CHANGE_ADDED: _add_entity,
        CHANGE_REMOVED: _remove_entity,
        CHANGE_UPDATED: _update_entity,
    }

    async def _collection_changed(change_sets: Iterable[CollectionChangeSet]) -> None:
        """Handle a collection change."""
        # Create a new bucket every time we have a different change type
        # to ensure operations happen in order. We only group
        # the same change type.
        for _, grouped in groupby(
            change_sets, lambda change_set: change_set.change_type
        ):
            # Only _add_entity returns an entity; filter the Nones out.
            new_entities = [
                entity
                for entity in await asyncio.gather(
                    *[
                        _func_map[change_set.change_type](change_set)
                        for change_set in grouped
                    ]
                )
                if entity is not None
            ]
            if new_entities:
                await entity_component.async_add_entities(new_entities)

    collection.async_add_change_set_listener(_collection_changed)
class StorageCollectionWebsocket:
    """Class to expose storage collection management over websocket.

    Registers list/create/update/delete commands under ``api_prefix`` that
    proxy to the wrapped StorageCollection.
    """

    def __init__(
        self,
        storage_collection: StorageCollection,
        api_prefix: str,
        model_name: str,
        create_schema: dict,
        update_schema: dict,
    ) -> None:
        """Initialize a websocket CRUD."""
        self.storage_collection = storage_collection
        self.api_prefix = api_prefix
        self.model_name = model_name
        self.create_schema = create_schema
        self.update_schema = update_schema

        assert self.api_prefix[-1] != "/", "API prefix should not end in /"

    @property
    def item_id_key(self) -> str:
        """Return the message key that carries the item ID."""
        return f"{self.model_name}_id"

    @callback
    def async_setup(
        self,
        hass: HomeAssistant,
        *,
        create_list: bool = True,
        create_create: bool = True,
    ) -> None:
        """Set up the websocket commands."""
        if create_list:
            websocket_api.async_register_command(
                hass,
                f"{self.api_prefix}/list",
                self.ws_list_item,
                websocket_api.BASE_COMMAND_MESSAGE_SCHEMA.extend(
                    {vol.Required("type"): f"{self.api_prefix}/list"}
                ),
            )

        if create_create:
            websocket_api.async_register_command(
                hass,
                f"{self.api_prefix}/create",
                websocket_api.require_admin(
                    websocket_api.async_response(self.ws_create_item)
                ),
                websocket_api.BASE_COMMAND_MESSAGE_SCHEMA.extend(
                    {
                        **self.create_schema,
                        vol.Required("type"): f"{self.api_prefix}/create",
                    }
                ),
            )

        websocket_api.async_register_command(
            hass,
            f"{self.api_prefix}/update",
            websocket_api.require_admin(
                websocket_api.async_response(self.ws_update_item)
            ),
            websocket_api.BASE_COMMAND_MESSAGE_SCHEMA.extend(
                {
                    **self.update_schema,
                    vol.Required("type"): f"{self.api_prefix}/update",
                    vol.Required(self.item_id_key): str,
                }
            ),
        )

        websocket_api.async_register_command(
            hass,
            f"{self.api_prefix}/delete",
            websocket_api.require_admin(
                websocket_api.async_response(self.ws_delete_item)
            ),
            websocket_api.BASE_COMMAND_MESSAGE_SCHEMA.extend(
                {
                    vol.Required("type"): f"{self.api_prefix}/delete",
                    vol.Required(self.item_id_key): str,
                }
            ),
        )

    def ws_list_item(
        self, hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict
    ) -> None:
        """List items."""
        connection.send_result(msg["id"], self.storage_collection.async_items())

    async def ws_create_item(
        self, hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict
    ) -> None:
        """Create a item."""
        try:
            # Strip websocket envelope fields; the rest is the item config.
            data = dict(msg)
            data.pop("id")
            data.pop("type")
            item = await self.storage_collection.async_create_item(data)
            connection.send_result(msg["id"], item)
        except vol.Invalid as err:
            connection.send_error(
                msg["id"],
                websocket_api.const.ERR_INVALID_FORMAT,
                humanize_error(data, err),
            )
        except ValueError as err:
            connection.send_error(
                msg["id"], websocket_api.const.ERR_INVALID_FORMAT, str(err)
            )

    async def ws_update_item(
        self, hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict
    ) -> None:
        """Update a item."""
        data = dict(msg)
        msg_id = data.pop("id")
        item_id = data.pop(self.item_id_key)
        data.pop("type")

        try:
            item = await self.storage_collection.async_update_item(item_id, data)
            connection.send_result(msg_id, item)
        except ItemNotFound:
            # Consistency fix: use the already-extracted msg_id everywhere
            # instead of mixing msg_id and msg["id"] (same value).
            connection.send_error(
                msg_id,
                websocket_api.const.ERR_NOT_FOUND,
                f"Unable to find {self.item_id_key} {item_id}",
            )
        except vol.Invalid as err:
            connection.send_error(
                msg_id,
                websocket_api.const.ERR_INVALID_FORMAT,
                humanize_error(data, err),
            )
        except ValueError as err:
            connection.send_error(
                msg_id, websocket_api.const.ERR_INVALID_FORMAT, str(err)
            )

    async def ws_delete_item(
        self, hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict
    ) -> None:
        """Delete a item."""
        try:
            await self.storage_collection.async_delete_item(msg[self.item_id_key])
        except ItemNotFound:
            connection.send_error(
                msg["id"],
                websocket_api.const.ERR_NOT_FOUND,
                f"Unable to find {self.item_id_key} {msg[self.item_id_key]}",
            )
            # Bug fix: without this return, BOTH an error and a result were
            # sent for the same message id when the item was missing.
            return

        connection.send_result(msg["id"])
from __future__ import annotations
from typing import Any, Callable, TypedDict
from homeassistant import core, setup
from homeassistant.core import CALLBACK_TYPE
from homeassistant.loader import bind_hass
from .dispatcher import async_dispatcher_connect, async_dispatcher_send
from .typing import ConfigType, DiscoveryInfoType
# Dispatcher signal, formatted with the discovery service name.
SIGNAL_PLATFORM_DISCOVERED = "discovery.platform_discovered_{}"
# Per-component service name used for dynamic platform loading.
EVENT_LOAD_PLATFORM = "load_platform.{}"
ATTR_PLATFORM = "platform"
ATTR_DISCOVERED = "discovered"
# mypy: disallow-any-generics
class DiscoveryDict(TypedDict):
    """Discovery data."""

    # Discovery service name the payload belongs to.
    service: str
    # Platform to load, or None for plain service discovery.
    platform: str | None
    # Arbitrary discovery payload handed to the listener.
    discovered: DiscoveryInfoType | None
@core.callback
@bind_hass
def async_listen(
    hass: core.HomeAssistant,
    service: str,
    callback: CALLBACK_TYPE,
) -> None:
    """Set up listener for discovery of specific service.

    Service can be a string or a list/tuple.
    """
    job = core.HassJob(callback)

    async def discovery_event_listener(discovered: DiscoveryDict) -> None:
        """Forward a discovery event to the registered callback."""
        task = hass.async_run_hass_job(
            job, discovered["service"], discovered["discovered"]
        )
        if task is not None:
            await task

    async_dispatcher_connect(
        hass, SIGNAL_PLATFORM_DISCOVERED.format(service), discovery_event_listener
    )
@bind_hass
def discover(
    hass: core.HomeAssistant,
    service: str,
    discovered: DiscoveryInfoType,
    component: str,
    hass_config: ConfigType,
) -> None:
    """Fire discovery event. Can ensure a component is loaded."""
    # Thread-safe entry point: schedule the async variant on the event loop.
    coro = async_discover(hass, service, discovered, component, hass_config)
    hass.add_job(coro)  # type: ignore
@bind_hass
async def async_discover(
    hass: core.HomeAssistant,
    service: str,
    discovered: DiscoveryInfoType | None,
    component: str | None,
    hass_config: ConfigType,
) -> None:
    """Fire discovery event. Can ensure a component is loaded."""
    if component is not None and component not in hass.config.components:
        await setup.async_setup_component(hass, component, hass_config)

    payload: DiscoveryDict = {
        "service": service,
        # Plain service discovery carries no platform.
        "platform": None,
        "discovered": discovered,
    }
    async_dispatcher_send(hass, SIGNAL_PLATFORM_DISCOVERED.format(service), payload)
@bind_hass
def async_listen_platform(
    hass: core.HomeAssistant,
    component: str,
    callback: Callable[[str, dict[str, Any] | None], Any],
) -> None:
    """Register a platform loader listener.

    This method must be run in the event loop.
    """
    service = EVENT_LOAD_PLATFORM.format(component)
    job = core.HassJob(callback)

    async def discovery_platform_listener(discovered: DiscoveryDict) -> None:
        """Forward a platform discovery event to the registered callback."""
        platform = discovered["platform"]
        # Ignore events that do not name a platform.
        if not platform:
            return

        task = hass.async_run_hass_job(job, platform, discovered.get("discovered"))
        if task is not None:
            await task

    async_dispatcher_connect(
        hass, SIGNAL_PLATFORM_DISCOVERED.format(service), discovery_platform_listener
    )
@bind_hass
def load_platform(
    hass: core.HomeAssistant,
    component: str,
    platform: str,
    discovered: DiscoveryInfoType,
    hass_config: ConfigType,
) -> None:
    """Load a component and platform dynamically."""
    # Thread-safe entry point: schedule the async variant on the event loop.
    coro = async_load_platform(hass, component, platform, discovered, hass_config)
    hass.add_job(coro)  # type: ignore
@bind_hass
async def async_load_platform(
    hass: core.HomeAssistant,
    component: str,
    platform: str,
    discovered: DiscoveryInfoType,
    hass_config: ConfigType,
) -> None:
    """Load a component and platform dynamically.

    Use `async_listen_platform` to register a callback for these events.

    Warning: Do not await this inside a setup method to avoid a dead lock.
    Use `hass.async_create_task(async_load_platform(..))` instead.

    Fix: stray dataset metadata fused onto the final line was removed
    (it broke the syntax of this function).
    """
    assert hass_config, "You need to pass in the real hass config"

    setup_success = True
    if component not in hass.config.components:
        setup_success = await setup.async_setup_component(hass, component, hass_config)

    # No need to send signal if we could not set up component
    if not setup_success:
        return

    service = EVENT_LOAD_PLATFORM.format(component)
    data: DiscoveryDict = {
        "service": service,
        "platform": platform,
        "discovered": discovered,
    }
    async_dispatcher_send(hass, SIGNAL_PLATFORM_DISCOVERED.format(service), data)
from __future__ import annotations
from typing import Any, Callable, cast
import voluptuous as vol
from homeassistant.const import CONF_MODE, CONF_UNIT_OF_MEASUREMENT
from homeassistant.util import decorator
# Registry mapping selector type names to their Selector subclasses.
SELECTORS = decorator.Registry()
def validate_selector(config: Any) -> dict:
    """Validate a selector."""
    if not isinstance(config, dict):
        raise vol.Invalid("Expected a dictionary")

    if len(config) != 1:
        raise vol.Invalid(f"Only one type can be specified. Found {', '.join(config)}")

    # Exactly one key: the selector type.
    (selector_type,) = config

    selector_class = SELECTORS.get(selector_type)
    if selector_class is None:
        raise vol.Invalid(f"Unknown selector type {selector_type} found")

    selector_config = config[selector_type]

    # Selectors can be empty
    if selector_config is None:
        return {selector_type: {}}

    return {
        selector_type: cast(dict, selector_class.CONFIG_SCHEMA(selector_config))
    }
class Selector:
    """Base class for selectors."""

    # Subclasses must provide a callable schema validating their config.
    CONFIG_SCHEMA: Callable
@SELECTORS.register("entity")
class EntitySelector(Selector):
"""Selector of a single entity."""
CONFIG_SCHEMA = vol.Schema(
{
# Integration that provided the entity
vol.Optional("integration"): str,
# Domain the entity belongs to
vol.Optional("domain"): str,
# Device class of the entity
vol.Optional("device_class"): str,
}
)
@SELECTORS.register("device")
class DeviceSelector(Selector):
"""Selector of a single device."""
CONFIG_SCHEMA = vol.Schema(
{
# Integration linked to it with a config entry
vol.Optional("integration"): str,
# Manufacturer of device
vol.Optional("manufacturer"): str,
# Model of device
vol.Optional("model"): str,
# Device has to contain entities matching this selector
vol.Optional("entity"): EntitySelector.CONFIG_SCHEMA,
}
)
@SELECTORS.register("area")
class AreaSelector(Selector):
"""Selector of a single area."""
CONFIG_SCHEMA = vol.Schema(
{
vol.Optional("entity"): vol.Schema(
{
vol.Optional("domain"): str,
vol.Optional("device_class"): str,
vol.Optional("integration"): str,
}
),
vol.Optional("device"): vol.Schema(
{
vol.Optional("integration"): str,
vol.Optional("manufacturer"): str,
vol.Optional("model"): str,
}
),
}
)
@SELECTORS.register("number")
class NumberSelector(Selector):
"""Selector of a numeric value."""
CONFIG_SCHEMA = vol.Schema(
{
vol.Required("min"): vol.Coerce(float),
vol.Required("max"): vol.Coerce(float),
vol.Optional("step", default=1): vol.All(
vol.Coerce(float), vol.Range(min=1e-3)
),
vol.Optional(CONF_UNIT_OF_MEASUREMENT): str,
vol.Optional(CONF_MODE, default="slider"): vol.In(["box", "slider"]),
}
)
@SELECTORS.register("addon")
class AddonSelector(Selector):
"""Selector of a add-on."""
CONFIG_SCHEMA = vol.Schema({})
@SELECTORS.register("boolean")
class BooleanSelector(Selector):
"""Selector of a boolean value."""
CONFIG_SCHEMA = vol.Schema({})
@SELECTORS.register("time")
class TimeSelector(Selector):
"""Selector of a time value."""
CONFIG_SCHEMA = vol.Schema({})
@SELECTORS.register("target")
class TargetSelector(Selector):
"""Selector of a target value (area ID, device ID, entity ID etc).
Value should follow cv.ENTITY_SERVICE_FIELDS format.
"""
CONFIG_SCHEMA = vol.Schema(
{
vol.Optional("entity"): vol.Schema(
{
vol.Optional("domain"): str,
vol.Optional("device_class"): str,
vol.Optional("integration"): str,
}
),
vol.Optional("device"): vol.Schema(
{
vol.Optional("integration"): str,
vol.Optional("manufacturer"): str,
vol.Optional("model"): str,
}
),
}
)
@SELECTORS.register("action")
class ActionSelector(Selector):
"""Selector of an action sequence (script syntax)."""
CONFIG_SCHEMA = vol.Schema({})
@SELECTORS.register("object")
class ObjectSelector(Selector):
"""Selector for an arbitrary object."""
CONFIG_SCHEMA = vol.Schema({})
@SELECTORS.register("text")
class StringSelector(Selector):
"""Selector for a multi-line text string."""
CONFIG_SCHEMA = vol.Schema({vol.Optional("multiline", default=False): bool})
@SELECTORS.register("select")
class SelectSelector(Selector):
"""Selector for an single-choice input select."""
CONFIG_SCHEMA = vol.Schema(
{vol.Required("options"): vol.All([str], vol.Length(min=1))}
) | /safegate_pro-2021.7.6-py3-none-any.whl/homeassistant/helpers/selector.py | 0.90447 | 0.203055 | selector.py | pypi |
from __future__ import annotations
from types import MappingProxyType
from typing import Any, Callable, Optional, Union
from homeassistant.const import STATE_UNAVAILABLE, STATE_UNKNOWN
from homeassistant.core import HomeAssistant, State, callback
from .integration_platform import async_process_integration_platforms
PLATFORM = "significant_change"
DATA_FUNCTIONS = "significant_change"
CheckTypeFunc = Callable[
[
HomeAssistant,
str,
Union[dict, MappingProxyType],
str,
Union[dict, MappingProxyType],
],
Optional[bool],
]
ExtraCheckTypeFunc = Callable[
[
HomeAssistant,
str,
Union[dict, MappingProxyType],
Any,
str,
Union[dict, MappingProxyType],
Any,
],
Optional[bool],
]
async def create_checker(
    hass: HomeAssistant,
    _domain: str,
    extra_significant_check: ExtraCheckTypeFunc | None = None,
) -> SignificantlyChangedChecker:
    """Create a significantly changed checker for a domain."""
    # Make sure the per-domain check functions are loaded first.
    await _initialize(hass)
    checker = SignificantlyChangedChecker(hass, extra_significant_check)
    return checker
# Marked as singleton so multiple calls all wait for same output.
async def _initialize(hass: HomeAssistant) -> None:
    """Collect every significant_change platform's check function once."""
    if DATA_FUNCTIONS in hass.data:
        return

    hass.data[DATA_FUNCTIONS] = functions = {}

    async def process_platform(
        hass: HomeAssistant, component_name: str, platform: Any
    ) -> None:
        """Store one platform's check function under its component name."""
        functions[component_name] = platform.async_check_significant_change

    await async_process_integration_platforms(hass, PLATFORM, process_platform)
def either_one_none(val1: Any | None, val2: Any | None) -> bool:
    """Test if exactly one value is None."""
    # XOR of the two is-None checks: true iff exactly one side is None.
    return (val1 is None) != (val2 is None)
def check_numeric_changed(
    val1: int | float | None,
    val2: int | float | None,
    change: int | float,
) -> bool:
    """Check if two numeric values have changed by at least ``change``."""
    if val1 is None and val2 is None:
        return False

    # Inlined helper: exactly one of the two values being None is a change.
    if (val1 is None) != (val2 is None):
        return True

    assert val1 is not None
    assert val2 is not None
    return abs(val1 - val2) >= change
class SignificantlyChangedChecker:
    """Class to keep track of entities to see if they have significantly changed.

    Will always compare the entity to the last entity that was considered
    significant. Fix: stray dataset metadata fused onto the final line was
    removed (it broke the syntax of this class).
    """

    def __init__(
        self,
        hass: HomeAssistant,
        extra_significant_check: ExtraCheckTypeFunc | None = None,
    ) -> None:
        """Test if an entity has significantly changed."""
        self.hass = hass
        # Last significant (state, extra_arg) per entity_id.
        self.last_approved_entities: dict[str, tuple[State, Any]] = {}
        self.extra_significant_check = extra_significant_check

    @callback
    def async_is_significant_change(
        self, new_state: State, *, extra_arg: Any | None = None
    ) -> bool:
        """Return if this was a significant change.

        Extra kwargs are passed to the extra significant checker.
        """
        old_data: tuple[State, Any] | None = self.last_approved_entities.get(
            new_state.entity_id
        )

        # First state change is always ok to report
        if old_data is None:
            self.last_approved_entities[new_state.entity_id] = (new_state, extra_arg)
            return True

        old_state, old_extra_arg = old_data

        # Handle state unknown or unavailable
        if new_state.state in (STATE_UNKNOWN, STATE_UNAVAILABLE):
            if new_state.state == old_state.state:
                return False

            self.last_approved_entities[new_state.entity_id] = (new_state, extra_arg)
            return True

        # If last state was unknown/unavailable, also significant.
        if old_state.state in (STATE_UNKNOWN, STATE_UNAVAILABLE):
            self.last_approved_entities[new_state.entity_id] = (new_state, extra_arg)
            return True

        functions: dict[str, CheckTypeFunc] | None = self.hass.data.get(DATA_FUNCTIONS)

        if functions is None:
            raise RuntimeError("Significant Change not initialized")

        check_significantly_changed = functions.get(new_state.domain)

        # The domain's platform check can veto the change outright.
        if check_significantly_changed is not None:
            result = check_significantly_changed(
                self.hass,
                old_state.state,
                old_state.attributes,
                new_state.state,
                new_state.attributes,
            )

            if result is False:
                return False

        # The caller-supplied extra check can also veto the change.
        if self.extra_significant_check is not None:
            result = self.extra_significant_check(
                self.hass,
                old_state.state,
                old_state.attributes,
                old_extra_arg,
                new_state.state,
                new_state.attributes,
                extra_arg,
            )

            if result is False:
                return False

        # Result is either True or None.
        # None means the function doesn't know. For now assume it's True
        self.last_approved_entities[new_state.entity_id] = (
            new_state,
            extra_arg,
        )
        return True
from __future__ import annotations
from abc import ABC
import asyncio
from collections.abc import Awaitable, Iterable, Mapping
from datetime import datetime, timedelta
import functools as ft
import logging
import math
import sys
from timeit import default_timer as timer
from typing import Any, TypedDict, final
from homeassistant.config import DATA_CUSTOMIZE
from homeassistant.const import (
ATTR_ASSUMED_STATE,
ATTR_DEVICE_CLASS,
ATTR_ENTITY_PICTURE,
ATTR_FRIENDLY_NAME,
ATTR_ICON,
ATTR_SUPPORTED_FEATURES,
ATTR_UNIT_OF_MEASUREMENT,
DEVICE_DEFAULT_NAME,
STATE_OFF,
STATE_ON,
STATE_UNAVAILABLE,
STATE_UNKNOWN,
TEMP_CELSIUS,
TEMP_FAHRENHEIT,
)
from homeassistant.core import CALLBACK_TYPE, Context, HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError, NoEntitySpecifiedError
from homeassistant.helpers import entity_registry as er
from homeassistant.helpers.entity_platform import EntityPlatform
from homeassistant.helpers.entity_registry import RegistryEntry
from homeassistant.helpers.event import Event, async_track_entity_registry_updated_event
from homeassistant.helpers.typing import StateType
from homeassistant.loader import bind_hass
from homeassistant.util import dt as dt_util, ensure_unique_string, slugify
_LOGGER = logging.getLogger(__name__)
# Seconds an update may take before it is considered slow.
SLOW_UPDATE_WARNING = 10
# hass.data key holding the per-entity source info (see entity_sources).
DATA_ENTITY_SOURCE = "entity_info"
# Possible origins of an entity registration.
SOURCE_CONFIG_ENTRY = "config_entry"
SOURCE_PLATFORM_CONFIG = "platform_config"
# Used when converting float states to string: limit precision according to machine
# epsilon to make the string representation readable
FLOAT_PRECISION = abs(int(math.floor(math.log10(abs(sys.float_info.epsilon))))) - 1
@callback
@bind_hass
def entity_sources(hass: HomeAssistant) -> dict[str, dict[str, str]]:
    """Get the entity sources."""
    # Empty dict when nothing has registered entity sources yet.
    return hass.data.get(DATA_ENTITY_SOURCE, {})
def generate_entity_id(
    entity_id_format: str,
    name: str | None,
    current_ids: list[str] | None = None,
    hass: HomeAssistant | None = None,
) -> str:
    """Generate a unique entity ID based on given entity IDs or used IDs.

    Synchronous wrapper; delegates directly to async_generate_entity_id,
    which performs no awaiting.
    """
    return async_generate_entity_id(entity_id_format, name, current_ids, hass)
@callback
def async_generate_entity_id(
    entity_id_format: str,
    name: str | None,
    current_ids: Iterable[str] | None = None,
    hass: HomeAssistant | None = None,
) -> str:
    """Generate a unique entity ID based on given entity IDs or used IDs.

    When ``current_ids`` is given, uniqueness is checked against that
    collection; otherwise the hass state machine is consulted.
    """
    base = entity_id_format.format(slugify((name or DEVICE_DEFAULT_NAME).lower()))
    if current_ids is not None:
        return ensure_unique_string(base, current_ids)
    if hass is None:
        raise ValueError("Missing required parameter current_ids or hass")
    # Probe the state machine: first the plain id, then _2, _3, ...
    candidate = base
    suffix = 1
    while not hass.states.async_available(candidate):
        suffix += 1
        candidate = f"{base}_{suffix}"
    return candidate
def get_capability(hass: HomeAssistant, entity_id: str, capability: str) -> Any | None:
    """Get a capability attribute of an entity.

    First try the statemachine, then the entity registry.

    Raises HomeAssistantError if the entity is known to neither.
    """
    if (state := hass.states.get(entity_id)) is not None:
        return state.attributes.get(capability)
    entry = er.async_get(hass).async_get(entity_id)
    if entry is None:
        raise HomeAssistantError(f"Unknown entity {entity_id}")
    return entry.capabilities.get(capability) if entry.capabilities else None
def get_device_class(hass: HomeAssistant, entity_id: str) -> str | None:
    """Get device class of an entity.

    First try the statemachine, then the entity registry.

    Raises HomeAssistantError if the entity is known to neither.
    """
    if (state := hass.states.get(entity_id)) is not None:
        return state.attributes.get(ATTR_DEVICE_CLASS)
    entry = er.async_get(hass).async_get(entity_id)
    if entry is None:
        raise HomeAssistantError(f"Unknown entity {entity_id}")
    return entry.device_class
def get_supported_features(hass: HomeAssistant, entity_id: str) -> int:
    """Get supported features for an entity, defaulting to 0.

    First try the statemachine, then the entity registry.

    Raises HomeAssistantError if the entity is known to neither.
    """
    if (state := hass.states.get(entity_id)) is not None:
        return state.attributes.get(ATTR_SUPPORTED_FEATURES, 0)
    entry = er.async_get(hass).async_get(entity_id)
    if entry is None:
        raise HomeAssistantError(f"Unknown entity {entity_id}")
    return entry.supported_features or 0
def get_unit_of_measurement(hass: HomeAssistant, entity_id: str) -> str | None:
    """Get unit of measurement of an entity.

    First try the statemachine, then the entity registry.

    Raises HomeAssistantError if the entity is known to neither.
    """
    if (state := hass.states.get(entity_id)) is not None:
        return state.attributes.get(ATTR_UNIT_OF_MEASUREMENT)
    entry = er.async_get(hass).async_get(entity_id)
    if entry is None:
        raise HomeAssistantError(f"Unknown entity {entity_id}")
    return entry.unit_of_measurement
class DeviceInfo(TypedDict, total=False):
    """Entity device information for device registry."""
    # total=False: every key is optional; integrations supply what they know.
    name: str
    # Sets of (type, value) pairs used to match/create the device in the
    # device registry -- presumably (CONNECTION_*, address) and
    # (domain, identifier); verify against device_registry documentation.
    connections: set[tuple[str, str]]
    identifiers: set[tuple[str, str]]
    manufacturer: str
    model: str
    suggested_area: str
    sw_version: str
    # Identifier tuple of the upstream device this one is reached through.
    via_device: tuple[str, str]
    entry_type: str | None
    # default_* fields: fallback values -- presumably only applied when no
    # other source has provided the plain field; TODO confirm.
    default_name: str
    default_manufacturer: str
    default_model: str
class Entity(ABC):
    """An abstract class for Safegate Pro entities."""
    # SAFE TO OVERWRITE
    # The properties and methods here are safe to overwrite when inheriting
    # this class. These may be used to customize the behavior of the entity.
    entity_id: str = None  # type: ignore
    # Owning hass instance. Will be set by EntityPlatform
    # While not purely typed, it makes typehinting more useful for us
    # and removes the need for constant None checks or asserts.
    hass: HomeAssistant = None  # type: ignore
    # Owning platform instance. Will be set by EntityPlatform
    platform: EntityPlatform | None = None
    # If we reported if this entity was slow
    _slow_reported = False
    # If we reported this entity is updated while disabled
    _disabled_reported = False
    # Protect for multiple updates
    _update_staged = False
    # Process updates in parallel
    parallel_updates: asyncio.Semaphore | None = None
    # Entry in the entity registry
    registry_entry: RegistryEntry | None = None
    # Hold list for functions to call on remove.
    _on_remove: list[CALLBACK_TYPE] | None = None
    # Context
    _context: Context | None = None
    _context_set: datetime | None = None
    # If entity is added to an entity platform
    _added = False
    # Entity Properties
    # Class-level defaults backing the properties below; subclasses may either
    # override the property or simply assign the matching _attr_* attribute.
    _attr_assumed_state: bool = False
    _attr_available: bool = True
    _attr_context_recent_time: timedelta = timedelta(seconds=5)
    _attr_device_class: str | None = None
    _attr_device_info: DeviceInfo | None = None
    _attr_entity_picture: str | None = None
    _attr_entity_registry_enabled_default: bool = True
    _attr_extra_state_attributes: Mapping[str, Any] | None = None
    _attr_force_update: bool = False
    _attr_icon: str | None = None
    _attr_name: str | None = None
    _attr_should_poll: bool = True
    _attr_state: StateType = STATE_UNKNOWN
    _attr_supported_features: int | None = None
    _attr_unique_id: str | None = None
    _attr_unit_of_measurement: str | None = None
    @property
    def should_poll(self) -> bool:
        """Return True if entity has to be polled for state.
        False if entity pushes its state to HA.
        """
        return self._attr_should_poll
    @property
    def unique_id(self) -> str | None:
        """Return a unique ID."""
        return self._attr_unique_id
    @property
    def name(self) -> str | None:
        """Return the name of the entity."""
        return self._attr_name
    @property
    def state(self) -> StateType:
        """Return the state of the entity."""
        return self._attr_state
    @property
    def capability_attributes(self) -> Mapping[str, Any] | None:
        """Return the capability attributes.
        Attributes that explain the capabilities of an entity.
        Implemented by component base class. Convention for attribute names
        is lowercase snake_case.
        """
        return None
    @property
    def state_attributes(self) -> dict[str, Any] | None:
        """Return the state attributes.
        Implemented by component base class, should not be extended by integrations.
        Convention for attribute names is lowercase snake_case.
        """
        return None
    @property
    def device_state_attributes(self) -> Mapping[str, Any] | None:
        """Return entity specific state attributes.
        This method is deprecated, platform classes should implement
        extra_state_attributes instead.
        """
        return None
    @property
    def extra_state_attributes(self) -> Mapping[str, Any] | None:
        """Return entity specific state attributes.
        Implemented by platform classes. Convention for attribute names
        is lowercase snake_case.
        """
        return self._attr_extra_state_attributes
    @property
    def device_info(self) -> DeviceInfo | None:
        """Return device specific attributes.
        Implemented by platform classes.
        """
        return self._attr_device_info
    @property
    def device_class(self) -> str | None:
        """Return the class of this device, from component DEVICE_CLASSES."""
        return self._attr_device_class
    @property
    def unit_of_measurement(self) -> str | None:
        """Return the unit of measurement of this entity, if any."""
        return self._attr_unit_of_measurement
    @property
    def icon(self) -> str | None:
        """Return the icon to use in the frontend, if any."""
        return self._attr_icon
    @property
    def entity_picture(self) -> str | None:
        """Return the entity picture to use in the frontend, if any."""
        return self._attr_entity_picture
    @property
    def available(self) -> bool:
        """Return True if entity is available."""
        return self._attr_available
    @property
    def assumed_state(self) -> bool:
        """Return True if unable to access real state of the entity."""
        return self._attr_assumed_state
    @property
    def force_update(self) -> bool:
        """Return True if state updates should be forced.
        If True, a state change will be triggered anytime the state property is
        updated, not just when the value changes.
        """
        return self._attr_force_update
    @property
    def supported_features(self) -> int | None:
        """Flag supported features."""
        return self._attr_supported_features
    @property
    def context_recent_time(self) -> timedelta:
        """Time that a context is considered recent."""
        return self._attr_context_recent_time
    @property
    def entity_registry_enabled_default(self) -> bool:
        """Return if the entity should be enabled when first added to the entity registry."""
        return self._attr_entity_registry_enabled_default
    # DO NOT OVERWRITE
    # These properties and methods are either managed by Safegate Pro or they
    # are used to perform a very specific function. Overwriting these may
    # produce undesirable effects in the entity's operation.
    @property
    def enabled(self) -> bool:
        """Return if the entity is enabled in the entity registry.
        If an entity is not part of the registry, it cannot be disabled
        and will therefore always be enabled.
        """
        return self.registry_entry is None or not self.registry_entry.disabled
    @callback
    def async_set_context(self, context: Context) -> None:
        """Set the context the entity currently operates under."""
        self._context = context
        # Timestamp used by _async_write_ha_state to expire stale contexts.
        self._context_set = dt_util.utcnow()
    async def async_update_ha_state(self, force_refresh: bool = False) -> None:
        """Update Safegate Pro with current state of entity.
        If force_refresh == True will update entity before setting state.
        This method must be run in the event loop.
        """
        if self.hass is None:
            raise RuntimeError(f"Attribute hass is None for {self}")
        if self.entity_id is None:
            raise NoEntitySpecifiedError(
                f"No entity id specified for entity {self.name}"
            )
        # update entity data
        if force_refresh:
            try:
                await self.async_device_update()
            except Exception:  # pylint: disable=broad-except
                # A failed refresh is logged and the stale state is kept.
                _LOGGER.exception("Update for %s fails", self.entity_id)
                return
        self._async_write_ha_state()
    @callback
    def async_write_ha_state(self) -> None:
        """Write the state to the state machine."""
        if self.hass is None:
            raise RuntimeError(f"Attribute hass is None for {self}")
        if self.entity_id is None:
            raise NoEntitySpecifiedError(
                f"No entity id specified for entity {self.name}"
            )
        self._async_write_ha_state()
    def _stringify_state(self) -> str:
        """Convert state to string."""
        if not self.available:
            return STATE_UNAVAILABLE
        state = self.state
        if state is None:
            return STATE_UNKNOWN
        if isinstance(state, float):
            # If the entity's state is a float, limit precision according to machine
            # epsilon to make the string representation readable
            return f"{state:.{FLOAT_PRECISION}}"
        return str(state)
    @callback
    def _async_write_ha_state(self) -> None:
        """Write the state to the state machine."""
        if self.registry_entry and self.registry_entry.disabled_by:
            # Disabled entities must not write state; warn once per entity.
            if not self._disabled_reported:
                self._disabled_reported = True
                assert self.platform is not None
                _LOGGER.warning(
                    "Entity %s is incorrectly being triggered for updates while it is disabled. This is a bug in the %s integration",
                    self.entity_id,
                    self.platform.platform_name,
                )
            return
        # Time the attribute/state assembly so slow entities can be reported.
        start = timer()
        attr = self.capability_attributes
        attr = dict(attr) if attr else {}
        state = self._stringify_state()
        if self.available:
            attr.update(self.state_attributes or {})
            extra_state_attributes = self.extra_state_attributes
            # Backwards compatibility for "device_state_attributes" deprecated in 2021.4
            # Add warning in 2021.6, remove in 2021.10
            if extra_state_attributes is None:
                extra_state_attributes = self.device_state_attributes
            attr.update(extra_state_attributes or {})
        unit_of_measurement = self.unit_of_measurement
        if unit_of_measurement is not None:
            attr[ATTR_UNIT_OF_MEASUREMENT] = unit_of_measurement
        entry = self.registry_entry
        # Registry-entry overrides (name/icon) take precedence over the entity's.
        # pylint: disable=consider-using-ternary
        name = (entry and entry.name) or self.name
        if name is not None:
            attr[ATTR_FRIENDLY_NAME] = name
        icon = (entry and entry.icon) or self.icon
        if icon is not None:
            attr[ATTR_ICON] = icon
        entity_picture = self.entity_picture
        if entity_picture is not None:
            attr[ATTR_ENTITY_PICTURE] = entity_picture
        assumed_state = self.assumed_state
        if assumed_state:
            attr[ATTR_ASSUMED_STATE] = assumed_state
        supported_features = self.supported_features
        if supported_features is not None:
            attr[ATTR_SUPPORTED_FEATURES] = supported_features
        device_class = self.device_class
        if device_class is not None:
            attr[ATTR_DEVICE_CLASS] = str(device_class)
        end = timer()
        # Warn once per entity when assembling the state took over 0.4 seconds.
        if end - start > 0.4 and not self._slow_reported:
            self._slow_reported = True
            extra = ""
            if "custom_components" in type(self).__module__:
                extra = "Please report it to the custom component author."
            else:
                extra = (
                    "Please create a bug report at "
                    "https://github.com/home-assistant/core/issues?q=is%3Aopen+is%3Aissue"
                )
                if self.platform:
                    extra += (
                        f"+label%3A%22integration%3A+{self.platform.platform_name}%22"
                    )
            _LOGGER.warning(
                "Updating state for %s (%s) took %.3f seconds. %s",
                self.entity_id,
                type(self),
                end - start,
                extra,
            )
        # Overwrite properties that have been set in the config file.
        if DATA_CUSTOMIZE in self.hass.data:
            attr.update(self.hass.data[DATA_CUSTOMIZE].get(self.entity_id))
        # Convert temperature if we detect one
        try:
            unit_of_measure = attr.get(ATTR_UNIT_OF_MEASUREMENT)
            units = self.hass.config.units
            if (
                unit_of_measure in (TEMP_CELSIUS, TEMP_FAHRENHEIT)
                and unit_of_measure != units.temperature_unit
            ):
                # Preserve the number of decimal places of the original state.
                prec = len(state) - state.index(".") - 1 if "." in state else 0
                temp = units.temperature(float(state), unit_of_measure)
                state = str(round(temp) if prec == 0 else round(temp, prec))
                attr[ATTR_UNIT_OF_MEASUREMENT] = units.temperature_unit
        except ValueError:
            # Could not convert state to float
            pass
        # Drop the context once it is older than context_recent_time.
        if (
            self._context_set is not None
            and dt_util.utcnow() - self._context_set > self.context_recent_time
        ):
            self._context = None
            self._context_set = None
        self.hass.states.async_set(
            self.entity_id, state, attr, self.force_update, self._context
        )
    def schedule_update_ha_state(self, force_refresh: bool = False) -> None:
        """Schedule an update ha state change task.
        Scheduling the update avoids executor deadlocks.
        Entity state and attributes are read when the update ha state change
        task is executed.
        If state is changed more than once before the ha state change task has
        been executed, the intermediate state transitions will be missed.
        """
        self.hass.add_job(self.async_update_ha_state(force_refresh))  # type: ignore
    @callback
    def async_schedule_update_ha_state(self, force_refresh: bool = False) -> None:
        """Schedule an update ha state change task.
        This method must be run in the event loop.
        Scheduling the update avoids executor deadlocks.
        Entity state and attributes are read when the update ha state change
        task is executed.
        If state is changed more than once before the ha state change task has
        been executed, the intermediate state transitions will be missed.
        """
        if force_refresh:
            self.hass.async_create_task(self.async_update_ha_state(force_refresh))
        else:
            # No refresh needed: write synchronously, skipping a task round-trip.
            self.async_write_ha_state()
    async def async_device_update(self, warning: bool = True) -> None:
        """Process 'update' or 'async_update' from entity.
        This method is a coroutine.
        """
        # _update_staged guards against overlapping updates for this entity.
        if self._update_staged:
            return
        self._update_staged = True
        # Process update sequential
        if self.parallel_updates:
            await self.parallel_updates.acquire()
        try:
            # pylint: disable=no-member
            if hasattr(self, "async_update"):
                task = self.hass.async_create_task(self.async_update())  # type: ignore
            elif hasattr(self, "update"):
                task = self.hass.async_add_executor_job(self.update)  # type: ignore
            else:
                return
            if not warning:
                await task
                return
            # Wait up to SLOW_UPDATE_WARNING seconds; if the update is still
            # running after that, log a warning and keep waiting below.
            finished, _ = await asyncio.wait([task], timeout=SLOW_UPDATE_WARNING)
            for done in finished:
                exc = done.exception()
                if exc:
                    raise exc
                return
            _LOGGER.warning(
                "Update of %s is taking over %s seconds",
                self.entity_id,
                SLOW_UPDATE_WARNING,
            )
            await task
        finally:
            self._update_staged = False
            if self.parallel_updates:
                self.parallel_updates.release()
    @callback
    def async_on_remove(self, func: CALLBACK_TYPE) -> None:
        """Add a function to call when entity removed."""
        if self._on_remove is None:
            self._on_remove = []
        self._on_remove.append(func)
    async def async_removed_from_registry(self) -> None:
        """Run when entity has been removed from entity registry.
        To be extended by integrations.
        """
    @callback
    def add_to_platform_start(
        self,
        hass: HomeAssistant,
        platform: EntityPlatform,
        parallel_updates: asyncio.Semaphore | None,
    ) -> None:
        """Start adding an entity to a platform."""
        if self._added:
            raise HomeAssistantError(
                f"Entity {self.entity_id} cannot be added a second time to an entity platform"
            )
        self.hass = hass
        self.platform = platform
        self.parallel_updates = parallel_updates
        self._added = True
    @callback
    def add_to_platform_abort(self) -> None:
        """Abort adding an entity to a platform."""
        # Revert everything add_to_platform_start assigned.
        self.hass = None  # type: ignore
        self.platform = None
        self.parallel_updates = None
        self._added = False
    async def add_to_platform_finish(self) -> None:
        """Finish adding an entity to a platform."""
        await self.async_internal_added_to_hass()
        await self.async_added_to_hass()
        self.async_write_ha_state()
    async def async_remove(self, *, force_remove: bool = False) -> None:
        """Remove entity from Safegate Pro.
        If the entity has a non disabled entry in the entity registry,
        the entity's state will be set to unavailable, in the same way
        as when the entity registry is loaded.
        If the entity doesn't have a non disabled entry in the entity registry,
        or if force_remove=True, its state will be removed.
        """
        # Only platform-managed entities track _added, hence the platform check.
        if self.platform and not self._added:
            raise HomeAssistantError(
                f"Entity {self.entity_id} async_remove called twice"
            )
        self._added = False
        if self._on_remove is not None:
            while self._on_remove:
                self._on_remove.pop()()
        await self.async_internal_will_remove_from_hass()
        await self.async_will_remove_from_hass()
        # Check if entry still exists in entity registry (e.g. unloading config entry)
        if (
            not force_remove
            and self.registry_entry
            and not self.registry_entry.disabled
        ):
            # Set the entity's state will to unavailable + ATTR_RESTORED: True
            self.registry_entry.write_unavailable_state(self.hass)
        else:
            self.hass.states.async_remove(self.entity_id, context=self._context)
    async def async_added_to_hass(self) -> None:
        """Run when entity about to be added to hass.
        To be extended by integrations.
        """
    async def async_will_remove_from_hass(self) -> None:
        """Run when entity will be removed from hass.
        To be extended by integrations.
        """
    async def async_internal_added_to_hass(self) -> None:
        """Run when entity about to be added to hass.
        Not to be extended by integrations.
        """
        if self.platform:
            # Record where this entity came from in the entity-source mapping.
            info = {"domain": self.platform.platform_name}
            if self.platform.config_entry:
                info["source"] = SOURCE_CONFIG_ENTRY
                info["config_entry"] = self.platform.config_entry.entry_id
            else:
                info["source"] = SOURCE_PLATFORM_CONFIG
            self.hass.data.setdefault(DATA_ENTITY_SOURCE, {})[self.entity_id] = info
        if self.registry_entry is not None:
            # This is an assert as it should never happen, but helps in tests
            assert (
                not self.registry_entry.disabled_by
            ), f"Entity {self.entity_id} is being added while it's disabled"
            self.async_on_remove(
                async_track_entity_registry_updated_event(
                    self.hass, self.entity_id, self._async_registry_updated
                )
            )
    async def async_internal_will_remove_from_hass(self) -> None:
        """Run when entity will be removed from hass.
        Not to be extended by integrations.
        """
        if self.platform:
            self.hass.data[DATA_ENTITY_SOURCE].pop(self.entity_id)
    async def _async_registry_updated(self, event: Event) -> None:
        """Handle entity registry update."""
        data = event.data
        if data["action"] == "remove":
            await self.async_removed_from_registry()
            self.registry_entry = None
            await self.async_remove()
        if data["action"] != "update":
            return
        ent_reg = await self.hass.helpers.entity_registry.async_get_registry()
        old = self.registry_entry
        self.registry_entry = ent_reg.async_get(data["entity_id"])
        assert self.registry_entry is not None
        if self.registry_entry.disabled:
            await self.async_remove()
            return
        assert old is not None
        if self.registry_entry.entity_id == old.entity_id:
            self.async_write_ha_state()
            return
        # The entity id changed: remove the old state object and re-add the
        # entity to its platform under the new entity id.
        await self.async_remove(force_remove=True)
        assert self.platform is not None
        self.entity_id = self.registry_entry.entity_id
        await self.platform.async_add_entities([self])
    def __eq__(self, other: Any) -> bool:
        """Return the comparison."""
        if not isinstance(other, self.__class__):
            return False
        # Can only decide equality if both have a unique id
        if self.unique_id is None or other.unique_id is None:
            return False
        # Ensure they belong to the same platform
        if self.platform is not None or other.platform is not None:
            if self.platform is None or other.platform is None:
                return False
            if self.platform.platform != other.platform.platform:
                return False
        return self.unique_id == other.unique_id
    def __repr__(self) -> str:
        """Return the representation."""
        return f"<Entity {self.name}: {self.state}>"
    async def async_request_call(self, coro: Awaitable) -> None:
        """Process request batched."""
        # Serialize with entity updates when a parallel-updates semaphore is set.
        if self.parallel_updates:
            await self.parallel_updates.acquire()
        try:
            await coro
        finally:
            if self.parallel_updates:
                self.parallel_updates.release()
class ToggleEntity(Entity):
    """An abstract class for entities that can be turned on and off."""

    _attr_is_on: bool
    _attr_state: None = None

    @property
    @final
    def state(self) -> str | None:
        """Return the state."""
        if self.is_on:
            return STATE_ON
        return STATE_OFF

    @property
    def is_on(self) -> bool:
        """Return True if entity is on."""
        return self._attr_is_on

    def turn_on(self, **kwargs: Any) -> None:
        """Turn the entity on."""
        raise NotImplementedError()

    async def async_turn_on(self, **kwargs: Any) -> None:
        """Turn the entity on, running the sync implementation in the executor."""
        await self.hass.async_add_executor_job(ft.partial(self.turn_on, **kwargs))

    def turn_off(self, **kwargs: Any) -> None:
        """Turn the entity off."""
        raise NotImplementedError()

    async def async_turn_off(self, **kwargs: Any) -> None:
        """Turn the entity off, running the sync implementation in the executor."""
        await self.hass.async_add_executor_job(ft.partial(self.turn_off, **kwargs))

    def toggle(self, **kwargs: Any) -> None:
        """Toggle the entity."""
        action = self.turn_off if self.is_on else self.turn_on
        action(**kwargs)

    async def async_toggle(self, **kwargs: Any) -> None:
        """Toggle the entity."""
        if self.is_on:
            await self.async_turn_off(**kwargs)
        else:
            await self.async_turn_on(**kwargs)
from __future__ import annotations
from collections import deque
from collections.abc import Generator
from contextlib import contextmanager
from contextvars import ContextVar
from functools import wraps
from typing import Any, Callable, cast
from homeassistant.helpers.typing import TemplateVarsType
import homeassistant.util.dt as dt_util
class TraceElement:
    """Container for trace data."""

    def __init__(self, variables: TemplateVarsType, path: str) -> None:
        """Create a trace node for *path*, diffing variables against the last node."""
        self._child_key: tuple[str, str] | None = None
        self._child_run_id: str | None = None
        self._error: Exception | None = None
        self.path: str = path
        self._result: dict | None = None
        self.reuse_by_child = False
        self._timestamp = dt_util.utcnow()

        snapshot = dict(variables) if variables else {}
        previous = variables_cv.get() or {}
        variables_cv.set(dict(snapshot))
        # Only keep variables that are new or changed since the previous node.
        self._variables = {
            key: value
            for key, value in snapshot.items()
            if key not in previous or previous[key] != value
        }

    def __repr__(self) -> str:
        """Return the dict representation of this trace element."""
        return str(self.as_dict())

    def set_child_id(self, child_key: tuple[str, str], child_run_id: str) -> None:
        """Set trace id of a nested script run."""
        self._child_key = child_key
        self._child_run_id = child_run_id

    def set_error(self, ex: Exception) -> None:
        """Set error."""
        self._error = ex

    def set_result(self, **kwargs: Any) -> None:
        """Replace the stored result."""
        self._result = dict(kwargs)

    def update_result(self, **kwargs: Any) -> None:
        """Merge *kwargs* into the stored result."""
        self._result = {**(self._result or {}), **kwargs}

    def as_dict(self) -> dict[str, Any]:
        """Return dictionary version of this TraceElement."""
        result: dict[str, Any] = {"path": self.path, "timestamp": self._timestamp}
        if self._child_key is not None:
            domain, item_id = self._child_key
            result["child_id"] = {
                "domain": domain,
                "item_id": item_id,
                "run_id": str(self._child_run_id),
            }
        if self._variables:
            result["changed_variables"] = self._variables
        if self._error is not None:
            result["error"] = str(self._error)
        if self._result is not None:
            result["result"] = self._result
        return result
# Context variables for tracing
# Each ContextVar is per-asyncio-context, so concurrent script runs do not
# share trace state. All default to None (i.e. "no trace active").
# Current trace
trace_cv: ContextVar[dict[str, deque[TraceElement]] | None] = ContextVar(
    "trace_cv", default=None
)
# Stack of TraceElements
trace_stack_cv: ContextVar[list[TraceElement] | None] = ContextVar(
    "trace_stack_cv", default=None
)
# Current location in config tree
trace_path_stack_cv: ContextVar[list[str] | None] = ContextVar(
    "trace_path_stack_cv", default=None
)
# Copy of last variables
variables_cv: ContextVar[Any | None] = ContextVar("variables_cv", default=None)
# (domain, item_id) + Run ID
trace_id_cv: ContextVar[tuple[str, str] | None] = ContextVar(
    "trace_id_cv", default=None
)
# Reason for stopped script execution
script_execution_cv: ContextVar[StopReason | None] = ContextVar(
    "script_execution_cv", default=None
)
def trace_id_set(trace_id: tuple[str, str]) -> None:
    """Set the (domain, item_id) id of the current trace."""
    trace_id_cv.set(trace_id)
def trace_id_get() -> tuple[str, str] | None:
    """Get id of the current trace, or None when no trace is active."""
    return trace_id_cv.get()
def trace_stack_push(trace_stack_var: ContextVar, node: Any) -> None:
    """Push an element to the top of a trace stack."""
    if (stack := trace_stack_var.get()) is None:
        # First push in this context: create the backing list lazily.
        stack = []
        trace_stack_var.set(stack)
    stack.append(node)
def trace_stack_pop(trace_stack_var: ContextVar) -> None:
    """Remove the top element from a trace stack."""
    # Deliberately assumes a push happened before; an empty/None stack raises.
    trace_stack_var.get().pop()
def trace_stack_top(trace_stack_var: ContextVar) -> Any | None:
    """Return the element at the top of a trace stack, or None if empty."""
    stack = trace_stack_var.get()
    if not stack:
        return None
    return stack[-1]
def trace_path_push(suffix: str | list[str]) -> int:
    """Go deeper in the config tree.

    Returns the number of path segments pushed, for trace_path_pop.
    """
    nodes = [suffix] if isinstance(suffix, str) else suffix
    for node in nodes:
        trace_stack_push(trace_path_stack_cv, node)
    return len(nodes)
def trace_path_pop(count: int) -> None:
    """Go n levels up in the config tree."""
    remaining = count
    while remaining > 0:
        trace_stack_pop(trace_path_stack_cv)
        remaining -= 1
def trace_path_get() -> str:
    """Return a string representing the current location in the config tree."""
    path = trace_path_stack_cv.get()
    # Empty string both when no stack exists and when it is empty.
    return "/".join(path) if path else ""
def trace_append_element(
    trace_element: TraceElement,
    maxlen: int | None = None,
) -> None:
    """Append a TraceElement to trace[path].

    The per-path deque is bounded by *maxlen*, so older elements are
    discarded once the limit is reached.
    """
    trace = trace_cv.get()
    if trace is None:
        trace = {}
        trace_cv.set(trace)
    trace.setdefault(trace_element.path, deque(maxlen=maxlen)).append(trace_element)
def trace_get(clear: bool = True) -> dict[str, deque[TraceElement]] | None:
    """Return the current trace.

    With clear=True (the default) all trace state is reset first, so the
    returned mapping is the fresh, empty one for the upcoming run.
    """
    if clear:
        trace_clear()
    return trace_cv.get()
def trace_clear() -> None:
    """Reset all per-context trace state for a new run."""
    variables_cv.set(None)
    trace_path_stack_cv.set(None)
    trace_stack_cv.set(None)
    trace_cv.set({})
    script_execution_cv.set(StopReason())
def trace_set_child_id(child_key: tuple[str, str], child_run_id: str) -> None:
    """Set child trace_id of TraceElement at the top of the stack."""
    top = cast(TraceElement, trace_stack_top(trace_stack_cv))
    # No-op when there is no active trace node.
    if top is not None:
        top.set_child_id(child_key, child_run_id)
def trace_set_result(**kwargs: Any) -> None:
    """Set the result of TraceElement at the top of the stack."""
    # Unlike trace_set_child_id, this assumes a trace node is present.
    cast(TraceElement, trace_stack_top(trace_stack_cv)).set_result(**kwargs)
def trace_update_result(**kwargs: Any) -> None:
    """Merge *kwargs* into the result of TraceElement at the top of the stack."""
    # Unlike trace_set_child_id, this assumes a trace node is present.
    cast(TraceElement, trace_stack_top(trace_stack_cv)).update_result(**kwargs)
class StopReason:
    """Mutable container class for script_execution.

    A shared mutable holder so nested code can record why a script run
    stopped; see script_execution_set / script_execution_get.
    """
    # Why the script stopped; None while the run is still in progress.
    script_execution: str | None = None
def script_execution_set(reason: str) -> None:
    """Set stop reason."""
    # Silently ignored when no StopReason holder exists in this context.
    if (data := script_execution_cv.get()) is not None:
        data.script_execution = reason
def script_execution_get() -> str | None:
    """Return the recorded stop reason, or None when none is available."""
    data = script_execution_cv.get()
    return None if data is None else data.script_execution
@contextmanager
def trace_path(suffix: str | list[str]) -> Generator:
    """Go deeper in the config tree.

    Can not be used as a decorator on coroutine functions.
    """
    count = trace_path_push(suffix)
    try:
        yield
    finally:
        # Always unwind exactly the number of segments pushed above.
        trace_path_pop(count)
def async_trace_path(suffix: str | list[str]) -> Callable:
    """Go deeper in the config tree.

    To be used as a decorator on coroutine functions.
    """
    def _trace_path_decorator(func: Callable) -> Callable:
        """Decorate a coroutine function."""
        @wraps(func)
        async def async_wrapper(*args: Any) -> None:
            """Run the wrapped coroutine with the extra trace path segment(s)."""
            # trace_path pushes before the await and pops when the coroutine
            # finishes or raises.
            with trace_path(suffix):
                await func(*args)
        return async_wrapper
    return _trace_path_decorator
from __future__ import annotations
from collections import OrderedDict
from collections.abc import Container, Iterable, MutableMapping
from typing import cast
import attr
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import device_registry as dr, entity_registry as er
from homeassistant.loader import bind_hass
from homeassistant.util import slugify
# mypy: disallow-any-generics
# Key in hass.data under which the AreaRegistry instance is stored.
DATA_REGISTRY = "area_registry"
# Event fired on create/update/remove with {"action": ..., "area_id": ...}.
EVENT_AREA_REGISTRY_UPDATED = "area_registry_updated"
# Persistent storage key and schema version for the registry store.
STORAGE_KEY = "core.area_registry"
STORAGE_VERSION = 1
# Debounce delay passed to Store.async_delay_save (presumably seconds --
# confirm against the storage helper).
SAVE_DELAY = 10
@attr.s(slots=True, frozen=True)
class AreaEntry:
    """Area Registry Entry."""

    name: str = attr.ib()
    normalized_name: str = attr.ib()
    id: str | None = attr.ib(default=None)

    def generate_id(self, existing_ids: Container[str]) -> None:
        """Initialize ID with a slug of the name, unique among *existing_ids*."""
        base = slugify(self.name)
        candidate = base
        attempt = 1
        while candidate in existing_ids:
            attempt += 1
            candidate = f"{base}_{attempt}"
        # Frozen attrs class: object.__setattr__ bypasses the immutability guard.
        object.__setattr__(self, "id", candidate)
class AreaRegistry:
"""Class to hold a registry of areas."""
def __init__(self, hass: HomeAssistant) -> None:
"""Initialize the area registry."""
self.hass = hass
self.areas: MutableMapping[str, AreaEntry] = {}
self._store = hass.helpers.storage.Store(STORAGE_VERSION, STORAGE_KEY)
self._normalized_name_area_idx: dict[str, str] = {}
@callback
def async_get_area(self, area_id: str) -> AreaEntry | None:
"""Get area by id."""
return self.areas.get(area_id)
@callback
def async_get_area_by_name(self, name: str) -> AreaEntry | None:
"""Get area by name."""
normalized_name = normalize_area_name(name)
if normalized_name not in self._normalized_name_area_idx:
return None
return self.areas[self._normalized_name_area_idx[normalized_name]]
@callback
def async_list_areas(self) -> Iterable[AreaEntry]:
"""Get all areas."""
return self.areas.values()
@callback
def async_get_or_create(self, name: str) -> AreaEntry:
"""Get or create an area."""
area = self.async_get_area_by_name(name)
if area:
return area
return self.async_create(name)
@callback
def async_create(self, name: str) -> AreaEntry:
"""Create a new area."""
normalized_name = normalize_area_name(name)
if self.async_get_area_by_name(name):
raise ValueError(f"The name {name} ({normalized_name}) is already in use")
area = AreaEntry(name=name, normalized_name=normalized_name)
area.generate_id(self.areas)
assert area.id is not None
self.areas[area.id] = area
self._normalized_name_area_idx[normalized_name] = area.id
self.async_schedule_save()
self.hass.bus.async_fire(
EVENT_AREA_REGISTRY_UPDATED, {"action": "create", "area_id": area.id}
)
return area
@callback
def async_delete(self, area_id: str) -> None:
"""Delete area."""
area = self.areas[area_id]
device_registry = dr.async_get(self.hass)
entity_registry = er.async_get(self.hass)
device_registry.async_clear_area_id(area_id)
entity_registry.async_clear_area_id(area_id)
del self.areas[area_id]
del self._normalized_name_area_idx[area.normalized_name]
self.hass.bus.async_fire(
EVENT_AREA_REGISTRY_UPDATED, {"action": "remove", "area_id": area_id}
)
self.async_schedule_save()
@callback
def async_update(self, area_id: str, name: str) -> AreaEntry:
"""Update name of area."""
updated = self._async_update(area_id, name)
self.hass.bus.async_fire(
EVENT_AREA_REGISTRY_UPDATED, {"action": "update", "area_id": area_id}
)
return updated
    @callback
    def _async_update(self, area_id: str, name: str) -> AreaEntry:
        """Update name of area.

        Returns the existing entry untouched when the name is identical;
        raises ValueError when the new name collides with another area.
        """
        old = self.areas[area_id]
        changes = {}
        if name == old.name:
            return old
        normalized_name = normalize_area_name(name)
        # A collision is only possible when the normalized form changes;
        # re-casing/re-spacing this area's own name is always allowed.
        if normalized_name != old.normalized_name and self.async_get_area_by_name(name):
            raise ValueError(f"The name {name} ({normalized_name}) is already in use")
        changes["name"] = name
        changes["normalized_name"] = normalized_name
        # Entries are immutable attrs objects; evolve builds the replacement.
        new = self.areas[area_id] = attr.evolve(old, **changes)
        # Re-key the name index from the old normalized name to the new one.
        self._normalized_name_area_idx[
            normalized_name
        ] = self._normalized_name_area_idx.pop(old.normalized_name)
        self.async_schedule_save()
        return new
    async def async_load(self) -> None:
        """Load the area registry from the store."""
        data = await self._store.async_load()
        areas: MutableMapping[str, AreaEntry] = OrderedDict()
        if data is not None:
            for area in data["areas"]:
                # Stored data carries only name/id; recompute the normalized
                # name and rebuild the name index on load.
                normalized_name = normalize_area_name(area["name"])
                areas[area["id"]] = AreaEntry(
                    name=area["name"], id=area["id"], normalized_name=normalized_name
                )
                self._normalized_name_area_idx[normalized_name] = area["id"]
        self.areas = areas
    @callback
    def async_schedule_save(self) -> None:
        """Schedule saving the area registry.

        Writes are debounced by SAVE_DELAY seconds via the store.
        """
        self._store.async_delay_save(self._data_to_save, SAVE_DELAY)
@callback
def _data_to_save(self) -> dict[str, list[dict[str, str | None]]]:
"""Return data of area registry to store in a file."""
data = {}
data["areas"] = [
{
"name": entry.name,
"id": entry.id,
}
for entry in self.areas.values()
]
return data
    @callback
    def async_get(hass: HomeAssistant) -> AreaRegistry:
        """Get area registry.

        Assumes async_load(hass) already stored the registry in hass.data.
        """
        return cast(AreaRegistry, hass.data[DATA_REGISTRY])
async def async_load(hass: HomeAssistant) -> None:
    """Create the singleton area registry and load its stored data."""
    assert DATA_REGISTRY not in hass.data
    registry = AreaRegistry(hass)
    hass.data[DATA_REGISTRY] = registry
    await registry.async_load()
@bind_hass
async def async_get_registry(hass: HomeAssistant) -> AreaRegistry:
    """Get area registry.

    This is deprecated and will be removed in the future. Use async_get instead.
    """
    # Kept as a coroutine only for backward compatibility with old callers.
    return async_get(hass)
def normalize_area_name(area_name: str) -> str:
    """Normalize an area name by removing whitespace and case folding.

    ``str.split()`` with no argument splits on runs of any Unicode whitespace
    (spaces, tabs, newlines, non-breaking spaces), so joining the pieces
    removes it all. The previous ``replace(" ", "")`` only removed ASCII
    spaces, contradicting this docstring; for names containing only regular
    spaces the result is unchanged.
    """
    return "".join(area_name.casefold().split())
from __future__ import annotations
import asyncio
from collections.abc import Awaitable
from logging import Logger
from typing import Any, Callable
from homeassistant.core import HassJob, HomeAssistant, callback
class Debouncer:
    """Class to rate limit calls to a specific command."""

    def __init__(
        self,
        hass: HomeAssistant,
        logger: Logger,
        *,
        cooldown: float,
        immediate: bool,
        function: Callable[..., Awaitable[Any]] | None = None,
    ) -> None:
        """Initialize debounce.

        immediate: indicate if the function needs to be called right away and
        wait <cooldown> until executing next invocation.
        function: optional and can be instantiated later.
        """
        self.hass = hass
        self.logger = logger
        self._function = function
        self.cooldown = cooldown
        self.immediate = immediate
        # Handle of the pending cooldown timer, if any.
        self._timer_task: asyncio.TimerHandle | None = None
        # Set when a call arrived during the cooldown and must run when it ends.
        self._execute_at_end_of_timer: bool = False
        # Guards against overlapping executions of the wrapped function.
        self._execute_lock = asyncio.Lock()
        self._job: HassJob | None = None if function is None else HassJob(function)

    @property
    def function(self) -> Callable[..., Awaitable[Any]] | None:
        """Return the function being wrapped by the Debouncer."""
        return self._function

    @function.setter
    def function(self, function: Callable[..., Awaitable[Any]]) -> None:
        """Update the function being wrapped by the Debouncer."""
        self._function = function
        # Rebuild the HassJob only when the target actually changed.
        if self._job is None or function != self._job.target:
            self._job = HassJob(function)

    async def async_call(self) -> None:
        """Call the function."""
        assert self._job is not None
        if self._timer_task:
            # In cooldown: remember that one more execution is owed.
            if not self._execute_at_end_of_timer:
                self._execute_at_end_of_timer = True
            return
        # Locked means a call is in progress. Any call is good, so abort.
        if self._execute_lock.locked():
            return
        if not self.immediate:
            # Deferred mode: run only when the cooldown timer fires.
            self._execute_at_end_of_timer = True
            self._schedule_timer()
            return
        async with self._execute_lock:
            # Abort if timer got set while we're waiting for the lock.
            if self._timer_task:
                return
            task = self.hass.async_run_hass_job(self._job)
            if task:
                await task
            # Start the cooldown after the immediate execution.
            self._schedule_timer()

    async def _handle_timer_finish(self) -> None:
        """Handle a finished timer."""
        assert self._job is not None
        self._timer_task = None
        if not self._execute_at_end_of_timer:
            return
        self._execute_at_end_of_timer = False
        # Locked means a call is in progress. Any call is good, so abort.
        if self._execute_lock.locked():
            return
        async with self._execute_lock:
            # Abort if timer got set while we're waiting for the lock.
            if self._timer_task:
                return  # type: ignore
            try:
                task = self.hass.async_run_hass_job(self._job)
                if task:
                    await task
            except Exception:  # pylint: disable=broad-except
                # Timer-driven executions have no caller to propagate to;
                # log instead of losing the exception.
                self.logger.exception("Unexpected exception from %s", self.function)
            # Start a fresh cooldown after the deferred execution.
            self._schedule_timer()

    @callback
    def async_cancel(self) -> None:
        """Cancel any scheduled call."""
        if self._timer_task:
            self._timer_task.cancel()
            self._timer_task = None
        self._execute_at_end_of_timer = False

    @callback
    def _schedule_timer(self) -> None:
        """Schedule a timer."""
        self._timer_task = self.hass.loop.call_later(
            self.cooldown,
            lambda: self.hass.async_create_task(self._handle_timer_finish()),
        )
from __future__ import annotations
from collections.abc import Iterable
import logging
import voluptuous as vol
from homeassistant.const import ATTR_LATITUDE, ATTR_LONGITUDE
from homeassistant.core import HomeAssistant, State
from homeassistant.util import location as loc_util
_LOGGER = logging.getLogger(__name__)
def has_location(state: State) -> bool:
    """Test if state contains a valid location.

    Async friendly.
    """
    if not isinstance(state, State):
        return False
    attributes = state.attributes
    return isinstance(attributes.get(ATTR_LATITUDE), float) and isinstance(
        attributes.get(ATTR_LONGITUDE), float
    )
def closest(latitude: float, longitude: float, states: Iterable[State]) -> State | None:
    """Return closest state to point.

    Async friendly.
    """

    def distance_to_point(state: State) -> float:
        # distance() can return None; fall back to 0 as the original did.
        return (
            loc_util.distance(
                state.attributes.get(ATTR_LATITUDE),
                state.attributes.get(ATTR_LONGITUDE),
                latitude,
                longitude,
            )
            or 0
        )

    located_states = [state for state in states if has_location(state)]
    if not located_states:
        return None
    return min(located_states, key=distance_to_point)
def find_coordinates(
    hass: HomeAssistant, entity_id: str, recursion_history: list | None = None
) -> str | None:
    """Find the gps coordinates of the entity in the form of '90.000,180.000'.

    Resolution order: the entity's own lat/long attributes, then the zone
    named by its state, then a nested entity whose id is the state, then the
    state itself if it parses as a coordinate pair. Returns None on failure.
    """
    entity_state = hass.states.get(entity_id)
    if entity_state is None:
        _LOGGER.error("Unable to find entity %s", entity_id)
        return None
    # Check if the entity has location attributes
    if has_location(entity_state):
        return _get_location_from_attributes(entity_state)
    # Check if device is in a zone
    zone_entity = hass.states.get(f"zone.{entity_state.state}")
    if has_location(zone_entity):  # type: ignore
        _LOGGER.debug(
            "%s is in %s, getting zone location", entity_id, zone_entity.entity_id  # type: ignore
        )
        return _get_location_from_attributes(zone_entity)  # type: ignore
    # Resolve nested entity
    if recursion_history is None:
        recursion_history = []
    recursion_history.append(entity_id)
    # Guard against entity A pointing at B pointing back at A.
    if entity_state.state in recursion_history:
        _LOGGER.error(
            "Circular reference detected while trying to find coordinates of an entity. The state of %s has already been checked",
            entity_state.state,
        )
        return None
    _LOGGER.debug("Getting nested entity for state: %s", entity_state.state)
    nested_entity = hass.states.get(entity_state.state)
    if nested_entity is not None:
        _LOGGER.debug("Resolving nested entity_id: %s", entity_state.state)
        return find_coordinates(hass, entity_state.state, recursion_history)
    # Check if state is valid coordinate set
    try:
        # Import here, not at top-level to avoid circular import
        import homeassistant.helpers.config_validation as cv  # pylint: disable=import-outside-toplevel

        cv.gps(entity_state.state.split(","))
    except vol.Invalid:
        _LOGGER.error(
            "Entity %s does not contain a location and does not point at an entity that does: %s",
            entity_id,
            entity_state.state,
        )
        return None
    else:
        return entity_state.state
def _get_location_from_attributes(entity_state: State) -> str:
    """Format an entity's latitude/longitude attributes as 'lat,long'."""
    attributes = entity_state.attributes
    latitude = attributes.get(ATTR_LATITUDE)
    longitude = attributes.get(ATTR_LONGITUDE)
    return f"{latitude},{longitude}"
from __future__ import annotations
import asyncio
from collections import ChainMap
import logging
from typing import Any
from homeassistant.core import HomeAssistant, callback
from homeassistant.loader import (
MAX_LOAD_CONCURRENTLY,
Integration,
async_get_config_flows,
async_get_integration,
bind_hass,
)
from homeassistant.util.async_ import gather_with_concurrency
from homeassistant.util.json import load_json
_LOGGER = logging.getLogger(__name__)
TRANSLATION_LOAD_LOCK = "translation_load_lock"
TRANSLATION_FLATTEN_CACHE = "translation_flatten_cache"
LOCALE_EN = "en"
def recursive_flatten(prefix: Any, data: dict) -> dict[str, Any]:
    """Flatten nested *data* into dotted keys rooted at *prefix*."""
    flattened: dict[str, Any] = {}
    for key, value in data.items():
        if isinstance(value, dict):
            # Recurse with the key appended to the running dotted prefix.
            flattened |= recursive_flatten(f"{prefix}{key}.", value)
        else:
            flattened[f"{prefix}{key}"] = value
    return flattened
@callback
def component_translation_path(
    component: str, language: str, integration: Integration
) -> str | None:
    """Return the translation json file location for a component.

    For component:
     - components/hue/translations/nl.json

    For platform:
     - components/hue/translations/light.nl.json

    If component is just a single file, will return None.
    """
    parts = component.split(".")
    domain = parts[-1]

    # Single-file integrations (e.g. custom_components/my_component.py)
    # have no translations directory.
    if integration.file_path.name != domain:
        return None

    filename = (
        f"{parts[0]}.{language}.json" if len(parts) == 2 else f"{language}.json"
    )
    return str(integration.file_path / "translations" / filename)
def load_translations_files(
    translation_files: dict[str, str]
) -> dict[str, dict[str, Any]]:
    """Load and parse translation.json files, skipping malformed ones."""
    loaded: dict[str, dict[str, Any]] = {}
    for component, translation_file in translation_files.items():
        parsed = load_json(translation_file)
        if isinstance(parsed, dict):
            loaded[component] = parsed
        else:
            _LOGGER.warning(
                "Translation file is unexpected type %s. Expected dict for %s",
                type(parsed),
                translation_file,
            )
    return loaded
def _merge_resources(
translation_strings: dict[str, dict[str, Any]],
components: set[str],
category: str,
) -> dict[str, dict[str, Any]]:
"""Build and merge the resources response for the given components and platforms."""
# Build response
resources: dict[str, dict[str, Any]] = {}
for component in components:
if "." not in component:
domain = component
else:
domain = component.split(".", 1)[0]
domain_resources = resources.setdefault(domain, {})
# Integrations are able to provide translations for their entities under other
# integrations if they don't have an existing device class. This is done by
# using a custom device class prefixed with their domain and two underscores.
# These files are in platform specific files in the integration folder with
# names like `strings.sensor.json`.
# We are going to merge the translations for the custom device classes into
# the translations of sensor.
new_value = translation_strings[component].get(category)
if new_value is None:
continue
if isinstance(new_value, dict):
domain_resources.update(new_value)
else:
_LOGGER.error(
"An integration providing translations for %s provided invalid data: %s",
domain,
new_value,
)
return resources
def _build_resources(
translation_strings: dict[str, dict[str, Any]],
components: set[str],
category: str,
) -> dict[str, dict[str, Any]]:
"""Build the resources response for the given components."""
# Build response
return {
component: translation_strings[component][category]
for component in components
if category in translation_strings[component]
and translation_strings[component][category] is not None
}
async def async_get_component_strings(
    hass: HomeAssistant, language: str, components: set[str]
) -> dict[str, Any]:
    """Load translations for the given components in one language.

    Returns a mapping of component -> parsed translation dict; components
    without a translation file map to {}.
    """
    # The integration domain is the last dotted part ("light.hue" -> "hue").
    domains = list({loaded.split(".")[-1] for loaded in components})
    integrations = dict(
        zip(
            domains,
            await gather_with_concurrency(
                MAX_LOAD_CONCURRENTLY,
                *[async_get_integration(hass, domain) for domain in domains],
            ),
        )
    )
    translations: dict[str, Any] = {}
    # Determine paths of missing components/platforms
    files_to_load = {}
    for loaded in components:
        parts = loaded.split(".")
        domain = parts[-1]
        integration = integrations[domain]
        path = component_translation_path(loaded, language, integration)
        # No translation available
        if path is None:
            translations[loaded] = {}
        else:
            files_to_load[loaded] = path
    if not files_to_load:
        return translations
    # Load files
    load_translations_job = hass.async_add_executor_job(
        load_translations_files, files_to_load
    )
    assert load_translations_job is not None
    loaded_translations = await load_translations_job
    # Translations that miss "title" will get integration put in.
    for loaded, loaded_translation in loaded_translations.items():
        if "." in loaded:
            continue
        if "title" not in loaded_translation:
            loaded_translation["title"] = integrations[loaded].name
    translations.update(loaded_translations)
    return translations
class _TranslationCache:
    """Cache for flattened translations."""

    def __init__(self, hass: HomeAssistant) -> None:
        """Initialize the cache."""
        self.hass = hass
        # language -> set of components already loaded for that language.
        self.loaded: dict[str, set[str]] = {}
        # language -> component -> category -> flattened strings.
        self.cache: dict[str, dict[str, dict[str, Any]]] = {}

    async def async_fetch(
        self,
        language: str,
        category: str,
        components: set,
    ) -> list[dict[str, dict[str, Any]]]:
        """Load resources into the cache and return them per component."""
        # Only hit disk for components not yet cached for this language.
        components_to_load = components - self.loaded.setdefault(language, set())
        if components_to_load:
            await self._async_load(language, components_to_load)
        cached = self.cache.get(language, {})
        return [cached.get(component, {}).get(category, {}) for component in components]

    async def _async_load(self, language: str, components: set) -> None:
        """Populate the cache for a given set of components."""
        _LOGGER.debug(
            "Cache miss for %s: %s",
            language,
            ", ".join(components),
        )
        # Fetch the English resources, as a fallback for missing keys
        languages = [LOCALE_EN] if language == LOCALE_EN else [LOCALE_EN, language]
        # English is applied first, then overwritten by the target language.
        for translation_strings in await asyncio.gather(
            *[
                async_get_component_strings(self.hass, lang, components)
                for lang in languages
            ]
        ):
            self._build_category_cache(language, components, translation_strings)
        self.loaded[language].update(components)

    @callback
    def _build_category_cache(
        self,
        language: str,
        components: set,
        translation_strings: dict[str, dict[str, Any]],
    ) -> None:
        """Extract resources into the cache."""
        cached = self.cache.setdefault(language, {})
        categories: set[str] = set()
        for resource in translation_strings.values():
            categories.update(resource)
        for category in categories:
            # Only the "state" category merges platform strings into domains.
            resource_func = (
                _merge_resources if category == "state" else _build_resources
            )
            new_resources = resource_func(translation_strings, components, category)
            for component, resource in new_resources.items():
                category_cache: dict[str, Any] = cached.setdefault(
                    component, {}
                ).setdefault(category, {})
                if isinstance(resource, dict):
                    # Flatten nested keys to "component.<name>.<category>.a.b".
                    category_cache.update(
                        recursive_flatten(
                            f"component.{component}.{category}.",
                            resource,
                        )
                    )
                else:
                    category_cache[f"component.{component}.{category}"] = resource
@bind_hass
async def async_get_translations(
    hass: HomeAssistant,
    language: str,
    category: str,
    integration: str | None = None,
    config_flow: bool | None = None,
) -> dict[str, Any]:
    """Return all backend translations.

    If integration specified, load it for that one.
    Otherwise default to loaded integrations combined with config flow
    integrations if config_flow is true.
    """
    lock = hass.data.setdefault(TRANSLATION_LOAD_LOCK, asyncio.Lock())
    if integration is not None:
        components = {integration}
    elif config_flow:
        # Config-flow-only integrations: those not already set up.
        components = (await async_get_config_flows(hass)) - hass.config.components
    elif category == "state":
        components = set(hass.config.components)
    else:
        # Only 'state' supports merging, so remove platforms from selection
        components = {
            component for component in hass.config.components if "." not in component
        }
    # Serialize cache population so concurrent callers don't load twice.
    async with lock:
        cache = hass.data.setdefault(TRANSLATION_FLATTEN_CACHE, _TranslationCache(hass))
        cached = await cache.async_fetch(language, category, components)
    return dict(ChainMap(*cached))
from __future__ import annotations
import asyncio
import functools
import logging
from traceback import FrameSummary, extract_stack
from typing import Any, Callable, TypeVar, cast
from homeassistant.exceptions import HomeAssistantError
_LOGGER = logging.getLogger(__name__)
CALLABLE_T = TypeVar("CALLABLE_T", bound=Callable) # pylint: disable=invalid-name
def get_integration_frame(
    exclude_integrations: set | None = None,
) -> tuple[FrameSummary, str, str]:
    """Return the frame, integration and integration path of the current stack frame.

    Walks the stack from the innermost frame outward and returns the first
    frame whose filename lives under an integration directory, skipping any
    integration named in *exclude_integrations*.

    Raises MissingIntegrationFrame when no such frame exists.
    """
    found_frame = None
    if not exclude_integrations:
        exclude_integrations = set()
    for frame in reversed(extract_stack()):
        # Both built-in and custom component locations can match.
        for path in ("custom_components/", "homeassistant/components/"):
            try:
                index = frame.filename.index(path)
                start = index + len(path)
                end = frame.filename.index("/", start)
                integration = frame.filename[start:end]
                if integration not in exclude_integrations:
                    found_frame = frame
                    break
            except ValueError:
                # Prefix (or the trailing "/") not present in this filename.
                continue
        if found_frame is not None:
            break
    if found_frame is None:
        raise MissingIntegrationFrame
    # integration/path are the loop variables from the matching iteration.
    return found_frame, integration, path
class MissingIntegrationFrame(HomeAssistantError):
    """Raised when no integration is found in the frame."""
def report(what: str) -> None:
    """Report incorrect usage.

    Async friendly.
    """
    try:
        frame_info = get_integration_frame()
    except MissingIntegrationFrame as err:
        # No integration in the call stack: the misuse is in core code,
        # which is a hard error rather than a logged warning.
        raise RuntimeError(
            f"Detected code that {what}. Please report this issue."
        ) from err
    else:
        report_integration(what, frame_info)
def report_integration(
    what: str, integration_frame: tuple[FrameSummary, str, str]
) -> None:
    """Report incorrect usage in an integration.

    Async friendly.
    """
    found_frame, integration, path = integration_frame

    # Custom components get an extra hint about whom to report the issue to.
    extra = " to the custom component author" if path == "custom_components/" else ""
    index = found_frame.filename.index(path)
    _LOGGER.warning(
        "Detected integration that %s. "
        "Please report issue%s for %s using this method at %s, line %s: %s",
        what,
        extra,
        integration,
        found_frame.filename[index:],
        found_frame.lineno,
        found_frame.line.strip(),
    )
def warn_use(func: CALLABLE_T, what: str) -> CALLABLE_T:
    """Mock a function to warn when it was about to be used.

    Returns a replacement with the same sync/async flavor as *func* that
    only reports the misuse instead of executing the original.
    """
    if asyncio.iscoroutinefunction(func):

        @functools.wraps(func)
        async def report_use(*args: Any, **kwargs: Any) -> None:
            report(what)

    else:

        @functools.wraps(func)
        def report_use(*args: Any, **kwargs: Any) -> None:
            report(what)

    return cast(CALLABLE_T, report_use)
import logging
from typing import Any, Callable
from homeassistant.core import HassJob, HomeAssistant, callback
from homeassistant.loader import bind_hass
from homeassistant.util.async_ import run_callback_threadsafe
from homeassistant.util.logging import catch_log_exception
_LOGGER = logging.getLogger(__name__)
DATA_DISPATCHER = "dispatcher"
@bind_hass
def dispatcher_connect(
    hass: HomeAssistant, signal: str, target: Callable[..., None]
) -> Callable[[], None]:
    """Connect a callable function to a signal.

    Thread-safe wrapper around async_dispatcher_connect; blocks until the
    listener is registered on the event loop.
    """
    async_unsub = run_callback_threadsafe(
        hass.loop, async_dispatcher_connect, hass, signal, target
    ).result()

    def remove_dispatcher() -> None:
        """Remove signal listener (safe to call from any thread)."""
        run_callback_threadsafe(hass.loop, async_unsub).result()

    return remove_dispatcher
@callback
@bind_hass
def async_dispatcher_connect(
    hass: HomeAssistant, signal: str, target: Callable[..., Any]
) -> Callable[[], None]:
    """Connect a callable function to a signal.

    This method must be run in the event loop.
    """
    if DATA_DISPATCHER not in hass.data:
        hass.data[DATA_DISPATCHER] = {}

    # Wrap the target so exceptions raised during dispatch are logged with
    # the signal name instead of killing the dispatcher.
    job = HassJob(
        catch_log_exception(
            target,
            lambda *args: "Exception in {} when dispatching '{}': {}".format(
                # Functions wrapped in partial do not have a __name__
                getattr(target, "__name__", None) or str(target),
                signal,
                args,
            ),
        )
    )

    hass.data[DATA_DISPATCHER].setdefault(signal, []).append(job)

    @callback
    def async_remove_dispatcher() -> None:
        """Remove signal listener."""
        try:
            hass.data[DATA_DISPATCHER][signal].remove(job)
        except (KeyError, ValueError):
            # KeyError is key target listener did not exist
            # ValueError if listener did not exist within signal
            _LOGGER.warning("Unable to remove unknown dispatcher %s", target)

    return async_remove_dispatcher
@bind_hass
def dispatcher_send(hass: HomeAssistant, signal: str, *args: Any) -> None:
    """Send signal and data.

    Thread-safe: schedules the dispatch on the event loop and returns
    immediately.
    """
    hass.loop.call_soon_threadsafe(async_dispatcher_send, hass, signal, *args)
@callback
@bind_hass
def async_dispatcher_send(hass: HomeAssistant, signal: str, *args: Any) -> None:
    """Send signal and data.

    This method must be run in the event loop.
    """
    dispatchers = hass.data.get(DATA_DISPATCHER, {})
    for job in dispatchers.get(signal, []):
        hass.async_add_hass_job(job, *args)
from __future__ import annotations
import asyncio
from contextlib import suppress
from json import JSONEncoder
import logging
import os
from typing import Any, Callable
from homeassistant.const import EVENT_HOMEASSISTANT_FINAL_WRITE
from homeassistant.core import CALLBACK_TYPE, CoreState, Event, HomeAssistant, callback
from homeassistant.helpers.event import async_call_later
from homeassistant.loader import bind_hass
from homeassistant.util import json as json_util
# mypy: allow-untyped-calls, allow-untyped-defs, no-warn-return-any
# mypy: no-check-untyped-defs
STORAGE_DIR = ".storage"
_LOGGER = logging.getLogger(__name__)
@bind_hass
async def async_migrator(
    hass,
    old_path,
    store,
    *,
    old_conf_load_func=None,
    old_conf_migrate_func=None,
):
    """Migrate old data to a store and then load data.

    async def old_conf_migrate_func(old_data)

    Returns the store data if it already exists (migration done previously),
    otherwise loads the legacy file, optionally migrates it, saves it to the
    store and deletes the legacy file. Returns None when there is nothing to
    migrate.
    """
    store_data = await store.async_load()

    # If we already have store data we have already migrated in the past.
    if store_data is not None:
        return store_data

    def load_old_config():
        """Load old config (runs in the executor: blocking file I/O)."""
        if not os.path.isfile(old_path):
            return None

        if old_conf_load_func is not None:
            return old_conf_load_func(old_path)

        return json_util.load_json(old_path)

    config = await hass.async_add_executor_job(load_old_config)

    if config is None:
        return None

    if old_conf_migrate_func is not None:
        config = await old_conf_migrate_func(config)

    # Persist first, only then remove the legacy file, so a failed save
    # does not lose the old data.
    await store.async_save(config)
    await hass.async_add_executor_job(os.remove, old_path)
    return config
@bind_hass
class Store:
    """Class to help storing data."""

    def __init__(
        self,
        hass: HomeAssistant,
        version: int,
        key: str,
        private: bool = False,
        *,
        encoder: type[JSONEncoder] | None = None,
    ) -> None:
        """Initialize storage class."""
        self.version = version
        self.key = key
        self.hass = hass
        self._private = private
        # Pending (not yet written) payload; None once flushed to disk.
        self._data: dict[str, Any] | None = None
        self._unsub_delay_listener: CALLBACK_TYPE | None = None
        self._unsub_final_write_listener: CALLBACK_TYPE | None = None
        # Serializes writes so delayed and direct saves cannot interleave.
        self._write_lock = asyncio.Lock()
        self._load_task: asyncio.Future | None = None
        self._encoder = encoder

    @property
    def path(self):
        """Return the config path."""
        return self.hass.config.path(STORAGE_DIR, self.key)

    async def async_load(self) -> dict | list | None:
        """Load data.

        If the expected version does not match the given version, the migrate
        function will be invoked with await migrate_func(version, config).

        Will ensure that when a call comes in while another one is in progress,
        the second call will wait and return the result of the first call.
        """
        if self._load_task is None:
            self._load_task = self.hass.async_create_task(self._async_load())

        return await self._load_task

    async def _async_load(self):
        """Load the data and ensure the task is removed."""
        try:
            return await self._async_load_data()
        finally:
            self._load_task = None

    async def _async_load_data(self):
        """Load the data."""
        # Check if we have a pending write
        if self._data is not None:
            data = self._data

            # If we didn't generate data yet, do it now.
            if "data_func" in data:
                data["data"] = data.pop("data_func")()
        else:
            data = await self.hass.async_add_executor_job(
                json_util.load_json, self.path
            )

            # load_json returns {} for a missing file.
            if data == {}:
                return None
        if data["version"] == self.version:
            stored = data["data"]
        else:
            _LOGGER.info(
                "Migrating %s storage from %s to %s",
                self.key,
                data["version"],
                self.version,
            )
            stored = await self._async_migrate_func(data["version"], data["data"])

        return stored

    async def async_save(self, data: dict | list) -> None:
        """Save data."""
        self._data = {"version": self.version, "key": self.key, "data": data}

        if self.hass.state == CoreState.stopping:
            # Too late to write now; let the final-write event flush it.
            self._async_ensure_final_write_listener()
            return

        await self._async_handle_write_data()

    @callback
    def async_delay_save(self, data_func: Callable[[], dict], delay: float = 0) -> None:
        """Save data with an optional delay.

        data_func is called lazily at write time so the freshest data wins.
        """
        self._data = {"version": self.version, "key": self.key, "data_func": data_func}

        self._async_cleanup_delay_listener()
        self._async_ensure_final_write_listener()

        if self.hass.state == CoreState.stopping:
            return

        self._unsub_delay_listener = async_call_later(
            self.hass, delay, self._async_callback_delayed_write
        )

    @callback
    def _async_ensure_final_write_listener(self) -> None:
        """Ensure that we write if we quit before delay has passed."""
        if self._unsub_final_write_listener is None:
            self._unsub_final_write_listener = self.hass.bus.async_listen_once(
                EVENT_HOMEASSISTANT_FINAL_WRITE, self._async_callback_final_write
            )

    @callback
    def _async_cleanup_final_write_listener(self) -> None:
        """Clean up a stop listener."""
        if self._unsub_final_write_listener is not None:
            self._unsub_final_write_listener()
            self._unsub_final_write_listener = None

    @callback
    def _async_cleanup_delay_listener(self) -> None:
        """Clean up a delay listener."""
        if self._unsub_delay_listener is not None:
            self._unsub_delay_listener()
            self._unsub_delay_listener = None

    async def _async_callback_delayed_write(self, _now):
        """Handle a delayed write callback."""
        # catch the case where a call is scheduled and then we stop Safegate Pro
        if self.hass.state == CoreState.stopping:
            self._async_ensure_final_write_listener()
            return
        await self._async_handle_write_data()

    async def _async_callback_final_write(self, _event: Event) -> None:
        """Handle a write because Safegate Pro is in final write state."""
        self._unsub_final_write_listener = None
        await self._async_handle_write_data()

    async def _async_handle_write_data(self, *_args):
        """Handle writing the config."""
        async with self._write_lock:
            self._async_cleanup_delay_listener()
            self._async_cleanup_final_write_listener()

            if self._data is None:
                # Another write already consumed the data
                return

            data = self._data

            if "data_func" in data:
                data["data"] = data.pop("data_func")()

            # Clear before writing so a concurrent save starts fresh.
            self._data = None

            try:
                await self.hass.async_add_executor_job(
                    self._write_data, self.path, data
                )
            except (json_util.SerializationError, json_util.WriteError) as err:
                _LOGGER.error("Error writing config for %s: %s", self.key, err)

    def _write_data(self, path: str, data: dict) -> None:
        """Write the data (runs in the executor: blocking file I/O)."""
        if not os.path.isdir(os.path.dirname(path)):
            os.makedirs(os.path.dirname(path))

        _LOGGER.debug("Writing data for %s to %s", self.key, path)
        json_util.save_json(path, data, self._private, encoder=self._encoder)

    async def _async_migrate_func(self, old_version, old_data):
        """Migrate to the new version. Subclasses must override to migrate."""
        raise NotImplementedError

    async def async_remove(self) -> None:
        """Remove all data."""
        self._async_cleanup_delay_listener()
        self._async_cleanup_final_write_listener()

        with suppress(FileNotFoundError):
            await self.hass.async_add_executor_job(os.unlink, self.path)
from __future__ import annotations
from typing import Any
from aiohttp import web
import voluptuous as vol
from homeassistant import config_entries, data_entry_flow
from homeassistant.components.http import HomeAssistantView
from homeassistant.components.http.data_validator import RequestDataValidator
from homeassistant.const import HTTP_BAD_REQUEST, HTTP_NOT_FOUND
import homeassistant.helpers.config_validation as cv
class _BaseFlowManagerView(HomeAssistantView):
    """Foundation for flow manager views."""

    def __init__(self, flow_mgr: data_entry_flow.FlowManager) -> None:
        """Initialize the flow manager index view."""
        self._flow_mgr = flow_mgr

    # pylint: disable=no-self-use
    def _prepare_result_json(
        self, result: data_entry_flow.FlowResult
    ) -> data_entry_flow.FlowResult:
        """Convert result to JSON."""
        if result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY:
            data = result.copy()
            # Drop fields that should not be sent back to the API client.
            data.pop("result")
            data.pop("data")
            return data

        if result["type"] != data_entry_flow.RESULT_TYPE_FORM:
            return result

        import voluptuous_serialize  # pylint: disable=import-outside-toplevel

        data = result.copy()

        # Serialize the voluptuous schema so the frontend can render the form.
        schema = data["data_schema"]
        if schema is None:
            data["data_schema"] = []
        else:
            data["data_schema"] = voluptuous_serialize.convert(
                schema, custom_serializer=cv.custom_serializer
            )

        return data
class FlowManagerIndexView(_BaseFlowManagerView):
    """View to create config flows."""

    @RequestDataValidator(
        vol.Schema(
            {
                # Handlers may be a single domain or a (domain, sub-handler) pair.
                vol.Required("handler"): vol.Any(str, list),
                vol.Optional("show_advanced_options", default=False): cv.boolean,
            },
            extra=vol.ALLOW_EXTRA,
        )
    )
    async def post(self, request: web.Request, data: dict[str, Any]) -> web.Response:
        """Handle a POST request.

        Starts a new flow for the given handler; returns 404 for unknown
        handlers and 400 when the handler has no user step.
        """
        if isinstance(data["handler"], list):
            handler = tuple(data["handler"])
        else:
            handler = data["handler"]

        try:
            result = await self._flow_mgr.async_init(
                handler,  # type: ignore
                context={
                    "source": config_entries.SOURCE_USER,
                    "show_advanced_options": data["show_advanced_options"],
                },
            )
        except data_entry_flow.UnknownHandler:
            return self.json_message("Invalid handler specified", HTTP_NOT_FOUND)
        except data_entry_flow.UnknownStep:
            return self.json_message("Handler does not support user", HTTP_BAD_REQUEST)

        result = self._prepare_result_json(result)

        return self.json(result)
class FlowManagerResourceView(_BaseFlowManagerView):
    """View to interact with the flow manager."""

    async def get(self, request: web.Request, flow_id: str) -> web.Response:
        """Get the current state of a data_entry_flow."""
        try:
            result = await self._flow_mgr.async_configure(flow_id)
        except data_entry_flow.UnknownFlow:
            return self.json_message("Invalid flow specified", HTTP_NOT_FOUND)

        result = self._prepare_result_json(result)

        return self.json(result)

    @RequestDataValidator(vol.Schema(dict), allow_empty=True)
    async def post(
        self, request: web.Request, flow_id: str, data: dict[str, Any]
    ) -> web.Response:
        """Handle a POST request.

        Advances the flow with the user-provided data; 404 for an unknown
        flow, 400 when the input does not match the step's schema.
        """
        try:
            result = await self._flow_mgr.async_configure(flow_id, data)
        except data_entry_flow.UnknownFlow:
            return self.json_message("Invalid flow specified", HTTP_NOT_FOUND)
        except vol.Invalid:
            return self.json_message("User input malformed", HTTP_BAD_REQUEST)

        result = self._prepare_result_json(result)

        return self.json(result)

    async def delete(self, request: web.Request, flow_id: str) -> web.Response:
        """Cancel a flow in progress."""
        try:
            self._flow_mgr.async_abort(flow_id)
        except data_entry_flow.UnknownFlow:
            return self.json_message("Invalid flow specified", HTTP_NOT_FOUND)
        return self.json_message("Flow aborted")
from __future__ import annotations
import asyncio
from collections import defaultdict
from collections.abc import Iterable
import datetime as dt
import logging
from types import ModuleType, TracebackType
from typing import Any
from homeassistant.components.sun import STATE_ABOVE_HORIZON, STATE_BELOW_HORIZON
from homeassistant.const import (
STATE_CLOSED,
STATE_HOME,
STATE_LOCKED,
STATE_NOT_HOME,
STATE_OFF,
STATE_ON,
STATE_OPEN,
STATE_UNKNOWN,
STATE_UNLOCKED,
)
from homeassistant.core import Context, HomeAssistant, State
from homeassistant.loader import IntegrationNotFound, async_get_integration, bind_hass
import homeassistant.util.dt as dt_util
from .frame import report
_LOGGER = logging.getLogger(__name__)
class AsyncTrackStates:
    """Context manager that records which states changed inside its block.

    On entry the current UTC time is remembered; on exit every state whose
    last update is at or after that instant is appended to the list returned
    by ``__enter__``.  Must be run within the event loop.

    Deprecated. Remove after June 2021.
    Warning added via `get_changed_since`.
    """

    def __init__(self, hass: HomeAssistant) -> None:
        """Initialize a TrackStates block."""
        self.hass = hass
        self.states: list[State] = []

    # pylint: disable=attribute-defined-outside-init
    def __enter__(self) -> list[State]:
        """Record time from which to track changes."""
        self.now = dt_util.utcnow()
        return self.states

    def __exit__(
        self,
        exc_type: type[BaseException] | None,
        exc_value: BaseException | None,
        traceback: TracebackType | None,
    ) -> None:
        """Collect the states changed since ``__enter__`` ran."""
        changed = get_changed_since(self.hass.states.async_all(), self.now)
        self.states.extend(changed)
def get_changed_since(
    states: Iterable[State], utc_point_in_time: dt.datetime
) -> list[State]:
    """Return list of states that have been changed since utc_point_in_time.

    Deprecated. Remove after June 2021.
    """
    report("uses deprecated `get_changed_since`")
    # Keep only states updated at or after the reference instant.
    return list(
        filter(lambda state: state.last_updated >= utc_point_in_time, states)
    )
@bind_hass
async def async_reproduce_state(
    hass: HomeAssistant,
    states: State | Iterable[State],
    *,
    context: Context | None = None,
    reproduce_options: dict[str, Any] | None = None,
) -> None:
    """Reproduce a list of states on multiple domains.

    States are grouped by domain and each domain's ``reproduce_state``
    platform is invoked once with all of that domain's states; domains
    that cannot be resolved or lack the platform are logged and skipped.
    """
    # Accept a single State as a convenience and normalize to a list.
    if isinstance(states, State):
        states = [states]
    # Bucket the states per domain so each integration is called once.
    to_call: dict[str, list[State]] = defaultdict(list)
    for state in states:
        to_call[state.domain].append(state)

    async def worker(domain: str, states_by_domain: list[State]) -> None:
        """Reproduce the states for one domain, logging unsupported cases."""
        try:
            integration = await async_get_integration(hass, domain)
        except IntegrationNotFound:
            _LOGGER.warning(
                "Trying to reproduce state for unknown integration: %s", domain
            )
            return
        try:
            platform: ModuleType | None = integration.get_platform("reproduce_state")
        except ImportError:
            _LOGGER.warning("Integration %s does not support reproduce state", domain)
            return
        await platform.async_reproduce_states(  # type: ignore
            hass, states_by_domain, context=context, reproduce_options=reproduce_options
        )

    if to_call:
        # run all domains in parallel
        await asyncio.gather(
            *(worker(domain, data) for domain, data in to_call.items())
        )
def state_as_number(state: State) -> float:
    """Coerce a state to a number.

    Raises ValueError if this is not possible.
    """
    on_like = (
        STATE_ON,
        STATE_LOCKED,
        STATE_ABOVE_HORIZON,
        STATE_OPEN,
        STATE_HOME,
    )
    off_like = (
        STATE_OFF,
        STATE_UNLOCKED,
        STATE_UNKNOWN,
        STATE_BELOW_HORIZON,
        STATE_CLOSED,
        STATE_NOT_HOME,
    )
    if state.state in on_like:
        return 1
    if state.state in off_like:
        return 0
    # Anything else must parse as a number, or float() raises ValueError.
    return float(state.state)
from __future__ import annotations
import asyncio
from collections.abc import Iterable
import logging
from homeassistant import config as conf_util
from homeassistant.const import SERVICE_RELOAD
from homeassistant.core import Event, HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import config_per_platform
from homeassistant.helpers.entity_platform import EntityPlatform, async_get_platforms
from homeassistant.helpers.typing import ConfigType
from homeassistant.loader import async_get_integration
from homeassistant.setup import async_setup_component
_LOGGER = logging.getLogger(__name__)
async def async_reload_integration_platforms(
    hass: HomeAssistant, integration_name: str, integration_platforms: Iterable
) -> None:
    """Reload an integration's platforms.

    The platform must support being re-setup.
    This functionality is only intended to be used for integrations that process
    Safegate Pro data and make this available to other integrations.
    Examples are template, stats, derivative, utility meter.
    """
    try:
        unprocessed_conf = await conf_util.async_hass_config_yaml(hass)
    except HomeAssistantError as err:
        _LOGGER.error(err)
        return
    # Re-set up every platform concurrently against the freshly read config.
    await asyncio.gather(
        *(
            _resetup_platform(hass, integration_name, platform, unprocessed_conf)
            for platform in integration_platforms
        )
    )
async def _resetup_platform(
    hass: HomeAssistant,
    integration_name: str,
    integration_platform: str,
    unprocessed_conf: ConfigType,
) -> None:
    """Resetup a platform.

    Validates the raw YAML for ``integration_platform``, then re-applies the
    entries belonging to ``integration_name`` via, in order of preference:
    the component's own ``async_reset_platform`` hook, an already-loaded
    entity platform, or a fresh platform setup.
    """
    integration = await async_get_integration(hass, integration_platform)
    conf = await conf_util.async_process_component_config(
        hass, unprocessed_conf, integration
    )
    # Validation failed; config processing already reported the errors.
    if not conf:
        return
    root_config: dict = {integration_platform: []}
    # Extract only the config for template, ignore the rest.
    for p_type, p_config in config_per_platform(conf, integration_platform):
        if p_type != integration_name:
            continue
        root_config[integration_platform].append(p_config)
    component = integration.get_component()
    if hasattr(component, "async_reset_platform"):
        # If the integration has its own way to reset
        # use this method.
        await component.async_reset_platform(hass, integration_name)  # type: ignore
        await component.async_setup(hass, root_config)  # type: ignore
        return
    # If its an entity platform, we use the entity_platform
    # async_reset method
    platform = async_get_platform_without_config_entry(
        hass, integration_name, integration_platform
    )
    if platform:
        await _async_reconfig_platform(platform, root_config[integration_platform])
        return
    if not root_config[integration_platform]:
        # No config for this platform
        # and its not loaded. Nothing to do
        return
    await _async_setup_platform(
        hass, integration_name, integration_platform, root_config[integration_platform]
    )
async def _async_setup_platform(
    hass: HomeAssistant,
    integration_name: str,
    integration_platform: str,
    platform_configs: list[dict],
) -> None:
    """Set up a platform for the first time when new configuration is added."""
    if integration_platform not in hass.data:
        # The platform integration was never loaded: do a full component setup.
        await async_setup_component(
            hass, integration_platform, {integration_platform: platform_configs}
        )
        return
    # Otherwise feed each config entry to the existing entity component.
    entity_component = hass.data[integration_platform]
    await asyncio.gather(
        *(
            entity_component.async_setup_platform(integration_name, p_config)
            for p_config in platform_configs
        )
    )
async def _async_reconfig_platform(
    platform: EntityPlatform, platform_configs: list[dict]
) -> None:
    """Reconfigure an already loaded platform."""
    # Drop all current entities, then set up each new config in parallel.
    await platform.async_reset()
    await asyncio.gather(
        *(platform.async_setup(p_config) for p_config in platform_configs)
    )
async def async_integration_yaml_config(
    hass: HomeAssistant, integration_name: str
) -> ConfigType | None:
    """Fetch the latest yaml configuration for an integration."""
    integration = await async_get_integration(hass, integration_name)
    raw_config = await conf_util.async_hass_config_yaml(hass)
    # Returns None when validation fails (per async_process_component_config).
    return await conf_util.async_process_component_config(
        hass, raw_config, integration
    )
@callback
def async_get_platform_without_config_entry(
    hass: HomeAssistant, integration_name: str, integration_platform_name: str
) -> EntityPlatform | None:
    """Find an existing platform that is not a config entry."""
    for candidate in async_get_platforms(hass, integration_name):
        # Config-entry based platforms are reloaded through their entry.
        if candidate.config_entry is not None:
            continue
        if candidate.domain == integration_platform_name:
            platform: EntityPlatform = candidate
            return platform
    return None
async def async_setup_reload_service(
    hass: HomeAssistant, domain: str, platforms: Iterable
) -> None:
    """Create the reload service for the domain."""
    # Only register once per domain.
    if hass.services.has_service(domain, SERVICE_RELOAD):
        return

    async def _reload_config(call: Event) -> None:
        """Reload the platforms and announce completion on the event bus."""
        await async_reload_integration_platforms(hass, domain, platforms)
        hass.bus.async_fire(f"event_{domain}_reloaded", context=call.context)

    hass.helpers.service.async_register_admin_service(
        domain, SERVICE_RELOAD, _reload_config
    )
def setup_reload_service(hass: HomeAssistant, domain: str, platforms: Iterable) -> None:
    """Sync version of async_setup_reload_service."""
    future = asyncio.run_coroutine_threadsafe(
        async_setup_reload_service(hass, domain, platforms), hass.loop
    )
    # Block the calling thread until registration completes on the event loop.
    future.result()
from __future__ import annotations
import datetime
from typing import TYPE_CHECKING
from homeassistant.const import SUN_EVENT_SUNRISE, SUN_EVENT_SUNSET
from homeassistant.core import HomeAssistant, callback
from homeassistant.loader import bind_hass
from homeassistant.util import dt as dt_util
if TYPE_CHECKING:
import astral
DATA_LOCATION_CACHE = "astral_location_cache"
ELEVATION_AGNOSTIC_EVENTS = ("noon", "midnight")
@callback
@bind_hass
def get_astral_location(
    hass: HomeAssistant,
) -> tuple[astral.location.Location, astral.Elevation]:
    """Get an astral location for the current Safegate Pro configuration."""
    from astral import LocationInfo  # pylint: disable=import-outside-toplevel
    from astral.location import Location  # pylint: disable=import-outside-toplevel

    info = (
        "",
        "",
        str(hass.config.time_zone),
        hass.config.latitude,
        hass.config.longitude,
    )
    # Cache astral locations so they aren't recreated with the same args
    cache = hass.data.setdefault(DATA_LOCATION_CACHE, {})
    if info not in cache:
        cache[info] = Location(LocationInfo(*info))
    return cache[info], hass.config.elevation
@callback
@bind_hass
def get_astral_event_next(
    hass: HomeAssistant,
    event: str,
    utc_point_in_time: datetime.datetime | None = None,
    offset: datetime.timedelta | None = None,
) -> datetime.datetime:
    """Calculate the next specified solar event.

    Thin wrapper that resolves the configured astral location/elevation and
    delegates to get_location_astral_event_next.
    """
    location, elevation = get_astral_location(hass)
    return get_location_astral_event_next(
        location, elevation, event, utc_point_in_time, offset
    )
@callback
def get_location_astral_event_next(
    location: astral.location.Location,
    elevation: astral.Elevation,
    event: str,
    utc_point_in_time: datetime.datetime | None = None,
    offset: datetime.timedelta | None = None,
) -> datetime.datetime:
    """Calculate the next specified solar event.

    Starting from the day before ``utc_point_in_time`` (local date), walk
    forward one day at a time until the event's occurrence (plus ``offset``)
    lies strictly in the future.
    """
    if offset is None:
        offset = datetime.timedelta()
    if utc_point_in_time is None:
        utc_point_in_time = dt_util.utcnow()
    kwargs = {"local": False}
    # noon/midnight do not depend on the observer's elevation.
    if event not in ELEVATION_AGNOSTIC_EVENTS:
        kwargs["observer_elevation"] = elevation
    # Start at yesterday: around midnight the "next" occurrence can still
    # belong to the previous local date.
    mod = -1
    while True:
        try:
            next_dt: datetime.datetime = (
                getattr(location, event)(
                    dt_util.as_local(utc_point_in_time).date()
                    + datetime.timedelta(days=mod),
                    **kwargs,
                )
                + offset
            )
            if next_dt > utc_point_in_time:
                return next_dt
        except ValueError:
            # The event does not occur on this date (e.g. polar day/night);
            # keep walking forward.
            pass
        mod += 1
@callback
@bind_hass
def get_astral_event_date(
    hass: HomeAssistant,
    event: str,
    date: datetime.date | datetime.datetime | None = None,
) -> datetime.datetime | None:
    """Calculate the astral event time for the specified date."""
    location, elevation = get_astral_location(hass)
    if date is None:
        date = dt_util.now().date()
    elif isinstance(date, datetime.datetime):
        # Normalize a full datetime down to the local calendar date.
        date = dt_util.as_local(date).date()
    kwargs = {"local": False}
    # noon/midnight are independent of observer elevation.
    if event not in ELEVATION_AGNOSTIC_EVENTS:
        kwargs["observer_elevation"] = elevation
    try:
        return getattr(location, event)(date, **kwargs)  # type: ignore
    except ValueError:
        # Event never occurs for specified date.
        return None
@callback
@bind_hass
def is_up(
    hass: HomeAssistant, utc_point_in_time: datetime.datetime | None = None
) -> bool:
    """Calculate if the sun is currently up."""
    when = utc_point_in_time if utc_point_in_time is not None else dt_util.utcnow()
    # The sun is up exactly when the next sunset comes before the next sunrise.
    next_sunrise = get_astral_event_next(hass, SUN_EVENT_SUNRISE, when)
    next_sunset = get_astral_event_next(hass, SUN_EVENT_SUNSET, when)
    return next_sunrise > next_sunset
from __future__ import annotations
import asyncio
from collections.abc import Hashable
from datetime import datetime, timedelta
import logging
from typing import Any, Callable
from homeassistant.core import HomeAssistant, callback
import homeassistant.util.dt as dt_util
_LOGGER = logging.getLogger(__name__)
class KeyedRateLimit:
    """Class to track rate limits.

    Tracks, per hashable key, when an action last fired and can schedule a
    deferred call on the event loop for when the limit expires.  All methods
    are @callback and must run in the event loop.
    """

    def __init__(
        self,
        hass: HomeAssistant,
    ) -> None:
        """Initialize ratelimit tracker."""
        self.hass = hass
        # key -> time the tracked action last fired
        self._last_triggered: dict[Hashable, datetime] = {}
        # key -> timer that will run the deferred action once the limit expires
        self._rate_limit_timers: dict[Hashable, asyncio.TimerHandle] = {}

    @callback
    def async_has_timer(self, key: Hashable) -> bool:
        """Check if a rate limit timer is running."""
        if not self._rate_limit_timers:
            return False
        return key in self._rate_limit_timers

    @callback
    def async_triggered(self, key: Hashable, now: datetime | None = None) -> None:
        """Call when the action we are tracking was triggered."""
        # The action ran, so any pending deferred call is now obsolete.
        self.async_cancel_timer(key)
        self._last_triggered[key] = now or dt_util.utcnow()

    @callback
    def async_cancel_timer(self, key: Hashable) -> None:
        """Cancel a rate limit time that will call the action."""
        if not self._rate_limit_timers or not self.async_has_timer(key):
            return
        self._rate_limit_timers.pop(key).cancel()

    @callback
    def async_remove(self) -> None:
        """Remove all timers."""
        for timer in self._rate_limit_timers.values():
            timer.cancel()
        self._rate_limit_timers.clear()

    @callback
    def async_schedule_action(
        self,
        key: Hashable,
        rate_limit: timedelta | None,
        now: datetime,
        action: Callable,
        *args: Any,
    ) -> datetime | None:
        """Check rate limits and schedule an action if we hit the limit.

        If the rate limit is hit:
            Schedules the action for when the rate limit expires
            if there are no pending timers. The action must
            be called in async.

            Returns the time the rate limit will expire

        If the rate limit is not hit:
            Return None
        """
        if rate_limit is None:
            return None
        last_triggered = self._last_triggered.get(key)
        if not last_triggered:
            # Never fired before: not rate limited.
            return None
        next_call_time = last_triggered + rate_limit
        if next_call_time <= now:
            # The limit has already expired; clear any stale timer.
            self.async_cancel_timer(key)
            return None
        _LOGGER.debug(
            "Reached rate limit of %s for %s and deferred action until %s",
            rate_limit,
            key,
            next_call_time,
        )
        # At most one deferred call per key; keep the earliest scheduled one.
        if key not in self._rate_limit_timers:
            self._rate_limit_timers[key] = self.hass.loop.call_later(
                (next_call_time - now).total_seconds(),
                action,
                *args,
            )
        return next_call_time
from __future__ import annotations
import functools
import inspect
import logging
from typing import Any, Callable
from ..helpers.frame import MissingIntegrationFrame, get_integration_frame
def deprecated_substitute(substitute_name: str) -> Callable[..., Callable]:
    """Help migrate properties to new names.

    When a property is added to replace an older property, this decorator can
    be added to the new property, listing the old property as the substitute.
    If the old property is defined, its value will be used instead, and a log
    warning will be issued alerting the user of the impending change.
    """

    def decorator(func: Callable) -> Callable:
        """Decorate function as deprecated."""

        def func_wrapper(self: Callable) -> Any:
            """Wrap for the original function."""
            if hasattr(self, substitute_name):
                # If this platform is still using the old property, issue
                # a logger warning once with instructions on how to fix it.
                # The seen-modules registry is stored on the wrapped function
                # itself so each module is warned at most once.
                warnings = getattr(func, "_deprecated_substitute_warnings", {})
                module_name = self.__module__
                if not warnings.get(module_name):
                    logger = logging.getLogger(module_name)
                    logger.warning(
                        "'%s' is deprecated. Please rename '%s' to "
                        "'%s' in '%s' to ensure future support.",
                        substitute_name,
                        substitute_name,
                        func.__name__,
                        inspect.getfile(self.__class__),
                    )
                    warnings[module_name] = True
                    setattr(func, "_deprecated_substitute_warnings", warnings)
                # Return the old property
                return getattr(self, substitute_name)
            return func(self)

        return func_wrapper

    return decorator
def get_deprecated(
    config: dict[str, Any], new_name: str, old_name: str, default: Any | None = None
) -> Any | None:
    """Allow an old config name to be deprecated with a replacement.

    If the new config isn't found, but the old one is, the old value is used
    and a warning is issued to the user.
    """
    if old_name not in config:
        return config.get(new_name, default)
    # Attribute the warning to the caller's module when it can be resolved.
    module = inspect.getmodule(inspect.stack(context=0)[1].frame)
    if module is not None:
        module_name = module.__name__
    else:
        # If Python is unable to access the sources files, the call stack frame
        # will be missing information, so let's guard.
        # https://github.com/home-assistant/core/issues/24982
        module_name = __name__
    logging.getLogger(module_name).warning(
        "'%s' is deprecated. Please rename '%s' to '%s' in your "
        "configuration file.",
        old_name,
        old_name,
        new_name,
    )
    return config.get(old_name)
def deprecated_class(replacement: str) -> Any:
    """Mark class as deprecated and provide a replacement class to be used instead."""

    def _decorator(cls: Any) -> Any:
        """Wrap *cls* so each instantiation logs a deprecation warning."""

        @functools.wraps(cls)
        def _construct(*args: tuple, **kwargs: dict[str, Any]) -> Any:
            """Warn, then build an instance of the original class."""
            _print_deprecation_warning(cls, replacement, "class")
            return cls(*args, **kwargs)

        return _construct

    return _decorator
def deprecated_function(replacement: str) -> Callable[..., Callable]:
    """Mark function as deprecated and provide a replacement function to be used instead."""

    def _decorator(func: Callable) -> Callable:
        """Wrap *func* so each call logs a deprecation warning."""

        @functools.wraps(func)
        def _call(*args: tuple, **kwargs: dict[str, Any]) -> Any:
            """Warn, then invoke the original function."""
            _print_deprecation_warning(func, replacement, "function")
            return func(*args, **kwargs)

        return _call

    return _decorator
def _print_deprecation_warning(obj: Any, replacement: str, description: str) -> None:
    """Log a deprecation warning, naming the calling integration when known."""
    logger = logging.getLogger(obj.__module__)
    try:
        _, integration, path = get_integration_frame()
    except MissingIntegrationFrame:
        # The caller could not be attributed to an integration.
        logger.warning(
            "%s is a deprecated %s. Use %s instead",
            obj.__name__,
            description,
            replacement,
        )
        return
    if path == "custom_components/":
        logger.warning(
            "%s was called from %s, this is a deprecated %s. Use %s instead, please report this to the maintainer of %s",
            obj.__name__,
            integration,
            description,
            replacement,
            integration,
        )
    else:
        logger.warning(
            "%s was called from %s, this is a deprecated %s. Use %s instead",
            obj.__name__,
            integration,
            description,
            replacement,
        )
from __future__ import annotations
from collections.abc import Iterable
import logging
import re
from typing import Any, Callable, Dict
import voluptuous as vol
from homeassistant.const import ATTR_ENTITY_ID, ATTR_SUPPORTED_FEATURES
from homeassistant.core import Context, HomeAssistant, State, T, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import config_validation as cv
from homeassistant.loader import bind_hass
_LOGGER = logging.getLogger(__name__)
_SlotsType = Dict[str, Any]
INTENT_TURN_OFF = "HassTurnOff"
INTENT_TURN_ON = "HassTurnOn"
INTENT_TOGGLE = "HassToggle"
SLOT_SCHEMA = vol.Schema({}, extra=vol.ALLOW_EXTRA)
DATA_KEY = "intent"
SPEECH_TYPE_PLAIN = "plain"
SPEECH_TYPE_SSML = "ssml"
@callback
@bind_hass
def async_register(hass: HomeAssistant, handler: IntentHandler) -> None:
    """Register an intent with Safegate Pro."""
    # Create the registry on first use (also replaces an explicit None value).
    if hass.data.get(DATA_KEY) is None:
        hass.data[DATA_KEY] = {}
    intents = hass.data[DATA_KEY]
    assert handler.intent_type is not None, "intent_type cannot be None"
    if handler.intent_type in intents:
        _LOGGER.warning(
            "Intent %s is being overwritten by %s", handler.intent_type, handler
        )
    intents[handler.intent_type] = handler
@bind_hass
async def async_handle(
    hass: HomeAssistant,
    platform: str,
    intent_type: str,
    slots: _SlotsType | None = None,
    text_input: str | None = None,
    context: Context | None = None,
) -> IntentResponse:
    """Handle an intent.

    Looks up the registered handler for ``intent_type`` and invokes it.

    Raises:
        UnknownIntent: no handler is registered for the intent type.
        InvalidSlotInfo: the handler rejected the slot data.
        IntentHandleError: the handler failed in an expected way.
        IntentUnexpectedError: any other handler failure.
    """
    handler: IntentHandler = hass.data.get(DATA_KEY, {}).get(intent_type)
    if handler is None:
        raise UnknownIntent(f"Unknown intent {intent_type}")
    if context is None:
        context = Context()
    intent = Intent(hass, platform, intent_type, slots or {}, text_input, context)
    try:
        _LOGGER.info("Triggering intent handler %s", handler)
        result = await handler.async_handle(intent)
        return result
    except vol.Invalid as err:
        _LOGGER.warning("Received invalid slot info for %s: %s", intent_type, err)
        raise InvalidSlotInfo(f"Received invalid slot info for {intent_type}") from err
    except IntentHandleError:
        # Expected handler errors pass through unchanged.
        raise
    except Exception as err:
        # Wrap anything unexpected so callers deal with one error family.
        raise IntentUnexpectedError(f"Error handling {intent_type}") from err
class IntentError(HomeAssistantError):
    """Base class for intent related errors."""


class UnknownIntent(IntentError):
    """When the intent is not registered."""


class InvalidSlotInfo(IntentError):
    """When the slot data is invalid."""


class IntentHandleError(IntentError):
    """Error while handling intent (expected handler failure)."""


class IntentUnexpectedError(IntentError):
    """Unexpected error while handling intent."""
@callback
@bind_hass
def async_match_state(
    hass: HomeAssistant, name: str, states: Iterable[State] | None = None
) -> State:
    """Find a state that matches the name."""
    candidates = hass.states.async_all() if states is None else states
    matched = _fuzzymatch(name, candidates, lambda state: state.name)
    if matched is None:
        raise IntentHandleError(f"Unable to find an entity called {name}")
    return matched
@callback
def async_test_feature(state: State, feature: int, feature_name: str) -> None:
    """Test if state supports a feature."""
    supported = state.attributes.get(ATTR_SUPPORTED_FEATURES, 0)
    # Feature bit not set -> the entity cannot handle this intent.
    if not supported & feature:
        raise IntentHandleError(f"Entity {state.name} does not support {feature_name}")
class IntentHandler:
    """Intent handler registration."""

    # Intent type this handler responds to; must be set by subclasses.
    intent_type: str | None = None
    # Optional mapping of slot name -> voluptuous validator for slot values.
    slot_schema: vol.Schema | None = None
    # Lazily built schema derived from slot_schema (see async_validate_slots).
    _slot_schema: vol.Schema | None = None
    # Platforms allowed to trigger this handler; None means any platform.
    platforms: Iterable[str] | None = []

    @callback
    def async_can_handle(self, intent_obj: Intent) -> bool:
        """Test if an intent can be handled."""
        return self.platforms is None or intent_obj.platform in self.platforms

    @callback
    def async_validate_slots(self, slots: _SlotsType) -> _SlotsType:
        """Validate slot information against slot_schema."""
        if self.slot_schema is None:
            return slots
        if self._slot_schema is None:
            # Build once and cache: each slot's "value" entry is validated
            # with the validator declared in slot_schema.
            self._slot_schema = vol.Schema(
                {
                    key: SLOT_SCHEMA.extend({"value": validator})
                    for key, validator in self.slot_schema.items()
                },
                extra=vol.ALLOW_EXTRA,
            )
        return self._slot_schema(slots)  # type: ignore

    async def async_handle(self, intent_obj: Intent) -> IntentResponse:
        """Handle the intent."""
        raise NotImplementedError()

    def __repr__(self) -> str:
        """Represent a string of an intent handler."""
        return f"<{self.__class__.__name__} - {self.intent_type}>"
def _fuzzymatch(name: str, items: Iterable[T], key: Callable[[T], str]) -> T | None:
"""Fuzzy matching function."""
matches = []
pattern = ".*?".join(name)
regex = re.compile(pattern, re.IGNORECASE)
for idx, item in enumerate(items):
match = regex.search(key(item))
if match:
# Add key length so we prefer shorter keys with the same group and start.
# Add index so we pick first match in case same group, start, and key length.
matches.append(
(len(match.group()), match.start(), len(key(item)), idx, item)
)
return sorted(matches)[0][4] if matches else None
class ServiceIntentHandler(IntentHandler):
    """Service Intent handler registration.

    Service specific intent handler that calls a service by name/entity_id.
    """

    slot_schema = {vol.Required("name"): cv.string}

    def __init__(
        self, intent_type: str, domain: str, service: str, speech: str
    ) -> None:
        """Create Service Intent Handler.

        speech is a format string whose "{}" placeholder is filled with the
        matched entity's name in the response.
        """
        self.intent_type = intent_type
        self.domain = domain
        self.service = service
        self.speech = speech

    async def async_handle(self, intent_obj: Intent) -> IntentResponse:
        """Handle the hass intent."""
        hass = intent_obj.hass
        slots = self.async_validate_slots(intent_obj.slots)
        # Resolve the spoken name to an entity state via fuzzy matching.
        state = async_match_state(hass, slots["name"]["value"])
        await hass.services.async_call(
            self.domain,
            self.service,
            {ATTR_ENTITY_ID: state.entity_id},
            context=intent_obj.context,
        )
        response = intent_obj.create_response()
        response.async_set_speech(self.speech.format(state.name))
        return response
class Intent:
    """Hold the intent."""

    __slots__ = ["hass", "platform", "intent_type", "slots", "text_input", "context"]

    def __init__(
        self,
        hass: HomeAssistant,
        platform: str,
        intent_type: str,
        slots: _SlotsType,
        text_input: str | None,
        context: Context,
    ) -> None:
        """Initialize an intent.

        platform is the source that produced the intent; slots maps slot
        names to {"value": ...} dicts; text_input is the raw user text,
        when available.
        """
        self.hass = hass
        self.platform = platform
        self.intent_type = intent_type
        self.slots = slots
        self.text_input = text_input
        self.context = context

    @callback
    def create_response(self) -> IntentResponse:
        """Create a response tied to this intent."""
        return IntentResponse(self)
class IntentResponse:
    """Response to an intent."""

    def __init__(self, intent: Intent | None = None) -> None:
        """Initialize an IntentResponse."""
        self.intent = intent
        # speech type (e.g. "plain"/"ssml") -> {"speech": ..., "extra_data": ...}
        self.speech: dict[str, dict[str, Any]] = {}
        # card type -> {"title": ..., "content": ...}
        self.card: dict[str, dict[str, str]] = {}

    @callback
    def async_set_speech(
        self, speech: str, speech_type: str = "plain", extra_data: Any | None = None
    ) -> None:
        """Set speech response."""
        self.speech[speech_type] = {"speech": speech, "extra_data": extra_data}

    @callback
    def async_set_card(
        self, title: str, content: str, card_type: str = "simple"
    ) -> None:
        """Set card response."""
        self.card[card_type] = {"title": title, "content": content}

    @callback
    def as_dict(self) -> dict[str, dict[str, dict[str, Any]]]:
        """Return a dictionary representation of an intent response."""
        return {"speech": self.speech, "card": self.card}
from __future__ import annotations
from collections.abc import Mapping
from typing import Any
from homeassistant.core import HomeAssistant, callback
from . import template
class ScriptVariables:
    """Class to hold and render script variables."""

    def __init__(self, variables: dict[str, Any]) -> None:
        """Initialize script variables."""
        self.variables = variables
        # Cached result of the one-time template scan; None = not yet checked.
        self._has_template: bool | None = None

    @callback
    def async_render(
        self,
        hass: HomeAssistant,
        run_variables: Mapping[str, Any] | None,
        *,
        render_as_defaults: bool = True,
        limited: bool = False,
    ) -> dict[str, Any]:
        """Render script variables.

        The run variables are used to compute the static variables.

        If `render_as_defaults` is True, the run variables will not be overridden.
        """
        # Scan for templates once and attach hass to them for rendering.
        if self._has_template is None:
            self._has_template = template.is_complex(self.variables)
            template.attach(hass, self.variables)
        # Fast path: no templates, so plain dict merging is sufficient.
        if not self._has_template:
            if render_as_defaults:
                rendered_variables = dict(self.variables)
                if run_variables is not None:
                    rendered_variables.update(run_variables)
            else:
                rendered_variables = (
                    {} if run_variables is None else dict(run_variables)
                )
                rendered_variables.update(self.variables)
            return rendered_variables

        rendered_variables = {} if run_variables is None else dict(run_variables)
        for key, value in self.variables.items():
            # We can skip if we're going to override this key with
            # run variables anyway
            if render_as_defaults and key in rendered_variables:
                continue
            # Later variables may reference earlier ones, so render against
            # the progressively built mapping.
            rendered_variables[key] = template.render_complex(
                value, rendered_variables, limited
            )
        return rendered_variables

    def as_dict(self) -> dict:
        """Return dict version of this class."""
        return self.variables
from __future__ import annotations
import asyncio
from collections import OrderedDict
from datetime import timedelta
import hmac
from logging import getLogger
from typing import Any
from homeassistant.auth.const import ACCESS_TOKEN_EXPIRATION
from homeassistant.core import HomeAssistant, callback
from homeassistant.util import dt as dt_util
from . import models
from .const import GROUP_ID_ADMIN, GROUP_ID_READ_ONLY, GROUP_ID_USER
from .permissions import PermissionLookup, system_policies
from .permissions.types import PolicyType
STORAGE_VERSION = 1
STORAGE_KEY = "auth"
GROUP_NAME_ADMIN = "Administrators"
GROUP_NAME_USER = "Users"
GROUP_NAME_READ_ONLY = "Read Only"
class AuthStore:
"""Stores authentication info.
Any mutation to an object should happen inside the auth store.
The auth store is lazy. It won't load the data from disk until a method is
called that needs it.
"""
    def __init__(self, hass: HomeAssistant) -> None:
        """Initialize the auth store."""
        self.hass = hass
        # Lazily loaded from disk on first access (see the getters below).
        self._users: dict[str, models.User] | None = None
        self._groups: dict[str, models.Group] | None = None
        self._perm_lookup: PermissionLookup | None = None
        # private=True: auth data must not be world readable on disk.
        self._store = hass.helpers.storage.Store(
            STORAGE_VERSION, STORAGE_KEY, private=True
        )
        self._lock = asyncio.Lock()
    async def async_get_groups(self) -> list[models.Group]:
        """Retrieve all groups."""
        # Lazy-load auth data from disk on first access.
        if self._groups is None:
            await self._async_load()
        assert self._groups is not None
        return list(self._groups.values())
    async def async_get_group(self, group_id: str) -> models.Group | None:
        """Retrieve a group by id, or None if it does not exist."""
        if self._groups is None:
            await self._async_load()
        assert self._groups is not None
        return self._groups.get(group_id)
    async def async_get_users(self) -> list[models.User]:
        """Retrieve all users."""
        # Lazy-load auth data from disk on first access.
        if self._users is None:
            await self._async_load()
        assert self._users is not None
        return list(self._users.values())
    async def async_get_user(self, user_id: str) -> models.User | None:
        """Retrieve a user by id, or None if it does not exist."""
        if self._users is None:
            await self._async_load()
        assert self._users is not None
        return self._users.get(user_id)
    async def async_create_user(
        self,
        name: str | None,
        is_owner: bool | None = None,
        is_active: bool | None = None,
        system_generated: bool | None = None,
        credentials: models.Credentials | None = None,
        group_ids: list[str] | None = None,
    ) -> models.User:
        """Create a new user.

        Raises ValueError when a supplied group id does not exist.
        """
        if self._users is None:
            await self._async_load()
        assert self._users is not None
        assert self._groups is not None

        # Resolve group ids to group objects up front so we fail early.
        groups = []
        for group_id in group_ids or []:
            group = self._groups.get(group_id)
            if group is None:
                raise ValueError(f"Invalid group specified {group_id}")
            groups.append(group)

        kwargs: dict[str, Any] = {
            "name": name,
            # Until we get group management, we just put everyone in the
            # same group.
            "groups": groups,
            "perm_lookup": self._perm_lookup,
        }
        # Only pass flags that were explicitly provided so model defaults apply.
        if is_owner is not None:
            kwargs["is_owner"] = is_owner
        if is_active is not None:
            kwargs["is_active"] = is_active
        if system_generated is not None:
            kwargs["system_generated"] = system_generated
        new_user = models.User(**kwargs)
        self._users[new_user.id] = new_user
        if credentials is None:
            self._async_schedule_save()
            return new_user
        # Saving is done inside the link.
        await self.async_link_user(new_user, credentials)
        return new_user
    async def async_link_user(
        self, user: models.User, credentials: models.Credentials
    ) -> None:
        """Add credentials to an existing user."""
        user.credentials.append(credentials)
        self._async_schedule_save()
        # The credentials are now attached to a user and no longer "new".
        credentials.is_new = False
async def async_remove_user(self, user: models.User) -> None:
"""Remove a user."""
if self._users is None:
await self._async_load()
assert self._users is not None
self._users.pop(user.id)
self._async_schedule_save()
async def async_update_user(
self,
user: models.User,
name: str | None = None,
is_active: bool | None = None,
group_ids: list[str] | None = None,
) -> None:
"""Update a user."""
assert self._groups is not None
if group_ids is not None:
groups = []
for grid in group_ids:
group = self._groups.get(grid)
if group is None:
raise ValueError("Invalid group specified.")
groups.append(group)
user.groups = groups
user.invalidate_permission_cache()
for attr_name, value in (("name", name), ("is_active", is_active)):
if value is not None:
setattr(user, attr_name, value)
self._async_schedule_save()
    async def async_activate_user(self, user: models.User) -> None:
        """Activate a user and schedule a save."""
        user.is_active = True
        self._async_schedule_save()
    async def async_deactivate_user(self, user: models.User) -> None:
        """Deactivate a user and schedule a save."""
        user.is_active = False
        self._async_schedule_save()
async def async_remove_credentials(self, credentials: models.Credentials) -> None:
"""Remove credentials."""
if self._users is None:
await self._async_load()
assert self._users is not None
for user in self._users.values():
found = None
for index, cred in enumerate(user.credentials):
if cred is credentials:
found = index
break
if found is not None:
user.credentials.pop(found)
break
self._async_schedule_save()
    async def async_create_refresh_token(
        self,
        user: models.User,
        client_id: str | None = None,
        client_name: str | None = None,
        client_icon: str | None = None,
        token_type: str = models.TOKEN_TYPE_NORMAL,
        access_token_expiration: timedelta = ACCESS_TOKEN_EXPIRATION,
        credential: models.Credentials | None = None,
    ) -> models.RefreshToken:
        """Create a new refresh token for a user, attach it and schedule a save."""
        kwargs: dict[str, Any] = {
            "user": user,
            "client_id": client_id,
            "token_type": token_type,
            "access_token_expiration": access_token_expiration,
            "credential": credential,
        }
        # Truthiness check: empty strings fall back to the model defaults too.
        if client_name:
            kwargs["client_name"] = client_name
        if client_icon:
            kwargs["client_icon"] = client_icon

        refresh_token = models.RefreshToken(**kwargs)
        user.refresh_tokens[refresh_token.id] = refresh_token

        self._async_schedule_save()
        return refresh_token
async def async_remove_refresh_token(
self, refresh_token: models.RefreshToken
) -> None:
"""Remove a refresh token."""
if self._users is None:
await self._async_load()
assert self._users is not None
for user in self._users.values():
if user.refresh_tokens.pop(refresh_token.id, None):
self._async_schedule_save()
break
async def async_get_refresh_token(
self, token_id: str
) -> models.RefreshToken | None:
"""Get refresh token by id."""
if self._users is None:
await self._async_load()
assert self._users is not None
for user in self._users.values():
refresh_token = user.refresh_tokens.get(token_id)
if refresh_token is not None:
return refresh_token
return None
    async def async_get_refresh_token_by_token(
        self, token: str
    ) -> models.RefreshToken | None:
        """Get refresh token by its secret token string."""
        if self._users is None:
            await self._async_load()
        assert self._users is not None

        found = None

        # Deliberately scan every token with hmac.compare_digest and no early
        # return, so lookup time does not leak which (if any) token matched.
        for user in self._users.values():
            for refresh_token in user.refresh_tokens.values():
                if hmac.compare_digest(refresh_token.token, token):
                    found = refresh_token

        return found
    @callback
    def async_log_refresh_token_usage(
        self, refresh_token: models.RefreshToken, remote_ip: str | None = None
    ) -> None:
        """Update refresh token last used information and schedule a save."""
        refresh_token.last_used_at = dt_util.utcnow()
        refresh_token.last_used_ip = remote_ip
        self._async_schedule_save()
    async def _async_load(self) -> None:
        """Load the users.

        The lock serializes concurrent first loads; the re-check inside it
        makes any load after the first a no-op.
        """
        async with self._lock:
            if self._users is not None:
                return
            await self._async_load_task()
    async def _async_load_task(self) -> None:
        """Load users, groups, credentials and refresh tokens from storage.

        Also soft-migrates legacy data formats (see inline comments) and
        only commits the result if no concurrent load finished first.
        """
        [ent_reg, dev_reg, data] = await asyncio.gather(
            self.hass.helpers.entity_registry.async_get_registry(),
            self.hass.helpers.device_registry.async_get_registry(),
            self._store.async_load(),
        )

        # Make sure that we're not overriding data if 2 loads happened at the
        # same time
        if self._users is not None:
            return

        self._perm_lookup = perm_lookup = PermissionLookup(ent_reg, dev_reg)

        if data is None:
            self._set_defaults()
            return

        users: dict[str, models.User] = OrderedDict()
        groups: dict[str, models.Group] = OrderedDict()
        credentials: dict[str, models.Credentials] = OrderedDict()

        # Soft-migrating data as we load. We are going to make sure we have a
        # read only group and an admin group. There are two states that we can
        # migrate from:
        # 1. Data from a recent version which has a single group without policy
        # 2. Data from old version which has no groups
        has_admin_group = False
        has_user_group = False
        has_read_only_group = False
        group_without_policy = None

        # When creating objects we mention each attribute explicitly. This
        # prevents crashing if user rolls back HA version after a new property
        # was added.

        for group_dict in data.get("groups", []):
            policy: PolicyType | None = None

            # System groups always get their canonical name and policy,
            # regardless of what was stored.
            if group_dict["id"] == GROUP_ID_ADMIN:
                has_admin_group = True

                name = GROUP_NAME_ADMIN
                policy = system_policies.ADMIN_POLICY
                system_generated = True

            elif group_dict["id"] == GROUP_ID_USER:
                has_user_group = True

                name = GROUP_NAME_USER
                policy = system_policies.USER_POLICY
                system_generated = True

            elif group_dict["id"] == GROUP_ID_READ_ONLY:
                has_read_only_group = True

                name = GROUP_NAME_READ_ONLY
                policy = system_policies.READ_ONLY_POLICY
                system_generated = True

            else:
                name = group_dict["name"]
                policy = group_dict.get("policy")
                system_generated = False

            # We don't want groups without a policy that are not system groups
            # This is part of migrating from state 1
            if policy is None:
                group_without_policy = group_dict["id"]
                continue

            groups[group_dict["id"]] = models.Group(
                id=group_dict["id"],
                name=name,
                policy=policy,
                system_generated=system_generated,
            )

        # If there are no groups, add all existing users to the admin group.
        # This is part of migrating from state 2
        migrate_users_to_admin_group = not groups and group_without_policy is None

        # If we find a no_policy_group, we need to migrate all users to the
        # admin group. We only do this if there are no other groups, as is
        # the expected state. If not expected state, not marking people admin.
        # This is part of migrating from state 1
        if groups and group_without_policy is not None:
            group_without_policy = None

        # This is part of migrating from state 1 and 2
        if not has_admin_group:
            admin_group = _system_admin_group()
            groups[admin_group.id] = admin_group

        # This is part of migrating from state 1 and 2
        if not has_read_only_group:
            read_only_group = _system_read_only_group()
            groups[read_only_group.id] = read_only_group

        if not has_user_group:
            user_group = _system_user_group()
            groups[user_group.id] = user_group

        for user_dict in data["users"]:
            # Collect the users group.
            user_groups = []
            for group_id in user_dict.get("group_ids", []):
                # This is part of migrating from state 1
                if group_id == group_without_policy:
                    group_id = GROUP_ID_ADMIN
                user_groups.append(groups[group_id])

            # This is part of migrating from state 2
            if not user_dict["system_generated"] and migrate_users_to_admin_group:
                user_groups.append(groups[GROUP_ID_ADMIN])

            users[user_dict["id"]] = models.User(
                name=user_dict["name"],
                groups=user_groups,
                id=user_dict["id"],
                is_owner=user_dict["is_owner"],
                is_active=user_dict["is_active"],
                system_generated=user_dict["system_generated"],
                perm_lookup=perm_lookup,
            )

        for cred_dict in data["credentials"]:
            credential = models.Credentials(
                id=cred_dict["id"],
                is_new=False,
                auth_provider_type=cred_dict["auth_provider_type"],
                auth_provider_id=cred_dict["auth_provider_id"],
                data=cred_dict["data"],
            )
            credentials[cred_dict["id"]] = credential
            users[cred_dict["user_id"]].credentials.append(credential)

        for rt_dict in data["refresh_tokens"]:
            # Filter out the old keys that don't have jwt_key (pre-0.76)
            if "jwt_key" not in rt_dict:
                continue

            created_at = dt_util.parse_datetime(rt_dict["created_at"])
            if created_at is None:
                # rt_dict is passed as a %-style mapping for the (id),
                # (created_at) and (user_id) placeholders.
                getLogger(__name__).error(
                    "Ignoring refresh token %(id)s with invalid created_at "
                    "%(created_at)s for user_id %(user_id)s",
                    rt_dict,
                )
                continue

            token_type = rt_dict.get("token_type")
            if token_type is None:
                if rt_dict["client_id"] is None:
                    token_type = models.TOKEN_TYPE_SYSTEM
                else:
                    token_type = models.TOKEN_TYPE_NORMAL

            # old refresh_token don't have last_used_at (pre-0.78)
            last_used_at_str = rt_dict.get("last_used_at")
            if last_used_at_str:
                last_used_at = dt_util.parse_datetime(last_used_at_str)
            else:
                last_used_at = None

            token = models.RefreshToken(
                id=rt_dict["id"],
                user=users[rt_dict["user_id"]],
                client_id=rt_dict["client_id"],
                # use dict.get to keep backward compatibility
                client_name=rt_dict.get("client_name"),
                client_icon=rt_dict.get("client_icon"),
                token_type=token_type,
                created_at=created_at,
                access_token_expiration=timedelta(
                    seconds=rt_dict["access_token_expiration"]
                ),
                token=rt_dict["token"],
                jwt_key=rt_dict["jwt_key"],
                last_used_at=last_used_at,
                last_used_ip=rt_dict.get("last_used_ip"),
                credential=credentials.get(rt_dict.get("credential_id")),
                version=rt_dict.get("version"),
            )

            users[rt_dict["user_id"]].refresh_tokens[token.id] = token

        # Commit both collections atomically at the end of the load.
        self._groups = groups
        self._users = users
@callback
def _async_schedule_save(self) -> None:
"""Save users."""
if self._users is None:
return
self._store.async_delay_save(self._data_to_save, 1)
    @callback
    def _data_to_save(self) -> dict:
        """Return the serializable snapshot of users, groups, credentials and tokens."""
        assert self._users is not None
        assert self._groups is not None

        users = [
            {
                "id": user.id,
                "group_ids": [group.id for group in user.groups],
                "is_owner": user.is_owner,
                "is_active": user.is_active,
                "name": user.name,
                "system_generated": user.system_generated,
            }
            for user in self._users.values()
        ]

        groups = []
        for group in self._groups.values():
            g_dict: dict[str, Any] = {
                "id": group.id,
                # Name not read for sys groups. Kept here for backwards compat
                "name": group.name,
            }

            # System groups get their canonical policy on load, so only
            # user-defined group policies are persisted.
            if not group.system_generated:
                g_dict["policy"] = group.policy

            groups.append(g_dict)

        credentials = [
            {
                "id": credential.id,
                "user_id": user.id,
                "auth_provider_type": credential.auth_provider_type,
                "auth_provider_id": credential.auth_provider_id,
                "data": credential.data,
            }
            for user in self._users.values()
            for credential in user.credentials
        ]

        refresh_tokens = [
            {
                "id": refresh_token.id,
                "user_id": user.id,
                "client_id": refresh_token.client_id,
                "client_name": refresh_token.client_name,
                "client_icon": refresh_token.client_icon,
                "token_type": refresh_token.token_type,
                "created_at": refresh_token.created_at.isoformat(),
                "access_token_expiration": refresh_token.access_token_expiration.total_seconds(),
                "token": refresh_token.token,
                "jwt_key": refresh_token.jwt_key,
                "last_used_at": refresh_token.last_used_at.isoformat()
                if refresh_token.last_used_at
                else None,
                "last_used_ip": refresh_token.last_used_ip,
                "credential_id": refresh_token.credential.id
                if refresh_token.credential
                else None,
                "version": refresh_token.version,
            }
            for user in self._users.values()
            for refresh_token in user.refresh_tokens.values()
        ]

        return {
            "users": users,
            "groups": groups,
            "credentials": credentials,
            "refresh_tokens": refresh_tokens,
        }
def _set_defaults(self) -> None:
"""Set default values for auth store."""
self._users = OrderedDict()
groups: dict[str, models.Group] = OrderedDict()
admin_group = _system_admin_group()
groups[admin_group.id] = admin_group
user_group = _system_user_group()
groups[user_group.id] = user_group
read_only_group = _system_read_only_group()
groups[read_only_group.id] = read_only_group
self._groups = groups
def _system_admin_group() -> models.Group:
    """Create the system-generated admin group."""
    return models.Group(
        id=GROUP_ID_ADMIN,
        name=GROUP_NAME_ADMIN,
        policy=system_policies.ADMIN_POLICY,
        system_generated=True,
    )
def _system_user_group() -> models.Group:
    """Create the system-generated user group."""
    return models.Group(
        id=GROUP_ID_USER,
        name=GROUP_NAME_USER,
        policy=system_policies.USER_POLICY,
        system_generated=True,
    )
def _system_read_only_group() -> models.Group:
    """Create the system-generated read-only group."""
    return models.Group(
        id=GROUP_ID_READ_ONLY,
        name=GROUP_NAME_READ_ONLY,
        policy=system_policies.READ_ONLY_POLICY,
        system_generated=True,
    )
from __future__ import annotations
from functools import wraps
from typing import Callable, Dict, Optional, cast
from .const import SUBCAT_ALL
from .models import PermissionLookup
from .types import CategoryType, SubCategoryDict, ValueType
LookupFunc = Callable[[PermissionLookup, SubCategoryDict, str], Optional[ValueType]]
SubCatLookupType = Dict[str, LookupFunc]
def lookup_all(
perm_lookup: PermissionLookup, lookup_dict: SubCategoryDict, object_id: str
) -> ValueType:
"""Look up permission for all."""
# In case of ALL category, lookup_dict IS the schema.
return cast(ValueType, lookup_dict)
def compile_policy(
    policy: CategoryType, subcategories: SubCatLookupType, perm_lookup: PermissionLookup
) -> Callable[[str, str], bool]:
    """Compile policy into a function that tests policy.

    Subcategories are mapping key -> lookup function, ordered by highest
    priority first.
    """
    # None, False, empty dict
    if not policy:

        def apply_policy_deny_all(entity_id: str, key: str) -> bool:
            """Decline all."""
            return False

        return apply_policy_deny_all

    if policy is True:

        def apply_policy_allow_all(entity_id: str, key: str) -> bool:
            """Approve all."""
            return True

        return apply_policy_allow_all

    assert isinstance(policy, dict)

    funcs: list[Callable[[str, str], bool | None]] = []

    # Build one test function per subcategory present in the policy,
    # preserving the priority order of `subcategories`.
    for key, lookup_func in subcategories.items():
        lookup_value = policy.get(key)

        # If any lookup value is `True`, it will always be positive
        # (the policy schema only permits True as a bool value here).
        if isinstance(lookup_value, bool):
            return lambda object_id, key: True

        if lookup_value is not None:
            funcs.append(_gen_dict_test_func(perm_lookup, lookup_func, lookup_value))

    if len(funcs) == 1:
        func = funcs[0]

        @wraps(func)
        def apply_policy_func(object_id: str, key: str) -> bool:
            """Apply a single policy function."""
            return func(object_id, key) is True

        return apply_policy_func

    def apply_policy_funcs(object_id: str, key: str) -> bool:
        """Apply several policy functions.

        The first function that returns a non-None verdict wins; if none
        match, access is denied.
        """
        for func in funcs:
            result = func(object_id, key)

            if result is not None:
                return result

        return False

    return apply_policy_funcs
def _gen_dict_test_func(
perm_lookup: PermissionLookup, lookup_func: LookupFunc, lookup_dict: SubCategoryDict
) -> Callable[[str, str], bool | None]:
"""Generate a lookup function."""
def test_value(object_id: str, key: str) -> bool | None:
"""Test if permission is allowed based on the keys."""
schema: ValueType = lookup_func(perm_lookup, lookup_dict, object_id)
if schema is None or isinstance(schema, bool):
return schema
assert isinstance(schema, dict)
return schema.get(key)
return test_value
def test_all(policy: CategoryType, key: str) -> bool:
    """Test if a policy has an ALL access for a specific key."""
    # Non-dict policies (True/False/None) stand for themselves.
    if not isinstance(policy, dict):
        return bool(policy)

    all_policy = policy.get(SUBCAT_ALL)

    if isinstance(all_policy, dict):
        return all_policy.get(key, False)

    return bool(all_policy)
from __future__ import annotations
from collections import OrderedDict
from typing import Callable
import voluptuous as vol
from .const import POLICY_CONTROL, POLICY_EDIT, POLICY_READ, SUBCAT_ALL
from .models import PermissionLookup
from .types import CategoryType, SubCategoryDict, ValueType
from .util import SubCatLookupType, compile_policy, lookup_all
SINGLE_ENTITY_SCHEMA = vol.Any(
True,
vol.Schema(
{
vol.Optional(POLICY_READ): True,
vol.Optional(POLICY_CONTROL): True,
vol.Optional(POLICY_EDIT): True,
}
),
)
ENTITY_DOMAINS = "domains"
ENTITY_AREAS = "area_ids"
ENTITY_DEVICE_IDS = "device_ids"
ENTITY_ENTITY_IDS = "entity_ids"
ENTITY_VALUES_SCHEMA = vol.Any(True, vol.Schema({str: SINGLE_ENTITY_SCHEMA}))
ENTITY_POLICY_SCHEMA = vol.Any(
True,
vol.Schema(
{
vol.Optional(SUBCAT_ALL): SINGLE_ENTITY_SCHEMA,
vol.Optional(ENTITY_AREAS): ENTITY_VALUES_SCHEMA,
vol.Optional(ENTITY_DEVICE_IDS): ENTITY_VALUES_SCHEMA,
vol.Optional(ENTITY_DOMAINS): ENTITY_VALUES_SCHEMA,
vol.Optional(ENTITY_ENTITY_IDS): ENTITY_VALUES_SCHEMA,
}
),
)
def _lookup_domain(
perm_lookup: PermissionLookup, domains_dict: SubCategoryDict, entity_id: str
) -> ValueType | None:
"""Look up entity permissions by domain."""
return domains_dict.get(entity_id.split(".", 1)[0])
def _lookup_area(
perm_lookup: PermissionLookup, area_dict: SubCategoryDict, entity_id: str
) -> ValueType | None:
"""Look up entity permissions by area."""
entity_entry = perm_lookup.entity_registry.async_get(entity_id)
if entity_entry is None or entity_entry.device_id is None:
return None
device_entry = perm_lookup.device_registry.async_get(entity_entry.device_id)
if device_entry is None or device_entry.area_id is None:
return None
return area_dict.get(device_entry.area_id)
def _lookup_device(
perm_lookup: PermissionLookup, devices_dict: SubCategoryDict, entity_id: str
) -> ValueType | None:
"""Look up entity permissions by device."""
entity_entry = perm_lookup.entity_registry.async_get(entity_id)
if entity_entry is None or entity_entry.device_id is None:
return None
return devices_dict.get(entity_entry.device_id)
def _lookup_entity_id(
    perm_lookup: PermissionLookup, entities_dict: SubCategoryDict, entity_id: str
) -> ValueType | None:
    """Look up entity permission by entity id.

    perm_lookup is unused but kept for the common LookupFunc signature.
    """
    return entities_dict.get(entity_id)
def compile_entities(
    policy: CategoryType, perm_lookup: PermissionLookup
) -> Callable[[str, str], bool]:
    """Compile policy into a function that tests policy."""
    # Ordered by priority: most specific subcategory is consulted first.
    subcategories: SubCatLookupType = OrderedDict(
        (
            (ENTITY_ENTITY_IDS, _lookup_entity_id),
            (ENTITY_DEVICE_IDS, _lookup_device),
            (ENTITY_AREAS, _lookup_area),
            (ENTITY_DOMAINS, _lookup_domain),
            (SUBCAT_ALL, lookup_all),
        )
    )

    return compile_policy(policy, subcategories, perm_lookup)
from __future__ import annotations
import logging
from typing import Any, Callable
import voluptuous as vol
from .const import CAT_ENTITIES
from .entities import ENTITY_POLICY_SCHEMA, compile_entities
from .merge import merge_policies # noqa: F401
from .models import PermissionLookup
from .types import PolicyType
from .util import test_all
POLICY_SCHEMA = vol.Schema({vol.Optional(CAT_ENTITIES): ENTITY_POLICY_SCHEMA})
_LOGGER = logging.getLogger(__name__)
class AbstractPermissions:
    """Default permissions class.

    Subclasses implement _entity_func and access_all_entities; check_entity
    lazily compiles and caches the entity test on first use.
    """

    _cached_entity_func: Callable[[str, str], bool] | None = None

    def _entity_func(self) -> Callable[[str, str], bool]:
        """Return a function that can test entity access."""
        raise NotImplementedError

    def access_all_entities(self, key: str) -> bool:
        """Check if we have a certain access to all entities."""
        raise NotImplementedError

    def check_entity(self, entity_id: str, key: str) -> bool:
        """Check if we can access entity."""
        # Compile the entity test lazily and memoize it on the instance.
        if self._cached_entity_func is None:
            self._cached_entity_func = self._entity_func()

        return self._cached_entity_func(entity_id, key)
class PolicyPermissions(AbstractPermissions):
    """Handle permissions backed by a policy dict."""

    def __init__(self, policy: PolicyType, perm_lookup: PermissionLookup) -> None:
        """Initialize the permission class."""
        self._policy = policy
        self._perm_lookup = perm_lookup

    def access_all_entities(self, key: str) -> bool:
        """Check if we have a certain access to all entities."""
        return test_all(self._policy.get(CAT_ENTITIES), key)

    def _entity_func(self) -> Callable[[str, str], bool]:
        """Return a function that can test entity access."""
        return compile_entities(self._policy.get(CAT_ENTITIES), self._perm_lookup)

    def __eq__(self, other: Any) -> bool:
        """Equals check.

        NOTE(review): defining __eq__ without __hash__ makes instances
        unhashable; confirm no caller uses them as dict keys/set members.
        """
        return isinstance(other, PolicyPermissions) and other._policy == self._policy
class _OwnerPermissions(AbstractPermissions):
    """Owner permissions: every check is allowed."""

    def access_all_entities(self, key: str) -> bool:
        """Check if we have a certain access to all entities."""
        return True

    def _entity_func(self) -> Callable[[str, str], bool]:
        """Return a function that can test entity access."""

        def _allow_all(entity_id: str, key: str) -> bool:
            return True

        return _allow_all


# Singleton shared by all owner users.
OwnerPermissions = _OwnerPermissions()
from __future__ import annotations
import asyncio
from io import BytesIO
from typing import Any
import voluptuous as vol
from homeassistant.auth.models import User
from homeassistant.core import HomeAssistant
from homeassistant.data_entry_flow import FlowResult
from . import (
MULTI_FACTOR_AUTH_MODULE_SCHEMA,
MULTI_FACTOR_AUTH_MODULES,
MultiFactorAuthModule,
SetupFlow,
)
REQUIREMENTS = ["pyotp==2.3.0", "PyQRCode==1.2.1"]
CONFIG_SCHEMA = MULTI_FACTOR_AUTH_MODULE_SCHEMA.extend({}, extra=vol.PREVENT_EXTRA)
STORAGE_VERSION = 1
STORAGE_KEY = "auth_module.totp"
STORAGE_USERS = "users"
STORAGE_USER_ID = "user_id"
STORAGE_OTA_SECRET = "ota_secret"
INPUT_FIELD_CODE = "code"
DUMMY_SECRET = "FPPTH34D4E3MI2HG"
def _generate_qr_code(data: str) -> str:
    """Generate an SVG string representing a QR code image of data.

    Note: despite the historical wording elsewhere, the output is not a
    base64 PNG — it is raw SVG markup with the XML prolog stripped so it
    can be inlined in HTML.
    """
    import pyqrcode  # pylint: disable=import-outside-toplevel

    qr_code = pyqrcode.create(data)

    with BytesIO() as buffer:
        qr_code.svg(file=buffer, scale=4)
        return str(
            buffer.getvalue()
            .decode("ascii")
            .replace("\n", "")
            .replace(
                '<?xml version="1.0" encoding="UTF-8"?>'
                '<svg xmlns="http://www.w3.org/2000/svg"',
                "<svg",
            )
        )
def _generate_secret_and_qr_code(username: str) -> tuple[str, str, str]:
    """Generate a secret, url, and QR code."""
    import pyotp  # pylint: disable=import-outside-toplevel

    secret = pyotp.random_base32()
    provisioning_url = pyotp.totp.TOTP(secret).provisioning_uri(
        username, issuer_name="Safegate Pro"
    )
    return secret, provisioning_url, _generate_qr_code(provisioning_url)
@MULTI_FACTOR_AUTH_MODULES.register("totp")
class TotpAuthModule(MultiFactorAuthModule):
    """Auth module validate time-based one time password."""

    DEFAULT_TITLE = "Time-based One Time Password"
    MAX_RETRY_TIME = 5

    def __init__(self, hass: HomeAssistant, config: dict[str, Any]) -> None:
        """Initialize the user data store."""
        super().__init__(hass, config)
        # Maps user_id -> TOTP secret; None until the store is first loaded.
        self._users: dict[str, str] | None = None
        self._user_store = hass.helpers.storage.Store(
            STORAGE_VERSION, STORAGE_KEY, private=True
        )
        # Serializes concurrent first-time loads.
        self._init_lock = asyncio.Lock()

    @property
    def input_schema(self) -> vol.Schema:
        """Validate login flow input data."""
        return vol.Schema({INPUT_FIELD_CODE: str})

    async def _async_load(self) -> None:
        """Load stored data."""
        async with self._init_lock:
            if self._users is not None:
                return

            data = await self._user_store.async_load()

            if data is None:
                data = {STORAGE_USERS: {}}

            self._users = data.get(STORAGE_USERS, {})

    async def _async_save(self) -> None:
        """Save data."""
        await self._user_store.async_save({STORAGE_USERS: self._users})

    def _add_ota_secret(self, user_id: str, secret: str | None = None) -> str:
        """Create a ota_secret for user.

        If secret is None a fresh random base32 secret is generated.
        """
        import pyotp  # pylint: disable=import-outside-toplevel

        ota_secret: str = secret or pyotp.random_base32()

        self._users[user_id] = ota_secret  # type: ignore
        return ota_secret

    async def async_setup_flow(self, user_id: str) -> SetupFlow:
        """Return a data entry flow handler for setup module.

        Mfa module should extend SetupFlow
        """
        user = await self.hass.auth.async_get_user(user_id)
        assert user is not None
        return TotpSetupFlow(self, self.input_schema, user)

    async def async_setup_user(self, user_id: str, setup_data: Any) -> str:
        """Set up auth module for user and persist the secret."""
        if self._users is None:
            await self._async_load()

        # Secret generation runs in the executor: pyotp is imported there.
        result = await self.hass.async_add_executor_job(
            self._add_ota_secret, user_id, setup_data.get("secret")
        )

        await self._async_save()
        return result

    async def async_depose_user(self, user_id: str) -> None:
        """Depose auth module for user."""
        if self._users is None:
            await self._async_load()

        # Only persist if the user actually had a secret.
        if self._users.pop(user_id, None):  # type: ignore
            await self._async_save()

    async def async_is_user_setup(self, user_id: str) -> bool:
        """Return whether user is setup."""
        if self._users is None:
            await self._async_load()

        return user_id in self._users  # type: ignore

    async def async_validate(self, user_id: str, user_input: dict[str, Any]) -> bool:
        """Return True if validation passed."""
        if self._users is None:
            await self._async_load()

        # user_input has been validate in caller
        # set INPUT_FIELD_CODE as vol.Required is not user friendly
        return await self.hass.async_add_executor_job(
            self._validate_2fa, user_id, user_input.get(INPUT_FIELD_CODE, "")
        )

    def _validate_2fa(self, user_id: str, code: str) -> bool:
        """Validate two factor authentication code."""
        import pyotp  # pylint: disable=import-outside-toplevel

        ota_secret = self._users.get(user_id)  # type: ignore

        if ota_secret is None:
            # even we cannot find user, we still do verify
            # to make timing the same as if user was found.
            pyotp.TOTP(DUMMY_SECRET).verify(code, valid_window=1)
            return False

        return bool(pyotp.TOTP(ota_secret).verify(code, valid_window=1))
class TotpSetupFlow(SetupFlow):
    """Handler for the setup flow."""

    def __init__(
        self, auth_module: TotpAuthModule, setup_schema: vol.Schema, user: User
    ) -> None:
        """Initialize the setup flow."""
        super().__init__(auth_module, setup_schema, user.id)
        # to fix typing complaint
        self._auth_module: TotpAuthModule = auth_module
        self._user = user
        # Populated on the first (form-rendering) pass of async_step_init.
        self._ota_secret: str | None = None
        self._url = None  # type Optional[str]
        self._image = None  # type Optional[str]

    async def async_step_init(
        self, user_input: dict[str, str] | None = None
    ) -> FlowResult:
        """Handle the first step of setup flow.

        Return self.async_show_form(step_id='init') if user_input is None.
        Return self.async_create_entry(data={'result': result}) if finish.
        """
        import pyotp  # pylint: disable=import-outside-toplevel

        errors: dict[str, str] = {}

        if user_input:
            # The user must echo a valid code to prove the authenticator was
            # provisioned before the secret is stored.
            verified = await self.hass.async_add_executor_job(
                pyotp.TOTP(self._ota_secret).verify, user_input["code"]
            )
            if verified:
                result = await self._auth_module.async_setup_user(
                    self._user_id, {"secret": self._ota_secret}
                )
                return self.async_create_entry(
                    title=self._auth_module.name, data={"result": result}
                )

            errors["base"] = "invalid_code"

        else:
            hass = self._auth_module.hass
            (
                self._ota_secret,
                self._url,
                self._image,
            ) = await hass.async_add_executor_job(
                _generate_secret_and_qr_code,  # type: ignore
                str(self._user.name),
            )

        return self.async_show_form(
            step_id="init",
            data_schema=self._setup_schema,
            description_placeholders={
                "code": self._ota_secret,
                "url": self._url,
                "qr_code": self._image,
            },
            errors=errors,
        )
from __future__ import annotations
import importlib
import logging
import types
from typing import Any
import voluptuous as vol
from voluptuous.humanize import humanize_error
from homeassistant import data_entry_flow, requirements
from homeassistant.const import CONF_ID, CONF_NAME, CONF_TYPE
from homeassistant.core import HomeAssistant
from homeassistant.data_entry_flow import FlowResult
from homeassistant.exceptions import HomeAssistantError
from homeassistant.util.decorator import Registry
MULTI_FACTOR_AUTH_MODULES = Registry()
MULTI_FACTOR_AUTH_MODULE_SCHEMA = vol.Schema(
{
vol.Required(CONF_TYPE): str,
vol.Optional(CONF_NAME): str,
# Specify ID if you have two mfa auth module for same type.
vol.Optional(CONF_ID): str,
},
extra=vol.ALLOW_EXTRA,
)
DATA_REQS = "mfa_auth_module_reqs_processed"
_LOGGER = logging.getLogger(__name__)
class MultiFactorAuthModule:
    """Multi-factor Auth Module of validation function.

    Base class: concrete modules implement the input_schema property and
    the async_* methods below.
    """

    DEFAULT_TITLE = "Unnamed auth module"
    MAX_RETRY_TIME = 3

    def __init__(self, hass: HomeAssistant, config: dict[str, Any]) -> None:
        """Initialize an auth module."""
        self.hass = hass
        self.config = config

    @property
    def id(self) -> str:
        """Return id of the auth module.

        Default is same as type
        """
        return self.config.get(CONF_ID, self.type)

    @property
    def type(self) -> str:
        """Return type of the module."""
        return self.config[CONF_TYPE]  # type: ignore

    @property
    def name(self) -> str:
        """Return the name of the auth module."""
        return self.config.get(CONF_NAME, self.DEFAULT_TITLE)

    # Implement by extending class

    @property
    def input_schema(self) -> vol.Schema:
        """Return a voluptuous schema to define mfa auth module's input."""
        raise NotImplementedError

    async def async_setup_flow(self, user_id: str) -> SetupFlow:
        """Return a data entry flow handler for setup module.

        Mfa module should extend SetupFlow
        """
        raise NotImplementedError

    async def async_setup_user(self, user_id: str, setup_data: Any) -> Any:
        """Set up user for mfa auth module."""
        raise NotImplementedError

    async def async_depose_user(self, user_id: str) -> None:
        """Remove user from mfa module."""
        raise NotImplementedError

    async def async_is_user_setup(self, user_id: str) -> bool:
        """Return whether user is setup."""
        raise NotImplementedError

    async def async_validate(self, user_id: str, user_input: dict[str, Any]) -> bool:
        """Return True if validation passed."""
        raise NotImplementedError
class SetupFlow(data_entry_flow.FlowHandler):
    """Handler for the setup flow."""

    def __init__(
        self, auth_module: MultiFactorAuthModule, setup_schema: vol.Schema, user_id: str
    ) -> None:
        """Initialize the setup flow."""
        self._auth_module = auth_module
        self._setup_schema = setup_schema
        self._user_id = user_id

    async def async_step_init(
        self, user_input: dict[str, str] | None = None
    ) -> FlowResult:
        """Handle the first step of setup flow.

        Return self.async_show_form(step_id='init') if user_input is None.
        Return self.async_create_entry(data={'result': result}) if finish.
        """
        errors: dict[str, str] = {}

        if user_input:
            result = await self._auth_module.async_setup_user(self._user_id, user_input)
            return self.async_create_entry(
                title=self._auth_module.name, data={"result": result}
            )

        return self.async_show_form(
            step_id="init", data_schema=self._setup_schema, errors=errors
        )
async def auth_mfa_module_from_config(
    hass: HomeAssistant, config: dict[str, Any]
) -> MultiFactorAuthModule:
    """Initialize an auth module from a config.

    Loads the module by its CONF_TYPE, validates config against the
    module's CONFIG_SCHEMA (re-raising vol.Invalid after logging) and
    instantiates the registered class.
    """
    module_name = config[CONF_TYPE]
    module = await _load_mfa_module(hass, module_name)

    try:
        config = module.CONFIG_SCHEMA(config)  # type: ignore
    except vol.Invalid as err:
        _LOGGER.error(
            "Invalid configuration for multi-factor module %s: %s",
            module_name,
            humanize_error(config, err),
        )
        raise

    return MULTI_FACTOR_AUTH_MODULES[module_name](hass, config)  # type: ignore
async def _load_mfa_module(hass: HomeAssistant, module_name: str) -> types.ModuleType:
    """Load an mfa auth module and process its requirements once.

    Raises HomeAssistantError if the module cannot be imported.
    """
    module_path = f"homeassistant.auth.mfa_modules.{module_name}"

    try:
        module = importlib.import_module(module_path)
    except ImportError as err:
        _LOGGER.error("Unable to load mfa module %s: %s", module_name, err)
        raise HomeAssistantError(
            f"Unable to load mfa module {module_name}: {err}"
        ) from err

    if hass.config.skip_pip or not hasattr(module, "REQUIREMENTS"):
        return module

    processed = hass.data.get(DATA_REQS)
    if processed is None:
        # First module with requirements: create the tracking set once.
        # (Previously the set was recreated unconditionally, which wiped the
        # record of already-processed modules and re-ran their requirements.)
        processed = hass.data[DATA_REQS] = set()
    elif module_name in processed:
        return module

    # https://github.com/python/mypy/issues/1424
    await requirements.async_process_requirements(
        hass, module_path, module.REQUIREMENTS  # type: ignore
    )

    processed.add(module_name)
    return module
from __future__ import annotations
from typing import Any
import voluptuous as vol
from homeassistant.core import HomeAssistant
from . import (
MULTI_FACTOR_AUTH_MODULE_SCHEMA,
MULTI_FACTOR_AUTH_MODULES,
MultiFactorAuthModule,
SetupFlow,
)
CONFIG_SCHEMA = MULTI_FACTOR_AUTH_MODULE_SCHEMA.extend(
{
vol.Required("data"): [
vol.Schema({vol.Required("user_id"): str, vol.Required("pin"): str})
]
},
extra=vol.PREVENT_EXTRA,
)
@MULTI_FACTOR_AUTH_MODULES.register("insecure_example")
class InsecureExampleModule(MultiFactorAuthModule):
    """Example auth module validate pin.

    The user_id/pin pairs live in the module config ("data") and are kept
    in memory only — hence "insecure"; for demonstration and tests.
    """

    DEFAULT_TITLE = "Insecure Personal Identify Number"

    def __init__(self, hass: HomeAssistant, config: dict[str, Any]) -> None:
        """Initialize the user data store."""
        super().__init__(hass, config)
        # List of {"user_id": ..., "pin": ...} dicts from the config.
        self._data = config["data"]

    @property
    def input_schema(self) -> vol.Schema:
        """Validate login flow input data."""
        return vol.Schema({"pin": str})

    @property
    def setup_schema(self) -> vol.Schema:
        """Validate async_setup_user input data."""
        return vol.Schema({"pin": str})

    async def async_setup_flow(self, user_id: str) -> SetupFlow:
        """Return a data entry flow handler for setup module.

        Mfa module should extend SetupFlow
        """
        return SetupFlow(self, self.setup_schema, user_id)

    async def async_setup_user(self, user_id: str, setup_data: Any) -> Any:
        """Set up user to use mfa module."""
        # data shall has been validate in caller
        pin = setup_data["pin"]

        for data in self._data:
            if data["user_id"] == user_id:
                # already setup, override
                data["pin"] = pin
                return

        self._data.append({"user_id": user_id, "pin": pin})

    async def async_depose_user(self, user_id: str) -> None:
        """Remove user from mfa module."""
        found = None
        for data in self._data:
            if data["user_id"] == user_id:
                found = data
                break
        if found:
            self._data.remove(found)

    async def async_is_user_setup(self, user_id: str) -> bool:
        """Return whether user is setup."""
        return any(data["user_id"] == user_id for data in self._data)

    async def async_validate(self, user_id: str, user_input: dict[str, Any]) -> bool:
        """Return True if validation passed."""
        return any(
            data["user_id"] == user_id and data["pin"] == user_input["pin"]
            for data in self._data
        )
from __future__ import annotations
from collections.abc import Mapping
from ipaddress import (
IPv4Address,
IPv4Network,
IPv6Address,
IPv6Network,
ip_address,
ip_network,
)
from typing import Any, Dict, List, Union, cast
import voluptuous as vol
from homeassistant.core import callback
from homeassistant.data_entry_flow import FlowResult
from homeassistant.exceptions import HomeAssistantError
import homeassistant.helpers.config_validation as cv
from . import AUTH_PROVIDER_SCHEMA, AUTH_PROVIDERS, AuthProvider, LoginFlow
from .. import InvalidAuthError
from ..models import Credentials, RefreshToken, UserMeta
# Convenience unions covering both IP protocol versions.
IPAddress = Union[IPv4Address, IPv6Address]
IPNetwork = Union[IPv4Network, IPv6Network]

# Configuration keys for this auth provider.
CONF_TRUSTED_NETWORKS = "trusted_networks"
CONF_TRUSTED_USERS = "trusted_users"
CONF_GROUP = "group"
CONF_ALLOW_BYPASS_LOGIN = "allow_bypass_login"

# trusted_users maps an ip_network to a list of user ids (uuid hex strings)
# or {group: uuid} dicts restricting which users may log in from it.
CONFIG_SCHEMA = AUTH_PROVIDER_SCHEMA.extend(
    {
        vol.Required(CONF_TRUSTED_NETWORKS): vol.All(cv.ensure_list, [ip_network]),
        vol.Optional(CONF_TRUSTED_USERS, default={}): vol.Schema(
            # we only validate the format of user_id or group_id
            {
                ip_network: vol.All(
                    cv.ensure_list,
                    [
                        vol.Or(
                            cv.uuid4_hex,
                            vol.Schema({vol.Required(CONF_GROUP): cv.uuid4_hex}),
                        )
                    ],
                )
            }
        ),
        vol.Optional(CONF_ALLOW_BYPASS_LOGIN, default=False): cv.boolean,
    },
    extra=vol.PREVENT_EXTRA,
)
class InvalidUserError(HomeAssistantError):
    """Raised when trying to log in as an unknown, system, or inactive user."""
@AUTH_PROVIDERS.register("trusted_networks")
class TrustedNetworksAuthProvider(AuthProvider):
    """Trusted Networks auth provider.

    Allow passwordless access from trusted network.
    """

    DEFAULT_TITLE = "Trusted Networks"

    @property
    def trusted_networks(self) -> list[IPNetwork]:
        """Return trusted networks."""
        return cast(List[IPNetwork], self.config[CONF_TRUSTED_NETWORKS])

    @property
    def trusted_users(self) -> dict[IPNetwork, Any]:
        """Return trusted users per network."""
        return cast(Dict[IPNetwork, Any], self.config[CONF_TRUSTED_USERS])

    @property
    def trusted_proxies(self) -> list[IPNetwork]:
        """Return trusted proxies in the system."""
        if not self.hass.http:
            return []
        return [
            ip_network(trusted_proxy)
            for trusted_proxy in self.hass.http.trusted_proxies
        ]

    @property
    def support_mfa(self) -> bool:
        """Trusted Networks auth provider does not support MFA."""
        return False

    async def async_login_flow(self, context: dict | None) -> LoginFlow:
        """Return a flow to login."""
        assert context is not None
        ip_addr = cast(IPAddress, context.get("ip_address"))
        users = await self.store.async_get_users()
        available_users = [
            user for user in users if not user.system_generated and user.is_active
        ]
        # Restrict the candidate users to those allowed for the caller's
        # network, if a matching trusted_users entry exists.
        for ip_net, user_or_group_list in self.trusted_users.items():
            if ip_addr not in ip_net:
                continue
            user_list = [
                user_id for user_id in user_or_group_list if isinstance(user_id, str)
            ]
            # Each dict entry maps CONF_GROUP to a single uuid-hex string
            # (see CONFIG_SCHEMA), so group_list is already the flat list of
            # allowed group ids.  The previous "flatten" comprehension
            # iterated over the characters of each id string, so group-based
            # entries could never match a real group id.
            group_list = [
                group[CONF_GROUP]
                for group in user_or_group_list
                if isinstance(group, dict)
            ]
            available_users = [
                user
                for user in available_users
                if (
                    user.id in user_list
                    or any(group.id in group_list for group in user.groups)
                )
            ]
            break

        return TrustedNetworksLoginFlow(
            self,
            ip_addr,
            {user.id: user.name for user in available_users},
            self.config[CONF_ALLOW_BYPASS_LOGIN],
        )

    async def async_get_or_create_credentials(
        self, flow_result: Mapping[str, str]
    ) -> Credentials:
        """Get credentials based on the flow result."""
        user_id = flow_result["user"]
        users = await self.store.async_get_users()
        for user in users:
            if user.id != user_id:
                continue
            if user.system_generated:
                continue
            if not user.is_active:
                continue
            for credential in await self.async_credentials():
                if credential.data["user_id"] == user_id:
                    return credential
            cred = self.async_create_credentials({"user_id": user_id})
            await self.store.async_link_user(user, cred)
            return cred

        # We only allow login as exist user
        raise InvalidUserError

    async def async_user_meta_for_credentials(
        self, credentials: Credentials
    ) -> UserMeta:
        """Return extra user metadata for credentials.

        Trusted network auth provider should never create new user.
        """
        raise NotImplementedError

    @callback
    def async_validate_access(self, ip_addr: IPAddress) -> None:
        """Make sure the access from trusted networks.

        Raise InvalidAuthError if not.
        Raise InvalidAuthError if trusted_networks is not configured.
        """
        if not self.trusted_networks:
            raise InvalidAuthError("trusted_networks is not configured")

        if not any(
            ip_addr in trusted_network for trusted_network in self.trusted_networks
        ):
            raise InvalidAuthError("Not in trusted_networks")

        # A request coming from a proxy could carry a spoofed client address.
        if any(ip_addr in trusted_proxy for trusted_proxy in self.trusted_proxies):
            raise InvalidAuthError("Can't allow access from a proxy server")

    @callback
    def async_validate_refresh_token(
        self, refresh_token: RefreshToken, remote_ip: str | None = None
    ) -> None:
        """Verify a refresh token is still valid."""
        if remote_ip is None:
            raise InvalidAuthError(
                "Unknown remote ip can't be used for trusted network provider."
            )
        self.async_validate_access(ip_address(remote_ip))
class TrustedNetworksLoginFlow(LoginFlow):
    """Handler for the login flow."""

    def __init__(
        self,
        auth_provider: TrustedNetworksAuthProvider,
        ip_addr: IPAddress,
        available_users: dict[str, str | None],
        allow_bypass_login: bool,
    ) -> None:
        """Initialize the login flow."""
        super().__init__(auth_provider)
        self._ip_address = ip_addr
        self._available_users = available_users
        self._allow_bypass_login = allow_bypass_login

    async def async_step_init(
        self, user_input: dict[str, str] | None = None
    ) -> FlowResult:
        """Handle the step of the form."""
        provider = cast(TrustedNetworksAuthProvider, self._auth_provider)
        try:
            provider.async_validate_access(self._ip_address)
        except InvalidAuthError:
            # Caller is not on a trusted network (or came via a proxy).
            return self.async_abort(reason="not_allowed")

        if user_input is not None:
            return await self.async_finish(user_input)

        # With exactly one candidate user, optionally skip the selection form.
        if self._allow_bypass_login and len(self._available_users) == 1:
            only_user = next(iter(self._available_users))
            return await self.async_finish({"user": only_user})

        return self.async_show_form(
            step_id="init",
            data_schema=vol.Schema({"user": vol.In(self._available_users)}),
        )
from __future__ import annotations
import asyncio
import base64
from collections import OrderedDict
from collections.abc import Mapping
import logging
from typing import Any, cast
import bcrypt
import voluptuous as vol
from homeassistant.const import CONF_ID
from homeassistant.core import HomeAssistant, callback
from homeassistant.data_entry_flow import FlowResult
from homeassistant.exceptions import HomeAssistantError
from . import AUTH_PROVIDER_SCHEMA, AUTH_PROVIDERS, AuthProvider, LoginFlow
from ..models import Credentials, UserMeta
# Store() parameters for the user/password database kept in .storage.
STORAGE_VERSION = 1
STORAGE_KEY = "auth_provider.homeassistant"
def _disallow_id(conf: dict[str, Any]) -> dict[str, Any]:
    """Disallow ID in config."""
    if CONF_ID not in conf:
        return conf
    raise vol.Invalid("ID is not allowed for the homeassistant auth provider.")
# Validate the shared provider schema, then reject any explicit "id" key.
CONFIG_SCHEMA = vol.All(AUTH_PROVIDER_SCHEMA, _disallow_id)
@callback
def async_get_provider(hass: HomeAssistant) -> HassAuthProvider:
    """Get the provider."""
    provider = next(
        (prv for prv in hass.auth.auth_providers if prv.type == "homeassistant"),
        None,
    )
    if provider is None:
        raise RuntimeError("Provider not found")
    return cast(HassAuthProvider, provider)
class InvalidAuth(HomeAssistantError):
    """Raised when we encounter invalid authentication (unknown username or wrong password)."""
class InvalidUser(HomeAssistantError):
    """Raised when invalid user is specified.

    Will not be raised when validating authentication.
    """
class Data:
    """Hold the user data."""

    def __init__(self, hass: HomeAssistant) -> None:
        """Initialize the user data store."""
        self.hass = hass
        # private=True marks the stored file as containing sensitive data.
        self._store = hass.helpers.storage.Store(
            STORAGE_VERSION, STORAGE_KEY, private=True
        )
        self._data: dict[str, Any] | None = None
        # Legacy mode will allow usernames to start/end with whitespace
        # and will compare usernames case-insensitive.
        # Remove in 2020 or when we launch 1.0.
        self.is_legacy = False

    @callback
    def normalize_username(self, username: str) -> str:
        """Normalize a username based on the mode."""
        if self.is_legacy:
            return username

        return username.strip().casefold()

    async def async_load(self) -> None:
        """Load stored data.

        Switches to legacy mode if any stored usernames are case-insensitive
        duplicates or carry leading/trailing whitespace.
        """
        data = await self._store.async_load()

        if data is None:
            data = {"users": []}

        seen: set[str] = set()

        for user in data["users"]:
            username = user["username"]

            # check if we have duplicates
            folded = username.casefold()

            if folded in seen:
                self.is_legacy = True

                logging.getLogger(__name__).warning(
                    "Safegate Pro auth provider is running in legacy mode "
                    "because we detected usernames that are case-insensitive"
                    "equivalent. Please change the username: '%s'.",
                    username,
                )

                break

            seen.add(folded)

            # check if we have unstripped usernames
            if username != username.strip():
                self.is_legacy = True

                logging.getLogger(__name__).warning(
                    "Safegate Pro auth provider is running in legacy mode "
                    "because we detected usernames that start or end in a "
                    "space. Please change the username: '%s'.",
                    username,
                )

                break

        self._data = data

    @property
    def users(self) -> list[dict[str, str]]:
        """Return users."""
        return self._data["users"]  # type: ignore

    def validate_login(self, username: str, password: str) -> None:
        """Validate a username and password.

        Raises InvalidAuth if auth invalid.
        """
        username = self.normalize_username(username)
        # Pre-computed bcrypt hash checked when no user matches, so the call
        # takes the same time whether or not the username exists.
        dummy = b"$2b$12$CiuFGszHx9eNHxPuQcwBWez4CwDTOcLTX5CbOpV6gef2nYuXkY7BO"
        found = None

        # Compare all users to avoid timing attacks.
        for user in self.users:
            if self.normalize_username(user["username"]) == username:
                found = user

        if found is None:
            # check a hash to make timing the same as if user was found
            bcrypt.checkpw(b"foo", dummy)
            raise InvalidAuth

        user_hash = base64.b64decode(found["password"])

        # bcrypt.checkpw is timing-safe
        if not bcrypt.checkpw(password.encode(), user_hash):
            raise InvalidAuth

    def hash_password(  # pylint: disable=no-self-use
        self, password: str, for_storage: bool = False
    ) -> bytes:
        """Encode a password.

        With for_storage=True the bcrypt hash is base64-encoded so it can be
        stored as JSON text.
        """
        hashed: bytes = bcrypt.hashpw(password.encode(), bcrypt.gensalt(rounds=12))

        if for_storage:
            hashed = base64.b64encode(hashed)
        return hashed

    def add_auth(self, username: str, password: str) -> None:
        """Add a new authenticated user/pass.

        Raises InvalidUser if the username is already taken.
        """
        username = self.normalize_username(username)

        if any(
            self.normalize_username(user["username"]) == username for user in self.users
        ):
            raise InvalidUser

        self.users.append(
            {
                "username": username,
                "password": self.hash_password(password, True).decode(),
            }
        )

    @callback
    def async_remove_auth(self, username: str) -> None:
        """Remove authentication.

        Raises InvalidUser if the username is unknown.
        """
        username = self.normalize_username(username)

        index = None
        for i, user in enumerate(self.users):
            if self.normalize_username(user["username"]) == username:
                index = i
                break

        if index is None:
            raise InvalidUser

        self.users.pop(index)

    def change_password(self, username: str, new_password: str) -> None:
        """Update the password.

        Raises InvalidUser if user cannot be found.
        """
        username = self.normalize_username(username)

        for user in self.users:
            if self.normalize_username(user["username"]) == username:
                user["password"] = self.hash_password(new_password, True).decode()
                break
        else:
            raise InvalidUser

    async def async_save(self) -> None:
        """Save data."""
        await self._store.async_save(self._data)
@AUTH_PROVIDERS.register("homeassistant")
class HassAuthProvider(AuthProvider):
    """Auth provider based on a local storage of users in Safegate Pro config dir."""

    DEFAULT_TITLE = "Safegate Pro Local"

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        """Initialize an Safegate Pro auth provider."""
        super().__init__(*args, **kwargs)
        # Lazily loaded user database; guarded by _init_lock so concurrent
        # callers do not load it twice.
        self.data: Data | None = None
        self._init_lock = asyncio.Lock()

    async def async_initialize(self) -> None:
        """Initialize the auth provider."""
        async with self._init_lock:
            if self.data is not None:
                return

            data = Data(self.hass)
            await data.async_load()
            self.data = data

    async def async_login_flow(self, context: dict | None) -> LoginFlow:
        """Return a flow to login."""
        return HassLoginFlow(self)

    async def async_validate_login(self, username: str, password: str) -> None:
        """Validate a username and password."""
        if self.data is None:
            await self.async_initialize()
            assert self.data is not None

        # bcrypt checks are CPU-bound, so run them in the executor.
        await self.hass.async_add_executor_job(
            self.data.validate_login, username, password
        )

    async def async_add_auth(self, username: str, password: str) -> None:
        """Call add_auth on data."""
        if self.data is None:
            await self.async_initialize()
            assert self.data is not None

        await self.hass.async_add_executor_job(self.data.add_auth, username, password)
        await self.data.async_save()

    async def async_remove_auth(self, username: str) -> None:
        """Call remove_auth on data."""
        if self.data is None:
            await self.async_initialize()
            assert self.data is not None

        self.data.async_remove_auth(username)
        await self.data.async_save()

    async def async_change_password(self, username: str, new_password: str) -> None:
        """Call change_password on data."""
        if self.data is None:
            await self.async_initialize()
            assert self.data is not None

        await self.hass.async_add_executor_job(
            self.data.change_password, username, new_password
        )
        await self.data.async_save()

    async def async_get_or_create_credentials(
        self, flow_result: Mapping[str, str]
    ) -> Credentials:
        """Get credentials based on the flow result."""
        if self.data is None:
            await self.async_initialize()
            assert self.data is not None

        norm_username = self.data.normalize_username
        username = norm_username(flow_result["username"])

        for credential in await self.async_credentials():
            if norm_username(credential.data["username"]) == username:
                return credential

        # Create new credentials.
        return self.async_create_credentials({"username": username})

    async def async_user_meta_for_credentials(
        self, credentials: Credentials
    ) -> UserMeta:
        """Get extra info for this credential."""
        return UserMeta(name=credentials.data["username"], is_active=True)

    async def async_will_remove_credentials(self, credentials: Credentials) -> None:
        """When credentials get removed, also remove the auth."""
        if self.data is None:
            await self.async_initialize()
            assert self.data is not None

        try:
            self.data.async_remove_auth(credentials.data["username"])
            await self.data.async_save()
        except InvalidUser:
            # Can happen if somehow we didn't clean up a credential
            pass
class HassLoginFlow(LoginFlow):
    """Handler for the login flow."""

    async def async_step_init(
        self, user_input: dict[str, str] | None = None
    ) -> FlowResult:
        """Handle the step of the form."""
        errors: dict[str, str] = {}

        if user_input is not None:
            provider = cast(HassAuthProvider, self._auth_provider)
            try:
                await provider.async_validate_login(
                    user_input["username"], user_input["password"]
                )
            except InvalidAuth:
                errors["base"] = "invalid_auth"

            if not errors:
                # Never keep the plain-text password in the flow result.
                user_input.pop("password")
                return await self.async_finish(user_input)

        schema: dict[str, type] = OrderedDict((("username", str), ("password", str)))
        return self.async_show_form(
            step_id="init", data_schema=vol.Schema(schema), errors=errors
        )
from __future__ import annotations
from collections.abc import Mapping
import importlib
import logging
import types
from typing import Any
import voluptuous as vol
from voluptuous.humanize import humanize_error
from homeassistant import data_entry_flow, requirements
from homeassistant.const import CONF_ID, CONF_NAME, CONF_TYPE
from homeassistant.core import HomeAssistant, callback
from homeassistant.data_entry_flow import FlowResult
from homeassistant.exceptions import HomeAssistantError
from homeassistant.util import dt as dt_util
from homeassistant.util.decorator import Registry
from ..auth_store import AuthStore
from ..const import MFA_SESSION_EXPIRATION
from ..models import Credentials, RefreshToken, User, UserMeta
_LOGGER = logging.getLogger(__name__)

# hass.data key tracking which providers already had requirements installed.
DATA_REQS = "auth_prov_reqs_processed"

# Registry of auth provider classes, keyed by provider type string.
AUTH_PROVIDERS = Registry()

# Base schema every auth provider config must satisfy; providers extend it.
AUTH_PROVIDER_SCHEMA = vol.Schema(
    {
        vol.Required(CONF_TYPE): str,
        vol.Optional(CONF_NAME): str,
        # Specify ID if you have two auth providers for same type.
        vol.Optional(CONF_ID): str,
    },
    extra=vol.ALLOW_EXTRA,
)
class AuthProvider:
    """Provider of user authentication."""

    DEFAULT_TITLE = "Unnamed auth provider"

    def __init__(
        self, hass: HomeAssistant, store: AuthStore, config: dict[str, Any]
    ) -> None:
        """Initialize an auth provider."""
        self.hass = hass
        self.store = store
        self.config = config

    @property
    def id(self) -> str | None:
        """Return id of the auth provider.

        Optional, can be None.
        """
        return self.config.get(CONF_ID)

    @property
    def type(self) -> str:
        """Return type of the provider."""
        return self.config[CONF_TYPE]  # type: ignore

    @property
    def name(self) -> str:
        """Return the name of the auth provider."""
        return self.config.get(CONF_NAME, self.DEFAULT_TITLE)

    @property
    def support_mfa(self) -> bool:
        """Return whether multi-factor auth supported by the auth provider."""
        return True

    async def async_credentials(self) -> list[Credentials]:
        """Return all credentials of this provider."""
        users = await self.store.async_get_users()
        # Only credentials matching both this provider's type and id belong
        # to this provider instance.
        return [
            credentials
            for user in users
            for credentials in user.credentials
            if (
                credentials.auth_provider_type == self.type
                and credentials.auth_provider_id == self.id
            )
        ]

    @callback
    def async_create_credentials(self, data: dict[str, str]) -> Credentials:
        """Create credentials."""
        return Credentials(
            auth_provider_type=self.type, auth_provider_id=self.id, data=data
        )

    # Implement by extending class

    async def async_login_flow(self, context: dict | None) -> LoginFlow:
        """Return the data flow for logging in with auth provider.

        Auth provider should extend LoginFlow and return an instance.
        """
        raise NotImplementedError

    async def async_get_or_create_credentials(
        self, flow_result: Mapping[str, str]
    ) -> Credentials:
        """Get credentials based on the flow result."""
        raise NotImplementedError

    async def async_user_meta_for_credentials(
        self, credentials: Credentials
    ) -> UserMeta:
        """Return extra user metadata for credentials.

        Will be used to populate info when creating a new user.
        """
        raise NotImplementedError

    async def async_initialize(self) -> None:
        """Initialize the auth provider."""

    @callback
    def async_validate_refresh_token(
        self, refresh_token: RefreshToken, remote_ip: str | None = None
    ) -> None:
        """Verify a refresh token is still valid.

        Optional hook for an auth provider to verify validity of a refresh token.
        Should raise InvalidAuthError on errors.
        """
async def auth_provider_from_config(
    hass: HomeAssistant, store: AuthStore, config: dict[str, Any]
) -> AuthProvider:
    """Initialize an auth provider from a config."""
    provider_name = config[CONF_TYPE]
    module = await load_auth_provider_module(hass, provider_name)

    try:
        validated = module.CONFIG_SCHEMA(config)  # type: ignore
    except vol.Invalid as err:
        # Log against the raw config, since validation of it failed.
        _LOGGER.error(
            "Invalid configuration for auth provider %s: %s",
            provider_name,
            humanize_error(config, err),
        )
        raise

    provider_cls = AUTH_PROVIDERS[provider_name]
    return provider_cls(hass, store, validated)  # type: ignore
async def load_auth_provider_module(
    hass: HomeAssistant, provider: str
) -> types.ModuleType:
    """Load an auth provider."""
    try:
        module = importlib.import_module(f"homeassistant.auth.providers.{provider}")
    except ImportError as err:
        _LOGGER.error("Unable to load auth provider %s: %s", provider, err)
        raise HomeAssistantError(
            f"Unable to load auth provider {provider}: {err}"
        ) from err

    # Nothing to install when pip is disabled or no requirements declared.
    needs_reqs = not hass.config.skip_pip and hasattr(module, "REQUIREMENTS")
    if not needs_reqs:
        return module

    processed = hass.data.get(DATA_REQS)
    if processed is None:
        # First provider loaded this run: create the tracking set.
        processed = hass.data[DATA_REQS] = set()
    elif provider in processed:
        # Requirements already handled for this provider.
        return module

    # https://github.com/python/mypy/issues/1424
    reqs = module.REQUIREMENTS  # type: ignore
    await requirements.async_process_requirements(
        hass, f"auth provider {provider}", reqs
    )

    processed.add(provider)
    return module
class LoginFlow(data_entry_flow.FlowHandler):
    """Handler for the login flow."""

    def __init__(self, auth_provider: AuthProvider) -> None:
        """Initialize the login flow."""
        self._auth_provider = auth_provider
        self._auth_module_id: str | None = None
        self._auth_manager = auth_provider.hass.auth
        # module id -> display name of MFA modules the user has enabled.
        self.available_mfa_modules: dict[str, str] = {}
        self.created_at = dt_util.utcnow()
        self.invalid_mfa_times = 0
        self.user: User | None = None
        self.credential: Credentials | None = None

    async def async_step_init(
        self, user_input: dict[str, str] | None = None
    ) -> FlowResult:
        """Handle the first step of login flow.

        Return self.async_show_form(step_id='init') if user_input is None.
        Return await self.async_finish(flow_result) if login init step pass.
        """
        raise NotImplementedError

    async def async_step_select_mfa_module(
        self, user_input: dict[str, str] | None = None
    ) -> FlowResult:
        """Handle the step of select mfa module."""
        errors = {}

        if user_input is not None:
            auth_module = user_input.get("multi_factor_auth_module")
            if auth_module in self.available_mfa_modules:
                self._auth_module_id = auth_module
                return await self.async_step_mfa()
            errors["base"] = "invalid_auth_module"

        # Only one module available: select it without asking.
        if len(self.available_mfa_modules) == 1:
            self._auth_module_id = list(self.available_mfa_modules)[0]
            return await self.async_step_mfa()

        return self.async_show_form(
            step_id="select_mfa_module",
            data_schema=vol.Schema(
                {"multi_factor_auth_module": vol.In(self.available_mfa_modules)}
            ),
            errors=errors,
        )

    async def async_step_mfa(
        self, user_input: dict[str, str] | None = None
    ) -> FlowResult:
        """Handle the step of mfa validation."""
        assert self.credential
        assert self.user

        errors = {}

        assert self._auth_module_id is not None
        auth_module = self._auth_manager.get_auth_mfa_module(self._auth_module_id)
        if auth_module is None:
            # Given an invalid input to async_step_select_mfa_module
            # will show invalid_auth_module error
            return await self.async_step_select_mfa_module(user_input={})

        # Some modules (e.g. notify-based) need to push a code first.
        if user_input is None and hasattr(
            auth_module, "async_initialize_login_mfa_step"
        ):
            try:
                await auth_module.async_initialize_login_mfa_step(  # type: ignore
                    self.user.id
                )
            except HomeAssistantError:
                _LOGGER.exception("Error initializing MFA step")
                return self.async_abort(reason="unknown_error")

        if user_input is not None:
            # Reject codes submitted after the MFA session has expired.
            expires = self.created_at + MFA_SESSION_EXPIRATION
            if dt_util.utcnow() > expires:
                return self.async_abort(reason="login_expired")

            result = await auth_module.async_validate(self.user.id, user_input)
            if not result:
                errors["base"] = "invalid_code"
                self.invalid_mfa_times += 1
                # Abort after too many wrong codes (0 disables the limit).
                if self.invalid_mfa_times >= auth_module.MAX_RETRY_TIME > 0:
                    return self.async_abort(reason="too_many_retry")

            if not errors:
                return await self.async_finish(self.credential)

        description_placeholders: dict[str, str | None] = {
            "mfa_module_name": auth_module.name,
            "mfa_module_id": auth_module.id,
        }

        return self.async_show_form(
            step_id="mfa",
            data_schema=auth_module.input_schema,
            description_placeholders=description_placeholders,
            errors=errors,
        )

    async def async_finish(self, flow_result: Any) -> FlowResult:
        """Handle the pass of login flow."""
        return self.async_create_entry(title=self._auth_provider.name, data=flow_result)
import logging
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA, SensorEntity
from homeassistant.const import CONF_MONITORED_CONDITIONS
import homeassistant.helpers.config_validation as cv
from homeassistant.util import dt
from . import DATA_HYDRAWISE, DEVICE_MAP, DEVICE_MAP_INDEX, SENSORS, HydrawiseEntity
_LOGGER = logging.getLogger(__name__)

# Restrict monitored conditions to the sensor types this integration exposes.
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
    {
        vol.Optional(CONF_MONITORED_CONDITIONS, default=SENSORS): vol.All(
            cv.ensure_list, [vol.In(SENSORS)]
        )
    }
)

# Upper bound applied to the API's "next cycle" value before converting
# it to a timestamp.
TWO_YEAR_SECONDS = 60 * 60 * 24 * 365 * 2

WATERING_TIME_ICON = "mdi:water-pump"
def setup_platform(hass, config, add_entities, discovery_info=None):
    """Set up a sensor for a Hydrawise device."""
    hydrawise = hass.data[DATA_HYDRAWISE].data

    # One sensor entity per (monitored condition, zone) pair.
    entities = [
        HydrawiseSensor(zone, sensor_type)
        for sensor_type in config.get(CONF_MONITORED_CONDITIONS)
        for zone in hydrawise.relays
    ]
    add_entities(entities, True)
class HydrawiseSensor(HydrawiseEntity, SensorEntity):
    """A sensor implementation for Hydrawise device."""

    @property
    def state(self):
        """Return the state of the sensor."""
        return self._state

    @property
    def unit_of_measurement(self):
        """Return the units of measurement."""
        idx = DEVICE_MAP_INDEX.index("UNIT_OF_MEASURE_INDEX")
        return DEVICE_MAP[self._sensor_type][idx]

    def update(self):
        """Get the latest data and updates the states."""
        mydata = self.hass.data[DATA_HYDRAWISE].data
        _LOGGER.debug("Updating Hydrawise sensor: %s", self._name)
        relay_data = mydata.relays[self.data["relay"] - 1]
        if self._sensor_type != "watering_time":
            # 'next_cycle': clamp to two years out before building a timestamp.
            next_cycle = min(relay_data["time"], TWO_YEAR_SECONDS)
            _LOGGER.debug("New cycle time: %s", next_cycle)
            self._state = dt.utc_from_timestamp(
                dt.as_timestamp(dt.now()) + next_cycle
            ).isoformat()
        elif relay_data["timestr"] == "Now":
            # Zone is running: remaining run time, in whole minutes.
            self._state = int(relay_data["run"] / 60)
        else:
            self._state = 0
import logging
import voluptuous as vol
from homeassistant.components.switch import PLATFORM_SCHEMA, SwitchEntity
from homeassistant.const import CONF_MONITORED_CONDITIONS
import homeassistant.helpers.config_validation as cv
from . import (
ALLOWED_WATERING_TIME,
CONF_WATERING_TIME,
DATA_HYDRAWISE,
DEFAULT_WATERING_TIME,
SWITCHES,
HydrawiseEntity,
)
_LOGGER = logging.getLogger(__name__)

# Users may limit the created switches and set a default manual-watering time.
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
    {
        vol.Optional(CONF_MONITORED_CONDITIONS, default=SWITCHES): vol.All(
            cv.ensure_list, [vol.In(SWITCHES)]
        ),
        vol.Optional(CONF_WATERING_TIME, default=DEFAULT_WATERING_TIME): vol.All(
            vol.In(ALLOWED_WATERING_TIME)
        ),
    }
)
def setup_platform(hass, config, add_entities, discovery_info=None):
    """Set up a sensor for a Hydrawise device."""
    hydrawise = hass.data[DATA_HYDRAWISE].data
    default_watering_timer = config.get(CONF_WATERING_TIME)

    # One switch entity per (monitored condition, zone) pair.
    entities = [
        HydrawiseSwitch(default_watering_timer, zone, sensor_type)
        for sensor_type in config.get(CONF_MONITORED_CONDITIONS)
        for zone in hydrawise.relays
    ]
    add_entities(entities, True)
class HydrawiseSwitch(HydrawiseEntity, SwitchEntity):
    """A switch implementation for Hydrawise device."""

    def __init__(self, default_watering_timer, *args):
        """Initialize a switch for Hydrawise device."""
        super().__init__(*args)
        self._default_watering_timer = default_watering_timer

    @property
    def is_on(self):
        """Return true if device is on."""
        return self._state

    def turn_on(self, **kwargs):
        """Turn the device on."""
        zone_index = self.data["relay"] - 1
        controller = self.hass.data[DATA_HYDRAWISE].data
        if self._sensor_type == "manual_watering":
            # Start the zone for the configured default duration.
            controller.run_zone(self._default_watering_timer, zone_index)
        elif self._sensor_type == "auto_watering":
            # Suspension of 0 days re-enables automatic watering.
            controller.suspend_zone(0, zone_index)

    def turn_off(self, **kwargs):
        """Turn the device off."""
        zone_index = self.data["relay"] - 1
        controller = self.hass.data[DATA_HYDRAWISE].data
        if self._sensor_type == "manual_watering":
            # Run for 0 minutes, i.e. stop the zone.
            controller.run_zone(0, zone_index)
        elif self._sensor_type == "auto_watering":
            # Suspend automatic watering for a year.
            controller.suspend_zone(365, zone_index)

    def update(self):
        """Update device state."""
        zone_index = self.data["relay"] - 1
        mydata = self.hass.data[DATA_HYDRAWISE].data
        _LOGGER.debug("Updating Hydrawise switch: %s", self._name)
        timestr = mydata.relays[zone_index]["timestr"]
        if self._sensor_type == "manual_watering":
            self._state = timestr == "Now"
        elif self._sensor_type == "auto_watering":
            # Scheduled (non-empty) but not currently running.
            self._state = timestr not in ("", "Now")
from homeassistant.const import ATTR_ATTRIBUTION
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import (
ATTRIBUTION,
DOMAIN,
MANUFACTURER,
SIGNAL_THERMOSTAT_UPDATE,
SIGNAL_ZONE_UPDATE,
)
class NexiaEntity(CoordinatorEntity):
    """Base class for nexia entities."""

    def __init__(self, coordinator, name, unique_id):
        """Initialize the entity."""
        super().__init__(coordinator)
        self._name = name
        self._unique_id = unique_id

    @property
    def unique_id(self):
        """Return the unique id."""
        return self._unique_id

    @property
    def name(self):
        """Return the name."""
        return self._name

    @property
    def extra_state_attributes(self):
        """Return the device specific state attributes."""
        return {ATTR_ATTRIBUTION: ATTRIBUTION}
class NexiaThermostatEntity(NexiaEntity):
    """Base class for nexia devices attached to a thermostat."""

    def __init__(self, coordinator, thermostat, name, unique_id):
        """Initialize the entity."""
        super().__init__(coordinator, name, unique_id)
        self._thermostat = thermostat

    @property
    def device_info(self):
        """Return the device_info of the device."""
        thermostat = self._thermostat
        return {
            "identifiers": {(DOMAIN, thermostat.thermostat_id)},
            "name": thermostat.get_name(),
            "model": thermostat.get_model(),
            "sw_version": thermostat.get_firmware(),
            "manufacturer": MANUFACTURER,
        }

    async def async_added_to_hass(self):
        """Listen for signals for services."""
        await super().async_added_to_hass()
        # Redraw state when this thermostat reports an update.
        signal = f"{SIGNAL_THERMOSTAT_UPDATE}-{self._thermostat.thermostat_id}"
        self.async_on_remove(
            async_dispatcher_connect(self.hass, signal, self.async_write_ha_state)
        )
class NexiaThermostatZoneEntity(NexiaThermostatEntity):
    """Base class for nexia devices attached to a thermostat."""

    def __init__(self, coordinator, zone, name, unique_id):
        """Initialize the entity."""
        super().__init__(coordinator, zone.thermostat, name, unique_id)
        self._zone = zone

    @property
    def device_info(self):
        """Return the device_info of the device."""
        # Start from the thermostat's info, then override with zone specifics.
        data = super().device_info
        zone_name = self._zone.get_name()
        data.update(
            {
                "identifiers": {(DOMAIN, self._zone.zone_id)},
                "name": zone_name,
                "suggested_area": zone_name,
                "via_device": (DOMAIN, self._zone.thermostat.thermostat_id),
            }
        )
        return data

    async def async_added_to_hass(self):
        """Listen for signals for services."""
        await super().async_added_to_hass()
        # Redraw state when this zone reports an update.
        signal = f"{SIGNAL_ZONE_UPDATE}-{self._zone.zone_id}"
        self.async_on_remove(
            async_dispatcher_connect(self.hass, signal, self.async_write_ha_state)
        )
from nexia.const import UNIT_CELSIUS
from homeassistant.components.sensor import SensorEntity
from homeassistant.const import (
DEVICE_CLASS_HUMIDITY,
DEVICE_CLASS_TEMPERATURE,
PERCENTAGE,
TEMP_CELSIUS,
TEMP_FAHRENHEIT,
)
from .const import DOMAIN, NEXIA_DEVICE, UPDATE_COORDINATOR
from .entity import NexiaThermostatEntity, NexiaThermostatZoneEntity
from .util import percent_conv
def _temperature_unit(thermostat):
    """Map a thermostat's native unit to the matching HA temperature unit."""
    return TEMP_CELSIUS if thermostat.get_unit() == UNIT_CELSIUS else TEMP_FAHRENHEIT


def _create_thermostat_sensors(coordinator, thermostat):
    """Build the system-level sensors for a single thermostat.

    Always creates the system-status and air-cleaner sensors; compressor
    speed, outdoor temperature, and relative-humidity sensors are added
    only when the thermostat reports the corresponding capability.
    """
    sensors = [
        NexiaThermostatSensor(
            coordinator,
            thermostat,
            "get_system_status",
            "System Status",
            None,
            None,
        ),
        # Air cleaner
        NexiaThermostatSensor(
            coordinator,
            thermostat,
            "get_air_cleaner_mode",
            "Air Cleaner Mode",
            None,
            None,
        ),
    ]
    # Compressor Speed (values are fractions; percent_conv scales to %)
    if thermostat.has_variable_speed_compressor():
        sensors.append(
            NexiaThermostatSensor(
                coordinator,
                thermostat,
                "get_current_compressor_speed",
                "Current Compressor Speed",
                None,
                PERCENTAGE,
                percent_conv,
            )
        )
        sensors.append(
            NexiaThermostatSensor(
                coordinator,
                thermostat,
                "get_requested_compressor_speed",
                "Requested Compressor Speed",
                None,
                PERCENTAGE,
                percent_conv,
            )
        )
    # Outdoor Temperature
    if thermostat.has_outdoor_temperature():
        sensors.append(
            NexiaThermostatSensor(
                coordinator,
                thermostat,
                "get_outdoor_temperature",
                "Outdoor Temperature",
                DEVICE_CLASS_TEMPERATURE,
                _temperature_unit(thermostat),
            )
        )
    # Relative Humidity
    if thermostat.has_relative_humidity():
        sensors.append(
            NexiaThermostatSensor(
                coordinator,
                thermostat,
                "get_relative_humidity",
                "Relative Humidity",
                DEVICE_CLASS_HUMIDITY,
                PERCENTAGE,
                percent_conv,
            )
        )
    return sensors


def _create_zone_sensors(coordinator, zone, unit):
    """Build the temperature and status sensors for a single zone."""
    return [
        # Temperature
        NexiaThermostatZoneSensor(
            coordinator,
            zone,
            "get_temperature",
            "Temperature",
            DEVICE_CLASS_TEMPERATURE,
            unit,
            None,
        ),
        # Zone Status
        NexiaThermostatZoneSensor(
            coordinator,
            zone,
            "get_status",
            "Zone Status",
            None,
            None,
        ),
        # Setpoint Status
        NexiaThermostatZoneSensor(
            coordinator,
            zone,
            "get_setpoint_status",
            "Zone Setpoint Status",
            None,
            None,
        ),
    ]


async def async_setup_entry(hass, config_entry, async_add_entities):
    """Set up sensors for a Nexia device.

    Creates the per-thermostat system sensors followed by the sensors for
    each of that thermostat's zones, then registers them all at once.
    """
    nexia_data = hass.data[DOMAIN][config_entry.entry_id]
    nexia_home = nexia_data[NEXIA_DEVICE]
    coordinator = nexia_data[UPDATE_COORDINATOR]

    entities = []
    for thermostat_id in nexia_home.get_thermostat_ids():
        thermostat = nexia_home.get_thermostat_by_id(thermostat_id)
        # Thermostat / System Sensors
        entities.extend(_create_thermostat_sensors(coordinator, thermostat))
        # The unit depends only on the thermostat, so compute it once per
        # thermostat instead of once per zone.
        unit = _temperature_unit(thermostat)
        # Zone Sensors
        for zone_id in thermostat.get_zone_ids():
            zone = thermostat.get_zone_by_id(zone_id)
            entities.extend(_create_zone_sensors(coordinator, zone, unit))

    async_add_entities(entities, True)
class NexiaThermostatSensor(NexiaThermostatEntity, SensorEntity):
    """Provides Nexia thermostat sensor support."""

    def __init__(
        self,
        coordinator,
        thermostat,
        sensor_call,
        sensor_name,
        sensor_class,
        sensor_unit,
        modifier=None,
    ):
        """Initialize the sensor.

        sensor_call is the name of the thermostat method polled for the
        value; modifier is an optional callable applied to the raw value.
        """
        super().__init__(
            coordinator,
            thermostat,
            name=f"{thermostat.get_name()} {sensor_name}",
            unique_id=f"{thermostat.thermostat_id}_{sensor_call}",
        )
        self._call = sensor_call
        self._class = sensor_class
        self._state = None
        self._unit_of_measurement = sensor_unit
        self._modifier = modifier

    @property
    def device_class(self):
        """Return the device class of the sensor."""
        return self._class

    @property
    def state(self):
        """Return the state of the sensor."""
        raw = getattr(self._thermostat, self._call)()
        if self._modifier is not None:
            raw = self._modifier(raw)
        # Keep float readings to one decimal place for display.
        return round(raw, 1) if isinstance(raw, float) else raw

    @property
    def unit_of_measurement(self):
        """Return the unit of measurement this sensor expresses itself in."""
        return self._unit_of_measurement
class NexiaThermostatZoneSensor(NexiaThermostatZoneEntity, SensorEntity):
    """Nexia Zone Sensor Support."""

    def __init__(
        self,
        coordinator,
        zone,
        sensor_call,
        sensor_name,
        sensor_class,
        sensor_unit,
        modifier=None,
    ):
        """Create a zone sensor.

        sensor_call is the name of the zone method polled for the value;
        modifier is an optional callable applied to the raw value.
        """
        super().__init__(
            coordinator,
            zone,
            name=f"{zone.get_name()} {sensor_name}",
            unique_id=f"{zone.zone_id}_{sensor_call}",
        )
        self._call = sensor_call
        self._class = sensor_class
        self._state = None
        self._unit_of_measurement = sensor_unit
        self._modifier = modifier

    @property
    def device_class(self):
        """Return the device class of the sensor."""
        return self._class

    @property
    def state(self):
        """Return the state of the sensor."""
        raw = getattr(self._zone, self._call)()
        if self._modifier is not None:
            raw = self._modifier(raw)
        # Keep float readings to one decimal place for display.
        return round(raw, 1) if isinstance(raw, float) else raw

    @property
    def unit_of_measurement(self):
        """Return the unit of measurement this sensor expresses itself in."""
        return self._unit_of_measurement
from __future__ import annotations
import logging
import voluptuous as vol
from homeassistant.components.nsw_fuel_station import (
DATA_NSW_FUEL_STATION,
StationPriceData,
)
from homeassistant.components.sensor import PLATFORM_SCHEMA, SensorEntity
from homeassistant.const import ATTR_ATTRIBUTION, CURRENCY_CENT, VOLUME_LITERS
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.update_coordinator import (
CoordinatorEntity,
DataUpdateCoordinator,
)
_LOGGER = logging.getLogger(__name__)

# Extra state attribute keys exposed by StationPriceSensor.
ATTR_STATION_ID = "station_id"
ATTR_STATION_NAME = "station_name"

# Configuration keys accepted by the platform schema below.
CONF_STATION_ID = "station_id"
CONF_FUEL_TYPES = "fuel_types"

# Fuel type codes accepted in configuration — presumably the codes used by
# the NSW FuelCheck service (TODO confirm against the upstream API).
CONF_ALLOWED_FUEL_TYPES = [
    "E10",
    "U91",
    "E85",
    "P95",
    "P98",
    "DL",
    "PDL",
    "B20",
    "LPG",
    "CNG",
    "EV",
]
# Fuel types monitored when the user does not configure any explicitly.
CONF_DEFAULT_FUEL_TYPES = ["E10", "U91"]

ATTRIBUTION = "Data provided by NSW Government FuelCheck"

PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
    {
        vol.Required(CONF_STATION_ID): cv.positive_int,
        vol.Optional(CONF_FUEL_TYPES, default=CONF_DEFAULT_FUEL_TYPES): vol.All(
            cv.ensure_list, [vol.In(CONF_ALLOWED_FUEL_TYPES)]
        ),
    }
)
def setup_platform(hass, config, add_entities, discovery_info=None):
    """Set up the NSW Fuel Station sensor."""
    station_id = config[CONF_STATION_ID]
    fuel_types = config[CONF_FUEL_TYPES]
    coordinator = hass.data[DATA_NSW_FUEL_STATION]

    # Bail out entirely if the shared coordinator has no data yet.
    if coordinator.data is None:
        _LOGGER.error("Initial fuel station price data not available")
        return

    entities = []
    for fuel_type in fuel_types:
        # Only create a sensor when a price is available for this
        # station/fuel combination; log and skip otherwise.
        if coordinator.data.prices.get((station_id, fuel_type)) is not None:
            entities.append(StationPriceSensor(coordinator, station_id, fuel_type))
        else:
            _LOGGER.error(
                "Fuel station price data not available for station %d and fuel type %s",
                station_id,
                fuel_type,
            )
    add_entities(entities)
class StationPriceSensor(CoordinatorEntity, SensorEntity):
    """Implementation of a sensor that reports the fuel price for a station."""

    def __init__(
        self,
        coordinator: DataUpdateCoordinator[StationPriceData],
        station_id: int,
        fuel_type: str,
    ) -> None:
        """Initialize the sensor for one station/fuel-type combination."""
        super().__init__(coordinator)
        self._station_id = station_id
        self._fuel_type = fuel_type

    @property
    def unique_id(self) -> str | None:
        """Return a unique ID."""
        return f"{self._station_id}_{self._fuel_type}"

    @property
    def name(self) -> str:
        """Return the name of the sensor."""
        return f"{self._get_station_name()} {self._fuel_type}"

    @property
    def state(self) -> float | None:
        """Return the state of the sensor."""
        data = self.coordinator.data
        if data is None:
            return None
        return data.prices.get((self._station_id, self._fuel_type))

    @property
    def extra_state_attributes(self) -> dict:
        """Return the state attributes of the device."""
        return {
            ATTR_STATION_ID: self._station_id,
            ATTR_STATION_NAME: self._get_station_name(),
            ATTR_ATTRIBUTION: ATTRIBUTION,
        }

    @property
    def unit_of_measurement(self) -> str:
        """Return the units of measurement."""
        return f"{CURRENCY_CENT}/{VOLUME_LITERS}"

    def _get_station_name(self):
        """Return the station's display name, falling back to its id."""
        data = self.coordinator.data
        if data is not None:
            station = data.stations.get(self._station_id)
            if station is not None:
                return station.name
        return f"station {self._station_id}"
from __future__ import annotations
from typing import Any
from homeassistant.components.sensor import SensorEntity
from homeassistant.components.speedtestdotnet import SpeedTestDataCoordinator
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import ATTR_ATTRIBUTION, STATE_UNAVAILABLE, STATE_UNKNOWN
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.restore_state import RestoreEntity
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import (
ATTR_BYTES_RECEIVED,
ATTR_BYTES_SENT,
ATTR_SERVER_COUNTRY,
ATTR_SERVER_ID,
ATTR_SERVER_NAME,
ATTRIBUTION,
DEFAULT_NAME,
DOMAIN,
ICON,
SENSOR_TYPES,
)
async def async_setup_entry(
    hass: HomeAssistant,
    config_entry: ConfigEntry,
    async_add_entities: AddEntitiesCallback,
) -> None:
    """Set up the Speedtestdotnet sensors."""
    coordinator = hass.data[DOMAIN]
    # One sensor entity per configured sensor type (ping/download/upload).
    entities = [
        SpeedtestSensor(coordinator, sensor_type) for sensor_type in SENSOR_TYPES
    ]
    async_add_entities(entities)
class SpeedtestSensor(CoordinatorEntity, RestoreEntity, SensorEntity):
    """Implementation of a speedtest.net sensor."""

    coordinator: SpeedTestDataCoordinator
    _attr_icon = ICON

    def __init__(self, coordinator: SpeedTestDataCoordinator, sensor_type: str) -> None:
        """Initialize the sensor for one measurement type (ping/download/upload)."""
        super().__init__(coordinator)
        self.type = sensor_type
        self._attr_name = f"{DEFAULT_NAME} {SENSOR_TYPES[sensor_type][0]}"
        self._attr_unit_of_measurement = SENSOR_TYPES[self.type][1]
        self._attr_unique_id = sensor_type

    @property
    def extra_state_attributes(self) -> dict[str, Any] | None:
        """Return the state attributes."""
        if not self.coordinator.data:
            return None
        attributes = {
            ATTR_ATTRIBUTION: ATTRIBUTION,
            ATTR_SERVER_NAME: self.coordinator.data["server"]["name"],
            ATTR_SERVER_COUNTRY: self.coordinator.data["server"]["country"],
            ATTR_SERVER_ID: self.coordinator.data["server"]["id"],
        }
        # Byte counters only make sense for the transfer-direction sensors.
        if self.type == "download":
            attributes[ATTR_BYTES_RECEIVED] = self.coordinator.data["bytes_received"]
        elif self.type == "upload":
            attributes[ATTR_BYTES_SENT] = self.coordinator.data["bytes_sent"]
        return attributes

    async def async_added_to_hass(self) -> None:
        """Handle entity which will be added."""
        await super().async_added_to_hass()
        state = await self.async_get_last_state()
        # Only restore a real measurement: restoring the "unknown" or
        # "unavailable" placeholder would surface it as a bogus sensor value.
        if state and state.state not in (STATE_UNKNOWN, STATE_UNAVAILABLE):
            self._attr_state = state.state

        @callback
        def update() -> None:
            """Update state."""
            self._update_state()
            self.async_write_ha_state()

        self.async_on_remove(self.coordinator.async_add_listener(update))
        self._update_state()

    def _update_state(self) -> None:
        """Update sensors state from the latest coordinator data."""
        if not self.coordinator.data:
            return
        if self.type == "ping":
            self._attr_state = self.coordinator.data["ping"]
        elif self.type == "download":
            # Raw speeds are reported in bit/s; convert to Mbit/s.
            self._attr_state = round(self.coordinator.data["download"] / 10 ** 6, 2)
        elif self.type == "upload":
            self._attr_state = round(self.coordinator.data["upload"] / 10 ** 6, 2)
import logging
# Downscale factors usable with TurboJPEG, as (numerator, denominator)
# fractions ordered largest-first so callers can pick the closest fit.
SUPPORTED_SCALING_FACTORS = [(7, 8), (3, 4), (5, 8), (1, 2), (3, 8), (1, 4), (1, 8)]

_LOGGER = logging.getLogger(__name__)
def scale_jpeg_camera_image(cam_image, width, height):
    """Scale a camera image as close as possible to one of the supported scaling factors."""
    turbo_jpeg = TurboJPEGSingleton.instance()
    # When libturbojpeg failed to load, pass the image through untouched.
    if not turbo_jpeg:
        return cam_image.content

    content = cam_image.content
    (current_width, current_height, _, _) = turbo_jpeg.decode_header(content)
    # Already at or below the requested size in at least one dimension.
    if current_width <= width or current_height <= height:
        return content

    ratio = width / current_width
    # Pick the first (largest) supported factor not exceeding the target
    # ratio; fall back to the smallest factor if none qualifies.
    scaling_factor = next(
        (factor for factor in SUPPORTED_SCALING_FACTORS if ratio >= factor[0] / factor[1]),
        SUPPORTED_SCALING_FACTORS[-1],
    )
    return turbo_jpeg.scale_with_quality(
        content,
        scaling_factor=scaling_factor,
        quality=75,
    )
class TurboJPEGSingleton:
    """Load TurboJPEG only once.

    Ensures we do not log load failures each snapshot
    since camera image fetches happen every few
    seconds.
    """

    __instance = None

    @staticmethod
    def instance():
        """Singleton for TurboJPEG."""
        # None means "never attempted"; False means "attempted and failed",
        # so a failed load is not retried (and not re-logged) on later calls.
        if TurboJPEGSingleton.__instance is None:
            TurboJPEGSingleton()
        return TurboJPEGSingleton.__instance

    def __init__(self):
        """Try to create TurboJPEG only once."""
        try:
            # TurboJPEG checks for libturbojpeg when constructed, but the
            # module also imports numpy which may or may not be importable,
            # so keep the import inside the guard.
            from turbojpeg import TurboJPEG  # pylint: disable=import-outside-toplevel

            TurboJPEGSingleton.__instance = TurboJPEG()
        except Exception:  # pylint: disable=broad-except
            TurboJPEGSingleton.__instance = False
            _LOGGER.exception(
                "Error loading libturbojpeg; Cameras may impact HomeKit performance"
            )
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.