index
int64 0
731k
| package
stringlengths 2
98
β | name
stringlengths 1
76
| docstring
stringlengths 0
281k
β | code
stringlengths 4
1.07M
β | signature
stringlengths 2
42.8k
β |
|---|---|---|---|---|---|
712,672
|
braintree.webhook_testing_gateway
|
__venmo_account_xml
| null |
def __venmo_account_xml(self, id):
return """
<venmo-account>
<created-at type="datetime">2018-10-11T21:28:37Z</created-at>
<updated-at type="datetime">2018-10-11T21:28:37Z</updated-at>
<default type="boolean">true</default>
<image-url>https://assets.braintreegateway.com/payment_method_logo/venmo.png?environment=test</image-url>
<token>%s</token>
<source-description>Venmo Account: venmojoe</source-description>
<username>venmojoe</username>
<venmo-user-id>456</venmo-user-id>
<subscriptions type="array"/>
<customer-id>venmo_customer_id</customer-id>
<global-id>cGF5bWVudG1ldGhvZF92ZW5tb2FjY291bnQ</global-id>
</venmo-account>
""" % id
|
(self, id)
|
712,674
|
braintree.webhook_testing_gateway
|
sample_notification
| null |
def sample_notification(self, kind, id, source_merchant_id=None):
payload = encodebytes(self.__sample_xml(kind, id, source_merchant_id))
hmac_payload = Crypto.sha1_hmac_hash(self.gateway.config.private_key, payload)
signature = "%s|%s" % (self.gateway.config.public_key, hmac_payload)
return {'bt_signature': signature, 'bt_payload': payload}
|
(self, kind, id, source_merchant_id=None)
|
712,750
|
braintree.payment_method_parser
|
parse_payment_method
| null |
def parse_payment_method(gateway, attributes):
if "paypal_account" in attributes:
return PayPalAccount(gateway, attributes["paypal_account"])
elif "credit_card" in attributes:
return CreditCard(gateway, attributes["credit_card"])
elif "europe_bank_account" in attributes:
return EuropeBankAccount(gateway, attributes["europe_bank_account"])
elif "apple_pay_card" in attributes:
return ApplePayCard(gateway, attributes["apple_pay_card"])
elif "android_pay_card" in attributes:
return AndroidPayCard(gateway, attributes["android_pay_card"])
# NEXT_MAJOR_VERSION remove amex express checkout
elif "amex_express_checkout_card" in attributes:
return AmexExpressCheckoutCard(gateway, attributes["amex_express_checkout_card"])
elif "sepa_debit_account" in attributes:
return SepaDirectDebitAccount(gateway, attributes["sepa_debit_account"])
elif "venmo_account" in attributes:
return VenmoAccount(gateway, attributes["venmo_account"])
elif "us_bank_account" in attributes:
return UsBankAccount(gateway, attributes["us_bank_account"])
elif "visa_checkout_card" in attributes:
return VisaCheckoutCard(gateway, attributes["visa_checkout_card"])
# NEXT_MAJOR_VERSION remove masterpass
elif "masterpass_card" in attributes:
return MasterpassCard(gateway, attributes["masterpass_card"])
elif "samsung_pay_card" in attributes:
return SamsungPayCard(gateway, attributes["samsung_pay_card"])
else:
name = list(attributes)[0]
return UnknownPaymentMethod(gateway, attributes[name])
|
(gateway, attributes)
|
712,810
|
xdatasets.core
|
Query
|
The Query class.
The Query interface facilitates access to analysis-ready earth observation datasets and allows for
spatiotemporal operations to be performed based on user queries.
Attributes
----------
datasets : str, list, dict-like
If a str, a dataset name, i.e.: era5_land_reanalysis.
If a list, a list of dataset names, i.e.: [era5_single_levels_reanalysis, era5_land_reanalysis].
If a dictionary, it should map dataset names to their corresponding requested
content such as some desired variables. See the notes below for more details. The list of available datasets in this library is coming soon!
space : dict-like
A dictionary that maps spatial parameters with their corresponding value.
More information on accepted key/value pairs : :py:meth:`~xdatasets.Query._resolve_space_params`
time : dict-like
A dictionary that maps temporal parameters with their corresponding value.
More information on accepted key/value pairs : :py:meth:`~xdatasets.Query._resolve_time_params`
catalog_path : str
URL for the intake catalog which provides access to the datasets. While this library provides its own
intake catalog, users have the option to provide their own catalog, which can be particularly beneficial for
private datasets or if different configurations are needed.
Notes
-----
The dictionary approach allows more flexibility in the request. i.e.:
>>> query = {
... era5_land_reanalysis: {"variables": ["t2m", "tp"]},
... era5_single_levels_reanalysis: {"variables": "t2m"},
... }
Currently, accepted key, value pairs for a mapping argument include the following:
>>> {"variables": Union[str, List[str]]}
Examples
--------
Create data:
>>> sites = {
... "Montreal": (45.508888, -73.561668),
... "New York": (40.730610, -73.935242),
... "Miami": (25.761681, -80.191788),
... }
>>> query = {
... "datasets": "era5_land_reanalysis_dev",
... "space": {"clip": "point", "geometry": sites},
... "time": {
... "timestep": "D",
... "averaging": {"tp": np.nansum, "t2m": np.nanmean},
... "start": "1950-01-01",
... "end": "1955-12-31",
... "timezone": "America/Montreal",
... },
... }
>>> xds = xd.Query(**query)
>>> xds.data
<xarray.Dataset>
Dimensions: (site: 3, time: 2191, source: 1)
Coordinates:
latitude (site) float64 45.5 40.7 25.8
longitude (site) float64 -73.6 -73.9 -80.2
* site (site) <U8 'Montreal' 'New York' 'Miami'
* time (time) datetime64[ns] 1950-01-01 1950-01-02 ... 1955-12-31
* source (source) <U24 'era5_land_reanalysis_dev'
Data variables:
t2m_nanmean (time, site, source) float32 269.6 273.8 294.3 ... 268.1 292.1
tp_nansum (time, site, source) float32 0.0004192 2.792e-06 ... 0.0001207
Attributes:
pangeo-forge:inputs_hash: 1622c0abe9326bfa4d6ee6cdf817fccb1ef1661046f30f...
pangeo-forge:recipe_hash: f2b6c75f28693bbae820161d5b71ebdb9d740dcdde0666...
pangeo-forge:version: 0.9.4
|
class Query:
"""The Query class.
The Query interface facilitates access to analysis-ready earth observation datasets and allows for
spatiotemporal operations to be performed based on user queries.
Attributes
----------
datasets : str, list, dict-like
If a str, a dataset name, i.e.: era5_land_reanalysis.
If a list, a list of dataset names, i.e.: [era5_single_levels_reanalysis, era5_land_reanalysis].
If a dictionary, it should map dataset names to their corresponding requested
content such as some desired variables. See the notes below for more details. The list of available datasets in this library is coming soon!
space : dict-like
A dictionary that maps spatial parameters with their corresponding value.
More information on accepted key/value pairs : :py:meth:`~xdatasets.Query._resolve_space_params`
time : dict-like
A dictionary that maps temporal parameters with their corresponding value.
More information on accepted key/value pairs : :py:meth:`~xdatasets.Query._resolve_time_params`
catalog_path : str
URL for the intake catalog which provides access to the datasets. While this library provides its own
intake catalog, users have the option to provide their own catalog, which can be particularly beneficial for
private datasets or if different configurations are needed.
Notes
-----
The dictionary approach allows more flexibility in the request. i.e.:
>>> query = {
... era5_land_reanalysis: {"variables": ["t2m", "tp"]},
... era5_single_levels_reanalysis: {"variables": "t2m"},
... }
Currently, accepted key, value pairs for a mapping argument include the following:
>>> {"variables": Union[str, List[str]]}
Examples
--------
Create data:
>>> sites = {
... "Montreal": (45.508888, -73.561668),
... "New York": (40.730610, -73.935242),
... "Miami": (25.761681, -80.191788),
... }
>>> query = {
... "datasets": "era5_land_reanalysis_dev",
... "space": {"clip": "point", "geometry": sites},
... "time": {
... "timestep": "D",
... "averaging": {"tp": np.nansum, "t2m": np.nanmean},
... "start": "1950-01-01",
... "end": "1955-12-31",
... "timezone": "America/Montreal",
... },
... }
>>> xds = xd.Query(**query)
>>> xds.data
<xarray.Dataset>
Dimensions: (site: 3, time: 2191, source: 1)
Coordinates:
latitude (site) float64 45.5 40.7 25.8
longitude (site) float64 -73.6 -73.9 -80.2
* site (site) <U8 'Montreal' 'New York' 'Miami'
* time (time) datetime64[ns] 1950-01-01 1950-01-02 ... 1955-12-31
* source (source) <U24 'era5_land_reanalysis_dev'
Data variables:
t2m_nanmean (time, site, source) float32 269.6 273.8 294.3 ... 268.1 292.1
tp_nansum (time, site, source) float32 0.0004192 2.792e-06 ... 0.0001207
Attributes:
pangeo-forge:inputs_hash: 1622c0abe9326bfa4d6ee6cdf817fccb1ef1661046f30f...
pangeo-forge:recipe_hash: f2b6c75f28693bbae820161d5b71ebdb9d740dcdde0666...
pangeo-forge:version: 0.9.4
"""
def __init__(
self,
datasets: Union[str, List[str], Dict[str, Union[str, List[str]]]],
space: Dict[str, Union[str, List[str]]] = dict(),
time: Dict[str, Union[str, List[str]]] = dict(),
catalog_path: str = url_path,
) -> None:
# We cache the catalog's yaml files for easier access behind corporate firewalls
catalog_path = cache_catalog(catalog_path)
self.catalog = intake.open_catalog(catalog_path)
self.datasets = datasets
self.space = self._resolve_space_params(**space)
self.time = self._resolve_time_params(**time)
self.load_query(datasets=self.datasets, space=self.space, time=self.time)
def _resolve_space_params(
self,
clip: str = None,
geometry: Union[Dict[str, tuple], gpd.GeoDataFrame] = None,
averaging: Optional[bool] = False,
unique_id: Optional[str] = None,
) -> Dict:
"""Resolves and validates user-provided space params.
Parameters
----------
clip : str
Which kind of clip operation to perform on geometry.
Possible values are one of "polygon", "point" or "bbox".
geometry : gdf.DataFrame, Dict[str, Tuple]
Geometry/geometries on which to perform spatial operations
averaging : bool, optional
Whether to spatially average the arrays within a geometry or not
unique_id : str, optional
a column name, if gdf.DataFrame is provided, to identify each unique geometry
"""
space = locals()
space.pop("self")
assert _validate_space_params(**space)
if isinstance(geometry, gpd.GeoDataFrame):
geometry = geometry.reset_index(drop=True)
# We created a new dict based on user-provided parameters
# TODO : adapt all parameters before requesting any operations on datasets
args = {
"clip": clip,
"geometry": geometry,
"averaging": averaging,
"unique_id": unique_id,
}
return args
def _resolve_time_params(
self,
timestep: Optional[str] = None,
aggregation: Optional[
Dict[str, Union[Callable[..., Any], List[Callable[..., Any]]]]
] = None,
start: Optional[bool] = None,
end: Optional[str] = None,
timezone: Optional[str] = None,
minimum_duration: Optional[str] = None,
) -> Dict:
"""Resolves and validates user-provided time params.
Parameters
----------
timestep : str, optional
In which time step should the data be returned
Possible values: https://pandas.pydata.org/pandas-docs/stable/user_guide/timeseries.html#offset-aliases
aggregation : Dict[str, callable], optional
Mapping that associates a variable name with the aggregation function
to be applied to it. Function which can be called in the form
`f(x, axis=axis, **kwargs)` to return the result of reducing an
np.ndarray over an integer valued axis. This parameter is required
should the `timestep` argument be passed.
start : str, optional
Start date of the selected time period.
String format β can be year (β%Yβ), year-month (β%Y-%mβ) or
year-month-day(β%Y-%m-%dβ)
end : str, optional
End date of the selected time period.
String format β can be year (β%Yβ), year-month (β%Y-%mβ) or
year-month-day(β%Y-%m-%dβ)
timezone : str, optional
Timezone to be used for the returned datasets
Possible values are listed here:
https://gist.github.com/heyalexej/8bf688fd67d7199be4a1682b3eec7568
minimum_duration : str, optional
Minimum duration of a time series (id) in order to be kept
Possible values: https://pandas.pydata.org/pandas-docs/stable/user_guide/timeseries.html#offset-aliases
"""
space = locals()
space.pop("self")
# assert _validate_time_params(**space)
# We created a new dict based on user-provided parameters
# TODO : adapt all parameters before requesting any operations on datasets
args = {
"timestep": timestep,
"aggregation": aggregation,
"start": start,
"end": end,
"timezone": timezone,
"minimum_duration": minimum_duration,
}
return args
def load_query(
self,
datasets: Union[str, Dict[str, Union[str, List[str]]]],
space: Dict[str, Union[str, List[str]]],
time,
):
# Get all datasets in query
if isinstance(datasets, str):
datasets_name = [datasets]
elif isinstance(datasets, dict):
datasets_name = list(datasets.keys())
# Load data for each dataset
dsets = []
for dataset_name in datasets_name:
# data = None # FIXME: This is never used.
kwargs = {}
try:
variables_name = self.datasets[dataset_name]["variables"]
if isinstance(variables_name, str):
variables_name = [variables_name]
except:
variables_name = None
pass
try:
kwargs = {
k: v
for k, v in self.datasets[dataset_name].items()
if k not in ["variables"]
}
except:
pass
ds_one = self._process_one_dataset(
dataset_name=dataset_name,
variables=variables_name,
space=space,
time=time,
**kwargs,
)
dsets.append(ds_one)
try:
# Try naively merging datasets into single dataset
ds = None
if type(dsets[0]) == xr.Dataset:
# if more than one dataset, then we add source as a dimension
# so we can merge two or more datasets together
if len(dsets) > 1:
for idx, dset in enumerate(dsets):
for var in dset.keys():
dset[var] = dset[var].expand_dims("source", axis=-1)
dsets[idx] = dset
ds = xr.merge(dsets)
elif len(dsets) == 1:
ds = dsets[0]
except:
logging.warn("Couldn't merge datasets so we pass a list of datasets. ")
# Look into passing a DataTree instead
ds = dsets
pass
self.data = ds
return self
def _process_one_dataset(self, dataset_name, variables, space, time, **kwargs):
data = None
if "data" in kwargs:
data = kwargs["data"]
if data is not None and isinstance(data, xr.Dataset):
dataset_category = "user-provided"
elif isinstance(dataset_name, str):
dataset_category = [
category
for category in self.catalog._entries.keys()
for name in self.catalog[category]._entries.keys()
if name == dataset_name
][0]
if dataset_category in ["atmosphere"]:
with warnings.catch_warnings():
warnings.simplefilter("ignore", category=RuntimeWarning)
ds = climate_request(dataset_name, variables, space, time, self.catalog)
elif dataset_category in ["hydrology"]:
with warnings.catch_warnings():
warnings.simplefilter("ignore", category=RuntimeWarning)
ds = hydrometric_request(
dataset_name, variables, space, time, self.catalog, **kwargs
)
if dataset_category in ["geography"]:
with warnings.catch_warnings():
warnings.simplefilter("ignore", category=RuntimeWarning)
ds = gis_request(
dataset_name, variables, space, time, self.catalog, **kwargs
)
elif dataset_category in ["user-provided"]:
with warnings.catch_warnings():
warnings.simplefilter("ignore", category=RuntimeWarning)
ds = user_provided_dataset(dataset_name, variables, space, time, data)
return ds
def bbox_clip(self, ds, variable="weights"):
return ds.where(~ds[variable].isnull(), drop=True)
|
(datasets: Union[str, List[str], Dict[str, Union[str, List[str]]]], space: Dict[str, Union[str, List[str]]] = {}, time: Dict[str, Union[str, List[str]]] = {}, catalog_path: str = 'https://raw.githubusercontent.com/hydrocloudservices/catalogs/main/catalogs/main.yaml') -> None
|
712,811
|
xdatasets.core
|
__init__
| null |
def __init__(
self,
datasets: Union[str, List[str], Dict[str, Union[str, List[str]]]],
space: Dict[str, Union[str, List[str]]] = dict(),
time: Dict[str, Union[str, List[str]]] = dict(),
catalog_path: str = url_path,
) -> None:
# We cache the catalog's yaml files for easier access behind corporate firewalls
catalog_path = cache_catalog(catalog_path)
self.catalog = intake.open_catalog(catalog_path)
self.datasets = datasets
self.space = self._resolve_space_params(**space)
self.time = self._resolve_time_params(**time)
self.load_query(datasets=self.datasets, space=self.space, time=self.time)
|
(self, datasets: Union[str, List[str], Dict[str, Union[str, List[str]]]], space: Dict[str, Union[str, List[str]]] = {}, time: Dict[str, Union[str, List[str]]] = {}, catalog_path: str = 'https://raw.githubusercontent.com/hydrocloudservices/catalogs/main/catalogs/main.yaml') -> NoneType
|
712,812
|
xdatasets.core
|
_process_one_dataset
| null |
def _process_one_dataset(self, dataset_name, variables, space, time, **kwargs):
data = None
if "data" in kwargs:
data = kwargs["data"]
if data is not None and isinstance(data, xr.Dataset):
dataset_category = "user-provided"
elif isinstance(dataset_name, str):
dataset_category = [
category
for category in self.catalog._entries.keys()
for name in self.catalog[category]._entries.keys()
if name == dataset_name
][0]
if dataset_category in ["atmosphere"]:
with warnings.catch_warnings():
warnings.simplefilter("ignore", category=RuntimeWarning)
ds = climate_request(dataset_name, variables, space, time, self.catalog)
elif dataset_category in ["hydrology"]:
with warnings.catch_warnings():
warnings.simplefilter("ignore", category=RuntimeWarning)
ds = hydrometric_request(
dataset_name, variables, space, time, self.catalog, **kwargs
)
if dataset_category in ["geography"]:
with warnings.catch_warnings():
warnings.simplefilter("ignore", category=RuntimeWarning)
ds = gis_request(
dataset_name, variables, space, time, self.catalog, **kwargs
)
elif dataset_category in ["user-provided"]:
with warnings.catch_warnings():
warnings.simplefilter("ignore", category=RuntimeWarning)
ds = user_provided_dataset(dataset_name, variables, space, time, data)
return ds
|
(self, dataset_name, variables, space, time, **kwargs)
|
712,813
|
xdatasets.core
|
_resolve_space_params
|
Resolves and validates user-provided space params.
Parameters
----------
clip : str
Which kind of clip operation to perform on geometry.
Possible values are one of "polygon", "point" or "bbox".
geometry : gdf.DataFrame, Dict[str, Tuple]
Geometry/geometries on which to perform spatial operations
averaging : bool, optional
Whether to spatially average the arrays within a geometry or not
unique_id : str, optional
a column name, if gdf.DataFrame is provided, to identify each unique geometry
|
def _resolve_space_params(
self,
clip: str = None,
geometry: Union[Dict[str, tuple], gpd.GeoDataFrame] = None,
averaging: Optional[bool] = False,
unique_id: Optional[str] = None,
) -> Dict:
"""Resolves and validates user-provided space params.
Parameters
----------
clip : str
Which kind of clip operation to perform on geometry.
Possible values are one of "polygon", "point" or "bbox".
geometry : gdf.DataFrame, Dict[str, Tuple]
Geometry/geometries on which to perform spatial operations
averaging : bool, optional
Whether to spatially average the arrays within a geometry or not
unique_id : str, optional
a column name, if gdf.DataFrame is provided, to identify each unique geometry
"""
space = locals()
space.pop("self")
assert _validate_space_params(**space)
if isinstance(geometry, gpd.GeoDataFrame):
geometry = geometry.reset_index(drop=True)
# We created a new dict based on user-provided parameters
# TODO : adapt all parameters before requesting any operations on datasets
args = {
"clip": clip,
"geometry": geometry,
"averaging": averaging,
"unique_id": unique_id,
}
return args
|
(self, clip: Optional[str] = None, geometry: Union[Dict[str, tuple], geopandas.geodataframe.GeoDataFrame, NoneType] = None, averaging: Optional[bool] = False, unique_id: Optional[str] = None) -> Dict
|
712,814
|
xdatasets.core
|
_resolve_time_params
|
Resolves and validates user-provided time params.
Parameters
----------
timestep : str, optional
In which time step should the data be returned
Possible values: https://pandas.pydata.org/pandas-docs/stable/user_guide/timeseries.html#offset-aliases
aggregation : Dict[str, callable], optional
Mapping that associates a variable name with the aggregation function
to be applied to it. Function which can be called in the form
`f(x, axis=axis, **kwargs)` to return the result of reducing an
np.ndarray over an integer valued axis. This parameter is required
should the `timestep` argument be passed.
start : str, optional
Start date of the selected time period.
String format β can be year (β%Yβ), year-month (β%Y-%mβ) or
year-month-day(β%Y-%m-%dβ)
end : str, optional
End date of the selected time period.
String format β can be year (β%Yβ), year-month (β%Y-%mβ) or
year-month-day(β%Y-%m-%dβ)
timezone : str, optional
Timezone to be used for the returned datasets
Possible values are listed here:
https://gist.github.com/heyalexej/8bf688fd67d7199be4a1682b3eec7568
minimum_duration : str, optional
Minimum duration of a time series (id) in order to be kept
Possible values: https://pandas.pydata.org/pandas-docs/stable/user_guide/timeseries.html#offset-aliases
|
def _resolve_time_params(
self,
timestep: Optional[str] = None,
aggregation: Optional[
Dict[str, Union[Callable[..., Any], List[Callable[..., Any]]]]
] = None,
start: Optional[bool] = None,
end: Optional[str] = None,
timezone: Optional[str] = None,
minimum_duration: Optional[str] = None,
) -> Dict:
"""Resolves and validates user-provided time params.
Parameters
----------
timestep : str, optional
In which time step should the data be returned
Possible values: https://pandas.pydata.org/pandas-docs/stable/user_guide/timeseries.html#offset-aliases
aggregation : Dict[str, callable], optional
Mapping that associates a variable name with the aggregation function
to be applied to it. Function which can be called in the form
`f(x, axis=axis, **kwargs)` to return the result of reducing an
np.ndarray over an integer valued axis. This parameter is required
should the `timestep` argument be passed.
start : str, optional
Start date of the selected time period.
String format β can be year (β%Yβ), year-month (β%Y-%mβ) or
year-month-day(β%Y-%m-%dβ)
end : str, optional
End date of the selected time period.
String format β can be year (β%Yβ), year-month (β%Y-%mβ) or
year-month-day(β%Y-%m-%dβ)
timezone : str, optional
Timezone to be used for the returned datasets
Possible values are listed here:
https://gist.github.com/heyalexej/8bf688fd67d7199be4a1682b3eec7568
minimum_duration : str, optional
Minimum duration of a time series (id) in order to be kept
Possible values: https://pandas.pydata.org/pandas-docs/stable/user_guide/timeseries.html#offset-aliases
"""
space = locals()
space.pop("self")
# assert _validate_time_params(**space)
# We created a new dict based on user-provided parameters
# TODO : adapt all parameters before requesting any operations on datasets
args = {
"timestep": timestep,
"aggregation": aggregation,
"start": start,
"end": end,
"timezone": timezone,
"minimum_duration": minimum_duration,
}
return args
|
(self, timestep: Optional[str] = None, aggregation: Optional[Dict[str, Union[Callable[..., Any], List[Callable[..., Any]]]]] = None, start: Optional[bool] = None, end: Optional[str] = None, timezone: Optional[str] = None, minimum_duration: Optional[str] = None) -> Dict
|
712,815
|
xdatasets.core
|
bbox_clip
| null |
def bbox_clip(self, ds, variable="weights"):
return ds.where(~ds[variable].isnull(), drop=True)
|
(self, ds, variable='weights')
|
712,816
|
xdatasets.core
|
load_query
| null |
def load_query(
self,
datasets: Union[str, Dict[str, Union[str, List[str]]]],
space: Dict[str, Union[str, List[str]]],
time,
):
# Get all datasets in query
if isinstance(datasets, str):
datasets_name = [datasets]
elif isinstance(datasets, dict):
datasets_name = list(datasets.keys())
# Load data for each dataset
dsets = []
for dataset_name in datasets_name:
# data = None # FIXME: This is never used.
kwargs = {}
try:
variables_name = self.datasets[dataset_name]["variables"]
if isinstance(variables_name, str):
variables_name = [variables_name]
except:
variables_name = None
pass
try:
kwargs = {
k: v
for k, v in self.datasets[dataset_name].items()
if k not in ["variables"]
}
except:
pass
ds_one = self._process_one_dataset(
dataset_name=dataset_name,
variables=variables_name,
space=space,
time=time,
**kwargs,
)
dsets.append(ds_one)
try:
# Try naively merging datasets into single dataset
ds = None
if type(dsets[0]) == xr.Dataset:
# if more than one dataset, then we add source as a dimension
# so we can merge two or more datasets together
if len(dsets) > 1:
for idx, dset in enumerate(dsets):
for var in dset.keys():
dset[var] = dset[var].expand_dims("source", axis=-1)
dsets[idx] = dset
ds = xr.merge(dsets)
elif len(dsets) == 1:
ds = dsets[0]
except:
logging.warn("Couldn't merge datasets so we pass a list of datasets. ")
# Look into passing a DataTree instead
ds = dsets
pass
self.data = ds
return self
|
(self, datasets: Union[str, Dict[str, Union[str, List[str]]]], space: Dict[str, Union[str, List[str]]], time)
|
712,818
|
xdatasets.tutorial
|
list_available_datasets
|
Open, load lazily, and close a dataset from the public online repository (requires internet).
See Also
--------
open_dataset
|
def list_available_datasets():
"""Open, load lazily, and close a dataset from the public online repository (requires internet).
See Also
--------
open_dataset
"""
try:
import intake
except ImportError as e:
raise ImportError(
"tutorial.open_dataset depends on intake and intake-xarray to download and manage datasets."
" To proceed please install intake and intake-xarray."
) from e
cat = intake.open_catalog(catalog_path)
# This will need refactor if the catalog has more than 2 levels
# list(itertools.chain.from_iterable([list(cat[name].keys()) for name in cat._entries.keys()]))
datasets_catalog = {
field: list(sorted(cat[field]._entries.keys()))
for field in sorted(cat._entries.keys())
}
def add_section(datasets_catalog):
return [
_mapping_section(
datasets,
name=field.capitalize(),
details_func=summarize_coords,
max_items_collapse=25,
expand_option_name="display_expand_coords",
)
for field, datasets in datasets_catalog.items()
]
a = _obj_repr(
"",
[f"<div class='xr-obj-type'>{escape('xdatasets.Catalog')}</div>"],
add_section(datasets_catalog),
)
return HTML(a)
|
()
|
712,819
|
xdatasets.tutorial
|
load_dataset
|
Open, load lazily, and close a dataset from the online repository (requires internet).
See Also
--------
open_dataset
|
def load_dataset(*args, **kwargs):
"""Open, load lazily, and close a dataset from the online repository (requires internet).
See Also
--------
open_dataset
"""
return open_dataset(*args, **kwargs)
|
(*args, **kwargs)
|
712,827
|
zlliutest
|
function
| null |
def function():
print("hello from src/zlliutest/__init__.py")
|
()
|
712,828
|
devtools.debug
|
Debug
| null |
class Debug:
output_class = DebugOutput
def __init__(self, *, warnings: 'Optional[bool]' = None, highlight: 'Optional[bool]' = None):
self._show_warnings = env_bool(warnings, 'PY_DEVTOOLS_WARNINGS', True)
self._highlight = highlight
def __call__(
self,
*args: 'Any',
file_: 'Any' = None,
flush_: bool = True,
frame_depth_: int = 2,
**kwargs: 'Any',
) -> 'Any':
d_out = self._process(args, kwargs, frame_depth_)
s = d_out.str(use_highlight(self._highlight, file_))
print(s, file=file_, flush=flush_)
if kwargs:
return (*args, kwargs)
elif len(args) == 1:
return args[0]
else:
return args
def format(self, *args: 'Any', frame_depth_: int = 2, **kwargs: 'Any') -> DebugOutput:
return self._process(args, kwargs, frame_depth_)
def breakpoint(self) -> None:
import pdb
pdb.Pdb(skip=['devtools.*']).set_trace()
def timer(self, name: 'Optional[str]' = None, *, verbose: bool = True, file: 'Any' = None, dp: int = 3) -> Timer:
return Timer(name=name, verbose=verbose, file=file, dp=dp)
def _process(self, args: 'Any', kwargs: 'Any', frame_depth: int) -> DebugOutput:
"""
BEWARE: this must be called from a function exactly `frame_depth` levels below the top of the stack.
"""
# HELP: any errors other than ValueError from _getframe? If so please submit an issue
try:
call_frame: 'FrameType' = sys._getframe(frame_depth)
except ValueError:
# "If [ValueError] is deeper than the call stack, ValueError is raised"
return self.output_class(
filename='<unknown>',
lineno=0,
frame='',
arguments=list(self._args_inspection_failed(args, kwargs)),
warning=self._show_warnings and 'error parsing code, call stack too shallow',
)
function = call_frame.f_code.co_name
from pathlib import Path
path = Path(call_frame.f_code.co_filename)
if path.is_absolute():
# make the path relative
cwd = Path('.').resolve()
try:
path = path.relative_to(cwd)
except ValueError:
# happens if filename path is not within CWD
pass
lineno = call_frame.f_lineno
warning = None
import executing
source = executing.Source.for_frame(call_frame)
if not source.text:
warning = 'no code context for debug call, code inspection impossible'
arguments = list(self._args_inspection_failed(args, kwargs))
else:
ex = source.executing(call_frame)
function = ex.code_qualname()
if not ex.node:
warning = 'executing failed to find the calling node'
arguments = list(self._args_inspection_failed(args, kwargs))
else:
arguments = list(self._process_args(ex, args, kwargs))
return self.output_class(
filename=str(path),
lineno=lineno,
frame=function,
arguments=arguments,
warning=self._show_warnings and warning,
)
def _args_inspection_failed(self, args: 'Any', kwargs: 'Any') -> 'Generator[DebugArgument, None, None]':
for arg in args:
yield self.output_class.arg_class(arg)
for name, value in kwargs.items():
yield self.output_class.arg_class(value, name=name)
def _process_args(self, ex: 'Any', args: 'Any', kwargs: 'Any') -> 'Generator[DebugArgument, None, None]':
import ast
func_ast = ex.node
atok = ex.source.asttokens()
for arg, ast_arg in zip(args, func_ast.args):
if isinstance(ast_arg, ast.Name):
yield self.output_class.arg_class(arg, name=ast_arg.id)
else:
name = ' '.join(map(str.strip, atok.get_text(ast_arg).splitlines()))
yield self.output_class.arg_class(arg, name=name)
kw_arg_names = {}
for kw in func_ast.keywords:
if isinstance(kw.value, ast.Name):
kw_arg_names[kw.arg] = kw.value.id
for name, value in kwargs.items():
yield self.output_class.arg_class(value, name=name, variable=kw_arg_names.get(name))
|
(*, warnings: 'Optional[bool]' = None, highlight: 'Optional[bool]' = None)
|
712,829
|
devtools.debug
|
__call__
| null |
def __call__(
self,
*args: 'Any',
file_: 'Any' = None,
flush_: bool = True,
frame_depth_: int = 2,
**kwargs: 'Any',
) -> 'Any':
d_out = self._process(args, kwargs, frame_depth_)
s = d_out.str(use_highlight(self._highlight, file_))
print(s, file=file_, flush=flush_)
if kwargs:
return (*args, kwargs)
elif len(args) == 1:
return args[0]
else:
return args
|
(self, *args: 'Any', file_: 'Any' = None, flush_: bool = True, frame_depth_: int = 2, **kwargs: 'Any') -> 'Any'
|
712,830
|
devtools.debug
|
__init__
| null |
def __init__(self, *, warnings: 'Optional[bool]' = None, highlight: 'Optional[bool]' = None):
self._show_warnings = env_bool(warnings, 'PY_DEVTOOLS_WARNINGS', True)
self._highlight = highlight
|
(self, *, warnings: 'Optional[bool]' = None, highlight: 'Optional[bool]' = None)
|
712,831
|
devtools.debug
|
_args_inspection_failed
| null |
def _args_inspection_failed(self, args: 'Any', kwargs: 'Any') -> 'Generator[DebugArgument, None, None]':
for arg in args:
yield self.output_class.arg_class(arg)
for name, value in kwargs.items():
yield self.output_class.arg_class(value, name=name)
|
(self, args: 'Any', kwargs: 'Any') -> 'Generator[DebugArgument, None, None]'
|
712,832
|
devtools.debug
|
_process
|
BEWARE: this must be called from a function exactly `frame_depth` levels below the top of the stack.
|
def _process(self, args: 'Any', kwargs: 'Any', frame_depth: int) -> DebugOutput:
    """
    BEWARE: this must be called from a function exactly `frame_depth` levels below the top of the stack.

    Builds a DebugOutput describing the call site (file, line, qualified
    function name) and the arguments; degrades gracefully when frame or
    source inspection is unavailable.
    """
    # HELP: any errors other than ValueError from _getframe? If so please submit an issue
    try:
        call_frame: 'FrameType' = sys._getframe(frame_depth)
    except ValueError:
        # "If [ValueError] is deeper than the call stack, ValueError is raised"
        return self.output_class(
            filename='<unknown>',
            lineno=0,
            frame='',
            arguments=list(self._args_inspection_failed(args, kwargs)),
            warning=self._show_warnings and 'error parsing code, call stack too shallow',
        )
    function = call_frame.f_code.co_name
    from pathlib import Path
    path = Path(call_frame.f_code.co_filename)
    if path.is_absolute():
        # make the path relative
        cwd = Path('.').resolve()
        try:
            path = path.relative_to(cwd)
        except ValueError:
            # happens if filename path is not within CWD
            pass
    lineno = call_frame.f_lineno
    warning = None
    import executing
    # `executing` locates the exact AST node of the call, enabling
    # argument-name extraction in _process_args.
    source = executing.Source.for_frame(call_frame)
    if not source.text:
        warning = 'no code context for debug call, code inspection impossible'
        arguments = list(self._args_inspection_failed(args, kwargs))
    else:
        ex = source.executing(call_frame)
        function = ex.code_qualname()
        if not ex.node:
            warning = 'executing failed to find the calling node'
            arguments = list(self._args_inspection_failed(args, kwargs))
        else:
            arguments = list(self._process_args(ex, args, kwargs))
    return self.output_class(
        filename=str(path),
        lineno=lineno,
        frame=function,
        arguments=arguments,
        # warnings are suppressed entirely when _show_warnings is False
        warning=self._show_warnings and warning,
    )
|
(self, args: 'Any', kwargs: 'Any', frame_depth: int) -> devtools.debug.DebugOutput
|
712,833
|
devtools.debug
|
_process_args
| null |
def _process_args(self, ex: 'Any', args: 'Any', kwargs: 'Any') -> 'Generator[DebugArgument, None, None]':
    """Yield DebugArguments with names recovered from the call-site AST.

    Positional args that were plain names keep that name; other expressions
    use their (whitespace-collapsed) source text. Keyword args record the
    variable name when the value was a plain name.
    """
    import ast
    func_ast = ex.node
    atok = ex.source.asttokens()
    for arg, ast_arg in zip(args, func_ast.args):
        if isinstance(ast_arg, ast.Name):
            yield self.output_class.arg_class(arg, name=ast_arg.id)
        else:
            # collapse a multi-line expression to a single-line label
            name = ' '.join(map(str.strip, atok.get_text(ast_arg).splitlines()))
            yield self.output_class.arg_class(arg, name=name)
    kw_arg_names = {}
    for kw in func_ast.keywords:
        if isinstance(kw.value, ast.Name):
            kw_arg_names[kw.arg] = kw.value.id
    for name, value in kwargs.items():
        yield self.output_class.arg_class(value, name=name, variable=kw_arg_names.get(name))
|
(self, ex: 'Any', args: 'Any', kwargs: 'Any') -> 'Generator[DebugArgument, None, None]'
|
712,834
|
devtools.debug
|
breakpoint
| null |
def breakpoint(self) -> None:
    """Drop into pdb at the caller's frame, skipping devtools' own frames."""
    import pdb
    pdb.Pdb(skip=['devtools.*']).set_trace()
|
(self) -> NoneType
|
712,835
|
devtools.debug
|
format
| null |
def format(self, *args: 'Any', frame_depth_: int = 2, **kwargs: 'Any') -> DebugOutput:
    """Like calling debug(), but return the DebugOutput instead of printing it."""
    return self._process(args, kwargs, frame_depth_)
|
(self, *args: 'Any', frame_depth_: int = 2, **kwargs: 'Any') -> devtools.debug.DebugOutput
|
712,836
|
devtools.debug
|
timer
| null |
def timer(self, name: 'Optional[str]' = None, *, verbose: bool = True, file: 'Any' = None, dp: int = 3) -> Timer:
    """Create a Timer (usable as a context manager) with the given defaults."""
    return Timer(name=name, verbose=verbose, file=file, dp=dp)
|
(self, name: 'Optional[str]' = None, *, verbose: bool = True, file: 'Any' = None, dp: int = 3) -> devtools.timer.Timer
|
712,837
|
devtools.prettier
|
PrettyFormat
| null |
class PrettyFormat:
    """Pretty-printer that renders arbitrary Python values as indented text.

    Dispatches on type via ``_type_lookup``; values whose repr fits within
    ``simple_cutoff`` are emitted inline. Objects may customise rendering by
    defining ``__pretty__``. Output is written to an internal StringIO and
    optionally syntax-highlighted via pygments.
    """
    def __init__(
        self,
        indent_step: int = 4,
        indent_char: str = ' ',
        repr_strings: bool = False,
        simple_cutoff: int = 10,
        width: int = 120,
        yield_from_generators: bool = True,
    ):
        """Configure indentation, wrapping width and string/generator handling."""
        self._indent_step = indent_step
        self._c = indent_char
        self._repr_strings = repr_strings
        self._repr_generators = not yield_from_generators
        self._simple_cutoff = simple_cutoff
        self._width = width
        # ordered: first matching isinstance() check wins
        self._type_lookup: 'List[Tuple[Any, Callable[[Any, str, int, int], None]]]' = [
            (dict, self._format_dict),
            ((str, bytes), self._format_str_bytes),
            (tuple, self._format_tuples),
            ((list, set, frozenset), self._format_list_like),
            (bytearray, self._format_bytearray),
            (generator_types, self._format_generator),
            # put these last as the check can be slow
            (ast.AST, self._format_ast_expression),
            (LaxMapping, self._format_dict),
            (DataClassType, self._format_dataclass),
            (SQLAlchemyClassType, self._format_sqlalchemy_class),
        ]
    def __call__(self, value: 'Any', *, indent: int = 0, indent_first: bool = False, highlight: bool = False) -> str:
        """Format *value* and return the rendered string."""
        self._stream = io.StringIO()
        self._format(value, indent_current=indent, indent_first=indent_first)
        s = self._stream.getvalue()
        pygments, pyg_lexer, pyg_formatter = get_pygments()
        if highlight and pygments:
            # apparently highlight adds a trailing new line we don't want
            s = pygments.highlight(s, lexer=pyg_lexer, formatter=pyg_formatter).rstrip('\n')
        return s
    def _format(self, value: 'Any', indent_current: int, indent_first: bool) -> None:
        """Core recursive dispatch: __pretty__ hook, short-repr fast path, then type table."""
        if indent_first:
            self._stream.write(indent_current * self._c)
        try:
            pretty_func = getattr(value, '__pretty__')
        except AttributeError:
            pass
        else:
            # `pretty_func.__class__.__name__ == 'method'` should only be true for bound methods,
            # `hasattr(pretty_func, '__self__')` is more canonical but weirdly is true for unbound cython functions
            from unittest.mock import _Call as MockCall
            if pretty_func.__class__.__name__ == 'method' and not isinstance(value, MockCall):
                try:
                    gen = pretty_func(fmt=fmt, skip_exc=SkipPretty)
                    self._render_pretty(gen, indent_current)
                except SkipPretty:
                    pass
                else:
                    return None
        value_repr = repr(value)
        if len(value_repr) <= self._simple_cutoff and not isinstance(value, generator_types):
            self._stream.write(value_repr)
        else:
            indent_new = indent_current + self._indent_step
            for t, func in self._type_lookup:
                if isinstance(value, t):
                    func(value, value_repr, indent_current, indent_new)
                    return None
            self._format_raw(value, value_repr, indent_current, indent_new)
    def _render_pretty(self, gen: 'Iterable[Any]', indent: int) -> None:
        """Render a __pretty__ generator: ints -1/0/1 adjust indent, other items are written."""
        prefix = False
        for v in gen:
            if isinstance(v, int) and v in {-1, 0, 1}:
                indent += v * self._indent_step
                prefix = True
            else:
                if prefix:
                    self._stream.write('\n' + self._c * indent)
                    prefix = False
                pretty_value = v.get(PRETTY_KEY, MISSING) if (isinstance(v, dict) and len(v) == 1) else MISSING
                if pretty_value is not MISSING:
                    self._format(pretty_value, indent, False)
                elif isinstance(v, str):
                    self._stream.write(v)
                else:
                    # shouldn't happen but will
                    self._stream.write(repr(v))
    def _format_dict(self, value: 'Any', _: str, indent_current: int, indent_new: int) -> None:
        """Render dicts; OrderedDict and dict subclasses get distinguishing wrappers."""
        open_, before_, split_, after_, close_ = '{\n', indent_new * self._c, ': ', ',\n', '}'
        if isinstance(value, OrderedDict):
            open_, split_, after_, close_ = 'OrderedDict([\n', ', ', '),\n', '])'
            before_ += '('
        elif type(value) != dict:
            open_, close_ = f'<{value.__class__.__name__}({{\n', '})>'
        self._stream.write(open_)
        for k, v in value.items():
            self._stream.write(before_)
            self._format(k, indent_new, False)
            self._stream.write(split_)
            self._format(v, indent_new, False)
            self._stream.write(after_)
        self._stream.write(indent_current * self._c + close_)
    def _format_list_like(
        self, value: 'Union[List[Any], Tuple[Any, ...], Set[Any]]', _: str, indent_current: int, indent_new: int
    ) -> None:
        """Render lists/tuples/sets one element per line with type-appropriate brackets."""
        open_, close_ = '(', ')'
        for t, *oc in PARENTHESES_LOOKUP:
            if isinstance(value, t):
                open_, close_ = oc
                break
        self._stream.write(open_ + '\n')
        for v in value:
            self._format(v, indent_new, True)
            self._stream.write(',\n')
        self._stream.write(indent_current * self._c + close_)
    def _format_tuples(self, value: 'Tuple[Any, ...]', value_repr: str, indent_current: int, indent_new: int) -> None:
        """Render namedtuples as field=value pairs, plain tuples like other sequences."""
        fields = getattr(value, '_fields', None)
        if fields:
            # named tuple
            self._format_fields(value, zip(fields, value), indent_current, indent_new)
        else:
            # normal tuples are just like other similar iterables
            self._format_list_like(value, value_repr, indent_current, indent_new)
    def _format_str_bytes(
        self, value: 'Union[str, bytes]', value_repr: str, indent_current: int, indent_new: int
    ) -> None:
        """Render long str/bytes wrapped over multiple lines unless repr_strings is set."""
        if self._repr_strings:
            self._stream.write(value_repr)
        else:
            lines = list(self._wrap_lines(value, indent_new))
            if len(lines) > 1:
                self._str_lines(lines, indent_current, indent_new)
            else:
                self._stream.write(value_repr)
    def _str_lines(self, lines: 'Iterable[Union[str, bytes]]', indent_current: int, indent_new: int) -> None:
        """Write wrapped string fragments as an implicitly-concatenated parenthesised group."""
        self._stream.write('(\n')
        prefix = indent_new * self._c
        for line in lines:
            self._stream.write(prefix + repr(line) + '\n')
        self._stream.write(indent_current * self._c + ')')
    def _wrap_lines(self, s: 'Union[str, bytes]', indent_new: int) -> 'Generator[Union[str, bytes], None, None]':
        """Yield chunks of *s* that fit within the configured width at the given indent."""
        width = self._width - indent_new - 3
        for line in s.splitlines(True):
            start = 0
            for pos in range(width, len(line), width):
                yield line[start:pos]
                start = pos
            yield line[start:]
    def _format_generator(
        self, value: 'Generator[Any, None, None]', value_repr: str, indent_current: int, indent_new: int
    ) -> None:
        """Render generator contents (consuming them!) unless repr_generators is set."""
        if self._repr_generators:
            self._stream.write(value_repr)
        else:
            name = value.__class__.__name__
            if name == 'generator':
                # no name if the name is just "generator"
                self._stream.write('(\n')
            else:
                self._stream.write(f'{name}(\n')
            for v in value:
                self._format(v, indent_new, True)
                self._stream.write(',\n')
            self._stream.write(indent_current * self._c + ')')
    def _format_bytearray(self, value: 'Any', _: str, indent_current: int, indent_new: int) -> None:
        """Render a bytearray as 'bytearray' followed by wrapped bytes lines."""
        self._stream.write('bytearray')
        lines = self._wrap_lines(bytes(value), indent_new)
        self._str_lines(lines, indent_current, indent_new)
    def _format_ast_expression(self, value: ast.AST, _: str, indent_current: int, indent_new: int) -> None:
        """Render an AST node via ast.dump, re-indenting continuation lines."""
        try:
            s = ast.dump(value, indent=self._indent_step)
        except TypeError:
            # no indent before 3.9
            s = ast.dump(value)
        lines = s.splitlines(True)
        self._stream.write(lines[0])
        for line in lines[1:]:
            self._stream.write(indent_current * self._c + line)
    def _format_dataclass(self, value: 'Any', _: str, indent_current: int, indent_new: int) -> None:
        """Render a dataclass instance as field=value pairs (supports __slots__)."""
        try:
            field_items = value.__dict__.items()
        except AttributeError:
            # slots
            field_items = ((f, getattr(value, f)) for f in value.__slots__)
        self._format_fields(value, field_items, indent_current, indent_new)
    def _format_sqlalchemy_class(self, value: 'Any', _: str, indent_current: int, indent_new: int) -> None:
        """Render a SQLAlchemy mapped object; unloaded attributes shown as '<deferred>'."""
        if sa_inspect is not None:
            state = sa_inspect(value)
            deferred = state.unloaded
        else:
            deferred = set()
        fields = [
            (field, getattr(value, field) if field not in deferred else '<deferred>')
            for field in dir(value)
            if not (field.startswith('_') or field in ['metadata', 'registry'])
        ]
        self._format_fields(value, fields, indent_current, indent_new)
    def _format_raw(self, _: 'Any', value_repr: str, indent_current: int, indent_new: int) -> None:
        """Fallback: write the repr, wrapped and parenthesised when too long."""
        lines = value_repr.splitlines(True)
        if len(lines) > 1 or (len(value_repr) + indent_current) >= self._width:
            self._stream.write('(\n')
            wrap_at = self._width - indent_new
            prefix = indent_new * self._c
            from textwrap import wrap
            for line in lines:
                sub_lines = wrap(line, wrap_at)
                for sline in sub_lines:
                    self._stream.write(prefix + sline + '\n')
            self._stream.write(indent_current * self._c + ')')
        else:
            self._stream.write(value_repr)
    def _format_fields(
        self, value: 'Any', fields: 'Iterable[Tuple[str, Any]]', indent_current: int, indent_new: int
    ) -> None:
        """Render ``ClassName(field=value, ...)`` one field per line."""
        self._stream.write(f'{value.__class__.__name__}(\n')
        for field, v in fields:
            self._stream.write(indent_new * self._c)
            if field:  # field is falsy sometimes for odd things like call_args
                self._stream.write(f'{field}=')
            self._format(v, indent_new, False)
            self._stream.write(',\n')
        self._stream.write(indent_current * self._c + ')')
|
(indent_step: int = 4, indent_char: str = ' ', repr_strings: bool = False, simple_cutoff: int = 10, width: int = 120, yield_from_generators: bool = True)
|
712,838
|
devtools.prettier
|
__call__
| null |
def __call__(self, value: 'Any', *, indent: int = 0, indent_first: bool = False, highlight: bool = False) -> str:
    """Format *value* and return the rendered string, optionally pygments-highlighted."""
    self._stream = io.StringIO()
    self._format(value, indent_current=indent, indent_first=indent_first)
    s = self._stream.getvalue()
    pygments, pyg_lexer, pyg_formatter = get_pygments()
    if highlight and pygments:
        # apparently highlight adds a trailing new line we don't want
        s = pygments.highlight(s, lexer=pyg_lexer, formatter=pyg_formatter).rstrip('\n')
    return s
|
(self, value: 'Any', *, indent: int = 0, indent_first: bool = False, highlight: bool = False) -> str
|
712,839
|
devtools.prettier
|
__init__
| null |
def __init__(
    self,
    indent_step: int = 4,
    indent_char: str = ' ',
    repr_strings: bool = False,
    simple_cutoff: int = 10,
    width: int = 120,
    yield_from_generators: bool = True,
):
    """Configure indentation, wrapping width and string/generator handling."""
    self._indent_step = indent_step
    self._c = indent_char
    self._repr_strings = repr_strings
    self._repr_generators = not yield_from_generators
    self._simple_cutoff = simple_cutoff
    self._width = width
    # ordered dispatch table: first matching isinstance() check wins
    self._type_lookup: 'List[Tuple[Any, Callable[[Any, str, int, int], None]]]' = [
        (dict, self._format_dict),
        ((str, bytes), self._format_str_bytes),
        (tuple, self._format_tuples),
        ((list, set, frozenset), self._format_list_like),
        (bytearray, self._format_bytearray),
        (generator_types, self._format_generator),
        # put these last as the check can be slow
        (ast.AST, self._format_ast_expression),
        (LaxMapping, self._format_dict),
        (DataClassType, self._format_dataclass),
        (SQLAlchemyClassType, self._format_sqlalchemy_class),
    ]
|
(self, indent_step: int = 4, indent_char: str = ' ', repr_strings: bool = False, simple_cutoff: int = 10, width: int = 120, yield_from_generators: bool = True)
|
712,840
|
devtools.prettier
|
_format
| null |
def _format(self, value: 'Any', indent_current: int, indent_first: bool) -> None:
    """Core recursive dispatch: __pretty__ hook, short-repr fast path, then type table."""
    if indent_first:
        self._stream.write(indent_current * self._c)
    try:
        pretty_func = getattr(value, '__pretty__')
    except AttributeError:
        pass
    else:
        # `pretty_func.__class__.__name__ == 'method'` should only be true for bound methods,
        # `hasattr(pretty_func, '__self__')` is more canonical but weirdly is true for unbound cython functions
        from unittest.mock import _Call as MockCall
        if pretty_func.__class__.__name__ == 'method' and not isinstance(value, MockCall):
            try:
                gen = pretty_func(fmt=fmt, skip_exc=SkipPretty)
                self._render_pretty(gen, indent_current)
            except SkipPretty:
                pass
            else:
                return None
    value_repr = repr(value)
    # short reprs are emitted inline; generators are always expanded
    if len(value_repr) <= self._simple_cutoff and not isinstance(value, generator_types):
        self._stream.write(value_repr)
    else:
        indent_new = indent_current + self._indent_step
        for t, func in self._type_lookup:
            if isinstance(value, t):
                func(value, value_repr, indent_current, indent_new)
                return None
        self._format_raw(value, value_repr, indent_current, indent_new)
|
(self, value: 'Any', indent_current: int, indent_first: bool) -> None
|
712,841
|
devtools.prettier
|
_format_ast_expression
| null |
def _format_ast_expression(self, value: ast.AST, _: str, indent_current: int, indent_new: int) -> None:
    """Render an AST node via ast.dump, re-indenting continuation lines."""
    try:
        s = ast.dump(value, indent=self._indent_step)
    except TypeError:
        # no indent before 3.9
        s = ast.dump(value)
    lines = s.splitlines(True)
    self._stream.write(lines[0])
    for line in lines[1:]:
        self._stream.write(indent_current * self._c + line)
|
(self, value: ast.AST, _: str, indent_current: int, indent_new: int) -> NoneType
|
712,842
|
devtools.prettier
|
_format_bytearray
| null |
def _format_bytearray(self, value: 'Any', _: str, indent_current: int, indent_new: int) -> None:
    """Render a bytearray as 'bytearray' followed by wrapped bytes lines."""
    self._stream.write('bytearray')
    lines = self._wrap_lines(bytes(value), indent_new)
    self._str_lines(lines, indent_current, indent_new)
|
(self, value: 'Any', _: str, indent_current: int, indent_new: int) -> None
|
712,843
|
devtools.prettier
|
_format_dataclass
| null |
def _format_dataclass(self, value: 'Any', _: str, indent_current: int, indent_new: int) -> None:
    """Render a dataclass instance as field=value pairs (supports __slots__ classes)."""
    try:
        field_items = value.__dict__.items()
    except AttributeError:
        # slots
        field_items = ((f, getattr(value, f)) for f in value.__slots__)
    self._format_fields(value, field_items, indent_current, indent_new)
|
(self, value: 'Any', _: str, indent_current: int, indent_new: int) -> None
|
712,844
|
devtools.prettier
|
_format_dict
| null |
def _format_dict(self, value: 'Any', _: str, indent_current: int, indent_new: int) -> None:
    """Render dicts; OrderedDict and dict subclasses get distinguishing wrappers."""
    open_, before_, split_, after_, close_ = '{\n', indent_new * self._c, ': ', ',\n', '}'
    if isinstance(value, OrderedDict):
        open_, split_, after_, close_ = 'OrderedDict([\n', ', ', '),\n', '])'
        before_ += '('
    elif type(value) != dict:
        # dict subclass (or LaxMapping): show the class name around the braces
        open_, close_ = f'<{value.__class__.__name__}({{\n', '})>'
    self._stream.write(open_)
    for k, v in value.items():
        self._stream.write(before_)
        self._format(k, indent_new, False)
        self._stream.write(split_)
        self._format(v, indent_new, False)
        self._stream.write(after_)
    self._stream.write(indent_current * self._c + close_)
|
(self, value: 'Any', _: str, indent_current: int, indent_new: int) -> None
|
712,845
|
devtools.prettier
|
_format_fields
| null |
def _format_fields(
    self, value: 'Any', fields: 'Iterable[Tuple[str, Any]]', indent_current: int, indent_new: int
) -> None:
    """Render ``ClassName(field=value, ...)`` one field per line."""
    self._stream.write(f'{value.__class__.__name__}(\n')
    for field, v in fields:
        self._stream.write(indent_new * self._c)
        if field:  # field is falsy sometimes for odd things like call_args
            self._stream.write(f'{field}=')
        self._format(v, indent_new, False)
        self._stream.write(',\n')
    self._stream.write(indent_current * self._c + ')')
|
(self, value: 'Any', fields: 'Iterable[Tuple[str, Any]]', indent_current: int, indent_new: int) -> None
|
712,846
|
devtools.prettier
|
_format_generator
| null |
def _format_generator(
    self, value: 'Generator[Any, None, None]', value_repr: str, indent_current: int, indent_new: int
) -> None:
    """Render generator contents (note: this consumes the generator) unless repr_generators is set."""
    if self._repr_generators:
        self._stream.write(value_repr)
    else:
        name = value.__class__.__name__
        if name == 'generator':
            # no name if the name is just "generator"
            self._stream.write('(\n')
        else:
            self._stream.write(f'{name}(\n')
        for v in value:
            self._format(v, indent_new, True)
            self._stream.write(',\n')
        self._stream.write(indent_current * self._c + ')')
|
(self, value: 'Generator[Any, None, None]', value_repr: str, indent_current: int, indent_new: int) -> None
|
712,847
|
devtools.prettier
|
_format_list_like
| null |
def _format_list_like(
    self, value: 'Union[List[Any], Tuple[Any, ...], Set[Any]]', _: str, indent_current: int, indent_new: int
) -> None:
    """Render lists/tuples/sets one element per line with type-appropriate brackets."""
    open_, close_ = '(', ')'
    for t, *oc in PARENTHESES_LOOKUP:
        if isinstance(value, t):
            open_, close_ = oc
            break
    self._stream.write(open_ + '\n')
    for v in value:
        self._format(v, indent_new, True)
        self._stream.write(',\n')
    self._stream.write(indent_current * self._c + close_)
|
(self, value: 'Union[List[Any], Tuple[Any, ...], Set[Any]]', _: str, indent_current: int, indent_new: int) -> None
|
712,848
|
devtools.prettier
|
_format_raw
| null |
def _format_raw(self, _: 'Any', value_repr: str, indent_current: int, indent_new: int) -> None:
    """Fallback renderer: write the repr, wrapped and parenthesised when too long."""
    lines = value_repr.splitlines(True)
    if len(lines) > 1 or (len(value_repr) + indent_current) >= self._width:
        self._stream.write('(\n')
        wrap_at = self._width - indent_new
        prefix = indent_new * self._c
        from textwrap import wrap
        for line in lines:
            sub_lines = wrap(line, wrap_at)
            for sline in sub_lines:
                self._stream.write(prefix + sline + '\n')
        self._stream.write(indent_current * self._c + ')')
    else:
        self._stream.write(value_repr)
|
(self, _: 'Any', value_repr: str, indent_current: int, indent_new: int) -> None
|
712,849
|
devtools.prettier
|
_format_sqlalchemy_class
| null |
def _format_sqlalchemy_class(self, value: 'Any', _: str, indent_current: int, indent_new: int) -> None:
    """Render a SQLAlchemy mapped object; unloaded attributes are shown as '<deferred>'."""
    if sa_inspect is not None:
        state = sa_inspect(value)
        deferred = state.unloaded
    else:
        deferred = set()
    # skip private names and SQLAlchemy registry/metadata attributes
    fields = [
        (field, getattr(value, field) if field not in deferred else '<deferred>')
        for field in dir(value)
        if not (field.startswith('_') or field in ['metadata', 'registry'])
    ]
    self._format_fields(value, fields, indent_current, indent_new)
|
(self, value: 'Any', _: str, indent_current: int, indent_new: int) -> None
|
712,850
|
devtools.prettier
|
_format_str_bytes
| null |
def _format_str_bytes(
    self, value: 'Union[str, bytes]', value_repr: str, indent_current: int, indent_new: int
) -> None:
    """Render long str/bytes wrapped over multiple lines unless repr_strings is set."""
    if self._repr_strings:
        self._stream.write(value_repr)
    else:
        lines = list(self._wrap_lines(value, indent_new))
        if len(lines) > 1:
            self._str_lines(lines, indent_current, indent_new)
        else:
            self._stream.write(value_repr)
|
(self, value: 'Union[str, bytes]', value_repr: str, indent_current: int, indent_new: int) -> None
|
712,851
|
devtools.prettier
|
_format_tuples
| null |
def _format_tuples(self, value: 'Tuple[Any, ...]', value_repr: str, indent_current: int, indent_new: int) -> None:
    """Render namedtuples as field=value pairs, plain tuples like other sequences."""
    fields = getattr(value, '_fields', None)
    if fields:
        # named tuple
        self._format_fields(value, zip(fields, value), indent_current, indent_new)
    else:
        # normal tuples are just like other similar iterables
        self._format_list_like(value, value_repr, indent_current, indent_new)
|
(self, value: 'Tuple[Any, ...]', value_repr: str, indent_current: int, indent_new: int) -> None
|
712,852
|
devtools.prettier
|
_render_pretty
| null |
def _render_pretty(self, gen: 'Iterable[Any]', indent: int) -> None:
    """Render a __pretty__ generator: ints in {-1, 0, 1} adjust the indent level, other items are written."""
    prefix = False
    for v in gen:
        if isinstance(v, int) and v in {-1, 0, 1}:
            indent += v * self._indent_step
            prefix = True
        else:
            if prefix:
                # emit the newline + indent deferred from the last indent change
                self._stream.write('\n' + self._c * indent)
                prefix = False
            pretty_value = v.get(PRETTY_KEY, MISSING) if (isinstance(v, dict) and len(v) == 1) else MISSING
            if pretty_value is not MISSING:
                self._format(pretty_value, indent, False)
            elif isinstance(v, str):
                self._stream.write(v)
            else:
                # shouldn't happen but will
                self._stream.write(repr(v))
|
(self, gen: 'Iterable[Any]', indent: int) -> None
|
712,853
|
devtools.prettier
|
_str_lines
| null |
def _str_lines(self, lines: 'Iterable[Union[str, bytes]]', indent_current: int, indent_new: int) -> None:
    """Write wrapped string fragments as an implicitly-concatenated parenthesised group."""
    self._stream.write('(\n')
    prefix = indent_new * self._c
    for line in lines:
        self._stream.write(prefix + repr(line) + '\n')
    self._stream.write(indent_current * self._c + ')')
|
(self, lines: 'Iterable[Union[str, bytes]]', indent_current: int, indent_new: int) -> None
|
712,854
|
devtools.prettier
|
_wrap_lines
| null |
def _wrap_lines(self, s: 'Union[str, bytes]', indent_new: int) -> 'Generator[Union[str, bytes], None, None]':
    """Yield chunks of *s*, split per source line, that fit the configured width at this indent."""
    width = self._width - indent_new - 3
    for line in s.splitlines(True):
        start = 0
        for pos in range(width, len(line), width):
            yield line[start:pos]
            start = pos
        yield line[start:]
|
(self, s: 'Union[str, bytes]', indent_new: int) -> 'Generator[Union[str, bytes], None, None]'
|
712,855
|
devtools.timer
|
Timer
| null |
class Timer:
    """Simple stopwatch usable as a context manager or via start()/capture().

    Each start() appends a TimerResult; summary() prints aggregate stats
    over all captured results.
    """
    def __init__(self, name: 'Optional[str]' = None, verbose: bool = True, file: 'Any' = None, dp: int = 3) -> None:
        """Configure default name, verbosity, output stream and decimal places."""
        self.file = file
        self.dp = dp
        self._name = name
        self._verbose = verbose
        self.results: 'List[TimerResult]' = []
    def __call__(self, name: 'Optional[str]' = None, verbose: 'Optional[bool]' = None) -> 'Timer':
        """Update defaults in place; returns self so the timer can be re-used inline."""
        if name:
            self._name = name
        if verbose is not None:
            self._verbose = verbose
        return self
    def start(self, name: 'Optional[str]' = None, verbose: 'Optional[bool]' = None) -> 'Timer':
        """Begin a new timed interval; returns self for chaining."""
        self.results.append(TimerResult(name or self._name, self._verbose if verbose is None else verbose))
        return self
    def capture(self, verbose: 'Optional[bool]' = None) -> 'TimerResult':
        """Stop the most recent interval, optionally print it, and return the result."""
        r = self.results[-1]
        r.capture()
        print_ = r.verbose if verbose is None else verbose
        if print_:
            print(r.str(self.dp), file=self.file, flush=True)
        return r
    def summary(self, verbose: bool = False) -> 'List[float]':
        """Capture any unfinished results and print mean/stdev/min/max of all elapsed times.

        Raises RuntimeError if the timer was never started.
        """
        times = []
        for r in self.results:
            if not r.finish:
                r.capture()
            if verbose:
                print(f'    {r.str(self.dp)}', file=self.file)
            times.append(r.elapsed())
        if times:
            from statistics import mean, stdev
            print(
                f'{len(times)} times: '
                f'mean={mean(times):0.{self.dp}f}s '
                f'stdev={stdev(times) if len(times) > 1 else 0:0.{self.dp}f}s '
                f'min={min(times):0.{self.dp}f}s '
                f'max={max(times):0.{self.dp}f}s',
                file=self.file,
                flush=True,
            )
        else:
            raise RuntimeError('timer not started')
        return times
    def __enter__(self) -> 'Timer':
        """Start timing on entering the ``with`` block."""
        self.start()
        return self
    def __exit__(self, *args: 'Any') -> None:
        """Capture (and possibly print) the elapsed time on leaving the ``with`` block."""
        self.capture()
|
(name: 'Optional[str]' = None, verbose: bool = True, file: 'Any' = None, dp: int = 3) -> None
|
712,856
|
devtools.timer
|
__call__
| null |
def __call__(self, name: 'Optional[str]' = None, verbose: 'Optional[bool]' = None) -> 'Timer':
    """Update defaults in place; returns self so the timer can be re-used inline."""
    if name:
        self._name = name
    if verbose is not None:
        self._verbose = verbose
    return self
|
(self, name: 'Optional[str]' = None, verbose: 'Optional[bool]' = None) -> 'Timer'
|
712,857
|
devtools.timer
|
__enter__
| null |
def __enter__(self) -> 'Timer':
    """Start timing on entering the ``with`` block."""
    self.start()
    return self
|
(self) -> devtools.timer.Timer
|
712,858
|
devtools.timer
|
__exit__
| null |
def __exit__(self, *args: 'Any') -> None:
    """Capture (and possibly print) the elapsed time on leaving the ``with`` block."""
    self.capture()
|
(self, *args: 'Any') -> None
|
712,859
|
devtools.timer
|
__init__
| null |
def __init__(self, name: 'Optional[str]' = None, verbose: bool = True, file: 'Any' = None, dp: int = 3) -> None:
    """Configure default name, verbosity, output stream and decimal places."""
    self.file = file
    self.dp = dp
    self._name = name
    self._verbose = verbose
    self.results: 'List[TimerResult]' = []
|
(self, name: 'Optional[str]' = None, verbose: bool = True, file: 'Any' = None, dp: int = 3) -> None
|
712,860
|
devtools.timer
|
capture
| null |
def capture(self, verbose: 'Optional[bool]' = None) -> 'TimerResult':
    """Stop the most recent interval, optionally print it, and return the result."""
    r = self.results[-1]
    r.capture()
    print_ = r.verbose if verbose is None else verbose
    if print_:
        print(r.str(self.dp), file=self.file, flush=True)
    return r
|
(self, verbose: 'Optional[bool]' = None) -> 'TimerResult'
|
712,861
|
devtools.timer
|
start
| null |
def start(self, name: 'Optional[str]' = None, verbose: 'Optional[bool]' = None) -> 'Timer':
    """Begin a new timed interval; returns self for chaining."""
    self.results.append(TimerResult(name or self._name, self._verbose if verbose is None else verbose))
    return self
|
(self, name: 'Optional[str]' = None, verbose: 'Optional[bool]' = None) -> 'Timer'
|
712,862
|
devtools.timer
|
summary
| null |
def summary(self, verbose: bool = False) -> 'List[float]':
    """Capture unfinished results and print mean/stdev/min/max; raises RuntimeError if never started."""
    times = []
    for r in self.results:
        if not r.finish:
            r.capture()
        if verbose:
            print(f'    {r.str(self.dp)}', file=self.file)
        times.append(r.elapsed())
    if times:
        from statistics import mean, stdev
        print(
            f'{len(times)} times: '
            f'mean={mean(times):0.{self.dp}f}s '
            f'stdev={stdev(times) if len(times) > 1 else 0:0.{self.dp}f}s '
            f'min={min(times):0.{self.dp}f}s '
            f'max={max(times):0.{self.dp}f}s',
            file=self.file,
            flush=True,
        )
    else:
        raise RuntimeError('timer not started')
    return times
|
(self, verbose: bool = False) -> 'List[float]'
|
712,864
|
devtools.prettier
|
pprint
| null |
def pprint(s: 'Any', file: 'Any' = None) -> None:
    """Pretty-print *s* to *file*, auto-detecting highlighting unless force_highlight is set."""
    highlight = isatty(file) if force_highlight is None else force_highlight
    print(pformat(s, highlight=highlight), file=file, flush=True)
|
(s: 'Any', file: 'Any' = None) -> None
|
712,869
|
dbus_next.constants
|
ArgDirection
|
For an introspected argument, indicates whether it is an input parameter or a return value.
|
class ArgDirection(Enum):
    """For an introspected argument, indicates whether it is an input parameter or a return value.
    """
    IN = 'in'  #: the argument is an input parameter
    OUT = 'out'  #: the argument is a return value
|
(value, names=None, *, module=None, qualname=None, type=None, start=1)
|
712,871
|
dbus_next.constants
|
BusType
|
An enum that indicates a type of bus. On most systems, there are
normally two different kinds of buses running.
|
class BusType(Enum):
    """An enum that indicates a type of bus. On most systems, there are
    normally two different kinds of buses running.
    """
    SESSION = 1  #: A bus for the current graphical user session.
    SYSTEM = 2  #: A persistent bus for the whole machine.
|
(value, names=None, *, module=None, qualname=None, type=None, start=1)
|
712,872
|
dbus_next.errors
|
DBusError
| null |
class DBusError(Exception):
    """An exception representing a D-Bus error reply.

    ``type_`` may be an :class:`ErrorType` member or a raw error-name string;
    it must be a valid interface name. ``reply``, when given, is the error
    :class:`Message` this exception was built from.
    """
    def __init__(self, type_, text, reply=None):
        super().__init__(text)
        if type(type_) is ErrorType:
            # normalise enum members to their string value
            type_ = type_.value
        assert_interface_name_valid(type_)
        if reply is not None and type(reply) is not Message:
            raise TypeError('reply must be of type Message')
        self.type = type_
        self.text = text
        self.reply = reply
    @staticmethod
    def _from_message(msg):
        """Build a DBusError from an ERROR-type Message (first body element is the text)."""
        assert msg.message_type == MessageType.ERROR
        return DBusError(msg.error_name, msg.body[0], reply=msg)
    def _as_message(self, msg):
        """Convert this error into an error-reply Message for *msg*."""
        return Message.new_error(msg, self.type, self.text)
|
(type_, text, reply=None)
|
712,873
|
dbus_next.errors
|
__init__
| null |
def __init__(self, type_, text, reply=None):
    """Validate the error name (normalising ErrorType members) and store type/text/reply."""
    super().__init__(text)
    if type(type_) is ErrorType:
        type_ = type_.value
    assert_interface_name_valid(type_)
    if reply is not None and type(reply) is not Message:
        raise TypeError('reply must be of type Message')
    self.type = type_
    self.text = text
    self.reply = reply
|
(self, type_, text, reply=None)
|
712,874
|
dbus_next.errors
|
_as_message
| null |
def _as_message(self, msg):
    """Convert this error into an error-reply Message for *msg*."""
    return Message.new_error(msg, self.type, self.text)
|
(self, msg)
|
712,875
|
dbus_next.errors
|
_from_message
| null |
@staticmethod
def _from_message(msg):
    """Build a DBusError from an ERROR-type Message (first body element is the error text)."""
    assert msg.message_type == MessageType.ERROR
    return DBusError(msg.error_name, msg.body[0], reply=msg)
|
(msg)
|
712,876
|
dbus_next.constants
|
ErrorType
|
An enum for the type of an error for a message reply.
:seealso: http://man7.org/linux/man-pages/man3/sd-bus-errors.3.html
|
class ErrorType(Enum):
    """An enum for the type of an error for a message reply.
    :seealso: http://man7.org/linux/man-pages/man3/sd-bus-errors.3.html
    """
    # library-specific custom errors
    SERVICE_ERROR = 'com.dubstepdish.dbus.next.ServiceError'  #: A custom error to indicate an exported service threw an exception.
    INTERNAL_ERROR = 'com.dubstepdish.dbus.next.InternalError'  #: A custom error to indicate something went wrong with the library.
    CLIENT_ERROR = 'com.dubstepdish.dbus.next.ClientError'  #: A custom error to indicate something went wrong with the client.
    # standard error names defined by the D-Bus specification
    FAILED = "org.freedesktop.DBus.Error.Failed"
    NO_MEMORY = "org.freedesktop.DBus.Error.NoMemory"
    SERVICE_UNKNOWN = "org.freedesktop.DBus.Error.ServiceUnknown"
    NAME_HAS_NO_OWNER = "org.freedesktop.DBus.Error.NameHasNoOwner"
    NO_REPLY = "org.freedesktop.DBus.Error.NoReply"
    IO_ERROR = "org.freedesktop.DBus.Error.IOError"
    BAD_ADDRESS = "org.freedesktop.DBus.Error.BadAddress"
    NOT_SUPPORTED = "org.freedesktop.DBus.Error.NotSupported"
    LIMITS_EXCEEDED = "org.freedesktop.DBus.Error.LimitsExceeded"
    ACCESS_DENIED = "org.freedesktop.DBus.Error.AccessDenied"
    AUTH_FAILED = "org.freedesktop.DBus.Error.AuthFailed"
    NO_SERVER = "org.freedesktop.DBus.Error.NoServer"
    TIMEOUT = "org.freedesktop.DBus.Error.Timeout"
    NO_NETWORK = "org.freedesktop.DBus.Error.NoNetwork"
    ADDRESS_IN_USE = "org.freedesktop.DBus.Error.AddressInUse"
    DISCONNECTED = "org.freedesktop.DBus.Error.Disconnected"
    INVALID_ARGS = "org.freedesktop.DBus.Error.InvalidArgs"
    FILE_NOT_FOUND = "org.freedesktop.DBus.Error.FileNotFound"
    FILE_EXISTS = "org.freedesktop.DBus.Error.FileExists"
    UNKNOWN_METHOD = "org.freedesktop.DBus.Error.UnknownMethod"
    UNKNOWN_OBJECT = "org.freedesktop.DBus.Error.UnknownObject"
    UNKNOWN_INTERFACE = "org.freedesktop.DBus.Error.UnknownInterface"
    UNKNOWN_PROPERTY = "org.freedesktop.DBus.Error.UnknownProperty"
    PROPERTY_READ_ONLY = "org.freedesktop.DBus.Error.PropertyReadOnly"
    UNIX_PROCESS_ID_UNKNOWN = "org.freedesktop.DBus.Error.UnixProcessIdUnknown"
    INVALID_SIGNATURE = "org.freedesktop.DBus.Error.InvalidSignature"
    INCONSISTENT_MESSAGE = "org.freedesktop.DBus.Error.InconsistentMessage"
    MATCH_RULE_NOT_FOUND = "org.freedesktop.DBus.Error.MatchRuleNotFound"
    MATCH_RULE_INVALID = "org.freedesktop.DBus.Error.MatchRuleInvalid"
    INTERACTIVE_AUTHORIZATION_REQUIRED = "org.freedesktop.DBus.Error.InteractiveAuthorizationRequired"
|
(value, names=None, *, module=None, qualname=None, type=None, start=1)
|
712,879
|
dbus_next.errors
|
InvalidBusNameError
| null |
class InvalidBusNameError(TypeError):
    """Raised when a D-Bus bus name does not satisfy the naming rules."""

    def __init__(self, name):
        message = f'invalid bus name: {name}'
        super().__init__(message)
|
(name)
|
712,880
|
dbus_next.errors
|
__init__
| null |
def __init__(self, name):
    """Store a message naming the offending bus name."""
    super().__init__(f'invalid bus name: {name}')
|
(self, name)
|
712,881
|
dbus_next.errors
|
InvalidInterfaceNameError
| null |
class InvalidInterfaceNameError(TypeError):
    """Raised when a D-Bus interface name does not satisfy the naming rules."""

    def __init__(self, name):
        message = f'invalid interface name: {name}'
        super().__init__(message)
|
(name)
|
712,882
|
dbus_next.errors
|
__init__
| null |
def __init__(self, name):
    """Store a message naming the offending interface name."""
    super().__init__(f'invalid interface name: {name}')
|
(self, name)
|
712,884
|
dbus_next.errors
|
InvalidMemberNameError
| null |
class InvalidMemberNameError(TypeError):
    """Raised when a D-Bus member (method/signal) name does not satisfy the naming rules."""

    def __init__(self, member):
        message = f'invalid member name: {member}'
        super().__init__(message)
|
(member)
|
712,885
|
dbus_next.errors
|
__init__
| null |
def __init__(self, member):
    """Store a message naming the offending member name."""
    super().__init__(f'invalid member name: {member}')
|
(self, member)
|
712,887
|
dbus_next.errors
|
InvalidObjectPathError
| null |
class InvalidObjectPathError(TypeError):
    """Raised when a D-Bus object path does not satisfy the path rules."""

    def __init__(self, path):
        message = f'invalid object path: {path}'
        super().__init__(message)
|
(path)
|
712,888
|
dbus_next.errors
|
__init__
| null |
def __init__(self, path):
    """Build the TypeError with a message that names the rejected object path."""
    super().__init__(f'invalid object path: {path}')
|
(self, path)
|
712,890
|
dbus_next.message
|
Message
|
A class for sending and receiving messages through the
:class:`MessageBus <dbus_next.message_bus.BaseMessageBus>` with the
low-level api.
A ``Message`` can be constructed by the user to send over the message bus.
When messages are received, such as from method calls or signal emissions,
they will use this class as well.
:ivar destination: The address of the client for which this message is intended.
:vartype destination: str
:ivar path: The intended object path exported on the destination bus.
:vartype path: str
:ivar interface: The intended interface on the object path.
:vartype interface: str
:ivar member: The intended member on the interface.
:vartype member: str
:ivar message_type: The type of this message. A method call, signal, method return, or error.
:vartype message_type: :class:`MessageType`
:ivar flags: Flags that affect the behavior of this message.
:vartype flags: :class:`MessageFlag`
:ivar error_name: If this message is an error, the name of this error. Must be a valid interface name.
:vartype error_name: str
:ivar reply_serial: If this is a return type, the serial this message is in reply to.
:vartype reply_serial: int
:ivar sender: The address of the sender of this message. Will be a unique name.
:vartype sender: str
:ivar unix_fds: A list of unix fds that were sent in the header of this message.
:vartype unix_fds: list(int)
:ivar signature: The signature of the body of this message.
:vartype signature: str
:ivar signature_tree: The signature parsed as a signature tree.
:vartype signature_tree: :class:`SignatureTree`
:ivar body: The body of this message. Must match the signature.
:vartype body: list(Any)
:ivar serial: The serial of the message. Will be automatically set during message sending if not present. Use the ``new_serial()`` method of the bus to generate a serial.
:vartype serial: int
:raises:
- :class:`InvalidMessageError` - If the message is malformed or missing fields for the message type.
- :class:`InvalidSignatureError` - If the given signature is not valid.
- :class:`InvalidObjectPathError` - If ``path`` is not a valid object path.
- :class:`InvalidBusNameError` - If ``destination`` is not a valid bus name.
- :class:`InvalidMemberNameError` - If ``member`` is not a valid member name.
- :class:`InvalidInterfaceNameError` - If ``error_name`` or ``interface`` is not a valid interface name.
|
class Message:
    """A class for sending and receiving messages through the
    :class:`MessageBus <dbus_next.message_bus.BaseMessageBus>` with the
    low-level api.
    A ``Message`` can be constructed by the user to send over the message bus.
    When messages are received, such as from method calls or signal emissions,
    they will use this class as well.
    :ivar destination: The address of the client for which this message is intended.
    :vartype destination: str
    :ivar path: The intended object path exported on the destination bus.
    :vartype path: str
    :ivar interface: The intended interface on the object path.
    :vartype interface: str
    :ivar member: The intended member on the interface.
    :vartype member: str
    :ivar message_type: The type of this message. A method call, signal, method return, or error.
    :vartype message_type: :class:`MessageType`
    :ivar flags: Flags that affect the behavior of this message.
    :vartype flags: :class:`MessageFlag`
    :ivar error_name: If this message is an error, the name of this error. Must be a valid interface name.
    :vartype error_name: str
    :ivar reply_serial: If this is a return type, the serial this message is in reply to.
    :vartype reply_serial: int
    :ivar sender: The address of the sender of this message. Will be a unique name.
    :vartype sender: str
    :ivar unix_fds: A list of unix fds that were sent in the header of this message.
    :vartype unix_fds: list(int)
    :ivar signature: The signature of the body of this message.
    :vartype signature: str
    :ivar signature_tree: The signature parsed as a signature tree.
    :vartype signature_tree: :class:`SignatureTree`
    :ivar body: The body of this message. Must match the signature.
    :vartype body: list(Any)
    :ivar serial: The serial of the message. Will be automatically set during message sending if not present. Use the ``new_serial()`` method of the bus to generate a serial.
    :vartype serial: int
    :raises:
        - :class:`InvalidMessageError` - If the message is malformed or missing fields for the message type.
        - :class:`InvalidSignatureError` - If the given signature is not valid.
        - :class:`InvalidObjectPathError` - If ``path`` is not a valid object path.
        - :class:`InvalidBusNameError` - If ``destination`` is not a valid bus name.
        - :class:`InvalidMemberNameError` - If ``member`` is not a valid member name.
        - :class:`InvalidInterfaceNameError` - If ``error_name`` or ``interface`` is not a valid interface name.
    """

    def __init__(self,
                 destination: str = None,
                 path: str = None,
                 interface: str = None,
                 member: str = None,
                 message_type: MessageType = MessageType.METHOD_CALL,
                 flags: MessageFlag = MessageFlag.NONE,
                 error_name: str = None,
                 reply_serial: int = None,
                 sender: str = None,
                 unix_fds: List[int] = None,
                 signature: str = '',
                 body: List[Any] = None,
                 serial: int = 0):
        self.destination = destination
        self.path = path
        self.interface = interface
        self.member = member
        self.message_type = message_type
        # Accept a pre-built MessageFlag, otherwise coerce the raw value.
        # NOTE(review): ``MessageFlag(bytes([flags]))`` looks suspect for a plain
        # int input (IntFlag normally takes an int) -- confirm against callers.
        self.flags = flags if type(flags) is MessageFlag else MessageFlag(bytes([flags]))
        # ``error_name`` may be given as an ErrorType enum member; store its value.
        self.error_name = error_name if type(error_name) is not ErrorType else error_name.value
        self.reply_serial = reply_serial
        self.sender = sender
        # Fix: the defaults were mutable lists (``unix_fds=[]``, ``body=[]``)
        # shared across every Message; use fresh lists per instance instead.
        self.unix_fds = unix_fds if unix_fds is not None else []
        # ``signature`` may be either a string or a pre-parsed SignatureTree.
        self.signature = signature.signature if type(signature) is SignatureTree else signature
        self.signature_tree = signature if type(signature) is SignatureTree else SignatureTree._get(
            signature)
        self.body = body if body is not None else []
        self.serial = serial

        # Validate each addressing field that was provided.
        if self.destination is not None:
            assert_bus_name_valid(self.destination)
        if self.interface is not None:
            assert_interface_name_valid(self.interface)
        if self.path is not None:
            assert_object_path_valid(self.path)
        if self.member is not None:
            assert_member_name_valid(self.member)
        if self.error_name is not None:
            assert_interface_name_valid(self.error_name)

        # Each message type requires a specific set of header fields.
        def require_fields(*fields):
            for field in fields:
                if not getattr(self, field):
                    raise InvalidMessageError(f'missing required field: {field}')

        if self.message_type == MessageType.METHOD_CALL:
            require_fields('path', 'member')
        elif self.message_type == MessageType.SIGNAL:
            require_fields('path', 'member', 'interface')
        elif self.message_type == MessageType.ERROR:
            require_fields('error_name', 'reply_serial')
        elif self.message_type == MessageType.METHOD_RETURN:
            require_fields('reply_serial')
        else:
            raise InvalidMessageError(f'got unknown message type: {self.message_type}')

    @staticmethod
    def new_error(msg: 'Message', error_name: str, error_text: str) -> 'Message':
        """A convenience constructor to create an error message in reply to the given message.
        :param msg: The message this error is in reply to.
        :type msg: :class:`Message`
        :param error_name: The name of this error. Must be a valid interface name.
        :type error_name: str
        :param error_text: Human-readable text for the error.
        :returns: The error message.
        :rtype: :class:`Message`
        :raises:
            - :class:`InvalidInterfaceNameError` - If the error_name is not a valid interface name.
        """
        # Error messages carry a single human-readable string as their body.
        return Message(message_type=MessageType.ERROR,
                       reply_serial=msg.serial,
                       destination=msg.sender,
                       error_name=error_name,
                       signature='s',
                       body=[error_text])

    @staticmethod
    def new_method_return(msg: 'Message',
                          signature: str = '',
                          body: List[Any] = None,
                          unix_fds: List[int] = None) -> 'Message':
        """A convenience constructor to create a method return to the given method call message.
        :param msg: The method call message this is a reply to.
        :type msg: :class:`Message`
        :param signature: The signature for the message body.
        :type signature: str
        :param body: The body of this message. Must match the signature.
        :type body: list(Any)
        :param unix_fds: List integer file descriptors to send with this message.
        :type unix_fds: list(int)
        :returns: The method return message
        :rtype: :class:`Message`
        :raises:
            - :class:`InvalidSignatureError` - If the signature is not a valid signature.
        """
        # ``None`` defaults avoid the shared-mutable-default pitfall; the
        # Message constructor normalizes them to fresh empty lists.
        return Message(message_type=MessageType.METHOD_RETURN,
                       reply_serial=msg.serial,
                       destination=msg.sender,
                       signature=signature,
                       body=body,
                       unix_fds=unix_fds)

    @staticmethod
    def new_signal(path: str,
                   interface: str,
                   member: str,
                   signature: str = '',
                   body: List[Any] = None,
                   unix_fds: List[int] = None) -> 'Message':
        """A convenience constructor to create a new signal message.
        :param path: The path of this signal.
        :type path: str
        :param interface: The interface of this signal.
        :type interface: str
        :param member: The member name of this signal.
        :type member: str
        :param signature: The signature of the signal body.
        :type signature: str
        :param body: The body of this signal message.
        :type body: list(Any)
        :param unix_fds: List integer file descriptors to send with this message.
        :type unix_fds: list(int)
        :returns: The signal message.
        :rtype: :class:`Message`
        :raises:
            - :class:`InvalidSignatureError` - If the signature is not a valid signature.
            - :class:`InvalidObjectPathError` - If ``path`` is not a valid object path.
            - :class:`InvalidInterfaceNameError` - If ``interface`` is not a valid interface name.
            - :class:`InvalidMemberNameError` - If ``member`` is not a valid member name.
        """
        # The constructor now normalizes ``None`` body/unix_fds to fresh lists.
        return Message(message_type=MessageType.SIGNAL,
                       interface=interface,
                       path=path,
                       member=member,
                       signature=signature,
                       body=body,
                       unix_fds=unix_fds)

    def _matches(self, **kwargs):
        # True only when every given attribute equals the value on this message.
        for attr, val in kwargs.items():
            if getattr(self, attr) != val:
                return False
        return True

    def _marshall(self, negotiate_unix_fd=False):
        # TODO maximum message size is 134217728 (128 MiB)
        # Marshall the body first so its byte length is known for the header.
        body_block = Marshaller(self.signature, self.body)
        body_block.marshall()

        # Build the array of (field-code, variant) header fields, including
        # only the fields that are actually set on this message.
        fields = []
        if self.path:
            fields.append([HeaderField.PATH.value, Variant('o', self.path)])
        if self.interface:
            fields.append([HeaderField.INTERFACE.value, Variant('s', self.interface)])
        if self.member:
            fields.append([HeaderField.MEMBER.value, Variant('s', self.member)])
        if self.error_name:
            fields.append([HeaderField.ERROR_NAME.value, Variant('s', self.error_name)])
        if self.reply_serial:
            fields.append([HeaderField.REPLY_SERIAL.value, Variant('u', self.reply_serial)])
        if self.destination:
            fields.append([HeaderField.DESTINATION.value, Variant('s', self.destination)])
        if self.signature:
            fields.append([HeaderField.SIGNATURE.value, Variant('g', self.signature)])
        # fd passing is advertised only when negotiated with the peer
        if self.unix_fds and negotiate_unix_fd:
            fields.append([HeaderField.UNIX_FDS.value, Variant('u', len(self.unix_fds))])

        header_body = [
            LITTLE_ENDIAN, self.message_type.value, self.flags.value, PROTOCOL_VERSION,
            len(body_block.buffer), self.serial, fields
        ]
        header_block = Marshaller('yyyyuua(yv)', header_body)
        header_block.marshall()
        # Pad the header so the body starts on an 8-byte boundary.
        header_block.align(8)
        return header_block.buffer + body_block.buffer
|
(destination: str = None, path: str = None, interface: str = None, member: str = None, message_type: dbus_next.constants.MessageType = <MessageType.METHOD_CALL: 1>, flags: dbus_next.constants.MessageFlag = <MessageFlag.NONE: 0>, error_name: str = None, reply_serial: int = None, sender: str = None, unix_fds: List[int] = [], signature: str = '', body: List[Any] = [], serial: int = 0)
|
712,891
|
dbus_next.message
|
__init__
| null |
def __init__(self,
             destination: str = None,
             path: str = None,
             interface: str = None,
             member: str = None,
             message_type: MessageType = MessageType.METHOD_CALL,
             flags: MessageFlag = MessageFlag.NONE,
             error_name: str = None,
             reply_serial: int = None,
             sender: str = None,
             unix_fds: List[int] = [],
             signature: str = '',
             body: List[Any] = [],
             serial: int = 0):
    """Initialize the message fields and validate them for the message type.

    :raises InvalidMessageError: if a field required by ``message_type`` is missing.
    """
    self.destination = destination
    self.path = path
    self.interface = interface
    self.member = member
    self.message_type = message_type
    # Accept a pre-built MessageFlag, otherwise coerce the raw value.
    # NOTE(review): ``MessageFlag(bytes([flags]))`` looks suspect for a plain
    # int input (IntFlag normally takes an int) -- confirm against callers.
    self.flags = flags if type(flags) is MessageFlag else MessageFlag(bytes([flags]))
    # ``error_name`` may be given as an ErrorType enum member; store its value.
    self.error_name = error_name if type(error_name) is not ErrorType else error_name.value
    self.reply_serial = reply_serial
    self.sender = sender
    # NOTE(review): ``unix_fds=[]`` and ``body=[]`` are mutable defaults shared
    # across calls -- mutating them on one message aliases every default user.
    self.unix_fds = unix_fds
    # ``signature`` may be either a string or a pre-parsed SignatureTree.
    self.signature = signature.signature if type(signature) is SignatureTree else signature
    self.signature_tree = signature if type(signature) is SignatureTree else SignatureTree._get(
        signature)
    self.body = body
    self.serial = serial
    # Validate each addressing field that was provided.
    if self.destination is not None:
        assert_bus_name_valid(self.destination)
    if self.interface is not None:
        assert_interface_name_valid(self.interface)
    if self.path is not None:
        assert_object_path_valid(self.path)
    if self.member is not None:
        assert_member_name_valid(self.member)
    if self.error_name is not None:
        assert_interface_name_valid(self.error_name)

    # Each message type requires a specific set of header fields.
    def require_fields(*fields):
        for field in fields:
            if not getattr(self, field):
                raise InvalidMessageError(f'missing required field: {field}')
    if self.message_type == MessageType.METHOD_CALL:
        require_fields('path', 'member')
    elif self.message_type == MessageType.SIGNAL:
        require_fields('path', 'member', 'interface')
    elif self.message_type == MessageType.ERROR:
        require_fields('error_name', 'reply_serial')
    elif self.message_type == MessageType.METHOD_RETURN:
        require_fields('reply_serial')
    else:
        raise InvalidMessageError(f'got unknown message type: {self.message_type}')
|
(self, destination: Optional[str] = None, path: Optional[str] = None, interface: Optional[str] = None, member: Optional[str] = None, message_type: dbus_next.constants.MessageType = <MessageType.METHOD_CALL: 1>, flags: dbus_next.constants.MessageFlag = <MessageFlag.NONE: 0>, error_name: Optional[str] = None, reply_serial: Optional[int] = None, sender: Optional[str] = None, unix_fds: List[int] = [], signature: str = '', body: List[Any] = [], serial: int = 0)
|
712,892
|
dbus_next.message
|
_marshall
| null |
def _marshall(self, negotiate_unix_fd=False):
    """Serialize this message to wire-format bytes (header + body)."""
    # TODO maximum message size is 134217728 (128 MiB)
    # Marshall the body first so its byte length is known for the header.
    body_block = Marshaller(self.signature, self.body)
    body_block.marshall()
    # Build the array of (field-code, variant) header fields, including
    # only the fields that are actually set on this message.
    fields = []
    if self.path:
        fields.append([HeaderField.PATH.value, Variant('o', self.path)])
    if self.interface:
        fields.append([HeaderField.INTERFACE.value, Variant('s', self.interface)])
    if self.member:
        fields.append([HeaderField.MEMBER.value, Variant('s', self.member)])
    if self.error_name:
        fields.append([HeaderField.ERROR_NAME.value, Variant('s', self.error_name)])
    if self.reply_serial:
        fields.append([HeaderField.REPLY_SERIAL.value, Variant('u', self.reply_serial)])
    if self.destination:
        fields.append([HeaderField.DESTINATION.value, Variant('s', self.destination)])
    if self.signature:
        fields.append([HeaderField.SIGNATURE.value, Variant('g', self.signature)])
    # fd passing is advertised only when negotiated with the peer
    if self.unix_fds and negotiate_unix_fd:
        fields.append([HeaderField.UNIX_FDS.value, Variant('u', len(self.unix_fds))])
    header_body = [
        LITTLE_ENDIAN, self.message_type.value, self.flags.value, PROTOCOL_VERSION,
        len(body_block.buffer), self.serial, fields
    ]
    header_block = Marshaller('yyyyuua(yv)', header_body)
    header_block.marshall()
    # Pad the header so the body starts on an 8-byte boundary.
    header_block.align(8)
    return header_block.buffer + body_block.buffer
|
(self, negotiate_unix_fd=False)
|
712,893
|
dbus_next.message
|
_matches
| null |
def _matches(self, **kwargs):
for attr, val in kwargs.items():
if getattr(self, attr) != val:
return False
return True
|
(self, **kwargs)
|
712,894
|
dbus_next.message
|
new_error
|
A convenience constructor to create an error message in reply to the given message.
:param msg: The message this error is in reply to.
:type msg: :class:`Message`
:param error_name: The name of this error. Must be a valid interface name.
:type error_name: str
:param error_text: Human-readable text for the error.
:returns: The error message.
:rtype: :class:`Message`
:raises:
- :class:`InvalidInterfaceNameError` - If the error_name is not a valid interface name.
|
@staticmethod
def new_error(msg: 'Message', error_name: str, error_text: str) -> 'Message':
    """A convenience constructor to create an error message in reply to the given message.
    :param msg: The message this error is in reply to.
    :type msg: :class:`Message`
    :param error_name: The name of this error. Must be a valid interface name.
    :type error_name: str
    :param error_text: Human-readable text for the error.
    :returns: The error message.
    :rtype: :class:`Message`
    :raises:
        - :class:`InvalidInterfaceNameError` - If the error_name is not a valid interface name.
    """
    # Error messages carry a single human-readable string as their body.
    return Message(message_type=MessageType.ERROR,
                   reply_serial=msg.serial,
                   destination=msg.sender,
                   error_name=error_name,
                   signature='s',
                   body=[error_text])
|
(msg: dbus_next.message.Message, error_name: str, error_text: str) -> dbus_next.message.Message
|
712,895
|
dbus_next.message
|
new_method_return
|
A convenience constructor to create a method return to the given method call message.
:param msg: The method call message this is a reply to.
:type msg: :class:`Message`
:param signature: The signature for the message body.
:type signature: str
:param body: The body of this message. Must match the signature.
:type body: list(Any)
:param unix_fds: List integer file descriptors to send with this message.
:type body: list(int)
:returns: The method return message
:rtype: :class:`Message`
:raises:
- :class:`InvalidSignatureError` - If the signature is not a valid signature.
|
@staticmethod
def new_method_return(msg: 'Message',
                      signature: str = '',
                      body: List[Any] = [],
                      unix_fds: List[int] = []) -> 'Message':
    """A convenience constructor to create a method return to the given method call message.
    :param msg: The method call message this is a reply to.
    :type msg: :class:`Message`
    :param signature: The signature for the message body.
    :type signature: str
    :param body: The body of this message. Must match the signature.
    :type body: list(Any)
    :param unix_fds: List integer file descriptors to send with this message.
    :type unix_fds: list(int)
    :returns: The method return message
    :rtype: :class:`Message`
    :raises:
        - :class:`InvalidSignatureError` - If the signature is not a valid signature.
    """
    # NOTE(review): ``body=[]`` and ``unix_fds=[]`` are mutable defaults shared
    # across calls -- mutating the returned message's lists aliases them.
    return Message(message_type=MessageType.METHOD_RETURN,
                   reply_serial=msg.serial,
                   destination=msg.sender,
                   signature=signature,
                   body=body,
                   unix_fds=unix_fds)
|
(msg: dbus_next.message.Message, signature: str = '', body: List[Any] = [], unix_fds: List[int] = []) -> dbus_next.message.Message
|
712,896
|
dbus_next.message
|
new_signal
|
A convenience constructor to create a new signal message.
:param path: The path of this signal.
:type path: str
:param interface: The interface of this signal.
:type interface: str
:param member: The member name of this signal.
:type member: str
:param signature: The signature of the signal body.
:type signature: str
:param body: The body of this signal message.
:type body: list(Any)
:param unix_fds: List integer file descriptors to send with this message.
:type body: list(int)
:returns: The signal message.
:rtype: :class:`Message`
:raises:
- :class:`InvalidSignatureError` - If the signature is not a valid signature.
- :class:`InvalidObjectPathError` - If ``path`` is not a valid object path.
- :class:`InvalidInterfaceNameError` - If ``interface`` is not a valid interface name.
- :class:`InvalidMemberNameError` - If ``member`` is not a valid member name.
|
@staticmethod
def new_signal(path: str,
               interface: str,
               member: str,
               signature: str = '',
               body: List[Any] = None,
               unix_fds: List[int] = None) -> 'Message':
    """A convenience constructor to create a new signal message.
    :param path: The path of this signal.
    :type path: str
    :param interface: The interface of this signal.
    :type interface: str
    :param member: The member name of this signal.
    :type member: str
    :param signature: The signature of the signal body.
    :type signature: str
    :param body: The body of this signal message.
    :type body: list(Any)
    :param unix_fds: List integer file descriptors to send with this message.
    :type unix_fds: list(int)
    :returns: The signal message.
    :rtype: :class:`Message`
    :raises:
        - :class:`InvalidSignatureError` - If the signature is not a valid signature.
        - :class:`InvalidObjectPathError` - If ``path`` is not a valid object path.
        - :class:`InvalidInterfaceNameError` - If ``interface`` is not a valid interface name.
        - :class:`InvalidMemberNameError` - If ``member`` is not a valid member name.
    """
    # Normalize a falsy body (None or empty) to a fresh empty list.
    body = body if body else []
    return Message(message_type=MessageType.SIGNAL,
                   interface=interface,
                   path=path,
                   member=member,
                   signature=signature,
                   body=body,
                   unix_fds=unix_fds)
|
(path: str, interface: str, member: str, signature: str = '', body: Optional[List[Any]] = None, unix_fds: Optional[List[int]] = None) -> dbus_next.message.Message
|
712,897
|
dbus_next.constants
|
MessageFlag
|
Flags that affect the behavior of sent and received messages
|
class MessageFlag(IntFlag):
    """Flags that affect the behavior of sent and received messages
    """
    NONE = 0  #: No flags are set.
    NO_REPLY_EXPECTED = 1  #: The method call does not expect a method return.
    NO_AUTOSTART = 2  #: Do not automatically start an owner for the destination (D-Bus ``NO_AUTO_START``).
    ALLOW_INTERACTIVE_AUTHORIZATION = 4  #: The caller permits interactive authorization for this call.
|
(value, names=None, *, module=None, qualname=None, type=None, start=1)
|
712,898
|
dbus_next.constants
|
MessageType
|
An enum that indicates a type of message.
|
class MessageType(Enum):
    """An enum that indicates a type of message."""
    METHOD_CALL = 1  #: An outgoing method call.
    METHOD_RETURN = 2  #: A return to a previously sent method call
    ERROR = 3  #: A return to a method call that has failed
    SIGNAL = 4  #: A broadcast signal to subscribed connections
|
(value, names=None, *, module=None, qualname=None, type=None, start=1)
|
712,899
|
dbus_next.constants
|
NameFlag
|
A flag that affects the behavior of a name request.
|
class NameFlag(IntFlag):
    """A flag that affects the behavior of a name request.
    """
    NONE = 0  #: No flags are set.
    ALLOW_REPLACEMENT = 1  #: If another client requests this name, let them have it.
    REPLACE_EXISTING = 2  #: If another client owns this name, try to take it.
    DO_NOT_QUEUE = 4  #: Name requests normally queue and wait for the owner to release the name. Do not enter this queue.
|
(value, names=None, *, module=None, qualname=None, type=None, start=1)
|
712,900
|
dbus_next.constants
|
PropertyAccess
|
An enum that describes whether a DBus property can be gotten or set with
the ``org.freedesktop.DBus.Properties`` interface.
|
class PropertyAccess(Enum):
    """An enum that describes whether a DBus property can be gotten or set with
    the ``org.freedesktop.DBus.Properties`` interface.
    """
    READ = 'read'  #: The property is readonly.
    WRITE = 'write'  #: The property is writeonly.
    READWRITE = 'readwrite'  #: The property can be read or written to.

    def readable(self):
        """Return whether the property can be read."""
        return self in (PropertyAccess.READ, PropertyAccess.READWRITE)

    def writable(self):
        """Return whether the property can be written to."""
        return self in (PropertyAccess.WRITE, PropertyAccess.READWRITE)
|
(value, names=None, *, module=None, qualname=None, type=None, start=1)
|
712,901
|
dbus_next.constants
|
ReleaseNameReply
|
An enum that describes the result of a name release request
|
class ReleaseNameReply(Enum):
    """An enum that describes the result of a name release request
    """
    RELEASED = 1  #: The name was released by this request.
    NON_EXISTENT = 2  #: The name did not exist on the bus.
    NOT_OWNER = 3  #: The caller was not the owner of the name.
|
(value, names=None, *, module=None, qualname=None, type=None, start=1)
|
712,902
|
dbus_next.constants
|
RequestNameReply
|
An enum that describes the result of a name request.
|
class RequestNameReply(Enum):
    """An enum that describes the result of a name request.
    """
    PRIMARY_OWNER = 1  #: The bus owns the name.
    IN_QUEUE = 2  #: The bus is in a queue and may receive the name after it is released by the primary owner.
    EXISTS = 3  #: The name has an owner and NameFlag.DO_NOT_QUEUE was given.
    ALREADY_OWNER = 4  #: The bus already owns the name.
|
(value, names=None, *, module=None, qualname=None, type=None, start=1)
|
712,905
|
dbus_next.signature
|
SignatureTree
|
A class that represents a signature as a tree structure for conveniently
working with DBus signatures.
This class will not normally be used directly by the user.
:ivar types: A list of parsed complete types.
:vartype types: list(:class:`SignatureType`)
:ivar ~.signature: The signature of this signature tree.
:vartype ~.signature: str
:raises:
:class:`InvalidSignatureError` if the given signature is not valid.
|
class SignatureTree:
    """A class that represents a signature as a tree structure for conveniently
    working with DBus signatures.
    This class will not normally be used directly by the user.
    :ivar types: A list of parsed complete types.
    :vartype types: list(:class:`SignatureType`)
    :ivar ~.signature: The signature of this signature tree.
    :vartype ~.signature: str
    :raises:
        :class:`InvalidSignatureError` if the given signature is not valid.
    """
    # Memoizes parsed trees so each distinct signature string is parsed once.
    _cache = {}

    @staticmethod
    def _get(signature: str = ''):
        """Return the cached tree for ``signature``, parsing it on first use."""
        cached = SignatureTree._cache.get(signature)
        if cached is None:
            cached = SignatureTree(signature)
            SignatureTree._cache[signature] = cached
        return cached

    def __init__(self, signature: str = ''):
        self.signature = signature
        self.types = []
        # The wire format limits a signature to a single length byte (255).
        if len(signature) > 0xff:
            raise InvalidSignatureError('A signature must be less than 256 characters')
        # Consume one complete type at a time until the string is exhausted.
        remaining = signature
        while remaining:
            (parsed, remaining) = SignatureType._parse_next(remaining)
            self.types.append(parsed)

    def __eq__(self, other):
        # Two trees are equal exactly when their signature strings are equal.
        if type(other) is not SignatureTree:
            return super().__eq__(other)
        return self.signature == other.signature

    def verify(self, body: List[Any]):
        """Verifies that the given body matches this signature tree
        :param body: the body to verify for this tree
        :type body: list(Any)
        :returns: True if the signature matches the body or an exception if not.
        :raises:
            :class:`SignatureBodyMismatchError` if the signature does not match the body.
        """
        if not isinstance(body, list):
            raise SignatureBodyMismatchError(f'The body must be a list (got {type(body)})')
        if len(body) != len(self.types):
            raise SignatureBodyMismatchError(
                f'The body has the wrong number of types (got {len(body)}, expected {len(self.types)})'
            )
        # Each body element must satisfy the corresponding complete type.
        for value, expected_type in zip(body, self.types):
            expected_type.verify(value)
        return True
|
(signature: str = '')
|
712,906
|
dbus_next.signature
|
__eq__
| null |
def __eq__(self, other):
    """Compare trees by their signature string; defer to object identity otherwise."""
    if type(other) is SignatureTree:
        return self.signature == other.signature
    else:
        return super().__eq__(other)
|
(self, other)
|
712,907
|
dbus_next.signature
|
__init__
| null |
def __init__(self, signature: str = ''):
    """Parse ``signature`` into a list of complete types.

    :raises InvalidSignatureError: if the signature is too long or malformed.
    """
    self.signature = signature
    self.types = []
    # The wire format limits a signature to a single length byte (255).
    if len(signature) > 0xff:
        raise InvalidSignatureError('A signature must be less than 256 characters')
    # Consume one complete type at a time until the string is exhausted.
    while signature:
        (type_, signature) = SignatureType._parse_next(signature)
        self.types.append(type_)
|
(self, signature: str = '')
|
712,908
|
dbus_next.signature
|
_get
| null |
@staticmethod
def _get(signature: str = ''):
    """Return the memoized tree for ``signature``, parsing it on first use."""
    if signature in SignatureTree._cache:
        return SignatureTree._cache[signature]
    SignatureTree._cache[signature] = SignatureTree(signature)
    return SignatureTree._cache[signature]
|
(signature: str = '')
|
712,909
|
dbus_next.signature
|
verify
|
Verifies that the given body matches this signature tree
:param body: the body to verify for this tree
:type body: list(Any)
:returns: True if the signature matches the body or an exception if not.
:raises:
:class:`SignatureBodyMismatchError` if the signature does not match the body.
|
def verify(self, body: List[Any]):
    """Verifies that the given body matches this signature tree
    :param body: the body to verify for this tree
    :type body: list(Any)
    :returns: True if the signature matches the body or an exception if not.
    :raises:
        :class:`SignatureBodyMismatchError` if the signature does not match the body.
    """
    if not isinstance(body, list):
        raise SignatureBodyMismatchError(f'The body must be a list (got {type(body)})')
    if len(body) != len(self.types):
        raise SignatureBodyMismatchError(
            f'The body has the wrong number of types (got {len(body)}, expected {len(self.types)})'
        )
    # Each body element must satisfy the corresponding complete type.
    for i, type_ in enumerate(self.types):
        type_.verify(body[i])
    return True
|
(self, body: List[Any])
|
712,910
|
dbus_next.signature
|
SignatureType
|
A class that represents a single complete type within a signature.
This class is not meant to be constructed directly. Use the :class:`SignatureTree`
class to parse signatures.
:ivar ~.signature: The signature of this complete type.
:vartype ~.signature: str
:ivar children: A list of child types if this is a container type. Arrays have one child type, dict entries have two child types (key and value), and structs have child types equal to the number of struct members.
:vartype children: list(:class:`SignatureType`)
|
class SignatureType:
"""A class that represents a single complete type within a signature.
This class is not meant to be constructed directly. Use the :class:`SignatureTree`
class to parse signatures.
:ivar ~.signature: The signature of this complete type.
:vartype ~.signature: str
:ivar children: A list of child types if this is a container type. Arrays \
have one child type, dict entries have two child types (key and value), and \
structs have child types equal to the number of struct members.
:vartype children: list(:class:`SignatureType`)
"""
_tokens = 'ybnqiuxtdsogavh({'
def __init__(self, token):
self.token = token
self.children = []
self._signature = None
def __eq__(self, other):
if type(other) is SignatureType:
return self.signature == other.signature
else:
return super().__eq__(other)
def _collapse(self):
if self.token not in 'a({':
return self.token
signature = [self.token]
for child in self.children:
signature.append(child._collapse())
if self.token == '(':
signature.append(')')
elif self.token == '{':
signature.append('}')
return ''.join(signature)
@property
def signature(self) -> str:
if self._signature is not None:
return self._signature
self._signature = self._collapse()
return self._signature
@staticmethod
def _parse_next(signature):
if not signature:
return (None, '')
token = signature[0]
if token not in SignatureType._tokens:
raise InvalidSignatureError(f'got unexpected token: "{token}"')
# container types
if token == 'a':
self = SignatureType('a')
(child, signature) = SignatureType._parse_next(signature[1:])
if not child:
raise InvalidSignatureError('missing type for array')
self.children.append(child)
return (self, signature)
elif token == '(':
self = SignatureType('(')
signature = signature[1:]
while True:
(child, signature) = SignatureType._parse_next(signature)
if not signature:
raise InvalidSignatureError('missing closing ")" for struct')
self.children.append(child)
if signature[0] == ')':
return (self, signature[1:])
elif token == '{':
self = SignatureType('{')
signature = signature[1:]
(key_child, signature) = SignatureType._parse_next(signature)
if not key_child or len(key_child.children):
raise InvalidSignatureError('expected a simple type for dict entry key')
self.children.append(key_child)
(value_child, signature) = SignatureType._parse_next(signature)
if not value_child:
raise InvalidSignatureError('expected a value for dict entry')
if not signature or signature[0] != '}':
raise InvalidSignatureError('missing closing "}" for dict entry')
self.children.append(value_child)
return (self, signature[1:])
# basic type
return (SignatureType(token), signature[1:])
def _verify_byte(self, body):
BYTE_MIN = 0x00
BYTE_MAX = 0xff
if not isinstance(body, int):
raise SignatureBodyMismatchError(
f'DBus BYTE type "y" must be Python type "int", got {type(body)}')
if body < BYTE_MIN or body > BYTE_MAX:
raise SignatureBodyMismatchError(
f'DBus BYTE type must be between {BYTE_MIN} and {BYTE_MAX}')
def _verify_boolean(self, body):
if not isinstance(body, bool):
raise SignatureBodyMismatchError(
f'DBus BOOLEAN type "b" must be Python type "bool", got {type(body)}')
def _verify_int16(self, body):
INT16_MIN = -0x7fff - 1
INT16_MAX = 0x7fff
if not isinstance(body, int):
raise SignatureBodyMismatchError(
f'DBus INT16 type "n" must be Python type "int", got {type(body)}')
elif body > INT16_MAX or body < INT16_MIN:
raise SignatureBodyMismatchError(
f'DBus INT16 type "n" must be between {INT16_MIN} and {INT16_MAX}')
def _verify_uint16(self, body):
UINT16_MIN = 0
UINT16_MAX = 0xffff
if not isinstance(body, int):
raise SignatureBodyMismatchError(
f'DBus UINT16 type "q" must be Python type "int", got {type(body)}')
elif body > UINT16_MAX or body < UINT16_MIN:
raise SignatureBodyMismatchError(
f'DBus UINT16 type "q" must be between {UINT16_MIN} and {UINT16_MAX}')
def _verify_int32(self, body):
INT32_MIN = -0x7fffffff - 1
INT32_MAX = 0x7fffffff
if not isinstance(body, int):
raise SignatureBodyMismatchError(
f'DBus INT32 type "i" must be Python type "int", got {type(body)}')
elif body > INT32_MAX or body < INT32_MIN:
raise SignatureBodyMismatchError(
f'DBus INT32 type "i" must be between {INT32_MIN} and {INT32_MAX}')
def _verify_uint32(self, body):
UINT32_MIN = 0
UINT32_MAX = 0xffffffff
if not isinstance(body, int):
raise SignatureBodyMismatchError(
f'DBus UINT32 type "u" must be Python type "int", got {type(body)}')
elif body > UINT32_MAX or body < UINT32_MIN:
raise SignatureBodyMismatchError(
f'DBus UINT32 type "u" must be between {UINT32_MIN} and {UINT32_MAX}')
def _verify_int64(self, body):
INT64_MAX = 9223372036854775807
INT64_MIN = -INT64_MAX - 1
if not isinstance(body, int):
raise SignatureBodyMismatchError(
f'DBus INT64 type "x" must be Python type "int", got {type(body)}')
elif body > INT64_MAX or body < INT64_MIN:
raise SignatureBodyMismatchError(
f'DBus INT64 type "x" must be between {INT64_MIN} and {INT64_MAX}')
def _verify_uint64(self, body):
UINT64_MIN = 0
UINT64_MAX = 18446744073709551615
if not isinstance(body, int):
raise SignatureBodyMismatchError(
f'DBus UINT64 type "t" must be Python type "int", got {type(body)}')
elif body > UINT64_MAX or body < UINT64_MIN:
raise SignatureBodyMismatchError(
f'DBus UINT64 type "t" must be between {UINT64_MIN} and {UINT64_MAX}')
def _verify_double(self, body):
if not isinstance(body, float) and not isinstance(body, int):
raise SignatureBodyMismatchError(
f'DBus DOUBLE type "d" must be Python type "float" or "int", got {type(body)}')
def _verify_unix_fd(self, body):
try:
self._verify_uint32(body)
except SignatureBodyMismatchError:
raise SignatureBodyMismatchError('DBus UNIX_FD type "h" must be a valid UINT32')
def _verify_object_path(self, body):
    """Validate a DBus OBJECT_PATH ("o") value via the shared path validator."""
    if is_object_path_valid(body):
        return
    raise SignatureBodyMismatchError(
        'DBus OBJECT_PATH type "o" must be a valid object path')
def _verify_string(self, body):
if not isinstance(body, str):
raise SignatureBodyMismatchError(
f'DBus STRING type "s" must be Python type "str", got {type(body)}')
def _verify_signature(self, body):
# I guess we could run it through the SignatureTree parser instead
if not isinstance(body, str):
raise SignatureBodyMismatchError(
f'DBus SIGNATURE type "g" must be Python type "str", got {type(body)}')
if len(body.encode()) > 0xff:
raise SignatureBodyMismatchError('DBus SIGNATURE type "g" must be less than 256 bytes')
def _verify_array(self, body):
child_type = self.children[0]
if child_type.token == '{':
if not isinstance(body, dict):
raise SignatureBodyMismatchError(
f'DBus ARRAY type "a" with DICT_ENTRY child must be Python type "dict", got {type(body)}'
)
for key, value in body.items():
child_type.children[0].verify(key)
child_type.children[1].verify(value)
elif child_type.token == 'y':
if not isinstance(body, bytes):
raise SignatureBodyMismatchError(
f'DBus ARRAY type "a" with BYTE child must be Python type "bytes", got {type(body)}'
)
# no need to verify children
else:
if not isinstance(body, list):
raise SignatureBodyMismatchError(
f'DBus ARRAY type "a" must be Python type "list", got {type(body)}')
for member in body:
child_type.verify(member)
def _verify_struct(self, body):
# TODO allow tuples
if not isinstance(body, list):
raise SignatureBodyMismatchError(
f'DBus STRUCT type "(" must be Python type "list", got {type(body)}')
if len(body) != len(self.children):
raise SignatureBodyMismatchError(
'DBus STRUCT type "(" must have Python list members equal to the number of struct type members'
)
for i, member in enumerate(body):
self.children[i].verify(member)
def _verify_variant(self, body):
    """A VARIANT ("v") body must already be wrapped in a Variant instance."""
    # a variant signature and value is valid by construction
    if isinstance(body, Variant):
        return
    raise SignatureBodyMismatchError(
        f'DBus VARIANT type "v" must be Python type "Variant", got {type(body)}')
def verify(self, body: Any) -> bool:
    """Verify that the body matches this type.
    :returns: True if the body matches this type.
    :raises:
        :class:`SignatureBodyMismatchError` if the body does not match this type.
    """
    # None is never serializable, whatever the token
    if body is None:
        raise SignatureBodyMismatchError('Cannot serialize Python type "None"')
    # map each type token to its checker; looked up lazily so only the
    # matching checker is touched
    dispatch = {
        'y': '_verify_byte',
        'b': '_verify_boolean',
        'n': '_verify_int16',
        'q': '_verify_uint16',
        'i': '_verify_int32',
        'u': '_verify_uint32',
        'x': '_verify_int64',
        't': '_verify_uint64',
        'd': '_verify_double',
        'h': '_verify_unix_fd',
        'o': '_verify_object_path',
        's': '_verify_string',
        'g': '_verify_signature',
        'a': '_verify_array',
        '(': '_verify_struct',
        'v': '_verify_variant',
    }
    checker_name = dispatch.get(self.token)
    if checker_name is None:
        raise Exception(f'cannot verify type with token {self.token}')
    getattr(self, checker_name)(body)
    return True
|
(token)
|
712,911
|
dbus_next.signature
|
__eq__
| null |
def __eq__(self, other):
    """Two SignatureTypes are equal when their collapsed signature strings match."""
    if type(other) is not SignatureType:
        # fall back to identity-based equality for foreign types
        return super().__eq__(other)
    return self.signature == other.signature
|
(self, other)
|
712,912
|
dbus_next.signature
|
__init__
| null |
def __init__(self, token):
self.token = token
self.children = []
self._signature = None
|
(self, token)
|
712,913
|
dbus_next.signature
|
_collapse
| null |
def _collapse(self):
if self.token not in 'a({':
return self.token
signature = [self.token]
for child in self.children:
signature.append(child._collapse())
if self.token == '(':
signature.append(')')
elif self.token == '{':
signature.append('}')
return ''.join(signature)
|
(self)
|
712,914
|
dbus_next.signature
|
_parse_next
| null |
@staticmethod
def _parse_next(signature):
    """Parse one complete type from the front of *signature*.

    Returns ``(type, rest)`` where ``type`` is the parsed SignatureType
    (or ``None`` when *signature* is empty) and ``rest`` is the
    unconsumed remainder of the signature string.

    :raises InvalidSignatureError: on an unknown token or an
        unterminated container type.
    """
    if not signature:
        return (None, '')
    token = signature[0]
    if token not in SignatureType._tokens:
        raise InvalidSignatureError(f'got unexpected token: "{token}"')
    # container types
    if token == 'a':
        # array: exactly one element type follows the "a"
        self = SignatureType('a')
        (child, signature) = SignatureType._parse_next(signature[1:])
        if not child:
            raise InvalidSignatureError('missing type for array')
        self.children.append(child)
        return (self, signature)
    elif token == '(':
        # struct: member types repeat until the closing ")"
        self = SignatureType('(')
        signature = signature[1:]
        while True:
            (child, signature) = SignatureType._parse_next(signature)
            if not signature:
                raise InvalidSignatureError('missing closing ")" for struct')
            self.children.append(child)
            if signature[0] == ')':
                return (self, signature[1:])
    elif token == '{':
        # dict entry: one simple (childless) key type, one value type,
        # then the closing "}"
        self = SignatureType('{')
        signature = signature[1:]
        (key_child, signature) = SignatureType._parse_next(signature)
        if not key_child or len(key_child.children):
            raise InvalidSignatureError('expected a simple type for dict entry key')
        self.children.append(key_child)
        (value_child, signature) = SignatureType._parse_next(signature)
        if not value_child:
            raise InvalidSignatureError('expected a value for dict entry')
        if not signature or signature[0] != '}':
            raise InvalidSignatureError('missing closing "}" for dict entry')
        self.children.append(value_child)
        return (self, signature[1:])
    # basic type
    return (SignatureType(token), signature[1:])
|
(signature)
|
712,915
|
dbus_next.signature
|
_verify_array
| null |
def _verify_array(self, body):
child_type = self.children[0]
if child_type.token == '{':
if not isinstance(body, dict):
raise SignatureBodyMismatchError(
f'DBus ARRAY type "a" with DICT_ENTRY child must be Python type "dict", got {type(body)}'
)
for key, value in body.items():
child_type.children[0].verify(key)
child_type.children[1].verify(value)
elif child_type.token == 'y':
if not isinstance(body, bytes):
raise SignatureBodyMismatchError(
f'DBus ARRAY type "a" with BYTE child must be Python type "bytes", got {type(body)}'
)
# no need to verify children
else:
if not isinstance(body, list):
raise SignatureBodyMismatchError(
f'DBus ARRAY type "a" must be Python type "list", got {type(body)}')
for member in body:
child_type.verify(member)
|
(self, body)
|
712,916
|
dbus_next.signature
|
_verify_boolean
| null |
def _verify_boolean(self, body):
if not isinstance(body, bool):
raise SignatureBodyMismatchError(
f'DBus BOOLEAN type "b" must be Python type "bool", got {type(body)}')
|
(self, body)
|
712,917
|
dbus_next.signature
|
_verify_byte
| null |
def _verify_byte(self, body):
BYTE_MIN = 0x00
BYTE_MAX = 0xff
if not isinstance(body, int):
raise SignatureBodyMismatchError(
f'DBus BYTE type "y" must be Python type "int", got {type(body)}')
if body < BYTE_MIN or body > BYTE_MAX:
raise SignatureBodyMismatchError(
f'DBus BYTE type must be between {BYTE_MIN} and {BYTE_MAX}')
|
(self, body)
|
712,918
|
dbus_next.signature
|
_verify_double
| null |
def _verify_double(self, body):
if not isinstance(body, float) and not isinstance(body, int):
raise SignatureBodyMismatchError(
f'DBus DOUBLE type "d" must be Python type "float" or "int", got {type(body)}')
|
(self, body)
|
712,919
|
dbus_next.signature
|
_verify_int16
| null |
def _verify_int16(self, body):
INT16_MIN = -0x7fff - 1
INT16_MAX = 0x7fff
if not isinstance(body, int):
raise SignatureBodyMismatchError(
f'DBus INT16 type "n" must be Python type "int", got {type(body)}')
elif body > INT16_MAX or body < INT16_MIN:
raise SignatureBodyMismatchError(
f'DBus INT16 type "n" must be between {INT16_MIN} and {INT16_MAX}')
|
(self, body)
|
712,920
|
dbus_next.signature
|
_verify_int32
| null |
def _verify_int32(self, body):
INT32_MIN = -0x7fffffff - 1
INT32_MAX = 0x7fffffff
if not isinstance(body, int):
raise SignatureBodyMismatchError(
f'DBus INT32 type "i" must be Python type "int", got {type(body)}')
elif body > INT32_MAX or body < INT32_MIN:
raise SignatureBodyMismatchError(
f'DBus INT32 type "i" must be between {INT32_MIN} and {INT32_MAX}')
|
(self, body)
|
712,921
|
dbus_next.signature
|
_verify_int64
| null |
def _verify_int64(self, body):
INT64_MAX = 9223372036854775807
INT64_MIN = -INT64_MAX - 1
if not isinstance(body, int):
raise SignatureBodyMismatchError(
f'DBus INT64 type "x" must be Python type "int", got {type(body)}')
elif body > INT64_MAX or body < INT64_MIN:
raise SignatureBodyMismatchError(
f'DBus INT64 type "x" must be between {INT64_MIN} and {INT64_MAX}')
|
(self, body)
|
712,922
|
dbus_next.signature
|
_verify_object_path
| null |
def _verify_object_path(self, body):
    """Validate a DBus OBJECT_PATH ("o") value via the shared path validator."""
    if is_object_path_valid(body):
        return
    raise SignatureBodyMismatchError(
        'DBus OBJECT_PATH type "o" must be a valid object path')
|
(self, body)
|
712,923
|
dbus_next.signature
|
_verify_signature
| null |
def _verify_signature(self, body):
# I guess we could run it through the SignatureTree parser instead
if not isinstance(body, str):
raise SignatureBodyMismatchError(
f'DBus SIGNATURE type "g" must be Python type "str", got {type(body)}')
if len(body.encode()) > 0xff:
raise SignatureBodyMismatchError('DBus SIGNATURE type "g" must be less than 256 bytes')
|
(self, body)
|
712,924
|
dbus_next.signature
|
_verify_string
| null |
def _verify_string(self, body):
if not isinstance(body, str):
raise SignatureBodyMismatchError(
f'DBus STRING type "s" must be Python type "str", got {type(body)}')
|
(self, body)
|
712,925
|
dbus_next.signature
|
_verify_struct
| null |
def _verify_struct(self, body):
# TODO allow tuples
if not isinstance(body, list):
raise SignatureBodyMismatchError(
f'DBus STRUCT type "(" must be Python type "list", got {type(body)}')
if len(body) != len(self.children):
raise SignatureBodyMismatchError(
'DBus STRUCT type "(" must have Python list members equal to the number of struct type members'
)
for i, member in enumerate(body):
self.children[i].verify(member)
|
(self, body)
|
712,926
|
dbus_next.signature
|
_verify_uint16
| null |
def _verify_uint16(self, body):
UINT16_MIN = 0
UINT16_MAX = 0xffff
if not isinstance(body, int):
raise SignatureBodyMismatchError(
f'DBus UINT16 type "q" must be Python type "int", got {type(body)}')
elif body > UINT16_MAX or body < UINT16_MIN:
raise SignatureBodyMismatchError(
f'DBus UINT16 type "q" must be between {UINT16_MIN} and {UINT16_MAX}')
|
(self, body)
|
712,927
|
dbus_next.signature
|
_verify_uint32
| null |
def _verify_uint32(self, body):
UINT32_MIN = 0
UINT32_MAX = 0xffffffff
if not isinstance(body, int):
raise SignatureBodyMismatchError(
f'DBus UINT32 type "u" must be Python type "int", got {type(body)}')
elif body > UINT32_MAX or body < UINT32_MIN:
raise SignatureBodyMismatchError(
f'DBus UINT32 type "u" must be between {UINT32_MIN} and {UINT32_MAX}')
|
(self, body)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.